code
stringlengths 1
199k
|
|---|
import tensorflow as tf
import hyperchamber as hc
import hypergan as hg
import numpy as np
from hypergan.losses.least_squares_loss import LeastSquaresLoss
from hypergan.ops import TensorflowOps
from unittest.mock import MagicMock
from tests.mocks import mock_gan
# Shared loss configuration used by every test case in this module.
loss_config = {'test': True, 'reduce':'reduce_mean', 'labels': [0,1,0]}
class LeastSquaresLossTest(tf.test.TestCase):
    """Unit tests for LeastSquaresLoss construction and output shapes."""

    def test_config(self):
        # The loss must retain the config dict it was constructed with.
        with self.test_session():
            loss = LeastSquaresLoss(mock_gan(), loss_config)
            self.assertTrue(loss.config.test)

    def test_create(self):
        # Both discriminator and generator losses should be scalars
        # (empty shape, so the shape components sum to zero).
        with self.test_session():
            gan = mock_gan()
            d_loss, g_loss = LeastSquaresLoss(gan, loss_config).create()
            for tensor in (d_loss, g_loss):
                self.assertEqual(sum(gan.ops.shape(tensor)), 0)
# Run the test suite when this module is executed directly.
if __name__ == "__main__":
    tf.test.main()
|
class Kifu:
    """Records a game's moves in order and allows undoing the latest one."""

    def __init__(self):
        # Chronological list of move tuples.
        self.kifu = []

    def add(self, from_x, from_y, to_x, to_y, promote, koma):
        """Append one move record (coordinates, promotion flag, piece)."""
        move = (from_x, from_y, to_x, to_y, promote, koma)
        self.kifu.append(move)

    def pop(self):
        """Remove and return the most recent move (IndexError when empty)."""
        return self.kifu.pop()
|
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
# Boolean desk-UI feature flags on a Role; all are cleared when the role
# has no desk access (see Role.set_desk_properties below).
desk_properties = ("search_bar", "notifications", "chat", "list_sidebar",
    "bulk_actions", "view_switcher", "form_sidebar", "timeline", "dashboard")
class Role(Document):
    """Controller for the Role doctype."""

    def before_rename(self, old, new, merge=False):
        # Built-in roles must keep their names.
        if old in ("Guest", "Administrator", "System Manager", "All"):
            frappe.throw(frappe._("Standard roles cannot be renamed"))

    def after_insert(self):
        # Invalidate the cached role list for the Administrator account.
        frappe.cache().hdel('roles', 'Administrator')

    def validate(self):
        if self.disabled:
            self.disable_role()
        else:
            self.set_desk_properties()

    def disable_role(self):
        if self.name not in ("Guest", "Administrator", "System Manager", "All"):
            self.remove_roles()
        else:
            frappe.throw(frappe._("Standard roles cannot be disabled"))

    def set_desk_properties(self):
        # set if desk_access is not allowed, unset all desk properties
        if self.name == 'Guest':
            self.desk_access = 0
        if not self.desk_access:
            for prop in desk_properties:
                self.set(prop, 0)

    def remove_roles(self):
        frappe.db.sql("delete from `tabHas Role` where role = %s", self.name)
        frappe.clear_cache()

    def on_update(self):
        '''update system user desk access if this has changed in this update'''
        if frappe.flags.in_install:
            return
        if not self.has_value_changed('desk_access'):
            return
        # Re-evaluate the system/website user type of every user holding
        # this role, saving only those whose type actually changed.
        for user_name in get_users(self.name):
            user = frappe.get_doc('User', user_name)
            previous_type = user.user_type
            user.set_system_user()
            if previous_type != user.user_type:
                user.save()
def get_info_based_on_role(role, field='email'):
    ''' Get information of all users that have been assigned this role '''
    assigned = frappe.get_list(
        "Has Role",
        filters={"role": role, "parenttype": "User"},
        fields=["parent as user_name"],
    )
    return get_user_info(assigned, field)
def get_user_info(users, field='email'):
    ''' Fetch details about users for the specified field '''
    excluded = ["admin@example.com", "guest@example.com"]
    info_list = []
    for user in users:
        user_info, enabled = frappe.db.get_value(
            "User", user.get("user_name"), [field, "enabled"]
        )
        # Skip disabled accounts and the built-in admin/guest addresses.
        if enabled and user_info not in excluded:
            info_list.append(user_info)
    return info_list
def get_users(role):
    """Return the names of all User documents holding the given role."""
    assignments = frappe.get_all(
        "Has Role",
        filters={"role": role, "parenttype": "User"},
        fields=["parent"],
    )
    return [row.parent for row in assignments]
@frappe.whitelist()
@frappe.validate_and_sanitize_search_inputs
def role_query(doctype, txt, searchfield, start, page_len, filters):
    """Link-field search for roles matching *txt*, excluding custom roles."""
    query_filters = [
        ['Role', 'name', 'like', '%{}%'.format(txt)],
        ['Role', 'is_custom', '=', 0],
    ]
    if filters and isinstance(filters, list):
        query_filters.extend(filters)
    return frappe.get_all(
        'Role',
        limit_start=start,
        limit_page_length=page_len,
        filters=query_filters,
        as_list=1,
    )
|
"""
Created on Tue Dec 13 23:10:40 2016
@author: zhouyu
"""
import pandas as pd
import numpy as np
import os
import re
import nltk
from nltk.corpus import stopwords
from bs4 import BeautifulSoup
# NOTE(review): hard-coded absolute path; this script only runs on the
# author's machine as written.
os.chdir('/Users/zhouyu/Documents/Zhou_Yu/DS/kaggle_challenge/text processing')
import glob
# Concatenate every CSV in the working directory into one training frame.
alltrainfiles = glob.glob("*.csv")
raw_text =pd.concat((pd.read_csv(f,index_col = None, header =0) for f in alltrainfiles),ignore_index = True)
def text_to_words(rawtext):
    """Lower-case *rawtext*, split it into tokens, and drop English stop words.

    Tokens are maximal runs of letters, digits, underscore, '+' or '-'.
    Returns the surviving tokens joined by single spaces.
    """
    # Split on anything that is not a word character, '+' or '-'.
    reg_c = re.compile('[^a-zA-Z0-9_\\+\\-]')
    stops = set(stopwords.words("english"))
    # Single pass: drop empty fragments and stop words together
    # (the original built an intermediate list and used `not w in stops`).
    meaningful_words = [w for w in reg_c.split(rawtext.lower())
                        if w != '' and w not in stops]
    return " ".join(meaningful_words)
def target_to_words(rawtext):
    """Return the first non-stop-word token of *rawtext*, lower-cased.

    NOTE(review): raises IndexError when the input is empty or contains
    only stop words -- callers must guarantee at least one usable tag.
    """
    splitter = re.compile('[^a-zA-Z0-9_\\+\\-]')
    stops = set(stopwords.words("english"))
    tokens = (t for t in splitter.split(rawtext.lower()) if t != '')
    meaningful = [t for t in tokens if t not in stops]
    return meaningful[0]
# Clean every post (title + body) and extract its first tag as the label.
cleaned_post = []
cleaned_target = []
sz = raw_text.shape[0]
for i in range(0,sz):
    raw_post = raw_text['title'][i]+' '+raw_text['content'][i]
    # Strip HTML markup before tokenizing.
    raw_post = BeautifulSoup(raw_post).get_text()
    cleaned_post.append(text_to_words(raw_post))
    cleaned_target.append(target_to_words(raw_text['tags'][i]))
    # Progress indicator (Python 2 print statement).
    if((i+1)%1000==0):
        print "Cleanning %d of %d\n" % (i+1,sz)
from sklearn.feature_extraction.text import CountVectorizer
# Bag-of-words counts limited to the 5000 most frequent tokens.
count_vect = CountVectorizer(analyzer = "word", \
                             tokenizer = None, \
                             preprocessor = None, \
                             stop_words = None, \
                             max_features = 5000)
X_train_counts = count_vect.fit_transform(cleaned_post)
from sklearn.feature_extraction.text import TfidfTransformer
# Term-frequency scaling only (use_idf=False): no inverse-document weighting.
tf_transformer = TfidfTransformer(use_idf = False).fit(X_train_counts)
X_train_tf = tf_transformer.transform(X_train_counts)
from sklearn.ensemble import RandomForestClassifier
rf = RandomForestClassifier(n_estimators = 10)
forest = rf.fit(X_train_tf, cleaned_target)
# NOTE(review): predictions are made on the training set itself, so the
# report below measures training accuracy only.
pred = rf.predict(X_train_tf)
from sklearn.metrics import confusion_matrix
from sklearn.metrics import classification_report
from collections import OrderedDict
import matplotlib.pyplot as plt
import itertools
def plot_confusion_matrix(cm, classes,
                          normalize=False,
                          title='Confusion matrix',
                          cmap=plt.cm.Blues):
    """
    This function prints and plots the confusion matrix.
    Normalization can be applied by setting `normalize=True`.
    """
    plt.imshow(cm, interpolation='nearest', cmap=cmap)
    plt.title(title)
    plt.colorbar()
    tick_marks = np.arange(len(classes))
    plt.xticks(tick_marks, classes, rotation=45)
    plt.yticks(tick_marks, classes)

    if normalize:
        # Row-normalize so each true class sums to 1.
        cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
        print("Normalized confusion matrix")
    else:
        print('Confusion matrix, without normalization')
    print(cm)

    # Annotate every cell, switching text color on dark backgrounds.
    thresh = cm.max() / 2.
    for row, col in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
        plt.text(col, row, cm[row, col],
                 horizontalalignment="center",
                 color="white" if cm[row, col] > thresh else "black")

    plt.tight_layout()
    plt.ylabel('True label')
    plt.xlabel('Predicted label')
cnf_matrix = confusion_matrix(cleaned_target,pred)
target_names = list(OrderedDict.fromkeys(cleaned_target))
print(classification_report(cleaned_target,pred,target_names = target_names))
# NOTE(review): these three imports duplicate the ones above.
from sklearn.metrics import confusion_matrix
from sklearn.metrics import classification_report
from collections import OrderedDict
# Baseline: for 1000 random posts, "predict" the single highest-tf term.
# NOTE(review): 87000 is a hard-coded corpus size -- presumably the number
# of training rows; confirm it matches raw_text.shape[0].
sample = np.random.choice(87000,1000,replace = False)
tf_pred = []
tf_target = []
for i in range(0,1000):
    r = sample[i];
    tf_target.append(cleaned_target[r])
    tf_post = X_train_tf.getrow(r).toarray()
    tf_post_max = tf_post.argmax()
    tf_pred.append(count_vect.get_feature_names()[tf_post_max])
tf_cnf_matrix = confusion_matrix(tf_target,tf_pred)
target_names = list(OrderedDict.fromkeys(tf_pred+tf_target))
print(classification_report(tf_target, tf_pred,target_names =target_names))
test = pd.read_csv('test/test.csv')
cleaned_test = []
test_sz = test.shape[0]
for i in range(0,test_sz):
    test_post = test['title'][i]+' '+test['content'][i]
    test_post = BeautifulSoup(test_post).get_text()
    cleaned_test.append(text_to_words(test_post))
    if((i+1)%1000==0):
        print "Cleanning %d of %d\n" % (i+1,test_sz)
# NOTE(review): fit_transform re-learns a NEW vocabulary on the test set,
# so column indices no longer match training; reusing the training
# vocabulary would require count_vect.transform(...) instead.
X_test_counts = count_vect.fit_transform(cleaned_test)
X_test_tf = tf_transformer.transform(X_test_counts)
# NOTE(review): the forest was trained on tf features but predicts on raw
# counts here; also this `result` is overwritten below and never used.
result = forest.predict(X_test_counts)
test_pred = []
for i in range(0,test_sz):
    tf_test = X_test_tf.getrow(i).toarray()
    # just return one tag
    #tf_test_max = tf_test.argmax()
    #test_pred.append(count_vect.get_feature_names()[tf_test_max])
    # Take the 4 highest-tf terms of each post as its predicted tags.
    ind = np.argpartition(tf_test,-4)[:,-4:]
    pred_tags = [count_vect.get_feature_names()[j] for j in ind[0,:].tolist()]
    test_pred.append( " ".join(pred_tags))
    if((i+1)%1000==0):
        print "Predicting %d of %d\n" % (i+1,test_sz)
result = test_pred
submission = pd.read_csv('test/sample_submission.csv')
submission.iloc[:,1] = result
submission.to_csv('test/submission.csv',index = None)
# Hyperparameters for the NMF topic-model experiment below.
n_features = 5000
n_topics = 10
n_samples = test_sz
n_top_words = 4
def get_top_words(model, feature_names, n_top_words):
    """For each topic in *model*, return its n_top_words highest-weighted
    feature names joined by single spaces, one string per topic."""
    res = []
    for component in model.components_:
        # argsort ascending, then take the last n_top_words in reverse
        # (highest weight first).
        best = component.argsort()[:-n_top_words - 1:-1]
        res.append(" ".join(feature_names[i] for i in best))
    return res
from sklearn.feature_extraction.text import TfidfVectorizer, CountVectorizer
from sklearn.decomposition import NMF
from time import time
# Tf-idf features for the topic model; drops terms in >95% or <2 documents.
tfidf_vectorizer = TfidfVectorizer(max_df=0.95, min_df=2,
                                   max_features=n_features,
                                   stop_words='english')
tfidf = tfidf_vectorizer.fit_transform(cleaned_test)
print("Fitting the NMF model (Frobenius norm) with tf-idf features, "
      "n_samples=%d and n_features=%d..."
      % (n_samples, n_features))
t0 = time()
# NOTE(review): the `alpha` argument and get_feature_names() below are
# deprecated/removed in newer scikit-learn releases -- confirm the pinned
# version before upgrading.
nmf = NMF(n_components=n_topics, random_state=1,
          alpha=.1, l1_ratio=.5).fit(tfidf)
print("done in %0.3fs." % (time() - t0))
print("\nTopics in NMF model (Frobenius norm):")
tfidf_feature_names = tfidf_vectorizer.get_feature_names()
# Final result: the top 4 terms of each of the 10 topics.
result = get_top_words(nmf,tfidf_feature_names,n_top_words)
|
# Celery configuration: task results go to a local SQLite database and the
# broker runs over SQLAlchemy/SQLite -- a development-only setup.
CELERY_RESULT_BACKEND = 'database'
CELERY_RESULT_DBURI = 'sqlite:///mydatabase.db'
BROKER_TRANSPORT = 'sqlalchemy'
BROKER_HOST = 'sqlite:///tasks.db'
BROKER_PORT = 5672
BROKER_VHOST = '/'
BROKER_USER = 'guest'
BROKER_PASSWORD = 'guest'
# Single worker process; hard-kill tasks that exceed 20 seconds.
CELERYD_CONCURRENCY = 1
CELERYD_TASK_TIME_LIMIT = 20
CELERYD_LOG_LEVEL = 'INFO'
|
def add_without_op(x, y):
    """Add two integers using only bitwise operations; print and return the sum.

    Works on 32-bit two's-complement representations. The masking is the
    bug fix: with Python's unbounded ints, a negative operand makes the
    carry shift left forever and the original loop never terminated.
    """
    MASK = 0xFFFFFFFF
    x &= MASK
    y &= MASK
    while y != 0:
        carry = (x & y) & MASK
        x = (x ^ y) & MASK
        y = (carry << 1) & MASK
    # Reinterpret the 32-bit pattern as a signed value.
    if x > 0x7FFFFFFF:
        x -= 0x100000000
    print(x)
    return x
def main():
    # Read two space-separated integers from stdin and add them bitwise.
    x, y = map(int, input().split())
    add_without_op(x, y)


if __name__ == "__main__":
    main()
|
__author__ = 'Sean Yu'
__mail__ = 'try.dash.now@gmail.com'
import sqlite3
def CreateTable(dbname, table, table_define):
    """Create *table* (with column spec *table_define*) in SQLite db *dbname*.

    NOTE(review): *table* and *table_define* are interpolated directly into
    the SQL text (DDL cannot be parameterized) -- never pass untrusted input.
    """
    db = sqlite3.connect(dbname)
    try:
        # Connection.execute creates a cursor implicitly; try/finally fixes
        # the original's leak of the connection when execute raised.
        db.execute("create table %s ( %s )" % (table, table_define))
        db.commit()
    finally:
        db.close()
def InsertRecord(dbname, table, record):
    """Insert *record* (a pre-formatted SQL values list) into *table*.

    NOTE(review): *table* and *record* are interpolated into the SQL text --
    never pass untrusted input; prefer parameterized values where possible.
    """
    db = sqlite3.connect(dbname)
    try:
        # try/finally fixes the original's connection leak on error.
        db.execute("insert into %s values(%s)" % (table, record))
        db.commit()
    finally:
        db.close()
def UpdateRecord(dbname, table, action, condition):
    """Run ``update <table> set <action> where <condition>`` against *dbname*.

    Example: UpdateRecord(db, 'tasks', "status='completed'", "id = 0")
    NOTE(review): all three fragments are interpolated into the SQL text --
    never pass untrusted input.
    """
    db = sqlite3.connect(dbname)
    try:
        # try/finally fixes the original's connection leak on error.
        db.execute("update %s set %s where %s" % (table, action, condition))
        db.commit()
    finally:
        db.close()
def RemoveRecord(dbname, table, condition):
    """Delete the rows of *table* matching *condition*.

    NOTE(review): *table* and *condition* are interpolated into the SQL
    text -- never pass untrusted input.
    """
    db = sqlite3.connect(dbname)
    try:
        # try/finally fixes the original's connection leak on error.
        db.execute("delete from %s where %s" % (table, condition))
        db.commit()
    finally:
        db.close()
def FetchRecord(dbname, table, condition=''):
    """Return all rows of *table* (optionally filtered) as a list of lists.

    NOTE(review): *table* and *condition* are interpolated into the SQL
    text -- never pass untrusted input.
    """
    if condition != '':
        condition = "where %s" % condition
    db = sqlite3.connect(dbname)
    try:
        # try/finally fixes the original's connection leak on error; the
        # original's commit after a SELECT was a no-op and is dropped.
        rows = db.execute("select * from %s %s" % (table, condition))
        return [list(row) for row in rows]
    finally:
        db.close()
def FetchOne(dbname, table, condition=''):
    """Return the first matching row of *table* as a list, or None.

    NOTE(review): *table* and *condition* are interpolated into the SQL
    text -- never pass untrusted input.
    """
    if condition != '':
        condition = "where %s" % condition
    db = sqlite3.connect(dbname)
    try:
        # try/finally fixes the original's connection leak on error; the
        # original's commit after a SELECT was a no-op and is dropped.
        cu = db.execute("select * from %s %s" % (table, condition))
        row = cu.fetchone()
        return list(row) if row else None
    finally:
        db.close()
|
import logging
from mwoauth import ConsumerToken, Handshaker, AccessToken
from mwoauth.errors import OAuthException
import urllib.parse
from django.conf import settings
from django.contrib import messages
from django.contrib.auth import login, authenticate
from django.contrib.auth.models import User
from django.core.exceptions import DisallowedHost, PermissionDenied
from django.urls import reverse_lazy
from django.http import HttpResponseRedirect
from django.http.request import QueryDict
from django.views.generic.base import View
from django.utils.translation import get_language, gettext as _
from urllib.parse import urlencode
from .models import Editor
logger = logging.getLogger(__name__)
def _localize_oauth_redirect(redirect):
    """
    Given an appropriate mediawiki oauth handshake url, return one that will
    present the user with a login page of their preferred language.
    """
    logger.info("Localizing oauth handshake URL.")

    parsed = urllib.parse.urlparse(redirect)
    query = urllib.parse.parse_qs(parsed.query)

    # Rebuild the URL against Special:UserLogin, carrying the original
    # handshake parameters through returnto/returntoquery (the %26/%3D
    # sequences are pre-encoded '&' and '=' inside returntoquery).
    return (
        parsed.scheme
        + "://"
        + parsed.netloc
        + parsed.path
        + "?title=" + "Special:UserLogin"
        + "&uselang=" + get_language()
        + "&returnto=" + str(query["title"][0])
        + "&returntoquery="
        + "%26oauth_consumer_key%3D" + str(query["oauth_consumer_key"][0])
        + "%26oauth_token%3D" + str(query["oauth_token"][0])
    )
def _get_handshaker():
    """Build a Handshaker for the OAuth provider configured in settings."""
    consumer_token = ConsumerToken(
        settings.TWLIGHT_OAUTH_CONSUMER_KEY, settings.TWLIGHT_OAUTH_CONSUMER_SECRET
    )
    return Handshaker(settings.TWLIGHT_OAUTH_PROVIDER_URL, consumer_token)
def _dehydrate_token(token):
"""
Convert the request token into a dict suitable for storing in the session.
"""
session_token = {}
session_token["key"] = token.key
session_token["secret"] = token.secret
return session_token
def _rehydrate_token(token):
    """
    Convert the stored dict back into a request token that we can use for
    getting an access grant.
    """
    return ConsumerToken(token["key"], token["secret"])
class OAuthBackend(object):
    """Django authentication backend that signs users in via Wikimedia OAuth."""

    def _get_username(self, identity):
        # The Username is globally unique, but Wikipedia allows it to
        # have characters that the Django username system rejects. However,
        # wiki userID should be unique, and limited to ASCII.
        return "{sub}".format(sub=identity["sub"])

    def _create_user(self, identity):
        """Create a Django User for this identity; no usable password is set."""
        # This can't be super informative because we don't want to log
        # identities.
        logger.info("Creating user.")

        # if not self._meets_minimum_requirement(identity):
        # This needs to be reworked to actually check against global_userinfo.
        # Don't create a User or Editor if this person does not meet the
        # minimum account quality requirement. It would be nice to provide
        # some user feedback here, but we can't; exception messages don't
        # get passed on as template context in Django 1.8. (They do in
        # 1.10, so this can be revisited in future.)
        # logger.warning('User did not meet minimum requirements; not created.')
        # messages.add_message (request, messages.WARNING,
        #     _('You do not meet the minimum requirements.'))
        # raise PermissionDenied

        # -------------------------- Create the user ---------------------------
        try:
            email = identity["email"]
        except KeyError:
            email = None

        username = self._get_username(identity)

        # Since we are not providing a password argument, this will call
        # set_unusable_password, which is exactly what we want; users created
        # via OAuth should only be allowed to log in via OAuth.
        user = User.objects.create_user(username=username, email=email)

        logger.info("User user successfully created.")
        return user

    def _create_editor(self, user, identity):
        """Create and return the Editor profile attached to *user*."""
        # ------------------------- Create the editor --------------------------
        logger.info("Creating editor.")
        editor = Editor()

        editor.user = user
        editor.wp_sub = identity["sub"]
        lang = get_language()
        editor.update_from_wikipedia(identity, lang)  # This call also saves the editor

        logger.info("Editor successfully created.")
        return editor

    def _create_user_and_editor(self, identity):
        """Create a fresh User plus its Editor for a first-time login."""
        user = self._create_user(identity)
        editor = self._create_editor(user, identity)
        return user, editor

    def _get_and_update_user_from_identity(self, identity):
        """
        If we have an Editor and User matching the identity returned by
        Wikipedia, update the editor with the identity parameters and return its
        associated user. If we don't, create an Editor and User, and return that
        user.

        If the wikipedia account does not meet our eligibility criteria, create
        a TWLight account if needed, but set it as inactive. Also deactivate
        any existing accounts that have become ineligible.

        Also return a boolean that is True if we created a user during this
        call and False if we did not.
        """
        logger.info("Attempting to update editor after OAuth login.")
        try:
            username = self._get_username(identity)
            user = User.objects.get(username=username)

            # This login path should only be used for accounts created via
            # Wikipedia login, which all have editor objects.
            if hasattr(user, "editor"):
                editor = user.editor
                lang = get_language()
                editor.update_from_wikipedia(
                    identity, lang
                )  # This call also saves the editor
                logger.info("Editor updated.")
                created = False
            else:
                try:
                    logger.warning(
                        "A user tried using the Wikipedia OAuth "
                        "login path but does not have an attached editor."
                    )
                    editor = self._create_editor(user, identity)
                    created = True
                # FIX: was a bare `except:`, which would also swallow
                # KeyboardInterrupt/SystemExit; Exception is the widest net
                # we actually want here.
                except Exception:
                    raise PermissionDenied

        except User.DoesNotExist:
            logger.info("Can't find user; creating one.")
            user, editor = self._create_user_and_editor(identity)
            created = True
        return user, created

    def authenticate(self, request=None, access_token=None, handshaker=None):
        """Validate an OAuth access token; return the User or None."""
        logger.info("Authenticating user...")
        if not request or not access_token or not handshaker:
            logger.info(
                "Missing OAuth authentication elements; falling back"
                "to another authentication method."
            )
            # You must have meant to use a different authentication backend.
            # Returning None will make Django keep going down its list of
            # options.
            return None

        try:
            assert isinstance(access_token, AccessToken)
        except AssertionError as e:
            logger.exception(e)
            return None

        # Get identifying information about the user. This doubles as a way
        # to authenticate the access token, which only Wikimedia can do,
        # and thereby to authenticate the user (which is hard for us to do as
        # we have no password.)
        logger.info("Identifying user...")
        try:
            identity = handshaker.identify(access_token, 15)
        except OAuthException as e:
            logger.warning(e)
            messages.add_message(
                request,
                messages.WARNING,
                # Translators: This error message is shown when there's a problem with the authenticated login process.
                _("You tried to log in but presented an invalid access token."),
            )
            raise PermissionDenied

        # Get or create the user.
        logger.info("User has been identified; getting or creating user.")
        user, created = self._get_and_update_user_from_identity(identity)

        if created:
            try:
                user.editor.save()
            except AssertionError:
                # This was used to handle users not setting a home wiki
                # but that information is no longer collected
                pass
        else:
            logger.info("User has been updated.")

        request.session["user_created"] = created

        # The authenticate() function of a Django auth backend must return
        # the user.
        return user

    def get_user(self, user_id):
        """Required Django backend hook: fetch a User by primary key."""
        try:
            return User.objects.get(pk=user_id)
        except User.DoesNotExist as e:
            logger.exception(e)
            return None
class OAuthInitializeView(View):
    """
    Ask Wikipedia for a temporary key/secret for the user, and redirect
    them to their home Wikipedia to confirm authorization.
    """

    def get(self, request, *args, **kwargs):
        # The site might be running under multiple URLs, so find out the current
        # one (and make sure it's legit).
        # The Sites framework was designed for different URLs that correspond to
        # different databases or functionality - it's not a good fit here.
        domain = self.request.get_host()
        try:
            assert domain in settings.ALLOWED_HOSTS  # safety first!
        except (AssertionError, DisallowedHost) as e:
            logger.exception(e)
            messages.add_message(
                request,
                messages.WARNING,
                # Translators: This message is shown when the OAuth login process fails because the request came from the wrong website. Don't translate {domain}.
                _("{domain} is not an allowed host.").format(domain=domain),
            )
            raise PermissionDenied

        # Try to capture the relevant page state, including desired destination
        try:
            request.session["get"] = request.GET
            logger.info("Found get parameters for post-login redirection.")
        except Exception as e:
            logger.warning(e)
            pass

        # If the user has already logged in, let's not spam the OAuth provider.
        if self.request.user.is_authenticated:
            # We're using this twice. Not very DRY.
            # Send user either to the destination specified in the 'next'
            # parameter or to their own editor detail page.
            try:
                # Create a QueryDict from the 'get' session dict.
                query_dict = QueryDict(urlencode(request.session["get"]), mutable=True)
                # Pop the 'next' parameter out of the QueryDict.
                next = query_dict.pop("next")
                # Set the return url to the value of 'next'. Basic.
                return_url = next[0]
                # Pop the 'from_homepage' parameter out of the QueryDict.
                # We don't need it here.
                query_dict.pop("from_homepage", None)

                # If there is anything left in the QueryDict after popping
                # 'next', append it to the return url. This preserves state
                # for filtered lists and redirected form submissions like
                # the partner suggestion form.
                if query_dict:
                    return_url += "&" + urlencode(query_dict)
                logger.info(
                    "User is already authenticated. Sending them on "
                    'for post-login redirection per "next" parameter.'
                )
            except KeyError as e:
                return_url = reverse_lazy("homepage")
                logger.warning(e)

            return HttpResponseRedirect(return_url)
        # If the user isn't logged in
        else:
            # Get handshaker for the configured wiki oauth URL.
            handshaker = _get_handshaker()
            logger.info("handshaker gotten.")

            try:
                redirect, request_token = handshaker.initiate()
            except OAuthException as e:
                logger.warning(e)
                messages.add_message(
                    request,
                    messages.WARNING,
                    # Translators: This warning message is shown to users when OAuth handshaker can't be initiated.
                    _("Handshaker not initiated, please try logging in again."),
                )
                raise PermissionDenied

            # Create a QueryDict from the 'get' session dict.
            query_dict = QueryDict(urlencode(request.session["get"]), mutable=True)
            # FIX: the original called query_dict.pop("next") unguarded here,
            # so a logged-out request without a 'next' parameter raised an
            # uncaught KeyError (HTTP 500). Default to None instead; the
            # `if return_url` branch below already handles the missing case.
            next_params = query_dict.pop("next", None)
            return_url = next_params[0] if next_params else None
            # Pop the 'from_homepage' parameter out of the QueryDict.
            from_homepage = query_dict.pop("from_homepage", None)

            if from_homepage:
                logger.info("Logging in from homepage, redirecting to Meta login")
                local_redirect = _localize_oauth_redirect(redirect)
            else:
                logger.info(
                    "Trying to access a link while not logged in, redirecting to homepage"
                )
                messages.add_message(
                    request,
                    messages.INFO,
                    # fmt: off
                    # Translators: this message is displayed to users that don't have accounts and clicked on a proxied link.
                    _("To view this link you need to be an eligible library user. Please login to continue."),
                    # fmt: on
                )
                if return_url:
                    homepage = reverse_lazy("homepage")
                    local_redirect = "{homepage}?next_url={return_url}".format(
                        homepage=homepage, return_url=return_url
                    )
                else:
                    local_redirect = reverse_lazy("homepage")

            logger.info("handshaker initiated.")
            # Stash the request token so OAuthCallbackView can complete the
            # handshake when the provider redirects back.
            self.request.session["request_token"] = _dehydrate_token(request_token)
            return HttpResponseRedirect(local_redirect)
class OAuthCallbackView(View):
    """
    Receive the redirect from Wikipedia and parse the response token.
    """

    def get(self, request, *args, **kwargs):
        # Prefer the raw query string; fall back to re-encoding request.GET
        # when it carries the two required oauth parameters.
        request_meta_qs = request.META["QUERY_STRING"]
        request_get = request.GET
        response_qs = None

        if request_meta_qs:
            response_qs = request_meta_qs
        elif "oauth_token" in request_get and "oauth_verifier" in request_get:
            response_qs = request_get.urlencode()

        # Validate the callback before doing anything with it.
        try:
            response_qs_parsed = urllib.parse.parse_qs(response_qs)
            assert "oauth_token" in response_qs_parsed
            assert "oauth_verifier" in response_qs_parsed
        except (AssertionError, TypeError) as e:
            logger.warning(e)
            messages.add_message(
                request,
                messages.WARNING,
                # Translators: This warning message is shown to users when the response received from Wikimedia OAuth servers is not a valid one.
                _("Did not receive a valid oauth response."),
            )
            raise PermissionDenied

        # Get the handshaker. It should have already been constructed by
        # OAuthInitializeView.
        domain = self.request.get_host()
        try:
            assert domain in settings.ALLOWED_HOSTS
        except (AssertionError, DisallowedHost) as e:
            logger.warning(e)
            messages.add_message(
                request,
                messages.WARNING,
                # Translators: This message is shown when the OAuth login process fails because the request came from the wrong website. Don't translate {domain}.
                _("{domain} is not an allowed host.").format(domain=domain),
            )
            raise PermissionDenied

        try:
            handshaker = _get_handshaker()
        except AssertionError as e:
            # get_handshaker will throw AssertionErrors for invalid data.
            logger.warning(e)
            messages.add_message(
                request,
                messages.WARNING,
                # Translators: This message is shown when the OAuth login process fails.
                _("Could not find handshaker."),
            )
            raise PermissionDenied

        # Get the session token placed by OAuthInitializeView.
        session_token = request.session.pop("request_token", None)

        if not session_token:
            logger.info("No session token.")
            messages.add_message(
                request,
                messages.WARNING,
                # Translators: This message is shown when the OAuth login process fails.
                _("No session token."),
            )
            raise PermissionDenied

        # Rehydrate it into a request token.
        request_token = _rehydrate_token(session_token)

        if not request_token:
            logger.warning("No request token.")
            messages.add_message(
                request,
                messages.WARNING,
                # Translators: This message is shown when the OAuth login process fails.
                _("No request token."),
            )
            raise PermissionDenied

        # See if we can complete the OAuth process.
        try:
            access_token = handshaker.complete(request_token, response_qs)
        except OAuthException as e:
            logger.warning(e)
            messages.add_message(
                request,
                messages.WARNING,
                # Translators: This message is shown when the OAuth login process fails.
                _("Access token generation failed."),
            )
            raise PermissionDenied

        # Delegates to OAuthBackend.authenticate via Django's auth machinery.
        user = authenticate(
            request=request, access_token=access_token, handshaker=handshaker
        )
        created = request.session.pop("user_created", False)

        if user and not user.is_active:
            # Do NOT log in the user.

            if created:
                messages.add_message(
                    request,
                    messages.WARNING,
                    # fmt: off
                    # Translators: If the user tries to log in, but their account does not meet certain requirements, they cannot login.
                    _("Your Wikipedia account does not meet the eligibility criteria in the terms of use, so your Wikipedia Library Card Platform account cannot be activated."),
                    # fmt: on
                )
            else:
                messages.add_message(
                    request,
                    messages.WARNING,
                    # fmt: off
                    # Translators: If the user tries to log in, but their account does not meet certain requirements, they cannot login.
                    _("Your Wikipedia account no longer meets the eligibility criteria in the terms of use, so you cannot be logged in. If you think you should be able to log in, please email wikipedialibrary@wikimedia.org."),
                    # fmt: on
                )

            return_url = reverse_lazy("terms")

        elif user:
            login(request, user)

            if created:
                messages.add_message(
                    request,
                    messages.INFO,
                    # Translators: this message is displayed to users with brand new accounts.
                    _("Welcome! Please agree to the terms of use."),
                )
                return_url = reverse_lazy("terms")
            else:
                # We're using this twice. Not very DRY.
                # Send user either to the destination specified in the 'next'
                # parameter or to their own editor detail page.
                if user.userprofile.terms_of_use:
                    try:
                        # Create a QueryDict from the 'get' session dict.
                        query_dict = QueryDict(
                            urlencode(request.session["get"]), mutable=True
                        )
                        # Pop the 'next' parameter out of the QueryDict.
                        next = query_dict.pop("next")
                        # Set the return url to the value of 'next'. Basic.
                        return_url = next[0]
                        # Pop the 'from_homepage' parameter out of the QueryDict.
                        # We don't need it here.
                        query_dict.pop("from_homepage", None)

                        # If there is anything left in the QueryDict after popping
                        # 'next', append it to the return url. This preserves state
                        # for filtered lists and redirected form submissions like
                        # the partner suggestion form.
                        if query_dict:
                            return_url += "&" + urlencode(query_dict)
                        logger.info(
                            "User authenticated. Sending them on for "
                            'post-login redirection per "next" parameter.'
                        )
                    except KeyError as e:
                        return_url = reverse_lazy("homepage")
                        logger.warning(e)
                else:
                    return_url = reverse_lazy("terms")
        else:
            # authenticate() returned None: fall back to the homepage.
            return_url = reverse_lazy("homepage")

        return HttpResponseRedirect(return_url)
|
"""
@author: Tobias
"""
"""@brief List of register classes"""
# Each inner list names the same physical x86 register at its different
# access widths, ordered smallest to largest. The first four classes also
# carry a separate high-byte name ('ah', 'bh', ...), so they have five
# entries; all others have four.
_registerClasses = [
    ['al', 'ah', 'ax', 'eax', 'rax'],
    ['bl', 'bh', 'bx', 'ebx', 'rbx'],
    ['cl', 'ch', 'cx', 'ecx', 'rcx'],
    ['dl', 'dh', 'dx', 'edx', 'rdx'],
    ['bpl', 'bp', 'ebp', 'rbp'],
    ['dil', 'di', 'edi', 'rdi'],
    ['sil', 'si', 'esi', 'rsi'],
    ['spl', 'sp', 'esp', 'rsp'],
    ['r8l', 'r8w', 'r8d', 'r8'],
    ['r9l', 'r9w', 'r9d', 'r9'],
    ['r10l', 'r10w', 'r10d', 'r10'],
    ['r11l', 'r11w', 'r11d', 'r11'],
    ['r12l', 'r12w', 'r12d', 'r12'],
    ['r13l', 'r13w', 'r13d', 'r13'],
    ['r14l', 'r14w', 'r14d', 'r14'],
    ['r15l', 'r15w', 'r15d', 'r15']
]
def get_reg_class(reg):
    """
    @brief Determines the register class of a given reg.
    All different register names that address the same register
    belong to the same register class e.g.: 'ax' and 'eax'
    @param reg name of register
    @return register class index, or None for an unknown name
    """
    # The original shadowed the `reg` parameter with its inner loop variable
    # and tracked a `found` flag that was unbound for an empty class list;
    # a membership test removes both hazards.
    lreg = reg.lower()
    for pos, reg_list in enumerate(_registerClasses):
        if lreg in reg_list:
            return pos
    return None
def get_reg_by_size(reg_class, reg_size):
    """
    @brief Determines the register by its size and class
    @param reg_class The register class of the register
    @param reg_size The size of the register in bits
    @return Name of the register, or None for invalid class/size
    """
    if reg_class >= len(_registerClasses):
        return None

    regs = _registerClasses[reg_class]
    num_regs = len(regs)
    if num_regs < 4:
        return None

    # Wider names sit at the end of each class list; the 8-bit low name
    # is always first.
    if reg_size > 32:
        return regs[num_regs - 1]   # 64-bit
    if reg_size > 16:
        return regs[num_regs - 2]   # 32-bit
    if reg_size > 8:
        return regs[num_regs - 3]   # 16-bit
    if reg_size > 0:
        return regs[0]              # 8-bit
    return None
def get_size_by_reg(reg):
    """
    @brief Determines the size of the given register
    @param reg Register name (case-insensitive)
    @return Size of register in bits, or None for an unknown register
    """
    reg_class = get_reg_class(reg)
    # FIX: the original indexed _registerClasses with the result directly,
    # raising TypeError for any unknown register name.
    if reg_class is None:
        return None
    reg_list = _registerClasses[reg_class]
    num_regs = len(reg_list)
    # FIX: get_reg_class matches case-insensitively, but the original scan
    # compared against the raw input, so e.g. 'EAX' fell through to None.
    lreg = reg.lower()
    for index, test_reg in enumerate(reg_list):
        if test_reg == lreg:
            break
    else:  # no break
        return None
    # Width is determined by position from the end of the class list.
    if index == (num_regs - 1):
        return 64
    elif index == (num_regs - 2):
        return 32
    elif index == (num_regs - 3):
        return 16
    else:
        return 8
def get_reg_class_lst(reg_class):
    """
    @brief Accessor for one register class.
    @param reg_class Index of the register class
    @return Returns the whole list of a given register class
    """
    return _registerClasses[reg_class]
|
"""Remove brief status column
Revision ID: 590
Revises: 580
Create Date: 2016-03-03 14:56:59.218753
"""
# Alembic revision identifiers.
revision = '590'
down_revision = '580'
from alembic import op
import sqlalchemy as sa
def upgrade():
    # Drop the denormalized status column (it can be derived from
    # published_at -- see the backfill in downgrade()).
    op.drop_column('briefs', 'status')
def downgrade():
    """Restore briefs.status, backfill it from published_at, then make it NOT NULL."""
    op.add_column('briefs', sa.Column('status', sa.VARCHAR(), autoincrement=False, nullable=True))
    op.execute("""
    UPDATE briefs SET status = (CASE WHEN published_at is not NULL THEN 'live' ELSE 'draft' END)
    """)
    # FIX: op.alter_column takes the column *name* plus keyword options;
    # the original passed a sa.Column object as the name argument.
    op.alter_column('briefs', 'status', nullable=False)
|
class Penguin(object):
    """A named penguin with a mood and an optional database id."""

    def __init__(self, name, mood, id=None):
        self.name = name
        self.mood = mood
        self.id = id

    def __repr__(self):
        return '< {0} the {1} penguin >'.format(self.name, self.mood)
class Goose(object):
    """A goose with a favorite penguin and an optional database id."""

    def __init__(self, name, favorite_penguin, id=None):
        self.name = name
        self.favorite_penguin = favorite_penguin
        self.id = id

    def __repr__(self):
        # Embed the favorite penguin's own repr in the description.
        return '< %s, the goose that likes %s >' % (
            self.name, repr(self.favorite_penguin))
|
from unittest import TestCase
from firstinbattle.deck import Card
from firstinbattle.json_util import js
class TestJson(TestCase):
    """Round-trip tests for the custom JSON encoder."""

    def test_encode_loads(self):
        # Cards must survive encode -> loads as {'number': ..., 'suit': ...}
        # dicts, alongside plain fields.
        cards = {Card(5, 'diamond'), Card(9, 'heart')}
        payload = {
            'message': 'test_msg',
            'cards': cards,
        }
        decoded_obj = js.loads(js.encode(payload))

        self.assertEqual(decoded_obj['message'], 'test_msg')
        for card in cards:
            self.assertIn(
                {'number': card.number, 'suit': card.suit},
                decoded_obj['cards'],
            )
|
from collections import OrderedDict

# Read n words from stdin and report, in first-seen order, how many
# distinct words there are followed by each word's occurrence count.
n = int(input())
occurrences = OrderedDict()
for _ in range(n):
    word = input().strip()
    occurrences[word] = occurrences.get(word, 0) + 1
print(len(occurrences))
print(*occurrences.values())
|
from distutils.core import setup

# Packaging metadata for the ical_dict library (published to PyPI).
# NOTE(review): distutils is deprecated and removed in Python 3.12; consider
# migrating to setuptools -- left unchanged here.
setup(
    name = 'ical_dict',
    packages = ['ical_dict'],
    version = '0.2',
    description = 'A Python library to convert an .ics file into a Dictionary object.',
    author = 'Jay Ravaliya',
    author_email = 'jayrav13@gmail.com',
    url = 'https://github.com/jayrav13/ical_dict',
    download_url = 'https://github.com/jayrav13/ical_dict/tarball/0.2',
    keywords = ['calendar', 'ical', 'ics', 'json', 'dictionary', 'python'],
    classifiers = [],
)
|
import os, pygame

# Base (unscaled) framebuffer resolution.
x = 320
y = 200
size_mult = 4    # window upscaling factor applied to the display surface
bright_mult = 4  # brightness multiplier (consumed elsewhere in the program)

pygame.init()
# Put window in a consistent location. Must be set before set_mode().
# (The original assigned this environment variable twice; once suffices.)
os.environ['SDL_VIDEO_WINDOW_POS'] = str(0) + "," + str(40)
screen = pygame.display.set_mode((x*size_mult, y*size_mult))
# Off-screen surface at native resolution, presumably scaled up when
# blitted to ``screen`` -- TODO confirm against the rest of the program.
screen2 = pygame.Surface((x,y))
|
from twisted.internet import defer
from nodeset.common import log
from nodeset.core import config
class Observer(object):
    """Wraps a callable with preset positional and keyword arguments.

    ``run`` invokes the wrapped callable with the call-time arguments
    followed by the preset ones; preset keyword arguments take precedence
    over call-time keyword arguments.
    """

    def __init__(self, callable, *args, **kwargs):
        # NOTE: the parameter name shadows the builtin ``callable``; kept
        # unchanged for backward compatibility with keyword callers.
        self.callable = callable
        self.args = args
        self.kwargs = kwargs
        # Default assertion accepts anything; override via setAssert().
        self.assertfunc = lambda x: True

    def setAssert(self, assertfunc):
        """Replace the assertion predicate used by this observer."""
        self.assertfunc = assertfunc

    def run(self, *args, **kwargs):
        """Call the wrapped callable, merging preset and call-time args."""
        a = args + self.args
        # Bug fix: on Python 3, dict views cannot be concatenated with ``+``
        # (the original ``dict(kwargs.items() + self.kwargs.items())`` raises
        # TypeError). Dict unpacking preserves the original Python 2
        # semantics: self.kwargs overrides kwargs on key collisions.
        kw = {**kwargs, **self.kwargs}
        return self.callable(*a, **kw)
class ObserverCarousel(object):
    """Dispatches an event dict to a collection of observers."""

    def twist(self, observers, eventDict):
        """Fire eventDict at every observer; return the list of Deferreds."""
        if config.Configurator['verbose']:
            log.msg("twist carousel %s, %s" % (observers, eventDict))
        # maybeDeferred wraps synchronous results/exceptions into Deferreds.
        return [defer.maybeDeferred(observer.run, eventDict)
                for observer in observers]
|
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class LocalNetworkGatewaysOperations(object):
    """LocalNetworkGatewaysOperations operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.network.v2018_01_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer):
        # Collaborators supplied by the generated service client.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    def _create_or_update_initial(
        self,
        resource_group_name,  # type: str
        local_network_gateway_name,  # type: str
        parameters,  # type: "_models.LocalNetworkGateway"
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.LocalNetworkGateway"
        """Issue the initial PUT request of the create-or-update LRO.

        Called by :meth:`begin_create_or_update`; returns the first
        deserialized response (status 200 or 201) without polling.
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.LocalNetworkGateway"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-01-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json, text/json"

        # Construct URL
        url = self._create_or_update_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'localNetworkGatewayName': self._serialize.url("local_network_gateway_name", local_network_gateway_name, 'str', min_length=1),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'LocalNetworkGateway')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        # Both 200 (updated) and 201 (created) carry the gateway body.
        if response.status_code == 200:
            deserialized = self._deserialize('LocalNetworkGateway', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('LocalNetworkGateway', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/localNetworkGateways/{localNetworkGatewayName}'}  # type: ignore

    def begin_create_or_update(
        self,
        resource_group_name,  # type: str
        local_network_gateway_name,  # type: str
        parameters,  # type: "_models.LocalNetworkGateway"
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller["_models.LocalNetworkGateway"]
        """Creates or updates a local network gateway in the specified resource group.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param local_network_gateway_name: The name of the local network gateway.
        :type local_network_gateway_name: str
        :param parameters: Parameters supplied to the create or update local network gateway operation.
        :type parameters: ~azure.mgmt.network.v2018_01_01.models.LocalNetworkGateway
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either LocalNetworkGateway or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2018_01_01.models.LocalNetworkGateway]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.LocalNetworkGateway"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # cls=lambda x,y,z: x makes the initial call return the raw
            # PipelineResponse, which the poller needs to drive the LRO.
            raw_result = self._create_or_update_initial(
                resource_group_name=resource_group_name,
                local_network_gateway_name=local_network_gateway_name,
                parameters=parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Deserialize the final polled response for the caller.
            deserialized = self._deserialize('LocalNetworkGateway', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'localNetworkGatewayName': self._serialize.url("local_network_gateway_name", local_network_gateway_name, 'str', min_length=1),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }

        if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/localNetworkGateways/{localNetworkGatewayName}'}  # type: ignore

    def get(
        self,
        resource_group_name,  # type: str
        local_network_gateway_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.LocalNetworkGateway"
        """Gets the specified local network gateway in a resource group.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param local_network_gateway_name: The name of the local network gateway.
        :type local_network_gateway_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: LocalNetworkGateway, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2018_01_01.models.LocalNetworkGateway
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.LocalNetworkGateway"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-01-01"
        accept = "application/json, text/json"

        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'localNetworkGatewayName': self._serialize.url("local_network_gateway_name", local_network_gateway_name, 'str', min_length=1),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('LocalNetworkGateway', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/localNetworkGateways/{localNetworkGatewayName}'}  # type: ignore

    def _delete_initial(
        self,
        resource_group_name,  # type: str
        local_network_gateway_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        """Issue the initial DELETE request of the delete LRO.

        Called by :meth:`begin_delete`; accepts 200/202/204 and returns None.
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-01-01"

        # Construct URL
        url = self._delete_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'localNetworkGatewayName': self._serialize.url("local_network_gateway_name", local_network_gateway_name, 'str', min_length=1),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})
    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/localNetworkGateways/{localNetworkGatewayName}'}  # type: ignore

    def begin_delete(
        self,
        resource_group_name,  # type: str
        local_network_gateway_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller[None]
        """Deletes the specified local network gateway.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param local_network_gateway_name: The name of the local network gateway.
        :type local_network_gateway_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # cls=lambda x,y,z: x returns the raw PipelineResponse for polling.
            raw_result = self._delete_initial(
                resource_group_name=resource_group_name,
                local_network_gateway_name=local_network_gateway_name,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Delete has no body; only invoke the custom cls hook if given.
            if cls:
                return cls(pipeline_response, None, {})

        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'localNetworkGatewayName': self._serialize.url("local_network_gateway_name", local_network_gateway_name, 'str', min_length=1),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }

        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/localNetworkGateways/{localNetworkGatewayName}'}  # type: ignore

    def _update_tags_initial(
        self,
        resource_group_name,  # type: str
        local_network_gateway_name,  # type: str
        parameters,  # type: "_models.TagsObject"
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.LocalNetworkGateway"
        """Issue the initial PATCH request of the update-tags LRO.

        Called by :meth:`begin_update_tags`; returns the deserialized
        gateway from the 200 response without polling.
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.LocalNetworkGateway"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-01-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json, text/json"

        # Construct URL
        url = self._update_tags_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'localNetworkGatewayName': self._serialize.url("local_network_gateway_name", local_network_gateway_name, 'str', min_length=1),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'TagsObject')
        body_content_kwargs['content'] = body_content
        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('LocalNetworkGateway', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    _update_tags_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/localNetworkGateways/{localNetworkGatewayName}'}  # type: ignore

    def begin_update_tags(
        self,
        resource_group_name,  # type: str
        local_network_gateway_name,  # type: str
        parameters,  # type: "_models.TagsObject"
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller["_models.LocalNetworkGateway"]
        """Updates a local network gateway tags.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param local_network_gateway_name: The name of the local network gateway.
        :type local_network_gateway_name: str
        :param parameters: Parameters supplied to update local network gateway tags.
        :type parameters: ~azure.mgmt.network.v2018_01_01.models.TagsObject
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either LocalNetworkGateway or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2018_01_01.models.LocalNetworkGateway]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.LocalNetworkGateway"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # cls=lambda x,y,z: x returns the raw PipelineResponse for polling.
            raw_result = self._update_tags_initial(
                resource_group_name=resource_group_name,
                local_network_gateway_name=local_network_gateway_name,
                parameters=parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Deserialize the final polled response for the caller.
            deserialized = self._deserialize('LocalNetworkGateway', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'localNetworkGatewayName': self._serialize.url("local_network_gateway_name", local_network_gateway_name, 'str', min_length=1),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }

        if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/localNetworkGateways/{localNetworkGatewayName}'}  # type: ignore

    def list(
        self,
        resource_group_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["_models.LocalNetworkGatewayListResult"]
        """Gets all the local network gateways in a resource group.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either LocalNetworkGatewayListResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2018_01_01.models.LocalNetworkGatewayListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.LocalNetworkGatewayListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-01-01"
        accept = "application/json, text/json"

        def prepare_request(next_link=None):
            # Build either the first-page request or a follow-the-link request.
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # next_link already carries its own query string.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            # Return (link to next page or None, iterator over this page).
            deserialized = self._deserialize('LocalNetworkGatewayListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/localNetworkGateways'}  # type: ignore
|
import sys
sys.path.insert(0, '../../build/swig/python')
import cueify
import struct
import unittest
def TRACK_DESCRIPTOR(session, adr, ctrl, track,
                     abs_min, abs_sec, abs_frm, min, sec, frm):
    """Build one 11-byte raw full-TOC track descriptor as a list of ints.

    Byte 1 packs ADR into the high nibble and CTRL into the low nibble;
    bytes 2 and 7 are reserved zeros.
    """
    adr_ctrl = ((adr & 0xF) << 4) | (ctrl & 0xF)
    return [session, adr_ctrl, 0, track,
            abs_min, abs_sec, abs_frm, 0, min, sec, frm]
# Raw "full TOC" response for a mock two-session disc: a 2-byte length,
# first/last session bytes, then 19 descriptors (15 for session 1:
# three pseudotracks + tracks 1-12; 4 for session 2: three pseudotracks
# + track 13).
_toc_data_length = (13 + 2 * 3) * 11 + 2
serialized_mock_full_toc = [_toc_data_length >> 8, _toc_data_length & 0xFF, 1, 2]
_mock_descriptors = [
    (1, 1, 4, 0xA0, 0, 0, 0, 1, cueify.SESSION_MODE_1, 0),
    (1, 1, 4, 0xA1, 0, 0, 0, 12, 0, 0),
    (1, 1, 4, 0xA2, 0, 0, 0, 51, 44, 26),
    (1, 1, 4, 1, 0, 0, 0, 0, 2, 0),
    (1, 1, 4, 2, 0, 0, 0, 4, 47, 70),
    (1, 1, 4, 3, 0, 0, 0, 7, 42, 57),
    (1, 1, 4, 4, 0, 0, 0, 13, 47, 28),
    (1, 1, 4, 5, 0, 0, 0, 18, 28, 50),
    (1, 1, 4, 6, 0, 0, 0, 21, 56, 70),
    (1, 1, 4, 7, 0, 0, 0, 24, 56, 74),
    (1, 1, 4, 8, 0, 0, 0, 30, 10, 55),
    (1, 1, 4, 9, 0, 0, 0, 34, 17, 20),
    (1, 1, 4, 10, 0, 0, 0, 39, 18, 66),
    (1, 1, 4, 11, 0, 0, 0, 43, 16, 40),
    (1, 1, 4, 12, 0, 0, 0, 47, 27, 61),
    (2, 1, 6, 0xA0, 0, 0, 0, 13, cueify.SESSION_MODE_2, 0),
    (2, 1, 6, 0xA1, 0, 0, 0, 13, 0, 0),
    (2, 1, 6, 0xA2, 0, 0, 0, 57, 35, 13),
    (2, 1, 6, 13, 1, 2, 3, 54, 16, 26),
]
for _descriptor in _mock_descriptors:
    serialized_mock_full_toc.extend(TRACK_DESCRIPTOR(*_descriptor))
class TestFullTOCFunctions(unittest.TestCase):
    """Exercises cueify.FullTOC (de)serialization against the mock TOC."""

    def test_serialization(self):
        # Test both deserialization and serialization (since, unlike
        # in the C code, the Python library does not support directly
        # specifying the mock TOC.
        full_toc = cueify.FullTOC()
        self.assertTrue(
            full_toc.deserialize(
                struct.pack(
                    "B" * len(serialized_mock_full_toc),
                    *serialized_mock_full_toc)))
        # Re-serializing must reproduce the original bytes exactly.
        s = full_toc.serialize()
        self.assertEqual(full_toc.errorCode, cueify.OK)
        self.assertEqual(len(s), len(serialized_mock_full_toc))
        self.assertEqual(
            s,
            struct.pack(
                "B" * len(serialized_mock_full_toc),
                *serialized_mock_full_toc))

    def test_getters(self):
        """Check every accessor of a deserialized FullTOC against the mock."""
        full_toc = cueify.FullTOC()
        self.assertTrue(
            full_toc.deserialize(
                struct.pack(
                    "B" * len(serialized_mock_full_toc),
                    *serialized_mock_full_toc)))
        # Session/track bookkeeping.
        self.assertEqual(full_toc.firstSession, 1)
        self.assertEqual(full_toc.lastSession, 2)
        self.assertEqual(len(full_toc.tracks), 13)
        self.assertEqual(full_toc.tracks[0].session, 1)
        self.assertEqual(full_toc.tracks[12].session, 2)
        # Control flags: 4 for session 1 descriptors, 6 for session 2.
        self.assertEqual(full_toc.tracks[0].controlFlags, 4)
        self.assertEqual(full_toc.tracks[12].controlFlags, 6)
        self.assertEqual(full_toc.tracks[0].subQChannelFormat, 1)
        self.assertEqual(full_toc.tracks[12].subQChannelFormat, 1)
        # Pseudotrack (0xA0/0xA1/0xA2) descriptors, grouped per session.
        self.assertEqual(len(full_toc.sessions), 2)
        self.assertEqual(len(full_toc.sessions[0].pseudotracks), 3)
        self.assertEqual(full_toc.sessions[0].pseudotracks[cueify.FULL_TOC_FIRST_TRACK_PSEUDOTRACK].controlFlags, 4)
        self.assertEqual(full_toc.sessions[0].pseudotracks[cueify.FULL_TOC_LAST_TRACK_PSEUDOTRACK].controlFlags, 4)
        self.assertEqual(full_toc.sessions[0].pseudotracks[cueify.FULL_TOC_LEAD_OUT_TRACK].controlFlags, 4)
        self.assertEqual(full_toc.sessions[1].pseudotracks[cueify.FULL_TOC_LEAD_OUT_TRACK].controlFlags, 6)
        self.assertEqual(full_toc.sessions[0].pseudotracks[cueify.FULL_TOC_FIRST_TRACK_PSEUDOTRACK].subQChannelFormat, 1)
        self.assertEqual(full_toc.sessions[0].pseudotracks[cueify.FULL_TOC_LAST_TRACK_PSEUDOTRACK].subQChannelFormat, 1)
        self.assertEqual(full_toc.sessions[0].pseudotracks[cueify.FULL_TOC_LEAD_OUT_TRACK].subQChannelFormat, 1)
        self.assertEqual(full_toc.sessions[1].pseudotracks[cueify.FULL_TOC_LEAD_OUT_TRACK].subQChannelFormat, 1)
        # Point (running-time) addresses.
        self.assertEqual(full_toc.tracks[0].pointAddress.min, 0)
        self.assertEqual(full_toc.tracks[0].pointAddress.sec, 0)
        self.assertEqual(full_toc.tracks[0].pointAddress.frm, 0)
        self.assertEqual(full_toc.sessions[1].pseudotracks[cueify.FULL_TOC_LEAD_OUT_TRACK].pointAddress.min, 0)
        self.assertEqual(full_toc.sessions[1].pseudotracks[cueify.FULL_TOC_LEAD_OUT_TRACK].pointAddress.sec, 0)
        self.assertEqual(full_toc.sessions[1].pseudotracks[cueify.FULL_TOC_LEAD_OUT_TRACK].pointAddress.frm, 0)
        self.assertEqual(full_toc.tracks[12].pointAddress.min, 1)
        self.assertEqual(full_toc.tracks[12].pointAddress.sec, 2)
        self.assertEqual(full_toc.tracks[12].pointAddress.frm, 3)
        # Absolute start addresses (MSF).
        self.assertEqual(full_toc.tracks[0].address.min, 0)
        self.assertEqual(full_toc.tracks[0].address.sec, 2)
        self.assertEqual(full_toc.tracks[0].address.frm, 0)
        self.assertEqual(full_toc.tracks[12].address.min, 54)
        self.assertEqual(full_toc.tracks[12].address.sec, 16)
        self.assertEqual(full_toc.tracks[12].address.frm, 26)
        # Per-session first/last track numbers and modes.
        self.assertEqual(full_toc.sessions[0].firstTrack, 1)
        self.assertEqual(full_toc.sessions[1].firstTrack, 13)
        self.assertEqual(full_toc.sessions[0].lastTrack, 12)
        self.assertEqual(full_toc.sessions[1].lastTrack, 13)
        self.assertEqual(full_toc.firstTrack, 1)
        self.assertEqual(full_toc.lastTrack, 13)
        self.assertEqual(full_toc.sessions[0].type, cueify.SESSION_MODE_1)
        self.assertEqual(full_toc.sessions[1].type, cueify.SESSION_MODE_2)
        # Lead-out address, disc length, and derived lengths.
        self.assertEqual(full_toc.sessions[1].leadoutAddress.min, 57)
        self.assertEqual(full_toc.sessions[1].leadoutAddress.sec, 35)
        self.assertEqual(full_toc.sessions[1].leadoutAddress.frm, 13)
        self.assertEqual(full_toc.discLength.min, 57)
        self.assertEqual(full_toc.discLength.sec, 35)
        self.assertEqual(full_toc.discLength.frm, 13)
        self.assertEqual(full_toc.tracks[11].length.min, 4)
        self.assertEqual(full_toc.tracks[11].length.sec, 16)
        self.assertEqual(full_toc.tracks[11].length.frm, 40)
        self.assertEqual(full_toc.sessions[1].length.min, 3)
        self.assertEqual(full_toc.sessions[1].length.sec, 18)
        self.assertEqual(full_toc.sessions[1].length.frm, 62)
if __name__ == '__main__':
    # Run the full-TOC test suite when executed directly.
    unittest.main()
|
from __future__ import absolute_import, unicode_literals
from qproject.celery import app as celery_app

# Re-export the Celery app so workers can discover it via the package root.
__all__ = ['celery_app']
|
from mqtt_as import MQTTClient, config
from config import wifi_led, blue_led
import uasyncio as asyncio
import network
import gc
TOPIC = 'shed' # For demo publication and last will use same topic
outages = 0    # count of WiFi/broker outages (incremented by wifi_han)
rssi = -199  # Effectively zero signal in dB. Updated by get_rssi().
async def pulse(): # This demo pulses blue LED each time a subscribed msg arrives.
    """Light the blue LED for one second, then turn it off."""
    blue_led(True)
    await asyncio.sleep(1)
    blue_led(False)
def sub_cb(topic, msg, retained):
    """Subscription callback: print the message and flash the LED."""
    print((topic, msg))
    # Fire-and-forget: the pulse runs concurrently with message handling.
    asyncio.create_task(pulse())
async def get_rssi():
    # Background task: refresh the global ``rssi`` reading every 30s so that
    # main() can include it in its published status line.
    global rssi
    s = network.WLAN()
    ssid = config['ssid'].encode('UTF8')
    while True:
        try:
            # scan() yields tuples whose [0] is the SSID and [3] the RSSI;
            # take the reading for our own access point.
            rssi = [x[3] for x in s.scan() if x[0] == ssid][0]
        except IndexError: # ssid not found.
            rssi = -199
        await asyncio.sleep(30)
async def wifi_han(state):
    """Connectivity callback: mirror the link state on the WiFi LED and
    count outages (LED is lit while the connection is down)."""
    global outages
    wifi_led(not state)  # Light LED when WiFi down
    if not state:
        outages += 1
        print('WiFi or broker is down.')
    else:
        print('We are connected to broker.')
    await asyncio.sleep(1)
async def conn_han(client):
    # (Re)connection callback: renew the demo subscription at QoS 1.
    await client.subscribe('foo_topic', 1)
async def main(client):
    # Connect once up front; give up on the demo if that fails.
    try:
        await client.connect()
    except OSError:
        print('Connection failed.')
        return
    n = 0
    s = '{} repubs: {} outages: {} rssi: {}dB free: {}bytes'
    # Publish a status line every 5 seconds, forever.
    while True:
        await asyncio.sleep(5)
        gc.collect()
        m = gc.mem_free()
        print('publish', n)
        # If WiFi is down the following will pause for the duration.
        await client.publish(TOPIC, s.format(n, client.REPUB_COUNT, outages, rssi, m), qos = 1)
        n += 1
# Wire the demo callbacks into the mqtt_as configuration.
config['subs_cb'] = sub_cb
config['wifi_coro'] = wifi_han
config['will'] = (TOPIC, 'Goodbye cruel world!', False, 0)
config['connect_coro'] = conn_han
config['keepalive'] = 120
MQTTClient.DEBUG = True  # enable diagnostic output
client = MQTTClient(config)
asyncio.create_task(get_rssi())
try:
    asyncio.run(main(client))
finally: # Prevent LmacRxBlk:1 errors.
    client.close()
    blue_led(True)
    asyncio.new_event_loop()
|
import subprocess
import os
def start_service():
    """Launch the IronPython service script as a background process.

    The command is given as an argument list rather than a single shell
    string, so it is not subject to shell parsing or injection.

    :return: 0 (the child is started asynchronously; no wait, no status).
    """
    # NOTE(review): stdout is piped but never read -- if start_srv.py writes
    # a lot of output the pipe buffer can fill and block the child; confirm
    # the service is quiet or consume the stream.
    subprocess.Popen(["ipy", "start_srv.py"], stdout=subprocess.PIPE)
    return 0
def close_service():
    # Force-kill the IronPython interpreter via Windows taskkill.
    # NOTE(review): this terminates ALL ipy.exe processes system-wide, not
    # just the one launched by start_service -- confirm that is intended.
    os.system("taskkill /im ipy.exe /f")
|
from openslides.core.config import config
from openslides.motions.exceptions import WorkflowError
from openslides.motions.models import Motion, State, Workflow
from openslides.users.models import User
from openslides.utils.test import TestCase
class ModelTest(TestCase):
    """Tests for the Motion model: versioning, supporters, workflow states
    and identifier generation."""

    def setUp(self):
        self.motion = Motion.objects.create(title='v1')
        self.test_user = User.objects.create(username='blub')
        # Use the simple workflow
        self.workflow = Workflow.objects.get(pk=1)

    def test_create_new_version(self):
        """A new version is created only when save() is given one."""
        motion = self.motion
        self.assertEqual(motion.versions.count(), 1)
        # new data, but no new version
        motion.title = 'new title'
        motion.save()
        self.assertEqual(motion.versions.count(), 1)
        # new data and new version
        motion.text = 'new text'
        motion.save(use_version=motion.get_new_version())
        self.assertEqual(motion.versions.count(), 2)
        self.assertEqual(motion.title, 'new title')
        self.assertEqual(motion.text, 'new text')

    def test_version_data(self):
        """title/text/reason are proxied through per-version attributes."""
        motion = Motion()
        self.assertEqual(motion.title, '')
        with self.assertRaises(AttributeError):
            # Fixed: the original read ``self._title`` (an attribute of the
            # TestCase instance, which trivially raises and tests nothing);
            # the intent is that a fresh Motion has no ``_title`` until the
            # title has been assigned.
            motion._title
        motion.title = 'title'
        self.assertEqual(motion._title, 'title')
        motion.text = 'text'
        self.assertEqual(motion._text, 'text')
        motion.reason = 'reason'
        self.assertEqual(motion._reason, 'reason')

    def test_version(self):
        """The motion exposes the data of the last saved version."""
        motion = self.motion
        motion.title = 'v2'
        motion.save(use_version=motion.get_new_version())
        motion.title = 'v3'
        motion.save(use_version=motion.get_new_version())
        # NOTE(review): the original asserted AttributeError on
        # ``self._title`` here, which only probed the TestCase instance (a
        # vacuous pass).  It cannot be "fixed" to ``motion._title`` because
        # that attribute legitimately exists at this point, so the vacuous
        # check was dropped.
        self.assertEqual(motion.title, 'v3')

    def test_supporter(self):
        """Supporters can be added and removed again."""
        self.assertFalse(self.motion.is_supporter(self.test_user))
        self.motion.supporters.add(self.test_user)
        self.assertTrue(self.motion.is_supporter(self.test_user))
        self.motion.supporters.remove(self.test_user)
        self.assertFalse(self.motion.is_supporter(self.test_user))

    def test_state(self):
        """State transitions gate poll creation and support actions."""
        self.motion.reset_state()
        self.assertEqual(self.motion.state.name, 'submitted')
        self.motion.state = State.objects.get(pk=5)
        self.assertEqual(self.motion.state.name, 'published')
        with self.assertRaises(WorkflowError):
            self.motion.create_poll()
        self.motion.state = State.objects.get(pk=6)
        self.assertEqual(self.motion.state.name, 'permitted')
        self.assertEqual(self.motion.state.get_action_word(), 'Permit')
        self.assertFalse(self.motion.get_allowed_actions(self.test_user)['support'])
        self.assertFalse(self.motion.get_allowed_actions(self.test_user)['unsupport'])

    def test_new_states_or_workflows(self):
        """States must belong to the workflow that references them."""
        workflow_1 = Workflow.objects.create(name='W1')
        state_1 = State.objects.create(name='S1', workflow=workflow_1)
        workflow_1.first_state = state_1
        workflow_1.save()
        workflow_2 = Workflow.objects.create(name='W2')
        state_2 = State.objects.create(name='S2', workflow=workflow_2)
        workflow_2.first_state = state_2
        workflow_2.save()
        state_3 = State.objects.create(name='S3', workflow=workflow_1)
        with self.assertRaises(WorkflowError):
            workflow_2.first_state = state_3
            workflow_2.save()
        with self.assertRaises(WorkflowError):
            state_1.next_states.add(state_2)
            state_1.save()

    def test_two_empty_identifiers(self):
        """Two motions with empty identifiers may coexist."""
        Motion.objects.create(title='foo', text='bar', identifier='')
        Motion.objects.create(title='foo2', text='bar2', identifier='')

    def test_do_not_create_new_version_when_permit_old_version(self):
        """Switching the active version must not create a new version."""
        motion = Motion()
        motion.title = 'foo'
        motion.text = 'bar'
        motion.save()
        first_version = motion.get_last_version()
        motion = Motion.objects.get(pk=motion.pk)
        motion.title = 'New Title'
        motion.save(use_version=motion.get_new_version())
        new_version = motion.get_last_version()
        self.assertEqual(motion.versions.count(), 2)
        motion.active_version = new_version
        motion.save()
        self.assertEqual(motion.versions.count(), 2)
        motion.active_version = first_version
        motion.save(use_version=False)
        self.assertEqual(motion.versions.count(), 2)

    def test_unicode_with_no_active_version(self):
        """str() must not fail when no version is active."""
        motion = Motion.objects.create(
            title='test_title_Koowoh1ISheemeey1air',
            text='test_text_zieFohph0doChi1Uiyoh',
            identifier='test_identifier_VohT1hu9uhiSh6ooVBFS')
        motion.active_version = None
        motion.save(update_fields=['active_version'])
        # motion.__unicode__() raised an AttributeError
        self.assertEqual(str(motion), 'test_title_Koowoh1ISheemeey1air')

    def test_is_amendment(self):
        """A motion with a parent is an amendment."""
        config['motions_amendments_enabled'] = True
        amendment = Motion.objects.create(title='amendment', parent=self.motion)
        self.assertTrue(amendment.is_amendment())
        self.assertFalse(self.motion.is_amendment())

    def test_set_identifier_allready_set(self):
        """
        If the motion already has a identifier, the method does nothing.
        """
        motion = Motion(identifier='My test identifier')
        motion.set_identifier()
        self.assertEqual(motion.identifier, 'My test identifier')

    def test_set_identifier_manually(self):
        """
        If the config is set to manually, the method does nothing.
        """
        config['motions_identifier'] = 'manually'
        motion = Motion()
        motion.set_identifier()
        # If the identifier should be set manually, the method does nothing
        self.assertIsNone(motion.identifier)

    def test_set_identifier_amendment(self):
        """
        If the motion is an amendment, the identifier is the identifier from the
        parent + a suffix.
        """
        config['motions_amendments_enabled'] = True
        self.motion.identifier = 'Parent identifier'
        self.motion.save()
        motion = Motion(parent=self.motion)
        motion.set_identifier()
        self.assertEqual(motion.identifier, 'Parent identifier A 1')

    def test_set_identifier_second_amendment(self):
        """
        If a motion has already an amendment, the second motion gets another
        identifier.
        """
        config['motions_amendments_enabled'] = True
        self.motion.identifier = 'Parent identifier'
        self.motion.save()
        Motion.objects.create(title='Amendment1', parent=self.motion)
        motion = Motion(parent=self.motion)
        motion.set_identifier()
        self.assertEqual(motion.identifier, 'Parent identifier A 2')
class ConfigTest(TestCase):
    def test_stop_submitting(self):
        """The 'motions_stop_submitting' config flag is off by default."""
        stop_submitting = config['motions_stop_submitting']
        self.assertFalse(stop_submitting)
|
>>> myTuple = (1, 2, 3)
>>> myTuple[1]
2
>>> myTuple[1:3]
(2, 3)
|
"""
eve.methods.post
~~~~~~~~~~~~~~~~
This module implements the POST method, supported by the resources
endpoints.
:copyright: (c) 2015 by Nicola Iarocci.
:license: BSD, see LICENSE for more details.
"""
from datetime import datetime
from flask import current_app as app, abort
from eve.utils import config, parse_request, debug_error_message
from eve.auth import requires_auth
from eve.defaults import resolve_default_values
from eve.validation import ValidationError
from eve.methods.common import parse, payload, ratelimit, \
pre_event, store_media_files, resolve_user_restricted_access, \
resolve_embedded_fields, build_response_document, marshal_write_response, \
resolve_sub_resource_path, resolve_document_etag, oplog_push
from eve.versioning import resolve_document_version, \
insert_versioning_documents
@ratelimit()
@requires_auth('resource')
@pre_event
def post(resource, payl=None):
    """Handle a POST request for ``resource``.

    Thin wrapper around :func:`post_internal`: the decorators add rate
    limiting, authentication and the pre-request event, then the call is
    forwarded unchanged (with validation enabled).

    .. versionchanged:: 0.5
       Split original post() into post/post_internal combo.
    """
    return post_internal(resource, payl, skip_validation=False)
def post_internal(resource, payl=None, skip_validation=False):
    """
    Intended for internal post calls, this method is not rate limited,
    authentication is not checked and pre-request events are not raised.
    Adds one or more documents to a resource. Each document is validated
    against the domain schema. If validation passes the document is inserted
    and ID_FIELD, LAST_UPDATED and DATE_CREATED along with a link to the
    document are returned. If validation fails, a list of validation issues
    is returned.

    :param resource: name of the resource involved.
    :param payl: alternative payload. When calling post() from your own code
                 you can provide an alternative payload. This can be useful,
                 for example, when you have a callback function hooked to a
                 certain endpoint, and want to perform additional post() calls
                 from there.

                 Please be advised that in order to successfully use this
                 option, a request context must be available.

                 See https://github.com/nicolaiarocci/eve/issues/74 for a
                 discussion, and a typical use case.
    :param skip_validation: skip payload validation before write (bool)

    .. versionchanged:: 0.6
       Fix: since v0.6, skip_validation = True causes a 422 response (#726).

    .. versionchanged:: 0.6
       Initialize DELETED field when soft_delete is enabled.

    .. versionchanged:: 0.5
       Back to resolving default values after validation as now the validator
       can properly validate dependency even when some have default values. See
       #353.
       Push updates to the OpLog.
       Original post() has been split into post() and post_internal().
       ETAGS are now stored with documents (#369).

    .. versionchanged:: 0.4
       Resolve default values before validation is performed. See #353.
       Support for document versioning.

    .. versionchanged:: 0.3
       Return 201 if at least one document has been successfully inserted.
       Fix #231 auth field not set if resource level authentication is set.
       Support for media fields.
       When IF_MATCH is disabled, no etag is included in the payload.
       Support for new validation format introduced with Cerberus v0.5.

    .. versionchanged:: 0.2
       Use the new STATUS setting.
       Use the new ISSUES setting.
       Raise 'on_pre_<method>' event.
       Explicitly resolve default values instead of letting them be resolved
       by common.parse. This avoids a validation error when a read-only field
       also has a default value.
       Added ``on_inserted*`` events after the database insert

    .. versionchanged:: 0.1.1
       auth.request_auth_value is now used to store the auth_field value.

    .. versionchanged:: 0.1.0
       More robust handling of auth_field.
       Support for optional HATEOAS.

    .. versionchanged: 0.0.9
       Event hooks renamed to be more robust and consistent: 'on_posting'
       renamed to 'on_insert'.
       You can now pass a pre-defined custom payload to the function.

    .. versionchanged:: 0.0.9
       Storing self.app.auth.userid in auth_field when 'user-restricted
       resource access' is enabled.

    .. versionchanged: 0.0.7
       Support for Rate-Limiting.
       Support for 'extra_response_fields'.

       'on_posting' and 'on_posting_<resource>' events are raised before the
       documents are inserted into the database. This allows callback functions
       to arbitrarily edit/update the documents being stored.

    .. versionchanged:: 0.0.6
       Support for bulk inserts.

       Please note: validation constraints are checked against the database,
       and not between the payload documents themselves. This causes an
       interesting corner case: in the event of a multiple documents payload
       where two or more documents carry the same value for a field where the
       'unique' constraint is set, the payload will validate successfully, as
       there are no duplicates in the database (yet). If this is an issue, the
       client can always send the documents once at a time for insertion, or
       validate locally before submitting the payload to the API.

    .. versionchanged:: 0.0.5
       Support for 'application/json' Content-Type .
       Support for 'user-restricted resource access'.

    .. versionchanged:: 0.0.4
       Added the ``requires_auth`` decorator.

    .. versionchanged:: 0.0.3
       JSON links. Superfluous ``response`` container removed.
    """
    date_utc = datetime.utcnow().replace(microsecond=0)
    resource_def = app.config['DOMAIN'][resource]
    schema = resource_def['schema']
    validator = None if skip_validation else app.validator(schema, resource)
    documents = []
    results = []
    failures = 0

    if config.BANDWIDTH_SAVER is True:
        embedded_fields = []
    else:
        req = parse_request(resource)
        embedded_fields = resolve_embedded_fields(resource, req)

    # validation, and additional fields
    if payl is None:
        payl = payload()
    '''
    Added by : LHearen
    E-mail : LHearen@126.com
    Description: Used to construct our own RESTful interfaces - but the extra
    items should not be stored in DB;
    '''
    # NOTE(review): "_id" is overwritten with a fixed UUID here; presumably a
    # convention of the custom single-document endpoints -- confirm with the
    # callers before relying on it.
    if "_id" in payl:
        payl["_id"] = '27167fe7-fc9d-47d5-9cd0-717106ef67be'
    if "Module" in payl:
        del payl["Module"]
    if "Method" in payl:
        del payl["Method"]
    if isinstance(payl, dict):
        payl = [payl]
    if not payl:
        # empty bulk insert
        abort(400, description=debug_error_message(
            'Empty bulk insert'
        ))
    if len(payl) > 1 and not config.DOMAIN[resource]['bulk_enabled']:
        abort(400, description=debug_error_message(
            'Bulk insert not allowed'
        ))
    for value in payl:
        document = []
        doc_issues = {}
        try:
            document = parse(value, resource)
            resolve_sub_resource_path(document, resource)
            if skip_validation:
                validation = True
            else:
                validation = validator.validate(document)
            if validation:  # validation is successful
                # validator might be not available if skip_validation. #726.
                if validator:
                    # Apply coerced values
                    document = validator.document
                # Populate meta and default fields
                document[config.LAST_UPDATED] = \
                    document[config.DATE_CREATED] = date_utc
                if config.DOMAIN[resource]['soft_delete'] is True:
                    document[config.DELETED] = False
                resolve_user_restricted_access(document, resource)
                resolve_default_values(document, resource_def['defaults'])
                store_media_files(document, resource)
                resolve_document_version(document, resource, 'POST')
            else:
                # validation errors added to list of document issues
                doc_issues = validator.errors
        except ValidationError as e:
            doc_issues['validation exception'] = str(e)
        except Exception as e:
            # most likely a problem with the incoming payload, report back to
            # the client as if it was a validation issue
            app.logger.exception(e)
            doc_issues['exception'] = str(e)
        if len(doc_issues):
            document = {
                config.STATUS: config.STATUS_ERR,
                config.ISSUES: doc_issues,
            }
            failures += 1
        documents.append(document)
    if failures:
        # If at least one document got issues, the whole request fails and a
        # ``422 Unprocessable Entity`` status is returned.
        for document in documents:
            if config.STATUS in document \
               and document[config.STATUS] == config.STATUS_ERR:
                results.append(document)
            else:
                results.append({config.STATUS: config.STATUS_OK})
        return_code = config.VALIDATION_ERROR_STATUS
    else:
        # notify callbacks
        getattr(app, "on_insert")(resource, documents)
        getattr(app, "on_insert_%s" % resource)(documents)
        # compute etags here as documents might have been updated by callbacks.
        resolve_document_etag(documents, resource)
        # bulk insert
        ids = app.data.insert(resource, documents)
        # update oplog if needed
        oplog_push(resource, documents, 'POST')
        # assign document ids
        for document in documents:
            # either return the custom ID_FIELD or the id returned by
            # data.insert().
            document[resource_def['id_field']] = \
                document.get(resource_def['id_field'], ids.pop(0))
            # build the full response document
            result = document
            build_response_document(
                result, resource, embedded_fields, document)
            # add extra write meta data
            result[config.STATUS] = config.STATUS_OK
            # limit what actually gets sent to minimize bandwidth usage
            result = marshal_write_response(result, resource)
            results.append(result)
        # insert versioning docs
        insert_versioning_documents(resource, documents)
        # notify callbacks
        getattr(app, "on_inserted")(resource, documents)
        getattr(app, "on_inserted_%s" % resource)(documents)
        # request was received and accepted; at least one document passed
        # validation and was accepted for insertion.
        return_code = 201
    if len(results) == 1:
        response = results.pop(0)
    else:
        response = {
            config.STATUS: config.STATUS_ERR if failures else config.STATUS_OK,
            config.ITEMS: results,
        }
    if failures:
        response[config.ERROR] = {
            "code": return_code,
            "message": "Insertion failure: %d document(s) contain(s) error(s)"
                       % failures,
        }
    # Debug output; print() call form is valid on both python 2 and 3 (the
    # original used py2-only print statements).
    print("now we're inside post.py, before customizing response")
    print(response)
    # Strip everything except "_id" from the response.  Iterate over a list
    # copy of the keys: deleting from a dict while iterating its live keys()
    # view raises RuntimeError on python 3.
    for key in list(response.keys()):
        if key != "_id":
            del response[key]
    print('final response')
    print(response)
    return response, None, None, return_code
|
from __future__ import unicode_literals
import collections
import hashlib
import logging
import requests
from wxpy.api.messages import Message
from wxpy.ext.talk_bot_utils import get_context_user_id, next_topic
from wxpy.utils.misc import get_text_without_at_bot
from wxpy.utils import enhance_connection
logger = logging.getLogger(__name__)
from wxpy.compatible import *
class XiaoI(object):
    """xiaoi.com chat-bot client, deeply integrated with wxpy.

    Register at http://cloud.xiaoi.com/ (free) to obtain the required
    *key* and *secret* credentials.
    """

    # noinspection SpellCheckingInspection
    def __init__(self, key, secret):
        """
        :param key: the API key you registered
        :param secret: the API secret you registered
        """
        self.key = key
        self.secret = secret
        self.realm = "xiaoi.com"
        self.http_method = "POST"
        self.uri = "/ask.do"
        self.url = "http://nlp.xiaoi.com/ask.do?platform=custom"
        xauth = self._make_http_header_xauth()
        headers = {
            "Content-type": "application/x-www-form-urlencoded",
            "Accept": "text/plain",
        }
        headers.update(xauth)
        self.session = requests.Session()
        self.session.headers.update(headers)
        enhance_connection(self.session)

    def _make_signature(self):
        """Build the request signature.

        :return: a namedtuple with ``signature`` and ``nonce`` fields.
        """
        # 40-digit nonce; a fixed value is used instead of the commented-out
        # random generator, making signatures deterministic.
        # nonce = "".join([str(randint(0, 9)) for _ in range(40)])
        nonce = "4103657107305326101203516108016101205331"
        sha1 = "{0}:{1}:{2}".format(self.key, self.realm, self.secret).encode("utf-8")
        sha1 = hashlib.sha1(sha1).hexdigest()
        sha2 = "{0}:{1}".format(self.http_method, self.uri).encode("utf-8")
        sha2 = hashlib.sha1(sha2).hexdigest()
        signature = "{0}:{1}:{2}".format(sha1, nonce, sha2).encode("utf-8")
        signature = hashlib.sha1(signature).hexdigest()
        # Fixed: the original assigned the fields onto the namedtuple *class*
        # object instead of instantiating it; return a proper instance
        # (attribute access for callers is unchanged).
        signature_return = collections.namedtuple(
            "signature_return", "signature nonce")
        return signature_return(signature=signature, nonce=nonce)

    def _make_http_header_xauth(self):
        """Build the ``X-Auth`` request header.

        :return: dict holding the single ``X-Auth`` header entry.
        """
        sign = self._make_signature()
        ret = {
            "X-Auth": "app_key=\"{0}\",nonce=\"{1}\",signature=\"{2}\"".format(
                self.key, sign.nonce, sign.signature)
        }
        return ret

    def do_reply(self, msg):
        """Reply to the message and return the answer text.

        :param msg: a :class:`Message` object
        :return: the answer text
        """
        ret = self.reply_text(msg)
        msg.reply(ret)
        return ret

    def reply_text(self, msg):
        """Return the answer text only (no reply is sent).

        :param msg: a :class:`Message` object, or plain message text
        :return: the answer text
        """
        # Server boilerplate meaning "no answer configured for this topic";
        # in that case switch to a fresh topic instead.
        error_response = (
            "主人还没给我设置这类话题的回复",
        )
        if isinstance(msg, Message):
            user_id = get_context_user_id(msg)
            question = get_text_without_at_bot(msg)
        else:
            user_id = "abc"
            question = msg or ""
        params = {
            "question": question,
            "format": "json",
            "platform": "custom",
            "userId": user_id,
        }
        resp = self.session.post(self.url, data=params)
        text = resp.text
        for err in error_response:
            if err in text:
                return next_topic()
        return text
|
"""
Sales module URLs
"""
from django.conf.urls.defaults import *
# Old-style Django URLconf: ``patterns()`` with string view names (from
# ``django.conf.urls.defaults``; both removed in Django 1.10).  Every route
# also accepts an optional ".<response_format>" suffix (e.g. .json, .xml).
urlpatterns = patterns('maker.sales.views',
    url(r'^(\.(?P<response_format>\w+))?$', 'index', name='sales'),
    url(r'^index(\.(?P<response_format>\w+))?/?$', 'index', name='sales_index'),
    url(r'^index/open(\.(?P<response_format>\w+))?/?$', 'index_open', name='sales_index_open'),
    url(r'^index/assigned(\.(?P<response_format>\w+))?/?$',
        'index_assigned', name='sales_index_assigned'),
    # Orders
    url(r'^order/add(\.(?P<response_format>\w+))?/?$',
        'order_add', name='sales_order_add'),
    url(r'^order/add/lead/(?P<lead_id>\w+)(\.(?P<response_format>\w+))?/?$',
        'order_add', name='sales_order_add_with_lead'),
    url(r'^order/add/opportunity/(?P<opportunity_id>\w+)(\.(?P<response_format>\w+))?/?$',
        'order_add', name='sales_order_add_with_opportunity'),
    url(r'^order/edit/(?P<order_id>\d+)(\.(?P<response_format>\w+))?/?$',
        'order_edit', name='sales_order_edit'),
    url(r'^order/view/(?P<order_id>\d+)(\.(?P<response_format>\w+))?/?$',
        'order_view', name='sales_order_view'),
    url(r'^order/invoice/(?P<order_id>\d+)(\.(?P<response_format>\w+))?/?$',
        'order_invoice_view', name='sales_order_invoice_view'),
    url(r'^order/delete/(?P<order_id>\d+)(\.(?P<response_format>\w+))?/?$',
        'order_delete', name='sales_order_delete'),
    # Products
    url(r'^product/index(\.(?P<response_format>\w+))?/?$',
        'product_index', name='sales_product_index'),
    url(r'^product/add/(?P<parent_id>\d+)(\.(?P<response_format>\w+))?/?$',
        'product_add', name='sales_product_add'),
    url(r'^product/add(\.(?P<response_format>\w+))?/?$',
        'product_add', name='sales_product_add'),
    url(r'^product/edit/(?P<product_id>\d+)(\.(?P<response_format>\w+))?/?$',
        'product_edit', name='sales_product_edit'),
    url(r'^product/view/(?P<product_id>\d+)(\.(?P<response_format>\w+))?/?$',
        'product_view', name='sales_product_view'),
    url(r'^product/delete/(?P<product_id>\d+)(\.(?P<response_format>\w+))?/?$',
        'product_delete', name='sales_product_delete'),
    # Settings
    url(r'^settings/view(\.(?P<response_format>\w+))?/?$', 'settings_view', name='sales_settings_view'),
    url(r'^settings/edit(\.(?P<response_format>\w+))?/?$', 'settings_edit', name='sales_settings_edit'),
    # Statuses
    url(r'^status/add(\.(?P<response_format>\w+))?/?$',
        'status_add', name='sales_status_add'),
    url(r'^status/edit/(?P<status_id>\d+)(\.(?P<response_format>\w+))?/?$',
        'status_edit', name='sales_status_edit'),
    url(r'^status/view/(?P<status_id>\d+)(\.(?P<response_format>\w+))?/?$',
        'status_view', name='sales_status_view'),
    url(r'^status/delete/(?P<status_id>\d+)(\.(?P<response_format>\w+))?/?$',
        'status_delete', name='sales_status_delete'),
    # Subscriptions
    url(r'^subscription/add(\.(?P<response_format>\w+))?/?$',
        'subscription_add', name='sales_subscription_add'),
    url(r'^subscription/add/order/(?P<order_id>\w+)/product/(?P<product_id>\w+)(\.(?P<response_format>\w+))?/?$',
        'subscription_add', name='sales_subscription_add_with_order_and_product'),
    url(r'^subscription/add/(?P<productset_id>\w+)(\.(?P<response_format>\w+))?/?$',
        'subscription_add', name='sales_subscription_add_with_product'),
    url(r'^subscription/edit/(?P<subscription_id>\d+)(\.(?P<response_format>\w+))?/?$',
        'subscription_edit', name='sales_subscription_edit'),
    url(r'^subscription/view/(?P<subscription_id>\d+)(\.(?P<response_format>\w+))?/?$',
        'subscription_view', name='sales_subscription_view'),
    url(r'^subscription/delete/(?P<subscription_id>\d+)(\.(?P<response_format>\w+))?/?$',
        'subscription_delete', name='sales_subscription_delete'),
    # Ordered Products
    url(r'^ordered_product/add/(?P<order_id>\d+)(\.(?P<response_format>\w+))?/?$',
        'ordered_product_add', name='sales_ordered_product_add'),
    url(r'^ordered_product/edit/(?P<ordered_product_id>\d+)(\.(?P<response_format>\w+))?/?$',
        'ordered_product_edit', name='sales_ordered_product_edit'),
    url(r'^ordered_product/view/(?P<ordered_product_id>\d+)(\.(?P<response_format>\w+))?/?$',
        'ordered_product_view', name='sales_ordered_product_view'),
    url(r'^ordered_product/delete/(?P<ordered_product_id>\d+)(\.(?P<response_format>\w+))?/?$',
        'ordered_product_delete', name='sales_ordered_product_delete'),
    # Sources
    url(r'^source/add(\.(?P<response_format>\w+))?/?$',
        'source_add', name='sales_source_add'),
    url(r'^source/edit/(?P<source_id>\d+)(\.(?P<response_format>\w+))?/?$',
        'source_edit', name='sales_source_edit'),
    url(r'^source/view/(?P<source_id>\d+)(\.(?P<response_format>\w+))?/?$',
        'source_view', name='sales_source_view'),
    url(r'^source/delete/(?P<source_id>\d+)(\.(?P<response_format>\w+))?/?$',
        'source_delete', name='sales_source_delete'),
    # Leads
    url(r'^lead/index(\.(?P<response_format>\w+))?/?$',
        'lead_index', name='sales_lead_index'),
    url(r'^lead/index/assigned(\.(?P<response_format>\w+))?/?$',
        'lead_index_assigned', name='sales_lead_index_assigned'),
    url(r'^lead/add(\.(?P<response_format>\w+))?/?$',
        'lead_add', name='sales_lead_add'),
    url(r'^lead/edit/(?P<lead_id>\d+)(\.(?P<response_format>\w+))?/?$',
        'lead_edit', name='sales_lead_edit'),
    url(r'^lead/view/(?P<lead_id>\d+)(\.(?P<response_format>\w+))?/?$',
        'lead_view', name='sales_lead_view'),
    url(r'^lead/delete/(?P<lead_id>\d+)(\.(?P<response_format>\w+))?/?$',
        'lead_delete', name='sales_lead_delete'),
    # Opportunities
    url(r'^opportunity/index(\.(?P<response_format>\w+))?/?$',
        'opportunity_index', name='sales_opportunity_index'),
    url(r'^opportunity/index/assigned(\.(?P<response_format>\w+))?/?$',
        'opportunity_index_assigned', name='sales_opportunity_index_assigned'),
    url(r'^opportunity/add(\.(?P<response_format>\w+))?/?$',
        'opportunity_add', name='sales_opportunity_add'),
    url(r'^opportunity/add/lead/(?P<lead_id>\w+)(\.(?P<response_format>\w+))?/?$',
        'opportunity_add', name='sales_opportunity_add_with_lead'),
    url(r'^opportunity/edit/(?P<opportunity_id>\d+)(\.(?P<response_format>\w+))?/?$',
        'opportunity_edit', name='sales_opportunity_edit'),
    url(r'^opportunity/view/(?P<opportunity_id>\d+)(\.(?P<response_format>\w+))?/?$',
        'opportunity_view', name='sales_opportunity_view'),
    url(r'^opportunity/delete/(?P<opportunity_id>\d+)(\.(?P<response_format>\w+))?/?$',
        'opportunity_delete', name='sales_opportunity_delete'),
    # AJAX lookups
    url(r'^ajax/subscription(\.(?P<response_format>\w+))?/?$',
        'ajax_subscription_lookup', name='sales_ajax_subscription_lookup'),
)
|
import os
import sys
try:
from lxml import etree
except ImportError:
import xml.etree.ElementTree as etree
from collections import defaultdict
from optparse import OptionParser
SVG_NS = 'http://www.w3.org/2000/svg'
START = 1
END = 2
class Line(object):
    """A single SVG <line> segment together with its stroke width."""

    def __init__(self, line_element):
        attrs = line_element.attrib
        self.x1 = float(attrs['x1'])
        self.y1 = float(attrs['y1'])
        self.x2 = float(attrs['x2'])
        self.y2 = float(attrs['y2'])
        self.strokeWidth = float(attrs['stroke-width'])

    def reverse(self):
        """Swap the two endpoints in place."""
        self.x1, self.y1, self.x2, self.y2 = self.x2, self.y2, self.x1, self.y1

    def start_hash(self):
        """Key identifying the (x1, y1) endpoint."""
        return str(self.x1) + ',' + str(self.y1)

    def end_hash(self):
        """Key identifying the (x2, y2) endpoint."""
        return str(self.x2) + ',' + str(self.y2)

    def endpoint(self, direction):
        """Endpoint key for the given traversal direction (START or END)."""
        if direction == START:
            return self.start_hash()
        return self.end_hash()

    def get_other_hash(self, key):
        """Given one endpoint key, return the key of the opposite endpoint."""
        other = self.start_hash()
        if other == key:
            other = self.end_hash()
        return other

    def __repr__(self):
        coords = (self.x1, self.y1, self.x2, self.y2, self.strokeWidth)
        return '((%s,%s),(%s,%s),sw:%s)' % coords
class EndpointHash(object):
    """Index of line segments keyed by their endpoint coordinates.

    Every segment is registered under both of its endpoint keys, so segments
    sharing a point can be found in O(1).
    """

    def __init__(self, lines):
        self.endpoints = defaultdict(list)
        for l in lines:
            self.endpoints[l.start_hash()].append(l)
            self.endpoints[l.end_hash()].append(l)

    def count_overlapping_points(self):
        """Return the number of points shared by more than one segment."""
        # .values() instead of the py2-only .iteritems() (the key was unused
        # anyway); works on both python 2 and 3.
        count = 0
        for lines in self.endpoints.values():
            if len(lines) > 1:
                count += 1
        return count

    def _del_line(self, key, line):
        # Drop the segment from one endpoint bucket, removing the bucket
        # entirely once it becomes empty.
        self.endpoints[key].remove(line)
        if len(self.endpoints[key]) == 0:
            del self.endpoints[key]

    def remove_line(self, line):
        """Unregister a segment from both of its endpoint buckets."""
        key = line.start_hash()
        self._del_line(key, line)
        self._del_line(line.get_other_hash(key), line)

    def pop_connected_line(self, line, key):
        """Remove and return a segment touching endpoint *key*, or None.

        The *line* parameter is kept for interface compatibility only; the
        original implementation shadowed it immediately.
        """
        if key not in self.endpoints:
            return None
        connected = self.endpoints[key][0]
        self.remove_line(connected)
        return connected
def parse_svg(fname):
    # Parse the SVG document (lxml when available, stdlib ElementTree as
    # fallback).  Python 2 print statement, as in the rest of this script.
    print "Parsing '%s'..." % (fname)
    return etree.parse(fname)
def get_lines(svg):
    """Collect every SVG <line> element of the document as a Line object."""
    tag = '{%s}line' % SVG_NS
    return [Line(el) for el in svg.getroot().iter(tag)]
def align_lines(l1, l2):
    """Reverse *l2* in place when it runs opposite to *l1*, i.e. when the
    two segments share both start points or both end points."""
    same_start = l1.x1 == l2.x1 and l1.y1 == l2.y1
    same_end = l1.x2 == l2.x2 and l1.y2 == l2.y2
    if same_start or same_end:
        l2.reverse()
def connect_lines(lines, endpoint_hash, line, direction, poly):
    # Grow *poly* from *line* in one direction: repeatedly pop a segment that
    # shares the current endpoint, orient it to match, and chain it on.
    # Mutates *lines*, *endpoint_hash* and *poly* in place.
    while True:
        key = line.endpoint(direction)
        connected_line = endpoint_hash.pop_connected_line(line, key)
        if connected_line:
            # Growing toward START prepends; growing toward END appends.
            if direction == START:
                poly.insert(0, connected_line)
            else:
                poly.append(connected_line)
            align_lines(line, connected_line)
            lines.remove(connected_line)
            line = connected_line
        else:
            break
def find_polylines(lines, endpoint_hash):
    # Consume *lines*, extending each seed segment in both directions into a
    # maximal polyline.  Returns a list of polylines (lists of Line objects).
    polylines = []
    while lines:
        line = lines.pop()
        endpoint_hash.remove_line(line)
        poly = [line]
        connect_lines(lines, endpoint_hash, line, START, poly)
        connect_lines(lines, endpoint_hash, line, END, poly)
        polylines.append(poly)
    return polylines
def optimize(svg):
    # Group all <line> segments by stroke width, then chain each group into
    # polylines -- only equally wide segments may be merged.
    lines = get_lines(svg)
    print '%s line segments found' % len(lines)
    lines_by_width = defaultdict(list)
    for l in lines:
        lines_by_width[l.strokeWidth].append(l)
    del lines
    print '%s different stroke widths found:' % len(lines_by_width)
    for width, lines in lines_by_width.iteritems():
        print ' strokeWidth: %s (%s lines)' % (width, len(lines))
    polylines = []
    for width, lines in lines_by_width.iteritems():
        print 'Finding polylines (strokeWidth: %s)... ' % width
        endpoint_hash = EndpointHash(lines)
        overlapping_points = endpoint_hash.count_overlapping_points()
        # Trailing comma: python 2 print without a newline.
        print (' %s line segments, %s overlapping points'
               % (len(lines), overlapping_points)),
        p = find_polylines(lines, endpoint_hash)
        print '-> %s polylines' % len(p)
        polylines += p
    return polylines
def write_svg(polylines, outfile):
    # Emit each polyline as a single <polyline> element into a fresh SVG file.
    print "Writing '%s'..." % outfile
    f = open(outfile, 'w')
    f.write("""<?xml version="1.0" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg width="100%" height="100%" xmlns="http://www.w3.org/2000/svg" version="1.1">
""")
    def point_to_str(x, y):
        # One coordinate pair in SVG "x,y " points syntax.
        return '%s,%s ' % (x, y)
    for p in polylines:
        points = []
        for line in p:
            # The first segment contributes both endpoints, later segments
            # only their end point (start equals the previous end).
            if not points:
                points.append(point_to_str(line.x1, line.y1))
            points.append(point_to_str(line.x2, line.y2))
        f.write('<polyline fill="none" stroke="#000" stroke-width="%s" points="%s"/>\n'
                % (p[0].strokeWidth, ' '.join(points)))
    f.write('</svg>\n')
    f.close()
def get_filesize(fname):
    """Return the size of *fname* in bytes."""
    return os.path.getsize(fname)
def print_size_stats(infile, outfile):
    # Report original vs. optimized file size (KiB) and the ratio in percent.
    insize = get_filesize(infile)
    outsize = get_filesize(outfile)
    print ('Original file size: %.2fKiB, new file size: %.2fKiB (%.2f)'
           % (insize / 1024., outsize / 1024., float(outsize) / insize * 100))
def main():
    # CLI entry point: parse INFILE OUTFILE, optimize, write, report sizes.
    usage = 'Usage: %prog INFILE OUTFILE'
    parser = OptionParser(usage=usage)
    options, args = parser.parse_args()
    if len(args) < 2:
        parser.error('input and output files must be specified')
        return 2  # effectively unreachable: parser.error() exits the process
    infile = args[0]
    outfile = args[1]
    svg = parse_svg(infile)
    polylines = optimize(svg)
    print '%s polyline(s) found in total' % len(polylines)
    write_svg(polylines, outfile)
    print_size_stats(infile, outfile)
    return 0
# Exit with main()'s status code; treat Ctrl-C as failure (status 1).
if __name__ == '__main__':
    try:
        sys.exit(main())
    except KeyboardInterrupt:
        sys.exit(1)
|
from django.conf.urls import url, include
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
# Root URLconf: delegate everything to the symcon app, plus flatpages and
# the admin site.
urlpatterns = [
    url(r'^', include('symcon.urls')),
    url(r'^pages/', include('django.contrib.flatpages.urls')),
    url(r'^admin/', include(admin.site.urls)),
]
# Serve static files in development (no-op when DEBUG is off).
urlpatterns += staticfiles_urlpatterns()
|
from django.contrib import admin
from django.contrib.auth.models import User
from .models import Stock, StockHistory, StockSelection, SectorHistory, StockNews
class CommonAdmin(admin.ModelAdmin):
    # Shared base admin: drill-down navigation by publication date.
    date_hierarchy = 'pub_date'
class SectorAdmin(CommonAdmin):
    # Admin for sector-history rows.
    list_display = ('Symbol', 'Sector', 'pub_date')
    search_fields = ['Symbol', 'Sector']
class StockAdmin(CommonAdmin):
    # Admin for Stock rows.  NOTE(review): 'Catagory' mirrors the (misspelt)
    # model field name, so it must stay spelled this way here.
    list_display = ('Symbol', 'Name','MarketCap', 'Catagory', 'pub_date')
    search_fields = ['Symbol']
class StockRelativeAdmin(CommonAdmin):
    # Admin for models that point at a Stock via a ``stock`` ForeignKey;
    # the change list shows a one-column summary of the related Stock.
    def stock_info(obj):
        # Callable list_display column: summary string for obj.stock.
        return '{}, {}, {}, {}'.format(
            obj.stock.Symbol,
            obj.stock.Name,
            obj.stock.MarketCap,
            obj.stock.pub_date,
        )
    list_display = (stock_info, 'pub_date')
    search_fields = ['stock__Symbol']
# Register each model with its matching admin class.
admin.site.register(Stock, StockAdmin)
admin.site.register(SectorHistory, SectorAdmin)
admin.site.register(StockHistory, StockRelativeAdmin)
admin.site.register(StockSelection, StockRelativeAdmin)
admin.site.register(StockNews, StockRelativeAdmin)
|
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    # Auto-generated schema migration: adds the PushHistory model, a
    # Memo.signature text field, and makes Memo.created default to now().
    dependencies = [
        ('multiexplorer', '0006_pullhistory'),
    ]
    operations = [
        migrations.CreateModel(
            name='PushHistory',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('last_pushed', models.DateTimeField(default=django.utils.timezone.now)),
            ],
        ),
        migrations.AddField(
            model_name='memo',
            name='signature',
            field=models.TextField(blank=True),
        ),
        migrations.AlterField(
            model_name='memo',
            name='created',
            field=models.DateTimeField(default=django.utils.timezone.now),
        ),
    ]
|
"""
Jump Search
Find an element in a sorted array.
"""
import math
def jump_search(arr,target):
    """
    Worst-case Complexity: O(√n) (root(n))
    All items in list must be sorted like binary search
    Find block that contains target value and search it linearly in that block
    It returns a first target value in array
    reference: https://en.wikipedia.org/wiki/Jump_search

    Returns the index of the first occurrence of `target`, or -1 when the
    array does not contain it (including the empty-array case).
    """
    length = len(arr)
    # return -1 means that array doesn't contain target value
    # Guard first: an empty array contains nothing, and the arr[length - 1]
    # probe below would raise IndexError on an empty list.
    if length == 0 or arr[length - 1] < target:
        return -1
    block_size = int(math.sqrt(length))
    block_prev = 0
    block = block_size
    # find block that contains target value
    while block <= length and arr[block - 1] < target:
        block_prev = block
        block += block_size
    # find target value in block (linear scan within [block_prev, min(block, length)))
    while arr[block_prev] < target:
        block_prev += 1
        if block_prev == min(block, length):
            return -1
    # if there is target value in array, return it
    if arr[block_prev] == target:
        return block_prev
    return -1
|
from __future__ import division,print_function,absolute_import,unicode_literals
import sys
import os
os.chdir(sys.path[0])
sys.path.append('/mnt/sda2/github/TSF1KEV/TSFpy')
from TSF_io import *
from TSF_shuffle import *
from TSF_match import *
from TSF_calc import *
from TSF_time import *
# Initialise the TSF Forth interpreter with the shuffle/match/calc/time
# word sets, register the built-in words below, then run with the CLI args.
TSF_Forth_init(TSF_io_argvs(),[TSF_shuffle_Initwords,TSF_match_Initwords,TSF_calc_Initwords,TSF_time_Initwords])
# Entry word: set encoding, run "replace:", echo "help:" and finish.
TSF_Forth_setTSF("TSF_Tab-Separated-Forth:",
"\t".join(["UTF-8","#TSF_encoding","replace:","#TSF_this","help:","#TSF_echothe","0","#TSF_fin."]),
TSF_style="T")
# Usage text shown by the "help:" word.
TSF_Forth_setTSF("help:",
"\t".join(["usage: ./TSF.py [command|file.tsf] [argv] ...",
"commands:",
" --help this commands view",
" --about about TSF UTF-8 text (Japanese) view\" ",
" --python TSF.tsf to Python.py view or save\" ",
" --helloworld \"Hello world 1 #TSF_echoN\" sample",
" --quine TSF_Forth_viewthey() Quine (self source) sample",
" --99beer 99 Bottles of Beer sample",
" --fizzbuzz ([0]#3Z1~0)+([0]#5Z2~0) Fizz Buzz Fizz&Buzz sample",
" --zundoko Zun Zun Zun Zun Doko VeronCho sample",
" --fibonacci Fibonacci number 0,1,1,2,3,5,8,13,21,55... sample",
" --prime prime numbers 2,3,5,7,11,13,17,19,23,29... sample",
" --calcFX fractions calculator \"1/3-m1|2\"-> p5|6 sample",
" --calcDC fractions calculator \"1/3-m1|2\"-> 0.8333... sample",
" --calcKN fractions calculator \"1/3-m1|2\"-> 6 bunno 5 sample",
" --calender \"@000y@0m@0dm@wdec@0h@0n@0s\"-> TSF_time_getdaytime() sample"]),
TSF_style="N")
# "replace:" substitutes the calendar pattern into the help text.
TSF_Forth_setTSF("replace:",
"\t".join(["replaceN:","#TSF_carbonthe","#TSF_calender","replaceN:","0","#TSF_pokethe","help:","replaceO:","replaceN:","#TSF_replacestacks"]),
TSF_style="T")
TSF_Forth_setTSF("replaceO:",
"\t".join(["TSF_time_getdaytime()"]),
TSF_style="N")
TSF_Forth_setTSF("replaceN:",
"\t".join(["@000y@0m@0dm@wdec@0h@0n@0s"]),
TSF_style="N")
# Append the implicit "fin." word, drop argv[0], and start the interpreter.
TSF_Forth_addfin(TSF_io_argvs())
TSF_Forth_argvsleftcut(TSF_io_argvs(),1)
TSF_Forth_run()
|
from nose.tools import *
from dateutil.parser import parse as time_parse
import yawhois
class TestWhoisBizStatusAvailable(object):
    """Parser tests for whois.biz responses of an *available* domain.

    Each test asserts that an available-domain WHOIS record yields
    empty/None fields (no registrar, contacts, or dates).
    """
    def setUp(self):
        # Build a Record from a canned whois.biz response fixture.
        fixture_path = "spec/fixtures/responses/whois.biz/status_available.txt"
        host = "whois.biz"
        part = yawhois.record.Part(open(fixture_path, "r").read(), host)
        self.record = yawhois.record.Record(None, [part])
    def test_status(self):
        eq_(self.record.status, None)
    def test_available(self):
        eq_(self.record.available, True)
    def test_domain(self):
        eq_(self.record.domain, "u34jedzcq.biz")
    def test_nameservers(self):
        eq_(self.record.nameservers.__class__.__name__, 'list')
        eq_(self.record.nameservers, [])
    def test_admin_contacts(self):
        eq_(self.record.admin_contacts.__class__.__name__, 'list')
        eq_(self.record.admin_contacts, [])
    def test_registered(self):
        eq_(self.record.registered, False)
    def test_created_on(self):
        eq_(self.record.created_on, None)
    def test_registrar(self):
        eq_(self.record.registrar, None)
    def test_registrant_contacts(self):
        eq_(self.record.registrant_contacts.__class__.__name__, 'list')
        eq_(self.record.registrant_contacts, [])
    def test_technical_contacts(self):
        eq_(self.record.technical_contacts.__class__.__name__, 'list')
        eq_(self.record.technical_contacts, [])
    def test_updated_on(self):
        eq_(self.record.updated_on, None)
    def test_domain_id(self):
        eq_(self.record.domain_id, None)
    def test_expires_on(self):
        eq_(self.record.expires_on, None)
|
"""
Redis Blueprint
===============
**Fabric environment:**
.. code-block:: yaml
blueprints:
- blues.redis
settings:
redis:
# bind: 0.0.0.0 # Set the bind address specifically (Default: 127.0.0.1)
"""
import re
from fabric.decorators import task
from fabric.utils import abort
from refabric.context_managers import sudo
from refabric.contrib import blueprints
from . import debian
from refabric.operations import run
__all__ = ['start', 'stop', 'restart', 'setup', 'configure']
blueprint = blueprints.get(__name__)
# Service-control tasks delegating to the redis-server init service.
start = debian.service_task('redis-server', 'start')
stop = debian.service_task('redis-server', 'stop')
restart = debian.service_task('redis-server', 'restart')
@task
def setup():
    """
    Install and configure Redis
    """
    install()
    configure()
def install():
    """Install the redis-server package via apt (requires sudo)."""
    with sudo():
        debian.apt_get('install', 'redis-server')
def get_installed_version():
    """
    Get installed version as tuple.

    Parsed output format:
      Redis server v=2.8.4 sha=00000000:0 malloc=jemalloc-3.4.1 bits=64 build=a...

    Aborts the fabric task when the version cannot be parsed.
    """
    retval = run('redis-server --version')
    # Raw string for the regex (the original's '\.' relied on a lenient
    # non-raw escape).
    m = re.match(r'.+v=(?P<version>[0-9.]+).+', retval.stdout)
    try:
        _v = m.group('version')
        v = tuple(map(int, str(_v).split('.')))
        return v
    except (AttributeError, IndexError):
        # AttributeError: regex did not match at all (m is None) -- the
        # original only caught IndexError, so abort() was never reached.
        abort('Failed to get installed redis version')
@task
def configure():
    """
    Configure Redis

    Uploads the version-appropriate config template and restarts the
    service only when the rendered file actually changed.
    """
    context = {
        'bind': blueprint.get('bind', '127.0.0.1')
    }
    # Pick the template matching the installed redis version.
    version = get_installed_version()
    if version <= (2, 4):
        config = 'redis-2.4.conf'
    elif version < (3, 0):
        config = 'redis-2.8.conf'
    else:
        config = 'redis-3.conf'
    uploads = blueprint.upload(config, '/etc/redis/redis.conf', context)
    if uploads:
        # NOTE(review): on Ubuntu >= 16.04 the config is chowned to the
        # redis user -- presumably required by the systemd unit; confirm.
        if debian.lbs_release() >= '16.04':
            debian.chown(location='/etc/redis/redis.conf',
                         owner='redis', group='root')
        restart()
|
import json
from flask import Flask
from flask import request
from flask import jsonify
import time
from psutil import net_io_counters
from asyncftp import __version__
import threading
from asyncftp.Logger import _LogFormatter
# Module-level state for the /api/info speed computation and the log buffer.
t = time.time()
net = net_io_counters()
formatter = _LogFormatter(color=False)
log_message = str()
def make_app(server, queue):
    """Build the Flask monitoring/control app for an asyncftp server.

    server: the FTP server instance to control and report on.
    queue: queue of log records drained by the /api/log endpoint.
    """
    app = Flask(__name__)
    @app.route('/api/info', methods=['GET'])
    def speed():
        # Report up/down byte rates since the previous call, plus uptime.
        if request.method == 'GET':
            global t
            global net
            temp_t = time.time()
            p = net_io_counters()
            result = dict()
            result['speed'] = dict(
                up=(p[0] - net[0]) / (temp_t - t),
                down=(p[1] - net[1]) / (temp_t - t)
            )
            result['up_time'] = server.up_time
            result['running'] = True if server.up_time else False
            t = temp_t
            net = p
            return jsonify(result)
    @app.route('/api/start', methods=['GET'])
    def run_server():
        # Start the server on a daemonless background thread if not running.
        if not server.running:
            thread = threading.Thread(target=server.run)
            thread.start()
        return 'ok'
    @app.route('/api/stop', methods=['GET'])
    def close_server():
        server.close()
        return 'ok'
    @app.route('/api/config', methods=['GET', 'POST'])
    def config():
        # GET returns current settings; POST adds refused client IPs.
        if request.method == 'GET':
            return jsonify({
                'host': server.host,
                'port': str(server.port),
                'version': __version__,
                'refuse_ip': server.ip_refuse
            })
        if request.method == 'POST':
            data = json.loads(request.data.decode('utf-8'))
            for ip in data['refuse_ip']:
                server.add_refuse_ip(ip)
            return 'ok'
    @app.route('/api/log', methods=['GET'])
    def log():
        # Drain pending records into the (ever-growing) global log buffer
        # and return the whole accumulated log text.
        if request.method == 'GET':
            result = str()
            while not queue.empty():
                record = queue.get(block=False)
                result += formatter.format(record) + '\n'
            global log_message
            log_message += result
            return log_message
    return app
|
import os
from celery import Celery
from django.apps import apps, AppConfig
from django.conf import settings
if not settings.configured:
    # set the default Django settings module for the 'celery' program.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings.local') # pragma: no cover
app = Celery('lipame')
class CeleryConfig(AppConfig):
    """Django app config that wires Celery to the project settings."""
    name = 'lipame.taskapp'
    verbose_name = 'Celery Config'
    def ready(self):
        # Using a string here means the worker will not have to
        # pickle the object when using Windows.
        app.config_from_object('django.conf:settings')
        installed_apps = [app_config.name for app_config in apps.get_app_configs()]
        app.autodiscover_tasks(lambda: installed_apps, force=True)
@app.task(bind=True)
def debug_task(self):
    # Smoke-test task: prints its own request context.
    print('Request: {0!r}'.format(self.request))  # pragma: no cover
|
from src.interfacing.ogs.connect import Authentication
import codecs
import sys
import os
from time import sleep
def loadList(pNameFile):
    """Read a UTF-8 text file and return its lines as a list.

    Line endings are preserved exactly as read; callers strip them.
    """
    with codecs.open(pNameFile, "r", "utf-8") as handle:
        return [row for row in handle]
if __name__ == "__main__":
    # Update the description of every Kuksu-league group tournament on OGS.
    a = Authentication("Kuksu League", "", testing=False);
    iGroupNames = loadList("E:/Project/OGS/OGS-League/group_names.txt");
    iGroupIDs = loadList("E:/Project/OGS/OGS-League/group_ids.txt");
    nGroups = len(iGroupNames);
    for i in range(nGroups):
        # loadList keeps line endings; strip them and coerce ids to int.
        iGroupNames[i] = iGroupNames[i].replace("\r\n", "");
        iGroupNames[i] = iGroupNames[i].replace("\n", "");
        iGroupIDs[i] = iGroupIDs[i].replace("\r\n", "");
        iGroupIDs[i] = iGroupIDs[i].replace("\n", "");
        iGroupIDs[i] = int(iGroupIDs[i]);
        # HTML description template; %s is the group name.
        iDescription = """
Kuksu Main Title Tournament 9th Cycle Group %s
Title Holder: <a href='https://online-go.com/user/view/35184/vitality'>vitality (5d)</a>
Previous cycles:
<table style="text-align:center;" border='2'>
<tr><th rowspan=2>Cycle</th><td colspan=3><b>Title Match</b></td><td colspan=3><b>Title Tournament</b></td></tr>
<tr>
<th>Winner</th><th>Score</th><th>Runner-up</th>
<th>Winner<img src='https://a00ce0086bda2213e89f-570db0116da8eb5fdc3ce95006e46d28.ssl.cf1.rackcdn.com/4.2/img/trophies/gold_title_19.png' alt='Gold'></img></th>
<th>Runner-up<img src='https://a00ce0086bda2213e89f-570db0116da8eb5fdc3ce95006e46d28.ssl.cf1.rackcdn.com/4.2/img/trophies/silver_title_19.png' alt='Silver'></img></th>
<th>3rd Place<img src='https://a00ce0086bda2213e89f-570db0116da8eb5fdc3ce95006e46d28.ssl.cf1.rackcdn.com/4.2/img/trophies/bronze_title_19.png' alt='Bronze'></img></th>
</tr>
<tr>
<td><a href='https://online-go.com/tournament/2375'>1</a></td>
<td><b>luke</b></td><td></td><td></td>
<td><b>luke (2d)</b></td><td>davos</td><td>gomad361</td>
</tr>
<tr>
<td><a href='https://online-go.com/tournament/2384'>2</a></td>
<td><b>gomad361</b></td><td>3-2</td><td>luke</td>
<td><b>luke (2d)</b></td><td>gomad361</td><td>hotspur</td>
</tr>
<tr>
<td><a href='https://online-go.com/tournament/2391'>3</a></td>
<td><b>Uberdude</b></td><td>∗</td><td>gomad361</td>
<td><b>Uberdude (6d)</b></td><td>KyuT</td><td>marigo</td>
</tr>
<tr>
<td><a href='https://online-go.com/tournament/2406'>4</a></td>
<td><b>Uberdude</b></td><td>5-0</td><td>KyuT</td>
<td><b>KyuT (4d)</b></td><td>quiller</td><td>morituri</td>
</tr>
<tr>
<td><a href='https://online-go.com/tournament/2424'>5</a></td>
<td><b>Uberdude</b></td><td>5-0</td><td>gomad361</td>
<td><b>gomad361 (2d)</b></td><td>morituri</td><td>betterlife</td>
</tr>
<tr>
<td><a href='https://online-go.com/tournament/2439'>6</a></td>
<td><b>Uberdude</b></td><td>5-0</td><td>Elin</td>
<td><b>Elin (3d)</b></td><td>gomad361</td><td>morituri</td>
</tr>
<tr>
<td><a href='https://online-go.com/tournament/2460'>7</a></td>
<td><b>Uberdude</b></td><td>3-2</td><td>vitality</td>
<td><b>vitality (5d)</b></td><td>Elin</td><td>gomad361</td>
</tr>
<tr>
<td><a href='https://online-go.com/tournament/2475'>8</a></td>
<td><b>vitality</b></td><td>∗</td><td>Uberdude</td>
<td><b>vitality (5d)</b></td><td>nrx</td><td>gojohn</td>
</tr>
<tr>
<td rowspan=5><a href='#'>9</a></td>
<td rowspan=5 colspan=3></td>
<td colspan=3>
<a href='https://online-go.com/tournament/12653'>[A]</a>
</td>
</tr>
<tr>
<td colspan=3>
<a href='https://online-go.com/tournament/12654'>[B1]</a>
<a href='https://online-go.com/tournament/12655'>[B2]</a>
</td>
</tr>
<tr>
<td colspan=3>
<a href='https://online-go.com/tournament/12656'>[C1]</a>
<a href='https://online-go.com/tournament/12657'>[C2]</a>
<a href='https://online-go.com/tournament/12658'>[C3]</a>
<a href='https://online-go.com/tournament/12659'>[C4]</a>
</td>
</tr>
<tr>
<td colspan=3>
<a href='https://online-go.com/tournament/12660'>[D1]</a>
<a href='https://online-go.com/tournament/12661'>[D2]</a>
<a href='https://online-go.com/tournament/12662'>[D3]</a>
<a href='https://online-go.com/tournament/12663'>[D4]</a>
<a href='https://online-go.com/tournament/12664'>[D5]</a>
<a href='https://online-go.com/tournament/12665'>[D6]</a>
<a href='https://online-go.com/tournament/12666'>[D7]</a>
<a href='https://online-go.com/tournament/12667'>[D8]</a>
</td>
</tr>
<tr>
<td colspan=3>
<a href='https://online-go.com/tournament/12668'>[E1]</a>
<a href='https://online-go.com/tournament/12669'>[E2]</a>
<a href='https://online-go.com/tournament/12670'>[E3]</a>
<a href='https://online-go.com/tournament/12671'>[E4]</a>
<a href='https://online-go.com/tournament/12672'>[E5]</a>
<a href='https://online-go.com/tournament/12673'>[E6]</a>
</td>
</tr>
</table>
∗ means the games were finished by timeout or retiring.
Rules could be found <a href='https://forums.online-go.com/t/league-format-kuksu-title-tournament-rules-and-discussion/5191'>here</a>.
""" % iGroupNames[i];
        # PUT the rendered description to the tournament, throttled to
        # avoid hammering the OGS API.
        a.put(['tournaments', iGroupIDs[i]], {"description": iDescription
        });
        print("Tournament %s with id %d updated.\n" % (iGroupNames[i], iGroupIDs[i]));
        sleep(2);
    # Kept for reference: one-off snippet for creating a tournament via POST.
    """
    iTournament = a.post(['tournaments'],{
        "id":12650,
        "name":"Test Tournament 2",
        "group":515,
        "tournament_type":"roundrobin",
        "description":"<b>Test 3</b>",
        "board_size":19,
        "handicap":0, #default -1 for auto
        "time_start": "2015-12-01T00:00:00Z",
        "time_control_parameters":{
            "time_control":"fischer",
            "initial_time":604800,
            "max_time":604800,
            "time_increment":86400
        },
        "rules": "korean",
        "exclusivity": "invite", # open, group. default
        "exclude_provisional": False, # default
        "auto_start_on_max": True, # default
        "analysis_enabled": True, #default
        "settings":{
            "maximum_players":10,
        },
        "players_start": 6, #default
        "first_pairing_method": "slide", #slaughter, random, slide, strength . default
        "subsequent_pairing_method": "slide", # default
        "min_ranking":0,
        "max_ranking":36
    });
    print(iTournament["id"]);
    """
|
"""
Helper module for Python version 3.0 and above
- Ordered dictionaries
- Encoding/decoding urls
- Unicode/Bytes (for sending/receiving data from/to socket, base64)
- Exception handling (except Exception as e)
"""
import base64
from urllib.parse import unquote, quote
from collections import OrderedDict
def modulename():
    """Identify this compatibility helper module."""
    return 'Helper module for Python version 3.0 and above'
def url_decode(uri):
    """Percent-decode a URL-encoded string."""
    return unquote(uri)
def url_encode(uri):
    """Percent-encode a string for safe use inside a URL."""
    return quote(uri)
def new_dictionary():
    """Create an empty insertion-ordered dictionary."""
    return OrderedDict()
def dictionary_keys(dictionary):
    """Return the dictionary's keys as a list (in insertion order)."""
    return [key for key in dictionary.keys()]
def dictionary_values(dictionary):
    """Return the dictionary's values as a list (in insertion order)."""
    return [value for value in dictionary.values()]
def data_read(data):
    """Decode bytes received from a socket into a UTF-8 string."""
    return str(data, 'utf8')
def data_write(data):
    """Encode a string into UTF-8 bytes for sending over a socket."""
    return data.encode('utf8')
def base64_decode(data):
    """Decode a Base64 payload and return it as a UTF-8 string."""
    raw = base64.b64decode(data)
    return raw.decode('utf8')
def base64_encode(data):
    """Encode an ASCII string as Base64, returned as a UTF-8 string."""
    encoded = base64.b64encode(data.encode('ascii'))
    return encoded.decode('utf8')
def unicode_chr(code):
    """Return the character for the given Unicode code point."""
    return chr(code)
def unicode_string(string):
    """Return the string unchanged; Python 3 strings are already Unicode."""
    return string
def is_digit(string):
    """True when the value is a str consisting solely of digits."""
    if not isinstance(string, str):
        return False
    return string.isdigit()
def is_number(value):
    """True when the value is an int (note: bool is a subclass of int)."""
    return isinstance(value, int)
|
"""
Created on 27/04/2015
@author: C&C - HardSoft
"""
from util.HOFs import *
from util.CobolPatterns import *
from util.homogenize import Homogenize
def calc_length(copy):
    """Compute the LRECL (record byte length) of a COBOL copybook.

    `copy` may be a list of lines or a newline-separated string.
    Returns {'retorno': bool, 'msg': str or None, 'lrecl': int}; 'retorno'
    is False when the book still contains an unexpanded COPY statement.
    NOTE(review): written for Python 2 -- under Python 3 `filter()` returns
    a lazy object that is always truthy and not subscriptable, so the
    COPY-detection branch would misbehave; confirm the target runtime.
    """
    if isinstance(copy, list):
        book = copy
    else:
        if isinstance(copy, str):
            book = copy.splitlines()
        else:
            book = []
    lines = Homogenize(book)
    # Reject books that still contain COPY statements (must be expanded first).
    havecopy = filter(isCopy, lines)
    if havecopy:
        bkm = ''.join(havecopy[0].split('COPY')[1].replace('.', '').split())
        msg = 'COPY {} deve ser expandido.'.format(bkm)
        return {'retorno': False, 'msg': msg, 'lrecl': 0}
    lrecl = 0
    redefines = False     # inside a REDEFINES group being skipped
    occurs = 0            # current OCCURS nesting depth
    dicoccurs = {}        # depth -> {'occ', 'level', 'length'}
    level_redefines = 0
    for line in lines:
        match = CobolPatterns.row_pattern.match(line.strip())
        if not match:
            continue
        match = match.groupdict()
        if not match['level']:
            continue
        # Fall back to dedicated patterns when the row pattern missed them.
        if 'REDEFINES' in line and not match['redefines']:
            match['redefines'] = CobolPatterns.row_pattern_redefines.search(line).groupdict().get('redefines')
        if 'OCCURS' in line and not match['occurs']:
            match['occurs'] = CobolPatterns.row_pattern_occurs.search(line).groupdict().get('occurs')
        level = int(match['level'])
        # Skip every item nested under a REDEFINES (it occupies no new bytes).
        if redefines:
            if level > level_redefines:
                continue
            redefines = False
            level_redefines = 0
        if match['redefines']:
            level_redefines = level
            redefines = True
            continue
        if occurs:
            if level > dicoccurs[occurs]['level']:
                # Still inside the current OCCURS group (possibly nesting deeper).
                if match['occurs']:
                    occurs += 1
                    attrib = {}
                    attrib['occ'] = int(match['occurs'])
                    attrib['level'] = level
                    attrib['length'] = 0
                    dicoccurs[occurs] = attrib
                if match['pic']:
                    dicoccurs[occurs]['length'] += FieldLength(match['pic'], match['usage'])
                continue
            # Left one or more OCCURS groups: fold their lengths outward.
            while True:
                if occurs == 1:
                    lrecl += dicoccurs[occurs]['length'] * dicoccurs[occurs]['occ']
                else:
                    dicoccurs[occurs-1]['length'] += dicoccurs[occurs]['length'] * dicoccurs[occurs]['occ']
                del dicoccurs[occurs]
                occurs -= 1
                if not occurs:
                    break
                if level > dicoccurs[occurs]['level']:
                    break
        if match['occurs']:
            occurs += 1
            attrib = {}
            attrib['occ'] = int(match['occurs'])
            attrib['level'] = level
            attrib['length'] = 0
            dicoccurs[occurs] = attrib
        if match['pic']:
            if occurs:
                dicoccurs[occurs]['length'] += FieldLength(match['pic'], match['usage'])
            else:
                lrecl += FieldLength(match['pic'], match['usage'])
    return {'retorno': True, 'msg': None, 'lrecl': lrecl}
def FieldLength(pic_str, usage):
    """Return the byte length of a field described by a COBOL PIC clause.

    pic_str: the PIC picture string (leading sign 'S' ignored).
    usage: the USAGE clause text, or falsy for DISPLAY.
    NOTE(review): the `/ 2` divisions assume Python 2 integer division;
    under Python 3 they yield floats -- confirm the target runtime.
    """
    if pic_str[0] == 'S':
        pic_str = pic_str[1:]
    # Expand repeat shorthand: the 'constant' group repeated 'repeat' times.
    while True:
        match = CobolPatterns.pic_pattern_repeats.search(pic_str)
        if not match:
            break
        match = match.groupdict()
        expanded_str = match['constant'] * int(match['repeat'])
        pic_str = CobolPatterns.pic_pattern_repeats.sub(expanded_str, pic_str, 1)
    # 'V' marks an implied decimal point and occupies no storage.
    len_field = len(pic_str.replace('V', ''))
    if not usage:
        usage = 'DISPLAY'
    if 'COMP-3' in usage or 'COMPUTATIONAL-3' in usage:
        len_field = len_field / 2 + 1
    elif 'COMP' in usage or 'COMPUTATIONAL' in usage or 'BINARY' in usage:
        len_field = len_field / 2
    elif 'SIGN' in usage:
        len_field += 1
    return len_field
|
from threading import local
from django.contrib.sites.models import Site
import os
# Thread-local storage; GlobalRequestMiddleware stores each request here.
_locals = local()
def get_current_request():
    """Return the request bound to the current thread, or None outside one."""
    return getattr(_locals, 'request', None)
def get_current_site():
    """Return the Site matching the current request's host.

    Falls back to the first Site on any lookup failure (missing Site,
    multiple matches, unconfigured DB, ...).
    """
    request = get_current_request()
    host = request.get_host()
    try:
        return Site.objects.get(domain__iexact=host)
    except Exception:
        # The original used a bare `except:`; Exception keeps the
        # best-effort fallback without swallowing SystemExit/KeyboardInterrupt.
        return Site.objects.all()[0]
class GlobalRequestMiddleware(object):
    """Old-style middleware that stashes each request in thread-local storage."""
    def process_request(self, request):
        # Make the request retrievable via get_current_request() anywhere.
        _locals.request = request
|
from msrest.serialization import Model
class GenerateArmTemplateRequest(Model):
    """Parameters for generating an ARM template for deploying artifacts.

    :param virtual_machine_name: The resource name of the virtual machine.
    :type virtual_machine_name: str
    :param parameters: The parameters of the ARM template.
    :type parameters: list[~azure.mgmt.devtestlabs.models.ParameterInfo]
    :param location: The location of the virtual machine.
    :type location: str
    :param file_upload_options: Options for uploading the files for the
     artifact. UploadFilesAndGenerateSasTokens is the default value. Possible
     values include: 'UploadFilesAndGenerateSasTokens', 'None'
    :type file_upload_options: str or
     ~azure.mgmt.devtestlabs.models.FileUploadOptions
    """

    _attribute_map = {
        'virtual_machine_name': {'key': 'virtualMachineName', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '[ParameterInfo]'},
        'location': {'key': 'location', 'type': 'str'},
        'file_upload_options': {'key': 'fileUploadOptions', 'type': 'str'},
    }

    def __init__(self, virtual_machine_name=None, parameters=None, location=None, file_upload_options=None):
        # Initialise msrest.Model state (serialization machinery) before
        # setting our own attributes; the original skipped this call.
        super(GenerateArmTemplateRequest, self).__init__()
        self.virtual_machine_name = virtual_machine_name
        self.parameters = parameters
        self.location = location
        self.file_upload_options = file_upload_options
|
import factory
from data.tests.factories import DepartmentFactory
from ..models import Tourist, TouristCard
class TouristFactory(factory.DjangoModelFactory):
    """Factory producing Tourist instances with fixed sample data."""
    class Meta:
        model = Tourist
    first_name = 'Dave'
    last_name = 'Greel'
    email = 'greel@musicians.com'
class TouristCardFactory(factory.DjangoModelFactory):
    """Factory producing a TouristCard linked to fresh Tourist/Department rows."""
    class Meta:
        model = TouristCard
    tourist = factory.SubFactory(TouristFactory)
    current_department = factory.SubFactory(DepartmentFactory)
|
__author__ = 'kjoseph'
import itertools
import Queue
from collections import defaultdict
from dependency_parse_object import DependencyParseObject, is_noun, is_verb
def get_parse(dp_objs):
    """Build traversal state for a dependency parse.

    dp_objs: iterable of DependencyParseObject.
    Returns (to_parse, term_map, map_to_head, non_terms): term_map maps
    id -> object, map_to_head maps head id -> list of child ids, to_parse
    is a LIFO queue seeded with [root, depth=0] pairs, and non_terms are
    objects with head == -1.  Adjacent noun and verb chunks are merged
    in-place before the queue is built.
    """
    term_map = {}
    map_to_head = defaultdict(list)
    for parse_object in dp_objs:
        if parse_object.head > 0:
            map_to_head[parse_object.head].append(parse_object.id)
        term_map[parse_object.id] = parse_object
    # first manually combine MWE
    #mwe_to_combine = get_mwe_combinations(map_to_head,term_map)
    #for mwe in mwe_to_combine:
    #    combine_terms(mwe,term_map,map_to_head)
    #conj_to_combine = get_conj_combinations(map_to_head,term_map)
    #for conj in conj_to_combine:
    #    combine_terms(conj,term_map,map_to_head)
    # now manually chunk the nouns together
    nouns_to_combine = get_noun_combinations(map_to_head,term_map)
    for noun_set in nouns_to_combine:
        combine_terms(noun_set,term_map, map_to_head)
    verbs_to_combine = get_verb_combinations(map_to_head,term_map)
    for verb_set in verbs_to_combine:
        combine_terms(verb_set,term_map, map_to_head)
    roots =[]
    non_terms = []
    for parse_object in term_map.values():
        if parse_object.head == 0:
            roots.append(parse_object)
        elif parse_object.head == -1:
            non_terms.append(parse_object)
    # now build the parse tree
    to_parse = Queue.LifoQueue()
    for root in reversed(roots):
        to_parse.put([root,0])
    return to_parse, term_map, map_to_head, non_terms
def get_noun_combinations(map_to_head,term_map):
    """Collect {child, head} id pairs where both sides look nominal
    (noun POS or one of the D/@/A/R tags), merged transitively."""
    to_combine = []
    for head_id, children in map_to_head.iteritems():
        head = term_map[head_id]
        if len(children) == 0 or not (is_noun(head.postag) or head.postag in ['D','@','A','R']) :
            continue
        for child_id in children:
            child = term_map[child_id]
            if is_noun(child.postag) or child.postag in ['D','@','A','R']:
                to_combine.append({child.id, head.id})
    return get_combinations(to_combine)
def get_verb_combinations(map_to_head,term_map):
    """Collect {child, head} id pairs of adjacent verbs (child directly
    follows its head), merged transitively."""
    to_combine = []
    for head_id, children in map_to_head.iteritems():
        head = term_map[head_id]
        if len(children) == 0 or not is_verb(head.postag):
            continue
        for child_id in children:
            child = term_map[child_id]
            if is_verb(child.postag) and child.id == (head.id +1):
                to_combine.append({child.id, head.id})
    return get_combinations(to_combine)
def get_mwe_combinations(map_to_head,term_map):
    """Collect {child, head} id pairs linked by an MWE dependency relation."""
    to_combine = []
    for head_id, children in map_to_head.iteritems():
        head = term_map[head_id]
        if len(children) == 0:
            continue
        for child_id in children:
            child = term_map[child_id]
            if child.deprel == 'MWE':
                to_combine.append({child.id, head.id})
    return get_combinations(to_combine)
def get_conj_combinations(map_to_head,term_map):
    """Collect {child, head} id pairs linked by a CONJ dependency relation."""
    to_combine = []
    for head_id, children in map_to_head.iteritems():
        head = term_map[head_id]
        if len(children) == 0:
            continue
        for child_id in children:
            child = term_map[child_id]
            if child.deprel == 'CONJ':
                to_combine.append({child.id, head.id})
    return get_combinations(to_combine)
def get_combinations(to_combine):
    """Merge overlapping sets in `to_combine` until a fixed point.

    Mutates and returns `to_combine`: any two sets sharing an element are
    replaced by their union, repeatedly, so the result is a list of
    pairwise-disjoint sets.
    """
    combination_found = True
    while combination_found:
        combination_found = False
        # combinations() snapshots the list at call time, so mutating
        # to_combine inside the loop is safe for this pass.
        combos = itertools.combinations(to_combine, 2)
        removed = []
        for d in combos:
            # Skip pairs involving a set already merged away this pass.
            # (The original tested len([...]) which is truthy whenever
            # `removed` is non-empty, needlessly skipping valid pairs.)
            if any(d[0] == r or d[1] == r for r in removed):
                continue
            if d[0] & d[1]:
                combination_found = True
                merged = d[0] | d[1]
                # Drop both operands, then keep exactly one merged set.
                to_combine[:] = [x for x in to_combine if x != d[0] and x != d[1]]
                to_combine.append(merged)
                removed.append(d[0])
                removed.append(d[1])
    return to_combine
def combine_terms(noun_set,term_map, map_to_head):
    """Merge the ids in noun_set into one DependencyParseObject.

    Rewires term_map and map_to_head in place: the merged object keeps one
    of the original ids, children of absorbed nodes are re-attached to the
    merged object, and absorbed ids are deleted from both maps.
    """
    new_parse_obj = DependencyParseObject(object_ids=noun_set,term_map=term_map)
    # okay, we've created a new parse object
    # now we need to update the relations to it
    for id in noun_set:
        if id == new_parse_obj.id:
            # Surviving id: keep it, but drop intra-set child links.
            term_map[id] = new_parse_obj
            if id in map_to_head:
                for child_id in noun_set:
                    if child_id in map_to_head[id]:
                        map_to_head[id].remove(child_id)
        else:
            # things dependent on this thing need to become dependent on the new parse object
            if id in map_to_head:
                for child in map_to_head[id]:
                    if child not in noun_set:
                        map_to_head[new_parse_obj.id].append(child)
                        term_map[child].head = new_parse_obj.id
                del map_to_head[id]
            del term_map[id]
def print_parse(parse_out, term_map, map_to_head):
    """Depth-first pretty-print of the parse tree, indented by depth.

    Consumes the LIFO queue produced by get_parse (Python 2 print statement).
    """
    while not parse_out.empty():
        curr_head,level = parse_out.get()
        print " "*level + str(level) +" " + curr_head.__unicode__()
        # Push children in reverse so they pop in original order.
        for child in reversed(map_to_head.get(curr_head.id,[])):
            parse_out.put([term_map[child],level+1])
def get_entities_from_parse(term_map):
    """Extract entity strings from merged parse objects.

    Returns (all_entities, all_proper, all_entities_original_ids,
    all_proper_original_ids): lowercased entity strings for every nominal
    object, plus the original-cased subset tagged proper ('^' in postag,
    capitalised), each with their sorted original token ids.
    """
    all_proper = []
    all_entities = []
    all_entities_original_ids = []
    all_proper_original_ids = []
    for k,v in term_map.iteritems():
        if is_noun(v.postag) or v.postag == '@' or v.postag == '#':
            text = []
            split_text = v.text.split()
            ent_ids = []
            for x in range(len(split_text)):
                t = split_text[x]#.strip(string.punctuation)
                #if x == 0 and t in stopwords:
                #    continue
                text.append(t)
                ent_ids.append(v.all_original_ids[x])
            if len(text) > 0 and v.postag != 'O':
                if '^' in v.postag and v.text[0].isupper():
                    all_proper.append(" ".join(text))
                    all_proper_original_ids.append(sorted(v.all_original_ids))
                all_entities.append(" ".join([t.lower() for t in text]))
                all_entities_original_ids.append(sorted(ent_ids))
    return all_entities, all_proper, all_entities_original_ids, all_proper_original_ids
|
""" Python's random module includes a function choice(data) that returns a
random element from a non-empty sequence. The random module includes
a more basic function randrange, with parametrization similar to
the built-in range function, that return a random choice from the given
range. Using only the randrange function, implement your own version
of the choice function.
>>> data = [2,3,4,5,6,7,8,9,10,11,10,9,8,7,6,5,4,3,2,1]
>>> results = list()
>>> for x in range(len(data)*20):
... val = custom_choice(data)
... results.append(val in data)
>>> print(results)
[True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True]
"""
def custom_choice(data):
    """Return a random element of the non-empty sequence *data*.

    Implemented with random.randrange only, mirroring random.choice.
    """
    import random
    return data[random.randrange(len(data))]
|
def math(num, dem):
    """Reduce the fraction num/dem to lowest terms.

    Returns the simplified (numerator, denominator) pair.  The original
    version of this script ended mid-statement (`if remains =`) and could
    not run; this rewrite completes its evident intent of simplifying a
    user-supplied fraction.
    """
    # Euclid's algorithm for the greatest common divisor.
    a, b = num, dem
    while b:
        a, b = b, a % b
    gcd = a or 1  # guard against num == dem == 0
    return num // gcd, dem // gcd


if __name__ == '__main__':
    # Prompt only when run as a script so importing this module is side-effect free.
    num = int(input("What is the numerator"))
    dem = int(input("What is the denominator"))
    print("%d/%d" % math(num, dem))
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: tightens Location field lengths and
    # adds choices/defaults on the Year model.
    dependencies = [
        ('rii_Api', '0001_initial'),
    ]
    operations = [
        migrations.AlterField(
            model_name='location',
            name='state',
            field=models.CharField(max_length=2),
        ),
        migrations.AlterField(
            model_name='location',
            name='venueName',
            field=models.CharField(max_length=50),
        ),
        migrations.AlterField(
            model_name='year',
            name='year',
            field=models.IntegerField(choices=[(1919, 1919), (1920, 1920), (1921, 1921), (1922, 1922), (1923, 1923), (1924, 1924), (1925, 1925)], default=1919),
        ),
        migrations.AlterField(
            model_name='year',
            name='yearSummary',
            field=models.TextField(default='', max_length=2000),
        ),
    ]
|
"""Some classes to support import of data files
"""
import os, glob
import numpy
import time
try:
import ConfigParser as configparser #gets rename to lowercase in python 3
except:
import configparser
class _BaseDataFile(object):
    """Abstract base for on-disk data files: locate, then load header and data.

    Subclasses must implement _loadHeader() and _loadData().
    """
    def __init__(self, filepath):
        """Store the base filepath and eagerly load header info and data."""
        self.filepath = filepath
        self.info = self._loadHeader()
        self.data = self._loadData()
    def _findFile(self, ending='', orSimilar=False):
        """Finds files using the base filename and the optional `ending` param (used to separate data from header)
        If orSimilar==True then this function will first search for the exact file and then for any file of the
        appropriate format in that folder.  Returns the single matching
        filename, or None when zero or multiple files matched.
        """
        #fetch all header/data files matching path
        searchPattern = self.filepath+'*'+ending
        filenames = glob.glob(searchPattern)
        #check if we have exactly one matching file
        filename=None
        if len(filenames)==0 and orSimilar:
            # Fall back to any file with this ending in the same folder.
            folder = os.path.split(os.path.abspath(self.filepath))[0]
            print('No exact match found for\n\t %s' %(searchPattern))
            searchPattern = os.path.join(folder, '*'+ending)
            print('...searching instead for\n\t %s' %(searchPattern))
            filenames = glob.glob(searchPattern)
        if len(filenames)==0:
            print('No file found: %s' %(searchPattern))
        elif len(filenames)>1:
            print('Multiple files found')
        else:
            filename = filenames[0]
        return filename
class DBPA_file(_BaseDataFile):
    """Reader for Sensorium DBPA amplifier recordings.

    The format is a 5-line ASCII header ('*.h.txt', config-file style) plus
    a binary data file ('*.dat') of big-endian float32 samples.

    Example usage:
        datFile = io.DBPA_file('jwp_2013_18_02')  # don't include file extension
        print(datFile.info)   # header info (samples, seconds etc)
        {'channels': 122, 'duration': 761, 'rate': 1000, 'samples': 761000}
        print(datFile.data.shape)
        (122, 761000)  # channels, samples
    """
    def _loadHeader(self):
        """Load info from a header file ('*.h.txt').

        Returns a dict with 'channels' and 'rate' keys (when present in the
        header). Raises IOError if no header file can be found.
        """
        filename = self._findFile(ending='h.txt', orSimilar=True)
        if not filename:
            # Fail loudly: the previous code printed a message and then
            # crashed on the None filename anyway.
            raise IOError('No header file matching %s' % (self.filepath,))
        # The header looks like a config file with a single section.
        cfg = configparser.ConfigParser()
        # read() works on both Python 2 and 3; readfp() was removed in 3.12.
        cfg.read(filename)
        hdr = {}
        for name, val in cfg.items('File Information'):
            if name.lower() == 'number of channels':
                hdr['channels'] = int(val.replace('"', ''))  # '"200"' -> 200
            elif name.lower() == 'samples per second':
                hdr['rate'] = int(val.replace('"', ''))
        return hdr
    def _loadData(self):
        """Read the binary data file; return a (channels, samples) array.

        Also fills in info['duration'] and info['samples'] from the file
        size. Raises IOError if no data file can be found.
        """
        filename = self._findFile(ending='dat')
        if not filename:
            raise IOError('No data file matching %s' % (self.filepath,))
        fileSize = os.stat(filename).st_size
        # 4 bytes per big-endian float32 sample.
        self.info['duration'] = int(fileSize / self.info['rate'] / self.info['channels'] / 4)
        self.info['samples'] = self.info['duration'] * self.info['rate']
        data = numpy.fromfile(filename, dtype='>f')  # big-endian float32
        data = data.reshape([self.info['samples'], self.info['channels']])
        return data.transpose()  # to get (channels, time)
|
__author__ = 'emre'

# Use the print() function so this runs on both Python 2 and Python 3.
print("hello world")
|
import os
from flask import Flask, g, session, redirect, request, url_for, jsonify
from requests_oauthlib import OAuth2Session
# OAuth2 client credentials must be provided via the environment;
# a missing variable fails fast with KeyError at import time.
OAUTH2_CLIENT_ID = os.environ['OAUTH2_CLIENT_ID']
OAUTH2_CLIENT_SECRET = os.environ['OAUTH2_CLIENT_SECRET']
OAUTH2_REDIRECT_URI = 'http://localhost:5000/callback'
# Discord API endpoints (base overridable via API_BASE_URL for testing).
API_BASE_URL = os.environ.get('API_BASE_URL', 'https://discordapp.com/api')
AUTHORIZATION_BASE_URL = API_BASE_URL + '/oauth2/authorize'
TOKEN_URL = API_BASE_URL + '/oauth2/token'
app = Flask(__name__)
app.debug = True
# NOTE(review): reuses the OAuth client secret as the Flask session key —
# consider a dedicated SECRET_KEY.
app.config['SECRET_KEY'] = OAUTH2_CLIENT_SECRET
# Allow plain-HTTP redirect URIs during local development only.
if 'http://' in OAUTH2_REDIRECT_URI:
    os.environ['OAUTHLIB_INSECURE_TRANSPORT'] = 'true'
def token_updater(token):
    """Persist a refreshed OAuth2 token in the user's session.

    Passed to OAuth2Session as its token_updater callback (see make_session).
    """
    session['oauth2_token'] = token
def make_session(token=None, state=None, scope=None):
    """Build an OAuth2Session wired for Discord with automatic token refresh."""
    refresh_kwargs = {
        'client_id': OAUTH2_CLIENT_ID,
        'client_secret': OAUTH2_CLIENT_SECRET,
    }
    return OAuth2Session(
        client_id=OAUTH2_CLIENT_ID,
        token=token,
        state=state,
        scope=scope,
        redirect_uri=OAUTH2_REDIRECT_URI,
        auto_refresh_kwargs=refresh_kwargs,
        auto_refresh_url=TOKEN_URL,
        token_updater=token_updater)
@app.route('/')
def index():
    """Kick off the OAuth2 authorization-code flow with Discord."""
    default_scope = 'identify email connections guilds guilds.join'
    scope = request.args.get('scope', default_scope)
    discord = make_session(scope=scope.split(' '))
    authorization_url, state = discord.authorization_url(AUTHORIZATION_BASE_URL)
    # Remember the CSRF state so the callback can validate it.
    session['oauth2_state'] = state
    return redirect(authorization_url)
@app.route('/callback')
def callback():
    """Exchange the authorization code for a token and store it in the session."""
    error = request.values.get('error')
    if error:
        return error
    discord = make_session(state=session.get('oauth2_state'))
    token = discord.fetch_token(
        TOKEN_URL,
        client_secret=OAUTH2_CLIENT_SECRET,
        authorization_response=request.url)
    session['oauth2_token'] = token
    return redirect(url_for('.me'))
@app.route('/me')
def me():
    """Show the authenticated user's profile, guilds and connections as JSON."""
    discord = make_session(token=session.get('oauth2_token'))

    def fetch(path):
        # GET a Discord API resource and decode the JSON body.
        return discord.get(API_BASE_URL + path).json()

    return jsonify(
        user=fetch('/users/@me'),
        guilds=fetch('/users/@me/guilds'),
        connections=fetch('/users/@me/connections'),
    )
# Run the Flask development server when executed directly.
if __name__ == '__main__':
    app.run()
|
from MirrorAI.dataset.directional.label_image import label_image
import numpy
def test():
    """label_image should flag exactly the positions equal to the target."""
    pixels = numpy.array([1, 1, 0])
    result = label_image(pixels, target=0)
    expected = numpy.array([0, 0, 1])
    assert numpy.array_equal(result, expected)
|
import time
import json
import pprint
import hashlib
import struct
import re
import base64
import httplib
import sys
from multiprocessing import Process
ERR_SLEEP = 15  # seconds to wait after a failed/short RPC round before retrying
MAX_NONCE = 1000000L  # default nonce search space per getwork (Python 2 long)
settings = {}  # populated from the config file in __main__
pp = pprint.PrettyPrinter(indent=4)
class BitcoinRPC:
    """Minimal JSON-RPC 1.1 client for a bitcoind-style node (Python 2)."""
    # Request id counter (becomes an instance attribute on first increment).
    OBJID = 1

    def __init__(self, host, port, username, password):
        # HTTP Basic auth header, reused on every request.
        authpair = "%s:%s" % (username, password)
        self.authhdr = "Basic %s" % (base64.b64encode(authpair))
        # Non-strict connection with a 30 second timeout.
        self.conn = httplib.HTTPConnection(host, port, False, 30)
    def rpc(self, method, params=None):
        # Issue one JSON-RPC call. Returns the 'result' field, the 'error'
        # object on an RPC-level error, or None on transport/parse failure.
        self.OBJID += 1
        obj = { 'version' : '1.1',
            'method' : method,
            'id' : self.OBJID }
        if params is None:
            obj['params'] = []
        else:
            obj['params'] = params
        self.conn.request('POST', '/', json.dumps(obj),
            { 'Authorization' : self.authhdr,
              'Content-type' : 'application/json' })
        resp = self.conn.getresponse()
        if resp is None:
            print "JSON-RPC: no response"
            return None
        body = resp.read()
        resp_obj = json.loads(body)
        if resp_obj is None:
            print "JSON-RPC: cannot JSON-decode body"
            return None
        if 'error' in resp_obj and resp_obj['error'] != None:
            return resp_obj['error']
        if 'result' not in resp_obj:
            print "JSON-RPC: no result in object"
            return None
        return resp_obj['result']
    def getblockcount(self):
        # Convenience wrapper for the 'getblockcount' RPC.
        return self.rpc('getblockcount')
    def getwork(self, data=None):
        # 'getwork' with no data requests work; with data submits a solution.
        return self.rpc('getwork', data)
def uint32(x):
    """Truncate x to an unsigned 32-bit value.

    The mask previously used the Python-2-only 'L' suffix; a plain int
    literal behaves identically on Python 2 (auto-promotes to long) and
    also parses on Python 3.
    """
    return x & 0xffffffff
def bytereverse(x):
    """Swap the byte order of a 32-bit word (0x12345678 -> 0x78563412)."""
    hi_half = ((x) << 24) | (((x) << 8) & 0x00ff0000)
    lo_half = (((x) >> 8) & 0x0000ff00) | ((x) >> 24)
    return uint32(hi_half | lo_half)
def bufreverse(in_buf):
    """Byte-swap every 32-bit word of a buffer (length must be a multiple of 4)."""
    swapped = []
    for off in range(0, len(in_buf), 4):
        (word,) = struct.unpack('@I', in_buf[off:off + 4])
        swapped.append(struct.pack('@I', bytereverse(word)))
    return ''.join(swapped)
def wordreverse(in_buf):
    """Reverse the order of 4-byte words in a buffer (bytes within words untouched)."""
    words = [in_buf[off:off + 4] for off in range(0, len(in_buf), 4)]
    return ''.join(reversed(words))
class Miner:
    """Single-process getwork miner (Python 2): scan nonces, submit winners."""
    def __init__(self, id):
        self.id = id
        # Retuned each round so a scan takes roughly settings['scantime'] secs.
        self.max_nonce = MAX_NONCE
    def work(self, datastr, targetstr):
        # Scan nonces for one unit of work.
        # Returns (hashes_done, packed_nonce) on success, (hashes_done, None)
        # when the scan space is exhausted without a winner.
        # decode work data hex string to binary
        static_data = datastr.decode('hex')
        static_data = bufreverse(static_data)
        # the first 76b of 80b do not change
        blk_hdr = static_data[:76]
        # decode 256-bit target value
        targetbin = targetstr.decode('hex')
        targetbin = targetbin[::-1]  # byte-swap and dword-swap
        targetbin_str = targetbin.encode('hex')
        target = long(targetbin_str, 16)
        # pre-hash first 76b of block header (shared by every nonce)
        static_hash = hashlib.sha256()
        static_hash.update(blk_hdr)
        for nonce in xrange(self.max_nonce):
            # encode 32-bit nonce value
            nonce_bin = struct.pack("<I", nonce)
            # hash final 4b, the nonce value
            hash1_o = static_hash.copy()
            hash1_o.update(nonce_bin)
            hash1 = hash1_o.digest()
            # sha256 hash of sha256 hash
            hash_o = hashlib.sha256()
            hash_o.update(hash1)
            hash = hash_o.digest()
            # quick test for winning solution: high 32 bits zero?
            if hash[-4:] != '\0\0\0\0':
                continue
            # convert binary hash to 256-bit Python long
            hash = bufreverse(hash)
            hash = wordreverse(hash)
            hash_str = hash.encode('hex')
            l = long(hash_str, 16)
            # proof-of-work test:  hash < target
            if l < target:
                print time.asctime(), "PROOF-OF-WORK found: %064x" % (l,)
                return (nonce + 1, nonce_bin)
            else:
                print time.asctime(), "PROOF-OF-WORK false positive %064x" % (l,)
        return (nonce + 1, None)
    def submit_work(self, rpc, original_data, nonce_bin):
        # Splice the winning nonce back into the getwork data and submit it.
        nonce_bin = bufreverse(nonce_bin)
        nonce = nonce_bin.encode('hex')
        # hex chars 152:160 are the nonce field of the 80-byte header
        solution = original_data[:152] + nonce + original_data[160:256]
        param_arr = [ solution ]
        result = rpc.getwork(param_arr)
        print time.asctime(), "--> Upstream RPC result:", result
    def iterate(self, rpc):
        # One round: fetch work, scan, retune max_nonce, submit if found.
        work = rpc.getwork()
        if work is None:
            time.sleep(ERR_SLEEP)
            return
        if 'data' not in work or 'target' not in work:
            time.sleep(ERR_SLEEP)
            return
        time_start = time.time()
        (hashes_done, nonce_bin) = self.work(work['data'],
            work['target'])
        time_end = time.time()
        time_diff = time_end - time_start
        # Aim the next scan at roughly settings['scantime'] seconds of work.
        self.max_nonce = long(
            (hashes_done * settings['scantime']) / time_diff)
        if self.max_nonce > 0xfffffffaL:
            self.max_nonce = 0xfffffffaL
        if settings['hashmeter']:
            print "HashMeter(%d): %d hashes, %.2f Khash/sec" % (
                self.id, hashes_done,
                (hashes_done / 1000.0) / time_diff)
        if nonce_bin is not None:
            self.submit_work(rpc, work['data'], nonce_bin)
    def loop(self):
        # Mine forever against the configured RPC endpoint.
        rpc = BitcoinRPC(settings['host'], settings['port'],
            settings['rpcuser'], settings['rpcpass'])
        if rpc is None:
            return
        while True:
            self.iterate(rpc)
def miner_thread(id):
    """Process entry point: run one Miner's fetch/scan/submit loop forever."""
    Miner(id).loop()
if __name__ == '__main__':
    if len(sys.argv) != 2:
        print "Usage: pyminer.py CONFIG-FILE"
        sys.exit(1)

    # Parse a simple key=value config file, ignoring '#' comment lines.
    f = open(sys.argv[1])
    for line in f:
        # skip comment lines
        m = re.search('^\s*#', line)
        if m:
            continue
        # parse key=value lines
        m = re.search('^(\w+)\s*=\s*(\S.*)$', line)
        if m is None:
            continue
        settings[m.group(1)] = m.group(2)
    f.close()

    # Defaults for anything the config file omitted.
    if 'host' not in settings:
        settings['host'] = '127.0.0.1'
    if 'port' not in settings:
        settings['port'] = 45888
    if 'threads' not in settings:
        settings['threads'] = 1
    if 'hashmeter' not in settings:
        settings['hashmeter'] = 0
    if 'scantime' not in settings:
        settings['scantime'] = 30L
    if 'rpcuser' not in settings or 'rpcpass' not in settings:
        print "Missing username and/or password in cfg file"
        sys.exit(1)

    # Config values arrive as strings; normalise the numeric ones.
    settings['port'] = int(settings['port'])
    settings['threads'] = int(settings['threads'])
    settings['hashmeter'] = int(settings['hashmeter'])
    settings['scantime'] = long(settings['scantime'])

    # One OS process per "thread" (true parallelism despite the GIL).
    thr_list = []
    for thr_id in range(settings['threads']):
        p = Process(target=miner_thread, args=(thr_id,))
        p.start()
        thr_list.append(p)
        time.sleep(1)  # stagger threads

    print settings['threads'], "mining threads started"

    print time.asctime(), "Miner Starts - %s:%s" % (settings['host'], settings['port'])
    try:
        for thr_proc in thr_list:
            thr_proc.join()
    except KeyboardInterrupt:
        pass
    print time.asctime(), "Miner Stops - %s:%s" % (settings['host'], settings['port'])
|
from __future__ import unicode_literals
import autoslug.fields
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the substitutes app.

    Creates Division, Session, SessionEvent and Sub. Slugs are derived
    from names via AutoSlugField and kept in sync (always_update=True).
    """

    initial = True

    dependencies = [
        ('teams', '0001_initial'),
        ('accounts', '0001_initial'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        # A division groups teams under one representative user.
        migrations.CreateModel(
            name='Division',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=200)),
                ('slug', autoslug.fields.AutoSlugField(always_update=True, default='', editable=False, populate_from='name')),
                ('division_rep', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='division_representative', to=settings.AUTH_USER_MODEL)),
                ('teams', models.ManyToManyField(blank=True, to='teams.Team')),
            ],
        ),
        # A session is a dated run of games for one division.
        migrations.CreateModel(
            name='Session',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=200)),
                ('slug', autoslug.fields.AutoSlugField(always_update=True, default='', editable=False, populate_from='name')),
                ('game', models.CharField(max_length=100)),
                ('start_date', models.DateTimeField(verbose_name='start date')),
                ('end_date', models.DateTimeField(verbose_name='end date')),
                ('division', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='substitutes.Division')),
            ],
        ),
        # A single scheduled occurrence within a session.
        migrations.CreateModel(
            name='SessionEvent',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('start_time', models.TimeField()),
                ('date', models.DateField()),
                ('session', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='substitutes.Session')),
            ],
        ),
        # A user volunteering as a substitute for one session event.
        migrations.CreateModel(
            name='Sub',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date', models.DateTimeField(auto_now=True, verbose_name='sub date')),
                ('session_event', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='substitutes.SessionEvent')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='accounts.CustomUser')),
            ],
        ),
    ]
|
import re
import logging
from google.appengine.ext import db
from google.appengine.api import users, memcache
from handler import Handler, SlashRedirect
from webapp2_extras.routes import RedirectRoute, PathPrefixRoute
import webapp2
from webapp2_extras.routes import RedirectRoute, PathPrefixRoute
import articles
import api
from contents import panels
class Main(Handler):
    """Serves the site's base page; legacy '/main' URLs are redirected to '/'."""
    def get(self):
        logging.debug('%s' % self.request)
        if 'main' in self.request.url:
            # BUG FIX: webapp2's redirect() does not abort the handler, so
            # without this return the page body was also rendered after
            # setting the redirect.
            self.redirect('/')
            return
        self.render('base.html')
def handle_404(request, response, exception):
    """App-wide 404 handler: render the standard error page."""
    Handler(request, response).throw_error(404)
def handle_500(request, response, exception):
    """App-wide 500 handler: render the standard error page."""
    Handler(request, response).throw_error(500)
# Matches hierarchical page paths like 'a/b/c'.
PAGE_RE = r'((?:[a-zA-Z0-9_-]+/?)*)'

# Route table; order matters (the multi-slash strip rule must come first).
app = webapp2.WSGIApplication(
    [
        #Adding /? after everything allows for an option trailing slash
        RedirectRoute('(.*)//+', SlashRedirect, 'slash-redirect', strict_slash=True), #Strip multiple trailing slashes
        RedirectRoute('/', Main, 'home', strict_slash=False),
        RedirectRoute('/main', Main, 'home', strict_slash=True),
        # API calls:
        PathPrefixRoute('/api', [
            RedirectRoute('/articles/get', api.GetArticles, 'get-articles', strict_slash=True),
            RedirectRoute('/articles/upvote', api.UpVote, 'upvote-article', strict_slash=True),
            RedirectRoute('/articles/devote', api.DeVote, 'downvote-article', strict_slash=True),
            RedirectRoute('/coursematerials/get', api.GetCourseMaterials, 'get-course-materials', strict_slash=True),
            RedirectRoute('/coursematerials/set', api.MakeNote, 'make-note', strict_slash=True) # Testing only
        ]),
        RedirectRoute('/votes', articles.ListVotes, 'list-votes', strict_slash=True), # Testing only
        RedirectRoute('/notes', panels.ListNotes, 'list-notes', strict_slash=True), # Testing only
    ], debug=True)
# Custom error pages.
app.error_handlers[404] = handle_404
app.error_handlers[500] = handle_500
|
from datetime import datetime
import time
import json
from Commit import Commit;
import Constant;
import collections
from yattag import Doc
def generateHTML(commits, projectName, commitData, fileExtensionMap):
    """Generate every report page (authors, extensions, charts, index).

    commits: list of Commit objects — presumably newest first, since the
        per-date pages iterate reversed(commits); TODO confirm against caller.
    commitData: per-author rows indexable as
        (author, commit_number, lines_added, lines_deleted).
    fileExtensionMap: extension -> file count (mutated by
        generateFileByExtension).
    """
    totalAuthors = len(commitData)
    generateBestAuthors(projectName, commitData)
    generateFileByExtension(fileExtensionMap, projectName)
    totalLines, totalLinesAdded, totalLinesDeleted = generateLinesByDate(commits, projectName)
    totalFiles = generateFilesByDate(commits, projectName)
    generateIndexHtml(projectName, totalLines, totalLinesAdded, totalLinesDeleted,
        totalFiles, len(commits), totalAuthors)
def generateIndexHtml(projectName, totalLines, totalLinesAdded, totalLinesDeleted,
        totalFiles, totalCommits, totalAuthors):
    """Fill the index template, replacing one $placeholder per line.

    Each template line is checked against the placeholders in order; only
    the first matching placeholder on a line is substituted.
    """
    with open(Constant.INDEX_HTML_TEMPLATE, "rt") as fin:
        with open(Constant.INDEX_HTML, "wt") as fout:
            for line in fin:
                if '$title' in line:
                    fout.write(line.replace('$title', projectName))
                elif '$time' in line:
                    # Report generation timestamp, e.g. ' 3:05PM UTC on Jan 01, 2020'.
                    fout.write(line.replace('$time', time.strftime('%l:%M%p %Z on %b %d, %Y')))
                elif '$files' in line:
                    fout.write(line.replace('$files', str(totalFiles)))
                elif '$commits' in line:
                    fout.write(line.replace('$commits', str(totalCommits)))
                elif '$totallines' in line:
                    fout.write(line.replace('$totallines', str(totalLines)))
                elif '$linesadded' in line:
                    fout.write(line.replace('$linesadded', str(totalLinesAdded)))
                elif '$linesdeleted' in line:
                    fout.write(line.replace('$linesdeleted', str(totalLinesDeleted)))
                elif '$author' in line:
                    fout.write(line.replace('$author', str(totalAuthors)))
                else:
                    fout.write(line)
def generateBestAuthors(projectName, commitData):
    """Render the per-author statistics table into the best-authors page.

    commitData rows are indexable as (author, commit_number, lines_added,
    lines_deleted), matching the column order in `fields`.
    """
    # Generate best author table
    fields = ['author', 'commit_number', 'lines_added', 'lines_deleted']
    doc, tag, text = Doc().tagtext()
    with tag('table', ('class', 'table table-bordered table-condensed table-hover')):
        # Header row from the field names.
        with tag('tr'):
            for i in range(len(fields)):
                with tag('th'):
                    text(fields[i])
        # One row per author, columns in the same order as `fields`.
        for commitdata in commitData:
            with tag('tr'):
                for i in range(len(fields)):
                    with tag('td', ('align', 'center')):
                        text(commitdata[i])
    # Splice the generated table into the template at the $data marker.
    with open(Constant.BEST_AUTHORS_TEMPLATE, "rt") as fin:
        with open(Constant.BEST_AUTHORS, "wt") as fout:
            for line in fin:
                if '$title' in line:
                    fout.write(line.replace('$title', projectName))
                elif '$data' in line:
                    fout.write(line.replace('$data', doc.getvalue()))
                else:
                    fout.write(line)
def generateLinesByDate(commits, projectName):
    """Write the lines-by-date chart page; return (totalLines, added, deleted).

    Walks commits oldest-first (reversed input) and records, per day, the
    cumulative line count after that day's commits.
    """
    totalLines = 0
    totalLinesAdded = 0
    totalLinesDeleted = 0
    mydic = collections.OrderedDict()
    for commit in reversed(commits):
        dateKey = int(commit.date.strftime("%s")) * 1000  # epoch millis for the chart
        totalLinesAdded += commit.linesAdded
        totalLinesDeleted += commit.linesDeleted
        linesDiff = commit.linesAdded - commit.linesDeleted
        totalLines += linesDiff
        # BUG FIX: the first commit of a day used to store
        # totalLines + linesDiff, double-counting that commit's diff
        # (totalLines already includes it).
        if dateKey in mydic:
            mydic[dateKey] += linesDiff
        else:
            mydic[dateKey] = totalLines
    data = [[key, value] for key, value in mydic.items()]
    with open(Constant.LINES_BY_DATE_TEMPLATE, "rt") as fin:
        with open(Constant.LINES_BY_DATE, "wt") as fout:
            for line in fin:
                if '$data' in line:
                    fout.write(line.replace('$data', str(data)))
                elif '$title' in line:
                    fout.write(line.replace('$title', projectName))
                else:
                    fout.write(line)
    return totalLines, totalLinesAdded, totalLinesDeleted
def generateFilesByDate(commits, projectName):
    """Write the files-by-date chart page; return the final total file count.

    Walks commits oldest-first (reversed input) and records, per day, the
    cumulative number of files after that day's commits.
    """
    totalFiles = 0
    mydic = collections.OrderedDict()
    for commit in reversed(commits):
        dateKey = int(commit.date.strftime("%s")) * 1000  # epoch millis for the chart
        filesDiff = commit.filesAdded - commit.filesDeleted
        totalFiles += filesDiff
        # BUG FIX: the first commit of a day used to store
        # totalFiles + filesDiff, double-counting that commit's diff.
        if dateKey in mydic:
            mydic[dateKey] += filesDiff
        else:
            mydic[dateKey] = totalFiles
    data = [[key, value] for key, value in mydic.items()]
    with open(Constant.FILES_BY_DATE_TEMPLATE, "rt") as fin:
        with open(Constant.FILES_BY_DATE, "wt") as fout:
            for line in fin:
                if '$data' in line:
                    fout.write(line.replace('$data', str(data)))
                elif '$title' in line:
                    fout.write(line.replace('$title', projectName))
                else:
                    fout.write(line)
    return totalFiles
def generateFileByExtension(fileExtensionMap, projectName):
    """Write the files-by-extension chart page, folding rare extensions into 'other'.

    Extensions holding <= 0.5% of all files are merged into an 'other'
    bucket. Mutates fileExtensionMap in place.
    """
    totalFiles = sum(fileExtensionMap.values())
    threshold = int(totalFiles / 200)  # 0.5% of all files
    # BUG FIX: snapshot the keys with list() — the loop deletes entries,
    # which raises RuntimeError on Python 3 when iterating the live view.
    for ext in list(fileExtensionMap.keys()):
        # Never fold the 'other' bucket into itself (that deleted it entirely).
        if ext != 'other' and fileExtensionMap[ext] <= threshold:
            if 'other' not in fileExtensionMap:
                fileExtensionMap['other'] = 0
            fileExtensionMap['other'] += fileExtensionMap[ext]
            del fileExtensionMap[ext]
    with open(Constant.FILES_BY_EXTENSION_TEMPLATE, "rt") as fin:
        with open(Constant.FILES_BY_EXTENSION, "wt") as fout:
            for line in fin:
                if '$title' in line:
                    fout.write(line.replace('$title', projectName))
                elif '$data' in line:
                    fout.write(line.replace('$data', '[' + ','.join(str(e) for e in fileExtensionMap.values()) + ']'))
                elif '$extensions' in line:
                    # BUG FIX: list() so dict_keys serializes on Python 3 too.
                    fout.write(line.replace('$extensions', json.dumps(list(fileExtensionMap.keys()))))
                else:
                    fout.write(line)
|
"""
Django settings for gnucash_explorer project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
import os
# Project base directory (two levels up from this settings module).
BASE_DIR = os.path.dirname(os.path.dirname(__file__))

# NOTE(review): intentionally None here — local_settings.py (imported at the
# bottom of this file) is expected to supply the real secret.
SECRET_KEY = None

# Development defaults; override in local_settings.py for production.
DEBUG = True

TEMPLATE_DEBUG = True

ALLOWED_HOSTS = []

INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
)

MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
)

ROOT_URLCONF = 'gnucash_explorer.urls'

WSGI_APPLICATION = 'gnucash_explorer.wsgi.application'

# Single local SQLite database by default.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True

STATIC_URL = '/static/'

# Machine-specific overrides (SECRET_KEY, DB credentials, ...).
try:
    from gnucash_explorer.local_settings import *
except ImportError as e:
    print('You should set up your local_settings.py')
|
import sublime
from . import SblmCmmnFnctns
class Spinner:
    """Animated busy indicator rendered in the Sublime Text status bar."""
    SYMBOLS_ROW = u'←↑→↓'
    SYMBOLS_BOX = u'⠋⠙⠹⠸⠼⠴⠦⠧⠇⠏'

    def __init__(self, symbols, view, startStr, endStr):
        """symbols: frame characters; view: target view; startStr/endStr: fixed text around each frame."""
        self.symbols = symbols
        self.length = len(symbols)
        self.position = 0
        self.stopFlag = False
        self.view = view
        self.startStr = startStr
        self.endStr = endStr

    def __next__(self):
        """Advance one frame and return the full status string."""
        self.position += 1
        frame = self.symbols[self.position % self.length]
        return self.startStr + frame + self.endStr

    def start(self):
        """Begin animating; reschedules itself every 300 ms until stop() is called."""
        if self.stopFlag:
            return
        self.view.set_status(SblmCmmnFnctns.SUBLIME_STATUS_SPINNER, self.__next__())
        sublime.set_timeout(lambda: self.start(), 300)

    def stop(self):
        """Halt the animation and clear the status-bar entry."""
        self.view.erase_status(SblmCmmnFnctns.SUBLIME_STATUS_SPINNER)
        self.stopFlag = True
|
from django.conf.urls import url
from sms import views
# URL namespace for reverse() lookups, e.g. reverse('sms:index').
app_name = 'sms'
# App root -> index view.
urlpatterns = [
    url(r'^$', views.index, name="index"),
]
|
# Count the rows of input.txt that describe valid triangles: the two
# shorter sides together must exceed the longest side.
possible = 0
with open('input.txt') as f:  # with-block ensures the file is closed
    for line in f:
        # BUG FIX: on Python 3, map() returns a lazy iterator with no
        # .sort(); sorted() materialises and orders the sides in one step.
        sides = sorted(map(int, line.split()))
        if sides[0] + sides[1] > sides[2]:
            possible += 1
print(possible)
|
import asyncio
import sys
import config
import sender
import receiver
print(sys.argv)
async def receiveMessageFromSerial():
    """Placeholder serial read; always yields the stub payload."""
    payload = "Message"
    return payload
def help():
    """Print CLI usage and terminate the interpreter (never returns)."""
    print('Luiza 1.0 - (luiza.santost@hotmail.com)')
    print('Usage: python3 app.py [Options][Message][source][dest]')
    print('')
    print('SENDING MESSAGE')
    print(' You will send a message from source to dest. 3 containing the text "Sending Message from Luiza"')
    print(' python3 app.py --send "Sending Message from Luiza" 1 3')
    print('RECEIVING MESSAGE')
    print(' You will receive a message using the address 3')
    print(' python3 app.py --read 3')
    quit()
# No arguments at all: show usage and exit.
if len(sys.argv) == 1:
    help()

if sys.argv[1] == '--send':
    # BUG FIX: --send needs message, source AND dest (argv[2..4]); the old
    # '< 3' check let an IndexError through.
    if len(sys.argv) < 5:
        print('ERR: An error occurred. The command was Invalid.')
        help()
    else:
        # BUG FIX: the size check was inverted ('< 10'), rejecting exactly
        # the messages the usage text allows and passing oversized ones.
        if len(sys.argv[2]) >= 10:
            print('ERR: Message size must be less than 10.')
            quit()
        sender.message(sys.argv[3], sys.argv[4], sys.argv[2])

if sys.argv[1] == '--read':
    if len(sys.argv) < 3:
        print('ERR: An error occurred. The command was Invalid. Dest to read not informed !!')
        help()
    loop = asyncio.get_event_loop()
    loop.run_until_complete(receiver.start())
    loop.close()
|
"""
Testing module from creating product using
http://www.sendfromchina.com/default/index/webservice
"""
from flask import Flask, render_template, request
from flask_wtf.csrf import CSRFProtect
from werkzeug.datastructures import MultiDict
from forms import SFCCreateOrder, SFCCreateProduct, SFCOrderDetail, SFCASNInfo, SFCgetOrderByCode
from sfc_api import SFCAPI
app = Flask(__name__)
csrf = CSRFProtect(app)
csrf.init_app(app)
app.secret_key = 's3cr3tasdasdasdasd'
header_request = {'customerId': 'R2036',
'appToken': 'MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDK1YNcdunmWXoK1ys6hyi+LWQdPx6Vmr/9kNlKOw4cK5Q8FWA3nfGeeG49Pq2TlYKVLdSw1fr60AAJFQOuXmol6lmyn+/xwx6j21XLx9/4vdDNSTR8Hcp7oqGNNr5DlI0onhJ7sd+rAxhIOwLNnZv6T/XtVqQNuGVXTq/dX0zkaQIDAQAB',
'appKey': 'MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDGmDLbsI4jELqCHgWikWqACICp299WSoiWgqghEXkQfvdEvwS5XWpdsSmdQwryR2rtg0DiS3vf74oVYBDJVHgcUdc2ov7QI5TPBqXJped7OoyrqYzaYFYshzGWgYC0wu5RCb71p2+4Z8NwDoJlvMVU4/fD9pL59PW8yYH1u3x4ewIDAQAB'}
wsdl = 'http://fulfill.sendfromchina.com/default/svc/wsdl'
sfcapi = SFCAPI(p_wsdl=wsdl, p_header_request=header_request)
@app.route("/")
def hello():
return render_template('index.html')
@app.route("/create_product", methods=['GET', 'POST'])
def create_product():
res = None
if request.method == 'POST':
product_info = {k: v for k, v in request.form.items() if k not in ('csrf_token', 'pocId', 'poValue', 'imgUrl')}
product_info['image'] = {'imgUrl': request.form['imgUrl']}
product_info['qcs'] = {'pocId': request.form['pocId'], 'poValue': request.form['poValue']}
res = sfcapi.create_product(product_info)
form = SFCCreateProduct(request.form)
else:
form = SFCCreateProduct()
return render_template('create_product.html', form=form, res=res)
@app.route("/create_order", methods=['GET', 'POST'])
def create_order():
res = None
if request.method == 'POST':
order_info = {k: v for k, v in request.form.items() if k in
[i for i in SFCCreateOrder.__dict__.keys() if i[0] != '_']}
order_detail = {k: v for k, v in request.form.items() if
k in [i for i in SFCOrderDetail.__dict__.keys() if i[0] != '_']}
res = sfcapi.create_order(p_order_info=order_info, p_order_detail=order_detail)
form_order = SFCCreateOrder(MultiDict(order_info))
form_order_detail = SFCOrderDetail(MultiDict(order_detail))
else:
form_order = SFCCreateOrder()
form_order_detail = SFCOrderDetail()
return render_template('create_order.html', form_master=form_order, form_detail=form_order_detail, res=res)
@app.route("/create_asn", methods=['GET', 'POST'])
def create_asn():
res = None
if request.method == 'POST':
asn_info = {k: v for k, v in request.form.items() if k in
[i for i in SFCASNInfo.__dict__.keys() if i[0] != '_']}
order_detail = {k: v for k, v in request.form.items() if
k in [i for i in SFCOrderDetail.__dict__.keys() if i[0] != '_']}
form_asn = SFCASNInfo(MultiDict(asn_info))
form_order_detail = SFCOrderDetail(MultiDict(order_detail))
res = sfcapi.create_asn(p_asn_info=asn_info, p_order_detail=order_detail)
else:
form_asn = SFCASNInfo()
form_order_detail = SFCOrderDetail()
return render_template('create_asn.html', form_master=form_asn, form_detail=form_order_detail, res=res)
@app.route("/get_order_by_code", methods=['GET', 'POST'])
def get_order():
res = None
if request.method == 'POST':
res = sfcapi.get_order_by_code(order_code=request.form['ordersCode'], detail_level=request.form['detailLevel'])
form_get_order = SFCgetOrderByCode(
MultiDict({'ordersCode': request.form['ordersCode'], 'detailLevel': request.form['detailLevel']}))
else:
form_get_order = SFCgetOrderByCode()
return render_template('get_order_by_code.html', form=form_get_order, res=res)
if __name__ == "__main__":
app.run(host='0.0.0.0', debug=True)
|
'''
python 2.7.3 (win)
0
1
<...>
998
boom
'''
def f(n):
    # Print the current depth, then recurse without a base case (Python 2).
    print n
    f(n+1)

# Recurses until the interpreter's default recursion limit (~1000 frames)
# is hit; the resulting RuntimeError is caught by the (deliberately bare)
# except and 'boom' is printed, matching the module docstring above.
try:
    f(0)
except:
    print 'boom'
|
"""
A unit test for ext_pylib file module's Parsable mixin class.
"""
import pytest
from . import utils
from ext_pylib.files import File, Parsable
# Concrete fixture type: combines Parsable's regex-attribute machinery with
# File's storage so the tests below can exercise parsing end to end.
class ParsableFile(Parsable, File):
    """Dummy class extending Parsable and File."""
FILE = """This is a sample file.
This is a sample file.
This is a sample file.
DocumentRoot /var/www/google.com
This is a sample file.
DEBUG = True
SECURE = False
DocumentRoot /var/www/example.com
LIST = first_item
LIST = second_item
"""
EMPTY_FILE = ''
def test_parsable_parse_with_existing_attribute():
    """Test Parsable setup_parsing() method on an existing attribute."""
    fixture = ParsableFile()
    fixture.existing = 'already exists'  # pylint: disable=attribute-defined-outside-init
    # Registering a parser for an attribute that is already set must fail loudly.
    with pytest.raises(AttributeError):
        fixture.setup_parsing({'existing' : '*'})
def test_parsable_setup_parsing():
    """Test Parsable setup_parsing() method."""
    the_file = Parsable()
    # Stub out disk access; the_file.data supplies the contents instead.
    Parsable.read = utils.mock_read_data
    the_file.data = FILE
    the_file.setup_parsing({
        # attribute : (regex[, write-format])
        'htdocs' : ('DocumentRoot (.*)',),
        'debug'  : 'DEBUG = (.*)',
        'secure' : ('SECURE[ ]*=[ ]*([^ \n]*)', 'SECURE = {0}'),
        'speed'  : ('SPEED[ ]*=[ ]*([^ \n]*)', 'SPEED = {0}'),
        'list'   : ('LIST[ ]*=[ ]*([^ \n]*)', 'LIST = {0}'),
    })
    # Multiple matches are exposed as a list, in file order.
    assert the_file.htdocs[0] == '/var/www/google.com'
    assert the_file.htdocs[1] == '/var/www/example.com'
    assert the_file.debug == 'True'
    assert the_file.secure == 'False'
    the_file.secure = 'True'
    assert the_file.secure == 'True'
    # 'SPEED' is absent from FILE, so the attribute starts as None and can
    # be written through the parser.
    assert the_file.speed is None
    the_file.speed = 'fastest'
    assert the_file.speed == 'fastest'
    the_file.speed = 'fastest' # setting it more than once with the same value
                               # shouldn't affect the number of times it is added.
    assert isinstance(the_file.speed, str) \
        or isinstance(the_file.speed, unicode) # Shouldn't be a list, checking unicode
                                               # for Python 2 support.
    assert len(the_file.list) == 2 # Should be a list
def test_parsable_setup_parsing_on_empty_file():
    """Test Parsable setup_paring() using an empty file."""
    the_file = Parsable()
    # Stub out disk access; the_file.data supplies the (empty) contents.
    Parsable.read = utils.mock_read_data
    the_file.data = EMPTY_FILE
    the_file.setup_parsing({
        'htdocs' : ('DocumentRoot (.*)', 'DocumentRoot {0}'),
        'secure' : ('SECURE[ ]*=[ ]*([^ \n]*)', 'SECURE = {0}'),
    })
    # Nothing matches in an empty file, so attributes start as None and
    # become settable through their write formats.
    assert the_file.htdocs is None
    the_file.htdocs = '/var/www/google.com'
    assert the_file.htdocs == '/var/www/google.com'
    assert the_file.secure is None
    the_file.secure = 'True'
    assert the_file.secure == 'True'
|
import sys
base_url = sys.argv[0]
addon_handle = int(sys.argv[1])
import xbmcplugin
xbmcplugin.setContent(addon_handle, 'episodes')
import urlparse
args = urlparse.parse_qs(sys.argv[2][1:])
mode = args.get('mode', None)
from urllib import FancyURLopener, urlencode
class URLOpener(FancyURLopener):
    # Present a desktop-browser User-Agent string; presumably the site
    # serves different markup to unknown clients — NOTE(review): confirm.
    version = 'Mozilla/5.0 (X11; Linux i686; rv:31.0) Gecko/20100101 Firefox/31.0 Iceweasel/31.0'
urlopen = URLOpener().open
urlmake = lambda query: base_url + '?' + urlencode(query)
rooturl = 'http://nick.walla.co.il'
def getpage(url):
    """Fetch a site page as a UTF-8 encoded string (Python 2).

    Relative URLs are resolved against rooturl. Retries up to 5 times on
    IOError; after total failure the empty string is returned. Responses
    are decoded as windows-1255 (Hebrew) and re-encoded as UTF-8.
    """
    if url.startswith('/'): url = rooturl + url
    elif not url.startswith('http://'): url = rooturl + '/' + url
    resets = 0  # NOTE(review): never used — candidate for removal
    for tries in range(5):
        try:
            page = urlopen(url).read()
            break
        except IOError:
            page = u''
    # Python 2: a byte str needs decoding; a unicode fallback does not.
    if isinstance(page, str): page = page.decode('windows-1255', 'replace')
    page = page.encode('utf-8')
    return page
import re
vidregexp = re.compile(
'class="vitem.*?"',
re.DOTALL
)
nextregexp = re.compile(
'<a class="p_r" style="" href="(.+?)"'
)
def vidsfromseason(url):
    """Collect video-item markers from a season page and all of its 'next' pages."""
    first_page = getpage(url)
    found = vidregexp.findall(first_page)
    for next_url in nextregexp.findall(first_page):
        found += vidregexp.findall(getpage(next_url))
    return found
def vidsfromshow(showurl):
    """Return one list of video markers per season of the show.

    The regex matches links whose text carries the Hebrew label for
    'full episodes'.
    """
    return [vidsfromseason(url) for url in re.findall(
        'href="([^"]*)"[^>]*>[^<]*פרקים מלאים',
        getpage(showurl)
    )]
import xbmcgui
# Top-level plugin dispatch: no mode -> list shows; mode=show -> list videos.
if mode is None:
    # One directory entry per show link found on the site root.
    for show in re.findall(
        '<a href="([^"]+)" class="item right w3" style=".*?">([^<]+)</a>',
        getpage('/')
    ):
        xbmcplugin.addDirectoryItem(
            handle=addon_handle,
            url=urlmake({'mode': 'show', 'showurl': show[0]}),
            listitem=xbmcgui.ListItem(show[1]),
            isFolder=True
        )
    xbmcplugin.endOfDirectory(addon_handle)
elif mode[0] == 'show':
    print(vidsfromshow(args['showurl'][0]))
    # NOTE(review): placeholder entry — url='/' is not a playable path.
    xbmcplugin.addDirectoryItem(
        handle=addon_handle,
        url='/',
        listitem=xbmcgui.ListItem('Video')
    )
    xbmcplugin.endOfDirectory(addon_handle)
|
import operator
import ply.lex as lex
from jpp.parser.operation import Operation
from jpp.parser.expression import SimpleExpression
reserved = {
'extends': 'EXTENDS',
'import': 'IMPORT',
'local': 'LOCAL',
'imported': 'IMPORTED',
'user_input': 'USER_INPUT',
}
NAME_TOK = 'NAME'
tokens = [
'INTEGER',
'STRING_LITERAL',
'COLON',
NAME_TOK,
'COMMA',
'LCURL',
'RCURL',
'LBRAC',
'RBRAC',
'LPAREN',
'RPAREN',
'DOT',
'SEMICOLON',
'BOOLEAN',
'MINUS',
'COMPARISON_OP',
'PLUS',
'MUL_OP',
'BIT_SHIFT_OPS',
'BITWISE_OPS',
'INVERT',
'POW',
'FUNC',
]
tokens.extend(reserved.values())
t_DOT = r'\.'
t_LCURL = r'\{'
t_RCURL = r'\}'
t_COLON = r'\:'
t_LBRAC = r'\['
t_RBRAC = r'\]'
t_LPAREN = r'\('
t_RPAREN = r'\)'
t_COMMA = ','
t_SEMICOLON = ';'
def _create_operation_token(t):
    """Wrap the matched operator text in an Operation and return the token.

    Shared tail for all operator token rules below; ply matches the rule's
    docstring regex, then this replaces the raw text with an Operation.
    """
    t.value = Operation(t.value)
    return t
def t_BIT_SHIFT_OPS(t):
"""
<<|>>
"""
return _create_operation_token(t)
def t_COMPARISON_OP(t):
    """
    <=|<|==|!=|>=
    """
    # BUG FIX: the docstring above is this token's regex (ply). '<' used to
    # precede '<=' in the alternation; Python's re takes the first matching
    # branch, so '<=' lexed as '<' plus a stray '='. Longest branch first.
    return _create_operation_token(t)
def t_BITWISE_OPS(t):
r"""
&|\^|\|
"""
return _create_operation_token(t)
def t_PLUS(t):
r"""
\+
"""
return _create_operation_token(t)
def t_MINUS(t):
    r"""
    -
    """
    # Unlike the other operators, MINUS pins its callable (operator.sub)
    # explicitly rather than letting Operation resolve it from the text.
    t.value = Operation(t.value, operator.sub)
    return t
def t_POW(t):
r"""
\*\*
"""
return _create_operation_token(t)
def t_MUL_OP(t):
r"""
\*|//|/|%
"""
return _create_operation_token(t)
def t_INVERT(t):
"""
~
"""
return _create_operation_token(t)
def t_FUNC(t):
"""
bool|abs
"""
return _create_operation_token(t)
def t_INTEGER(t):
r"""
\d+
"""
t.value = SimpleExpression(int(t.value))
return t
def t_STRING_LITERAL(t):
"""
"[^"\n]*"
"""
t.value = SimpleExpression(str(t.value).strip('"'))
return t
def t_BOOLEAN(t):
"""
true|false
"""
t.value = SimpleExpression(t.value == 'true')
return t
def t_NAME(t):
    """
    [a-zA-Z_][a-zA-Z_0-9]*
    """
    # Re-type the token if the identifier is actually a reserved keyword.
    t.type = reserved.get(t.value, NAME_TOK)  # Check for reserved words
    return t
def t_COMMENT(t):
    r"""
    \#.*
    """
    # '#' to end of line is a comment.
    # No return value. Token discarded
    pass
def t_newline(t):
    r"""
    \n+
    """
    # Track line numbers for error reporting; the token is discarded.
    t.lexer.lineno += len(t.value)
def t_error(t):
    # ply requires the error handler to advance the input position; the
    # original bare `return` left lexpos unchanged, which makes ply raise
    # LexError on the first illegal character. Skip it and keep scanning.
    t.lexer.skip(1)
t_ignore = ' \t'  # ply special name: characters silently skipped between tokens
def create_lexer():
    # Build a ply lexer from the module-level token definitions above.
    return lex.lex(debug=False)
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.contrib import auth
admin.autodiscover()
# NOTE(review): patterns() was deprecated in Django 1.8 and removed in
# 1.10, and django.contrib.comments was split out of Django after 1.7 --
# this URLconf targets an old Django release; confirm before upgrading.
urlpatterns = patterns('stepup.views',
    # Examples:
    # url(r'^$', 'volunteer.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),
    #auth
    url(r'^admin/', include(admin.site.urls)),
    # homepage
    url(r'^$', 'index', name = 'homepage'),
    # about
    url(r'^about/$', 'about', name = 'view_about'),
    # person detail (must precede the bare 'person/' list route)
    url(r'^person/(?P<slug>[^\.]+)', 'person', name = 'view_person'),
    # all persons
    url(r'^person/', 'all_person', name = 'view_all_person'),
    # opportunity
    url(r'^opportunity/(?P<slug>[^\.]+)', 'opportunity', name = 'view_opportunity'),
    # all opportunities
    url(r'^opportunity/', 'all_opportunity', name = 'view_all_opportunity'),
    # organization
    url(r'^organization/(?P<slug>[^\.]+)', 'organization', name = 'view_organization'),
    #url(r'^tag/(?P<slug>[^\.]+)', 'tag', name = 'view_tag'),
    # all organizations
    url(r'^organization/', 'all_organizations', name = 'view_all_organization'),
    # comments
    url(r'^comments/', include('django.contrib.comments.urls')),
    # search
    url(r'^search/', 'search', name = 'view_search'),
)
# Auth views use the generic django.contrib.auth.views login/logout.
urlpatterns += patterns('django.contrib.auth.views',
    url(r'^login/$', 'login', {'template_name': 'login.html'},
        name='mysite_login'),
    url(r'^logout/$', 'logout', {'next_page': '/'}, name='mysite_logout'),
)
|
"""
This program collects Portugal weather forecasts from IPMA and uploads them to the Orion Context Broker.
It uploads the list of stations on the fly from
- http://api.ipma.pt/json/locations.json.
Legal notes:
- http://www.ipma.pt/en/siteinfo/index.html?page=index.xml
Examples:
- get the weather forecast from IPMA:
curl -X GET --header 'Accept: application/json' \
'http://api.ipma.pt/json/alldata/1110600.json'
AsyncIO name convention:
async def name - entry point for asynchronous data processing/http requests and post processing
async def name_bounded - intermediate step to limit amount of parallel workers
async def name_one - worker process
"""
from aiohttp import ClientSession, ClientConnectorError
from argparse import ArgumentTypeError, ArgumentParser
from asyncio import Semaphore, ensure_future, gather, run, TimeoutError as ToE, set_event_loop_policy
from copy import deepcopy
from datetime import datetime, timedelta
from pytz import timezone
from re import sub
from requests import get, exceptions
from sys import stdout
from time import sleep
from uvloop import EventLoopPolicy
from yajl import dumps, loads
from yaml import safe_load as load
import logging
default_latest = False  # preserve only latest values
default_limit_entities = 50  # amount of entities per 1 request to Orion
default_limit_source = 10  # amount of parallel request to IPMA
default_limit_target = 50  # amount of parallel request to Orion
default_log_level = 'INFO'
default_orion = 'http://orion:1026'  # Orion Context Broker endpoint
default_timeout = -1  # if value != -1, then work as a service
http_ok = [200, 201, 204]
log_levels = ['ERROR', 'INFO', 'DEBUG']
logger = None  # set in __main__ via setup_logger()
logger_req = None
stations = dict()  # preprocessed list of stations
tz = timezone('UTC')
tz_wet = 'Europe/Lisbon'
tz_azot = 'Atlantic/Azores'
# Station codes located in the Azores (different timezone than the mainland)
tz_azot_codes = ['3490100', '3480200', '3470100', '3460200', '3450200', '3440100', '3420300', '3410100']
url_observation = 'http://api.ipma.pt/json/alldata/{}.json'
url_stations = 'http://api.ipma.pt/json/locations.json'
# Skeleton of the NGSI-v2 WeatherForecast entity posted to Orion.
# prepare_schema_one() deep-copies this, fills the None values and deletes
# any attribute for which the forecast carries no data.
template = {
    'id': 'urn:ngsi-ld:WeatherForecast:Portugal-WeatherForecast-',
    'type': 'WeatherForecast',
    'address': {
        'type': 'PostalAddress',
        'value': {
            'addressCountry': 'PT',
            'addressLocality': None,
            'postalCode': None
        }
    },
    'dateIssued': {
        'type': 'DateTime',
        'value': None
    },
    'dataProvider': {
        'type': 'Text',
        'value': 'FIWARE'
    },
    'dateRetrieved': {
        'type': 'DateTime',
        'value': None
    },
    'dayMaximum': {
        'type': 'StructuredValue',
        'value': {
            'temperature': None
        }
    },
    'dayMinimum': {
        'type': 'StructuredValue',
        'value': {
            'temperature': None
        }
    },
    'feelsLikeTemperature': {
        'type': 'Number',
        'value': None
    },
    'precipitationProbability': {
        'type': 'Number',
        'value': None
    },
    'relativeHumidity': {
        'type': 'Number',
        'value': None
    },
    'source': {
        'type': 'URL',
        'value': 'http://www.ipma.pt'
    },
    'temperature': {
        'type': 'Number',
        'value': None
    },
    'validFrom': {
        'type': 'DateTime',
        'value': None
    },
    'validTo': {
        'type': 'DateTime',
        'value': None
    },
    'validity': {
        'type': 'Text',
        'value': None
    },
    'weatherType': {
        'type': 'Text',
        'value': None
    },
    'windDirection': {
        'type': 'Number',
        'value': None
    },
    'windSpeed': {
        'type': 'Number',
        'value': None
    }
}
def check_entity(forecast, item):
    """Return forecast[item] unless it is missing or an IPMA -99 sentinel.

    IPMA marks unavailable measurements as '-99.0' (string) or -99 (number);
    both are normalized to None, as is a missing key.
    """
    value = forecast.get(item)
    if value in ('-99.0', -99):
        return None
    return value
def decode_weather_type(item):
    """Translate an IPMA 'idTipoTempo' code into an NGSI weatherType string.

    Code 0 means "no data" and maps to None silently; any other unknown
    code is logged as an error and also yields None.
    """
    mapping = {
        0: None,
        1: 'clearSky',
        2: 'partlyCloudy',
        3: 'sunnyIntervals',
        4: 'cloudy',
        5: 'highClouds',
        6: 'showers',
        7: 'lightShowers',
        8: 'heavyShowers',
        9: 'rain',
        10: 'lightRain',
        11: 'heavyRain',
        12: 'intermittentRain',
        13: 'intermittentLightRain',
        14: 'intermittentHeavyRain',
        15: 'drizzle',
        16: 'mist',
        17: 'fog',
        18: 'snow',
        19: 'thunderstorms',
        20: 'showersAndThunderstorms',
        21: 'hail',
        22: 'frost',
        23: 'rainAndThunderstorms',
        24: 'convectiveClouds',
        25: 'partyCloudy',  # NOTE(review): likely a typo of 'partlyCloudy' -- kept as-is
        26: 'fog',
        27: 'cloudy',
    }
    decoded = mapping.get(item)
    if decoded is None and item != 0:
        logger.error('Unknown value of WeatherType detected, %s', item)
    return decoded
def decode_wind_direction(item):
    """
    North: 180
    North-West: 135
    West: 90
    South-West: 45
    South: 0
    South-East: -45
    East: -90
    North-East: -135
    """
    out = {
        '9': 180,
        '8': 135,
        '7': 90,
        '6': 45,
        '5': 0,
        '4': -45,
        '3': -90,
        '2': -135,
        'N': 180,
        'NW': 135,
        'W': 90,
        'SW': 45,
        'S': 0,
        'SE': -45,
        'E': -90,
        'NE': -135
    }.get(item, None)
    if out is None:
        logger.error('Unknown value of WindDirection detected, %s', item)
    # Return the mapped value directly: the original `out if out else None`
    # collapsed the legitimate South value (0, falsy) to None, silently
    # dropping southerly wind directions from the published entities.
    return out
async def collect():
    """Fetch forecasts for every configured station in parallel.

    Concurrency is capped by ``limit_source``. Stations whose download
    failed (collect_one returns False) are filtered out of the result.
    """
    # Fixed log message: it previously said 'Connecting ... started',
    # inconsistent with the 'Collecting ... ended' message below.
    logger.debug('Collecting data from IPMA started')
    sem = Semaphore(limit_source)
    async with ClientSession() as session:
        tasks = [ensure_future(collect_bounded(station, sem, session))
                 for station in stations]
        result = await gather(*tasks)
    result = [item for item in result if item is not False]
    logger.debug('Collecting data from IPMA ended')
    return result
async def collect_bounded(station, sem, session):
    # Semaphore-gated wrapper: caps the number of concurrent IPMA requests.
    async with sem:
        return await collect_one(station, session)
async def collect_one(station, session):
    # Download the raw forecast JSON for one station and reduce it to the
    # fields this harvester publishes. Returns False on any failure so
    # collect() can filter failed stations out.
    try:
        async with session.get(stations[station]['url']) as response:
            result = await response.text()
            status = response.status
    except ClientConnectorError:
        logger.error('Collecting data from IPMA station %s failed due to the connection problem', station)
        return False
    except ToE:
        logger.error('Collecting link from IPMA station %s failed due to the timeout problem', station)
        return False
    if status not in http_ok:
        logger.error('Collecting data from IPMA station %s failed due to the return code %s', station, status)
        return False
    content = loads(result)
    result = dict()
    result['id'] = station
    result['retrieved'] = datetime.now().replace(microsecond=0)
    result['forecasts'] = dict()
    # Only daily (idPeriodo == 24) forecasts for today and tomorrow are kept.
    today = datetime.now(tz).strftime("%Y-%m-%d") + 'T00:00:00'
    tomorrow = (datetime.now(tz) + timedelta(days=1)).strftime("%Y-%m-%d") + 'T00:00:00'
    for forecast in content:
        if forecast['idPeriodo'] != 24:
            continue
        date = forecast['dataPrev']
        if date not in [today, tomorrow]:
            continue
        result['forecasts'][date] = dict()
        # check_entity() maps IPMA's -99 "no data" sentinels to None.
        result['forecasts'][date]['feelsLikeTemperature'] = check_entity(forecast, 'utci')
        result['forecasts'][date]['issued'] = datetime.strptime(forecast['dataUpdate'], '%Y-%m-%dT%H:%M:%S')
        result['forecasts'][date]['period'] = forecast['idPeriodo']
        result['forecasts'][date]['precipitationProbability'] = check_entity(forecast, 'probabilidadePrecipita')
        result['forecasts'][date]['relativeHumidity'] = check_entity(forecast, 'hR')
        result['forecasts'][date]['temperature'] = check_entity(forecast, 'tMed')
        result['forecasts'][date]['tMax'] = check_entity(forecast, 'tMax')
        result['forecasts'][date]['tMin'] = check_entity(forecast, 'tMin')
        result['forecasts'][date]['weatherType'] = check_entity(forecast, 'idTipoTempo')
        result['forecasts'][date]['windDirection'] = check_entity(forecast, 'ddVento')
        result['forecasts'][date]['windSpeed'] = check_entity(forecast, 'ffVento')
    return result
def log_level_to_int(log_level_string):
    """Translate a CLI log-level name into the matching logging constant.

    Raises ArgumentTypeError for names outside the supported log_levels,
    mirroring argparse's own invalid-choice wording.
    """
    if log_level_string not in log_levels:
        raise ArgumentTypeError(
            'invalid choice: {0} (choose from {1})'.format(log_level_string, log_levels))
    return getattr(logging, log_level_string, logging.ERROR)
async def post(body):
    # Push the prepared NGSI entities to Orion, batched into groups of at
    # most limit_entities, with at most limit_target parallel requests.
    # NOTE: consumes `body` destructively (pop()).
    logger.debug('Posting data to Orion started')
    tasks = list()
    headers = {
        'Content-Type': 'application/json'
    }
    if service:
        headers['FIWARE-SERVICE'] = service
    if path:
        headers['FIWARE-SERVICEPATH'] = path
    sem = Semaphore(limit_target)
    # splitting list to list of lists to fit into limits
    block = 0
    items = 0
    body_divided = dict()
    body_divided[0] = list()
    while True:
        if len(body) > 0:
            if items < limit_entities:
                body_divided[block].append(body.pop())
                items += 1
            else:
                # Current batch is full: start a new one; the pending item
                # is appended on the next loop iteration.
                items = 0
                block += 1
                body_divided[block] = list()
        else:
            break
    async with ClientSession() as session:
        for item in body_divided:
            task = ensure_future(post_bounded(body_divided[item], headers, sem, session))
            tasks.append(task)
        response = await gather(*tasks)
    # post_one returns True on success or a short error description string;
    # log each distinct failure reason once.
    response = list(set(response))
    if True in response:
        response.remove(True)
    for item in response:
        logger.error('Posting data to Orion failed due to the %s', item)
    logger.debug('Posting data to Orion ended')
async def post_bounded(item, headers, sem, session):
    # Semaphore-gated wrapper: caps the number of concurrent Orion requests.
    async with sem:
        return await post_one(item, headers, session)
async def post_one(item, headers, session):
    # POST one batch of entities via Orion's batch-update endpoint.
    # Returns True on success, or a short error description that post()
    # aggregates for logging.
    payload = {
        'actionType': 'APPEND',
        'entities': item
    }
    payload = dumps(payload)
    url = orion + '/v2/op/update'
    try:
        async with session.post(url, headers=headers, data=payload) as response:
            status = response.status
    except ClientConnectorError:
        return 'connection problem'
    except ToE:
        return 'timeout problem'
    if status not in http_ok:
        return 'response code ' + str(status)
    return True
async def prepare_schema(source):
    """Convert every collected station result into NGSI entities.

    Runs prepare_schema_one() for each station concurrently and returns
    the per-station entity lists flattened into a single list.
    """
    logger.debug('Schema preparation started')
    tasks = [ensure_future(prepare_schema_one(entry)) for entry in source]
    per_station = await gather(*tasks)
    logger.debug('Schema preparation ended')
    flattened = []
    for entities in per_station:
        flattened.extend(entities)
    return flattened
async def prepare_schema_one(source):
    # Render one station's collected forecasts into NGSI entities based on
    # the module-level template. Attributes with no data are deleted from
    # the copy rather than posted as null.
    result = list()
    id_local = source['id']
    today = datetime.now(tz).strftime("%Y-%m-%d") + 'T00:00:00'
    tomorrow = (datetime.now(tz) + timedelta(days=1)).strftime("%Y-%m-%d") + 'T00:00:00'
    retrieved = source['retrieved'].replace(tzinfo=tz).isoformat().replace('+00:00', 'Z')
    for date in source['forecasts']:
        item = deepcopy(template)
        forecast = source['forecasts'][date]
        issued = forecast['issued'].replace(tzinfo=tz).isoformat().replace('+00:00', 'Z')
        forecast_date = datetime.strptime(date, '%Y-%m-%dT00:00:00')
        valid_from = forecast_date.replace(tzinfo=tz)
        valid_to = valid_from + timedelta(hours=24)
        valid_from_iso = valid_from.isoformat().replace('+00:00', 'Z')
        valid_from_short = valid_from.strftime('%H:%M:%S')
        valid_to_iso = valid_to.isoformat().replace('+00:00', 'Z')
        valid_to_short = valid_to.strftime('%H:%M:%S')
        # With --latest the entity id is stable ('today'/'tomorrow'), so a
        # new run overwrites the previous forecast instead of accumulating.
        if latest:
            if date == today:
                item['id'] = item['id'] + id_local + '_today_' + valid_from_short + '_' + valid_to_short
            if date == tomorrow:
                item['id'] = item['id'] + id_local + '_tomorrow_' + valid_from_short + '_' + valid_to_short
        else:
            item['id'] = item['id'] + id_local + '_' + valid_from_iso + '_' + valid_to_iso
        item['address']['value']['addressLocality'] = stations[id_local]['addressLocality']
        item['address']['value']['postalCode'] = stations[id_local]['postalCode']
        item['dateIssued']['value'] = issued
        item['dateRetrieved']['value'] = retrieved
        if 'tMax' in forecast:
            item['dayMaximum']['value']['temperature'] = float(forecast['tMax'])
        else:
            del item['dayMaximum']
        if 'tMin' in forecast:
            item['dayMinimum']['value']['temperature'] = float(forecast['tMin'])
        else:
            del item['dayMinimum']
        if forecast['feelsLikeTemperature'] is not None:
            item['feelsLikeTemperature']['value'] = float(forecast['feelsLikeTemperature'])
        else:
            del item['feelsLikeTemperature']
        if forecast['precipitationProbability'] is not None:
            # IPMA reports percent; NGSI expects a 0..1 probability.
            item['precipitationProbability']['value'] = float(forecast['precipitationProbability'] / 100)
        else:
            del item['precipitationProbability']
        if forecast['relativeHumidity'] is not None:
            item['relativeHumidity']['value'] = float(forecast['relativeHumidity'])
        else:
            del item['relativeHumidity']
        if forecast['temperature'] is not None:
            item['temperature']['value'] = float(forecast['temperature'])
        else:
            del item['temperature']
        item['validFrom']['value'] = valid_from_iso
        item['validTo']['value'] = valid_to_iso
        item['validity']['value'] = valid_from_iso + '/' + valid_to_iso
        if forecast['weatherType'] is not None:
            item['weatherType']['value'] = decode_weather_type(forecast['weatherType'])
            if item['weatherType']['value'] is None:
                del item['weatherType']
        if forecast['windDirection'] is not None:
            item['windDirection']['value'] = decode_wind_direction(forecast['windDirection'])
            if item['windDirection']['value'] is None:
                del item['windDirection']
        if forecast['windSpeed'] is not None:
            # km/h -> m/s (factor 0.28), rounded to 2 decimals.
            item['windSpeed']['value'] = round(float(forecast['windSpeed']) * 0.28, 2)
        else:
            del item['windSpeed']
        result.append(item)
    return result
def reply_status():
    # Log the effective runtime configuration once at startup.
    logger.info('Orion: %s', orion)
    logger.info('FIWARE Service: %s', service)
    logger.info('FIWARE Service-Path: %s', path)
    logger.info('Timeout: %s', str(timeout))
    logger.info('Stations: %s', str(len(stations)))
    logger.info('Latest: %s', str(latest))
    logger.info('Limit_source: %s', str(limit_source))
    logger.info('limit_target: %s', str(limit_target))
    logger.info('Log level: %s', args.log_level)
    logger.info('Started')
def sanitize(str_in):
    """Strip characters that could break NGSI payloads or enable injection."""
    return str_in.translate(str.maketrans('', '', '<(>)"\'=;-'))
def setup_logger():
    """Configure the application logger (stdout) and quiet the requests logger.

    Returns the (app_logger, requests_logger) pair stored in module globals.
    """
    level = log_level_to_int(args.log_level)
    local_logger = logging.getLogger('root')
    local_logger.setLevel(level)
    handler = logging.StreamHandler(stdout)
    handler.setLevel(level)
    handler.setFormatter(logging.Formatter(
        '%(asctime)s - %(levelname)s - %(message)s', datefmt='%Y-%m-%dT%H:%M:%SZ'))
    local_logger.addHandler(handler)
    local_logger_req = logging.getLogger('requests')
    local_logger_req.setLevel(logging.WARNING)
    return local_logger, local_logger_req
def setup_stations(stations_limit):
    # Download the station list from IPMA and build the lookup dict used by
    # the collector, honouring the include/exclude limits from the config.
    result = dict()
    limit_on = False
    limit_off = False
    resp = None
    if 'include' in stations_limit:
        limit_on = True
    if 'exclude' in stations_limit:
        limit_off = True
    try:
        resp = get(url_stations)
    except exceptions.ConnectionError:
        # NOTE(review): exits without logging the reason -- consider
        # logging an error before exit.
        exit(1)
    if resp.status_code not in http_ok:
        logger.error('Collecting the list of stations from IPMA failed due to the return code %s', resp.status_code)
        exit(1)
    content = loads(resp.text)
    for station in content:
        station_code = str(station['globalIdLocal'])
        if limit_on:
            if station_code not in stations_limit['include']:
                continue
        if limit_off:
            if station_code in stations_limit['exclude']:
                continue
        result[station_code] = dict()
        result[station_code]['postalCode'] = station_code
        result[station_code]['addressLocality'] = sanitize(station['local'])
        result[station_code]['url'] = url_observation.format(station_code)
        # Azores stations use a different timezone than the mainland.
        if station_code in tz_azot_codes:
            result[station_code]['timezone'] = tz_azot
        else:
            result[station_code]['timezone'] = tz_wet
    if limit_on:
        # Every explicitly included station must have been found upstream.
        if len(result) != len(stations_limit['include']):
            logger.error('Errors in the list of stations detected')
            exit(1)
    return result
def setup_stations_config(f):
    """Parse the optional YAML config file into include/exclude station lists.

    :param f: path to the YAML config, or a false value for "no limits"
    :return: dict with optional 'include' and 'exclude' lists of station codes
    """
    local_stations = dict()
    if f:
        try:
            # Don't shadow the path parameter with the file handle, and
            # rely on the context manager for closing (the old explicit
            # close() inside the with-block was redundant).
            with open(f, 'r', encoding='utf8') as config_file:
                content = config_file.read()
            # Re-quote the station codes so YAML keeps them as strings.
            config = sub(r'-.*\n?', setup_config_re, content)
            source = load(config)
            # NOTE(review): this rejects configs that define BOTH lists;
            # confirm it should not instead reject configs with NEITHER
            # (the error message suggests an emptiness check was intended).
            if 'exclude' in source and 'include' in source:
                logging.error('Config file is empty or wrong')
                exit(1)
            if 'exclude' in source:
                local_stations['exclude'] = list(source['exclude'])
            if 'include' in source:
                local_stations['include'] = list(source['include'])
        except TypeError:
            # load() returns None for an empty file; membership test raises.
            logging.error('Config file is empty or wrong')
            exit(1)
        except FileNotFoundError:
            logging.error('Config file not found')
            exit(1)
    return local_stations
def setup_config_re(station):
    """re.sub callback: rewrite a YAML list entry so the code is quoted.

    Turns a matched line like "- 1110600" into "- '1110600'\n" so YAML
    loads the station code as a string rather than an integer.
    """
    code = station.group().replace('-', '').strip()
    return "- '{}'\n".format(code)
if __name__ == '__main__':
    # CLI entry point: parse options, build the station list, then run the
    # collect -> prepare_schema -> post pipeline once, or repeatedly when
    # --timeout is given (service mode).
    parser = ArgumentParser()
    parser.add_argument('--config',
                        dest='config',
                        help='YAML file with list of stations to be collected or excluded from collecting')
    parser.add_argument('--latest',
                        action='store_true',
                        default=default_latest,
                        dest='latest',
                        help='Collect only latest forecast')
    parser.add_argument('--limit-entities',
                        default=default_limit_entities,
                        dest='limit_entities',
                        help='Limit amount of entities per 1 request to orion')
    parser.add_argument('--limit-source',
                        default=default_limit_source,
                        dest='limit_source',
                        help='Limit amount of parallel requests to IPMA')
    parser.add_argument('--limit-target',
                        default=default_limit_target,
                        dest='limit_target',
                        help='Limit amount of parallel requests to Orion')
    parser.add_argument('--log-level',
                        default=default_log_level,
                        dest='log_level',
                        help='Set the logging output level. {0}'.format(log_levels),
                        nargs='?')
    parser.add_argument('--orion',
                        action='store',
                        default=default_orion,
                        dest='orion',
                        help='Orion Context Broker endpoint')
    parser.add_argument('--path',
                        action='store',
                        dest='path',
                        help='FIWARE Service Path')
    parser.add_argument('--service',
                        action='store',
                        dest="service",
                        help='FIWARE Service')
    parser.add_argument('--timeout',
                        action='store',
                        default=default_timeout,
                        dest='timeout',
                        help='Run as a service')
    args = parser.parse_args()
    latest = args.latest
    limit_entities = int(args.limit_entities)
    limit_source = int(args.limit_source)
    limit_target = int(args.limit_target)
    orion = args.orion
    timeout = int(args.timeout)
    # argparse.Namespace supports `in`; since both dests are declared above
    # the attributes always exist (None when the flag was not passed).
    if 'path' in args:
        path = args.path
    if 'service' in args:
        service = args.service
    logger, logger_req = setup_logger()
    # uvloop replaces the default asyncio event loop for performance.
    set_event_loop_policy(EventLoopPolicy())
    res = setup_stations_config(args.config)
    stations = setup_stations(res)
    reply_status()
    while True:
        res = run(collect())
        if res:
            res = run(prepare_schema(res))
            run(post(res))
        # timeout == -1 means "run once"; otherwise sleep and repeat.
        if timeout == -1:
            break
        else:
            logger.debug('Sleeping for the %s seconds', timeout)
            sleep(timeout)
    logger.info('Ended')
    exit(0)
|
from aiohttp import ClientSession
from aiowing import settings
async def test_unauthenticated_records(test_app, test_client):
    # Anonymous access to the records page must redirect to the login page.
    cli = await test_client(test_app)
    resp = await cli.get(test_app.router['admin_records'].url(),
                         allow_redirects=False)
    assert resp.headers.get('Location') == test_app.router['admin_login'].url()
    await resp.release()
async def test_unauthenticated_login(test_app, test_client):
    # A valid superuser login must redirect to the records page.
    cli = await test_client(test_app)
    resp = await cli.post(test_app.router['admin_login'].url(),
                          data={'email': settings.SUPERUSER_EMAIL,
                                'password': settings.SUPERUSER_PASSWORD},
                          allow_redirects=False)
    assert resp.headers.get('Location') == \
        test_app.router['admin_records'].url()
    await resp.release()
async def test_unauthenticated_logout(test_app, test_client):
    # Logging out while anonymous must still redirect to the login page.
    cli = await test_client(test_app)
    resp = await cli.get(test_app.router['admin_logout'].url(),
                         allow_redirects=False)
    assert resp.headers.get('Location') == test_app.router['admin_login'].url()
    await resp.release()
async def test_authenticated_records(test_app, test_client):
    # After a successful login the records page must be served directly.
    cli = await test_client(test_app)
    resp = await cli.post(test_app.router['admin_login'].url(),
                          data={'email': settings.SUPERUSER_EMAIL,
                                'password': settings.SUPERUSER_PASSWORD},
                          allow_redirects=False)
    resp = await cli.get(test_app.router['admin_records'].url(),
                         allow_redirects=False)
    assert resp.status == 200
    await resp.release()
async def test_authenticated_login(test_app, test_client):
    # Visiting the login page while already authenticated must redirect
    # straight to the records page.
    cli = await test_client(test_app)
    resp = await cli.post(test_app.router['admin_login'].url(),
                          data={'email': settings.SUPERUSER_EMAIL,
                                'password': settings.SUPERUSER_PASSWORD},
                          allow_redirects=False)
    resp = await cli.get(test_app.router['admin_login'].url(),
                         allow_redirects=False)
    assert resp.headers.get('Location') == \
        test_app.router['admin_records'].url()
    await resp.release()
async def test_authenticated_logout(test_app, test_client):
    # Logging out after a login must redirect back to the login page.
    cli = await test_client(test_app)
    resp = await cli.post(test_app.router['admin_login'].url(),
                          data={'email': settings.SUPERUSER_EMAIL,
                                'password': settings.SUPERUSER_PASSWORD},
                          allow_redirects=False)
    resp = await cli.get(test_app.router['admin_logout'].url(),
                         allow_redirects=False)
    assert resp.headers.get('Location') == test_app.router['admin_login'].url()
    await resp.release()
|
class Slot(object):
    """
    To use comb, create a Python module file -- we call it a *slot*.
    A legal slot must be named 'Slot' in your module file and it must contain
    at least these four methods:
    * `initialize`
        initialize resources, e.g: a database handle
    * `__enter__`
        get the next data item to process; you can fetch one or more items.
    * `slot`
        user custom code
    * `__exit__`
        called when the slot is finished
    """
    def __init__(self, combd):
        """Don't override this method unless you know what you're doing.
        """
        # Copy the scheduling knobs from the owning combd daemon, then run
        # the subclass initialization hook.
        self.threads_num = combd.threads_num
        self.sleep = combd.sleep
        self.sleep_max = combd.sleep_max
        self.debug = combd.debug
        self.combd = combd
        self.initialize()
    def initialize(self):
        """Hook for subclass initialization.
        This block is executed before the threads are started.
        Example::
            class UserSlot(Slot):
                def initialize(self):
                    self.threads_num = 10
                def slot(self, result):
                    ...
        """
        pass
    def __enter__(self):
        """You **MUST** return False when there is no data left to process.
        The return value will be passed to `Slot.slot`.
        """
        print("You should override __enter__ method by subclass")
        return False
    def __exit__(self, exc_type, exc_val, exc_tb):
        """Called when the slot is done.
        """
        print("You should override __exit__ method by subclass")
        pass
    def slot(self, msg):
        """
        Add your custom code here.
        For example, look at:
        * `comb.demo.list`
        * `comb.demo.mongo`
        * `comb.demo.redis`
        """
        pass
    # @staticmethod
    # def options():
    #     """
    #     replace this method if you want add user options
    #     :return:
    #     """
    #     return ()
    #     pass
|
from biohub.core.plugins import PluginConfig
class TestConfig(PluginConfig):
    # Minimal plugin configuration used as a fixture by the plugin tests;
    # the metadata fields are intentionally left blank.
    name = 'tests.core.plugins.test'
    title = ''
    author = ''
    description = ''
|
from flask.ext.wtf import Form
from wtforms import TextField
from wtforms.validators import Required
class NameForm(Form):
    # Single-field form asking for the user's name.
    # NOTE(review): flask.ext.* imports were removed in Flask 1.0, and
    # TextField/Required are deprecated WTForms aliases for
    # StringField/DataRequired -- confirm before upgrading dependencies.
    name = TextField('What is your name?', validators = [ Required() ])
|
from django.test import TestCase
from django.core.urlresolvers import reverse
class TestHomePage(TestCase):
    """Smoke tests that the home view renders the expected templates."""
    def _get_home(self):
        """Fetch the home page via the named URL route."""
        return self.client.get(reverse("home"))
    def test_uses_index_template(self):
        self.assertTemplateUsed(self._get_home(), "home/index.html")
    def test_uses_base_template(self):
        self.assertTemplateUsed(self._get_home(), "base.html")
|
import os
import sys
import warnings
from setuptools import setup
# Read the package version from shippo/version.py without importing the
# package itself (avoids pulling in its runtime dependencies at build time).
version_contents = {}
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, "shippo", "version.py"), encoding="utf-8") as f:
    exec(f.read(), version_contents)
setup(
    name='shippo',
    version=version_contents['VERSION'],
    description='Shipping API Python library (USPS, FedEx, UPS and more)',
    author='Shippo',
    author_email='support@goshippo.com',
    url='https://goshippo.com/',
    packages=['shippo', 'shippo.test', 'shippo.test.integration'],
    package_data={'shippo': ['../VERSION']},
    install_requires=[
        'requests >= 2.21.0, <= 2.27.1',
        'simplejson >= 3.16.0, <= 3.17.2',
    ],
    test_suite='shippo.test.all',
    tests_require=['unittest2', 'mock', 'vcrpy'],
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3 :: Only",
        "Programming Language :: Python :: Implementation :: PyPy",
        "Topic :: Software Development :: Libraries :: Python Modules",
    ]
)
|
from __future__ import absolute_import
from functools import partial
from pkg_resources import Requirement, resource_filename
import re
from mako.template import Template
from sqlalchemy import MetaData, select, create_engine, text
from sqlalchemy.exc import ArgumentError
from fixturegen.exc import (
NoSuchTable,
WrongDSN,
WrongNamingColumn,
NonValidRowClassName
)
# Package-relative path of the Mako template used to render fixtures.
_FIXTURE_TEMPLATE = 'fixturegen/templates/fixture.mako'
# A legal Python identifier: generated row class names must match this.
valid_class_name_re = re.compile(r'^[a-zA-Z_][a-zA-Z0-9_]*$')
def sqlalchemy_data(table, dsn, limit=None, where=None, order_by=None):
    """Reflect *table* over *dsn* and fetch its rows.

    :param table: name of the table to dump
    :param dsn: SQLAlchemy database URL
    :param limit: optional row limit
    :param where: optional raw SQL where-clause text
    :param order_by: optional raw SQL order-by text
    :return: (table_name, tuple_of_column_names, tuple_of_rows)
    :raises WrongDSN: when the DSN cannot be parsed
    :raises NoSuchTable: when the table is not found after reflection
    """
    try:
        engine = create_engine(dsn)
    except ArgumentError:
        raise WrongDSN
    metadata = MetaData()
    metadata.reflect(bind=engine)
    try:
        mapped_table = metadata.tables[table]
    except KeyError:
        raise NoSuchTable
    # NOTE(review): engine.execute() and the keyword form of where() follow
    # the pre-2.0 SQLAlchemy API (removed in 2.0) -- confirm the pinned
    # SQLAlchemy version before upgrading.
    query = select(mapped_table.columns)
    if where:
        query = query.where(whereclause=text(where))
    if order_by:
        query = query.order_by(text(order_by))
    if limit:
        query = query.limit(limit)
    columns = [column.name for column in mapped_table.columns]
    rows = engine.execute(query).fetchall()
    return table, tuple(columns), tuple(rows)
def get_row_class_name(row, table_name, naming_column_ids):
    """Derive a fixture row class name from the row's naming columns.

    Joins the values at *naming_column_ids* (hyphens mapped to underscores)
    onto *table_name*; raises NonValidRowClassName when the result is not a
    legal Python identifier.
    """
    parts = [str(row[i]).replace('-', '_') for i in naming_column_ids]
    class_name = '{0}_{1}'.format(table_name, '_'.join(parts))
    if not valid_class_name_re.match(class_name):
        raise NonValidRowClassName(class_name)
    return class_name
def generate(table, columns, rows, with_import=True,
             fixture_class_name=None, row_naming_columns=None):
    """Render a fixture module for *table* from the Mako template.

    :param table: table name (also used to derive class names)
    :param columns: tuple of column names
    :param rows: tuple of row tuples
    :param with_import: include import statements in the output
    :param fixture_class_name: override for the DataSet class name
    :param row_naming_columns: columns used to name each row class
        (defaults to the 'id' column)
    :raises WrongNamingColumn: when a naming column is not in *columns*
    """
    if not row_naming_columns:
        try:
            naming_column_ids = [columns.index('id')]
        except ValueError:
            raise WrongNamingColumn()
    else:
        try:
            naming_column_ids = [columns.index(column_name)
                                 for column_name in row_naming_columns]
        except ValueError:
            raise WrongNamingColumn()
    # Bind the naming parameters so the template can call row_class_name(row).
    row_class_name = partial(get_row_class_name, table_name=table,
                             naming_column_ids=naming_column_ids)
    if not fixture_class_name:
        camel_case_table = table.replace('_', ' ').title().replace(' ', '')
        fixture_class_name = camel_case_table + 'Data'
    filename = resource_filename(Requirement.parse('fixturegen'),
                                 _FIXTURE_TEMPLATE)
    template = Template(filename=filename)
    return template.render(table=table, columns=columns,
                           rows=rows, with_import=with_import,
                           fixture_class_name=fixture_class_name,
                           row_class_name=row_class_name)
|
"""
Django settings for gamesapi project.
Generated by 'django-admin startproject' using Django 1.10.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# SECURITY NOTE(review): the secret key is committed to source control --
# it should be loaded from the environment in any real deployment.
SECRET_KEY = '#gskn(2pi4i^9-a9%xt!4_9n_##a-xluh%d*+v5v-76f@v3!$z'
DEBUG = False
ALLOWED_HOSTS = []
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'rest_framework',
    'games.apps.GamesConfig',
    'crispy_forms',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'gamesapi.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'gamesapi.wsgi.application'
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
STATIC_URL = '/static/'
REST_FRAMEWORK = {
    'DEFAULT_PAGINATION_CLASS': 'games.pagination.LimitOffsetPaginationWithMaxLimit',
    'PAGE_SIZE': 5,
    # NOTE(review): rest_framework.filters.DjangoFilterBackend was moved to
    # the django_filters package in DRF 3.5+ -- confirm the pinned version.
    'DEFAULT_FILTER_BACKENDS': (
        'rest_framework.filters.DjangoFilterBackend',
        'rest_framework.filters.SearchFilter',
        'rest_framework.filters.OrderingFilter',
    ),
    # IMPORTANT: this app must be deployed on HTTPS, since the authentication below is basic + session.
    'DEFAULT_AUTHENTICATION_CLASSES': (
        'rest_framework.authentication.BasicAuthentication',
        'rest_framework.authentication.SessionAuthentication',
    ),
    'DEFAULT_THROTTLE_CLASSES': (
        'rest_framework.throttling.AnonRateThrottle',
        'rest_framework.throttling.UserRateThrottle',
    ),
    'DEFAULT_THROTTLE_RATES': {
        'anon': '5/hour',
        'user': '20/hour',
        'game-categories': '30/hour' # this dict key defines a "throttle scope", that must be referenced
        # on class-based-view on games/views.py.
    }
}
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
NOSE_ARGS = [
    '--with-coverage',
    '--cover-erase', # erase results from the previous run
    '--cover-inclusive', # includes all files under the working directory (raises the coverage report accuracy)
    '--cover-package=games', # the apps we want to cover
]
LOGGING = {
    'version': 1,
    'disable_existing_loggers': True,
    'formatters': {
        'standard': {
            'format' : "[%(asctime)s] %(levelname)s [%(name)s:%(lineno)s] %(message)s",
            'datefmt' : "%d/%b/%Y %H:%M:%S"
        },
    },
    'handlers': {
        'logfile': {
            'level': 'DEBUG',
            'class': 'logging.handlers.RotatingFileHandler',
            'filename': os.path.join(BASE_DIR, 'games.log'),
            'maxBytes': 50000,
            'backupCount': 2,
            'formatter': 'standard',
        },
        'console': {
            'level': 'INFO',
            'class': 'logging.StreamHandler',
            'formatter': 'standard'
        },
    },
    'loggers': {
        'django': {
            'handlers': ['console'],
            'propagate': True,
            'level': 'WARN',
        },
        'django.db.backends': {
            'handlers': ['console'],
            'level': 'DEBUG',
            'propagate': False,
        },
        'games': { # here we must create a logger to each django app.
            'handlers': ['console', 'logfile'],
            'level': 'DEBUG',
        },
    }
}
|
import time
from typing import List, Optional
from utils import tasks
from zirc.event import Event
from utils.database import Database
from zirc.wrappers import connection_wrapper
def chunks(l: List, n: int):
    """Yield successive n-sized chunks from l."""
    for start in range(0, len(l), n):
        chunk = l[start:start + n]
        yield chunk
def set_mode(irc: connection_wrapper, channel: str, users: List[str], mode: str):
    """Apply an IRC mode (e.g. '+b') to users in channel, four nicks per MODE line."""
    flag, letters = mode[0], mode[1:]
    for block in chunks(users, 4):
        irc.mode(channel, " ".join(block), flag + letters * len(block))
def get_users(args: str):
    # Extract the list of target nicks from a command argument string.
    # NOTE(review): when there is no comma and exactly one argument this
    # returns a plain str, not a list -- callers that index or mutate the
    # result will misbehave; confirm intended contract.
    if args.find(",") != -1:
        pos = args.find(",")
        users_str = args[pos:].strip()
        # NOTE(review): args[pos + 1] raises IndexError when the comma is
        # the last character of the input.
        if args[pos + 1] != " ":
            users = users_str[1:].split(",")
        else:
            users = users_str[2:].split(", ")
        # The word just before the first comma is also a target nick.
        args = args[:pos].strip().split(" ")
        users.append(args[-1])
    else:
        args_list = args.split(" ")
        if len(args_list) == 1:
            users = args_list[0]
        elif len(args_list) >= 2:
            users = args_list[:-1]
    return users
def get_user_host(userdb: Database, channel: str, nick: str):
    # Thin wrapper: resolve a nick to its stored user@host via the user DB.
    return userdb.get_user_host(channel, nick)
def get_info_tuple(event: Event, args: List[str], userdb: Optional[Database]=None):
    # Split a moderation-command argument list into (channel, users, message).
    # A leading '#word' selects an explicit channel, otherwise the event's
    # target channel is used.
    if args[0].startswith("#"):
        channel = args[0]
        str_args = " ".join(args[1:])
        del args[0]
    else:
        channel = event.target
        str_args = " ".join(args)
    if str_args.find(",") != -1:
        users = get_users(str_args)
    else:
        users = args[-1:]
    # Everything before the user list is the free-text message; default to
    # the caller's nick when no message was given.
    if " ".join(args[:-len(users)]) != '':
        message = " ".join(args[:-len(users)])
    else:
        message = f"{event.source.nick}"
    # Resolve bare nicks (no '!' in them) to full hostmasks via the DB.
    # NOTE(review): `not v.find("!") != -1` is a convoluted way of saying
    # "'!' not in v".
    for (i, v) in enumerate(users):
        if not v.find("!") != -1 and userdb is not None:
            users[i] = get_user_host(userdb, event.target, v)
    return channel, users, message
def unban_after_duration(irc: connection_wrapper, users: List[str], chan: str, duration: int):
    """Schedule an unban of every user in *users* on *chan* after *duration* seconds."""
    when = int(time.time()) + duration

    def do_unban(irc: connection_wrapper, users: List[str], chan: str):
        for target in users:
            irc.unban(chan, target)

    tasks.run_at(when, do_unban, (irc, users, chan))
def strip_colours(s: str):
    """Return *s* with mIRC formatting control codes removed.

    Strips reset (\\x0f), reverse (\\x16), italic (\\x1d), underline (\\x1f),
    bold (\\x02) and colour (\\x03) codes.  A colour code may be followed by
    "NN" or "NN,NN" arguments of one or two digits each; the digits are
    removed together with the code.
    """
    import re
    # The previous pattern ([1-9][0-6]?) missed zero-padded colour codes
    # such as "\x0304"; mIRC colour numbers are written with 1 or 2 digits.
    s = re.sub(r'\x03(\d{1,2}(,\d{1,2})?)?', '', s)
    return re.sub(r'[\x0f\x16\x1d\x1f\x02]', '', s)
|
from base64 import b64decode, b64encode
from hashlib import sha256
from Crypto import Random
from Crypto.Cipher import AES
from frontstage import app
class Cryptographer:
    """Encrypt and decrypt byte strings with AES-256 in CBC mode.

    The key is derived from the application's SECRET_KEY via SHA-256.  Each
    encryption uses a fresh random IV, which is prepended to the ciphertext
    before base64 encoding so decryption can recover it.
    """
    def __init__(self):
        """Derive the 32-byte AES key from the configured SECRET_KEY."""
        secret = app.config["SECRET_KEY"]
        self._key = sha256(secret.encode("utf-8")).digest()
    def encrypt(self, raw_text):
        """Return base64(IV + AES-CBC ciphertext) of *raw_text* (bytes)."""
        padded = self.pad(raw_text)
        iv = Random.new().read(AES.block_size)
        cipher = AES.new(self._key, AES.MODE_CBC, iv)
        return b64encode(iv + cipher.encrypt(padded))
    def decrypt(self, encrypted_text):
        """Return the plaintext bytes for base64-encoded *encrypted_text*."""
        raw = b64decode(encrypted_text)
        iv, body = raw[:16], raw[16:]
        cipher = AES.new(self._key, AES.MODE_CBC, iv)
        return self.unpad(cipher.decrypt(body))
    def pad(self, data):
        """Pad *data* (bytes) up to a multiple of the AES block size.

        The fill byte equals the number of bytes added (PKCS#7-style), so a
        block-aligned input gains one full block of padding.
        """
        fill = AES.block_size - len(data) % AES.block_size
        return data + bytes([fill]) * fill
    def unpad(self, data):
        """Strip the padding added by :meth:`pad`: the last byte gives its length."""
        return data[:-data[-1]]
|
'''
Created on 22/ago/2011
@author: norby
'''
from core.moduleexception import ModuleException, ProbeException, ExecutionException, ProbeSucceed
from core.moduleguess import ModuleGuess
from core.argparse import ArgumentParser, StoredNamespace
from core.argparse import SUPPRESS
from ast import literal_eval
import random
# User-facing status messages for the shell-interpreter probe below.
MSG_SH_INTERPRETER_SUCCEED = 'Shell interpreter load succeed'
WARN_SH_INTERPRETER_FAIL = 'Shell interpreters load failed'
class Sh(ModuleGuess):
    '''Execute system shell command'''
    def _set_vectors(self):
        # Each vector is a different PHP primitive for running an OS command;
        # the first one that echoes the probe back (see __slacky_probe) is
        # remembered and reused for subsequent calls.
        self.vectors.add_vector("system", 'shell.php', "@system('$cmd $no_stderr');")
        self.vectors.add_vector("passthru" , 'shell.php', "@passthru('$cmd $no_stderr');")
        self.vectors.add_vector("shell_exec", 'shell.php', "echo @shell_exec('$cmd $no_stderr');")
        self.vectors.add_vector("exec", 'shell.php', "@exec('$cmd $no_stderr', $r);echo(join(\"\\n\",$r));")
        #self.vectors.add_vector("pcntl", 'shell.php', ' $p = pcntl_fork(); if(!$p) {{ pcntl_exec( "/bin/sh", Array("-c", "$cmd")); }} else {{ pcntl_waitpid($p,$status); }}'),
        self.vectors.add_vector("popen", 'shell.php', "$h = popen('$cmd','r'); while(!feof($h)) echo(fread($h,4096)); pclose($h);")
        # NOTE(review): the quoting below looks unbalanced — the inner '$cmd'
        # closes the python_eval string argument early.  Confirm against a
        # working deployment before relying on this vector.
        self.vectors.add_vector("python_eval", 'shell.php', "python_eval('import os; os.system('$cmd$no_stderr');")
        self.vectors.add_vector("perl_system", 'shell.php', "$perl = new perl(); $r = @perl->system('$cmd$no_stderr'); echo $r;")
        self.vectors.add_vector("proc_open", 'shell.php', """$p = array(array('pipe', 'r'), array('pipe', 'w'), array('pipe', 'w'));
        $h = proc_open('$cmd', $p, $pipes); while(!feof($pipes[1])) echo(fread($pipes[1],4096));
        while(!feof($pipes[2])) echo(fread($pipes[2],4096)); fclose($pipes[0]); fclose($pipes[1]);
        fclose($pipes[2]); proc_close($h);""")
    def _set_args(self):
        # Command-line interface of the module.
        self.argparser.add_argument('cmd', help='Shell command', nargs='+')
        self.argparser.add_argument('-no-stderr', help='Suppress error output', action='store_false')
        self.argparser.add_argument('-vector', choices = self.vectors.keys())
        self.argparser.add_argument('-just-probe', help=SUPPRESS, action='store_true')
    def _init_stored_args(self):
        # Persist the name of the working vector between invocations.
        self.stored_args_namespace = StoredNamespace()
        setattr(self.stored_args_namespace, 'vector', None )
    def _execute_vector(self):
        # Probe for a working interpreter when none is saved yet, or when
        # the user explicitly asked to re-probe.
        if not getattr(self.stored_args_namespace, 'vector') or self.args['just_probe']:
            self.__slacky_probe()
        # Execute only when the current vector is the saved one or the one
        # the user chose with -vector.
        if self.current_vector.name in (getattr(self.stored_args_namespace, 'vector'), self.args['vector']):
            self._result = self.current_vector.execute( self.formatted_args)
    def _prepare_vector(self):
        # Format cmd: join argument words and escape single quotes for the
        # single-quoted PHP payload strings.
        self.formatted_args['cmd'] = ' '.join(self.args['cmd']).replace( "'", "\\'" )
        # Format stderr: redirect stderr into stdout unless suppressed.
        if any('$no_stderr' in p for p in self.current_vector.payloads):
            if self.args['no_stderr']:
                self.formatted_args['no_stderr'] = '2>&1'
            else:
                self.formatted_args['no_stderr'] = ''
    def __slacky_probe(self):
        # Probe the current vector by echoing a random number and checking
        # that it comes back verbatim.
        rand = str(random.randint( 11111, 99999 ))
        slacky_formats = self.formatted_args.copy()
        slacky_formats['cmd'] = 'echo %s' % (rand)
        if self.current_vector.execute( slacky_formats) == rand:
            setattr(self.stored_args_namespace, 'vector', self.current_vector.name)
            # Set as best interpreter
            #self.modhandler.interpreter = self.name
            if self.args['just_probe']:
                self._result = True
                raise ProbeSucceed(self.name, MSG_SH_INTERPRETER_SUCCEED)
            return
        raise ModuleException(self.name, WARN_SH_INTERPRETER_FAIL)
|
from django.db import migrations
def update_domain_forward(apps, schema_editor):
    """Seed or refresh the default Domain (pk=1) with the production hostname.

    Uses the historical model via ``apps`` so the migration stays valid as
    the app's models evolve.
    """
    Domain = apps.get_model("domains", "Domain")
    # Look up by pk only and put the name in ``defaults``: passing name as a
    # lookup would raise IntegrityError if pk=1 already exists with a
    # different name instead of updating it.
    Domain.objects.update_or_create(
        pk=1, defaults={"name": "fedrowanie.siecobywatelska.pl"}
    )
class Migration(migrations.Migration):
    """Data migration: set the default site domain record."""
    dependencies = [("domains", "0001_initial")]
    operations = [migrations.RunPython(update_domain_forward)]
|
from django.http import HttpResponse
from django.shortcuts import render
from django.views import generic
import api.soql
import json
from api.soql import *
def indexView(request):
    """Render the landing page with filter choices pulled from the open-data API."""
    context = {
        "vehicleAgencies": getUniqueValuesWithAggregate("gayt-taic", "agency", "max(postal_code)"),
        "vehicleFuelTypes": getUniqueValues("gayt-taic", "fuel_type"),
        "buildingAgencies": getUniqueValues("24pi-kxxa", "department_name")
    }
    return render(request,'TeamAqua/index.html', context=context)
def getUniqueValues(resource, column):
    """Return the distinct values of *column* in *resource*, sorted ascending.

    Builds a SoQL group-by query against the dataset and decodes the JSON
    response into Python objects.
    """
    query = api.soql.SoQL(resource)
    query = query.select([column]).groupBy([column]).orderBy({column: "ASC"})
    return json.loads(query.execute())
def getUniqueValuesWithAggregate(resource, column, aggregate):
    """Return distinct *column* values plus an *aggregate* expression, sorted ascending.

    Identical to getUniqueValues but also selects the given SoQL aggregate
    (e.g. "max(postal_code)") per group.
    """
    query = api.soql.SoQL(resource)
    query = query.select([column, aggregate]).groupBy([column]).orderBy({column: "ASC"})
    return json.loads(query.execute())
|
from __future__ import absolute_import, print_function, unicode_literals
from ..actions.install import install
from ._base import BaseCommand
from .options import dev, no_check, no_clean
class Command(BaseCommand):
    """CLI command that generates Pipfile.lock and synchronizes the environment."""
    name = "install"
    description = "Generate Pipfile.lock to synchronize the environment."
    arguments = [no_check, dev, no_clean]
    def run(self, options):
        """Run the install action with the parsed command-line options."""
        # NOTE(review): assumes the no_check/no_clean options store into
        # options.check/options.clean via their argparse dests — confirm
        # in .options.
        return install(project=options.project, check=options.check, dev=options.dev,
                       clean=options.clean)
# Allow running this command module directly as a script.
if __name__ == "__main__":
    Command.run_parser()
|
import os
import sys
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), "python-bitcoinrpc"))
import json
import shutil
import subprocess
import tempfile
import traceback
from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
from util import *
from netutil import *
def run_bind_test(tmpdir, allow_ips, connect_to, addresses, expected):
    '''
    Start a node with requested rpcallowip and rpcbind parameters,
    then try to connect, and check if the set of bound addresses
    matches the expected set.

    allow_ips: list of -rpcallowip values (or None for the default);
    connect_to: host[:port] the RPC client should reach the node on;
    addresses: list of -rpcbind values; expected: list of (addr, port)
    pairs the daemon must actually be listening on.
    '''
    expected = [(addr_to_hex(addr), port) for (addr, port) in expected]
    base_args = ['-disablewallet', '-nolisten']
    if allow_ips:
        base_args += ['-rpcallowip=' + x for x in allow_ips]
    binds = ['-rpcbind='+addr for addr in addresses]
    nodes = start_nodes(1, tmpdir, [base_args + binds], connect_to)
    try:
        # Compare the OS-reported bound sockets of the daemon process.
        pid = dogecoind_processes[0].pid
        assert_equal(set(get_bind_addrs(pid)), set(expected))
    finally:
        stop_nodes(nodes)
        wait_dogecoinds()
def run_allowip_test(tmpdir, allow_ips, rpchost, rpcport):
    '''
    Start a node with the given -rpcallowip list, then issue getinfo
    from a non-localhost IP; raises if the connection is refused.
    '''
    base_args = ['-disablewallet', '-nolisten'] + ['-rpcallowip='+x for x in allow_ips]
    nodes = start_nodes(1, tmpdir, [base_args])
    try:
        # connect to node through non-loopback interface
        url = "http://wowsuchtest:3kt4yEUdDJ4YGzsGNADvjYwubwaFhEEYjotPJDU2XMgG@%s:%d" % (rpchost, rpcport,)
        node = AuthServiceProxy(url)
        node.getinfo()
    finally:
        node = None # make sure connection will be garbage collected and closed
        stop_nodes(nodes)
        wait_dogecoinds()
def run_test(tmpdir):
    """Exercise -rpcbind/-rpcallowip combinations against a single node."""
    assert(sys.platform == 'linux2') # due to OS-specific network stats queries, this test works only on Linux
    # find the first non-loopback interface for testing
    non_loopback_ip = None
    for name,ip in all_interfaces():
        if ip != '127.0.0.1':
            non_loopback_ip = ip
            break
    if non_loopback_ip is None:
        # Deliberate always-failing assert carrying the message string.
        assert(not 'This test requires at least one non-loopback IPv4 interface')
    print("Using interface %s for testing" % non_loopback_ip)
    defaultport = rpc_port(0)
    # check default without rpcallowip (IPv4 and IPv6 localhost)
    run_bind_test(tmpdir, None, '127.0.0.1', [],
        [('127.0.0.1', defaultport), ('::1', defaultport)])
    # check default with rpcallowip (IPv6 any)
    run_bind_test(tmpdir, ['127.0.0.1'], '127.0.0.1', [],
        [('::0', defaultport)])
    # check only IPv4 localhost (explicit)
    run_bind_test(tmpdir, ['127.0.0.1'], '127.0.0.1', ['127.0.0.1'],
        [('127.0.0.1', defaultport)])
    # check only IPv4 localhost (explicit) with alternative port
    run_bind_test(tmpdir, ['127.0.0.1'], '127.0.0.1:32171', ['127.0.0.1:32171'],
        [('127.0.0.1', 32171)])
    # check only IPv4 localhost (explicit) with multiple alternative ports on same host
    run_bind_test(tmpdir, ['127.0.0.1'], '127.0.0.1:32171', ['127.0.0.1:32171', '127.0.0.1:32172'],
        [('127.0.0.1', 32171), ('127.0.0.1', 32172)])
    # check only IPv6 localhost (explicit)
    run_bind_test(tmpdir, ['[::1]'], '[::1]', ['[::1]'],
        [('::1', defaultport)])
    # check both IPv4 and IPv6 localhost (explicit)
    run_bind_test(tmpdir, ['127.0.0.1'], '127.0.0.1', ['127.0.0.1', '[::1]'],
        [('127.0.0.1', defaultport), ('::1', defaultport)])
    # check only non-loopback interface
    run_bind_test(tmpdir, [non_loopback_ip], non_loopback_ip, [non_loopback_ip],
        [(non_loopback_ip, defaultport)])
    # Check that with invalid rpcallowip, we are denied
    run_allowip_test(tmpdir, [non_loopback_ip], non_loopback_ip, defaultport)
    try:
        run_allowip_test(tmpdir, ['1.1.1.1'], non_loopback_ip, defaultport)
        assert(not 'Connection not denied by rpcallowip as expected')
    except ValueError:
        pass
def main():
    """Parse options, run the rpcbind/rpcallowip test suite, and exit with status."""
    import optparse
    parser = optparse.OptionParser(usage="%prog [options]")
    parser.add_option("--nocleanup", dest="nocleanup", default=False, action="store_true",
                      help="Leave dogecoinds and test.* datadir on exit or error")
    parser.add_option("--srcdir", dest="srcdir", default="../../src",
                      help="Source directory containing dogecoind/dogecoin-cli (default: %default)")
    parser.add_option("--tmpdir", dest="tmpdir", default=tempfile.mkdtemp(prefix="test"),
                      help="Root directory for datadirs")
    (options, args) = parser.parse_args()

    # Make the locally built binaries take precedence on PATH.
    os.environ['PATH'] = options.srcdir+":"+os.environ['PATH']

    check_json_precision()

    success = False
    nodes = []
    try:
        print("Initializing test directory "+options.tmpdir)
        if not os.path.isdir(options.tmpdir):
            os.makedirs(options.tmpdir)
        initialize_chain(options.tmpdir)
        run_test(options.tmpdir)
        success = True
    except AssertionError as e:
        # str(e) works on both Python 2 and 3; e.message was removed in py3.
        print("Assertion failed: "+str(e))
    except Exception as e:
        print("Unexpected exception caught during testing: "+str(e))
        traceback.print_tb(sys.exc_info()[2])

    if not options.nocleanup:
        print("Cleaning up")
        wait_dogecoinds()
        shutil.rmtree(options.tmpdir)

    if success:
        print("Tests successful")
        sys.exit(0)
    else:
        print("Failed")
        sys.exit(1)
# Script entry point.
if __name__ == '__main__':
    main()
|
import copy
class Solution:
    # @param strs: A list of strings
    # @return: A list of strings
    def anagrams(self, strs):
        """Return every string in *strs* that has at least one anagram partner.

        Groups words by their sorted-character signature with a defaultdict,
        then keeps the members of every group with two or more entries.
        Runs in O(n * k log k) for n words of length k, replacing the
        commented-out O(n^2) pairwise comparison.
        """
        from collections import defaultdict

        groups = defaultdict(list)
        for word in strs:
            groups["".join(sorted(word))].append(word)

        result = []
        for members in groups.values():
            if len(members) > 1:
                result.extend(members)
        return result
|
"""
functionモジュールのparticle_resampling関数をテストする
"""
from functions import particles_resampling
import pfoe
robot1 = pfoe.Robot(sensor=4,choice=3,particle_num=100)
for i in range(100):
robot1.particles.distribution[i] = i % 5
robot1.particles.weight[i] = 1.0 / 100.0
robot1.particles = particles_resampling(robot1.particles,5)
print robot1.particles.weight
print robot1.particles.distribution
for i in range(100):
robot1.particles.distribution[i] = i % 5
if i % 5 == 0:
robot1.particles.weight[i] = 1.0 / 20.0
else:
robot1.particles.weight[i] = 0.0
robot1.particles = particles_resampling(robot1.particles,5)
print robot1.particles.weight
print robot1.particles.distribution
|
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Alter Department.hod to a CASCADE foreign key to Profiler.Faculty."""
    dependencies = [
        ('Course', '0005_coursegroup'),
    ]
    operations = [
        migrations.AlterField(
            model_name='department',
            name='hod',
            # NOTE(review): default=False on a ForeignKey is unusual —
            # confirm the intended default before reusing this pattern.
            field=models.ForeignKey(default=False, on_delete=django.db.models.deletion.CASCADE, related_name='head_of_dept', to='Profiler.Faculty'),
        ),
    ]
|
"""Package. Manages event queues.
Writing event-driven code
-------------------------
Event-driven procedures should be written as python coroutines (extended generators).
To call the event API, yield an instance of the appropriate command. You can use
sub-procedures - just yield the appropriate generator (a minor nuisance is that you
cannot have such sub-procedure return a value).
Example
-------
.. code:: python
from satori.events import *
def countdown():
queue = QueueId('any string will do')
mapping = yield Map({}, queue)
yield Attach(queue)
yield Send(Event(left=10))
while True:
q, event = yield Receive()
if event.left == 0:
break
event.left -= 1
yield Send(event)
yield Unmap(mapping)
yield Detach(queue)
"""
from .api import Event, MappingId, QueueId
from .protocol import Attach, Detach
from .protocol import Map, Unmap
from .protocol import Send, Receive
from .protocol import KeepAlive, Disconnect, ProtocolError
from .api import Manager
from .master import Master
from .slave import Slave
from .client2 import Client2
from .slave2 import Slave2
# Public API of the events package.  Previously Disconnect, Manager, Client2
# and Slave2 were imported above but missing here, so `from satori.events
# import *` did not expose them; they are now re-exported as well.
__all__ = (
    'Event', 'MappingId', 'QueueId',
    'Attach', 'Detach',
    'Map', 'Unmap',
    'Send', 'Receive',
    'KeepAlive', 'Disconnect', 'ProtocolError',
    'Manager',
    'Master', 'Slave',
    'Client2', 'Slave2',
)
|
from setuptools import setup
from setuptools import find_packages
# Standard setuptools packaging metadata for the gym-square environment.
setup(name='gym_square',
      version='0.0.1',
      author='Guillaume de Chambrier',
      author_email='chambrierg@gmail.com',
      description='A simple square world environment for openai/gym',
      packages=find_packages(),
      url='https://github.com/gpldecha/gym-square',
      license='MIT',
      install_requires=['gym']
)
|
"""
Read the list of chimeric interactions and generate a file that can be read
by circos.
"""
import sys
import argparse
from collections import defaultdict
from math import log
import pro_clash
def process_command_line(argv):
    """Parse command-line arguments.

    `argv` is a list of arguments, or `None` for ``sys.argv[1:]``.
    Returns the populated argparse namespace.
    """
    args = sys.argv[1:] if argv is None else argv
    # initialize the parser object, replace the description
    parser = argparse.ArgumentParser(
        description='Generate circos data file.',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument(
        'reads_in',
        help='An output file of map_chimeric_fragments.py with the chimeric'
        ' fragments.')
    parser.add_argument(
        '-r', '--region', type=int, default=200,
        help='Split the genome to windows of this size.')
    parser.add_argument(
        '-c', '--chrn', default='chr',
        help='Name of chromosome to plot.')
    parser.add_argument(
        '-p', '--print_chr', default='ecmain',
        help='Name of chromosome in circos.')
    parser.add_argument(
        '-m', '--min_interactions', type=int, default=100,
        help='Minimum number of interactions between two regions to plot.')
    return parser.parse_args(args)
def main(argv=None):
    """Read chimeric-fragment reads and emit circos link lines to stdout."""
    settings = process_command_line(argv)
    region_interactions, _, _, _=\
        pro_clash.read_reads_table(open(settings.reads_in), settings.region)
    # Count interactions between window start positions, restricted to the
    # chosen chromosome on both ends.
    both_strs = defaultdict(lambda: defaultdict(int))
    for reg1 in region_interactions:
        if reg1[2] != settings.chrn:
            continue
        for reg2 in region_interactions[reg1]:
            if reg2[2] != settings.chrn:
                continue
            both_strs[reg1[0]][reg2[0]] += len(region_interactions[reg1][reg2])
    for r1 in both_strs:
        for r2 in both_strs[r1]:
            if both_strs[r1][r2] > settings.min_interactions:
                # Link thickness scales with log10 of the interaction count
                # (truncated to an int by the %d format).
                sys.stdout.write('%s %d %d %s %d %d thickness=%dp\n'%(
                    settings.print_chr, r1+1, r1+settings.region,
                    settings.print_chr, r2+1, r2+settings.region,
                    log(both_strs[r1][r2])/log(10)))
    return 0 # success
# Script entry point: propagate main()'s return value as the exit status.
if __name__ == '__main__':
    status = main()
    sys.exit(status)
|
from __future__ import unicode_literals
from django.db import models
import datetime
from django.db.models.signals import pre_save
from django.urls import reverse
from django.utils.text import slugify
from django.utils.translation import ugettext_lazy as _
from source_utils.starters import CommonInfo, GenericCategory
from versatileimagefield.fields import (
VersatileImageField,
PPOIField
)
def upload_location(instance, filename):
    """Return the upload path for *filename*, namespaced by the instance's slug."""
    return "/".join([str(instance.slug), str(filename)])
# Unit-of-measure choices for Product.quantity_assesement.
# NOTE(review): "ASSESEMENT" is a misspelling of "assessment"; the name is
# kept because the model field below references it.
ASSESEMENT = (
    ('units', 'Per unit'),
    ('square feet', 'Square foot'),
    ('linear feet', 'Linear foot'),
    ('square meters', 'Square meter'),
    ('linear meters', 'Linear meter'),
    )
class Base(GenericCategory):
    """
    This model represents the general type of product category offered.
    """
    class Meta:
        verbose_name = _('Product Category')
        verbose_name_plural = _('Product Categories')
        ordering = ["category"]
    def get_success_url(self):
        """Redirect target after create/update: the company list page."""
        return reverse("product:company_list")
    def get_absolute_url(self):
        """Canonical detail URL for this category, keyed by slug."""
        return reverse(
            "product:base_product_detail",
            kwargs={'slug': self.slug}
        )
def pre_save_category(sender, instance, *args, **kwargs):
    """pre_save signal handler: derive the slug from the category name."""
    instance.slug = slugify(instance.category)
pre_save.connect(pre_save_category, sender=Base)
class Product(CommonInfo):
    """
    This model describes the specific product related to the category.

    Tracks per-phase labor time and material cost, stock levels with a
    reorder flag, discontinuation state, and an optional product image.
    """
    base = models.ForeignKey(
        Base,
        on_delete=models.CASCADE
    )
    supplier = models.ForeignKey(
        'company.Company',
        on_delete=models.CASCADE
    )
    item = models.CharField(
        max_length=30,
        unique=True
    )
    # Labor time split by project phase (admin/prep/field); summed by
    # get_time().  NOTE(review): units (hours?) are not stated here —
    # confirm against the callers.
    admin_time = models.DecimalField(
        default=0,
        max_digits=4,
        decimal_places=2
    )
    prep_time = models.DecimalField(
        default=0,
        max_digits=4,
        decimal_places=2
    )
    field_time = models.DecimalField(
        default=0,
        max_digits=4,
        decimal_places=2
    )
    # Material cost split by the same phases; summed by get_cost().
    admin_material = models.DecimalField(
        default=0,
        max_digits=8,
        decimal_places=2
    )
    prep_material = models.DecimalField(
        default=0,
        max_digits=8,
        decimal_places=2
    )
    field_material = models.DecimalField(
        default=0,
        max_digits=8,
        decimal_places=2
    )
    quantity_assesement = models.CharField(
        max_length=12,
        verbose_name=_("Quantity assesement method"),
        choices=ASSESEMENT
    )
    # Stock management: pre_save_product sets order_now when the usable
    # quantity falls below order_if_below.
    order_if_below = models.SmallIntegerField()
    discontinued = models.DateField(
        null=True,
        blank=True
    )
    order_now = models.BooleanField(
        default=False
    )
    units_damaged_or_lost = models.SmallIntegerField(
        default=0
    )
    quantity = models.SmallIntegerField(
        "Usable quantity",
        default=0,
        null=True,
        blank=True
    )
    quantity_called_for = models.SmallIntegerField(
        default=0,
        null=True,
        blank=True
    )
    # Image fields: width/height/ppoi are populated by VersatileImageField.
    image = VersatileImageField(
        'Image',
        upload_to='images/product/',
        null=True, blank=True,
        width_field='width',
        height_field='height',
        ppoi_field='ppoi'
    )
    height = models.PositiveIntegerField(
        'Image Height',
        blank=True,
        null=True
    )
    width = models.PositiveIntegerField(
        'Image Width',
        blank=True,
        null=True
    )
    ppoi = PPOIField(
        'Image PPOI'
    )
    no_longer_available = models.BooleanField(default=False)
    class Meta:
        ordering= ['item']
    def __str__(self):
        return self.item
    def get_time(self):
        """Total labor time across the admin, prep and field phases."""
        return self.admin_time + self.prep_time + self.field_time
    def get_cost(self):
        """Total material cost across the admin, prep and field phases."""
        return self.admin_material + self.prep_material + self.field_material
    def get_usable_quantity(self):
        """Stock on hand minus damaged/lost units and units already committed."""
        return self.quantity - self.units_damaged_or_lost - self.quantity_called_for
    def get_success_url(self):
        """Redirect target after edits: the item list for this product's category."""
        return reverse("product:category_item_list", kwargs={'slug': self.base.slug})
    def get_absolute_url(self):
        """Canonical detail URL for this product, keyed by slug."""
        return reverse("product:item_detail", kwargs={'slug': self.slug})
def pre_save_product(sender, instance, *args, **kwargs):
    """pre_save handler keeping derived Product fields in sync.

    Maintains the discontinued date, recomputes the reorder flag from the
    usable quantity, and refreshes the slug from the item name.
    """
    # Track when the item became unavailable; clear the date if it returns.
    if not instance.no_longer_available:
        instance.discontinued = None
    elif instance.discontinued is None:
        instance.discontinued = datetime.date.today()
    usable = (instance.quantity
              - instance.units_damaged_or_lost
              - instance.quantity_called_for)
    instance.order_now = usable < instance.order_if_below
    instance.slug = slugify(instance.item)
pre_save.connect(pre_save_product, sender=Product)
|
import logging
import os
import shlex
import unittest
import sys
from toil.common import toilPackageDirPath
from toil.lib.bioio import getBasicOptionParser, parseSuiteTestOptions
log = logging.getLogger(__name__)
class ToilTest(unittest.TestCase):
    """
    A common base class for our tests. Please have every test case directly or indirectly inherit this one.

    setUpClass splices TOIL_TEST_ARGS from the environment into sys.argv for
    the suite option parser; tearDownClass restores the original argv.
    """
    orig_sys_argv = None
    def getScriptPath(self, script_name):
        """Return the absolute path of the bundled utility script *script_name*."""
        return os.path.join(toilPackageDirPath(), 'utils', script_name + '.py')
    @classmethod
    def setUpClass(cls):
        super(ToilTest, cls).setUpClass()
        # Stash the real argv; the suite is configured via TOIL_TEST_ARGS.
        cls.orig_sys_argv = sys.argv[1:]
        sys.argv[1:] = shlex.split(os.environ.get('TOIL_TEST_ARGS', ""))
        parser = getBasicOptionParser()
        options, args = parseSuiteTestOptions(parser)
        sys.argv[1:] = args
    @classmethod
    def tearDownClass(cls):
        sys.argv[1:] = cls.orig_sys_argv
        super(ToilTest, cls).tearDownClass()
    def setUp(self):
        log.info("Setting up %s", self.id())
        super(ToilTest, self).setUp()
    def tearDown(self):
        super(ToilTest, self).tearDown()
        # Fixed the duplicated word in the log message ("down down").
        log.info("Tearing down %s", self.id())
|
from setuptools import setup, find_packages
from pip.req import parse_requirements
version = "6.27.20"
requirements = parse_requirements("requirements.txt", session="")
setup(
name='frappe',
version=version,
description='Metadata driven, full-stack web framework',
author='Frappe Technologies',
author_email='info@frappe.io',
packages=find_packages(),
zip_safe=False,
include_package_data=True,
install_requires=[str(ir.req) for ir in requirements],
dependency_links=[str(ir._link) for ir in requirements if ir._link]
)
|
# Resolved leftover merge-conflict markers: all three conflict branches
# contained this identical line, so a single copy is kept.
from . import client, rest, session
|
import os
import subprocess
from pathlib import Path
from time import sleep
PACKAGES = Path('packages')
class Module:
    """An npm package that is rebuilt when its watched files change.

    Each module watches a set of files/directories, remembers a checksum
    (sum of mtimes) of them, and reruns ``npm run build`` when the checksum
    moves.  Dependencies are checked (and rebuilt) before the module itself.
    """

    def __init__(self, name, path=None, files=None, dependencies=None):
        """Set up a module named *name* rooted at *path* (default packages/<name>).

        *files* lists files — or directories, when suffixed with "/" — to
        watch (default src/ and style/); *dependencies* are Modules that are
        checked before this one.
        """
        self.name = name
        if path is None:
            path = PACKAGES / name
        self.path = path
        self.files = files or ["src/", "style/"]
        self.dependencies = dependencies or []
        # Checksum from the previous scan; 0 means "never scanned yet".
        self.old_sum = 0

    def check_dir(self):
        """Return True if any watched file has changed since the last call."""
        time_list = []
        for entry in self.files:
            target = self.path / Path(entry)
            if not entry.endswith("/"):
                watched = [target]
            else:
                # A trailing "/" means: watch every file in the tree.
                # Accumulate across ALL subdirectories — the previous code
                # overwrote the list on each os.walk iteration and therefore
                # only noticed changes in the last directory visited.
                watched = []
                for root, _, files in os.walk(target):
                    root = Path(root)
                    watched.extend(root / f for f in files)
            time_list += [os.stat(f).st_mtime for f in watched]
        new_sum = sum(time_list)
        changed = new_sum != self.old_sum
        self.old_sum = new_sum
        return changed

    def run(self):
        """Run ``npm run build`` in the package directory; raise on failure."""
        print("Building", self.name)
        process = subprocess.Popen(
            "npm run build",
            shell=True,
            cwd=self.path,
        )
        status = process.wait()
        if status:
            raise Exception("NPM run failed")

    def check(self, run=True, visited=None):
        """Check whether this module or a dependency changed; build when needed.

        Returns True when the module is up to date.  *visited* memoizes
        results within one scan; the previous mutable default ({}) leaked
        state across calls, so a fresh dict is now created per call.
        """
        if visited is None:
            visited = {}
        if self in visited:
            return visited[self]
        visited[self] = True
        invalid = False
        for dependency in self.dependencies:
            if not dependency.check(run, visited):
                invalid = True
        invalid |= self.check_dir()
        if run and invalid:
            visited[self] = False
            self.run()
        return not invalid

    def __hash__(self):
        # Identity is the package path, so `visited` dedupes shared deps.
        return hash(self.path)

    def __repr__(self):
        return "Module({})".format(self.name)
class NoFileModule(Module):
    """Virtual module used only to aggregate dependencies: it watches no files and has no build step."""
    def check_dir(self):
        # Nothing on disk to monitor.
        return False
    def run(self):
        # No build action of its own.
        pass
utils = Module("utils")
history = Module("history", dependencies=[utils])
trial = Module("trial", dependencies=[utils])
nowvis = Module("nowvis", dependencies=[history, trial])
nbextension = Module("nbextension", dependencies=[history, trial])
ALL = NoFileModule("ALL", dependencies=[nowvis, nbextension])
print("Monitoring packages...")
while True:
visited = {}
try:
ALL.check(visited=visited)
except Exception as e:
print("Failed: {}".format(e))
sleep(1.0)
|
"""pandoc-fignos: a pandoc filter that inserts figure nos. and refs."""
import re
import functools
import itertools
import io
import sys
import pandocfilters
from pandocfilters import stringify, walk
from pandocfilters import RawInline, Str, Space, Para, Plain, Cite, elt
from pandocattributes import PandocAttributes
# AST constructors for the two image arities used across pandoc versions.
Image = elt('Image', 2)  # Pandoc < 1.16
AttrImage = elt('Image', 3)  # Pandoc >= 1.16
# A figure label is "fig:..." (optionally followed by text); a reference
# to it in body text is written "@fig:...".
LABEL_PATTERN = re.compile(r'(fig:[\w/-]*)(.*)')
REF_PATTERN = re.compile(r'@(fig:[\w/-]+)')
PY3 = sys.version_info > (3,)
if PY3:  # Force utf-8 decoding (decoding of input streams is automatic in py3)
    STDIN = io.TextIOWrapper(sys.stdin.buffer, 'utf-8', 'strict')
    STDOUT = io.TextIOWrapper(sys.stdout.buffer, 'utf-8', 'strict')
else:  # No decoding; utf-8-encoded strings in means the same out
    STDIN = sys.stdin
    STDOUT = sys.stdout
references = {}  # Global references tracker: label -> figure number
def is_attrimage(key, value):
    """True if this is an attributed image; False otherwise.

    Handles both the pandoc < 1.16 representation (2-element Image whose
    attributes trail the image as a "{...}" string) and the >= 1.16 one
    (3-element Image with native attributes).
    """
    try:
        if key == 'Para' and value[0]['t'] == 'Image':
            # Old pandoc < 1.16: the attribute string follows the image.
            if len(value[0]['c']) == 2:
                s = stringify(value[1:]).strip()
                return s.startswith('{') and s.endswith('}')
            # New pandoc >= 1.16 has image attributes by default.
            assert len(value[0]['c']) == 3
            return True
        return False
    # Malformed/unexpected AST nodes are simply "not an attributed image".
    # Narrowed from a bare except so SystemExit/KeyboardInterrupt propagate.
    except Exception:
        return False
def parse_attrimage(value):
    """Parses an attributed image.

    Returns (attrs, caption, target, label); attrs is None for pandoc < 1.16.
    An anonymous "fig:" label is made unique by appending a hash of the
    image target so it can still be numbered.
    """
    if len(value[0]['c']) == 2:  # Old pandoc < 1.16
        attrs, (caption, target) = None, value[0]['c']
        s = stringify(value[1:]).strip()  # The attribute string
        # Extract label from attributes (label, classes, kvs)
        label = PandocAttributes(s, 'markdown').to_pandoc()[0]
        if label == 'fig:':   # Make up a unique description
            label = label + '__'+str(hash(target[0]))+'__'
        return attrs, caption, target, label
    else:  # New pandoc >= 1.16
        assert len(value[0]['c']) == 3
        attrs, caption, target = value[0]['c']
        s = stringify(value[1:]).strip()  # The attribute string
        # Extract label from attributes
        label = attrs[0]
        if label == 'fig:':   # Make up a unique description
            label = label + '__'+str(hash(target[0]))+'__'
        return attrs, caption, target, label
def is_ref(key, value):
    """True if this is a figure reference; False otherwise.

    A reference is a Cite whose text matches @fig:... and whose label was
    registered in `references` by replace_attrimages earlier in the walk.
    """
    return key == 'Cite' and REF_PATTERN.match(value[1][0]['c']) and \
            parse_ref(value)[1] in references
def parse_ref(value):
    """Split a figure-reference Cite into (prefix, label, suffix)."""
    citation = value[0][0]
    label = REF_PATTERN.match(value[1][0]['c']).groups()[0]
    return citation['citationPrefix'], label, citation['citationSuffix']
def ast(string):
    """Returns an AST representation of the string.

    Words become Str elements separated by Space elements; leading and
    trailing whitespace in *string* is preserved as Space elements.
    """
    ret = []
    for word in string.split():
        ret.append(Str(word))
        ret.append(Space())
    if string[0] == ' ':
        ret = [Space()] + ret
    return ret if string[-1] == ' ' else ret[:-1]
def is_broken_ref(key1, value1, key2, value2):
    """True if this is a broken link; False otherwise.

    Detects the Link + Str pair that pandoc's --autolink_bare_uris produces
    when it splits "{@fig:label}" at the colon (see repair_broken_refs).
    """
    try:  # Pandoc >= 1.16
        return key1 == 'Link' and value1[1][0]['t'] == 'Str' and \
          value1[1][0]['c'].endswith('{@fig') \
          and key2 == 'Str' and '}' in value2
    except TypeError:  # Pandoc < 1.16
        return key1 == 'Link' and value1[0][0]['t'] == 'Str' and \
          value1[0][0]['c'].endswith('{@fig') \
          and key2 == 'Str' and '}' in value2
def repair_broken_refs(value):
    """Repairs references broken by pandoc's --autolink_bare_uris.

    Returns the repaired element list, or None when nothing needed fixing
    (callers use the None to stop iterating).
    """
    # autolink_bare_uris splits {@fig:label} at the ':' and treats
    # the first half as if it is a mailto url and the second half as a string.
    # Let's replace this mess with Cite and Str elements that we normally
    # get.
    flag = False
    for i in range(len(value)-1):
        if value[i] == None:
            # Slot already consumed by an earlier repair in this pass.
            continue
        if is_broken_ref(value[i]['t'], value[i]['c'],
                         value[i+1]['t'], value[i+1]['c']):
            flag = True  # Found broken reference
            try:  # Pandoc >= 1.16
                s1 = value[i]['c'][1][0]['c']  # Get the first half of the ref
            except TypeError:  # Pandoc < 1.16
                s1 = value[i]['c'][0][0]['c']  # Get the first half of the ref
            s2 = value[i+1]['c']            # Get the second half of the ref
            ref = '@fig' + s2[:s2.index('}')]  # Form the reference
            prefix = s1[:s1.index('{@fig')]    # Get the prefix
            suffix = s2[s2.index('}')+1:]      # Get the suffix
            # We need to be careful with the prefix string because it might be
            # part of another broken reference.  Simply put it back into the
            # stream and repeat the preprocess() call.
            if i > 0 and value[i-1]['t'] == 'Str':
                value[i-1]['c'] = value[i-1]['c'] + prefix
                value[i] = None
            else:
                value[i] = Str(prefix)
            # Put fixed reference in as a citation that can be processed
            value[i+1] = Cite(
                [{"citationId":ref[1:],
                  "citationPrefix":[],
                  "citationSuffix":[Str(suffix)],
                  "citationNoteNum":0,
                  "citationMode":{"t":"AuthorInText", "c":[]},
                  "citationHash":0}],
                [Str(ref)])
    if flag:
        # Drop the slots emptied during repair.
        return [v for v in value if v is not None]
def is_braced_ref(i, value):
    """Returns true if the reference at index *i* is wrapped in curly braces; otherwise False."""
    return is_ref(value[i]['t'], value[i]['c']) \
      and value[i-1]['t'] == 'Str' and value[i+1]['t'] == 'Str' \
      and value[i-1]['c'].endswith('{') and value[i+1]['c'].startswith('}')
def remove_braces(value):
    """Strip curly braces around figure references in place.

    Returns True when at least one braced reference was found.
    """
    found = False
    for i in range(1, len(value) - 1):
        if is_braced_ref(i, value):
            found = True
            # Trim the trailing '{' and leading '}' from the neighbours.
            value[i-1]['c'] = value[i-1]['c'][:-1]
            value[i+1]['c'] = value[i+1]['c'][1:]
    return found
def preprocess(key, value, fmt, meta):
    """Repair Para/Plain elements damaged by --autolink_bare_uris.

    Repeats repair_broken_refs until it reports nothing left to fix, then
    returns a rebuilt element; other node types are left untouched (None).
    """
    if key not in ('Para', 'Plain'):
        return None
    repaired = repair_broken_refs(value)
    while repaired:
        value = repaired
        repaired = repair_broken_refs(value)
    return Para(value) if key == 'Para' else Plain(value)
def replace_attrimages(key, value, fmt, meta):
    """Replaces attributed images while storing reference labels.

    Assigns each labelled figure a sequential number in the global
    `references` dict and rewrites the caption accordingly (or emits a
    \\label in latex).  Returns the replacement node(s), or None to keep
    the original element unchanged.
    """
    if is_attrimage(key, value):
        # Parse the image
        attrs, caption, target, label = parse_attrimage(value)
        # Bail out if the label does not conform
        if not label or not LABEL_PATTERN.match(label):
            return None
        # Save the reference
        references[label] = len(references) + 1
        # Adjust caption depending on the output format
        if fmt == 'latex':
            caption = list(caption) + [RawInline('tex', r'\label{%s}'%label)]
        else:
            caption = ast('Figure %d. '%references[label]) + list(caption)
        # Required for pandoc to process the image
        target[1] = "fig:"
        # Return the replacement
        if len(value[0]['c']) == 2:  # Old pandoc < 1.16
            img = Image(caption, target)
        else:  # New pandoc >= 1.16
            assert len(value[0]['c']) == 3
            img = AttrImage(attrs, caption, target)
        if fmt in ('html', 'html5'):
            # Insert an anchor so hyperlinked references can jump here.
            anchor = RawInline('html', '<a name="%s"></a>'%label)
            return [Plain([anchor]), Para([img])]
        else:
            return Para([img])
def replace_refs(key, value, fmt, meta):
    """Replaces references to labelled images."""
    # Remove braces around references inside Para/Plain content
    if key in ('Para', 'Plain'):
        if remove_braces(value):
            return Para(value) if key == 'Para' else Plain(value)
    # Replace a reference element with its figure number
    if is_ref(key, value):
        prefix, label, suffix = parse_ref(value)
        # Pick the replacement element for the output format
        if fmt == 'latex':
            replacement = RawInline('tex', r'\ref{%s}'%label)
        elif fmt in ('html', 'html5'):
            link = '<a href="#%s">%s</a>' % (label, references[label])
            replacement = RawInline('html', link)
        else:
            replacement = Str('%d'%references[label])
        return prefix + [replacement] + suffix
def main():
    """Filters the document AST."""
    # Output format is passed by pandoc as the first command-line argument
    fmt = sys.argv[1] if len(sys.argv) > 1 else ''
    # Read the document and its metadata from stdin
    doc = pandocfilters.json.loads(STDIN.read())
    meta = doc[0]['unMeta']
    # Apply each filter action over the whole AST, in order
    altered = doc
    for action in (preprocess, replace_attrimages, replace_refs):
        altered = walk(altered, action, fmt, meta)
    # Dump the results and flush stdout
    pandocfilters.json.dump(altered, STDOUT)
    STDOUT.flush()
if __name__ == '__main__':
    main()
|
"""
Encapsulate the absence of an object by providing a substitutable
alternative that offers suitable default do nothing behavior.
"""
import abc
class AbstractObject(metaclass=abc.ABCMeta):
    """Interface expected by Client.

    Concrete collaborators (real or null) subclass this and supply
    their own ``request`` implementation; default behavior common to
    all implementations could live here as well.
    """

    @abc.abstractmethod
    def request(self):
        """Handle a request from Client; subclasses must override."""
        return None
class RealObject(AbstractObject):
    """Concrete AbstractObject whose instances provide the useful
    behavior Client expects.
    """

    def request(self):
        # Real behavior would go here; this example is a stub.
        return None
class NullObject(AbstractObject):
    """Do-nothing stand-in that can substitute for a real object.

    Exposes an interface identical to AbstractObject's so Client can
    call it unconditionally; every operation is a no-op.  What exactly
    "doing nothing" means depends on the behavior Client expects.
    """

    def request(self):
        # Intentionally a no-op.
        return None
|
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration for the ``blogs`` app.

    Re-points ``ModelFieldData.foreign`` from a self-reference
    (``blogs.ModelFieldData``) to ``blogs.ModelData``.  The ``models``
    attribute below is South's frozen snapshot of the ORM at the time
    this migration was generated — do not edit it by hand.
    """

    def forwards(self, orm):
        """Apply the migration: point 'foreign' at ModelData."""
        # Changing field 'ModelFieldData.foreign'
        db.alter_column('blogs_modelfielddata', 'foreign_id', self.gf('django.db.models.fields.related.ForeignKey')(null=True, to=orm['blogs.ModelData']))

    def backwards(self, orm):
        """Revert the migration: restore the self-referencing FK."""
        # Changing field 'ModelFieldData.foreign'
        db.alter_column('blogs_modelfielddata', 'foreign_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['blogs.ModelFieldData'], null=True))

    # Frozen ORM state (South). Keys are 'app.modelname'; values map each
    # field name to its (field class path, positional args, keyword args).
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'blogs.blog': {
            'Meta': {'object_name': 'Blog'},
            'analytics_account': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'contributors': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'blogcontributor'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['auth.User']"}),
            'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
            'custom_domain': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
            'draft_notice': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'exclusion_end': ('django.db.models.fields.CharField', [], {'max_length': '5', 'blank': 'True'}),
            'exclusion_start': ('django.db.models.fields.CharField', [], {'max_length': '5', 'blank': 'True'}),
            'facebook_link': ('django.db.models.fields.URLField', [], {'max_length': '100', 'blank': 'True'}),
            'fb_page_access_token': ('django.db.models.fields.CharField', [], {'max_length': '260', 'blank': 'True'}),
            'frequency': ('django.db.models.fields.CharField', [], {'max_length': '5', 'blank': 'True'}),
            'has_artists': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'has_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'header_image': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_bootblog': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_online': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_open': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'language': ('django.db.models.fields.CharField', [], {'max_length': '7', 'blank': 'True'}),
            'main_color': ('django.db.models.fields.CharField', [], {'default': "'#C4BDB2'", 'max_length': '10', 'blank': 'True'}),
            'moderator_email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '140', 'blank': 'True'}),
            'pinterest_link': ('django.db.models.fields.URLField', [], {'max_length': '100', 'blank': 'True'}),
            'short_description': ('django.db.models.fields.CharField', [], {'max_length': '140', 'blank': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '30'}),
            'template': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Template']", 'null': 'True', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '240'}),
            'translation': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Blog']", 'null': 'True', 'blank': 'True'}),
            'twitter_link': ('django.db.models.fields.URLField', [], {'max_length': '100', 'blank': 'True'}),
            'twitter_oauth_token': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'twitter_oauth_token_secret': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
        },
        'blogs.category': {
            'Meta': {'object_name': 'Category'},
            'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
            'blog': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'Blog_category'", 'null': 'True', 'to': "orm['blogs.Blog']"}),
            'cat_image': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'cat_image_caret': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'cat_image_close': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'cat_image_email': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'cat_image_fb': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'cat_image_left': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'cat_image_pint': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'cat_image_right': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'cat_image_tw': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'color': ('django.db.models.fields.CharField', [], {'default': "'#000000'", 'max_length': '10'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '140', 'null': 'True', 'blank': 'True'}),
            'parent_category': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'child_category'", 'null': 'True', 'to': "orm['blogs.Category']"}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '140', 'blank': 'True'})
        },
        'blogs.comment': {
            'Meta': {'object_name': 'Comment'},
            'author': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'Comment_author'", 'null': 'True', 'to': "orm['auth.User']"}),
            'blog': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Blog']", 'null': 'True'}),
            'comment': ('django.db.models.fields.TextField', [], {'max_length': '10000'}),
            'comment_status': ('django.db.models.fields.CharField', [], {'default': "'pe'", 'max_length': '2'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '140'}),
            'notify_me': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'occupation': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'post': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Post']", 'null': 'True'}),
            'website': ('django.db.models.fields.URLField', [], {'max_length': '300', 'blank': 'True'})
        },
        'blogs.info_email': {
            'Meta': {'object_name': 'Info_email'},
            'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
            'blog': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Blog']", 'null': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'frequency': ('django.db.models.fields.CharField', [], {'default': "'We'", 'max_length': '2', 'null': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'message': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '140', 'blank': 'True'}),
            'status': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
            'subject': ('django.db.models.fields.TextField', [], {'max_length': '100', 'blank': 'True'}),
            'subscribers': ('django.db.models.fields.CharField', [], {'default': "'A'", 'max_length': '2', 'null': 'True'})
        },
        'blogs.language': {
            'Meta': {'object_name': 'Language'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'language_code': ('django.db.models.fields.CharField', [], {'max_length': '5'}),
            'language_name': ('django.db.models.fields.CharField', [], {'max_length': '40'})
        },
        'blogs.menu': {
            'Meta': {'object_name': 'Menu'},
            'blog': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'Blog_menu'", 'null': 'True', 'to': "orm['blogs.Blog']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_main': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '40'})
        },
        'blogs.menuitem': {
            'Meta': {'object_name': 'MenuItem'},
            'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Category']", 'null': 'True', 'blank': 'True'}),
            'external_link': ('django.db.models.fields.URLField', [], {'max_length': '140', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'menu': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Menu']", 'null': 'True'}),
            'page': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Page']", 'null': 'True', 'blank': 'True'}),
            'position': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '40', 'blank': 'True'})
        },
        'blogs.model': {
            'Meta': {'object_name': 'Model'},
            'blog': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'Custom_post'", 'null': 'True', 'to': "orm['blogs.Blog']"}),
            'hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '60'})
        },
        'blogs.modeldata': {
            'Meta': {'object_name': 'ModelData'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Model']", 'null': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'default': "'Unknown'", 'max_length': '140'})
        },
        'blogs.modelfield': {
            'Meta': {'object_name': 'ModelField'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Model']", 'null': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '140', 'null': 'True', 'blank': 'True'}),
            'post_type': ('django.db.models.fields.CharField', [], {'default': "'Text'", 'max_length': '40'}),
            'rank': ('django.db.models.fields.CharField', [], {'default': "'1'", 'max_length': '2'})
        },
        'blogs.modelfielddata': {
            'Meta': {'object_name': 'ModelFieldData'},
            'date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '100', 'blank': 'True'}),
            'foreign': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'foreign'", 'null': 'True', 'to': "orm['blogs.ModelData']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'longtext': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'model': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Model']", 'null': 'True'}),
            'model_data': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.ModelData']", 'null': 'True'}),
            'model_field': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.ModelField']", 'null': 'True'}),
            'nullboolean': ('django.db.models.fields.NullBooleanField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
            'onetofive': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
            'positiveinteger': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
            'price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2', 'blank': 'True'}),
            'relation': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'relation'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['blogs.ModelData']"}),
            'text': ('django.db.models.fields.CharField', [], {'max_length': '140', 'blank': 'True'}),
            'url': ('django.db.models.fields.URLField', [], {'max_length': '140', 'blank': 'True'})
        },
        'blogs.page': {
            'Meta': {'object_name': 'Page'},
            'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
            'blog': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'Blog_page'", 'null': 'True', 'to': "orm['blogs.Blog']"}),
            'content': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
            'pub_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '140', 'blank': 'True'}),
            'status': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '140', 'blank': 'True'})
        },
        'blogs.post': {
            'Meta': {'object_name': 'Post'},
            'artist': ('django.db.models.fields.CharField', [], {'max_length': '140', 'blank': 'True'}),
            'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
            'base62id': ('django.db.models.fields.CharField', [], {'max_length': '140', 'blank': 'True'}),
            'blog': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Blog']", 'null': 'True'}),
            'category': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['blogs.Category']", 'null': 'True', 'blank': 'True'}),
            'content': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
            'content_0': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
            'content_01': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
            'content_1': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
            'content_2': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
            'content_3': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
            'content_4': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
            'content_5': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
            'content_6': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
            'content_video': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'custom_post': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Model']", 'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_discarded': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_ready': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_sticky': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_top': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'karma': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
            'layout_type': ('django.db.models.fields.CharField', [], {'default': "'s'", 'max_length': '1'}),
            'message': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
            'pic': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'pic_0': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'pic_04': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'pic_1': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'pic_10': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'pic_11': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'pic_12': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'pic_13': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'pic_14': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'pic_15': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'pic_16': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'pic_17': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'pic_18': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'pic_19': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'pic_2': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'pic_20': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'pic_21': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'pic_22': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'pic_23': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'pic_24': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'pic_25': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'pic_26': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'pic_27': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'pic_28': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'pic_29': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'pic_3': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'pic_30': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'pic_31': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'pic_32': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'pic_33': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'pic_4': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'pic_5': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'pic_6': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'pic_7': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'pic_8': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'pic_9': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'pub_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
            'publish_on_facebook': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '140', 'blank': 'True'}),
            'soundcloud_id': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
            'soundcloud_url': ('django.db.models.fields.URLField', [], {'max_length': '300', 'blank': 'True'}),
            'source': ('django.db.models.fields.URLField', [], {'max_length': '300', 'blank': 'True'}),
            'status': ('django.db.models.fields.CharField', [], {'default': "'P'", 'max_length': '2', 'null': 'True'}),
            'tag': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['blogs.Tag']", 'null': 'True', 'blank': 'True'}),
            'temp_tag_field': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'blank': 'True'}),
            'text': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '140', 'blank': 'True'}),
            'translated_content': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
            'translated_title': ('django.db.models.fields.CharField', [], {'max_length': '140', 'blank': 'True'}),
            'video': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'video_ogg': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'video_url': ('django.db.models.fields.URLField', [], {'max_length': '300', 'blank': 'True'}),
            'views': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
            'vimeo_id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'vimeo_thumb_url': ('django.db.models.fields.URLField', [], {'max_length': '300', 'blank': 'True'}),
            'youtube_id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'})
        },
        'blogs.rss': {
            'Meta': {'object_name': 'Rss'},
            'blog': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'Blog_rss'", 'null': 'True', 'to': "orm['blogs.Blog']"}),
            'feed_url': ('django.db.models.fields.URLField', [], {'max_length': '300', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        },
        'blogs.subscription': {
            'Meta': {'object_name': 'Subscription'},
            'blog': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Blog']", 'null': 'True'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_new': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
        },
        'blogs.subuser': {
            'Meta': {'object_name': 'Subuser'},
            'blog': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'Blog_user'", 'null': 'True', 'to': "orm['blogs.Blog']"}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40'})
        },
        'blogs.tag': {
            'Meta': {'object_name': 'Tag'},
            'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
            'blog': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'Blog_tag'", 'null': 'True', 'to': "orm['blogs.Blog']"}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '140', 'null': 'True', 'blank': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '140', 'blank': 'True'})
        },
        'blogs.template': {
            'Meta': {'object_name': 'Template'},
            'archives': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
            'base': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
            'category': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
            'single': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'})
        },
        'blogs.translation': {
            'Meta': {'object_name': 'Translation'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
            'origin_blog': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'Translation_origin_blog'", 'null': 'True', 'to': "orm['blogs.Blog']"}),
            'translated_blog': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'Translation_translated_blog'", 'null': 'True', 'to': "orm['blogs.Blog']"})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        }
    }

    # Apps whose frozen models above are considered complete by South
    complete_apps = ['blogs']
|
import filecmp
from transfert import Resource
from transfert.actions import copy
def estimate_nb_cycles(len_data, chunk_size):
    """Return how many chunks of ``chunk_size`` bytes cover ``len_data`` bytes.

    This is plain ceiling division, written explicitly instead of the
    original ``(len_data // chunk_size) + [0, 1][remainder > 0]``
    list-indexing trick, which obscured the intent.
    """
    return (len_data + chunk_size - 1) // chunk_size
def test_simple_local_copy(tmpdir):
    """Copying one local file to another yields an identical file."""
    source = tmpdir.join('alpha')
    destination = tmpdir.join('beta')
    source.write('some data')
    # Precondition: only the source exists
    assert source.check()
    assert not destination.check()
    copy(Resource('file://' + source.strpath),
         Resource('file://' + destination.strpath))
    # Both files now exist with identical contents
    assert source.check()
    assert destination.check()
    assert filecmp.cmp(source.strpath, destination.strpath)
def test_simple_local_copy_with_callback(tmpdir):
    """The copy callback fires once per chunk, for several chunk sizes.

    The original test duplicated the whole write/copy/assert sequence
    for each chunk size; the loop below keeps a single copy of the
    logic while performing the exact same runs (chunk sizes 1 and 2).
    """
    count = 0

    def wrapper(size):
        # Invoked by copy() after every chunk; count the invocations.
        nonlocal count
        count += 1

    src = tmpdir.join('alpha')
    dst = tmpdir.join('beta')
    data = b'some data'
    src.write(data)
    for chunk_size in (1, 2):
        count = 0
        if dst.check():
            # Remove the destination left over from the previous run
            dst.remove()
        assert src.check()
        assert not dst.check()
        copy(Resource('file://' + src.strpath),
             Resource('file://' + dst.strpath,),
             size=chunk_size,
             callback_freq=1,
             callback=wrapper)
        assert src.check()
        assert dst.check()
        assert filecmp.cmp(src.strpath, dst.strpath)
        assert count == estimate_nb_cycles(len(data), chunk_size)
|
import json
from django.db.models import Q, Subquery
from django.core.management.base import BaseCommand
from readthedocs.oauth.models import RemoteRepository
from readthedocs.oauth.services import registry
from readthedocs.oauth.services.base import SyncServiceError
from readthedocs.projects.models import Project
from readthedocs.organizations.models import Organization
class Command(BaseCommand):
    """Link ``RemoteRepository`` rows to the ``Project``s of an organization.

    Matching is done by repository URL (ssh or clone URL against
    ``Project.repo``).  A repository is only connected when its first
    associated user is an admin of the remote repository and a member of
    the organization.  Runs as a dry run unless ``--no-dry-run`` is given.
    """
    help = "Re-connect RemoteRepository to Project"
    def add_arguments(self, parser):
        """Register positional organization slugs and behaviour flags."""
        parser.add_argument('organization', nargs='+', type=str)
        parser.add_argument(
            '--no-dry-run',
            action='store_true',
            default=False,
            help='Update database with the changes proposed.',
        )
        # If owners does not have their RemoteRepository synced, it could
        # happen we don't find a matching Project (see --force-owners-social-resync)
        parser.add_argument(
            '--only-owners',
            action='store_true',
            default=False,
            help='Connect repositories only to organization owners.',
        )
        parser.add_argument(
            '--force-owners-social-resync',
            action='store_true',
            default=False,
            help='Force to re-sync RemoteRepository for organization owners.',
        )
    def _force_owners_social_resync(self, organization):
        """Re-sync remote repositories for every owner of *organization*.

        A failing provider service is reported and skipped so one broken
        integration does not abort the whole resync.
        """
        for owner in organization.owners.all():
            for service_cls in registry:
                for service in service_cls.for_user(owner):
                    try:
                        service.sync()
                    except SyncServiceError:
                        print(f'Service {service} failed while syncing. Skipping...')
    def _connect_repositories(self, organization, no_dry_run, only_owners):
        """Propose (and with *no_dry_run* persist) RemoteRepository->Project links."""
        # Slugs already matched during this run, so a later remote cannot
        # claim a project that was just connected.
        connected_projects = []
        # TODO: consider using same login than RemoteRepository.matches method
        # https://github.com/readthedocs/readthedocs.org/blob/49b03f298b6105d755554f7dc7e97a3398f7066f/readthedocs/oauth/models.py#L185-L194
        remote_query = (
            Q(ssh_url__in=Subquery(organization.projects.values('repo'))) |
            Q(clone_url__in=Subquery(organization.projects.values('repo')))
        )
        for remote in RemoteRepository.objects.filter(remote_query).order_by('created'):
            # NOTE(review): assumes ``remote.json`` is the raw provider payload
            # containing a 'permissions' mapping — confirm against the OAuth services.
            admin = json.loads(remote.json).get('permissions', {}).get('admin')
            if only_owners and remote.users.first() not in organization.owners.all():
                # Do not connect a RemoteRepository if the User is not owner of the organization
                continue
            if not admin:
                # Do not connect a RemoteRepository where the User is not admin of the repository
                continue
            # NOTE(review): ``remote.users.first()`` may be None when a remote has
            # no linked users, which would raise AttributeError here — confirm.
            if not organization.users.filter(username=remote.users.first().username).exists():
                # Do not connect a RemoteRepository if the use does not belong to the organization
                continue
            # Projects matching
            # - RemoteRepository URL
            # - are under the Organization
            # - not connected to a RemoteRepository already
            # - was not connected previously by this call to the script
            projects = Project.objects.filter(
                Q(repo=remote.ssh_url) | Q(repo=remote.clone_url),
                organizations__in=[organization.pk],
                remote_repository__isnull=True
            ).exclude(slug__in=connected_projects)
            for project in projects:
                connected_projects.append(project.slug)
                if no_dry_run:
                    remote.project = project
                    remote.save()
                print(f'{project.slug: <40} {remote.pk: <10} {remote.html_url: <60} {remote.users.first().username: <20} {admin: <5}') # noqa
        print('Total:', len(connected_projects))
        if not no_dry_run:
            print(
                'Changes WERE NOT applied to the database. '
                'Run it with --no-dry-run to save the changes.'
            )
    def handle(self, *args, **options):
        """Entry point: resolve each organization slug and connect its repositories."""
        no_dry_run = options.get('no_dry_run')
        only_owners = options.get('only_owners')
        force_owners_social_resync = options.get('force_owners_social_resync')
        for organization in options.get('organization'):
            try:
                organization = Organization.objects.get(slug=organization)
                if force_owners_social_resync:
                    self._force_owners_social_resync(organization)
                self._connect_repositories(organization, no_dry_run, only_owners)
            except Organization.DoesNotExist:
                print(f'Organization does not exist. organization={organization}')
|
'''
@author: lockrecv@gmail.com
A pure python ping implementation using raw socket.
Note that ICMP messages can only be sent from processes running as root.
Inspired by Matthew Dixon Cowles <http://www.visi.com/~mdc/>.
'''
import os
import select
import socket
import struct
import time
class Ping:
''' Power On State Pint Utility (3rdparty)'''
def __init__(self):
self.ICMP_ECHO_REQUEST = 8
def checksum(self, source_string):
summ = 0
count_to = (len(source_string)/2)*2
for count in xrange(0, count_to, 2):
this = ord(source_string[count+1]) * 256 + ord(source_string[count])
summ = summ + this
summ = summ & 0xffffffff
if count_to < len(source_string):
summ = summ + ord(source_string[len(source_string)-1])
summ = summ & 0xffffffff
summ = (summ >> 16) + (summ & 0xffff)
summ = summ + (summ >> 16)
answer = ~summ
answer = answer & 0xffff
# Swap bytes
answer = answer >> 8 | (answer << 8 & 0xff00)
return answer
def receive_one_ping(self, my_socket, idd, timeout):
'''Receive the ping from the socket'''
time_left = timeout
while True:
started_select = time.time()
what_ready = select.select([my_socket], [], [], time_left)
how_long_in_select = (time.time() - started_select)
if what_ready[0] == []: # Timeout
return
time_received = time.time()
received_packet, addr = my_socket.recvfrom(1024)
icmpHeader = received_packet[20:28]
type, code, checksum, packet_id, sequence = struct.unpack("bbHHh", icmpHeader)
if packet_id == idd:
bytess = struct.calcsize("d")
time_sent = struct.unpack("d", received_packet[28:28 + bytess])[0]
return time_received - time_sent
time_left = time_left - how_long_in_select
if time_left <= 0:
return
def send_one_ping(self, my_socket, dest_addr, idd, psize):
'''Send one ping to the given address'''
dest_addr = socket.gethostbyname(dest_addr)
# Remove header size from packet size
psize = psize - 8
# Header is type (8), code (8), checksum (16), id (16), sequence (16)
my_checksum = 0
# Make a dummy header with a 0 checksum
header = struct.pack("bbHHh", self.ICMP_ECHO_REQUEST, 0, my_checksum, idd, 1)
bytess = struct.calcsize("d")
data = (psize - bytess) * "Q"
data = struct.pack("d", time.time()) + data
# Calculate the checksum on the data and the dummy header
my_checksum = self.checksum(header+data)
# Now that we have the right checksum, we put that in. It's just easier
# to make up a new header than to stuff it into the dummy
header = struct.pack("bbHHh", self.ICMP_ECHO_REQUEST, 0, socket.htons(my_checksum), idd, 1)
packet = header + data
my_socket.sendto(packet, (dest_addr, 1)) # Don't know about the 1
def do_one(self, dest_addr, timeout, psize):
'''Returns either the delay (in seconds) or none on timeout'''
icmp = socket.getprotobyname("icmp")
try:
my_socket = socket.socket(socket.AF_INET, socket.SOCK_RAW, icmp)
except socket.errno, (errno, msg):
if errno == 1:
# Operation not permitted
msg = msg + (
" - Note that ICMP messages can only be sent from processes"
" running as root."
)
raise socket.error(msg)
my_id = os.getpid() & 0xFFFF
self.send_one_ping(my_socket, dest_addr, my_id, psize)
delay = self.receive_one_ping(my_socket, my_id, timeout)
my_socket.close()
return delay
def verbose_ping(self, dest_addr, timeout = 2, count = 4, psize = 64):
'''
Send 'count' ping with 'psize' size to 'dest_addr' with
the given 'timeout' and display the result
'''
for i in xrange(count):
print 'ping %s with ...' % dest_addr
try:
delay = self.do_one(dest_addr, timeout, psize)
except socket.gaierror, e:
print 'FAILED. (socket error: "%s")' % e[1]
break
if delay == None:
print 'FAILED. (timeout within %ssec.)' % timeout
else:
delay = delay * 1000
print 'get ping in %0.4fms' % delay
print
def quiet_ping(self, dest_addr, timeout = 2, count = 4, psize = 64):
'''
Send 'count' pint with 'psize' size to 'dest_addr' with
the given 'timeout' and display the result.
Returns 'percent' lost packages, 'max' round trip time
and 'avg' round trip time.
'''
mrtt = None
artt = None
plist = []
for i in xrange(count):
try:
delay = self.do_one(dest_addr, timeout, psize)
except socket.gaierror, e:
print 'FAILED. (socket error: "%s")' % e[1]
break
if delay != None:
delay = delay * 1000
plist.append(delay)
# Find lost package percent
percent_lost = 100 - (len(plist)*100/count)
# Find max and avg round trip time
if plist:
mrtt = max(plist)
artt = sum(plist)/len(plist)
return percent_lost, mrtt, artt
|
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class PrivateLinkResourcesOperations:
    """PrivateLinkResourcesOperations async operations.
    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.
    :ivar models: Alias to model classes used in this operation group.
    :type models: ~device_update.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    models = _models
    def __init__(self, client, config, serializer, deserializer) -> None:
        # Injected by the generated service client: HTTP pipeline client,
        # msrest (de)serializers and the client configuration.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    def list_by_account(
        self,
        resource_group_name: str,
        account_name: str,
        **kwargs: Any
    ) -> AsyncIterable["_models.PrivateLinkResourceListResult"]:
        """List all private link resources in a device update account.
        :param resource_group_name: The resource group name.
        :type resource_group_name: str
        :param account_name: Account name.
        :type account_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either PrivateLinkResourceListResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~device_update.models.PrivateLinkResourceListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateLinkResourceListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-03-01-preview"
        accept = "application/json"
        def prepare_request(next_link=None):
            # Build the page GET request; a continuation link already embeds
            # the query string, so only headers are re-applied in that case.
            # Construct headers
            header_parameters = {} # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # Construct URL
                url = self.list_by_account.metadata['url'] # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3, pattern=r'^[A-Za-z0-9]+(-[A-Za-z0-9]+)*$'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {} # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {} # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        async def extract_data(pipeline_response):
            # Deserialize one page into (continuation_token, items).
            # NOTE(review): the continuation token is always returned as None,
            # so iteration stops after the first page — confirm this listing
            # is never paged by the service.
            deserialized = self._deserialize('PrivateLinkResourceListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return None, AsyncList(list_of_elem)
        async def get_next(next_link=None):
            # Run one page request through the pipeline and map non-200
            # responses to typed ARM errors.
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
            return pipeline_response
        return AsyncItemPaged(
            get_next, extract_data
        )
    list_by_account.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DeviceUpdate/accounts/{accountName}/privateLinkResources'} # type: ignore
    async def get(
        self,
        resource_group_name: str,
        account_name: str,
        group_id: str,
        **kwargs: Any
    ) -> "_models.GroupInformation":
        """Get the specified private link resource associated with the device update account.
        :param resource_group_name: The resource group name.
        :type resource_group_name: str
        :param account_name: Account name.
        :type account_name: str
        :param group_id: The group ID of the private link resource.
        :type group_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: GroupInformation, or the result of cls(response)
        :rtype: ~device_update.models.GroupInformation
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.GroupInformation"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-03-01-preview"
        accept = "application/json"
        # Construct URL
        url = self.get.metadata['url'] # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3, pattern=r'^[A-Za-z0-9]+(-[A-Za-z0-9]+)*$'),
            'groupId': self._serialize.url("group_id", group_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {} # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            # Map well-known status codes to typed exceptions, otherwise raise
            # a generic HttpResponseError carrying the deserialized error body.
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        deserialized = self._deserialize('GroupInformation', pipeline_response)
        if cls:
            # Allow callers to post-process the raw response via the cls hook.
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DeviceUpdate/accounts/{accountName}/privateLinkResources/{groupId}'} # type: ignore
|
import sys
from osgeo import gdal
from osgeo import ogr
# Module-level option state shared between main() and the report helpers
# (mirrors the globals of the C ogrinfo utility).
bReadOnly = False  # open the datasource read-only (-ro)
bVerbose = True  # full reporting unless -q/-quiet was given
bSummaryOnly = False  # -so/-summary: schema only, no feature dump
nFetchFID = ogr.NullFID  # specific feature id requested via -fid
papszOptions = None  # declared global in main() but not otherwise referenced here
def EQUAL(a, b):
    """Case-insensitive string comparison (Python port of GDAL's C EQUAL macro)."""
    left = a.lower()
    right = b.lower()
    return left == right
def main(argv = None):
    '''ogrinfo-style entry point: parse the command line, open the
    datasource and either execute a -sql statement or report on the
    selected layers.

    Returns 0 for --utility_version, 1 on usage/open errors, otherwise the
    list of layer names found (a local modification to the stock utility,
    which returns an exit code).
    '''
    global bReadOnly
    global bVerbose
    global bSummaryOnly
    global nFetchFID
    global papszOptions
    # Layer names collected for the caller (added by hep-ml).
    layerlist=[]
    pszWHERE = None
    pszDataSource = None
    papszLayers = None
    poSpatialFilter = None
    nRepeatCount = 1
    bAllLayers = False
    pszSQLStatement = None
    pszDialect = None
    options = {}
    pszGeomField = None
    if argv is None:
        argv = sys.argv
    argv = ogr.GeneralCmdLineProcessor( argv )
    if argv is None:
        return 1
    nArgc = len(argv)
    iArg = 1
    # Manual argument scan mirroring the C ogrinfo utility.
    while iArg < nArgc:
        if EQUAL(argv[iArg],"--utility_version"):
            print("%s is running against GDAL %s" %
                  (argv[0], gdal.VersionInfo("RELEASE_NAME")))
            return 0
        elif EQUAL(argv[iArg],"-ro"):
            bReadOnly = True
        elif EQUAL(argv[iArg],"-q") or EQUAL(argv[iArg],"-quiet"):
            bVerbose = False
        elif EQUAL(argv[iArg],"-fid") and iArg < nArgc-1:
            iArg = iArg + 1
            nFetchFID = int(argv[iArg])
        elif EQUAL(argv[iArg],"-spat") and iArg + 4 < nArgc:
            # Build a closed rectangular ring from xmin ymin xmax ymax.
            oRing = ogr.Geometry(ogr.wkbLinearRing)
            oRing.AddPoint( float(argv[iArg+1]), float(argv[iArg+2]) )
            oRing.AddPoint( float(argv[iArg+1]), float(argv[iArg+4]) )
            oRing.AddPoint( float(argv[iArg+3]), float(argv[iArg+4]) )
            oRing.AddPoint( float(argv[iArg+3]), float(argv[iArg+2]) )
            oRing.AddPoint( float(argv[iArg+1]), float(argv[iArg+2]) )
            poSpatialFilter = ogr.Geometry(ogr.wkbPolygon)
            poSpatialFilter.AddGeometry(oRing)
            iArg = iArg + 4
        elif EQUAL(argv[iArg],"-geomfield") and iArg < nArgc-1:
            iArg = iArg + 1
            pszGeomField = argv[iArg]
        elif EQUAL(argv[iArg],"-where") and iArg < nArgc-1:
            iArg = iArg + 1
            pszWHERE = argv[iArg]
        elif EQUAL(argv[iArg],"-sql") and iArg < nArgc-1:
            iArg = iArg + 1
            pszSQLStatement = argv[iArg]
        elif EQUAL(argv[iArg],"-dialect") and iArg < nArgc-1:
            iArg = iArg + 1
            pszDialect = argv[iArg]
        elif EQUAL(argv[iArg],"-rc") and iArg < nArgc-1:
            iArg = iArg + 1
            nRepeatCount = int(argv[iArg])
        elif EQUAL(argv[iArg],"-al"):
            bAllLayers = True
        elif EQUAL(argv[iArg],"-so") or EQUAL(argv[iArg],"-summary"):
            bSummaryOnly = True
        elif len(argv[iArg]) > 8 and EQUAL(argv[iArg][0:8],"-fields="):
            # BUGFIX: "-fields=" is 8 characters; slicing from index 7 kept
            # the '=' in the option value (e.g. "=YES"), so the later
            # EQUAL(..., 'yes') checks never matched.
            options['DISPLAY_FIELDS'] = argv[iArg][8:len(argv[iArg])]
        elif len(argv[iArg]) > 6 and EQUAL(argv[iArg][0:6],"-geom="):
            options['DISPLAY_GEOMETRY'] = argv[iArg][6:len(argv[iArg])]
        elif argv[iArg][0] == '-':
            return Usage()
        elif pszDataSource is None:
            pszDataSource = argv[iArg]
        else:
            # Positional arguments after the datasource are layer names.
            if papszLayers is None:
                papszLayers = []
            papszLayers.append( argv[iArg] )
            bAllLayers = False
        iArg = iArg + 1
    if pszDataSource is None:
        return Usage()
    poDS = None
    poDriver = None
    # Try update mode first (unless -ro), then fall back to read-only.
    poDS = ogr.Open( pszDataSource, not bReadOnly )
    if poDS is None and not bReadOnly:
        poDS = ogr.Open( pszDataSource, False )
        if poDS is not None and bVerbose:
            #print( "Had to open data source read-only." )
            bReadOnly = True
    if poDS is None:
        print( "FAILURE:\n"
               "Unable to open datasource `%s' with the following drivers." % pszDataSource )
        for iDriver in range(ogr.GetDriverCount()):
            print( "  -> %s" % ogr.GetDriver(iDriver).GetName() )
        return 1
    poDriver = poDS.GetDriver()
    if bVerbose:
        print( "INFO: Open of `%s'\n"
               "      using driver `%s' successful." % (pszDataSource, poDriver.GetName()) )
    poDS_Name = poDS.GetName()
    # Python 2 only: normalise a unicode user path against a str datasource name.
    if str(type(pszDataSource)) == "<type 'unicode'>" and str(type(poDS_Name)) == "<type 'str'>":
        poDS_Name = poDS_Name.decode("utf8")
    if bVerbose and pszDataSource != poDS_Name:
        print( "INFO: Internal data source name `%s'\n"
               "      different from user name `%s'." % (poDS_Name, pszDataSource ))
    if pszSQLStatement is not None:
        poResultSet = None
        nRepeatCount = 0 #// skip layer reporting.
        if papszLayers is not None:
            print( "layer names ignored in combination with -sql." )
        # Without -geomfield the spatial filter is applied by ExecuteSQL itself;
        # otherwise it is applied per-field by ReportOnLayer below.
        if pszGeomField is None:
            poResultSet = poDS.ExecuteSQL( pszSQLStatement, poSpatialFilter,
                                           pszDialect )
        else:
            poResultSet = poDS.ExecuteSQL( pszSQLStatement, None, pszDialect )
        if poResultSet is not None:
            if pszWHERE is not None:
                if poResultSet.SetAttributeFilter( pszWHERE ) != 0:
                    print("FAILURE: SetAttributeFilter(%s) failed." % pszWHERE)
                    return 1
            if pszGeomField is not None:
                ReportOnLayer( poResultSet, None, pszGeomField, poSpatialFilter, options )
            else:
                ReportOnLayer( poResultSet, None, None, None, options )
            poDS.ReleaseResultSet( poResultSet )
    #gdal.Debug( "OGR", "GetLayerCount() = %d\n", poDS.GetLayerCount() )
    for iRepeat in range(nRepeatCount):
        if papszLayers is None:
            # No explicit layer list: walk every layer in the datasource.
            for iLayer in range(poDS.GetLayerCount()):
                poLayer = poDS.GetLayer(iLayer)
                if poLayer is None:
                    print( "FAILURE: Couldn't fetch advertised layer %d!" % iLayer )
                    return 1
                if not bAllLayers:
                    # Summary line only: index, name and geometry type(s).
                    line = "%d: %s" % (iLayer+1, poLayer.GetLayerDefn().GetName())
                    # added by hep-ml
                    layerlist.append(poLayer.GetLayerDefn().GetName())
                    nGeomFieldCount = poLayer.GetLayerDefn().GetGeomFieldCount()
                    if nGeomFieldCount > 1:
                        line = line + " ("
                        for iGeom in range(nGeomFieldCount):
                            if iGeom > 0:
                                line = line + ", "
                            poGFldDefn = poLayer.GetLayerDefn().GetGeomFieldDefn(iGeom)
                            line = line + "%s" % ogr.GeometryTypeToName( poGFldDefn.GetType() )
                        line = line + ")"
                    if poLayer.GetLayerDefn().GetGeomType() != ogr.wkbUnknown:
                        line = line + " (%s)" % ogr.GeometryTypeToName( poLayer.GetLayerDefn().GetGeomType() )
                    print(line)
                else:
                    if iRepeat != 0:
                        poLayer.ResetReading()
                    ReportOnLayer( poLayer, pszWHERE, pszGeomField, poSpatialFilter, options )
        else:
            for papszIter in papszLayers:
                poLayer = poDS.GetLayerByName(papszIter)
                if poLayer is None:
                    print( "FAILURE: Couldn't fetch requested layer %s!" % papszIter )
                    return 1
                if iRepeat != 0:
                    poLayer.ResetReading()
                ReportOnLayer( poLayer, pszWHERE, pszGeomField, poSpatialFilter, options )
    poDS.Destroy()
    return layerlist
def Usage():
    '''Print the ogrinfo command-line synopsis and return the error exit code 1.'''
    print( "Usage: ogrinfo [--help-general] [-ro] [-q] [-where restricted_where]\n"
           "       [-spat xmin ymin xmax ymax] [-geomfield field] [-fid fid]\n"
           "       [-sql statement] [-al] [-so] [-fields={YES/NO}]\n"
           "       [-geom={YES/NO/SUMMARY}][--formats]\n"
           "       datasource_name [layer [layer ...]]")
    return 1
def ReportOnLayer( poLayer, pszWHERE, pszGeomField, poSpatialFilter, options ):
    '''Print a full report for one layer: schema, feature count, extent,
    SRS and (depending on the bSummaryOnly/nFetchFID globals) its features.

    pszWHERE        - optional attribute filter expression
    pszGeomField    - optional geometry field the spatial filter applies to
    poSpatialFilter - optional filter geometry
    options         - DISPLAY_* flags forwarded to the feature dumper
    '''
    poDefn = poLayer.GetLayerDefn()
    if pszWHERE is not None:
        if poLayer.SetAttributeFilter( pszWHERE ) != 0:
            print("FAILURE: SetAttributeFilter(%s) failed." % pszWHERE)
            return
    if poSpatialFilter is not None:
        if pszGeomField is not None:
            # Apply the spatial filter to the named geometry field, if present.
            iGeomField = poLayer.GetLayerDefn().GetGeomFieldIndex(pszGeomField)
            if iGeomField >= 0:
                poLayer.SetSpatialFilter( iGeomField, poSpatialFilter )
            else:
                print("WARNING: Cannot find geometry field %s." % pszGeomField)
        else:
            poLayer.SetSpatialFilter( poSpatialFilter )
    print( "" )
    print( "Layer name: %s" % poDefn.GetName() )
    if bVerbose:
        # Layers may carry several geometry columns; report each separately.
        nGeomFieldCount = poLayer.GetLayerDefn().GetGeomFieldCount()
        if nGeomFieldCount > 1:
            for iGeom in range(nGeomFieldCount):
                poGFldDefn = poLayer.GetLayerDefn().GetGeomFieldDefn(iGeom)
                print( "Geometry (%s): %s" % (poGFldDefn.GetNameRef(), ogr.GeometryTypeToName( poGFldDefn.GetType() ) ))
        else:
            print( "Geometry: %s" % ogr.GeometryTypeToName( poDefn.GetGeomType() ) )
        print( "Feature Count: %d" % poLayer.GetFeatureCount() )
        if nGeomFieldCount > 1:
            for iGeom in range(nGeomFieldCount):
                poGFldDefn = poLayer.GetLayerDefn().GetGeomFieldDefn(iGeom)
                oExt = poLayer.GetExtent(True, geom_field = iGeom, can_return_null = True)
                if oExt is not None:
                    print("Extent (%s): (%f, %f) - (%f, %f)" % (poGFldDefn.GetNameRef(), oExt[0], oExt[2], oExt[1], oExt[3]))
        else:
            oExt = poLayer.GetExtent(True, can_return_null = True)
            if oExt is not None:
                print("Extent: (%f, %f) - (%f, %f)" % (oExt[0], oExt[2], oExt[1], oExt[3]))
        if nGeomFieldCount > 1:
            for iGeom in range(nGeomFieldCount):
                poGFldDefn = poLayer.GetLayerDefn().GetGeomFieldDefn(iGeom)
                if poGFldDefn.GetSpatialRef() is None:
                    pszWKT = "(unknown)"
                else:
                    pszWKT = poGFldDefn.GetSpatialRef().ExportToPrettyWkt()
                print( "SRS WKT (%s):\n%s" % (poGFldDefn.GetNameRef(), pszWKT) )
        else:
            if poLayer.GetSpatialRef() is None:
                pszWKT = "(unknown)"
            else:
                pszWKT = poLayer.GetSpatialRef().ExportToPrettyWkt()
            print( "Layer SRS WKT:\n%s" % pszWKT )
        if len(poLayer.GetFIDColumn()) > 0:
            print( "FID Column = %s" % poLayer.GetFIDColumn() )
        if nGeomFieldCount > 1:
            for iGeom in range(nGeomFieldCount):
                poGFldDefn = poLayer.GetLayerDefn().GetGeomFieldDefn(iGeom)
                print( "Geometry Column %d = %s" % (iGeom + 1, poGFldDefn.GetNameRef() ))
        else:
            if len(poLayer.GetGeometryColumn()) > 0:
                print( "Geometry Column = %s" % poLayer.GetGeometryColumn() )
        # Attribute schema: name, type and (width.precision) for each field.
        for iAttr in range(poDefn.GetFieldCount()):
            poField = poDefn.GetFieldDefn( iAttr )
            print( "%s: %s (%d.%d)" % ( \
                    poField.GetNameRef(), \
                    poField.GetFieldTypeName( poField.GetType() ), \
                    poField.GetWidth(), \
                    poField.GetPrecision() ))
    # Feature dump: every feature unless -so, or a single feature for -fid.
    poFeature = None
    if nFetchFID == ogr.NullFID and not bSummaryOnly:
        poFeature = poLayer.GetNextFeature()
        while poFeature is not None:
            DumpReadableFeature(poFeature, options)
            poFeature = poLayer.GetNextFeature()
    elif nFetchFID != ogr.NullFID:
        poFeature = poLayer.GetFeature( nFetchFID )
        if poFeature is None:
            print( "Unable to locate feature id %d on this layer." % nFetchFID )
        else:
            DumpReadableFeature(poFeature, options)
    return
def DumpReadableFeature( poFeature, options = None ):
    '''Print a human-readable dump of one OGR feature: FID, attribute
    fields, style string and geometry.

    options keys (all optional):
      DISPLAY_FIELDS   - 'yes'/'no': dump attribute fields (default yes)
      DISPLAY_STYLE    - 'yes'/'no': dump the style string (default yes)
      DISPLAY_GEOMETRY - 'yes'/'no'/'SUMMARY'/'WKT': geometry output mode
    '''
    # BUGFIX: with the default options=None the membership tests below
    # ("'DISPLAY_FIELDS' not in options") raised TypeError; normalise to {}.
    if options is None:
        options = {}
    poDefn = poFeature.GetDefnRef()
    print("OGRFeature(%s):%ld" % (poDefn.GetName(), poFeature.GetFID() ))
    if 'DISPLAY_FIELDS' not in options or EQUAL(options['DISPLAY_FIELDS'], 'yes'):
        for iField in range(poDefn.GetFieldCount()):
            poFDefn = poDefn.GetFieldDefn(iField)
            line = "  %s (%s) = " % ( \
                    poFDefn.GetNameRef(), \
                    ogr.GetFieldTypeName(poFDefn.GetType()) )
            if poFeature.IsFieldSet( iField ):
                try:
                    line = line + "%s" % (poFeature.GetFieldAsString( iField ) )
                except:
                    # For Python3 on non-UTF8 strings
                    line = line + "%s" % (poFeature.GetFieldAsBinary( iField ) )
            else:
                line = line + "(null)"
            print(line)
    if poFeature.GetStyleString() is not None:
        if 'DISPLAY_STYLE' not in options or EQUAL(options['DISPLAY_STYLE'], 'yes'):
            print("  Style = %s" % poFeature.GetStyleString() )
    nGeomFieldCount = poFeature.GetGeomFieldCount()
    if nGeomFieldCount > 0:
        if 'DISPLAY_GEOMETRY' not in options or not EQUAL(options['DISPLAY_GEOMETRY'], 'no'):
            for iField in range(nGeomFieldCount):
                poGFldDefn = poFeature.GetDefnRef().GetGeomFieldDefn(iField)
                poGeometry = poFeature.GetGeomFieldRef(iField)
                if poGeometry is not None:
                    sys.stdout.write("  ")
                    # Prefix the field name when the feature has several geometries.
                    if len(poGFldDefn.GetNameRef()) > 0 and nGeomFieldCount > 1:
                        sys.stdout.write("%s = " % poGFldDefn.GetNameRef() )
                    DumpReadableGeometry( poGeometry, "", options)
    print('')
    return
def DumpReadableGeometry( poGeometry, pszPrefix, options ):
    '''Print a geometry either as a short summary (point/ring/sub-geometry
    counts, when DISPLAY_GEOMETRY == 'SUMMARY') or as full WKT (default,
    or when DISPLAY_GEOMETRY is 'yes'/'WKT').  Recurses into collections.'''
    if pszPrefix == None:
        pszPrefix = ""
    if 'DISPLAY_GEOMETRY' in options and EQUAL(options['DISPLAY_GEOMETRY'], 'SUMMARY'):
        line = ("%s%s : " % (pszPrefix, poGeometry.GetGeometryName() ))
        eType = poGeometry.GetGeometryType()
        if eType == ogr.wkbLineString or eType == ogr.wkbLineString25D:
            line = line + ("%d points" % poGeometry.GetPointCount())
            print(line)
        elif eType == ogr.wkbPolygon or eType == ogr.wkbPolygon25D:
            # Outer-ring point count plus a summary of any inner rings.
            nRings = poGeometry.GetGeometryCount()
            if nRings == 0:
                line = line + "empty"
            else:
                poRing = poGeometry.GetGeometryRef(0)
                line = line + ("%d points" % poRing.GetPointCount())
                if nRings > 1:
                    line = line + (", %d inner rings (" % (nRings - 1))
                    for ir in range(0,nRings-1):
                        if ir > 0:
                            line = line + ", "
                        poRing = poGeometry.GetGeometryRef(ir+1)
                        line = line + ("%d points" % poRing.GetPointCount())
                    line = line + ")"
            print(line)
        elif eType == ogr.wkbMultiPoint or \
             eType == ogr.wkbMultiPoint25D or \
             eType == ogr.wkbMultiLineString or \
             eType == ogr.wkbMultiLineString25D or \
             eType == ogr.wkbMultiPolygon or \
             eType == ogr.wkbMultiPolygon25D or \
             eType == ogr.wkbGeometryCollection or \
             eType == ogr.wkbGeometryCollection25D:
            line = line + "%d geometries:" % poGeometry.GetGeometryCount()
            print(line)
            for ig in range(poGeometry.GetGeometryCount()):
                subgeom = poGeometry.GetGeometryRef(ig)
                # exec() lets the same source carry both Python 2 print-statement
                # and Python 3 print-function syntax for the no-newline indent.
                from sys import version_info
                if version_info >= (3,0,0):
                    exec('print("", end=" ")')
                else:
                    exec('print "", ')
                DumpReadableGeometry( subgeom, pszPrefix, options)
        else:
            # Other geometry types: just the name prefix built above.
            print(line)
    elif 'DISPLAY_GEOMETRY' not in options or EQUAL(options['DISPLAY_GEOMETRY'], 'yes') \
         or EQUAL(options['DISPLAY_GEOMETRY'], 'WKT'):
        print("%s%s" % (pszPrefix, poGeometry.ExportToWkt() ))
    return
if __name__ == '__main__':
    # Refuse to run on GDAL < 1.8.0: ogr.GetFieldTypeName (used by the
    # feature dumper) first appeared in 1.8.0 (VERSION_NUM 1800).
    version_num = int(gdal.VersionInfo('VERSION_NUM'))
    if version_num < 1800: # because of ogr.GetFieldTypeName
        print('ERROR: Python bindings of GDAL 1.8.0 or later required')
        sys.exit(1)
    sys.exit(main( sys.argv ))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.