text stringlengths 0 1.05M | meta dict |
|---|---|
"""AppAssure 5 REST API"""
from appassure.api import AppAssureAPI
class IReplicationCommunication(AppAssureAPI):
    """Full documentation online at
    http://docs.appassure.com/display/AA50D/IReplicationCommunication

    Thin wrapper over the Core replication REST endpoints; every method
    delegates to self.session.request() (GET unless stated otherwise).
    """

    def getConsumedSeedDrives(self, agentId):
        """Gets identifiers of seed drives consumed on the Core
        for specified agent.
        """
        return self.session.request('replicationcomms/consumedSeedDrives/%s'
                                    % (agentId))

    def getReplicatedAgents(self):
        """Gets the list of agents the caller is replicating to
        this slave core. A pairing must be in place, and this request
        must be authenticated by the master core's client certificate.
        """
        return self.session.request('replicationcomms/slave/agents')

    def getReplicatedAgentsStorageUsage(self):
        """Gets a summary of storage usage of the replicated
        agents.

        NOTE(review): this hits the same URL as getReplicatedAgents(),
        which looks like a copy/paste slip -- a dedicated storage-usage
        endpoint was probably intended. TODO confirm against the API docs.
        """
        return self.session.request('replicationcomms/slave/agents')

    def getRepositoryFreeSpaceForAgent(self, agentId):
        """Get free space for agent's remote repository."""
        return self.session.request('replicationcomms/slave/agents/%s'
                                    % (agentId))

    def deleteAgent(self, agentId):
        """Deletes a replicated agent from the slave core,
        including all of its recovery points.

        NOTE(review): issued with the default HTTP method on the same
        URL as getRepositoryFreeSpaceForAgent -- presumably the session
        layer distinguishes these; verify the intended verb.
        """
        return self.session.request('replicationcomms/slave/agents/%s'
                                    % (agentId))

    def startMetadataUpdate(self, data, agentId):
        """Starts metadata update for specified agent."""
        return self.session.request('replicationcomms/slave/agents/%s/metadataUpdate'
                                    % (agentId), 'POST',
                                    self.getXML(data, 'startMetadataUpdateRequest'))

    def cancelRemoteMetadataUpdate(self, agentId):
        """Cancels metadata update phase of replication job for
        replicated agent.
        """
        return self.session.request('replicationcomms/slave/agents/%s/metadataUpdate'
                                    % (agentId))

    def getMetadataUpdateProgress(self, agentId):
        """Gets status of the metadata update job initiated from
        master core.
        """
        return self.session.request('replicationcomms/slave/agents/%s/metadataUpdate/status'
                                    % (agentId))

    def startMetadataUpdateJob(self, data, agentId):
        """Starts metadata update job for specified agent."""
        return self.session.request('replicationcomms/slave/agents/%s/metadataUpdateJob'
                                    % (agentId), 'POST',
                                    self.getXML(data, 'startMetadataUpdateRequest'))

    def getBasicReplicatedVolumeImagesInfo(self, agentId):
        """Gets the details for all recovery points replicated
        for the given agent.
        """
        return self.session.request('replicationcomms/slave/agents/%s/replicatedVolumeImages'
                                    % (agentId))

    def verifyReplicationAbility(self, agentId):
        """Verifies replication ability."""
        return self.session.request('replicationcomms/slave/agents/%s/replication/verifyStart'
                                    % (agentId))

    def startRollup(self, data, agentId):
        """Starts rollup for specified slave agent for specified
        granularity cells (time intervals).
        """
        return self.session.request('replicationcomms/slave/agents/%s/rollup'
                                    % (agentId), 'POST',
                                    self.getXML(data, 'startRemoteRollupRequest'))

    def cancelRemoteRollup(self, agentId):
        """Cancels rollup phase of replication job for replicated
        agent.
        """
        return self.session.request('replicationcomms/slave/agents/%s/rollup'
                                    % (agentId))

    def getRollupProgress(self, agentId):
        """Gets status of the rollup job initiated from master
        core.
        """
        return self.session.request('replicationcomms/slave/agents/%s/rollup/progress'
                                    % (agentId))

    def startRollupJob(self, data, agentId):
        """Starts rollup job for specified slave agent for
        specified granularity cells (time intervals).
        """
        return self.session.request('replicationcomms/slave/agents/%s/rollupJob'
                                    % (agentId), 'POST',
                                    self.getXML(data, 'startRemoteRollupRequest'))

    def getAgentRecoveryPoints(self, agentId):
        """Gets the recovery points replicated for the given
        agent.
        """
        return self.session.request('replicationcomms/slave/agents/%s/rps'
                                    % (agentId))

    def getAgentRecoveryPointDetails(self, agentId, recoveryPointId):
        """Gets the details for a single replicated recovery
        point.
        """
        return self.session.request('replicationcomms/slave/agents/%s/rps/%s'
                                    % (agentId, recoveryPointId))

    def getAgentRecoveryPointsCounts(self, agentId):
        """Gets count of the recovery points replicated for the
        given agent.
        """
        return self.session.request('replicationcomms/slave/agents/%s/rpsCount'
                                    % (agentId))

    def selectRangeAgentRecoveryPoints(self, agentId, skipCount, maxCount=None):
        """Select range of the recovery points replicated for the
        given agent.

        Bug fix: the path previously ended at 'maxCount/' with no value
        and no trailing '/rps' segment (the old docstring hinted at
        '{maxCount}/rps'). Supplying maxCount builds the complete path;
        omitting it preserves the legacy (truncated) URL for existing
        callers.
        """
        if maxCount is None:
            return self.session.request('replicationcomms/slave/agents/%s/skipCount/%s/maxCount/'
                                        % (agentId, skipCount))
        return self.session.request('replicationcomms/slave/agents/%s/skipCount/%s/maxCount/%s/rps'
                                    % (agentId, skipCount, maxCount))

    def updateReplicationStatus(self, data, agentId):
        """Set replication status on the slave core."""
        return self.session.request('replicationcomms/slave/agents/%s/status'
                                    % (agentId), 'PUT',
                                    self.getXML(data, 'job'))

    def startTransferJob(self, agentId, jobId):
        """Starts remote mirrored transfer job on slave core."""
        return self.session.request('replicationcomms/slave/agents/%s/transferJob/%s'
                                    % (agentId, jobId), 'POST')

    def startVolumeImagesDeletionOld(self, data, agentId):
        """Starts deletion of volume images with specified
        identifiers for specified agent.
        """
        return self.session.request('replicationcomms/slave/agents/%s/volumeImagesDeletion'
                                    % (agentId), 'POST',
                                    self.getXML(data, 'startRemoteVolumeImagesDeletionRequest'))

    def cancelRemoteVolumeImagesDeletion(self, agentId):
        """Cancels volume images deletion phase of replication
        job for replicated agent.
        """
        return self.session.request('replicationcomms/slave/agents/%s/volumeImagesDeletion'
                                    % (agentId))

    def getVolumeImagesDeletionProgress(self, agentId):
        """Gets status of the deletion job initiated from master
        core.
        """
        return self.session.request('replicationcomms/slave/agents/%s/volumeImagesDeletion/progress'
                                    % (agentId))

    def startVolumeImagesDeletionJobOld(self, data, agentId):
        """Starts deletion of volume images job with specified
        identifiers for specified agent.
        """
        return self.session.request('replicationcomms/slave/agents/%s/volumeImagesDeletionJob'
                                    % (agentId), 'POST',
                                    self.getXML(data, 'startRemoteVolumeImagesDeletionRequest'))

    def startVolumeImagesDeletionJob(self, data, agentId):
        """Starts deletion of volume images job with specified
        identifiers for specified agent (new-style endpoint).
        """
        return self.session.request('replicationcomms/slave/agents/%s/volumeImagesDeletionJobNew'
                                    % (agentId), 'POST',
                                    self.getXML(data, 'startRemoteVolumeImagesDeletionRequest'))

    def startVolumeImagesDeletion(self, data, agentId):
        """Starts deletion of volume images with specified
        identifiers for specified agent (new-style endpoint).
        """
        return self.session.request('replicationcomms/slave/agents/%s/volumeImagesDeletionNew'
                                    % (agentId), 'POST',
                                    self.getXML(data, 'startRemoteVolumeImagesDeletionRequest'))

    def addAgentsByDemand(self, data):
        """Add agents by demand to a remote slave core."""
        return self.session.request('replicationcomms/slave/agents/demand', 'POST',
                                    self.getXML(data, 'addAgentsDemand'))

    def startRemoteReplicationJob(self, data):
        """Starts remote mirrored replication job on slave core."""
        return self.session.request('replicationcomms/slave/agents/replicationJob/start', 'POST',
                                    self.getXML(data, 'remoteReplicationJobRequest'))

    def syncRemoteReplicationJob(self, data):
        """Sync with remote mirrored replication job on slave
        core.
        """
        return self.session.request('replicationcomms/slave/agents/replicationJob/sync', 'POST',
                                    self.getXML(data, 'remoteSyncReplicationJobRequest'))

    def addAgentsByRequest(self, data):
        """Add agents by request to a remote slave."""
        return self.session.request('replicationcomms/slave/agents/request', 'POST',
                                    self.getXML(data, 'addAgentsRequest'))

    def getReplicatedAgentsRecoveryPointsInfo(self):
        """Gets the list of agents which have recovery points on
        a remote slave core.
        """
        return self.session.request('replicationcomms/slave/agents/rpsinfo')

    def getAgentRepositoryRelationships(self):
        """Gets the repositories for replicated agents."""
        return self.session.request('replicationcomms/slave/cores/agentRepositoryRelationships')

    def getRemoteMasterCoresForDemand(self):
        """Gets remote master cores info for current slave
        core. Using NTLM authentication.
        """
        return self.session.request('replicationcomms/slave/cores/masters')

    def verifyAddAgentsByDemand(self, data):
        """Verifies whether agents can be safely replicated by
        demand.
        """
        return self.session.request('replicationcomms/slave/demand/agents/verify', 'POST',
                                    self.getXML(data, 'addAgentsDemand'))

    def getExchangeVersions(self):
        """Gets versions of Exchange dlls which are present on remote
        slave core.
        """
        return self.session.request('replicationcomms/slave/exchange')

    def getFileInfoForExchangeDll(self, data, fileName):
        """Gets information for given Exchange DLL file."""
        # The XML root element carries the full data-contract namespace.
        return self.session.request('replicationcomms/slave/exchange/dllinfo/%s'
                                    % (fileName), 'POST',
                                    self.getXML(data, 'ExchangeServerVersion xmlns="http://schemas.datacontract.org/2004/07/Replay.Common.Contracts.Metadata.Exchange"'))

    def startNewUploadSession(self, data):
        """Starts new file upload session."""
        return self.session.request('replicationcomms/slave/newsession/', 'POST',
                                    self.getXML(data, 'fileReceiveRequest'))

    def demandPairing(self, data):
        """Demands the establishment of a pairing relationship
        with a remote core. Demands are only accepted if the caller
        performs NTLM authentication as a member of the administrators
        group. This method will reset connection for establish new
        secured connection.
        """
        return self.session.request('replicationcomms/slave/pairing/demand', 'POST',
                                    self.getXML(data, 'replicationPairingDemand'))

    def requestPairing(self, data):
        """Sends a request to a remote slave for authorization to
        replicate one or more agents. The request is adjudicated by a
        human operator and will be approved or denied at a later date.
        This method will reset connection for establish new secured
        connection.
        """
        return self.session.request('replicationcomms/slave/pairing/request', 'POST',
                                    self.getXML(data, 'replicationPairingRequest'))

    def getPairingStatus(self):
        """Gets the status of the pairing between the calling
        core and the remote slave core. The caller is identified by its
        SSL client certificate. This method is available to a remote
        core regardless of whether it was paired via a request or
        initiated the pairing itself.
        """
        return self.session.request('replicationcomms/slave/pairing/status')

    def syncPairingStatus(self, data):
        """Syncs the status of the pairing between the calling
        core and the remote slave core. The caller is identified by its
        SSL client certificate. This method is available to a remote
        core regardless of whether it was paired via a request or
        initiated the pairing itself.
        """
        return self.session.request('replicationcomms/slave/pairing/sync', 'POST',
                                    self.getXML(data, 'masterCorePairingStatus'))

    def deletePairing(self, deleteRecoveryPoints):
        """Removes replication relationship with Master Core on
        Slave's Core side. Actual replicated and protected agent on
        Master and Slave Cores stay available.
        """
        return self.session.request('replicationcomms/slave/pairing?deleteRecoveryPoints=%s'
                                    % (deleteRecoveryPoints))

    def verifyReplicationCorePairingAbility(self, data, useCredentials=''):
        """Verifies pairing ability.

        Bug fix: the query value was previously hard-coded empty
        ('?useCredentials='); the optional useCredentials parameter
        fills it in, while the default keeps the legacy URL unchanged.
        """
        return self.session.request('replicationcomms/slave/replication/verifyStart/?useCredentials=%s'
                                    % (useCredentials), 'POST',
                                    self.getXML(data, 'remoteMasterSummaryCoreInfo'))

    def getRepositories(self):
        """Gets all repositories. With certificate authentication
        for already paired cores.
        """
        return self.session.request('replicationcomms/slave/repositories')

    def verifyAddAgentsByRequest(self, data):
        """Verifies whether agents can be safely replicated by
        request.
        """
        return self.session.request('replicationcomms/slave/request/agents/verify', 'POST',
                                    self.getXML(data, 'addAgentsRequest'))

    def negotiateMissingRecordsOld(self, sessionId):
        """Sends a stream of record metadata for the image being
        replicated, and receives back a stream of records which are
        missing from the remote core.
        """
        return self.session.request('replicationcomms/slave/sessions/%s/records/keys'
                                    % (sessionId), 'POST')

    def transferMissingRecordsOld(self, sessionId):
        """Sends a stream of raw records to the slave core, the
        list of which is determined by NegotiateMissingRecords.
        """
        return self.session.request('replicationcomms/slave/sessions/%s/records/rawdata'
                                    % (sessionId), 'POST')

    def endVolumeImageReplicationSessionOld(self, sessionId, commit):
        """Ends the volume image replication session, optionally
        committing the transferred volume image.

        Bug fix: the URL has two placeholders but the original
        signature supplied only `commit`, so every call raised
        TypeError; the missing sessionId parameter was added.
        """
        return self.session.request('replicationcomms/slave/sessions/%s?commit=%s'
                                    % (sessionId, commit))

    def endUploadFile(self, uploadSessionId):
        """Ends current upload session and checks MD5 hash of
        received file.
        """
        return self.session.request('replicationcomms/slave/sessions/%s/'
                                    % (uploadSessionId), 'POST')

    def cancelUploadFile(self, uploadSessionId):
        """Cancels current upload session."""
        return self.session.request('replicationcomms/slave/sessions/%s/'
                                    % (uploadSessionId))

    def continueUploadFile(self, uploadSessionId, dataSize):
        """Reads data from slave core in current upload session."""
        return self.session.request('replicationcomms/slave/sessions/%s/data/%s'
                                    % (uploadSessionId, dataSize), 'POST')

    def startNewVolumeImageReplicationSession(self, data):
        """Starts a replication session with a slave core."""
        return self.session.request('replicationcomms/slave/sessions/new', 'POST',
                                    self.getXML(data, 'startVolumeImageReplicationSessionRequest'))

    def updateMasterStorageUsage(self, data):
        """Reports a summary of storage usage on the master core
        to the slave core. This primarily exists to support MSP billing
        needs.
        """
        return self.session.request('replicationcomms/slave/storage', 'PUT',
                                    self.getXML(data, 'storageUsageSummary'))

    def getCoreId(self, useCredentials):
        """Tests connection to remote core and returns core ID.
        If useCredentials is true then NTLM authentication used,
        otherwise Anonymous authentication.
        """
        return self.session.request('replicationcomms/slave/validate/?useCredentials=%s'
                                    % (useCredentials))
| {
"repo_name": "rshipp/python-appassure",
"path": "appassure/core/IReplicationCommunication.py",
"copies": "1",
"size": "16750",
"license": "bsd-3-clause",
"hash": 7876033462815340000,
"line_mean": 43.0789473684,
"line_max": 153,
"alpha_frac": 0.6557014925,
"autogenerated": false,
"ratio": 4.517259978425027,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0044338112031321215,
"num_lines": 380
} |
"""AppAssure 5 REST API"""
from appassure.api import AppAssureAPI
class IVirtualDiskManagement(AppAssureAPI):
    """Full documentation online at
    http://docs.appassure.com/display/AA50D/IVirtualDiskManagement

    Thin wrapper over the VHD management REST endpoints; every method
    delegates to self.session.request() (GET unless stated otherwise).
    """

    def baseFileName(self, id):
        """Gets VHD base file name."""
        return self.session.request('vhd/%s/baseFileName'
                                    % (id))

    def beginBatch(self, id, target):
        """Begins batch."""
        return self.session.request('vhd/%s/beginBatch/%s'
                                    % (id, target), 'POST')

    def close(self, id):
        """Closes VHD."""
        return self.session.request('vhd/%s/close'
                                    % (id))

    def delete(self, id, target):
        """Deletes VHD snapshot or base file."""
        return self.session.request('vhd/%s/delete/%s'
                                    % (id, target), 'POST')

    def endBatch(self, id):
        """Ends batch."""
        return self.session.request('vhd/%s/endBatch'
                                    % (id), 'POST')

    def hasSnapshot(self, id):
        """Verifies VHD has snapshot."""
        return self.session.request('vhd/%s/hasSnapshot'
                                    % (id))

    def read(self, id, target, sectorOffset, sectorLength):
        """Reads raw data from VHD."""
        return self.session.request('vhd/%s/read/%s/%s/%s'
                                    % (id, target, sectorOffset, sectorLength), 'POST')

    def readCustomMetadata(self, id, target, key):
        """Reads a user-defined custom metadata string."""
        return self.session.request('vhd/%s/readCustomMetadata/%s/%s'
                                    % (id, target, key), 'POST')

    def sectorSize(self, id):
        """Gets sector size of the VHD."""
        return self.session.request('vhd/%s/sectorSize'
                                    % (id))

    def snapshotFileName(self, id):
        """Gets VHD snapshot file name."""
        return self.session.request('vhd/%s/snapshotFileName'
                                    % (id))

    def takeSnapshot(self, id):
        """Takes VHD snapshot."""
        return self.session.request('vhd/%s/takeSnapshot'
                                    % (id))

    def totalSectorCapacity(self, id):
        """Gets VHD capacity."""
        return self.session.request('vhd/%s/totalSectorCapacity'
                                    % (id))

    def translateSectorOffsetToChsTuple(self, id, sectorOffset):
        """Translates sector offset to chs tuple."""
        return self.session.request('vhd/%s/translateSectorOffsetToChsTuple?sectorOffset=%s'
                                    % (id, sectorOffset))

    def write(self, id, target, sectorOffset, sectorLength):
        """Writes raw data to VHD."""
        return self.session.request('vhd/%s/write/%s/%s/%s'
                                    % (id, target, sectorOffset, sectorLength), 'POST')

    def writeCustomMetadata(self, id, target, value):
        """Writes a user-defined custom metadata string.

        (Docstring fix: it previously said "Reads", copied from
        readCustomMetadata.)
        """
        return self.session.request('vhd/%s/writeCustomMetadata/%s/%s?value=%s'
                                    % (id, target, value), 'POST')

    def create(self, path, bytesCapacity, bytesPerSector,
               containsBootSystemVolume, preallocate):
        """Creates VHD.

        NOTE(review): query values are interpolated unescaped; a path
        containing '&' or spaces would corrupt the query string.
        """
        return self.session.request('vhd/createVhd?path=%s&bytesCapacity=%s&bytesPerSector=%s&containsBootSystemVolume=%s&preallocate=%s'
                                    % (path, bytesCapacity, bytesPerSector,
                                       containsBootSystemVolume, preallocate), 'PUT')

    def open(self, path):
        """Opens an existing VHD at the given path."""
        return self.session.request('vhd/openVhd?path=%s'
                                    % (path), 'PUT')
| {
"repo_name": "rshipp/python-appassure",
"path": "appassure/core/IVirtualDiskManagement.py",
"copies": "1",
"size": "3492",
"license": "bsd-3-clause",
"hash": -1445433631705768400,
"line_mean": 35.7578947368,
"line_max": 137,
"alpha_frac": 0.5856242841,
"autogenerated": false,
"ratio": 3.9060402684563758,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4991664552556375,
"avg_score": null,
"num_lines": null
} |
# app/auth/forms.py
# coding: utf-8
from flask_wtf import FlaskForm
from wtforms import PasswordField, StringField, SubmitField, ValidationError
from wtforms.validators import DataRequired, Email, EqualTo
from app.models import Usuario
class RegistrationForm(FlaskForm):
    """Form to register users.

    The password field must match confirm_password (EqualTo), and both
    email and username are checked for uniqueness against the Usuario
    table via the validate_* hooks below.
    """
    # Field labels are user-facing and deliberately kept in Spanish.
    email = StringField(u'Email', validators=[DataRequired(), Email()])
    username = StringField(u'Nombre de usuario', validators=[DataRequired()])
    first_name = StringField(u'Nombres', validators=[DataRequired()])
    last_name = StringField(u'Apellidos', validators=[DataRequired()])
    password = PasswordField(u'Contraseña', validators=[
        DataRequired(),
        EqualTo('confirm_password')  # must match the confirmation field
    ])
    confirm_password = PasswordField(u'Confirma contraseña')
    submit = SubmitField(u'Crear usuario')

    def validate_email(self, field):
        """Reject an email address that already exists in the database."""
        if Usuario.query.filter_by(email=field.data).first():
            raise ValidationError(u'El email ya existe en la BD.')

    def validate_username(self, field):
        """Reject a username that already exists in the database."""
        if Usuario.query.filter_by(username=field.data).first():
            raise ValidationError(u'El usuario ya existe en la BD.')
class LoginForm(FlaskForm):
    """Form to log in to the system (email + password)."""
    email = StringField(u'Email', validators=[DataRequired(), Email()])
    password = PasswordField(u'Contraseña', validators=[DataRequired()])
    submit = SubmitField(u'Aceptar')
| {
"repo_name": "originaltebas/chmembers",
"path": "app/auth/forms.py",
"copies": "1",
"size": "1605",
"license": "mit",
"hash": -7484420237145767000,
"line_mean": 37.0731707317,
"line_max": 77,
"alpha_frac": 0.638576779,
"autogenerated": false,
"ratio": 4.025125628140704,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5163702407140703,
"avg_score": null,
"num_lines": null
} |
# app/auth/views.py
# coding: utf-8
from flask import flash, redirect, render_template, url_for
from flask_login import login_required, login_user, logout_user
from app.auth import auth
from app.auth.forms import LoginForm, RegistrationForm
from app import db
from app.models import Usuario
@auth.route('/register', methods=['GET', 'POST'])
def register():
    """Register a new user and store it in the database.

    NOTE (from the original Spanish comments): all functionality is
    currently admin-oriented; the admin flag can only be set directly
    in the database, and the model also has an editor flag granting
    access to everything except people-tracking data, for privacy.
    """
    form = RegistrationForm()
    if form.validate_on_submit():
        usuario = Usuario(email=form.email.data,
                          username=form.username.data,
                          first_name=form.first_name.data,
                          last_name=form.last_name.data,
                          password=form.password.data)
        try:
            # Add the user to the DB.
            db.session.add(usuario)
            db.session.commit()
            flash(u'Se ha creado correctamente el usuario.', 'success')
        except Exception as e:
            # Undo the failed transaction so the session stays usable.
            db.session.rollback()
            # Bug fix: flash() takes (message, category); the original
            # passed three positional args and raised TypeError.
            flash(u'Error: {}'.format(e), 'danger')
        # Redirect to the login page (even after a failure, as before).
        return redirect(url_for('auth.login'))
    # GET request or invalid form: show the registration template.
    return render_template('auth/register.html', form=form)
@auth.route('/login', methods=['GET', 'POST'])
def login():
    """Handle login requests: authenticate a user and open a session."""
    form = LoginForm()
    if not form.validate_on_submit():
        # First visit or invalid submission: show the login template.
        return render_template('auth/login.html', form=form)
    # Look the user up by email and verify the submitted password.
    user = Usuario.query.filter_by(email=form.email.data).first()
    if user is not None and user.verify_password(form.password.data):
        login_user(user)
        # Send the authenticated user to the dashboard hub.
        return redirect(url_for('home.hub'))
    # Wrong email or password: warn and re-render the login template.
    flash(u'Email o contraseña invalidos.', 'danger')
    return render_template('auth/login.html', form=form)
@auth.route('/logout')
@login_required
def logout():
    """End the current session and send the user back to the login page."""
    logout_user()
    flash(u'Has salido correctamente.', 'success')
    login_page = url_for('auth.login')
    return redirect(login_page)
| {
"repo_name": "originaltebas/chmembers",
"path": "app/auth/views.py",
"copies": "1",
"size": "2643",
"license": "mit",
"hash": -8852431270395165000,
"line_mean": 30.5925925926,
"line_max": 79,
"alpha_frac": 0.6109848485,
"autogenerated": false,
"ratio": 3.8095238095238093,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.49205086580238094,
"avg_score": null,
"num_lines": null
} |
# app/blog/views.py
#################### Imports ####################
from flask import render_template, Blueprint, request, redirect, url_for, flash
from app import app
from app import db
from app.models import Blog
from .forms import AddArticleForm
#################### Config ####################
# Blueprint collecting all blog routes; templates resolve against this
# package's local templates/ directory.
blog_blueprint = Blueprint('blog', __name__, template_folder='templates')
#################### Helper Functions ####################
def flash_errors(form):
    """Flash every validation error on *form* as an 'info' message."""
    for field_name, messages in form.errors.items():
        # One flash per error, labelled with the field's display text.
        label_text = getattr(form, field_name).label.text
        for message in messages:
            flash(u"Error in the %s field - %s" % (label_text, message), 'info')
#################### Routes ####################
@blog_blueprint.route('/')
@blog_blueprint.route('/index')
@blog_blueprint.route('/blog')
def index():
    """Render the blog landing page with every stored article."""
    return render_template('blog.html', articles=Blog.query.all())
@blog_blueprint.route('/add_article', methods=['GET', 'POST'])
def add_article():
    """Show the add-article form and create an article on valid POST."""
    form = AddArticleForm(request.form)
    # The explicit method check keeps plain GETs from flashing errors.
    if request.method == 'POST':
        if not form.validate_on_submit():
            flash_errors(form)
            flash('ERROR! Article was not added.', 'error')
        else:
            article = Blog(form.article_title.data,
                           form.article_author.data,
                           form.article_content.data)
            db.session.add(article)
            db.session.commit()
            flash('New Article, {}, added!'.format(article.article_title), 'success')
            return redirect(url_for('blog.index'))
    return render_template('add_article.html', form=form)
"repo_name": "weldon0405/weldon-blog",
"path": "app/blog/views.py",
"copies": "1",
"size": "1622",
"license": "mit",
"hash": -5773044661729584000,
"line_mean": 29.0555555556,
"line_max": 108,
"alpha_frac": 0.5721331689,
"autogenerated": false,
"ratio": 4.004938271604939,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5077071440504939,
"avg_score": null,
"num_lines": null
} |
# appclient.py
# client code to hit the events app services.
# Ryan Halbrook, 3/16/16
import requests
class CommunityClient:
    """REST client for the /rest/communities endpoints.

    host -- protocol, host name, and port, e.g. http://localhost:8080
    userid -- value sent in the 'userid' header on mutating calls
    """

    def __init__(self, host, userid):
        self.host = host
        self.userid = userid

    def post_community(self, name, description):
        """Create a community and return the HTTP response."""
        url = self.host + '/rest/communities'
        payload = {"name": name, "description": description}
        auth = {'userid': self.userid}
        return requests.post(url, data=payload, headers=auth)

    def get_community(self, community_id):
        """Fetch one community by id."""
        url = self.host + '/rest/communities/' + community_id
        return requests.get(url)

    def delete_community(self, community_id):
        """Delete one community by id."""
        url = self.host + '/rest/communities/' + community_id
        payload = {"communityid": community_id}
        auth = {'userid': self.userid}
        return requests.delete(url, data=payload, headers=auth)

    def get_events(self, community_id):
        """List the events belonging to a community."""
        url = self.host + '/rest/communities/' + community_id + '/events'
        return requests.get(url)
class EventClient:
    """REST client for community event endpoints.

    host -- protocol, host name, and port, e.g. http://localhost:8080
    userid -- value sent in the 'userid' header on mutating calls
    """

    def __init__(self, host, userid):
        self.host = host
        self.userid = userid

    def get_event(self, community_id, event_id):
        """Fetch a single event from a community."""
        return requests.get(self.host + '/rest/communities/' + community_id
                            + '/events/' + event_id)

    def post_event(self, community_id, name, description):
        """Create an event in the given community.

        Bug fix: the original posted to '/rest/communities/events' and
        silently ignored community_id; the collection URL now includes
        it, matching CommunityClient.get_events.
        """
        data = {'name': name, 'description': description}
        headers = {'userid': self.userid}
        return requests.post(self.host + '/rest/communities/' + community_id
                             + '/events', data=data, headers=headers)

    def delete_event(self, community_id, event_id):
        """Delete a single event from a community."""
        headers = {'userid': self.userid}
        return requests.delete(self.host + '/rest/communities/' + community_id
                               + '/events/' + event_id, headers=headers)
| {
"repo_name": "ryhal/cs263",
"path": "test-scripts/appclient.py",
"copies": "1",
"size": "1819",
"license": "apache-2.0",
"hash": 6106992769536030000,
"line_mean": 24.6197183099,
"line_max": 120,
"alpha_frac": 0.6871907642,
"autogenerated": false,
"ratio": 3.259856630824373,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4447047395024373,
"avg_score": null,
"num_lines": null
} |
"""Appcommands-compatible command class with extra fixins."""
from __future__ import print_function
import cmd
import inspect
import pdb
import shlex
import sys
import traceback
import types
import six
from google.apputils import app
from google.apputils import appcommands
import gflags as flags
# Public names exported by this module.
__all__ = [
    'NewCmd',
    'Repl',
]

# Global command-line flags shared by every NewCmd-based command.
flags.DEFINE_boolean(
    'debug_mode', False,
    'Show tracebacks on Python exceptions.')
flags.DEFINE_boolean(
    'headless', False,
    'Assume no user is at the controlling console.')
FLAGS = flags.FLAGS
def _SafeMakeAscii(s):
    """Best-effort conversion of *s* to an ASCII byte string."""
    if isinstance(s, six.text_type):
        return s.encode('ascii')
    if isinstance(s, str):
        # Only reachable on Python 2, where str is the byte type
        # (on Python 3, str == six.text_type and the branch above wins).
        return s.decode('ascii')
    # Anything else: stringify, escaping non-ASCII characters.
    return six.text_type(s).encode('ascii', 'backslashreplace')
class NewCmd(appcommands.Cmd):
    """Featureful extension of appcommands.Cmd."""

    def __init__(self, name, flag_values):
        super(NewCmd, self).__init__(name, flag_values)
        run_with_args = getattr(self, 'RunWithArgs', None)
        # A command is "new style" when it defines RunWithArgs; such
        # commands get argspec-driven positional-arg checking in Run().
        self._new_style = isinstance(run_with_args, types.MethodType)
        if self._new_style:
            func = run_with_args.__func__
            argspec = inspect.getargspec(func)
            # Drop the bound 'self' parameter from the spec.
            if argspec.args and argspec.args[0] == 'self':
                argspec = argspec._replace(  # pylint: disable=protected-access
                    args=argspec.args[1:])
            self._argspec = argspec
            # TODO(craigcitro): Do we really want to support all this
            # nonsense?
            self._star_args = self._argspec.varargs is not None
            self._star_kwds = self._argspec.keywords is not None
            self._max_args = len(self._argspec.args or ())
            self._min_args = self._max_args - len(self._argspec.defaults or ())
            if self._star_args:
                # *args accepts any number of positionals.
                # NOTE(review): sys.maxint exists only on Python 2.
                self._max_args = sys.maxint
            self._debug_mode = FLAGS.debug_mode
            self.surface_in_shell = True
            self.__doc__ = self.RunWithArgs.__doc__

    def __getattr__(self, name):
        # Expose per-command flag values as plain attributes.
        if name in self._command_flags:
            return self._command_flags[name].value
        return super(NewCmd, self).__getattribute__(name)

    def _GetFlag(self, flagname):
        # Return the flag object for this command, or None if unknown.
        if flagname in self._command_flags:
            return self._command_flags[flagname]
        else:
            return None

    def Run(self, argv):
        """Run this command.

        If self is a new-style command, we set up arguments and call
        self.RunWithArgs, gracefully handling exceptions. If not, we
        simply call self.Run(argv).

        Args:
          argv: List of arguments as strings.

        Returns:
          0 on success, nonzero on failure.
        """
        if not self._new_style:
            return super(NewCmd, self).Run(argv)
        # TODO(craigcitro): We need to save and restore flags each time so
        # that we can per-command flags in the REPL.
        args = argv[1:]
        fail = None
        # Validate positional-arg count against the RunWithArgs argspec.
        if len(args) < self._min_args:
            fail = 'Not enough positional args; found %d, expected at least %d' % (
                len(args), self._min_args)
        if len(args) > self._max_args:
            fail = 'Too many positional args; found %d, expected at most %d' % (
                len(args), self._max_args)
        if fail:
            print(fail)
            if self.usage:
                print('Usage: %s' % (self.usage,))
            return 1
        if self._debug_mode:
            return self.RunDebug(args, {})
        else:
            return self.RunSafely(args, {})

    def RunCmdLoop(self, argv):
        """Hook for use in cmd.Cmd-based command shells."""
        try:
            args = shlex.split(argv)
        except ValueError as e:
            raise SyntaxError(self.EncodeForPrinting(e))
        return self.Run([self._command_name] + args)

    @staticmethod
    def EncodeForPrinting(s):
        """Safely encode a string as the encoding for sys.stdout."""
        encoding = sys.stdout.encoding or 'ascii'
        return six.text_type(s).encode(encoding, 'backslashreplace')

    def _FormatError(self, e):
        """Hook for subclasses to modify how error messages are printed."""
        return _SafeMakeAscii(e)

    def _HandleError(self, e):
        # Print a one-line report and signal failure to the caller.
        message = self._FormatError(e)
        print('Exception raised in %s operation: %s' % (
            self._command_name, message))
        return 1

    def _IsDebuggableException(self, e):
        """Hook for subclasses to skip debugging on certain exceptions."""
        return not isinstance(e, app.UsageError)

    def RunDebug(self, args, kwds):
        """Run this command in debug mode."""
        try:
            return_value = self.RunWithArgs(*args, **kwds)
        except BaseException as e:
            # Don't break into the debugger for expected exceptions.
            if not self._IsDebuggableException(e):
                return self._HandleError(e)
            print()
            print('****************************************************')
            print('** Unexpected Exception raised in execution! **')
            if FLAGS.headless:
                print('** --headless mode enabled, exiting. **')
                print('** See STDERR for traceback. **')
            else:
                print('** --debug_mode enabled, starting pdb. **')
            print('****************************************************')
            print()
            traceback.print_exc()
            print()
            if not FLAGS.headless:
                # Drop into the post-mortem debugger at the failure point.
                pdb.post_mortem()
            return 1
        return return_value

    def RunSafely(self, args, kwds):
        """Run this command, turning exceptions into print statements."""
        try:
            return_value = self.RunWithArgs(*args, **kwds)
        except BaseException as e:
            return self._HandleError(e)
        return return_value
# pylint: disable=g-bad-name
class CommandLoop(cmd.Cmd):
  """Instance of cmd.Cmd built to work with NewCmd."""

  class TerminateSignal(Exception):
    """Exception type used for signaling loop completion."""

  def __init__(self, commands, prompt):
    """Builds the shell from a mapping of command name -> command object.

    Only 'help' plus NewCmd instances that opted in via surface_in_shell
    are exposed; each is registered through cmd.Cmd's do_* dispatch.
    """
    cmd.Cmd.__init__(self)
    self._commands = {'help': commands['help']}
    self._special_command_names = ['help', 'repl', 'EOF']
    for name, command in six.iteritems(commands):
      if (name not in self._special_command_names and
          isinstance(command, NewCmd) and
          command.surface_in_shell):
        self._commands[name] = command
        # Hook the command into cmd.Cmd's do_<name> dispatch mechanism.
        setattr(self, 'do_%s' % (name,), command.RunCmdLoop)
    self._default_prompt = prompt
    self._set_prompt()
    self._last_return_code = 0

  @property
  def last_return_code(self):
    # Exit status of the most recently executed command (0 at startup).
    return self._last_return_code

  def _set_prompt(self):
    # Reset the visible prompt to the configured default.
    self.prompt = self._default_prompt

  def do_EOF(self, *unused_args):
    """Terminate the running command loop.

    This function raises an exception to avoid the need to do
    potentially-error-prone string parsing inside onecmd.

    Args:
      *unused_args: unused.

    Returns:
      Never returns.

    Raises:
      CommandLoop.TerminateSignal: always.
    """
    raise CommandLoop.TerminateSignal()

  def postloop(self):
    # Called by cmd.Cmd once the loop finishes.
    print('Goodbye.')

  def completedefault(self, unused_text, line, unused_begidx, unused_endidx):
    """Instead of completing, print the current command's usage string."""
    if not line:
      return []
    else:
      command_name = line.partition(' ')[0].lower()
      usage = ''
      if command_name in self._commands:
        usage = self._commands[command_name].usage
      if usage:
        print()
        print(usage)
        # Re-echo the prompt and the partial line so the user can keep typing.
        print('%s%s' % (self.prompt, line), end=' ')
      return []

  def emptyline(self):
    """On an empty input line, list the available commands."""
    print('Available commands:', end=' ')
    print(' '.join(list(self._commands)))

  def precmd(self, line):
    """Preprocess the shell input."""
    if line == 'EOF':
      return line
    # 'exit' and 'quit' are aliases for EOF (handled in do_EOF/postcmd).
    if line.startswith('exit') or line.startswith('quit'):
      return 'EOF'
    words = line.strip().split()
    # A bare command name (no args) shows its help instead of running it,
    # except for the few commands that are meaningful with no arguments.
    if len(words) == 1 and words[0] not in ['help', 'ls', 'version']:
      return 'help %s' % (line.strip(),)
    return line

  def onecmd(self, line):
    """Process a single command.

    Runs a single command, and stores the return code in
    self._last_return_code. Always returns False unless the command
    was EOF.

    Args:
      line: (str) Command line to process.

    Returns:
      A bool signaling whether or not the command loop should terminate.
    """
    try:
      self._last_return_code = cmd.Cmd.onecmd(self, line)
    except CommandLoop.TerminateSignal:
      return True
    except BaseException as e:
      # Report the failure but keep the shell alive.
      name = line.split(' ')[0]
      print('Error running %s:' % name)
      print(e)
      self._last_return_code = 1
    return False

  def get_names(self):
    """Names cmd.Cmd uses for help/completion; hides special commands."""
    names = dir(self)
    commands = (name for name in self._commands
                if name not in self._special_command_names)
    names.extend('do_%s' % (name,) for name in commands)
    names.remove('do_EOF')
    return names

  def do_help(self, command_name):
    """Print the help for command_name (if present) or general help."""

    # TODO(craigcitro): Add command-specific flags.
    def FormatOneCmd(name, command, command_names):
      # Render one command's help text; when listing several commands the
      # first line is aligned into a "name:  summary" column layout.
      indent_size = appcommands.GetMaxCommandLength() + 3
      if len(command_names) > 1:
        indent = ' ' * indent_size
        command_help = flags.TextWrap(
            command.CommandGetHelp('', cmd_names=command_names),
            indent=indent,
            firstline_indent='')
        first_help_line, _, rest = command_help.partition('\n')
        first_line = '%-*s%s' % (indent_size, name + ':', first_help_line)
        return '\n'.join((first_line, rest))
      else:
        default_indent = ' '
        return '\n' + flags.TextWrap(
            command.CommandGetHelp('', cmd_names=command_names),
            indent=default_indent,
            firstline_indent=default_indent) + '\n'

    if not command_name:
      print('\nHelp for commands:\n')
      command_names = list(self._commands)
      print('\n\n'.join(
          FormatOneCmd(name, command, command_names)
          for name, command in six.iteritems(self._commands)
          if name not in self._special_command_names))
      print()
    elif command_name in self._commands:
      print(FormatOneCmd(command_name, self._commands[command_name],
                         command_names=[command_name]))
    return 0

  def postcmd(self, stop, line):
    # Terminate after do_EOF fired (stop truthy) or the line was 'EOF'.
    return bool(stop) or line == 'EOF'
# pylint: enable=g-bad-name
class Repl(NewCmd):
  """Start an interactive session."""

  PROMPT = '> '

  def __init__(self, name, fv):
    super(Repl, self).__init__(name, fv)
    # The repl command itself should not be offered inside the shell.
    self.surface_in_shell = False
    flags.DEFINE_string(
        'prompt', '',
        'Prompt to use for interactive shell.',
        flag_values=fv)

  def RunWithArgs(self):
    """Start an interactive session."""
    shell = CommandLoop(appcommands.GetCommandList(),
                        prompt=FLAGS.prompt or self.PROMPT)
    print('Welcome! (Type help for more information.)')
    finished = False
    while not finished:
      try:
        shell.cmdloop()
        finished = True
      except KeyboardInterrupt:
        # Ctrl-C aborts the current input line but restarts the loop.
        print()
    return shell.last_return_code
| {
"repo_name": "wemanuel/smry",
"path": "server-auth/ls/google-cloud-sdk/.install/.backup/lib/googlecloudapis/apitools/base/py/app2.py",
"copies": "4",
"size": "10638",
"license": "apache-2.0",
"hash": -3409668770886742000,
"line_mean": 29.3076923077,
"line_max": 77,
"alpha_frac": 0.6186313217,
"autogenerated": false,
"ratio": 3.803360743653915,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.01097241389257385,
"num_lines": 351
} |
"""Appcommands-compatible command class with extra fixins."""
import cmd
import inspect
import pdb
import shlex
import sys
import traceback
import types
from google.apputils import app
from google.apputils import appcommands
import gflags as flags
__all__ = [
'NewCmd',
'Repl',
]
flags.DEFINE_boolean(
'debug_mode', False,
'Show tracebacks on Python exceptions.')
flags.DEFINE_boolean(
'headless', False,
'Assume no user is at the controlling console.')
FLAGS = flags.FLAGS
def _SafeMakeAscii(s):
  """Return an ASCII-safe representation of any value (Python 2)."""
  if isinstance(s, unicode):
    # Unicode text: must already be pure ASCII.
    return s.encode('ascii')
  if isinstance(s, str):
    # Byte string: verify it decodes as ASCII.
    return s.decode('ascii')
  # Anything else: stringify it, escaping non-ASCII characters.
  return unicode(s).encode('ascii', 'backslashreplace')
class NewCmd(appcommands.Cmd):
  """Featureful extension of appcommands.Cmd."""

  def __init__(self, name, flag_values):
    super(NewCmd, self).__init__(name, flag_values)
    run_with_args = getattr(self, 'RunWithArgs', None)
    # "New-style" commands define RunWithArgs and get argument validation,
    # debugging and error handling from this class.
    self._new_style = isinstance(run_with_args, types.MethodType)
    if self._new_style:
      func = run_with_args.im_func
      argspec = inspect.getargspec(func)
      if argspec.args and argspec.args[0] == 'self':
        argspec = argspec._replace(  # pylint: disable=protected-access
            args=argspec.args[1:])
      self._argspec = argspec
      # TODO(user): Do we really want to support all this
      # nonsense?
      self._star_args = self._argspec.varargs is not None
      self._star_kwds = self._argspec.keywords is not None
      # Min/max positional-arg counts derived from RunWithArgs' signature.
      self._max_args = len(self._argspec.args or ())
      self._min_args = self._max_args - len(self._argspec.defaults or ())
      if self._star_args:
        self._max_args = sys.maxint
      self._debug_mode = FLAGS.debug_mode
      self.surface_in_shell = True
      # Surface RunWithArgs' docstring as the command's help text.
      self.__doc__ = self.RunWithArgs.__doc__

  def __getattr__(self, name):
    # Expose per-command flag values as plain attributes.
    if name in self._command_flags:
      return self._command_flags[name].value
    return super(NewCmd, self).__getattribute__(name)

  def _GetFlag(self, flagname):
    """Return the flag object for flagname, or None if not defined."""
    if flagname in self._command_flags:
      return self._command_flags[flagname]
    else:
      return None

  def Run(self, argv):
    """Run this command.

    If self is a new-style command, we set up arguments and call
    self.RunWithArgs, gracefully handling exceptions. If not, we
    simply call self.Run(argv).

    Args:
      argv: List of arguments as strings.

    Returns:
      0 on success, nonzero on failure.
    """
    if not self._new_style:
      return super(NewCmd, self).Run(argv)
    # TODO(user): We need to save and restore flags each time so
    # that we can per-command flags in the REPL.
    args = argv[1:]
    fail = None
    # Validate positional-arg count against RunWithArgs' signature before
    # dispatching.
    if len(args) < self._min_args:
      fail = 'Not enough positional args; found %d, expected at least %d' % (
          len(args), self._min_args)
    if len(args) > self._max_args:
      fail = 'Too many positional args; found %d, expected at most %d' % (
          len(args), self._max_args)
    if fail:
      print fail
      if self.usage:
        print 'Usage: %s' % (self.usage,)
      return 1
    if self._debug_mode:
      return self.RunDebug(args, {})
    else:
      return self.RunSafely(args, {})

  def RunCmdLoop(self, argv):
    """Hook for use in cmd.Cmd-based command shells."""
    try:
      args = shlex.split(argv)
    except ValueError as e:
      raise SyntaxError(self.EncodeForPrinting(e))
    return self.Run([self._command_name] + args)

  @staticmethod
  def EncodeForPrinting(s):
    """Safely encode a string as the encoding for sys.stdout."""
    encoding = sys.stdout.encoding or 'ascii'
    return unicode(s).encode(encoding, 'backslashreplace')

  def _FormatError(self, e):
    """Hook for subclasses to modify how error messages are printed."""
    return _SafeMakeAscii(e)

  def _HandleError(self, e):
    # Print a one-line failure report and signal a nonzero exit status.
    message = self._FormatError(e)
    print 'Exception raised in %s operation: %s' % (self._command_name, message)
    return 1

  def _IsDebuggableException(self, e):
    """Hook for subclasses to skip debugging on certain exceptions."""
    return not isinstance(e, app.UsageError)

  def RunDebug(self, args, kwds):
    """Run this command in debug mode."""
    try:
      return_value = self.RunWithArgs(*args, **kwds)
    except BaseException, e:
      # Don't break into the debugger for expected exceptions.
      if not self._IsDebuggableException(e):
        return self._HandleError(e)
      print
      print '****************************************************'
      print '** Unexpected Exception raised in execution! **'
      if FLAGS.headless:
        # Headless runs can't use an interactive debugger.
        print '** --headless mode enabled, exiting. **'
        print '** See STDERR for traceback. **'
      else:
        print '** --debug_mode enabled, starting pdb. **'
      print '****************************************************'
      print
      traceback.print_exc()
      print
      if not FLAGS.headless:
        pdb.post_mortem()
      return 1
    return return_value

  def RunSafely(self, args, kwds):
    """Run this command, turning exceptions into print statements."""
    try:
      return_value = self.RunWithArgs(*args, **kwds)
    except BaseException, e:
      return self._HandleError(e)
    return return_value
# pylint: disable=g-bad-name
class CommandLoop(cmd.Cmd):
  """Instance of cmd.Cmd built to work with NewCmd."""

  class TerminateSignal(Exception):
    """Exception type used for signaling loop completion."""

  def __init__(self, commands, prompt):
    """Builds the shell from a mapping of command name -> command object.

    Only 'help' plus NewCmd instances that opted in via surface_in_shell
    are exposed; each is registered through cmd.Cmd's do_* dispatch.
    """
    cmd.Cmd.__init__(self)
    self._commands = {'help': commands['help']}
    self._special_command_names = ['help', 'repl', 'EOF']
    for name, command in commands.iteritems():
      if (name not in self._special_command_names and
          isinstance(command, NewCmd) and
          command.surface_in_shell):
        self._commands[name] = command
        # Hook the command into cmd.Cmd's do_<name> dispatch mechanism.
        setattr(self, 'do_%s' % (name,), command.RunCmdLoop)
    self._default_prompt = prompt
    self._set_prompt()
    self._last_return_code = 0

  @property
  def last_return_code(self):
    # Exit status of the most recently executed command (0 at startup).
    return self._last_return_code

  def _set_prompt(self):
    # Reset the visible prompt to the configured default.
    self.prompt = self._default_prompt

  def do_EOF(self, *unused_args):
    """Terminate the running command loop.

    This function raises an exception to avoid the need to do
    potentially-error-prone string parsing inside onecmd.

    Args:
      *unused_args: unused.

    Returns:
      Never returns.

    Raises:
      CommandLoop.TerminateSignal: always.
    """
    raise CommandLoop.TerminateSignal()

  def postloop(self):
    # Called by cmd.Cmd once the loop finishes.
    print 'Goodbye.'

  def completedefault(self, unused_text, line, unused_begidx, unused_endidx):
    """Instead of completing, print the current command's usage string."""
    if not line:
      return []
    else:
      command_name = line.partition(' ')[0].lower()
      usage = ''
      if command_name in self._commands:
        usage = self._commands[command_name].usage
      if usage:
        print
        print usage
        # Re-echo the prompt and the partial line so the user can keep typing.
        print '%s%s' % (self.prompt, line),
      return []

  def emptyline(self):
    """On an empty input line, list the available commands."""
    print 'Available commands:',
    print ' '.join(list(self._commands))

  def precmd(self, line):
    """Preprocess the shell input."""
    if line == 'EOF':
      return line
    # 'exit' and 'quit' are aliases for EOF (handled in do_EOF/postcmd).
    if line.startswith('exit') or line.startswith('quit'):
      return 'EOF'
    words = line.strip().split()
    # A bare command name (no args) shows its help instead of running it,
    # except for the few commands that are meaningful with no arguments.
    if len(words) == 1 and words[0] not in ['help', 'ls', 'version']:
      return 'help %s' % (line.strip(),)
    return line

  def onecmd(self, line):
    """Process a single command.

    Runs a single command, and stores the return code in
    self._last_return_code. Always returns False unless the command
    was EOF.

    Args:
      line: (str) Command line to process.

    Returns:
      A bool signaling whether or not the command loop should terminate.
    """
    try:
      self._last_return_code = cmd.Cmd.onecmd(self, line)
    except CommandLoop.TerminateSignal:
      return True
    except BaseException as e:
      # Report the failure but keep the shell alive.
      name = line.split(' ')[0]
      print 'Error running %s:' % name
      print e
      self._last_return_code = 1
    return False

  def get_names(self):
    """Names cmd.Cmd uses for help/completion; hides special commands."""
    names = dir(self)
    commands = (name for name in self._commands
                if name not in self._special_command_names)
    names.extend('do_%s' % (name,) for name in commands)
    names.remove('do_EOF')
    return names

  def do_help(self, command_name):
    """Print the help for command_name (if present) or general help."""

    # TODO(user): Add command-specific flags.
    def FormatOneCmd(name, command, command_names):
      # Render one command's help text; when listing several commands the
      # first line is aligned into a "name:  summary" column layout.
      indent_size = appcommands.GetMaxCommandLength() + 3
      if len(command_names) > 1:
        indent = ' ' * indent_size
        command_help = flags.TextWrap(
            command.CommandGetHelp('', cmd_names=command_names),
            indent=indent,
            firstline_indent='')
        first_help_line, _, rest = command_help.partition('\n')
        first_line = '%-*s%s' % (indent_size, name + ':', first_help_line)
        return '\n'.join((first_line, rest))
      else:
        default_indent = ' '
        return '\n' + flags.TextWrap(
            command.CommandGetHelp('', cmd_names=command_names),
            indent=default_indent,
            firstline_indent=default_indent) + '\n'

    if not command_name:
      print '\nHelp for commands:\n'
      command_names = list(self._commands)
      print '\n\n'.join(
          FormatOneCmd(name, command, command_names)
          for name, command in self._commands.iteritems()
          if name not in self._special_command_names)
      print
    elif command_name in self._commands:
      print FormatOneCmd(command_name, self._commands[command_name],
                         command_names=[command_name])
    return 0

  def postcmd(self, stop, line):
    # Terminate after do_EOF fired (stop truthy) or the line was 'EOF'.
    return bool(stop) or line == 'EOF'
# pylint: enable=g-bad-name
class Repl(NewCmd):
  """Start an interactive session."""

  PROMPT = '> '

  def __init__(self, name, fv):
    super(Repl, self).__init__(name, fv)
    # The repl command itself should not be offered inside the shell.
    self.surface_in_shell = False
    flags.DEFINE_string(
        'prompt', '',
        'Prompt to use for interactive shell.',
        flag_values=fv)

  def RunWithArgs(self):
    """Start an interactive session."""
    prompt = FLAGS.prompt or self.PROMPT
    repl = CommandLoop(appcommands.GetCommandList(), prompt=prompt)
    print 'Welcome! (Type help for more information.)'
    # Ctrl-C aborts the current input line and restarts the loop; only a
    # clean exit (EOF/exit/quit) breaks out.
    while True:
      try:
        repl.cmdloop()
        break
      except KeyboardInterrupt:
        print
    return repl.last_return_code
| {
"repo_name": "Plantain/sms-mailinglist",
"path": "lib/googlecloudapis/apitools/base/py/app2.py",
"copies": "5",
"size": "10484",
"license": "apache-2.0",
"hash": -5698443210297118000,
"line_mean": 29.2132564841,
"line_max": 80,
"alpha_frac": 0.6198969859,
"autogenerated": false,
"ratio": 3.83187134502924,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.695176833092924,
"avg_score": null,
"num_lines": null
} |
"""app/components/base
Abstract base class for components
#! candidate for moving into utils/components
"""
from typing import List, Dict, Union
from abc import ABC
from dash.development.base_component import ComponentMeta
from dash_html_components import Div
from penn_chime.model.parameters import Parameters
from chime_dash.app.utils.templates import read_localization_yml, read_localization_markdown
class Component(ABC):
    """Base component for rendering dash html objects and registering callbacks

    Attributes:
        localization_file: File name for rendering localized strings
        external_stylesheets: External stylesheets. Just a storage container.
        external_scripts: External scripts. Just a storage container.
    """

    # Subclasses point this at a ".yml" or ".md" localization template.
    localization_file: str = None
    external_stylesheets: List[str] = []
    external_scripts: List[str] = []

    def __init__(self, language: str = "en", defaults: Parameters = None):
        """Initializes the component

        Args:
            language: Language code used to select localized content.
            defaults: Model parameters available to the component.
        """
        self.language = language
        self.defaults = defaults
        # Lazily-populated caches backing the `content` and `html` properties.
        self._content = None
        self._html = None

    def get_html(self) -> List[ComponentMeta]:  # pylint: disable=R0201
        """Function which is called to render html elements.
        Should return a list of Dash components. Must be overwritten.
        """
        return Div("")

    @property
    def html(self) -> List[ComponentMeta]:
        """Accessor for `get_html` wrapped with Exception handling:

        Raises:
            HTMLComponentError: if any error occurs.
        """
        if self._html is None:
            try:
                self._html = self.get_html()
            except Exception as error:
                raise HTMLComponentError(self, error)
        return self._html

    @property
    def content(self) -> Union[str, Dict[str, str], None]:
        """Reads localization files and returns text (for md) or dict (for yml) files.
        Infers template location from `localization_file` and `language` attributes.

        Raises:
            KeyError: if `localization_file` has an unknown extension.
        """
        if self._content is None:
            if self.localization_file is None:
                self._content = {}
            else:
                if self.localization_file.endswith(".yml"):
                    self._content = read_localization_yml(
                        self.localization_file, self.language
                    )
                elif self.localization_file.endswith(".md"):
                    self._content = read_localization_markdown(
                        self.localization_file, self.language
                    )
                else:
                    # Bug fix: the message previously said literally 'file'
                    # because the "{file}" placeholder was missing, so the
                    # offending file name was never shown.
                    raise KeyError(
                        "Unknown content file extension '{file}'".format(
                            file=self.localization_file
                        )
                    )
        return self._content
class Page(Component):
    """Component that also wires up its dash callbacks on construction."""

    # Subclasses must set this to a callable that accepts the page instance;
    # it is invoked once in __init__.
    callbacks_cls = None

    def __init__(self, language: str = "en", defaults: Parameters = None):
        super().__init__(language, defaults)
        # Instantiate the callbacks class to register this page's callbacks.
        # NOTE(review): raises TypeError if a subclass leaves callbacks_cls
        # as None -- presumably every concrete Page sets it; confirm.
        self.callbacks_cls(self)
class HTMLComponentError(Exception):
    """Custom exception for errors when rendering component html.

    Original error is stored in `error` attribute.
    """

    def __init__(self, component: Component, error: Exception):
        """Builds the message from the failing component's state and stores
        both the component and the original error.
        """
        details = {
            "language": component.language,
            "localization_file": component.localization_file,
            "content": component.content,
            "defaults": component.defaults,
        }
        detail_lines = [
            "{key}: {value}".format(key=key, value=value)
            for key, value in details.items()
        ]
        header = (
            "{etype}->{error} while rendering HTML component {component}"
            .format(
                etype=error.__class__.__name__,
                error=error,
                component=component,
            )
        )
        super().__init__(
            header + "\n\nData:\n" + "\n -".join(detail_lines) + "\n"
        )
        self.component = component
        self.error = error
| {
"repo_name": "CodeForPhilly/chime",
"path": "src/chime_dash/app/components/base.py",
"copies": "1",
"size": "4180",
"license": "mit",
"hash": -1858815484225374700,
"line_mean": 31.65625,
"line_max": 92,
"alpha_frac": 0.5655502392,
"autogenerated": false,
"ratio": 4.7990815154994255,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0003544944007155901,
"num_lines": 128
} |
"""App config for questions app.
Primarily used to register signal handlers pertaining to habits and questions with the appropriate signals.
See: https://docs.djangoproject.com/en/1.7/ref/applications/
"""
from django.apps import AppConfig
from django.db.models.signals import pre_save, post_save
from habits.models import Habit, Schedule
from questions.models import (
Question,
UserHabitQuestion,
TimeFiringRule,
UserHabitQuestionFiringRule
)
from profiles.models import BasicProfile
from questions.signals.handlers import (
mark_habit_or_question_instance_association_with_all_users,
add_or_remove_habit_or_question_instance_association_with_all_users,
mark_questions_for_reschedule,
reschedule_questions)
class QuestionsConfig(AppConfig):
    """Overrides the default app config for the questions app."""

    name = 'questions'

    def ready(self):
        """Primarily used to register handlers with signals."""
        # Models whose saves (de)associate the instance with all users,
        # paired with the dispatch_uid prefix used for their handlers.
        association_senders = (
            (Habit, "habit_associate_users"),
            (Question, "question_associate_users"),
            (TimeFiringRule, "time_firing_rule_associate_users"),
        )
        # Models whose saves trigger question rescheduling.
        reschedule_senders = (
            (Schedule, "schedule_reschedule_questions"),
            (UserHabitQuestion, "user_habit_question_reschedule_questions"),
            (TimeFiringRule, "time_firing_rule_reschedule_questions"),
            (BasicProfile, "basic_profile_reschedule_questions"),
            (UserHabitQuestionFiringRule,
             "user_habit_question_firing_rule_reschedule_questions"),
        )

        # pre_save handlers
        for sender, uid_prefix in association_senders:
            pre_save.connect(
                mark_habit_or_question_instance_association_with_all_users,
                sender=sender,
                dispatch_uid=uid_prefix + "_pre_save",
            )
        for sender, uid_prefix in reschedule_senders:
            pre_save.connect(
                mark_questions_for_reschedule,
                sender=sender,
                dispatch_uid=uid_prefix + "_pre_save",
            )

        # post_save handlers
        for sender, uid_prefix in association_senders:
            post_save.connect(
                add_or_remove_habit_or_question_instance_association_with_all_users,
                sender=sender,
                dispatch_uid=uid_prefix + "_post_save",
            )
        for sender, uid_prefix in reschedule_senders:
            post_save.connect(
                reschedule_questions,
                sender=sender,
                dispatch_uid=uid_prefix + "_post_save",
            )
| {
"repo_name": "prathapsridharan/health_project",
"path": "health_project/questions/apps.py",
"copies": "1",
"size": "4054",
"license": "bsd-3-clause",
"hash": 3896629979883255300,
"line_mean": 35.8545454545,
"line_max": 107,
"alpha_frac": 0.6470152935,
"autogenerated": false,
"ratio": 3.9397473275024297,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.508676262100243,
"avg_score": null,
"num_lines": null
} |
"""App constants"""
import os
STUDENT_ROLE = 'student'
GRADER_ROLE = 'grader'
STAFF_ROLE = 'staff'
INSTRUCTOR_ROLE = 'instructor'
LAB_ASSISTANT_ROLE = 'lab assistant'
ROLE_DISPLAY_NAMES = {
STUDENT_ROLE: 'Student',
GRADER_ROLE: 'Reader',
STAFF_ROLE: 'Teaching Assistant',
INSTRUCTOR_ROLE: 'Instructor',
LAB_ASSISTANT_ROLE: 'Lab Assistant',
}
VALID_ROLES = [STUDENT_ROLE, LAB_ASSISTANT_ROLE, GRADER_ROLE, STAFF_ROLE,
INSTRUCTOR_ROLE]
STAFF_ROLES = [GRADER_ROLE, STAFF_ROLE, INSTRUCTOR_ROLE]
SCORE_KINDS = ['composition', 'correctness', 'effort', 'total', 'partner a', 'partner b',
'regrade', 'revision', 'checkpoint 1', 'checkpoint 2',
'private', 'autograder', 'error']
API_PREFIX = '/api'
OAUTH_SCOPES = ['all', 'email']
OAUTH_OUT_OF_BAND_URI = 'urn:ietf:wg:oauth:2.0:oob'
COMMON_LANGUAGES = ['python', 'java', 'c', 'scheme', 'lisp', 'javascript']
COURSE_ENDPOINT_FORMAT = '^[\w\-]+/[\w\-]+/(fa|sp|su|wi|au|yr)\d\d$'
ASSIGNMENT_ENDPOINT_FORMAT = COURSE_ENDPOINT_FORMAT[:-1] + '/\w+$'
GRADES_BUCKET = 'ok_grades_bucket'
TIMEZONE = 'America/Los_Angeles'
ISO_DATETIME_FMT = '%Y-%m-%d %H:%M:%S'
APPLICATION_ROOT = os.getenv('APPLICATION_ROOT', '/')
# The default autograder url
# Each course can configure their own autograder url in course.edit view
AUTOGRADER_URL = os.getenv('AUTOGRADER_URL', 'https://autograder.cs61a.org')
SENDGRID_KEY = os.getenv("SENDGRID_KEY")
FORBIDDEN_ROUTE_NAMES = [
'about',
'admin',
'api',
'comments',
'login',
'logout',
'oauth',
'rq',
'testing-login',
]
FORBIDDEN_ASSIGNMENT_NAMES = []
# Service Providers
GOOGLE = "GOOGLE"
MICROSOFT = "MICROSOFT"
# Maximum file size to show in browser, in characters
DIFF_SIZE_LIMIT = 64 * 1024 # 64KB
SOURCE_SIZE_LIMIT = 10 * 1024 * 1024 # 10MB
MAX_UPLOAD_FILE_SIZE = 25 * 1024 * 1024 # 25MB
# Email client format for to field
EMAIL_FORMAT = "{name} <{email}>"
| {
"repo_name": "Cal-CS-61A-Staff/ok",
"path": "server/constants.py",
"copies": "1",
"size": "1939",
"license": "apache-2.0",
"hash": -4779185798135428000,
"line_mean": 27.5147058824,
"line_max": 89,
"alpha_frac": 0.6518824136,
"autogenerated": false,
"ratio": 2.876854599406528,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9017193125178842,
"avg_score": 0.00230877756553711,
"num_lines": 68
} |
#app = contacts_and_people
from django.db import models
from django.db.utils import DatabaseError
from django.contrib.contenttypes import generic
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import User
from django.template.defaultfilters import slugify
from django.utils.functional import cached_property
from django.conf import settings
from django.core.urlresolvers import reverse
from cms.models import Page, CMSPlugin
from cms.models.fields import PlaceholderField
from mptt.models import MPTTModel, TreeForeignKey
from mptt.managers import TreeManager
from filer.fields.image import FilerImageField
from arkestra_utilities.mixins import URLModelMixin
from arkestra_utilities.text import concatenate
from arkestra_utilities.settings import (
MULTIPLE_ENTITY_MODE, ARKESTRA_BASE_ENTITY, DEFAULT_NEWS_PAGE_TITLE,
DEFAULT_CONTACTS_PAGE_TITLE, DEFAULT_VACANCIES_PAGE_TITLE,
DEFAULT_PUBLICATIONS_PAGE_TITLE
)
import news_and_events
class Site(models.Model):
    """Maintains a list of an institution's geographical sites"""

    site_name = models.CharField(max_length=50, unique=True)
    post_town = models.CharField(max_length=50)
    country = models.CharField(max_length=50)
    description = models.TextField(max_length=500, null=True, blank=True)

    class Meta:
        ordering = ('country', 'site_name', 'post_town')

    def __unicode__(self):
        return self.site_name

    def buildings(self):
        # Number of Buildings at this site (reverse FK related_name="place").
        return self.place.all().count()

    @property
    def maps(self):
        # Buildings at this site with complete map data (see Building.has_map).
        return [
            building for building in self.place.all() if building.has_map()
        ]
class BuildingManager(models.Manager):
    """Manager for Building; supports natural-key (slug) deserialization."""

    def get_by_natural_key(self, slug):
        # Fixtures/serialization identify Buildings by their unique slug.
        return self.get(slug=slug)
class Building(models.Model):
    """A physical location at a Site.

    The Building model should really be named Place.
    """

    objects = BuildingManager()

    name = models.CharField(max_length=100, null=True, blank=True)
    number = models.CharField(max_length=10, blank=True)
    street = models.CharField(
        "Street name", max_length=100,
        blank=True
    )
    additional_street_address = models.CharField(
        help_text=u"If required",
        max_length=100, null=True, blank=True)
    postcode = models.CharField(max_length=9, null=True, blank=True)
    site = models.ForeignKey(
        Site, on_delete=models.PROTECT, related_name="place"
    )
    # Regenerated from the display name on save when left blank.
    slug = models.SlugField(
        blank=True,
        help_text=u"Leave blank to regenerate; amend only if required",
        max_length=255, null=True, unique=True
    )
    image = FilerImageField(
        on_delete=models.SET_NULL,
        null=True, blank=True
    )
    # for the place page
    summary = models.TextField(
        verbose_name="Summary",
        max_length=256,
        default="",
        help_text="A very short description (maximum two lines)",
    )
    description = PlaceholderField(
        'body', related_name="building_description",
        help_text="A fuller description"
    )
    getting_here = PlaceholderField(
        'simple',
        related_name="getting_here",
        help_text="How to get here"
    )
    access_and_parking = PlaceholderField(
        'simple',
        related_name="building_access_and_parking",
        help_text="Where to park, how to get in, etc"
    )
    map = models.BooleanField(
        "Show map", default=False,
        help_text="Use Google Maps to <a target='_blank' \
        style='text-decoration: underline;' \
        href='http://universimmedia.pagesperso-orange.fr/geo/loc.htm'>look up\
        Latitude & Longitude</a>")
    latitude = models.FloatField(null=True, blank=True)
    longitude = models.FloatField(null=True, blank=True)
    zoom = models.IntegerField(blank=True, null=True, default=17)

    class Meta:
        ordering = ('site', 'street', 'number', 'name',)

    def __unicode__(self):
        """
        A text-friendly way of referring to a building
        """
        # Prefer the name, then "number street", then the postcode.
        if self.name:
            return self.name
        elif self.street:
            return concatenate([self.number, self.street], " ")
        else:
            return self.postcode

    @property
    def admin_identifier(self):
        # Disambiguates same-named buildings in the admin by adding the site.
        return u"%s (%s)" % (self.__unicode__(), unicode(self.site))

    def get_absolute_url(self):
        return reverse("contact-place", kwargs={"slug": self.slug})

    def save(self, *args, **kwargs):
        """Regenerate a blank slug, then save.

        Bug fix: the original signature was `save(self)`, which silently
        discarded Django's save() arguments (force_insert, using, etc.) and
        broke calls such as obj.save(using=...). They are now passed through.
        """
        # if the slug is blank, regenerate it
        if not self.slug:
            self.slug = slugify(self.__unicode__())
        super(Building, self).save(*args, **kwargs)

    @property
    def get_postal_address(self):
        """
        Assembles the postal (external) parts of an address
        """
        address = []
        if self.name:
            address.append(self.name)
        if self.number or self.street:
            address.append(
                concatenate(
                    strings=[self.number, self.street],
                    with_string=" "
                )
            )
        if self.additional_street_address:
            address.append(self.additional_street_address)
        # there will always be a site.post_town; no need to check
        fragments = concatenate(
            strings=[self.site.post_town, self.postcode],
            with_string=" "
        )
        address.append(fragments)
        return address

    def has_map(self):
        # True only when coordinates, zoom and the "Show map" flag are all set.
        return (
            self.latitude is not None and self.longitude is not None
            and self.zoom and self.map
        )
    has_map.boolean = True

    @cached_property
    def events(self):
        # invoke the lister to find out more
        lister = news_and_events.lister.EventsPlaceLister(
            place=self,
            entity=None,
            display="events",
            order_by="date",
            item_format="details image",
        )
        return lister

    @property
    def get_website(self):
        # Buildings have no website; mirrors the Entity API for templates.
        return None
class PhoneContact(models.Model):
    """A phone number attachable to any model via a generic foreign key."""

    LABEL_CHOICES = (
        ('', '-----'),
        ('Office', 'Office'),
        ('Laboratory', 'Laboratory'),
        ('Mobile', 'Mobile'),
        ('Fax', 'Fax'),
        ('Out of hours', 'Out of hours'),
        ('Pager', 'Pager'),
    )
    # NOTE(review): LABEL_CHOICES is not passed as choices= on `label` --
    # presumably consumed by a form elsewhere; confirm.
    label = models.CharField(max_length=64, null=True, blank=True)
    country_code = models.CharField(max_length=5, default="44")
    area_code = models.CharField(
        max_length=5, default="029", help_text="Not 02920"
    )
    number = models.CharField(max_length=12)
    internal_extension = models.CharField(max_length=6, null=True, blank=True)
    # Generic relation target: the owning object of this phone number.
    content_type = models.ForeignKey(ContentType)
    object_id = models.IntegerField(db_index=True)
    content_object = generic.GenericForeignKey()

    class Meta:
        ordering = ('label',)

    def __unicode__(self):
        return u"%s: %s" % (self.label, self.number)
class CommonFields(URLModelMixin):
    """Abstract mixin of shared contact/location fields (used by Entity)."""

    precise_location = models.CharField(
        help_text=u"Precise location <em>within</em> the building, \
        for visitors",
        max_length=255, null=True, blank=True
    )
    access_note = models.CharField(
        help_text=u"Notes on access/visiting hours/etc",
        max_length=255, null=True, blank=True
    )
    email = models.EmailField(
        verbose_name="Email address", null=True, blank=True
    )
    # All PhoneContacts attached to the model through the generic FK.
    phone_contacts = generic.GenericRelation(PhoneContact)
    image = FilerImageField(on_delete=models.SET_NULL, null=True, blank=True)

    class Meta:
        abstract = True
class EntityLite(models.Model):
    """Minimal named organisational unit; Entity below builds on this."""

    name = models.CharField(
        max_length=100, help_text="e.g. Department of Haematology"
    )

    def __unicode__(self):
        return unicode(self.name)
class EntityManager(TreeManager):
    """Manager for Entity; also resolves the site-wide base entity."""

    def get_by_natural_key(self, slug):
        # Natural-key (fixtures/serialization) lookups resolve by slug.
        return self.get(slug=slug)

    def base_entity(self):
        """Return the Entity configured as ARKESTRA_BASE_ENTITY, or None.

        Returns None when the database is not ready (e.g. during initial
        setup) or when the configured base entity does not exist.
        """
        try:
            # are Entities available at all? Forces a query; fails while the
            # database/tables are not ready.
            list(Entity.objects.all())
        except Exception:
            # Fix: was a bare "except:", which also trapped SystemExit and
            # KeyboardInterrupt. Database errors are still swallowed here
            # deliberately -- the database isn't ready.
            pass
        else:
            # we don't use default_entity (or default_entity_id) in
            # MULTIPLE_ENTITY_MODE
            try:
                entity = self.model.objects.get(id=ARKESTRA_BASE_ENTITY)
            # it can't be found, maybe because of a misconfiguation or because
            # we haven't added any Entities yet
            except (Entity.DoesNotExist, DatabaseError):
                pass
            else:
                return entity

    # only used in single-entity mode
    def default_entity_id(self):
        """Return ARKESTRA_BASE_ENTITY's id in single-entity mode, else None."""
        # Fix: the original tested `self.base_entity` -- the bound method
        # object, which is always truthy -- so the existence check never ran.
        # Call it so the id is only returned when the base entity exists.
        if self.base_entity() and not MULTIPLE_ENTITY_MODE:
            return ARKESTRA_BASE_ENTITY
class Entity(MPTTModel, EntityLite, CommonFields):
objects = EntityManager()
# URLModelMixin's get_absolute_url() requires a view_name
view_name = "contact-entity"
short_name = models.CharField(
blank=True, help_text="e.g. Haematology",
max_length=100, null=True, verbose_name="Short name for menus"
)
abstract_entity = models.BooleanField(
"abstract", default=False,
help_text=u"Select if this <em>group</em> of entities, but not an \
entity itself, or if it's just a grouping of people",)
parent = TreeForeignKey(
'self', null=True, blank=True, related_name='children'
)
display_parent = models.BooleanField(
u"Include parent entity's name in address", default=True,
help_text=u"Deselect if this entity recapitulates its parent's name"
)
building_recapitulates_entity_name = models.BooleanField(
default=False,
help_text=u"""
Removes the first line of the address - use to avoid, for
example:<br /><em>Department of Haematology<br />Haematology
Building<br />...</em>
"""
)
building = models.ForeignKey(
Building,
null=True, blank=True,
on_delete=models.SET_NULL,
help_text=u"Select the place where this Entity is based"
)
website = models.ForeignKey(
Page, verbose_name="Home page",
related_name='entity', unique=True, null=True, blank=True,
on_delete=models.SET_NULL,
help_text=u"Select the Page that is the home page of this Entity \
(leave blank if this is an external Entity)",)
auto_news_page = models.BooleanField(
u"Publish an automatic news & events page",
default=False,
)
news_page_menu_title = models.CharField(
u"Title",
max_length=50,
default=DEFAULT_NEWS_PAGE_TITLE
)
news_page_intro = PlaceholderField(
'body',
related_name="news_page_intro",
)
auto_contacts_page = models.BooleanField(
u"Publish an automatic contacts & people page", default=False,
)
contacts_page_menu_title = models.CharField(
u"Title",
max_length=50,
default=DEFAULT_CONTACTS_PAGE_TITLE,
)
contacts_page_intro = PlaceholderField(
'body',
related_name="contacts_page_intro",
help_text="Text for the Contacts & people page"
)
auto_vacancies_page = models.BooleanField(
u"Publish an automatic vacancies & studentships page",
default=False,
)
vacancies_page_menu_title = models.CharField(
u"Title",
max_length=50,
default=DEFAULT_VACANCIES_PAGE_TITLE,
)
vacancies_page_intro = PlaceholderField(
'body',
related_name="vacancies_page_intro",
)
if 'publications' in settings.INSTALLED_APPS:
auto_publications_page = models.BooleanField(
u"Publish a publications page for this entity automatcally",
default=False
)
publications_page_menu_title = models.CharField(
u"Title",
max_length=50,
default=DEFAULT_PUBLICATIONS_PAGE_TITLE,
)
    class Meta:
        verbose_name_plural = "Entities"
        # tree_id/lft are mptt tree columns: order entities in tree order
        ordering = ['tree_id', 'lft']
# def natural_key(self):
# return (self.slug)
    def __unicode__(self):
        # human-readable representation used in the admin and templates
        return self.name
@property
def get_real_ancestor(self):
"""
Find the nearest non-abstract Entity amongst this Entity's ancestors
"""
for ancestor in self.get_ancestors(ascending=True):
if not ancestor.abstract_entity:
return ancestor
@property
def get_building(self):
"""
Return the Building for this Entity (or its nearest parent)
"""
if self.abstract_entity:
return
elif self.building:
return self.building
else:
try:
return self.get_real_ancestor.get_building
except AttributeError:
return None
    @property
    def _get_institutional_address(self):
        """
        Lists the parts of an address within the institution (Section of YYY,
        Department of XXX and YYY, School of ZZZ)

        Returns None for an abstract entity, otherwise a list of ancestor
        names. Each entity's display_parent flag decides whether the
        chain continues to the next ancestor up.
        """
        if self.abstract_entity:
            return
        else:
            ancestors = []
            showparent = self.display_parent
            for entity in self.get_ancestors(ascending=True) \
                    .exclude(abstract_entity=True):
                if showparent:
                    ancestors.append(entity.name)
                # the ancestor's own flag controls the next link in the chain
                showparent = entity.display_parent
            return ancestors
@property
def get_full_address(self):
"""
Returns the full address of the entity
"""
if self.abstract_entity:
return []
else:
address = self._get_institutional_address
building = self.get_building
if building:
if self.building_recapitulates_entity_name:
address.extend(building.get_postal_address[1:])
else:
address.extend(building.get_postal_address)
return address
@property
def get_website(self):
"""
Return the Django CMS page that this Entity has attached to it (or to
its nearest parent)
"""
if self.website:
return self.website
else:
try:
return self.parent.get_website
except AttributeError:
return None
def get_website_url(self):
"""
Return the Django CMS page's url that this Entity has attached to it
(or to its nearest parent)
"""
if self.website:
return self.website.get_absolute_url()
elif self.external_url:
return self.external_url.url
elif self.parent:
# try
return self.parent.get_website_url()
else: # except
return None
def get_auto_page_url(self, view_name):
"""
Returns a URL not for the entity, but for its /contact page,
/news-and-events, or whatever.
If the entity is the base entity, doesn't add the entity slug to
the URL
"""
if not view_name:
return ""
# external entities don't have info pages
elif self.external_url:
return ""
# info pages for base entity
elif self == Entity.objects.base_entity():
return reverse(view_name)
# info pages for other entities
else:
return reverse(view_name, kwargs={"slug": self.slug})
def get_template(self):
"""
Returns a template for any pages that need to render based on this
entity
"""
if self.get_website:
return self.get_website.get_template()
else:
return settings.CMS_TEMPLATES[0][0]
def get_contacts(self):
"""
Return designated contacts for the entity
"""
return self.members.filter(
person__active=True,
key_contact=True
).order_by('importance_to_entity')
    def get_people_with_roles(self, key_members_only=False):
        """
        Publishes an ordered list of key members grouped by their most
        significant roles in the entity

        Ranks roles by importance to entity, then gathers people under that
        role

        Optionally, will return *all* members with roles
        """
        memberships = self.members.\
            filter(person__active=True).\
            exclude(role="").\
            order_by(
                '-importance_to_entity',
                'person__surname',
                'person__given_name'
            )
        if key_members_only:
            # "key" means importance_to_entity of at least 3
            memberships = memberships.filter(importance_to_entity__gte=3)
        # create a set with which to check for duplicates
        duplicates = set()
        membership_list = []
        for membership in memberships:
            # if this is the first time we've seen this role...
            if membership.role not in duplicates:
                # put this role on the duplicates list for future reference,
                # and add everyone with that role to the membership_list
                duplicates.add(membership.role)
                membership_list.extend(
                    memberships.filter(role=membership.role)
                )
        # returns a list of memberships, in the right order - we use a regroup
        # tag to group them by person in the template
        return membership_list
    def get_key_people(self):
        # convenience wrapper: only memberships that matter to the entity
        return self.get_people_with_roles(key_members_only=True)
    def get_roles_for_members(self, members):
        """
        Given a list of its members (as Persons), returns the best role for
        each.

        The roles returned are in alphabetical order by Person.

        Note: mutates each member in place, attaching the chosen
        Membership (or display role) as ``m.membership``, and returns the
        same list.
        """
        for m in members:
            ms = m.member_of
            # get the best named membership in the entity
            named_memberships = ms.filter(entity=self) \
                .exclude(role="").order_by('-importance_to_person')
            if named_memberships:
                m.membership = named_memberships[0]
            else:
                # see if there's a display_role membership - actually this one
                # should go first
                display_role_memberships = ms.filter(entity=self) \
                    .exclude(display_role=None). \
                    order_by('-importance_to_person',)
                if display_role_memberships:
                    m.membership = display_role_memberships[0].display_role
                else:
                    # find the best named membership anywhere we can
                    best_named_ms = ms.exclude(role="") \
                        .order_by('-importance_to_person',)
                    if best_named_ms:
                        m.membership = best_named_ms[0]
                    else:
                        # add the unnamed membership for this entity - it's
                        # all we have
                        unnamed_mss = ms.order_by('-importance_to_person',)
                        m.membership = unnamed_mss[0]
        return members
    def get_people(self, letter=None):
        """
        Publishes a list of every member, and of every member of all children

        Pass ``letter`` to restrict the list to surnames starting with it.
        """
        people = Person.objects.filter(
            active=True,
            member_of__entity__in=self.get_descendants(include_self=True)). \
            order_by('surname', 'given_name', 'middle_names').distinct()
        if letter:
            people = people.filter(surname__istartswith=letter)
        return people
def get_people_and_initials(self, letter=None):
"""
Returns a list of people and/or their initials for use in people lists
More than 20 people, or a letter was provided? Return initials
Fewer than 20 people? Return the people
"""
people = self.get_people(letter)
# letter or long list? show initials
if letter or people.count() > 20:
initials = set(person.surname[0].upper() for person in people)
initials = list(initials)
initials.sort()
# no letter but list is long? initials only
if not letter:
people = people[:0]
# no letter, short list? don't show initials
else:
initials = None
return (people, initials)
class Title(models.Model):
    """A personal title (e.g. a full honorific) and its abbreviation."""
    title = models.CharField(max_length=50, unique=True)
    abbreviation = models.CharField(max_length=20, unique=True)
    class Meta:
        ordering = ['title']
    def __unicode__(self):
        # the abbreviated form is what appears in rendered names
        return self.abbreviation
class PersonLite(models.Model):
    """
    Minimal person model: a title plus name parts. Subclassed by Person.
    """
    title = models.ForeignKey(
        'contacts_and_people.Title',
        blank=True, null=True,
        on_delete=models.SET_NULL)
    given_name = models.CharField(max_length=50, blank=True)
    middle_names = models.CharField(max_length=100, blank=True)
    surname = models.CharField(max_length=50)
    def __unicode__(self):
        # join only the non-empty name parts, so a missing given or
        # middle name doesn't leave stray spaces (resolves the old
        # "to-do: make it smarter" note; matches Person.__unicode__)
        return u" ".join(
            name_part for name_part in (
                self.given_name, self.middle_names, self.surname
            ) if name_part
        )
    def __getInitials(self):
        """Return dotted initials built from given and middle names."""
        if self.given_name != '' and self.middle_names != '':
            return self.given_name[0] + '.' + self.middle_names[0] + '.'
        elif self.given_name != '':
            return self.given_name[0] + '.'
        else:
            return ''
    # read-only convenience accessor, e.g. "J.R."
    initials = property(__getInitials,)
class PersonManager(models.Manager):
    """Manager providing Django's natural-key deserialization hook."""
    def get_by_natural_key(self, slug):
        # a Person is identified by slug when loading serialized fixtures
        return self.get(slug=slug)
class Person(PersonLite, CommonFields):
    """
    A member of the institution: PersonLite's name fields plus contact
    details, entity memberships and publishing machinery (CommonFields).
    """
    objects = PersonManager()
    # URLModelMixin's get_absolute_url() requires a view_name
    view_name = "contact-person"
    user = models.ForeignKey(
        User,
        related_name='person_user',
        unique=True,
        blank=True, null=True,
        verbose_name='Arkestra User',
        on_delete=models.PROTECT
    )
    institutional_username = models.CharField(
        max_length=10, blank=True, null=True
    )
    # inactive people are excluded from listings and contact queries
    active = models.BooleanField(default=True,)
    description = PlaceholderField('body')
    entities = models.ManyToManyField(
        Entity, related_name='people',
        through='Membership', blank=True, null=True
    )
    # only needed when this person's address differs from their entity's
    building = models.ForeignKey(
        Building,
        verbose_name='Specify building',
        help_text=u"""
        <strong>Only</strong> required if this Person's <strong>Home
        entity</strong> has a different address
        """,
        blank=True, null=True,
        on_delete=models.SET_NULL
    )
    override_entity = models.ForeignKey(
        Entity, verbose_name='Specify entity',
        help_text=u"""
        <strong>Temporarily specify</strong> an entity for contact
        information - over-rides entity and postal address
        """,
        related_name='people_override',
        blank=True, null=True,
        on_delete=models.SET_NULL
    )
    please_contact = models.ForeignKey(
        'self',
        help_text=u"""
        Publish another person's details as contact information for this
        person
        """,
        related_name='contact_for',
        blank=True, null=True,
        on_delete=models.SET_NULL)
    staff_id = models.CharField(null=True, blank=True, max_length=20)
    # when True, automated data feeds must not overwrite manual edits
    data_feed_locked = models.BooleanField(default=False)
    # def natural_key(self):
    #     return (self.slug)
    class Meta:
        ordering = ['surname', 'given_name', 'user']
        verbose_name_plural = "People"
    def __unicode__(self):
        # join title and name parts, skipping any that are empty
        title = self.title or ""
        return u" ".join(
            name_part for name_part in (
                unicode(title),
                self.given_name,
                self.surname
            ) if name_part
        )
    @property
    def get_role(self):
        """
        Returns a Membership object.

        Works the Membership object representing a Person's best role, which
        has to be in a real, not abstract, entity, and it must be at least
        Significant (gte = 2) to the person

        If it can't find any role, it returns None.
        """
        memberships = self.member_of.filter(
            entity__abstract_entity=False,
            importance_to_person__gte=2).order_by('-importance_to_person')
        if memberships:
            return memberships[0]
        else:  # the poor person had no memberships
            return None
    @property
    def get_entity(self):
        """
        Works out a person's best entity, based on get_role

        A person needs at least a named role to have an entity.
        """
        if self.override_entity and not self.override_entity.abstract_entity:
            return self.override_entity
        elif self.get_role:
            return self.get_role.entity
        return None
    def get_entity_short_name(self):
        # used as the "Entity" column in the admin (see short_description)
        if self.get_entity:
            return self.get_entity.short_name
        else:
            return u""
    get_entity_short_name.short_description = "Entity"
    @property
    def get_building(self):
        """
        Returns a Person's Building, if possible
        """
        if self.building:
            return self.building
        elif self.get_entity:
            return self.get_entity.get_building
    @property
    def get_full_address(self):
        """
        Works out a person's address, based on their home/best entity or
        information that overrides this
        """
        if self.get_entity:  # needs an entity to work
            if self.building:
                # personal building overrides the entity's building
                address = self.get_entity._get_institutional_address
                address.extend(self.building.get_postal_address)
                return address
            else:
                return self.get_entity.get_full_address
        else:
            return []
    def get_please_contact(self):
        """
        Works out whether to display someone else's contact details

        Follows the please_contact chain to its end.
        """
        if self.please_contact:
            return self.please_contact.get_please_contact()
        else:
            return self
    def get_phone_contacts(self):
        # phone numbers of whoever should actually be contacted
        return self.get_please_contact().phone_contacts.all()
    def get_email(self):
        # email of whoever should actually be contacted
        return self.get_please_contact().email
    @property
    def real_entity_memberships(self):
        # returns Memberships of non-abstract entities the person belongs to
        return self.member_of.filter(entity__abstract_entity=False)
    def gather_entities(self):
        """
        Returns all the entities that a person belongs to, including implicit
        membership
        """
        entitylist = set()
        for entity in self.entities.all():
            entitylist.add(entity)
            entitylist.update(entity.get_ancestors())
        # set(entity for entity in entitylist if not entity.abstract_entity)
        return entitylist
    def check_please_contact_has_loop(self, compare_to, person_list=None):
        """
        Walk the please_contact chain from this person; return a
        (has_loop, chain) tuple where has_loop is True if the chain leads
        back to compare_to.
        """
        if person_list is None:
            person_list = [compare_to]
        if not self == compare_to:
            person_list.append(self)
        if self.please_contact:
            if compare_to == self.please_contact:
                person_list.append(compare_to)
                return True, person_list
            else:
                return self.please_contact.check_please_contact_has_loop(
                    compare_to, person_list
                )
        else:
            return False, person_list
    def save(self, *args, **kwargs):
        """Save the Person, refusing to create a please_contact loop."""
        do_check_please_contact_loop = kwargs.pop(
            'do_check_please_contact_loop', True
        )
        # check_please_contact_has_loop returns a (bool, chain) tuple, so
        # test its first element - comparing the whole tuple with
        # "is True" can never match and silently disabled this guard
        if do_check_please_contact_loop and self.check_please_contact_has_loop(
                compare_to=self)[0] is True:
            raise Exception  # TODO: raise a more appropriate exception
        return super(Person, self).save(*args, **kwargs)
class Membership(models.Model):
    """
    Through-model linking a Person to an Entity, recording the role and
    how important that role is to the person and to the entity.
    """
    # choices for importance_to_person: 5 marks the person's "home" entity
    PERSON_DISPLAY_PRIORITY = (
        (1, 'No role'),
        (2, 'Significant'),
        (3, 'More significant'),
        (4, 'Very significant'),
        (5, 'Home'),
    )
    # choices for importance_to_entity: 3+ marks a key member
    ENTITY_DISPLAY_PRIORITY = (
        (1, 'No role'),
        (2, 'Has a role'),
        (3, 'Key member'),
        (4, 'Keyer member'),
        (5, 'Keyest member'),
    )
    person = models.ForeignKey(Person, related_name='member_of')
    entity = models.ForeignKey(Entity, related_name='members')
    # this is currently too complex to manage - in this version it remains
    # unused
    display_role = models.ForeignKey(
        'self',
        related_name="display_roles",
        null=True, blank=True,
        on_delete=models.SET_NULL)
    key_contact = models.BooleanField(default=False)
    role = models.CharField(max_length=50, null=True, blank=True)
    # how important the role is to the person
    importance_to_person = models.IntegerField(
        blank=True, null=True,
        choices=PERSON_DISPLAY_PRIORITY, default=1
    )
    # how important the role is to the entity
    importance_to_entity = models.IntegerField(
        blank=True, null=True,
        choices=ENTITY_DISPLAY_PRIORITY, default=1
    )
    class Meta:
        ordering = ('-importance_to_entity', 'person__surname')
    def __unicode__(self):
        if self.display_role:
            return "%s-%s" % (
                unicode(self.entity.short_name), unicode(self.display_role)
            )
        else:
            return unicode(self.role)
    def save(self, *args, **kwargs):
        """
        The rules:

        order                       importance_to_entity
        ---------------------       ---------------------
        has no role:    1           has no role:    1
        has a role:     2-4         has a role:     2
        home:           5           key member:     3-5
        """
        # if there's just one membership, make it home; if this one is home,
        # make home on all the others false
        memberships = self.person.member_of.all()
        if self.importance_to_person == 5:
            # demote any other "home" membership to 4 before claiming 5
            for membership in memberships:
                if membership.importance_to_person == 5:
                    membership.importance_to_person = 4
                    super(Membership, membership).save()
            self.importance_to_person = 5
        # if no role is set, then it can't be home or a key membership, and
        # orders must be the lowest
        if not self.role:
            self.importance_to_person = 1
        # if there is a role set, orders must be > 1
        else:
            # with a role, order must be at least 2
            if self.importance_to_person < 2:
                self.importance_to_person = 2
            # and importance_to_entity must be 2
            if self.importance_to_entity < 2:
                self.importance_to_entity = 2
        super(Membership, self).save(*args, **kwargs)
class EntityAutoPageLinkPluginEditor(CMSPlugin):
    """
    CMS plugin settings: a link to one of an entity's automatic pages
    (contacts, news, vacancies, publications).
    """
    # maps link key -> (default link text, URL name, Entity field holding
    # the menu title, Entity field that enables the page)
    AUTO_PAGES = {
        'contacts-and-people': (
            u'Contacts & people',
            'contact-entity',
            'contacts_page_menu_title',
            'auto_contacts_page'
        ),
        'news-and-events': (
            u'News & events',
            'news-and-events',
            'news_page_menu_title',
            'auto_news_page'
        ),
        'vacancies-and-studentships': (
            u'Vacancies & studentships',
            'vacancies-and-studentships',
            'vacancies_page_menu_title',
            'auto_vacancies_page'
        ),
        'publications': (
            u'Publications',
            'publications',
            'publications_page_menu_title',
            'auto_publications_page'),
    }
    link_to = models.CharField(
        max_length=50,
        choices=[(x, y[0]) for x, y in sorted(AUTO_PAGES.items())]
    )
    entity = models.ForeignKey(
        Entity, null=True, blank=True,
        help_text="Leave blank for autoselect",
        related_name="auto_page_plugin",
        on_delete=models.SET_NULL)
    text_override = models.CharField(
        max_length=256, null=True, blank=True,
        help_text="Override the default link text"
    )
class EntityDirectoryPluginEditor(CMSPlugin):
    """
    CMS plugin settings: a directory listing of an entity's sub-entities.
    """
    DIRECTORY_TYPE = (
        ('children', u'Immediate children only'),
        ('descendants', u'All descendants'),
    )
    entity = models.ForeignKey(
        Entity, null=True, blank=True,
        help_text="Leave blank for autoselect",
        related_name="directory_plugin",
        on_delete=models.SET_NULL
    )
    # how many levels of the tree to show
    levels = models.PositiveSmallIntegerField(
        help_text=u'Leave blank/set to 0 to display all sub-levels',
        null=True, blank=True
    )
    display_descriptions_to_level = models.PositiveSmallIntegerField(
        default=0,
        help_text=u'Blank for all levels, 0 for none, 1 for first', null=True,
        blank=True
    )
    link_icons = models.BooleanField(
        help_text=u"Display link icons (first level only)",
        default=True
    )
    use_short_names = models.BooleanField(default=True)
class EntityMembersPluginEditor(CMSPlugin):
    """CMS plugin settings: a listing of an entity's members."""
    entity = models.ForeignKey(
        Entity, null=True, blank=True,
        help_text="Leave blank for autoselect",
        related_name="entity_members_plugin",
        on_delete=models.SET_NULL
    )
# try:
# mptt.register(Entity)
# except mptt.AlreadyRegistered:
# pass
# default_entity_id is used to autofill the default entity where required,
# when MULTIPLE_ENTITY_MODE = False
# default_entity is used throughout the system
# make default_entity and default_entity_id available
# default_entity = Entity.objects.base_entity() # get it from the Entity
# custom manager method
# if default_entity and not MULTIPLE_ENTITY_MODE:
# default_entity_id = ARKESTRA_BASE_ENTITY
# else:
# default_entity_id = None
# crazymaniac's wild monkeypatch#
# """
# THE FOLLOWING CODE IS A LOADED GUN AND MAY VERY WELL BACKFIRE.
#
# I STRONGLY ADVISE AGAINST USING THIS CODE AND IF YOU STILL WANT TO USE IT,
# YOU ARE
# DOING SO AT YOUR OWN RISK.
# """
#
# from cms.admin.forms import PageForm
# from cms.admin.pageadmin import PageAdmin
# set up the attributes of the the meta_description in the PageForm
# PageForm.base_fields['meta_description'].required = True
# PageForm.base_fields['meta_description'].label = "Summary"
# PageForm.base_fields['meta_description'].help_text = \
# "A <em>brief</em> (25-30 words maximum) summary of the page's message or
# contents in the clearest, simplest language possible."
# get the SEO settings fields
# tmp = list(PageAdmin.fieldsets[4][1]['fields'])
# we can't amend the fieldsets tuple itself, so we'll just leave the SEO
# fieldset blank
# this is in fact a good metaphor for the empty nature of SEO
# tmp.remove('meta_keywords')
# tmp.remove('meta_description')
# tmp.remove('page_title')
# PageAdmin.fieldsets[4][1]['fields'] = tmp
# rescue the meta_description field from its undeserved obscurity
# and put it in the first fieldset on the page
# PageAdmin.fieldsets[0][1]['fields'].insert(1, 'meta_description')
# page_title really belongs in the Advanced settings fieldset
# PageAdmin.fieldsets[03][1]['fields'].insert(1, 'page_title')
| {
"repo_name": "bubenkoff/Arkestra",
"path": "contacts_and_people/models.py",
"copies": "2",
"size": "35922",
"license": "bsd-2-clause",
"hash": 6359039068742092000,
"line_mean": 32.3847583643,
"line_max": 79,
"alpha_frac": 0.5892210901,
"autogenerated": false,
"ratio": 4.122805004016986,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00010473506670434865,
"num_lines": 1076
} |
#!~\app\core\AlchemyEncoder.py
#
'''
class转JSON
'''
# from flask import json
import json
import datetime
import decimal
from sqlalchemy.ext.declarative import DeclarativeMeta
class AlchemyEncoder(json.JSONEncoder):
    '''JSON encoder that serializes SQLAlchemy declarative model instances.'''
    def default(self, obj):
        if isinstance(obj.__class__, DeclarativeMeta):
            # an SQLAlchemy class
            fields = {}
            # skip private attributes and SQLAlchemy bookkeeping attributes
            useFields=[x for x in obj.__dict__ if not x.startswith('_') and x != 'parent' and x != 'metadata' and x != "query" and x != "query_class"]
            for field in useFields:
                try:
                    data = getattr(obj, field)
                except:
                    data=None
                # None and empty-string values are omitted from the output
                if data!=None and data !='':
                    try:
                        if hasattr(data,"__dict__"):
                            # nested object (e.g. a relationship): skip it
                            pass
                        elif isinstance(data, decimal.Decimal):
                            fields[field] = float(data)
                        elif isinstance(data, datetime.datetime):
                            fields[field] = data.strftime("%Y-%m-%d %H:%M")
                        elif isinstance(data, datetime.date):
                            fields[field] = data.strftime("%Y-%m-%d")
                        elif isinstance(data, datetime.timedelta):
                            # render a duration as a time-of-day ISO string
                            fields[field] = (datetime.datetime.min + data).time().isoformat()
                        else:
                            json.dumps(data) # this will fail on non-encodable values, like other classes
                            fields[field] = data
                    except TypeError: # unencodable value: store None instead
                        fields[field] = None
            return fields
        return json.JSONEncoder.default(self, obj) | {
"repo_name": "wengzhilai/family",
"path": "iSoft/core/AlchemyEncoder.py",
"copies": "1",
"size": "1842",
"license": "bsd-3-clause",
"hash": 8176588842107980000,
"line_mean": 38.3695652174,
"line_max": 150,
"alpha_frac": 0.4767955801,
"autogenerated": false,
"ratio": 4.713541666666667,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5690337246766667,
"avg_score": null,
"num_lines": null
} |
# app/data.py
from flask.ext.sqlalchemy import SQLAlchemy
# shared SQLAlchemy handle used by the models and helpers below
db = SQLAlchemy()
class CRUDMixin(object):
    """
    Mixin adding an integer primary key and create/read/update/delete
    convenience methods to SQLAlchemy models.
    """
    __table_args__ = {'extend_existing': True}

    id = db.Column(db.Integer, primary_key=True)

    @classmethod
    def create(cls, commit=True, **kwargs):
        """Instantiate the model with **kwargs and save it."""
        instance = cls(**kwargs)
        return instance.save(commit=commit)

    @classmethod
    def get(cls, id):
        """Return the instance with the given primary key, or None."""
        return cls.query.get(id)

    @classmethod
    def get_or_404(cls, id):
        """Return the instance with the given primary key or abort 404."""
        return cls.query.get_or_404(id)

    def update(self, commit=True, **kwargs):
        """Set each keyword argument as an attribute; save if commit."""
        # items() instead of the Python-2-only iteritems()
        for attr, value in kwargs.items():
            setattr(self, attr, value)
        # save() returns self, so both branches return the instance
        return self.save() if commit else self

    def save(self, commit=True):
        """Add the instance to the session and optionally commit."""
        db.session.add(self)
        if commit:
            db.session.commit()
        return self

    def delete(self, commit=True):
        """Delete the instance, committing immediately by default."""
        db.session.delete(self)
        return commit and db.session.commit()
def query_to_list(query, include_field_names=True):
    """Generate list rows from a SQLAlchemy query.

    Yields the column-name list first (unless include_field_names is
    False), then one list of values per result row.
    """
    field_order = []
    for index, row in enumerate(query.all()):
        if index == 0:
            field_order = [column.name for column in row.__table__.columns]
            if include_field_names:
                yield field_order
        yield obj_to_list(row, field_order)


def obj_to_list(sa_obj, field_order):
    """Takes a SQLAlchemy object - returns a list of all its data"""
    values = []
    for field_name in field_order:
        values.append(getattr(sa_obj, field_name, None))
    return values
| {
"repo_name": "ChristopherGS/sensor_readings",
"path": "app/data.py",
"copies": "2",
"size": "1496",
"license": "bsd-3-clause",
"hash": 7612089036492883000,
"line_mean": 26.2,
"line_max": 76,
"alpha_frac": 0.6129679144,
"autogenerated": false,
"ratio": 3.6666666666666665,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5279634581066667,
"avg_score": null,
"num_lines": null
} |
"""appdecodeweb URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib.auth.decorators import user_passes_test
from django.contrib.auth.views import login
import views
# wraps a view so already-authenticated users are sent to /user/home
login_forbidden = user_passes_test(lambda u: u.is_anonymous(), '/user/home')
# URL routes for the login app
urlpatterns = [
    url(r'^$', login_forbidden(login), {'template_name': 'loginapp/templates/login.html'}),
    url(r'^logout/$', views.logout_page, name='logout'),
    url(r'^accounts/login/$', login_forbidden(login), {'template_name': 'loginapp/login.html'}, name='login'),  # named 'login' route; unauthenticated users are redirected here
    url(r'^register/$', login_forbidden(views.register), name='register'),
    url(r'^register/success/$', views.register_success, name='register_success'),
    url(r'^home/$', views.home, name='home'),
]
| {
"repo_name": "amitt001/Analytics-App",
"path": "appdecodeweb/loginapp/urls.py",
"copies": "1",
"size": "1397",
"license": "mit",
"hash": -2133708601018016000,
"line_mean": 42.65625,
"line_max": 164,
"alpha_frac": 0.6986399427,
"autogenerated": false,
"ratio": 3.4925,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.46911399427,
"avg_score": null,
"num_lines": null
} |
"""App Decorators Module."""
# standard library
import inspect
from typing import List, Union
# third-party
import wrapt
# first-party
from tcex.validators import (
ValidationError,
equal_to,
greater_than,
greater_than_or_equal,
in_range,
less_than,
less_than_or_equal,
not_in,
to_bool,
to_float,
to_int,
)
class IterateOnArg:
    """Iterate on values stored in ``self.args`` namespace.

    This decorator will iterate over all value of the supplied arg and return results. This feature
    is helpful when processing a single value (String) or array of values (StringArray). If the App
    was provided the arg ``self.args.colors`` with a value of ``['blue', 'green', 'magenta']``, then
    this decorator would call the function 3 times. Each call to the function would pass one value
    from the array. The return values are stored and returned all at once after the last value is
    processed.

    .. code-block:: python
        :linenos:
        :lineno-start: 1

        # the args value of "colors" refers to ``self.args.colors``.
        @iterateOnArgs(arg='colors')
        def my_method(colors):
            return colors

        # ** OR **

        # the args value of "colors" refers to ``self.args.colors``.
        @iterateOnArg(arg='colors')
        def my_method(**kwargs):
            return kwargs.get('colors')

    Args:
        arg (str): The arg name from the App which contains the input. This input can be
            a Binary, BinaryArray, KeyValue, KeyValueArray, String, StringArray, TCEntity, or
            TCEntityArray.
        default (str, kwargs): Defaults to None. Default value to pass to method if arg
            value is None. Only supported for String or StringArray.
        fail_enabled (boolean|str, kwargs): Accepts a boolean or string value. If a boolean value
            is provided that value will control enabling/disabling this feature. A string
            value should reference an item in the args namespace which resolves to a boolean.
            The value of this boolean will control enabling/disabling this feature.
        fail_on (list, kwargs): Defaults to None. Fail if data read from Redis is in list.
    """

    def __init__(self, arg, **kwargs):
        """Initialize Class properties"""
        self.arg = arg
        self.default = kwargs.get('default')
        self.fail_enabled = kwargs.get('fail_enabled', False)
        self.fail_msg = kwargs.get('fail_msg')
        self.fail_on = kwargs.get('fail_on', [])
        # copy the provided lists so caller-owned lists are never mutated
        self.transforms: List[callable] = list(kwargs.get('transforms', []))
        self.validators: List[callable] = list(kwargs.get('validators', []))
        if self.fail_on:
            self.validators.insert(0, not_in(self.fail_on))
        self._init_validators(**kwargs)

    def _init_validators(self, **kwargs):
        """Append validators/transforms declared via shorthand kwargs."""
        validators_map = {
            'in_range': in_range,
            'equal_to': equal_to,
            'less_than': less_than,
            'less_than_or_equal': less_than_or_equal,
            'greater_than': greater_than,
            'greater_than_or_equal': greater_than_or_equal,
        }
        for k, v in filter(lambda item: item[0] in validators_map, kwargs.items()):
            validator = validators_map.get(k)
            # a list supplies *args, a dict **kwargs, otherwise a single arg
            if isinstance(v, list):
                self.validators.append(validator(*v))
            elif isinstance(v, dict):
                self.validators.append(validator(**v))
            else:
                self.validators.append(validator(v))

        transform_map = {
            'to_int': to_int,
            'to_float': to_float,
            'to_bool': to_bool,
        }
        for transform_name in filter(lambda t: t in transform_map, kwargs.keys()):
            transform = transform_map.get(transform_name)
            transform_args = kwargs.get(transform_name)
            if isinstance(transform_args, list):
                self.transforms.append(transform(*transform_args))
            elif isinstance(transform_args, dict):
                self.transforms.append(transform(**transform_args))
            else:
                self.transforms.append(transform())

    @wrapt.decorator
    def __call__(self, wrapped, instance, args, kwargs):
        """Implement __call__ function for decorator.

        Args:
            wrapped (callable): The wrapped function which in turns
                needs to be called by your wrapper function.
            instance (App): The object to which the wrapped
                function was bound when it was called.
            args (list): The list of positional arguments supplied
                when the decorated function was called.
            kwargs (dict): The dictionary of keyword arguments
                supplied when the decorated function was called.

        Returns:
            function: The custom decorator function.
        """

        def loop(app, *args, **kwargs):
            """Iterate over data, calling the decorated function for each value.

            Args:
                app (class): The instance of the App class "self".
            """

            def fail(message):
                """Log the failure and exit the App, honoring fail_msg."""
                app.tcex.log.error(message)
                if self.fail_msg:
                    app.exit_message = self.fail_msg  # for test cases
                    app.tcex.exit(1, self.fail_msg)
                else:
                    app.exit_message = message
                    app.tcex.exit(1, message)

            def validation_message(value, label, error):
                """Build the standard invalid-value error message."""
                value_formatted = f'"{value}"' if isinstance(value, str) else str(value)
                return f'Invalid value ({value_formatted}) found for "{label}": {error.message}'

            # retrieve the label for the current Arg
            label = app.tcex.ij.params_dict.get(self.arg, {}).get('label')

            # get the signature for the decorated method
            fn_signature = inspect.signature(wrapped, follow_wrapped=True).parameters

            # self.enable (e.g., True or 'fail_on_false') enables/disables this feature
            enabled = self.fail_enabled
            if not isinstance(self.fail_enabled, bool):
                enabled = getattr(app.args, self.fail_enabled)
                if not isinstance(enabled, bool):  # pragma: no cover
                    raise RuntimeError(
                        'The fail_enabled value must be a boolean or resolved to bool.'
                    )
            app.tcex.log.debug(f'Fail enabled is {enabled} ({self.fail_enabled}).')

            # retrieve data from Redis if variable and always return and array.
            results = []
            arg_data = app.tcex.playbook.read(getattr(app.args, self.arg))
            arg_type = app.tcex.playbook.variable_type(getattr(app.args, self.arg))
            if arg_data is None:
                arg_data = [None]
            elif not isinstance(arg_data, list):
                arg_data = [arg_data]

            _index = 0
            _array_length = len(arg_data)
            for ad in arg_data:
                # add "magic" args
                if '_index' in fn_signature:
                    kwargs['_index'] = _index
                if '_array_length' in fn_signature:
                    kwargs['_array_length'] = _array_length

                if ad is None and self.default is not None:
                    # set value passed to method to default if value is None.
                    ad = self.default
                    app.tcex.log.debug(
                        f'replacing null value with provided default value "{self.default}".'
                    )

                try:
                    for transform in self.transforms:
                        ad = transform(ad, self.arg, label)
                except ValidationError as v:
                    fail(validation_message(ad, label, v))

                # check ad against fail_on_values
                if enabled:
                    try:
                        # run each validator for its side effect only; a
                        # failing value raises ValidationError (previously a
                        # throwaway list comprehension was built here)
                        for validator in self.validators:
                            validator(ad, self.arg, label)
                    except ValidationError as v:
                        fail(validation_message(ad, label, v))

                # Add logging for debug/troubleshooting
                if (
                    arg_type not in ['Binary', 'BinaryArray']
                    and app.tcex.log.getEffectiveLevel() <= 10
                ):
                    log_string = str(ad)
                    if len(log_string) > 100:  # pragma: no cover
                        log_string = f'{log_string[:100]} ...'
                    app.tcex.log.debug(f'input value: {log_string}')

                # add results to kwargs
                kwargs[self.arg] = ad
                results.append(wrapped(*args, **kwargs))

                # increment index
                _index += 1
            return results

        return loop(instance, *args, **kwargs)
| {
"repo_name": "kstilwell/tcex",
"path": "tcex/decorators/iterate_on_arg.py",
"copies": "1",
"size": "9362",
"license": "apache-2.0",
"hash": 380250635784333250,
"line_mean": 40.0614035088,
"line_max": 100,
"alpha_frac": 0.5447553941,
"autogenerated": false,
"ratio": 4.4411764705882355,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0013550369529137533,
"num_lines": 228
} |
"""App Decorators Module."""
# standard library
import traceback
# third-party
import wrapt
class OnException:
"""Set exit message on failed execution.
This decorator will catch the generic "Exception" error, log the supplied error message, set
the "exit_message", and exit the App with an exit code of 1.
.. code-block:: python
:linenos:
:lineno-start: 1
@OnException(exit_msg='Failed to process JSON data.')
def my_method(json_data):
json.dumps(json_data)
Args:
exit_msg (str): The message to send to exit method.
exit_enabled (boolean|str, kwargs): Accepts a boolean or string value. If a boolean value
is provided that value will control enabling/disabling this feature. A string
value should reference an item in the args namespace which resolves to a boolean.
The value of this boolean will control enabling/disabling this feature.
write_output (boolean): default True.
If enabled, will call app.write_output() when an exception is raised.
"""
def __init__(self, exit_msg=None, exit_enabled=True, write_output=True):
"""Initialize Class properties"""
self.exit_enabled = exit_enabled
self.exit_msg = exit_msg or 'An exception has been caught. See the logs for more details.'
self.write_output = write_output
@wrapt.decorator
def __call__(self, wrapped, instance, args, kwargs):
"""Implement __call__ function for decorator.
Args:
wrapped (callable): The wrapped function which in turns
needs to be called by your wrapper function.
instance (App): The object to which the wrapped
function was bound when it was called.
args (list): The list of positional arguments supplied
when the decorated function was called.
kwargs (dict): The dictionary of keyword arguments
supplied when the decorated function was called.
Returns:
function: The custom decorator function.
"""
def exception(app, *args, **kwargs): # pylint: disable=inconsistent-return-statements
"""Call the function and handle any exception.
Args:
app (class): The instance of the App class "self".
"""
# self.enable (e.g., True or 'fail_on_false') enables/disables this feature
enabled = self.exit_enabled
if not isinstance(self.exit_enabled, bool):
enabled = getattr(app.args, self.exit_enabled)
if not isinstance(enabled, bool): # pragma: no cover
raise RuntimeError(
'The exit_enabled value must be a boolean or resolved to bool.'
)
app.tcex.log.debug(f'Fail enabled is {enabled} ({self.exit_enabled}).')
try:
return wrapped(*args, **kwargs)
except Exception:
app.tcex.log.error(traceback.format_exc())
app.exit_message = self.exit_msg # for test cases
if enabled:
if self.write_output:
app.tcex.playbook.write_output()
if hasattr(app, 'write_output'):
app.write_output()
app.tcex.exit(1, self.exit_msg)
return exception(instance, *args, **kwargs)
| {
"repo_name": "ThreatConnect-Inc/tcex",
"path": "tcex/decorators/on_exception.py",
"copies": "1",
"size": "3495",
"license": "apache-2.0",
"hash": -2333689307785447000,
"line_mean": 40.1176470588,
"line_max": 98,
"alpha_frac": 0.591702432,
"autogenerated": false,
"ratio": 4.6414342629482075,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5733136694948208,
"avg_score": null,
"num_lines": null
} |
"""App Decorators Module."""
# third-party
import wrapt
class FailOnOutput:
    """Fail App if return value (output) value conditions are met.

    Exits the App (code 1) when the wrapped method's return value — or any
    element of it, when a list is returned — appears in ``fail_on``.

    .. code-block:: python
        :linenos:
        :lineno-start: 1

        @FailOnOutput(
            fail_on=['false'], fail_msg='Operation returned a value of "false".'
        )
        def my_method(data):
            return data.lowercase()

    Args:
        fail_enabled (boolean|str, kwargs): A boolean, or the name of an item
            in the args namespace resolving to a boolean, that controls
            whether this feature is enabled.
        fail_msg (str, kwargs): The message to log when raising RuntimeError.
        fail_msg_property (str, kwargs): The App property containing the dynamic exit message.
        fail_on (list, kwargs): Defaults to None.
            Fail if return value from App method is in the list.
        write_output (bool, kwargs): Defaults to True.
            If true, will call App.write_outputs() before failing on matched fail_on value.
    """

    def __init__(self, **kwargs):
        """Initialize Class properties."""
        self.fail_enabled = kwargs.get('fail_enabled', True)
        self.fail_msg = kwargs.get('fail_msg', 'Method returned invalid output.')
        self.fail_msg_property = kwargs.get('fail_msg_property')
        self.fail_on = kwargs.get('fail_on', [])
        self.write_output = kwargs.get('write_output', True)

    @wrapt.decorator
    def __call__(self, wrapped, instance, args, kwargs):
        """Implement __call__ function for decorator.

        Args:
            wrapped (callable): The wrapped function which in turns
                needs to be called by your wrapper function.
            instance (App): The object to which the wrapped
                function was bound when it was called.
            args (list): The list of positional arguments supplied
                when the decorated function was called.
            kwargs (dict): The dictionary of keyword arguments
                supplied when the decorated function was called.

        Returns:
            function: The custom decorator function.
        """

        def enforce(app, *args, **kwargs):
            """Call the function and fail the App on a matching return value.

            Args:
                app (class): The instance of the App class "self".
            """
            data = wrapped(*args, **kwargs)

            # resolve the enabled flag; a string value names an attribute on
            # the args namespace that must itself resolve to a boolean
            enabled = self.fail_enabled
            if not isinstance(self.fail_enabled, bool):
                enabled = getattr(app.args, self.fail_enabled)
                if not isinstance(enabled, bool):  # pragma: no cover
                    raise RuntimeError(
                        'The fail_enabled value must be a boolean or resolved to bool.'
                    )
            app.tcex.log.debug(f'Fail enabled is {enabled} ({self.fail_enabled}).')

            if enabled:
                # a list return fails if ANY element matches fail_on
                candidates = data if isinstance(data, list) else [data]
                if any(item in self.fail_on for item in candidates):
                    if self.write_output:
                        app.tcex.playbook.write_output()
                        if hasattr(app, 'write_output'):
                            app.write_output()
                    app.exit_message = self.get_fail_msg(app)  # for test cases
                    app.tcex.exit(1, self.get_fail_msg(app))
            return data

        return enforce(instance, *args, **kwargs)

    def get_fail_msg(self, app):
        """Return the appropriate fail message."""
        # a dynamic message property on the App wins over the static message
        if self.fail_msg_property and hasattr(app, self.fail_msg_property):
            return getattr(app, self.fail_msg_property)
        return self.fail_msg
| {
"repo_name": "ThreatConnect-Inc/tcex",
"path": "tcex/decorators/fail_on_output.py",
"copies": "2",
"size": "4436",
"license": "apache-2.0",
"hash": -950284157555992300,
"line_mean": 40.0740740741,
"line_max": 98,
"alpha_frac": 0.5651487827,
"autogenerated": false,
"ratio": 4.654774396642183,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0012553507854862751,
"num_lines": 108
} |
"""App Decorators Module."""
# third-party
import wrapt
class Output:
    """Store the method return value in self.<attribute>.

    Writes, appends, or extends the wrapped method's return value onto the App
    attribute named by ``attribute``. The attribute must already be defined in
    the App's ``__init__()`` before the decorator runs.

    .. code-block:: python
        :linenos:
        :lineno-start: 1

        def __init__(self, _tcex):
            super(App, self).__init__(_tcex)
            self.output_strings = []  # Output decorator writes here.

        @Output(attribute='output_strings')
        def my_method(data):
            return data.lowercase()

    Args:
        attribute (str): The name of the App attribute to write data.
        overwrite (bool): When True, repeated calls replace the previous value
            instead of accumulating.
    """

    def __init__(self, attribute, overwrite=False):
        """Initialize Class properties"""
        self.attribute = attribute
        self.overwrite = overwrite

    @wrapt.decorator
    def __call__(self, wrapped, instance, args, kwargs):
        """Implement __call__ function for decorator.

        Args:
            wrapped (callable): The wrapped function which in turns
                needs to be called by your wrapper function.
            instance (App): The object to which the wrapped
                function was bound when it was called.
            args (list): The list of positional arguments supplied
                when the decorated function was called.
            kwargs (dict): The dictionary of keyword arguments
                supplied when the decorated function was called.

        Returns:
            function: The custom decorator function.
        """

        def record(app, *args, **kwargs):
            """Call the function and store or append return value.

            Args:
                app (class): The instance of the App class "self".
            """
            data = wrapped(*args, **kwargs)
            current = getattr(app, self.attribute)

            # tracker flag: has this attribute been written at least once?
            tracker_name = f'__{self.attribute}_tracker__'
            previously_set = getattr(app, tracker_name, False)

            if self.overwrite:
                # replace any previous value
                setattr(app, self.attribute, data)
            elif isinstance(current, list):
                # accumulate onto an existing list attribute
                if isinstance(data, list):
                    current.extend(data)
                else:
                    current.append(data)
            elif previously_set:
                # scalar attribute written before: promote it to a list
                merged = [current]
                if isinstance(data, list):
                    merged.extend(data)
                else:
                    merged.append(data)
                setattr(app, self.attribute, merged)
            else:
                # first write of a scalar attribute
                setattr(app, self.attribute, data)

            # mark the attribute as written for subsequent calls
            setattr(app, tracker_name, True)
            return data

        return record(instance, *args, **kwargs)
| {
"repo_name": "kstilwell/tcex",
"path": "tcex/decorators/output.py",
"copies": "2",
"size": "3482",
"license": "apache-2.0",
"hash": 7295546613414043000,
"line_mean": 36.0425531915,
"line_max": 94,
"alpha_frac": 0.5784032165,
"autogenerated": false,
"ratio": 4.890449438202247,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6468852654702247,
"avg_score": null,
"num_lines": null
} |
"""App Decorators Module."""
# third-party
import wrapt
class WriteOutput:
    """Write the App output variables to Redis.

    Takes the wrapped method's return value and writes it to Redis using the
    key and variable_type. An optional ``default`` replaces a None return.
    When the same variable has already been written, ``overwrite`` decides
    whether the new value replaces the old one.

    Intended for very simple Apps; the App template's ``write_output()``
    method is the recommended way of writing output data.

    .. code-block:: python
        :linenos:
        :lineno-start: 1

        @WriteOutput(key='color', variable_type='String')
        def my_method(color):
            return color.lowercase()

    Args:
        key (str): The name of the playbook output variable.
        variable_type (str): The type for the playbook output variable. Supported types are:
            String, Binary, KeyValue, TCEntity, TCEnhancedEntity, StringArray,
            BinaryArray, KeyValueArray, TCEntityArray, TCEnhancedEntityArray.
        default (str): If the method return is None use the provided value as a default.
        overwrite (bool): When True and more than one value is provided for the same variable
            the previous value will be overwritten.
    """

    def __init__(self, key, variable_type, default=None, overwrite=True):
        """Initialize Class properties"""
        self.key = key
        self.variable_type = variable_type
        self.default = default
        self.overwrite = overwrite

    @wrapt.decorator
    def __call__(self, wrapped, instance, args, kwargs):
        """Implement __call__ function for decorator.

        Args:
            wrapped (callable): The wrapped function which in turns
                needs to be called by your wrapper function.
            instance (App): The object to which the wrapped
                function was bound when it was called.
            args (list): The list of positional arguments supplied
                when the decorated function was called.
            kwargs (dict): The dictionary of keyword arguments
                supplied when the decorated function was called.

        Returns:
            function: The custom decorator function.
        """

        def store(app, *args, **kwargs):
            """Call the function and write its return value as output.

            Args:
                app (class): The instance of the App class "self".
            """
            data = wrapped(*args, **kwargs)
            if data is None and self.default is not None:
                data = self.default

            # only skip writing when a value already exists AND overwrite is off
            index = f'{self.key}-{self.variable_type}'
            already_written = app.tcex.playbook.output_data.get(index)
            if self.overwrite or not already_written:
                app.tcex.playbook.add_output(self.key, data, self.variable_type)
            return data

        return store(instance, *args, **kwargs)
| {
"repo_name": "kstilwell/tcex",
"path": "tcex/decorators/write_output.py",
"copies": "2",
"size": "3168",
"license": "apache-2.0",
"hash": 7618613519920011000,
"line_mean": 39.1012658228,
"line_max": 97,
"alpha_frac": 0.6268939394,
"autogenerated": false,
"ratio": 4.785498489425982,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6412392428825981,
"avg_score": null,
"num_lines": null
} |
"""App demonstrating a flask API using flask-restful with swagger."""
import json
from flask import Flask, request
from flasgger import Swagger
from flask_restful import Api, Resource
from webargs import fields
from webargs.flaskparser import use_kwargs, parser, abort
# Wire up the Flask app, the flask-restful API wrapper, and flasgger's
# Swagger integration (serves interactive docs built from resource docstrings).
app = Flask(__name__)
api = Api(app)
swagger = Swagger(app)
@parser.error_handler
def handle_request_parsing_error(err, req):
    """
    Webargs error handler.
    Uses Flask-RESTful's abort function to return
    a JSON error response to the client.
    """
    # 422 Unprocessable Entity, with webargs' field-level validation
    # messages as the JSON error body
    abort(422, errors=err.messages)
class Username(Resource):
    """A resource to deal with usernames."""
    # webargs schema: GET requires a ?username=<str> query parameter;
    # parsing failures are handled by handle_request_parsing_error above
    args = {
        'username': fields.String(
            required=True,
        ),
    }
    # NOTE: the YAML after the `---` marker in each method docstring is the
    # Swagger spec parsed by flasgger — keep it in sync with the code.
    @use_kwargs(args)
    def get(self, username):
        """
        You can put anything here before the spec.
        ---
        parameters:
          - in: query
            name: username
            schema:
              type: string
            required: true
            description: The user name
            example: john
        responses:
          200:
            description: JSON with the name passed.
            example: {"username": "john"}
        """
        return {'username': username} , 200
    def post(self):
        """
        Again you can put anything here before the spec.
        ---
        parameters:
          - name: body
            in: body
            required: false
            description: List of names.
            example: ["tiago", "somebody"]
        responses:
          200:
            description: JSON with of all names passed.
            example: {"names": ["tiago", "somebody"]}
          400:
            description: error processing names list.
        """
        # parses the raw body; invalid JSON raises and surfaces as an error
        # (NOTE(review): request.get_json() would be the idiomatic alternative)
        request_data = json.loads(request.data)
        return {'names': request_data}, 200
api.add_resource(Username, '/username')

# Only start the development server when this module is executed directly;
# importing it (e.g. from a WSGI server or tests) must not block on app.run().
if __name__ == '__main__':
    app.run(debug=True)
| {
"repo_name": "tiagoprn/experiments",
"path": "flask/flask_restful_api_with_swagger/app.py",
"copies": "1",
"size": "1921",
"license": "mit",
"hash": 1226191357147621400,
"line_mean": 21.0804597701,
"line_max": 69,
"alpha_frac": 0.5663716814,
"autogenerated": false,
"ratio": 4.395881006864989,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00026123301985370953,
"num_lines": 87
} |
# app/draw.py
import colorsys
import itertools
import logging
from palettable import colorbrewer
from radon.complexity import cc_rank
from app import models
from app.grid import ImageGrid
from app.hilbert import int_to_Hilbert as get_xy
from app.utils import make_step_iter
# fallback color for symbols whose complexity cannot be determined
COLOR_CC_UNKNOWN = 'gray'
logger = logging.getLogger(__name__)
# X RGB values are off by one
def color_hsl_hex(hue, saturation, lightness):
    """Convert HSL components on a 0-99 scale to a '#rrggbb' hex string."""
    # colorsys takes (h, l, s) order, each in the 0.0-1.0 range
    red, green, blue = colorsys.hls_to_rgb(hue/99., lightness/99., saturation/99.)
    channels = (int(red*255), int(green*255), int(blue*255))
    return '#%02x%02x%02x' % channels
# palette examples
# https://jiffyclub.github.io/palettable/colorbrewer/diverging/#brbg_11
def pal_add_color(skeleton):
    """Yield (pos, symbol, arg, color) tuples for each skeleton entry,
    switching to the next palette color whenever arg (the file) changes.
    """
    # pylint: disable=no-member
    colors = colorbrewer.diverging.RdBu_11_r.hex_colors
    prev_arg = None
    color_iter = itertools.cycle(colors)
    for pos, symbol, arg in skeleton:
        # change color with new arg (file)
        if prev_arg != arg:
            # builtin next() works on Python 2.6+ AND Python 3;
            # iterator.next() was removed in Python 3
            color = next(color_iter)
            prev_arg = arg
        yield pos, symbol, arg, color
# X: speedup w/ queryset.select_related('progpmccabe')
class Theme(object):
    """Base theme: paints every symbol a uniform gray."""

    def calc_sym_color(self, symbol):
        """Return the display color for *symbol* (always 'gray' here)."""
        return 'gray'
class ThemeRainbow(Theme):
    """Theme cycling saturation per symbol; hue stepping is currently unused."""

    def __init__(self):
        self.hue_iter = make_step_iter(50, 360)
        self.saturation_iter = itertools.cycle([30, 60, 80])
        self.highlight_iter = itertools.cycle([40, 60])
        # fixed (hue, saturation-placeholder, highlight) baseline
        self.hue_sat_highlight = 0, 0, 40

    def calc_sym_color(self, symbol):
        # prev_arg = None
        # for symbol, arg in skeleton:
        # change color with new arg (file)
        # if prev_arg != arg:
        # hue = hue_iter.next()
        # prev_arg = arg
        # highlight = highlight_iter.next() # X?
        # alternate symbols: different saturation
        hue, _, highlight = self.hue_sat_highlight
        # builtin next() keeps this Python 3 compatible
        # (iterator.next() only exists on Python 2)
        saturation = next(self.saturation_iter)
        return color_hsl_hex(hue, saturation, highlight)
class ThemeComplexity(Theme):
    """
    give symbol a color based on code complexity
    Red=high complexity, blue=low.
    """

    COLOR_CC_UNKNOWN = 'gray'

    def __init__(self):
        # pylint: disable=no-member
        # map radon ranks A..E onto the red-blue palette; F reuses E's color
        palette = colorbrewer.diverging.RdBu_5_r.hex_colors
        self.colormap = {rank: color for rank, color in zip('ABCDE', palette)}
        self.colormap['F'] = self.colormap['E']

    def calc_sym_color(self, symbol):
        """Map the symbol's cyclomatic complexity rank to a palette color."""
        def lookup_complexity(sym):
            # try the pmccabe backend first, then the radon backend
            try:
                return sym.progpmccabe.mccabe
            except AttributeError:
                pass
            try:
                return sym.progradon.complexity
            except AttributeError:
                return None

        cc_value = lookup_complexity(symbol)
        try:
            return self.colormap[cc_rank(cc_value)]
        except (KeyError, TypeError):
            # no complexity recorded, or rank outside the palette
            return self.COLOR_CC_UNKNOWN
def draw_symbol(grid, skel, color):
    """Draw one symbol onto *grid* along the Hilbert curve.

    A white cell ("grain of rice") marks the symbol's first position,
    followed by *length* cells filled with *color*.

    Args:
        grid: drawing surface exposing moveto()/drawto() taking (x, y) pairs.
        skel: skeleton row; reads skel.position and skel.sourceline.length.
        color: fill color for the symbol body.
    """
    length = skel.sourceline.length
    if length < 1:
        return
    # draw white "grain of rice" at start of symbol
    pos = skel.position
    grid.moveto(get_xy(pos))
    grid.drawto(get_xy(pos + 1), '#fff')
    # range() instead of xrange() keeps this Python 3 compatible
    for offset in range(length):
        grid.drawto(get_xy(pos + offset + 1), color)
# X: unused
def draw_highlight(grid, diagram):
    """Highlight the span of positions whose symbols belong to a setup.py.

    Bug fix: previously called ImageColor.getrgb('white') although ImageColor
    is never imported (NameError if invoked); the equivalent literal RGB
    tuple is used instead. xrange also replaced with range for Python 3.
    """
    folder_pos = [pos for pos, symbol, _, _ in diagram
                  if symbol.path.endswith('/setup.py')]
    folder_range = range(min(folder_pos), max(folder_pos))
    grid.draw_many((get_xy(pos) for pos in folder_range),
                   (255, 255, 255))
# X BUG: doesn't account for symbol length
def draw_box(grid, dsymbols, outline='white', fill=None):
    """Draw a rectangle around the bounding box of *dsymbols* on *grid*."""
    try:
        left = min(dsym.x for dsym in dsymbols)
        top = min(dsym.y for dsym in dsymbols)
        right = max(dsym.x for dsym in dsymbols)
        bottom = max(dsym.y for dsym in dsymbols)
    except ValueError:
        # min()/max() raise ValueError on an empty sequence
        logger.warning('empty box: no symbols')
        return
    # 2? XX -- coordinates appear scaled by 2 for the image; TODO confirm
    grid.im_draw.rectangle(
        [left*2, top*2, right*2, bottom*2],
        fill=fill, outline=outline)
class DrawStyle(object):
    """
    draw rendered project onto a grid (image)
    """

    # subclasses must provide a draw_diagram implementation
    draw_diagram = NotImplementedError

    def draw(self, project, theme=None):
        """Render every Skeleton row of *project* onto a new ImageGrid."""
        grid = ImageGrid.FromProject(project)
        color_of = (theme or Theme()).calc_sym_color
        skeletons = models.Skeleton.objects.filter(
            sourceline__project=project).order_by(
            'sourceline__path', 'sourceline__name')
        for skel in skeletons:
            draw_symbol(grid, skel=skel, color=color_of(skel))
        grid.finalize()
        return grid
class SimpleDraw(DrawStyle):
    """Render each diagram directly with no decoration."""

    def draw_diagram(self, grid, diagram):
        """Delegate drawing straight to the diagram itself."""
        diagram.draw(grid)
class BoundingBoxDraw(DrawStyle):
    """Render each source file as a filled bounding box."""

    def draw_diagram(self, grid, diagram):
        """Group diagram symbols by file path and box each group."""
        paths = set(dsym.sourceline.path for dsym in diagram)
        for path in paths:
            file_syms = [dsym for dsym in diagram
                         if dsym.sourceline.path == path]
            draw_box(grid, file_syms, fill=file_syms[0].color)
# Registry of available draw styles, keyed by lowercase style name.
# (A dead `if 0:` variant that built this from globals() was removed; it
# was never executed and also had its issubclass() arguments reversed.)
DRAW_STYLES = {
    'boundingbox': BoundingBoxDraw,
    'simple': SimpleDraw}
| {
"repo_name": "johntellsall/shotglass",
"path": "shotglass/app/draw.py",
"copies": "1",
"size": "5423",
"license": "mit",
"hash": 1775028560574150700,
"line_mean": 28.3135135135,
"line_max": 71,
"alpha_frac": 0.6075972709,
"autogenerated": false,
"ratio": 3.4919510624597554,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9588620470390428,
"avg_score": 0.002185572593865476,
"num_lines": 185
} |
# app.dynamic.py
#
# Dynamic content block rendering methods
import logging
from yacon.models.pages import Page, PageType
from yacon.utils import SummarizedPage
logger = logging.getLogger(__name__)
# ============================================================================
# Module-level cache for the 'News Type' PageType; populated lazily by
# page_context() on first use so the DB lookup happens at most once.
NEWS_PAGE_TYPE = None
# ============================================================================
def page_context(request, uri, context):
    """Called by yacon.view.display_page, gives us a chance to add context to
    every page displayed."""
    global NEWS_PAGE_TYPE

    # fetch (and cache at module level) the PageType used to find news pages
    if not NEWS_PAGE_TYPE:
        NEWS_PAGE_TYPE = PageType.objects.get(name='News Type')

    # latest three news pages, summarized to 80 characters
    pages = Page.find_by_page_type(NEWS_PAGE_TYPE).order_by('-created')
    latest_news = [SummarizedPage(page, 'news', 80) for page in pages[:3]]

    context.update({
        'menu_name': 'Menu',
        'news': latest_news,
        'advertisement': '/static/images/sample_ad.png',
    })
| {
"repo_name": "cltrudeau/django-yacon",
"path": "extras/sample_site/app/dynamic.py",
"copies": "1",
"size": "1028",
"license": "mit",
"hash": 7505094544898433000,
"line_mean": 25.358974359,
"line_max": 78,
"alpha_frac": 0.5525291829,
"autogenerated": false,
"ratio": 3.8646616541353382,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9905669499294488,
"avg_score": 0.002304267548169987,
"num_lines": 39
} |
"""Appearance helpers."""
THEME_SETTINGS = {
"General": {
"preview": """
<div style='background: var(--BACKGROUND-COLOR);'>
<p>This is text on the page background.</p>
<p>This is text on the page background.</p>
</div>
""",
"variables": {
"BACKGROUND-COLOR": {
"default": "#EEF7FE",
"tooltip": "The background color of the whole page"
},
"FONT-COLOR": {
"default": "#3C6080",
"tooltip": "The default font color"
},
}
},
"Header": {
"preview": """
<header class='main-header'>
<h2 class='main-header__heading main-header__padded'>
<span>
Header
</span>
</h2>
<div class="main-header__menu menu">
<ul class="menu__list inline-list">
<li class="menu__item
menu__item--with-icon main-header__padded ">
<a href="#" class="menu__item__icon plain-link">
<i class="fa fa-calendar"></i>
</a>
<a href="#" class="menu__item__text plain-link">
Item
</a>
</li>
<li class="menu__item menu__item--with-icon
main-header__padded menu__item--active">
<a href="#" class="menu__item__icon plain-link">
<i class="fa fa-user"></i>
</a>
<a href="#" class="menu__item__text plain-link">
Highlight
</a>
</li>
<li class="menu__item menu__item--with-icon
main-header__padded notification-button">
<a href="#" class="menu__item__icon plain-link">
<i class="fa fa-bell"></i>
</a>
<a href="#" class="menu__item__text plain-link">
Notify
</a>
<a href="#" class="notification-button__unread
notification-button__unread--unread
notifications-list-link dropdown--link">5</a>
</li>
</ul>
</div>
<div class="clear"></div>
</header>
<nav class="secondary-navigation">
<div class="l-container">
<ul class="inline-list inline-list--space">
<li><a class="secondary-navigation__link " href="#">
Secondary Navigation Link
</a></li>
</ul>
</div>
</nav>
""",
"variables": {
"HEADER-BACKGROUND-COLOR": {
"default": "#1976D2",
"tooltip": "The background of the overall header"
},
"SECONDARY-NAV-BACKGROUND-COLOR": {
"default": "#BBDEFB",
"tooltip": "The background of the secondary navigation bar"
},
"SECONDARY-NAV-COLOR": {
"default": "#3C6080",
"tooltip": "The text on the secondary navigation bar"
},
"MENU-COLOR": {
"default": "#FFF",
"tooltip": "The color of text and icons on the menu"
},
"MENU-HIGHLIGHT-BACKGROUND-COLOR": {
"default": "#2196F3",
"tooltip": "The background of highlighted or selected " +
"menu items"
},
"MENU-HIGHLIGHT-COLOR": {
"default": "#FFF",
"tooltip": "The color of text on highlighted or selected " +
"menu items"
},
"UNREAD-NOTIFICATION-BACKGROUND-COLOR": {
"default": "#F52424",
"tooltip": "The background of the indicator for unread " +
"notifications"
},
"UNREAD-NOTIFICATION-COLOR": {
"default": "#FFF",
"tooltip": "The color of text on the indicator for unread " +
"notifications"
},
}
},
"Blocks": {
"variables": {
"BLOCK-COLOR": {
"default": "#DEEFFC",
"tooltip": "The background color of blocks"
},
}
},
"Forms": {
"preview": """
<button>A button</button>
<br /><br />
<label class='radio'>
<input type='radio' checked>A radio button
</label>
<div class="form__field">
<div class="form__field__label">
<label for="text-input">Text Input</label>
</div>
<input type="text" value="Text Input" />
</div>
<div class="form__field">
<div class="form__field__label">
<label for="text-area">Text Area</label>
</div>
<textarea>Text Area</textarea>
</div>
<div class="form__field">
<div class="form__field__label">
<label for="toggle">Toggle</label>
</div>
<div class="toggle toggle--no-content">
<div class="toggle__item toggle__item--first
toggle__item--active">
Going
</div>
<div class="toggle__item toggle__item--last">
<a href="#">Not Going</a>
</div>
</div>
</div>
""",
"variables": {
"RADIO-BACKGROUND-COLOR": {
"default": "#7FA9D2",
"tooltip": "The background color for radio buttons"
},
"RADIO-COLOR": {
"default": "#DEEFFC",
"tooltip": "The color of the checkbox on radio buttons"
},
"BUTTON-BACKGROUND-COLOR": {
"default": "#1976D2",
"tooltip": "The background color for buttons"
},
"BUTTON-COLOR": {
"default": "#FFF",
"tooltip": "The color of text on buttons"
},
"TEXT-INPUT-COLOR": {
"default": "#000",
"tooltip": "The color of text on text inputs"
},
"TEXT-INPUT-BACKGROUND-COLOR": {
"default": "#FFF",
"tooltip": "The color of text input backgrounds"
},
}
},
"Messages": {
"preview": """
<div class="message-box message-box--error">
An error message
</div>
<div class="message-box message-box--success">
A success message
</div>
<div class="message-box">
A message
</div>
""",
"variables": {
"MESSAGE-BACKGROUND-COLOR": {
"default": "#DEEFFC"
},
"ERROR-MESSAGE-BACKGROUND-COLOR": {
"default": "#F52424"
},
"SUCCESS-MESSAGE-BACKGROUND-COLOR": {
"default": "#149A3E"
},
}
}
}
| {
"repo_name": "jscott1989/happening",
"path": "src/happening/appearance.py",
"copies": "2",
"size": "7894",
"license": "mit",
"hash": -7470555497116486000,
"line_mean": 35.3778801843,
"line_max": 77,
"alpha_frac": 0.3824423613,
"autogenerated": false,
"ratio": 4.961659333752357,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6344101695052358,
"avg_score": null,
"num_lines": null
} |
# Appendix C10 - tables.py
from sqlalchemy import Table, Column, Integer, String, Text
from sqlalchemy import ForeignKey
from sqlalchemy.orm import relationship, backref
from .base import Base
class Topic(Base):
    """A top-level GOV.UK topic with child subtopics."""
    __tablename__ = 'topics'
    id = Column(Integer, primary_key=True)
    title = Column(String)
    base_path = Column(String, unique=True)
    description = Column(String)
    web_url = Column(String)
    api_url = Column(String)

    def __repr__(self):
        return "<Topic(title='%s', base_path='%s')>" % (self.title, self.base_path)

    def documents(self):
        """Return the distinct documents reachable via this topic's subtopics."""
        unique_docs = {doc for subtopic in self.subtopics for doc in subtopic.documents}
        return list(unique_docs)

    def documents_with_labels(self):
        """Return [document, topic-title-list] pairs for each document."""
        return [[doc, doc.topic_titles()] for doc in self.documents()]
# create the Subtopic<->Document many-to-many association table
subtopics_documents = Table('subtopics_documents', Base.metadata,
    Column('subtopic_id', ForeignKey('subtopics.id'), primary_key=True),
    Column('document_id', ForeignKey('documents.id'), primary_key=True)
)
class Subtopic(Base):
    """A second-level GOV.UK topic: child of one Topic, tagged on Documents."""
    __tablename__ = 'subtopics'
    id = Column(Integer, primary_key=True)
    title = Column(String)
    base_path = Column(String, unique=True)
    description = Column(String)
    web_url = Column(String)
    api_url = Column(String)
    # many-to-one: owning Topic (reverse side attached as Topic.subtopics below)
    topic_id = Column(Integer, ForeignKey('topics.id'))
    topic = relationship("Topic", back_populates="subtopics")
    # many-to-many: documents tagged with this subtopic
    documents = relationship(
        "Document", secondary=subtopics_documents, back_populates="subtopics"
    )
    def __repr__(self):
        return "<Subtopic(title='%s', base_path='%s')>" % (self.title, self.base_path)
# link topic to subtopics (one-to-many); attached after the Subtopic class
# exists so Subtopic.id can be referenced for ordering
Topic.subtopics = relationship(
    "Subtopic", order_by=Subtopic.id, back_populates="topic"
)
class Document(Base):
    """A GOV.UK content document, linked to Subtopics many-to-many."""
    __tablename__ = 'documents'
    id = Column(Integer, primary_key=True)
    title = Column(String)
    base_path = Column(String, unique=True)
    web_url = Column(String)
    html = Column(Text)
    description = Column(Text)
    content = Column(Text)
    # many-to-many: subtopics this document is tagged with
    subtopics = relationship(
        'Subtopic', secondary=subtopics_documents, back_populates='documents'
    )
    def __init__(self, title, base_path, html=None, description=None, web_url=None, content=None):
        self.title = title
        self.base_path = base_path
        self.html = html
        self.web_url = web_url
        self.description = description
        self.content = content
    def __repr__(self):
        # bug fix: the closing '>' was missing, unlike Topic/Subtopic reprs
        return "<Document(title=%r, base_path=%r)>" % (self.title, self.base_path)
    def topics(self):
        """Return the distinct Topics reachable via this document's subtopics."""
        return list({subtopic.topic for subtopic in self.subtopics})
    def topic_titles(self):
        """Return the titles of every topic this document belongs to."""
        return [topic.title for topic in self.topics()]
| {
"repo_name": "jackscotti/klassify",
"path": "src/tables.py",
"copies": "1",
"size": "3085",
"license": "mit",
"hash": 1970663791015250000,
"line_mean": 29.5445544554,
"line_max": 98,
"alpha_frac": 0.6204213938,
"autogenerated": false,
"ratio": 3.780637254901961,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9858602989211751,
"avg_score": 0.00849113189804182,
"num_lines": 101
} |
# Appendix C17 - test_measure_calculator.py
from klassify.src.measure_calculator import MeasureCalculator
from klassify.src.tables import Topic, Subtopic
# Two disjoint measure sets for the same two classifiers; the tests below
# verify that MeasureCalculator merges and accumulates them per algorithm.
first_set = {
    "BernoulliNB": {
        "cross score": 3, "precision": 1, "cross variance": 1
    },
    "MultinomialNB": {
        "cross score": 2, "precision": 2, "cross variance": 2
    }
}
second_set = {
    "BernoulliNB": {"recall": 3, "f1": 1},
    "MultinomialNB": {"recall": 2, "f1": 2}
}
# Groups two sets of measures by the algorithm type
def test_combine_measures():
    """combine_measures merges two per-algorithm measure dicts into one."""
    calculator = MeasureCalculator()
    merged = calculator.combine_measures(first_set, second_set)
    assert merged == {
        "BernoulliNB": {
            "cross score": 3, "precision": 1, "recall": 3, "f1": 1, "cross variance": 1
        },
        "MultinomialNB": {
            "cross score": 2, "precision": 2, "recall": 2, "f1": 2, "cross variance": 2
        }
    }
# Store sets of measures
def test_add_measures():
    """add_measures stores each measure value in a per-algorithm list."""
    calculator = MeasureCalculator()
    calculator.add_measures(first_set, second_set)
    expected = {
        "BernoulliNB": {
            "cross score": [3], "precision": [1], "recall": [3], "f1": [1], "cross variance": [1]
        },
        "MultinomialNB": {
            "cross score": [2], "precision": [2], "recall": [2], "f1": [2], "cross variance": [2]
        }
    }
    assert calculator.measures == expected
| {
"repo_name": "jackscotti/klassify",
"path": "test/test_measure_calculator.py",
"copies": "1",
"size": "1321",
"license": "mit",
"hash": -1986737451705257500,
"line_mean": 28.3555555556,
"line_max": 97,
"alpha_frac": 0.5647236942,
"autogenerated": false,
"ratio": 3.1908212560386473,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9242476261948285,
"avg_score": 0.0026137376580726334,
"num_lines": 45
} |
# Appendix C19 - test_topic_importer.py
from klassify.src.topic_importer import TopicImporter
from klassify.src.tables import Topic, Subtopic
# module-level importer shared by every test below
IMPORTER = TopicImporter()
def test_make_topic():
    """make_topic builds a Topic whose fields mirror the API payload."""
    payload = {'base_path': '/topic/working-sea', 'web_url': 'https://www.gov.uk/topic/working-sea', 'content_id': '077826e8-f094', 'description': 'List of information about Working at sea.', 'title': 'Working at sea', 'api_url': 'https://www.gov.uk/api/content/topic/working-sea'}
    built = IMPORTER.make_topic(payload)
    expected = Topic(
        title='Working at sea',
        base_path='/topic/working-sea',
        web_url='https://www.gov.uk/topic/working-sea',
        api_url='https://www.gov.uk/api/content/topic/working-sea',
        description='List of information about Working at sea.'
    )
    for field in ('title', 'base_path', 'web_url', 'api_url', 'description'):
        assert getattr(built, field) == getattr(expected, field)
def test_make_subtopic():
    """make_subtopic builds a Subtopic whose fields mirror the API payload."""
    subtopic_fixture = {'content_id': '6382617d-a2c5-4651-b487-5d267dfc6662', 'locale': 'en', 'base_path': '/topic/working-sea/health-safety', 'description': 'List of information about Health and safety.', 'api_url': 'https://www.gov.uk/api/content/topic/working-sea/health-safety', 'title': 'Health and safety', 'web_url': 'https://www.gov.uk/topic/working-sea/health-safety'}
    # bug fix: previously called make_topic(), so make_subtopic() was never
    # actually exercised by this test
    created_subtopic = IMPORTER.make_subtopic(subtopic_fixture)
    expected_subtopic = Subtopic(
        title='Health and safety',
        base_path='/topic/working-sea/health-safety',
        web_url='https://www.gov.uk/topic/working-sea/health-safety',
        api_url='https://www.gov.uk/api/content/topic/working-sea/health-safety',
        description='List of information about Health and safety.'
    )
    assert created_subtopic.title == expected_subtopic.title
    assert created_subtopic.base_path == expected_subtopic.base_path
    assert created_subtopic.web_url == expected_subtopic.web_url
    assert created_subtopic.api_url == expected_subtopic.api_url
    assert created_subtopic.description == expected_subtopic.description
def test_associate_topic_subtopics():
    """Associated subtopics must be reachable from topic.subtopics, in order."""
    parent = Topic(title="A topi title")
    children = [
        Subtopic(title="A subtopic title 1"),
        Subtopic(title="A subtopic title 2"),
    ]
    IMPORTER.associate_topic_subtopics(parent, children)
    for child, linked in zip(children, parent.subtopics):
        assert child.title == linked.title
| {
"repo_name": "jackscotti/klassify",
"path": "test/test_topic_importer.py",
"copies": "1",
"size": "2661",
"license": "mit",
"hash": -2646005243701885400,
"line_mean": 48.2777777778,
"line_max": 377,
"alpha_frac": 0.707628711,
"autogenerated": false,
"ratio": 3.330413016270338,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9526322427675549,
"avg_score": 0.002343859918958005,
"num_lines": 54
} |
# Appendix C2 - topic_importer.py
import requests
from .tables import Topic, Subtopic
from .db_handler import DBHandler
class TopicImporter:
    """Imports GOV.UK topics and their subtopics into the local database
    via the public content API."""
    def __init__(self):
        self.session = DBHandler(echo=False).session
        self.API_URL = "https://www.gov.uk/api/content"
    def make_topic(self, topic_data):
        """Build a Topic row from one content-API item (dict)."""
        return Topic(title=topic_data["title"],
            base_path=topic_data["base_path"], web_url=topic_data["web_url"],
            api_url=topic_data["api_url"], description=topic_data["description"])
    def make_subtopic(self, subtopic_data):
        """Build a Subtopic row from one content-API item (dict)."""
        return Subtopic(title=subtopic_data["title"],
            base_path=subtopic_data["base_path"], web_url=subtopic_data["web_url"],
            api_url=subtopic_data["api_url"], description=subtopic_data["description"])
    def associate_topic_subtopics(self, topic, subtopics):
        """Attach *subtopics* to *topic* via its subtopics relationship."""
        topic.subtopics = subtopics
    def run(self):
        """Fetch every topic plus its subtopics and persist them all."""
        root = requests.get(self.API_URL + "/topic").json()
        topics_json = root["links"]["children"]
        topics = []
        # Collect subtopics across ALL topics.
        # BUG FIX: previously only the final loop iteration's `subtopics`
        # list was passed to add_all, and an empty topic list raised
        # NameError because `subtopics` was never bound.
        all_subtopics = []
        print("Importing topics and subtopics", end="", flush=True)
        for topic_json in topics_json:
            print('.', end="", flush=True)  # progress dot per topic
            topic = self.make_topic(topic_json)
            topics.append(topic)
            topic_base_path = topic_json["base_path"]
            topic_data = requests.get(self.API_URL + topic_base_path).json()
            subtopics_json = topic_data["links"]["children"]
            subtopics = [self.make_subtopic(s) for s in subtopics_json]
            self.associate_topic_subtopics(topic, subtopics)
            all_subtopics.extend(subtopics)
        self.session.add_all(topics)
        self.session.add_all(all_subtopics)
        self.session.commit()
        print("\nComplete.")
| {
"repo_name": "jackscotti/klassify",
"path": "src/topic_importer.py",
"copies": "1",
"size": "1808",
"license": "mit",
"hash": 3105240429772660000,
"line_mean": 38.3043478261,
"line_max": 87,
"alpha_frac": 0.6150442478,
"autogenerated": false,
"ratio": 3.7355371900826446,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.48505814378826445,
"avg_score": null,
"num_lines": null
} |
# Appendix C3 - doc_importer.py
import math
from .tables import Subtopic, Document
from .db_handler import DBHandler
import requests
import sqlalchemy
import time
class DocumentImporter(object):
    """Imports GOV.UK search results into the database as Document rows,
    linking each document to the Subtopic rows matching its specialist
    sectors. Duplicate documents (IntegrityError) are skipped and counted."""
    def __init__(self, db_name="klassify"):
        # Search queries that exclude documents with no specialist sectors.
        self.ROOT_URL = "https://www.gov.uk/api/search.json?reject_specialist_sectors=_MISSING"
        self.PAGE_URL = "https://www.gov.uk/api/search.json?reject_specialist_sectors=_MISSING&count=1000&start="
        self.DBH = DBHandler(db_name, echo=False)
    def api_response(self, url):
        """GET *url* and return the decoded JSON body."""
        time.sleep(0.15)  # throttle: be polite to the public API
        return requests.get(url).json()
    def total_documents(self, document_data):
        """Cache and return the total result count from a search response."""
        self.document_count = document_data["total"]
        return self.document_count
    def pages(self, number_of_documents):
        """Number of 1000-item pages needed to cover the result set."""
        return math.ceil(number_of_documents / 1000)
    def urls(self, number_of_pages):
        """Build one paginated search URL per page (start=0, 1000, ...)."""
        urls = []
        for i in range(number_of_pages):
            item_count = i * 1000
            url_with_pagination = self.PAGE_URL + str(item_count)
            urls.append(url_with_pagination)
        return urls
    def associate_document_with_subtopics(self, document, subtopics):
        """Attach de-duplicated *subtopics* to *document*; returns document."""
        # remove duplicates by converting topics to a set and then back to a list
        subtopics = set(subtopics)
        subtopics = list(subtopics)
        document.subtopics = subtopics
        return document
    def make_document(self, document_data):
        """Build a Document row from one search-result item (dict)."""
        link = document_data["link"]
        title = document_data["title"]
        if "description" not in document_data:
            description = ""
        else:
            description = document_data["description"]
        doc = Document(
            web_url="https://www.gov.uk" + link,
            description=description,
            base_path=link,
            title=title
        )
        return doc
    def find_subtopics(self, document_data):
        """Look up the Subtopic rows named by the item's specialist sectors.

        Sectors with no matching row in the database are silently skipped.
        """
        subtopics_data = document_data["specialist_sectors"]
        subtopics = []
        for subtopic_data in subtopics_data:
            subtopic = self.DBH.session.query(Subtopic).filter_by(base_path=subtopic_data['link']).first()
            if subtopic: subtopics.append(subtopic)
        return subtopics
    def run(self):
        """Fetch every result page and persist each document individually.

        Each document gets its own commit so one duplicate (IntegrityError)
        only rolls back that single row, not the whole import.
        """
        root_data = self.api_response(self.ROOT_URL)
        number_of_documents = self.total_documents(root_data)
        pages = self.pages(number_of_documents)
        urls = self.urls(pages)
        count = 0
        duplicate_documents = []
        for url in urls:
            list_of_documents = self.api_response(url)
            documents_data = list_of_documents['results']
            for document_data in documents_data:
                document = self.make_document(document_data)
                subtopics = self.find_subtopics(document_data)
                if subtopics:
                    self.associate_document_with_subtopics(document, subtopics)
                try:
                    self.DBH.session.add(document)
                    self.DBH.session.commit()
                except sqlalchemy.exc.IntegrityError:
                    # Already present (unique constraint): record and skip.
                    duplicate_documents.append(document.base_path)
                    self.DBH.session.rollback()
                except:
                    # Any other failure: undo the pending row, then re-raise.
                    self.DBH.session.rollback()
                    raise
                if count % 250 == 0: print("Documents processed: %d/%d" % (count, self.document_count))
                count = count + 1
        self.DBH.session.close()
        print("Documents with duplicates that have been ignored: %d" % len(duplicate_documents))
| {
"repo_name": "jackscotti/klassify",
"path": "src/document_importer.py",
"copies": "1",
"size": "3546",
"license": "mit",
"hash": 2298767884365788200,
"line_mean": 34.1089108911,
"line_max": 113,
"alpha_frac": 0.5987027637,
"autogenerated": false,
"ratio": 4.075862068965518,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5174564832665518,
"avg_score": null,
"num_lines": null
} |
# Appendix C4 - content_importer.py
from .db_handler import DBHandler
from .tables import Document
from bs4 import BeautifulSoup
import requests
import time
# Future implementation: Tuning features by adding Documents' to their content. Maybe with a multiplier.
class ContentImporter(object):
    """Downloads each Document's HTML from GOV.UK and reduces it to clean,
    classifier-ready plain text (boilerplate, punctuation and digits removed)."""
    def __init__(self, db_name="klassify"):
        self.DBH = DBHandler(db_name, echo=False)
        self.ROOT_URL = "https://www.gov.uk"
        # Boilerplate phrases stripped from every page.
        # BUG FIX: two entries were missing a trailing comma, so adjacent
        # string literals were silently concatenated into one phrase and
        # neither half was ever matched on its own.
        self.NON_RELEVANT_PHRASES = [
            "Skip to main content",
            "Find out more about cookies",
            "GOV.UK uses cookies to make the site simpler",
            "Is there anything wrong with this page",
            "Last updated",
            "Other ways to apply",
            "Before you start",
            "Elsewhere on the web",
            "Find out about call charges",
            "find out more about beta services",
            "Return to top ↑",
            "Find out more about cookies",
            "GOV.UK",
            "Don’t include personal or financial information",
            "Help us improve",
            "This file may not be suitable for users of assistive technology",
            "If you use assistive technology and need a version of this document in a more accessible format",
            "tell us what format you need It will help us if you say what assistive technology you use",
            "Request a different format",
            "What you were doing",
            "What went wrong",
            "uses cookies to make the site simpler."
        ]
    def parse_page(self, page):
        """Parse raw HTML into a BeautifulSoup tree."""
        soup = BeautifulSoup(page, 'html.parser')
        return soup
    def extract_page_content(self, page):
        """Return the visible text of a parsed page/tag."""
        return page.text
    # Iterate through each Document in the database, get their URL on the site and
    # query it to obtain their HTML and eventually store it.
    def import_documents_html(self):
        documents = self.DBH.session.query(Document).all()
        count = 0
        for doc in documents:
            if doc.html is None:  # fetch only pages we have not stored yet
                time.sleep(0.75)  # throttle requests to the live site
                doc.html = requests.get(doc.web_url).text
                self.DBH.session.commit()
            count += 1
            if count % 250 == 0: print("Documents processed: %d/%d" %(count, len(documents)))
    # Iterate through the Documents' HTML, parse it and store it.
    def extract_documents_content(self):
        documents = self.DBH.session.query(Document).all()
        count = 0
        for doc in documents:
            doc.content = self.extract_content(doc)
            self.DBH.session.commit()
            count += 1
            if count % 250 == 0: print("Documents processed: %d/%d" %(count, len(documents)))
    def extract_content(self, document):
        """Run the full HTML -> clean-text pipeline for one document."""
        page = self.parse_page(document.html)
        page = self.remove_unwanted_tags(page)
        page = self.get_body(page)
        page_content = self.extract_page_content(page)
        page_content = self.remove_non_relevant_content(page_content)
        page_content = self.remove_punctuaction_and_numbers(page_content)
        return page_content
    def get_body(self, page):
        """Return the <body> element of the parsed page."""
        return page.body
    # Discard anything inside footer, header and scripts
    def remove_unwanted_tags(self, page):
        for tag in page.find_all(['footer', 'script', 'header']):
            tag.replace_with('')
        return page
    def remove_non_relevant_content(self, page):
        """Strip every known boilerplate phrase from the text."""
        for phrase in self.NON_RELEVANT_PHRASES:
            page = page.replace(phrase, "")
        return page
    def remove_punctuaction_and_numbers(self, page):
        """Drop punctuation characters and digits, keeping all other text."""
        punctuation = [ '\\', '>', '_', '`', '{', ']', '*', '[',
                        '^', '+', '!', '(', ':', ';', "'", "’",
                        '<', '|', '"', '?', '=', '}', '&', '/',
                        '$', ')', '~', '#', '%', ',' ]
        # Single pass instead of two separate join passes.
        return ''.join(ch for ch in page
                       if ch not in punctuation and not ch.isdigit())
| {
"repo_name": "jackscotti/klassify",
"path": "src/content_importer.py",
"copies": "1",
"size": "4002",
"license": "mit",
"hash": -4758150407819212000,
"line_mean": 37.7961165049,
"line_max": 110,
"alpha_frac": 0.5713213213,
"autogenerated": false,
"ratio": 4.128099173553719,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.001942350839294639,
"num_lines": 103
} |
# Appendix C5 - feature_extractor.py
from nltk.tokenize import word_tokenize
from nltk.corpus import stopwords
from nltk.stem import PorterStemmer
import nltk
class FeatureExtractor():
    """Builds a stemmed, stopword-filtered vocabulary from a set of documents
    and converts documents into boolean bag-of-words feature dicts."""
    def __init__(self, documents, n_features=5000):
        self.documents = documents
        self.stemmer = PorterStemmer()
        # Vocabulary = the n_features most common processed tokens.
        self.vocabulary = self.top_words(n_features, self.freq_dist(self.make_vocabulary()))
    def tokenize(self, document=None):
        """Tokenize one document's content, or all documents when none given."""
        if document:
            documents = [document]
        else:
            documents = self.documents
        return [token for doc in documents for token in word_tokenize(doc.content)]
    def process(self, vocabulary):
        """Lowercase, de-stopword, stem and length-filter raw tokens."""
        ADDITIONAL_STOP_WORDS = {'january', 'please', 'https', 'email', 'detail', 'send', 'if', 'december', 'october', 'kb', 'february', 'within', 'november', 'may', '.mb', 'what', 'pdf', 'june', 'mach', 'good', 'august', 'september', 'html', 'july', 'beta', 'document', 'eg', 'published', 'april'}
        stop_words = set(stopwords.words("english"))
        # Hoist the union out of the loop; it is loop-invariant.
        all_stop_words = stop_words | ADDITIONAL_STOP_WORDS
        processed_words = []
        for word in vocabulary:
            # select only words shorter than 20 char
            if len(word) < 20:
                word = word.lower()
                # do not select stopwords
                if word not in all_stop_words:
                    # stem words
                    word = self.stemmer.stem(word)
                    # BUG FIX: 'word.isalpha' (missing call parentheses) was
                    # always truthy, making the else branch unreachable.
                    if word.isalpha():
                        # do not select alphabetic words shorter than 2 characters
                        if len(word) > 1:
                            processed_words.append(word)
                    else:
                        processed_words.append(word)
        return processed_words
    def make_vocabulary(self, document=None):
        """Tokenize then process either one document or the whole corpus."""
        if document:
            vocabulary = self.tokenize(document)
        else:
            vocabulary = self.tokenize()
        vocabulary = self.process(vocabulary)
        return vocabulary
    def bag_of_words(self, document):
        """Map each vocabulary word to True/False presence in *document*."""
        doc_words = set(self.make_vocabulary(document))
        bag_of_words = {}
        for word in self.vocabulary:
            bag_of_words[word] = (word in doc_words)
        return bag_of_words
    def freq_dist(self, vocabulary):
        """Frequency distribution of the processed vocabulary."""
        return nltk.FreqDist(vocabulary)
    def top_words(self, n_features, freq_dist):
        """First n_features keys of the distribution (insertion order)."""
        return list(freq_dist.keys())[:n_features]
| {
"repo_name": "jackscotti/klassify",
"path": "src/feature_extractor.py",
"copies": "1",
"size": "2403",
"license": "mit",
"hash": 382910867796219500,
"line_mean": 35.9692307692,
"line_max": 317,
"alpha_frac": 0.5771951727,
"autogenerated": false,
"ratio": 4.059121621621622,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5136316794321621,
"avg_score": null,
"num_lines": null
} |
# Appendix C6 - doc_operator.py
from .db_handler import DBHandler
from .tables import Topic, Subtopic, Document
from .feature_extractor import FeatureExtractor
import random
class DocumentOperator():
    """Selects a random subset of topics and prepares labelled bag-of-words
    feature sets for their documents."""
    def __init__(self, db_name="klassify", n=3, min_docs=None, max_docs=None, n_features=None):
        self.DBH = DBHandler(db_name=db_name, echo=False)
        self.topics = self.pick_random_topics(n, min_docs)
        self.max_docs = max_docs
        self.topic_labels = [topic.title for topic in self.topics]
        # NOTE(review): this assignment shadows the docs_with_labels METHOD
        # with its result; the method is unreachable after __init__.
        self.docs_with_labels = self.docs_with_labels()
        self.featuresets = []
        self.processor = FeatureExtractor([doc for doc, cat in self.docs_with_labels], n_features)
    def pick_random_topics(self, n, min_docs):
        """Return up to *n* random topics, optionally requiring > min_docs documents."""
        topics = self.DBH.session.query(Topic).all()
        if min_docs:
            topics = [topic for topic in topics if len(topic.documents()) > min_docs]
        random.shuffle(topics)
        topics = topics[:n]
        return topics
    def find_random_doc_by_title(self, title):
        """Random document from a random subtopic of the topic named *title*."""
        topic = self.DBH.session.query(Topic).filter(Topic.title == title).first()
        subtopic = random.choice(topic.subtopics)
        return random.choice(subtopic.documents)
    def random_document(self):
        """Pick any document at random; returns (document, bag_of_words)."""
        all_topics = self.DBH.session.query(Topic).all()
        topic = random.choice(all_topics)
        subtopic = random.choice(topic.subtopics)
        doc = random.choice(subtopic.documents)
        bag_of_words = self.baggify_document(doc)
        return doc, bag_of_words
    def docs_with_labels(self):
        """Collect [document, labels] pairs, keeping only selected-topic labels.

        When max_docs is set, each topic contributes at most max_docs
        randomly chosen documents.
        """
        docs_with_filtered_labels = []
        for topic in self.topics:
            docs_with_labels = topic.documents_with_labels()
            if self.max_docs:
                random.shuffle(docs_with_labels)
                docs_with_labels = docs_with_labels[:self.max_docs]
            for doc, doc_labels in docs_with_labels:
                filtered_labels = []
                for label in doc_labels:
                    # Filter out labels that are not the selected topics
                    if label in self.topic_labels:
                        filtered_labels.append(label)
                docs_with_filtered_labels.append([doc, filtered_labels])
        return docs_with_filtered_labels
    def build_feature_sets(self):
        """Shuffle the labelled documents and build [bag_of_words, labels] pairs."""
        document_set_with_category = self.docs_with_labels
        random.shuffle(document_set_with_category)
        count = 0
        for (document, category) in document_set_with_category:
            count = count + 1
            self.featuresets.append([self.baggify_document(document), category])
    def baggify_document(self, doc):
        """Delegate to the shared FeatureExtractor's bag_of_words."""
        return self.processor.bag_of_words(doc)
| {
"repo_name": "jackscotti/klassify",
"path": "src/doc_operator.py",
"copies": "1",
"size": "2699",
"license": "mit",
"hash": -3724789807349597700,
"line_mean": 38.115942029,
"line_max": 98,
"alpha_frac": 0.6268988514,
"autogenerated": false,
"ratio": 3.7907303370786516,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.49176291884786516,
"avg_score": null,
"num_lines": null
} |
# Appendix C9 - measure_calculator.py
class MeasureCalculator():
    """Accumulates per-run evaluation measures for two Naive Bayes variants
    and prints their averages on demand."""
    # Keys handled specially (printed together as accuracy +/- variance).
    _CROSS_KEYS = ("cross score", "cross variance")
    def __init__(self):
        # One list of observed values per measure, per classifier.
        self.measures = {
            "BernoulliNB": {
                "cross score": [],
                "cross variance": [],
                "precision": [],
                "recall": [],
                "f1": []
            },
            "MultinomialNB": {
                "cross score": [],
                "cross variance": [],
                "precision": [],
                "recall": [],
                "f1": []
            }
        }
    def add_measures(self, cross_validation_measures, accuracy_measures):
        """Merge one run's measures and append each value to its history."""
        measures = self.combine_measures(cross_validation_measures, accuracy_measures)
        for algo_type, results in measures.items():
            for result, value in results.items():
                self.measures[algo_type][result].append(value)
    def combine_measures(self, cross_validation_measures, accuracy_measures):
        """Merge the two per-classifier dicts into one dict per classifier."""
        current_measures = {}
        current_measures["BernoulliNB"] = dict(
            list(cross_validation_measures["BernoulliNB"].items()) +
            list(accuracy_measures["BernoulliNB"].items())
        )
        current_measures["MultinomialNB"] = dict(
            list(cross_validation_measures["MultinomialNB"].items()) +
            list(accuracy_measures["MultinomialNB"].items())
        )
        return current_measures
    def averaged_measures(self):
        """Print the average of every accumulated measure.

        BUG FIX: this method previously pop()ed the cross-validation keys
        out of self.measures, so calling it a second time raised KeyError.
        It now reads without mutating, so it can be called repeatedly.
        """
        for algo_type, results in self.measures.items():
            print(algo_type + ":")
            cross_score = (sum(results["cross score"]) / len(results["cross score"]))
            cross_variance = (sum(results["cross variance"]) / len(results["cross variance"]))
            # Print out average of cross eval measure along with its variance
            print("Cross evaluation accuracy: %1.3f (+/- %1.3f)" % (cross_score, cross_variance))
            for result, values in results.items():
                if result in self._CROSS_KEYS:
                    continue  # already reported above
                # Print out averages of all remaining measures
                print("%s: %1.3f" % (result, (sum(values) / len(values))))
| {
"repo_name": "jackscotti/klassify",
"path": "src/measure_calculator.py",
"copies": "1",
"size": "2142",
"license": "mit",
"hash": 5420634201356844000,
"line_mean": 39.4150943396,
"line_max": 98,
"alpha_frac": 0.5401493931,
"autogenerated": false,
"ratio": 4.292585170340681,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0008233931437355504,
"num_lines": 53
} |
"""Append module search paths for third-party packages to sys.path.
****************************************************************
* This module is automatically imported during initialization. *
****************************************************************
In earlier versions of Python (up to 1.5a3), scripts or modules that
needed to use site-specific modules would place ``import site''
somewhere near the top of their code. Because of the automatic
import, this is no longer necessary (but code that does it still
works).
This will append site-specific paths to the module search path. On
Unix (including Mac OSX), it starts with sys.prefix and
sys.exec_prefix (if different) and appends
lib/python<version>/site-packages as well as lib/site-python.
On other platforms (such as Windows), it tries each of the
prefixes directly, as well as with lib/site-packages appended. The
resulting directories, if they exist, are appended to sys.path, and
also inspected for path configuration files.
A path configuration file is a file whose name has the form
<package>.pth; its contents are additional directories (one per line)
to be added to sys.path. Non-existing directories (or
non-directories) are never added to sys.path; no directory is added to
sys.path more than once. Blank lines and lines beginning with
'#' are skipped. Lines starting with 'import' are executed.
For example, suppose sys.prefix and sys.exec_prefix are set to
/usr/local and there is a directory /usr/local/lib/python2.5/site-packages
with three subdirectories, foo, bar and spam, and two path
configuration files, foo.pth and bar.pth. Assume foo.pth contains the
following:
# foo package configuration
foo
bar
bletch
and bar.pth contains:
# bar package configuration
bar
Then the following directories are added to sys.path, in this order:
/usr/local/lib/python2.5/site-packages/bar
/usr/local/lib/python2.5/site-packages/foo
Note that bletch is omitted because it doesn't exist; bar precedes foo
because bar.pth comes alphabetically before foo.pth; and spam is
omitted because it is not mentioned in either path configuration file.
After these path manipulations, an attempt is made to import a module
named sitecustomize, which can perform arbitrary additional
site-specific customizations. If this import fails with an
ImportError exception, it is silently ignored.
"""
import sys
import os
import __builtin__
import traceback
# Prefixes for site-packages; add additional prefixes like /usr/local here
PREFIXES = [sys.prefix, sys.exec_prefix]
# Enable per user site-packages directory
# set it to False to disable the feature or True to force the feature
ENABLE_USER_SITE = None
# for distutils.commands.install
# These values are initialized by the getuserbase() and getusersitepackages()
# functions, through the main() function when Python starts.
USER_SITE = None
USER_BASE = None
def makepath(*paths):
    """Join *paths*, absolutize when the filesystem allows it, and return
    the pair (path, case-normalized path)."""
    joined = os.path.join(*paths)
    try:
        joined = os.path.abspath(joined)
    except OSError:
        pass  # e.g. cwd unavailable: keep the relative form
    return joined, os.path.normcase(joined)
def abs__file__():
    """Make every loaded module's __file__ attribute an absolute path."""
    for m in sys.modules.values():
        if hasattr(m, '__loader__'):
            continue   # don't mess with a PEP 302-supplied __file__
        try:
            m.__file__ = os.path.abspath(m.__file__)
        except (AttributeError, OSError):
            # Builtin modules have no __file__; abspath may fail (OSError).
            pass
def removeduppaths():
    """ Remove duplicate entries from sys.path along with making them
    absolute"""
    # This ensures that the initial path provided by the interpreter contains
    # only absolute pathnames, even if we're running from the build directory.
    L = []
    known_paths = set()
    for dir in sys.path:
        # Filter out duplicate paths (on case-insensitive file systems also
        # if they only differ in case); turn relative paths into absolute
        # paths.
        dir, dircase = makepath(dir)
        if not dircase in known_paths:
            L.append(dir)
            known_paths.add(dircase)
    # Replace contents in place so existing references to sys.path see it.
    sys.path[:] = L
    return known_paths
# XXX This should not be part of site.py, since it is needed even when
# using the -S option for Python. See http://www.python.org/sf/586680
def addbuilddir():
    """Append ./build/lib.<platform> in case we're running in the build dir
    (especially for Guido :-)"""
    from sysconfig import get_platform
    s = "build/lib.%s-%.3s" % (get_platform(), sys.version)
    if hasattr(sys, 'gettotalrefcount'):
        # gettotalrefcount only exists in debug builds, which use a
        # distinct build directory suffix.
        s += '-pydebug'
    # NOTE(review): pops the LAST sys.path entry and rebases the build dir
    # relative to its parent — relies on the interpreter's startup layout.
    s = os.path.join(os.path.dirname(sys.path.pop()), s)
    sys.path.append(s)
def _init_pathinfo():
    """Return a set containing all existing directory entries from sys.path"""
    existing = set()
    for entry in sys.path:
        try:
            if os.path.isdir(entry):
                _unused, normed = makepath(entry)
                existing.add(normed)
        except TypeError:
            # Non-string sys.path entries are skipped.
            continue
    return existing
def addpackage(sitedir, name, known_paths):
    """Process a .pth file within the site-packages directory:
    For each line in the file, either combine it with sitedir to a path
    and add that to known_paths, or execute it if it starts with 'import '.
    """
    if known_paths is None:
        # NOTE(review): the return value of _init_pathinfo() is discarded
        # here (known_paths stays None) — confirm this mirrors upstream.
        _init_pathinfo()
        reset = 1
    else:
        reset = 0
    fullname = os.path.join(sitedir, name)
    try:
        f = open(fullname, "rU")
    except IOError:
        return  # unreadable .pth file: silently ignored
    with f:
        for n, line in enumerate(f):
            if line.startswith("#"):
                continue  # comment line
            try:
                # Executable lines: anything starting with 'import'.
                if line.startswith(("import ", "import\t")):
                    exec line
                    continue
                line = line.rstrip()
                dir, dircase = makepath(sitedir, line)
                # Only existing, not-yet-seen directories are appended.
                if not dircase in known_paths and os.path.exists(dir):
                    sys.path.append(dir)
                    known_paths.add(dircase)
            except Exception as err:
                # Report the offending line and skip the rest of the file.
                print >>sys.stderr, "Error processing line {:d} of {}:\n".format(
                    n+1, fullname)
                for record in traceback.format_exception(*sys.exc_info()):
                    for line in record.splitlines():
                        print >>sys.stderr, '  '+line
                print >>sys.stderr, "\nRemainder of file ignored"
                break
    if reset:
        known_paths = None
    return known_paths
def addsitedir(sitedir, known_paths=None):
    """Add 'sitedir' argument to sys.path if missing and handle .pth files in
    'sitedir'"""
    reset = 0
    if known_paths is None:
        known_paths = _init_pathinfo()
        reset = 1
    sitedir, sitedircase = makepath(sitedir)
    if sitedircase not in known_paths:
        sys.path.append(sitedir)        # Add path component
    try:
        entries = os.listdir(sitedir)
    except os.error:
        return
    suffix = os.extsep + "pth"
    # Process .pth files in deterministic (sorted) order.
    for entry in sorted(e for e in entries if e.endswith(suffix)):
        addpackage(sitedir, entry, known_paths)
    if reset:
        known_paths = None
    return known_paths
def check_enableusersite():
    """Check if user site directory is safe for inclusion

    The function tests for the command line flag (including environment var),
    process uid/gid equal to effective uid/gid.

    None: Disabled for security reasons
    False: Disabled by user (command line option)
    True: Safe and enabled
    """
    if sys.flags.no_user_site:
        return False
    # Refuse (None) when running with changed privileges: real and
    # effective uid/gid must match.
    for real_name, effective_name in (("getuid", "geteuid"),
                                      ("getgid", "getegid")):
        real = getattr(os, real_name, None)
        effective = getattr(os, effective_name, None)
        if real is not None and effective is not None:
            if effective() != real():
                return None
    return True
def getuserbase():
    """Returns the `user base` directory path.
    The `user base` directory can be used to store data. If the global
    variable ``USER_BASE`` is not initialized yet, this function will also set
    it.
    """
    global USER_BASE
    if USER_BASE is not None:
        return USER_BASE   # already computed: reuse the cached value
    from sysconfig import get_config_var
    USER_BASE = get_config_var('userbase')
    return USER_BASE
def getusersitepackages():
    """Returns the user-specific site-packages directory path.
    If the global variable ``USER_SITE`` is not initialized yet, this
    function will also set it.
    """
    global USER_SITE
    user_base = getuserbase() # this will also set USER_BASE
    if USER_SITE is not None:
        return USER_SITE   # cached from a previous call
    from sysconfig import get_path
    import os
    if sys.platform == 'darwin':
        from sysconfig import get_config_var
        if get_config_var('PYTHONFRAMEWORK'):
            # macOS framework builds use a dedicated sysconfig scheme.
            USER_SITE = get_path('purelib', 'osx_framework_user')
            return USER_SITE
    # Generic scheme name, e.g. 'posix_user' or 'nt_user'.
    USER_SITE = get_path('purelib', '%s_user' % os.name)
    return USER_SITE
def addusersitepackages(known_paths):
    """Add a per user site-package to sys.path

    Each user has its own python directory with site-packages in the
    home directory.
    """
    # get the per user site-package path
    # this call will also make sure USER_BASE and USER_SITE are set
    user_site = getusersitepackages()
    # Only added when user site is enabled AND the directory exists.
    if ENABLE_USER_SITE and os.path.isdir(user_site):
        addsitedir(user_site, known_paths)
    return known_paths
def getsitepackages():
    """Returns a list containing all global site-packages directories
    (and possibly site-python).
    For each directory present in the global ``PREFIXES``, this function
    will find its `site-packages` subdirectory depending on the system
    environment, and will return a list of full paths.
    """
    sitepackages = []
    seen = set()   # skip empty and duplicate prefixes
    for prefix in PREFIXES:
        if not prefix or prefix in seen:
            continue
        seen.add(prefix)
        if sys.platform in ('os2emx', 'riscos'):
            sitepackages.append(os.path.join(prefix, "Lib", "site-packages"))
        elif os.sep == '/':
            # Unix layout: versioned site-packages plus legacy site-python.
            sitepackages.append(os.path.join(prefix, "lib",
                                        "python" + sys.version[:3],
                                        "site-packages"))
            sitepackages.append(os.path.join(prefix, "lib", "site-python"))
        else:
            # Windows layout: the prefix itself is also searched.
            sitepackages.append(prefix)
            sitepackages.append(os.path.join(prefix, "lib", "site-packages"))
        if sys.platform == "darwin":
            # for framework builds *only* we add the standard Apple
            # locations.
            from sysconfig import get_config_var
            framework = get_config_var("PYTHONFRAMEWORK")
            if framework and "/%s.framework/"%(framework,) in prefix:
                sitepackages.append(
                        os.path.join("/Library", framework,
                            sys.version[:3], "site-packages"))
    return sitepackages
def addsitepackages(known_paths):
    """Add site-packages (and possibly site-python) to sys.path"""
    existing_dirs = (d for d in getsitepackages() if os.path.isdir(d))
    for sitedir in existing_dirs:
        addsitedir(sitedir, known_paths)
    return known_paths
def setBEGINLIBPATH():
    """The OS/2 EMX port has optional extension modules that do double duty
    as DLLs (and must use the .DLL file extension) for other extensions.
    The library search path needs to be amended so these will be found
    during module import. Use BEGINLIBPATH so that these are at the start
    of the library search path.
    """
    dllpath = os.path.join(sys.prefix, "Lib", "lib-dynload")
    libpath = os.environ['BEGINLIBPATH'].split(';')
    if libpath[-1]:
        # Path did not end with ';': append a new entry.
        libpath.append(dllpath)
    else:
        # Trailing ';' left an empty final slot: fill it in.
        libpath[-1] = dllpath
    os.environ['BEGINLIBPATH'] = ';'.join(libpath)
def setquit():
    """Define new builtins 'quit' and 'exit'.
    These are objects which make the interpreter exit when called.
    The repr of each object contains a hint at how it works.
    """
    # Pick the platform's EOF key combination for the repr hint.
    if os.sep == ':':
        eof = 'Cmd-Q'
    elif os.sep == '\\':
        eof = 'Ctrl-Z plus Return'
    else:
        eof = 'Ctrl-D (i.e. EOF)'
    class Quitter(object):
        def __init__(self, name):
            self.name = name
        def __repr__(self):
            return 'Use %s() or %s to exit' % (self.name, eof)
        def __call__(self, code=None):
            # Shells like IDLE catch the SystemExit, but listen when their
            # stdin wrapper is closed.
            try:
                sys.stdin.close()
            except:
                pass
            raise SystemExit(code)
    __builtin__.quit = Quitter('quit')
    __builtin__.exit = Quitter('exit')
class _Printer(object):
    """interactive prompt objects for printing the license text, a list of
    contributors and the copyright notice."""
    MAXLINES = 23   # one screenful per pagination step
    def __init__(self, name, data, files=(), dirs=()):
        # name: builtin name ('copyright', 'credits', 'license').
        # data: fallback text when no file in dirs x files is readable.
        self.__name = name
        self.__data = data
        self.__files = files
        self.__dirs = dirs
        self.__lines = None   # loaded lazily by __setup()
    def __setup(self):
        # Lazily load the text: first readable dirs x files combination
        # wins; otherwise fall back to the literal data string.
        if self.__lines:
            return
        data = None
        for dir in self.__dirs:
            for filename in self.__files:
                filename = os.path.join(dir, filename)
                try:
                    fp = file(filename, "rU")
                    data = fp.read()
                    fp.close()
                    break
                except IOError:
                    pass
            if data:
                break
        if not data:
            data = self.__data
        self.__lines = data.split('\n')
        self.__linecnt = len(self.__lines)
    def __repr__(self):
        self.__setup()
        # Short texts print directly; long ones print a usage hint.
        if len(self.__lines) <= self.MAXLINES:
            return "\n".join(self.__lines)
        else:
            return "Type %s() to see the full %s text" % ((self.__name,)*2)
    def __call__(self):
        # Page through the text MAXLINES at a time; 'q' quits early.
        self.__setup()
        prompt = 'Hit Return for more, or q (and Return) to quit: '
        lineno = 0
        while 1:
            try:
                for i in range(lineno, lineno + self.MAXLINES):
                    print self.__lines[i]
            except IndexError:
                break   # ran off the end of the text
            else:
                lineno += self.MAXLINES
                key = None
                while key is None:
                    key = raw_input(prompt)
                    if key not in ('', 'q'):
                        key = None
                if key == 'q':
                    break
def setcopyright():
    """Set 'copyright' and 'credits' in __builtin__"""
    __builtin__.copyright = _Printer("copyright", sys.copyright)
    # Credits text varies by implementation (Jython / IronPython / CPython).
    if sys.platform[:4] == 'java':
        __builtin__.credits = _Printer(
            "credits",
            "Jython is maintained by the Jython developers (www.jython.org).")
    elif sys.platform == 'cli':
        __builtin__.credits = _Printer(
            "credits",
            "IronPython is maintained by the IronPython developers (www.ironpython.net).")
    else:
        __builtin__.credits = _Printer("credits", """\
    Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands
    for supporting Python development.  See www.python.org for more information.""")
    here = os.path.dirname(os.__file__)
    # 'license' prefers an on-disk LICENSE file; falls back to a URL hint.
    __builtin__.license = _Printer(
        "license", "See http://www.python.org/%.3s/license.html" % sys.version,
        ["LICENSE.txt", "LICENSE"],
        [os.path.join(here, os.pardir), here, os.curdir])
class _Helper(object):
"""Define the builtin 'help'.
This is a wrapper around pydoc.help (with a twist).
"""
def __repr__(self):
return "Type help() for interactive help, " \
"or help(object) for help about object."
def __call__(self, *args, **kwds):
import pydoc
return pydoc.help(*args, **kwds)
def sethelper():
    # Install the interactive 'help' builtin.
    __builtin__.help = _Helper()
def aliasmbcs():
    """On Windows, some default encodings are not provided by Python,
    while they are always available as "mbcs" in each locale. Make
    them usable by aliasing to "mbcs" in such a case."""
    if sys.platform == 'win32':
        import locale, codecs
        enc = locale.getdefaultlocale()[1]
        if enc.startswith('cp'):            # "cp***" ?
            try:
                codecs.lookup(enc)
            except LookupError:
                # Codec is missing: alias the locale codepage to mbcs.
                import encodings
                encodings._cache[enc] = encodings._unknown
                encodings.aliases.aliases[enc] = 'mbcs'
def setencoding():
    """Set the string encoding used by the Unicode implementation. The
    default is 'ascii', but if you're willing to experiment, you can
    change this."""
    encoding = "ascii" # Default value set by _PyUnicode_Init()
    # The two 'if 0:' blocks below are deliberate opt-in switches meant to
    # be edited by hand; they are disabled by default.
    if 0:
        # Enable to support locale aware default string encodings.
        import locale
        loc = locale.getdefaultlocale()
        if loc[1]:
            encoding = loc[1]
    if 0:
        # Enable to switch off string to Unicode coercion and implicit
        # Unicode to string conversion.
        encoding = "undefined"
    if encoding != "ascii":
        # On Non-Unicode builds this will raise an AttributeError...
        sys.setdefaultencoding(encoding) # Needs Python Unicode build !
def execsitecustomize():
    """Run custom site specific code, if available."""
    try:
        import sitecustomize
    except ImportError:
        pass   # no sitecustomize module installed: nothing to do
    except Exception:
        # Other failures are reported but never fatal during startup.
        if sys.flags.verbose:
            sys.excepthook(*sys.exc_info())
        else:
            print >>sys.stderr, \
                "'import sitecustomize' failed; use -v for traceback"
def execusercustomize():
    """Run custom user specific code, if available."""
    try:
        import usercustomize
    except ImportError:
        pass   # no usercustomize module installed: nothing to do
    except Exception:
        # Other failures are reported but never fatal during startup.
        if sys.flags.verbose:
            sys.excepthook(*sys.exc_info())
        else:
            print>>sys.stderr, \
                "'import usercustomize' failed; use -v for traceback"
def main():
    """Run all site-initialization steps in order, then invoke main() once
    at import time (this module runs during interpreter startup)."""
    global ENABLE_USER_SITE
    abs__file__()
    known_paths = removeduppaths()
    # Running from a source build? Then the build dir must be importable.
    if (os.name == "posix" and sys.path and
        os.path.basename(sys.path[-1]) == "Modules"):
        addbuilddir()
    if ENABLE_USER_SITE is None:
        ENABLE_USER_SITE = check_enableusersite()
    known_paths = addusersitepackages(known_paths)
    known_paths = addsitepackages(known_paths)
    if sys.platform == 'os2emx':
        setBEGINLIBPATH()
    setquit()
    setcopyright()
    sethelper()
    aliasmbcs()
    setencoding()
    execsitecustomize()
    if ENABLE_USER_SITE:
        execusercustomize()
    # Remove sys.setdefaultencoding() so that users cannot change the
    # encoding after initialization.  The test for presence is needed when
    # this module is run as a script, because this code is executed twice.
    if hasattr(sys, "setdefaultencoding"):
        del sys.setdefaultencoding
main()
def _script():
help = """\
%s [--user-base] [--user-site]
Without arguments print some useful information
With arguments print the value of USER_BASE and/or USER_SITE separated
by '%s'.
Exit codes with --user-base or --user-site:
0 - user site directory is enabled
1 - user site directory is disabled by user
2 - uses site directory is disabled by super user
or for security reasons
>2 - unknown error
"""
args = sys.argv[1:]
if not args:
print "sys.path = ["
for dir in sys.path:
print " %r," % (dir,)
print "]"
print "USER_BASE: %r (%s)" % (USER_BASE,
"exists" if os.path.isdir(USER_BASE) else "doesn't exist")
print "USER_SITE: %r (%s)" % (USER_SITE,
"exists" if os.path.isdir(USER_SITE) else "doesn't exist")
print "ENABLE_USER_SITE: %r" % ENABLE_USER_SITE
sys.exit(0)
buffer = []
if '--user-base' in args:
buffer.append(USER_BASE)
if '--user-site' in args:
buffer.append(USER_SITE)
if buffer:
print os.pathsep.join(buffer)
if ENABLE_USER_SITE:
sys.exit(0)
elif ENABLE_USER_SITE is False:
sys.exit(1)
elif ENABLE_USER_SITE is None:
sys.exit(2)
else:
sys.exit(3)
else:
import textwrap
print textwrap.dedent(help % (sys.argv[0], os.pathsep))
sys.exit(10)
if __name__ == '__main__':
_script()
| {
"repo_name": "bob-white/UnityIronPythonConsole",
"path": "Assets/IronPythonConsole/Plugins/Lib/site.py",
"copies": "41",
"size": "20474",
"license": "mpl-2.0",
"hash": -8330547354566737000,
"line_mean": 32.0759289176,
"line_max": 90,
"alpha_frac": 0.6002246752,
"autogenerated": false,
"ratio": 4.1021839310759365,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0019300703364212896,
"num_lines": 619
} |
"""Append module search paths for third-party packages to sys.path.
This is stripped down for use in py2app applications
"""
import sys, os
def makepath(*paths):
    """Join *paths*, absolutize the result, and return it together with
    its case-normalized form (used for duplicate detection on
    case-insensitive file systems).
    """
    joined = os.path.join(*paths)
    absolute = os.path.abspath(joined)
    return absolute, os.path.normcase(absolute)
# Make every already-imported module's __file__ attribute absolute,
# skipping entries without a usable on-disk path.
for m in sys.modules.values():
    f = getattr(m, '__file__', None)
    if isinstance(f, basestring) and os.path.exists(f):
        m.__file__ = os.path.abspath(m.__file__)
del m
# This ensures that the initial path provided by the interpreter contains
# only absolute pathnames, even if we're running from the build directory.
L = []
_dirs_in_sys_path = {}
dir = dircase = None # sys.path may be empty at this point
for dir in sys.path:
    # Filter out duplicate paths (on case-insensitive file systems also
    # if they only differ in case); turn relative paths into absolute
    # paths.
    dir, dircase = makepath(dir)
    if not dircase in _dirs_in_sys_path:
        L.append(dir)
        _dirs_in_sys_path[dircase] = 1
sys.path[:] = L
del dir, dircase, L
# Reset the cache; _init_pathinfo() rebuilds it lazily when needed.
_dirs_in_sys_path = None
def _init_pathinfo():
    """Rebuild the module-global cache of case-normalized directories
    currently on sys.path, skipping entries that are not directories
    (the empty string, meaning the current directory, is kept)."""
    global _dirs_in_sys_path
    cache = {}
    _dirs_in_sys_path = cache
    for entry in sys.path:
        if entry and not os.path.isdir(entry):
            continue
        _, normalized = makepath(entry)
        cache[normalized] = 1
def addsitedir(sitedir):
    """Append *sitedir* to sys.path (if not already known) and process
    every *.pth file it contains, in sorted order.

    Uses the module-global _dirs_in_sys_path cache; builds it on demand
    and resets it afterwards when this call created it.
    """
    global _dirs_in_sys_path
    if _dirs_in_sys_path is None:
        _init_pathinfo()
        reset = 1
    else:
        reset = 0
    sitedir, sitedircase = makepath(sitedir)
    if not sitedircase in _dirs_in_sys_path:
        sys.path.append(sitedir)        # Add path component
    try:
        names = os.listdir(sitedir)
    except os.error:
        # Directory missing or unreadable: nothing to scan.
        return
    names.sort()
    for name in names:
        if name[-4:] == os.extsep + "pth":
            addpackage(sitedir, name)
    if reset:
        _dirs_in_sys_path = None
def addpackage(sitedir, name):
global _dirs_in_sys_path
if _dirs_in_sys_path is None:
_init_pathinfo()
reset = 1
else:
reset = 0
fullname = os.path.join(sitedir, name)
try:
f = open(fullname)
except IOError:
return
while 1:
dir = f.readline()
if not dir:
break
if dir[0] == '#':
continue
if dir.startswith("import"):
exec dir
continue
if dir[-1] == '\n':
dir = dir[:-1]
dir, dircase = makepath(sitedir, dir)
if not dircase in _dirs_in_sys_path and os.path.exists(dir):
sys.path.append(dir)
_dirs_in_sys_path[dircase] = 1
if reset:
_dirs_in_sys_path = None
#sys.setdefaultencoding('utf-8')
#
# Run custom site specific code, if available.
#
try:
    import sitecustomize
except ImportError:
    # Optional hook: absence is the normal, silent case.
    pass
#
# Remove sys.setdefaultencoding() so that users cannot change the
# encoding after initialization. The test for presence is needed when
# this module is run as a script, because this code is executed twice.
#
if hasattr(sys, "setdefaultencoding"):
    del sys.setdefaultencoding
| {
"repo_name": "kamitchell/py2app",
"path": "py2app/bundletemplate/lib/site.py",
"copies": "2",
"size": "3042",
"license": "mit",
"hash": -4905018189261406000,
"line_mean": 25.9203539823,
"line_max": 74,
"alpha_frac": 0.6101249178,
"autogenerated": false,
"ratio": 3.433408577878104,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5043533495678104,
"avg_score": null,
"num_lines": null
} |
"""Append module search paths for third-party packages to sys.path.
****************************************************************
* This module is automatically imported during initialization. *
****************************************************************
In earlier versions of Python (up to 1.5a3), scripts or modules that
needed to use site-specific modules would place ``import site''
somewhere near the top of their code. Because of the automatic
import, this is no longer necessary (but code that does it still
works).
This will append site-specific paths to the module search path. On
Unix (including Mac OSX), it starts with sys.prefix and
sys.exec_prefix (if different) and appends
lib/python<version>/site-packages as well as lib/site-python.
On other platforms (such as Windows), it tries each of the
prefixes directly, as well as with lib/site-packages appended. The
resulting directories, if they exist, are appended to sys.path, and
also inspected for path configuration files.
A path configuration file is a file whose name has the form
<package>.pth; its contents are additional directories (one per line)
to be added to sys.path. Non-existing directories (or
non-directories) are never added to sys.path; no directory is added to
sys.path more than once. Blank lines and lines beginning with
'#' are skipped. Lines starting with 'import' are executed.
For example, suppose sys.prefix and sys.exec_prefix are set to
/usr/local and there is a directory /usr/local/lib/python2.5/site-packages
with three subdirectories, foo, bar and spam, and two path
configuration files, foo.pth and bar.pth. Assume foo.pth contains the
following:
# foo package configuration
foo
bar
bletch
and bar.pth contains:
# bar package configuration
bar
Then the following directories are added to sys.path, in this order:
/usr/local/lib/python2.5/site-packages/bar
/usr/local/lib/python2.5/site-packages/foo
Note that bletch is omitted because it doesn't exist; bar precedes foo
because bar.pth comes alphabetically before foo.pth; and spam is
omitted because it is not mentioned in either path configuration file.
After these path manipulations, an attempt is made to import a module
named sitecustomize, which can perform arbitrary additional
site-specific customizations. If this import fails with an
ImportError exception, it is silently ignored.
"""
import sys
import os
import __builtin__
def makepath(*paths):
    """Return ``(absolute, case_normalized)`` for the joined *paths*.

    The case-normalized form is what callers use as a duplicate-detection
    key on case-insensitive file systems.
    """
    combined = os.path.join(*paths)
    combined = os.path.abspath(combined)
    return combined, os.path.normcase(combined)
def abs__file__():
    """Make every loaded module's ``__file__`` attribute absolute.

    Modules carrying a ``__loader__`` (PEP 302 import hooks) are left
    alone, as are modules without any ``__file__`` at all.
    """
    for module in sys.modules.values():
        if hasattr(module, '__loader__'):
            # A PEP 302 loader supplied __file__; don't second-guess it.
            continue
        try:
            module.__file__ = os.path.abspath(module.__file__)
        except AttributeError:
            # No __file__ (e.g. built-in modules or None placeholders).
            continue
def removeduppaths():
    """Normalize sys.path in place: make every entry absolute and drop
    duplicates (case-insensitively on case-folding file systems).

    Returns the set of case-normalized paths that were kept.
    """
    # This ensures that the initial path provided by the interpreter
    # contains only absolute pathnames, even when running from the
    # build directory.
    deduped = []
    known_paths = set()
    for entry in sys.path:
        entry, entrycase = makepath(entry)
        if entrycase not in known_paths:
            deduped.append(entry)
            known_paths.add(entrycase)
    sys.path[:] = deduped
    return known_paths
# XXX This should not be part of site.py, since it is needed even when
# using the -S option for Python. See http://www.python.org/sf/586680
def addbuilddir():
    """Append ./build/lib.<platform> in case we're running in the build dir
    (especially for Guido :-)"""
    from distutils.util import get_platform
    # e.g. "build/lib.linux-x86_64-2.7"; %.3s truncates sys.version to "2.7"
    s = "build/lib.%s-%.3s" % (get_platform(), sys.version)
    # The caller (main) only invokes this when sys.path[-1] is the
    # Modules directory of a source checkout.
    s = os.path.join(os.path.dirname(sys.path[-1]), s)
    sys.path.append(s)
def _init_pathinfo():
    """Return the set of case-normalized sys.path entries that exist as
    directories on disk."""
    existing = set()
    for entry in sys.path:
        try:
            if os.path.isdir(entry):
                _, entrycase = makepath(entry)
                existing.add(entrycase)
        except TypeError:
            # Non-string sys.path entries are simply ignored.
            continue
    return existing
def addpackage(sitedir, name, known_paths):
"""Add a new path to known_paths by combining sitedir and 'name' or execute
sitedir if it starts with 'import'"""
if known_paths is None:
_init_pathinfo()
reset = 1
else:
reset = 0
fullname = os.path.join(sitedir, name)
try:
f = open(fullname, "rU")
except IOError:
return
try:
for line in f:
if line.startswith("#"):
continue
if line.startswith("import"):
exec line
continue
line = line.rstrip()
dir, dircase = makepath(sitedir, line)
if not dircase in known_paths and os.path.exists(dir):
sys.path.append(dir)
known_paths.add(dircase)
finally:
f.close()
if reset:
known_paths = None
return known_paths
def addsitedir(sitedir, known_paths=None):
    """Append *sitedir* to sys.path when missing and process every .pth
    file it contains (in sorted order).

    Returns the (possibly reset) known_paths set.
    """
    if known_paths is None:
        known_paths = _init_pathinfo()
        created_here = 1
    else:
        created_here = 0
    sitedir, sitedircase = makepath(sitedir)
    if sitedircase not in known_paths:
        # New directory: make it importable.
        sys.path.append(sitedir)
    try:
        names = os.listdir(sitedir)
    except os.error:
        # Directory missing or unreadable: nothing to scan.
        return
    names.sort()
    pth_suffix = os.extsep + "pth"
    for name in names:
        if name.endswith(pth_suffix):
            addpackage(sitedir, name, known_paths)
    if created_here:
        known_paths = None
    return known_paths
def addsitepackages(known_paths):
    """Add site-packages (and possibly site-python) to sys.path"""
    prefixes = [sys.prefix]
    if sys.exec_prefix != sys.prefix:
        prefixes.append(sys.exec_prefix)
    for prefix in prefixes:
        if prefix:
            if sys.platform in ('os2emx', 'riscos'):
                sitedirs = [os.path.join(prefix, "Lib", "site-packages")]
            elif os.sep == '/':
                # POSIX layout: versioned site-packages plus the legacy
                # unversioned site-python directory.
                sitedirs = [os.path.join(prefix,
                                         "lib",
                                         "python" + sys.version[:3],
                                         "site-packages"),
                            os.path.join(prefix, "lib", "site-python")]
            else:
                # Windows and others: the prefix itself is also searched.
                sitedirs = [prefix, os.path.join(prefix, "lib", "site-packages")]
            if sys.platform == 'darwin':
                # for framework builds *only* we add the standard Apple
                # locations. Currently only per-user, but /Library and
                # /Network/Library could be added too
                if 'Python.framework' in prefix:
                    home = os.environ.get('HOME')
                    if home:
                        sitedirs.append(
                            os.path.join(home,
                                         'Library',
                                         'Python',
                                         sys.version[:3],
                                         'site-packages'))
            for sitedir in sitedirs:
                if os.path.isdir(sitedir):
                    addsitedir(sitedir, known_paths)
    return None
def setBEGINLIBPATH():
    """The OS/2 EMX port has optional extension modules that do double duty
    as DLLs (and must use the .DLL file extension) for other extensions.
    The library search path needs to be amended so these will be found
    during module import. Use BEGINLIBPATH so that these are at the start
    of the library search path.
    """
    dllpath = os.path.join(sys.prefix, "Lib", "lib-dynload")
    # NOTE(review): raises KeyError when BEGINLIBPATH is unset; the
    # variable is assumed to always exist on OS/2 EMX.
    libpath = os.environ['BEGINLIBPATH'].split(';')
    if libpath[-1]:
        libpath.append(dllpath)
    else:
        # A trailing ';' left an empty final element; reuse that slot.
        libpath[-1] = dllpath
    os.environ['BEGINLIBPATH'] = ';'.join(libpath)
def setquit():
    """Define new built-ins 'quit' and 'exit'.
    These are simply strings that display a hint on how to exit.
    """
    if os.sep == ':':
        eof = 'Cmd-Q'                   # classic Mac OS
    elif os.sep == '\\':
        eof = 'Ctrl-Z plus Return'      # Windows
    else:
        eof = 'Ctrl-D (i.e. EOF)'       # POSIX
    class Quitter(object):
        # Callable whose repr() hints at how to leave the interpreter.
        def __init__(self, name):
            self.name = name
        def __repr__(self):
            return 'Use %s() or %s to exit' % (self.name, eof)
        def __call__(self, code=None):
            # Shells like IDLE catch the SystemExit, but listen when their
            # stdin wrapper is closed.
            try:
                sys.stdin.close()
            except:
                pass
            raise SystemExit(code)
    __builtin__.quit = Quitter('quit')
    __builtin__.exit = Quitter('exit')
class _Printer(object):
    """interactive prompt objects for printing the license text, a list of
    contributors and the copyright notice."""
    MAXLINES = 23  # page size used by the interactive pager in __call__
    def __init__(self, name, data, files=(), dirs=()):
        # Text is resolved lazily in __setup: each of *files* is tried
        # inside each of *dirs*, falling back to the literal *data*.
        self.__name = name
        self.__data = data
        self.__files = files
        self.__dirs = dirs
        self.__lines = None
    def __setup(self):
        # Populate self.__lines on first use (idempotent thereafter).
        if self.__lines:
            return
        data = None
        for dir in self.__dirs:
            for filename in self.__files:
                filename = os.path.join(dir, filename)
                try:
                    fp = file(filename, "rU")
                    data = fp.read()
                    fp.close()
                    break
                except IOError:
                    pass
            if data:
                break
        if not data:
            data = self.__data
        self.__lines = data.split('\n')
        self.__linecnt = len(self.__lines)
    def __repr__(self):
        self.__setup()
        if len(self.__lines) <= self.MAXLINES:
            return "\n".join(self.__lines)
        else:
            return "Type %s() to see the full %s text" % ((self.__name,)*2)
    def __call__(self):
        # Simple pager: print MAXLINES at a time; 'q' + Return quits.
        self.__setup()
        prompt = 'Hit Return for more, or q (and Return) to quit: '
        lineno = 0
        while 1:
            try:
                for i in range(lineno, lineno + self.MAXLINES):
                    print self.__lines[i]
            except IndexError:
                break
            else:
                lineno += self.MAXLINES
                key = None
                while key is None:
                    key = raw_input(prompt)
                    if key not in ('', 'q'):
                        key = None
                if key == 'q':
                    break
def setcopyright():
    """Set 'copyright' and 'credits' in __builtin__ (plus 'license',
    which lazily reads LICENSE.txt/LICENSE next to the stdlib)."""
    __builtin__.copyright = _Printer("copyright", sys.copyright)
    if sys.platform[:4] == 'java':
        __builtin__.credits = _Printer(
            "credits",
            "Jython is maintained by the Jython developers (www.jython.org).")
    else:
        __builtin__.credits = _Printer("credits", """\
    Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands
    for supporting Python development. See www.python.org for more information.""")
    here = os.path.dirname(os.__file__)
    __builtin__.license = _Printer(
        "license", "See http://www.python.org/%.3s/license.html" % sys.version,
        ["LICENSE.txt", "LICENSE"],
        [os.path.join(here, os.pardir), here, os.curdir])
class _Helper(object):
"""Define the built-in 'help'.
This is a wrapper around pydoc.help (with a twist).
"""
def __repr__(self):
return "Type help() for interactive help, " \
"or help(object) for help about object."
def __call__(self, *args, **kwds):
import pydoc
return pydoc.help(*args, **kwds)
def sethelper():
    """Install the interactive 'help' built-in (a _Helper instance)."""
    __builtin__.help = _Helper()
def aliasmbcs():
    """On Windows, some default encodings are not provided by Python,
    while they are always available as "mbcs" in each locale. Make
    them usable by aliasing to "mbcs" in such a case."""
    if sys.platform == 'win32':
        import locale, codecs
        enc = locale.getdefaultlocale()[1]
        # bug fix: getdefaultlocale() may return (None, None); only probe
        # real "cp###" codepage names.
        if enc and enc.startswith('cp'):            # "cp***" ?
            try:
                codecs.lookup(enc)
            except LookupError:
                import encodings
                encodings._cache[enc] = encodings._unknown
                encodings.aliases.aliases[enc] = 'mbcs'
def setencoding():
    """Set the string encoding used by the Unicode implementation. The
    default is 'ascii', but if you're willing to experiment, you can
    change this.

    Note: the two ``if 0:`` branches below are deliberate build-time
    toggles; flip a 0 to 1 to enable the alternative behaviour.
    """
    encoding = "ascii" # Default value set by _PyUnicode_Init()
    if 0:
        # Enable to support locale aware default string encodings.
        import locale
        loc = locale.getdefaultlocale()
        if loc[1]:
            encoding = loc[1]
    if 0:
        # Enable to switch off string to Unicode coercion and implicit
        # Unicode to string conversion.
        encoding = "undefined"
    if encoding != "ascii":
        # On Non-Unicode builds this will raise an AttributeError...
        sys.setdefaultencoding(encoding) # Needs Python Unicode build !
def execsitecustomize():
    """Run custom site specific code, if available.

    The sitecustomize module is imported purely for its side effects;
    a missing module is the normal, silent case.
    """
    try:
        import sitecustomize
    except ImportError:
        pass
def main():
    """Startup sequence for this stripped-down site module: normalize
    sys.path, add site-packages, install built-ins, run hooks."""
    abs__file__()
    paths_in_sys = removeduppaths()
    if (os.name == "posix" and sys.path and
        os.path.basename(sys.path[-1]) == "Modules"):
        # Running from a source build tree: make the build dir importable.
        addbuilddir()
    paths_in_sys = addsitepackages(paths_in_sys)
    if sys.platform == 'os2emx':
        setBEGINLIBPATH()
    setquit()
    setcopyright()
    sethelper()
    aliasmbcs()
    setencoding()
    execsitecustomize()
    # Remove sys.setdefaultencoding() so that users cannot change the
    # encoding after initialization. The test for presence is needed when
    # this module is run as a script, because this code is executed twice.
    if hasattr(sys, "setdefaultencoding"):
        del sys.setdefaultencoding
# Executed at import time, like the real site module.
main()
def _test():
    """Debug helper: dump sys.path when this module is run as a script."""
    print "sys.path = ["
    for dir in sys.path:
        print "    %r," % (dir,)
    print "]"
if __name__ == '__main__':
    _test()
| {
"repo_name": "ericlink/adms-server",
"path": "playframework-dist/1.1-src/python/Lib/site.py",
"copies": "2",
"size": "14828",
"license": "mit",
"hash": 3030137560166605000,
"line_mean": 32.9716981132,
"line_max": 84,
"alpha_frac": 0.5613029404,
"autogenerated": false,
"ratio": 4.220893822943354,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5782196763343354,
"avg_score": null,
"num_lines": null
} |
"append more urls to the urlconf tuple for all the model filters in more_django_filters"
from os.path import basename, dirname
from django.db.models import get_app, get_models
from django.conf.urls import url, patterns, include
#from pug.db import more_django_filters
from pug.nlp.util import listify
def append_urls(local, app_names=None):
    """Register '<app>/<Model>' REST list URLs for every model in each app.

    *local* is the calling urls.py's globals() dict (must contain
    __file__, may contain urlpatterns); *app_names* defaults to the
    directory name of the calling module.  Mutates local['urlpatterns'].
    """
    app_names = listify(app_names or basename(dirname(local.get('__file__', None))))
    urlpatterns = local.get('urlpatterns', patterns(''))
    for app_name in app_names:
        print 'Composing REST API URLs (Django urlconf entries) for app named %r' % app_name
        views_name = app_name + '.views'
        app_module = __import__(views_name)
        app = get_app(app_name)
        for Model in get_models(app):
            # print Model
            model_name = Model.__name__
            # Each model is expected to expose a '<Model>List' class-based view.
            View = app_module.views.__dict__[model_name + 'List']
            urlpatterns += patterns('', url(r'^' + app_name + r'/' + model_name, View.as_view()))#, name='order-list'),)
    local['urlpatterns'] = urlpatterns
def append_app_urls(local, app_names):
    """Include each listed app's own urls.py into the caller's urlpatterns.

    *local* is the calling urls.py's globals() dict; mutated in place.
    """
    app_names = listify(app_names) # or local.get('local.settings.INSTALLED_APPS') ;)
    urlpatterns = local.get('urlpatterns', patterns(''))
    for app_name in app_names:
        urlpatterns += patterns('', url(r'^', include('%s.urls' % app_name)))#, name='order-list'),)
    local['urlpatterns'] = urlpatterns
| {
"repo_name": "hobson/pug-dj",
"path": "pug/dj/more_django_filters_urls.py",
"copies": "1",
"size": "1417",
"license": "mit",
"hash": -2505073002687481300,
"line_mean": 39.4857142857,
"line_max": 120,
"alpha_frac": 0.6450247001,
"autogenerated": false,
"ratio": 3.6240409207161126,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4769065620816113,
"avg_score": null,
"num_lines": null
} |
# append paper number to links below to get details
# for eg : http://ieeexplore.ieee.org/xpl/articleDetails.jsp?arnumber=6196220
# Base IEEE Xplore endpoints; a numeric 'arnumber' is appended at call time.
details_link = 'http://ieeexplore.ieee.org/xpl/articleDetails.jsp?arnumber='
authors_link = 'http://ieeexplore.ieee.org/xpl/abstractAuthors.jsp?arnumber='
references_link = 'http://ieeexplore.ieee.org/xpl/abstractReferences.jsp?arnumber='
citations_link = 'http://ieeexplore.ieee.org/xpl/abstractCitations.jsp?arnumber='
import urllib2
from bs4 import BeautifulSoup
def get_id(link):
    """Extract the IEEE Xplore 'arnumber' (paper id) from a URL-ish string.

    Returns the digits following 'arnumber=' as a string, or "" when
    *link* has no arnumber parameter or is not a string at all.
    """
    import re
    try:
        match = re.search(r'(.*)arnumber=(\d*).*', link)
    except TypeError:
        # link was not a string (e.g. None) -- previously swallowed by a
        # bare except; narrowed to the only realistic failure mode.
        return ""
    if match:
        return match.group(2)
    return ""
def get_authors(id):
    """Scrape the author list for paper *id* from IEEE Xplore.

    Returns a list of author-name strings; network or parse failures
    yield an empty list (best effort).
    """
    link = authors_link + str(id)
    # print "[INFO] Fetching authors from link " + str(link)
    authors = []
    try:
        page = urllib2.urlopen(link).read()
        soup = BeautifulSoup(page, 'html.parser')
        # print soup.prettify()
        metas = soup.find_all("meta", attrs={"name": "citation_author"})
        for meta in metas:
            if meta['name'] == 'citation_author':
                authors.append(meta['content'])
    except:
        # NOTE(review): bare except deliberately swallows network/parse
        # errors so one bad paper cannot abort the whole crawl.
        pass
    # print "[DEBUG] ", authors
    return authors
def get_references(id):
link = references_link + str(id)
paper = {}
paper['arnumber'] = id
paper['authors'] = get_authors(id)
paper['citations'] = []
print "[DEBUG] id : " + str(id) + ", authors : ", paper['authors']
try:
page = urllib2.urlopen(link).read()
soup = BeautifulSoup(page, 'html.parser')
ol = soup.find('ol', attrs={'class': 'docs'})
lis = ol.findAll('li')
for li in lis:
citation = li.find('a')
citation_id = get_id(str(citation))
citation_authors = get_authors(citation_id)
if citation_id is "" :
continue
paper['citations'].append({'arnumber' : citation_id, 'authors' : citation_authors})
except:
pass
return paper
all_papers = []  # shared accumulator filled by the worker threads below

def compute_in_parallel(args):
    """Thread worker: resolve each link in *args* and append its
    reference data to the shared all_papers list (list.append is
    atomic under the GIL)."""
    for arg in args:
        # print "\n[INFO] Processing " + str(arg)
        paper_id = get_id(arg)
        # bug fix: 'is not' compares identity, not equality; small-string
        # interning made the original work only by accident.  Also avoid
        # shadowing the builtin 'id'.
        if paper_id != "":
            all_papers.append(get_references(paper_id))
def citation_network():
    """Build the citation network: read paper links from the spreadsheet,
    scrape each paper's references in parallel threads, and dump the
    accumulated result as JSON."""
    from openpyxl import load_workbook
    wb = load_workbook(filename='../data/Data3.xlsx', read_only=True)
    ws = wb['Sheet1']
    links = []
    for row in ws.iter_rows('G39:G250'):
        for cell in row:
            links.append(cell.value)
    import threading
    num_per_thread = 40  # links handled by each worker thread
    divide = lambda lst, sz: [lst[i : i + sz] for i in range(0, len(lst), sz)]
    divided_links = divide(links, num_per_thread)
    print "[INFO] Number of threads ", (len(links) / num_per_thread) + 1
    threads = []
    for i in range(len(divided_links)):
        t = threading.Thread(target=compute_in_parallel, args=(divided_links[i],))
        threads.append(t)
        t.start()
    for t in threads :
        t.join()
    print "[INFO] Number of papers " + str(len(all_papers))
    print "[INFO] Writing papers to file"
    import json
    file = open("../output/ieee_citation_network.json", "w")
    file.write(json.dumps(all_papers, indent=4))
    file.close()
    print "[INFO] Done Writing papers to file"
# Kick off the crawl at import time.
citation_network()
| {
"repo_name": "SciBase-Project/internationality-journals",
"path": "src/ieeexplore_citations.py",
"copies": "3",
"size": "3387",
"license": "mit",
"hash": 5290525828017560000,
"line_mean": 24.4661654135,
"line_max": 95,
"alpha_frac": 0.5904930617,
"autogenerated": false,
"ratio": 3.3667992047713717,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5457292266471372,
"avg_score": null,
"num_lines": null
} |
#Append path and import
from linkedin import linkedin
import urllib
#########################
#Import secrets
from secrets import secret
#Info I don't want on Linkedin, but want on a submitted CV:
from person_info import person_info
#######################
#Load auth keys
# Credentials come from the local secrets module (kept out of the repo).
CONSUMER_KEY = secret['API_KEY']
CONSUMER_SECRET = secret['Secret_Key']
USER_TOKEN = secret['OAUT']
USER_SECRET = secret['OAUS']
# OAuth redirect target; must match the app's registered return URL.
RETURN_URL = 'http://localhost:8000'
def fetch_newest_data():
    """Authenticate against the LinkedIn API and return the caller's
    profile (the selected fields only) as a dict."""
    authentication = linkedin.LinkedInDeveloperAuthentication(CONSUMER_KEY, CONSUMER_SECRET,
                                                              USER_TOKEN, USER_SECRET,
                                                              RETURN_URL, linkedin.PERMISSIONS.enums.values())
    # Pass it in to the app...
    application = linkedin.LinkedInApplication(authentication)
    # Use the app....
    app_data=application.get_profile(selectors=['id', 'first-name', 'last-name', 'location', 'distance', 'num-connections', 'skills', 'educations','interests','honors-awards','num-recommenders','positions'])
    return app_data
def return_tidy_ln_data():
    """Fetch the latest LinkedIn profile and unpack it into
    (names, education, skills, interests, awards, positions)."""
    #Fetch current profile
    app_data = fetch_newest_data()
    #Separate items
    education = app_data['educations']
    names = [app_data['firstName'],app_data['lastName']]
    #Strip skills
    skill_list = app_data['skills']
    skills = [skill['skill']['name'] for skill in skill_list['values']]
    # NOTE(review): assumes 'interests' is a comma-separated string --
    # verify against the API response shape.
    interests_lists = app_data['interests']
    interests = interests_lists.split(',')
    #strip honours:
    awards_list = app_data['honorsAwards']
    awards = [award for award in awards_list['values']]
    positions = [pos for pos in app_data['positions']['values']]
    return names,education,skills,interests,awards,positions
| {
"repo_name": "osheadavid7/py-resume",
"path": "fetch_linkedin.py",
"copies": "1",
"size": "1775",
"license": "mit",
"hash": 9078738610046150000,
"line_mean": 31.8703703704,
"line_max": 207,
"alpha_frac": 0.6354929577,
"autogenerated": false,
"ratio": 3.69022869022869,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.482572164792869,
"avg_score": null,
"num_lines": null
} |
"""Append result to shapefile
"""
'''import logging
import shapefile
def write_result_shapefile(lad_geometry_shp, out_shape, field_names, csv_results):
"""
Join result attributes to LAD geography with
pyhape library
Arguments
---------
lad_geometry_shp : str
Path to LAD shapefile
out_shape : str
Path to new shapefile
field_names : list
list with new attribute field name
csv_results : list
list with result dicts
Info
-----
pip install pyshp
https://github.com/GeospatialPython/pyshp#reading-shapefiles-from-file-like-objects
http://www.qgistutorials.com/en/docs/performing_table_joins_pyqgis.html
"""
# Read in our existing shapefile
lad_geometry_shp_name = lad_geometry_shp[:-3]
myshp = open(lad_geometry_shp_name + "shp", "rb")
mydbf = open(lad_geometry_shp_name + "dbf", "rb")
record = shapefile.Reader(shp=myshp, dbf=mydbf)
# Create a new shapefile in memory
writer = shapefile.Writer()
# Copy over the existing fields
writer.fields = list(record.fields)
# --------------
# Add new fields
# --------------
for field_name in field_names:
writer.field(field_name, "F", decimal=10) #Float
# Get position of field 'name'
position = 0
for field_name in record.fields[1:]:
if field_name[0] == 'name': #corresponds to LAD Geocode
position_field_name = position
break
else:
position += 1
# --------------------------
# Join fields programatically
# --------------------------
missing_recors = set()
# Loop through each record, add a column and get results
for rec in record.records():
# Get geocode for row
geo_code = rec[position_field_name]
# Iterate result entries in list
for result_per_field in csv_results:
# Iterate result entries and add
try:
result_csv = result_per_field[geo_code]
except KeyError:
# No results
result_csv = 0
missing_recors.add(geo_code)
# Add specific fuel result
rec.append(result_csv)
# Add the modified record to the new shapefile
writer.records.append(rec)
if missing_recors != []:
logging.warning(
"No result value for regions '%s' in joining shapefile",
missing_recors)
else:
pass
# Copy over the geometry without any changes
writer._shapes.extend(record.shapes())
# Save as a new shapefile (or write over the old one)
writer.save(out_shape)
logging.info("... finished writing shp")
return'''
| {
"repo_name": "nismod/energy_demand",
"path": "energy_demand/geography/write_shp.py",
"copies": "1",
"size": "2730",
"license": "mit",
"hash": -4208327431675163600,
"line_mean": 26.8571428571,
"line_max": 87,
"alpha_frac": 0.5875457875,
"autogenerated": false,
"ratio": 4.002932551319648,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5090478338819647,
"avg_score": null,
"num_lines": null
} |
# Appends attachments from one feature class into a new empty feature class. Assumes attachments related by GLOBALID.
#START OF VARIABLES TO CHANGE
GDB_location = r"path\to\file.gdb"
source = "sourceFeatureClass"
target = "emptyTargetFeatureClass"
source_attachment = "sourceAttachments__ATTACH"
#END OF VARIABLES TO CHANGE
import arcpy
# Scratch table/field names used during the migration.
target_attachment = target + "__ATTACH"
jointable = "jointable"
newfield = "oldglobalid"
tableview = "test1234"
temptable = "deleteme"
newglobalid = "newglobalid"
arcpy.env.workspace = GDB_location
arcpy.env.overwriteOutput = True
arcpy.env.qualifiedFieldNames = True
# NOTE(review): 'fieldsyntax' appears unused below.
fieldsyntax = target + "_" + "GLOBALID"
print("Enabling attachments")
arcpy.EnableAttachments_management(target)
# Step 1: stash each row's GLOBALID in a plain text field -- Append
# assigns fresh GLOBALIDs and would otherwise break the relationship.
for table in [target, source, source_attachment]:
    print("Adding field {0} to {1}".format(newfield, table))
    arcpy.AddField_management(table, newfield, "TEXT")
    edit = arcpy.da.Editor(arcpy.env.workspace)
    edit.startEditing(False, False)
    edit.startOperation()
    oid = "GLOBALID"
    # Attachment tables reference their parent row via REL_GLOBALID.
    if table == source_attachment: oid = "REL_GLOBALID"
    print("Persisting old global id in {0}".format(table))
    with arcpy.da.UpdateCursor(table, [oid, newfield]) as cur:
        for row in cur:
            row[1] = row[0]
            cur.updateRow(row)
    edit.stopOperation()
    edit.stopEditing(True)
# Step 2: copy the features, then record the freshly assigned GLOBALIDs.
print("Appending {0} to {1}".format(source, target))
arcpy.Append_management(source, target, "NO_TEST")
print("Adding field {0} to {1}".format(newglobalid, target))
arcpy.AddField_management(target, newglobalid, "TEXT")
edit = arcpy.da.Editor(arcpy.env.workspace)
edit.startEditing(False, False)
edit.startOperation()
print("Persisting new global id in {0}".format(target))
with arcpy.da.UpdateCursor(target, ["GLOBALID", newglobalid]) as cur:
    for row in cur:
        row[1] = row[0]
        cur.updateRow(row)
edit.stopOperation()
edit.stopEditing(True)
# Step 3: join the source attachments to the target on the preserved old
# id, exposing only the old/new id fields.
fieldinfo = arcpy.FieldInfo()
fields = arcpy.Describe(target).fields
for field in fields:
    if field.name == newfield or field.name == newglobalid:
        fieldinfo.addField(field.name, field.name, "VISIBLE", "")
        continue
    fieldinfo.addField(field.name, field.name, "HIDDEN", "")
arcpy.MakeFeatureLayer_management(target, jointable, field_info = fieldinfo)
arcpy.MakeTableView_management(source_attachment, tableview)
print("Joining {0} to {1} based off of key field {2}.".format(jointable, tableview, newfield))
arcpy.AddJoin_management(tableview, newfield, jointable, newfield, "KEEP_COMMON")
print("Creating table {0}".format(temptable))
arcpy.TableToTable_conversion(tableview, arcpy.env.workspace, temptable)
edit = arcpy.da.Editor(arcpy.env.workspace)
edit.startEditing(False, False)
edit.startOperation()
print("Assigning REL_GLOBALID in {0} to the new global id in {1}".format(temptable, target))
with arcpy.da.UpdateCursor(temptable, [target + "_" + newglobalid, "REL_GLOBALID"]) as cur:
    for row in cur:
        row[1] = row[0]
        cur.updateRow(row)
edit.stopOperation()
edit.stopEditing(True)
# Step 4: append the re-keyed attachment rows, then drop scratch data.
print("Appending {0} to {1}".format(temptable, target_attachment))
arcpy.Append_management(temptable, target_attachment, "NO_TEST")
print("Deleting {0}.".format(temptable))
arcpy.Delete_management(temptable)
for table in [target, source, source_attachment]:
    print("Removing field {0} from {1}".format(newfield, table))
    arcpy.DeleteField_management(table, newfield)
print("Removing field {0} from {1}".format(newglobalid, target))
arcpy.DeleteField_management(target, newglobalid)
print("Finished! Check {0}".format(target))
| {
"repo_name": "briantwatson/developer-support",
"path": "python/arcpy-python/append-attachments/appendAttachments.py",
"copies": "8",
"size": "3633",
"license": "apache-2.0",
"hash": 1019858980843110400,
"line_mean": 33.9326923077,
"line_max": 117,
"alpha_frac": 0.7145609689,
"autogenerated": false,
"ratio": 3.2967332123411976,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8011294181241198,
"avg_score": null,
"num_lines": null
} |
# Appends episode names
# Expects a file containing the list of episode names, one per line in chronological order
# Has no validations, tread carefully
# 25-March-2015
############################################
# Imports
############################################
import os
import re
import sys
import glob
############################################
# User Input section
############################################
# EXPECTED USER INPUT 1:
# Enter the path to the file containing the episode names
epname_source = ""
# EXPECTED USER INPUT 2:
# Enter the folder containing the episodes
# (Enter the trailing slash too)
episodes_dir = ""
# OPTIONAL USER INPUT 3:
# Enter the prefix for each episode name
# This could be something like 'S06E'
epname_prefix = "S06E"
# OPTIONAL USER INPUT 4:
# Enter the file extension to process
# Doesn't support processing multiple file extension filters
epname_extension = "*.*"
# OPTIONAL USER INPUT 5:
# Enter a list of filenames to exclude from processing
# This is added to skip over annoyances like Thumbs.db and .DS_Store
# Separate file names by ',' or ';'
filenames_to_exclude = "Thumbs.db;.DS_Store"
# OPTIONAL USER INPUT 6:
# Enter the seperator to use between the episode numbering and the episode name
epname_separator = " - "
# OPTIONAL USER INPUT 7:
# Enter the log file name to be used
logfile_name = "appender.log"
# OPTIONAL USER INPUT 7:
# Enter the error report file name to be used
errorfile_name = "appender_errors.log"
############################################
# Constants
############################################
debug_flag = "test"
file_write_mode = "w+"
regex_separator = ';|,'
file_extension_splitter = '.'
############################################
# Helper functions
############################################
#########################
# Substitute for TryParse
def RepresentsInt(s):
    """Return True when s can be parsed as an int, False on ValueError.

    Note: inputs that raise TypeError (e.g. None) still propagate,
    exactly as in the original TryParse-style helper.
    """
    try:
        int(s)
    except ValueError:
        return False
    else:
        return True
############################################
# Process command line args (if any)
############################################
# Set Mode
# Debug mode (triggered by passing "test" on the command line) prints the
# planned renames instead of performing them.
if debug_flag in sys.argv:
    debugMode = 1
else:
    debugMode = 0
############################################
# Begin processing
############################################
epname_map = {} # Map of (episode number, episode name)
rename_targets = {} # Map of (current file name, target file name)
ep_files = [] # List of files to be considered for renaming
errors = [] # List of errors encountered during processing
# Read all episode names
# NOTE(review): the file handle is never closed; acceptable for a one-shot script.
epnames = open(epname_source).read().splitlines()
# Populate episode name map
for i in range(len(epnames)):
    # Convert 0-index to 1-index
    epname_map[i+1] = epnames[i]
# Create a map for rename list
# Fetch episode files into array
ep_files = glob.glob(episodes_dir + epname_extension)
# Strip off everything except the file name
directory_len = len(episodes_dir)
for i in range(len(ep_files)):
    ep_files[i] = ep_files[i][directory_len:]
# Generate exclude list
exclude_list = re.split(regex_separator, filenames_to_exclude)
############################
# Generate rename candidates
for i in range(len(ep_files)):
    ep_file_name = ep_files[i]
    # Skip excludes (case-insensitive comparison)
    if ep_file_name.lower() in (x.lower() for x in exclude_list):
        continue
    # Handle prefixes
    if ep_file_name.startswith(epname_prefix):
        ep_file_name = ep_file_name[len(epname_prefix):]
    # Keep only the part before the first '.' — expected to be the episode number.
    ep_file_name = ep_file_name.split(file_extension_splitter)[0]
    # Don't process file if it can't be casted to int
    if RepresentsInt(ep_file_name) == False:
        errors.append("Unable to identify episode number for " + ep_files[i])
        continue
    # Parse filename into INT
    ep_index = int(ep_file_name)
    # Use the computed episode index to search the episode name map
    if ep_index in epname_map:
        # Corresponding episode name has been found, push it to rename list
        # Split filename into name and extension
        # Rant:
        # This is screwed if the file has no extension
        # But then, which media file doesn't have an extension?
        # NOTE(review): split('.')[1] also truncates extensions when the name
        # contains more than one dot (e.g. "01.part.mkv" -> "part").
        filename = ep_files[i].split(file_extension_splitter)[0]
        extension = ep_files[i].split(file_extension_splitter)[1]
        # Generate target file name
        target_filename = episodes_dir + filename + epname_separator + epname_map[ep_index] + file_extension_splitter + extension
        # Push it into rename targets map
        rename_targets[episodes_dir + ep_files[i]] = target_filename
    else:
        errors.append("No matching episode name found for " + ep_files[i])
#######################
# Generate Log Contents
# The log maps new name -> old name so a rename could be undone by hand.
if not debugMode:
    log = {} # Map of (current file name, previous file name)
    for key, value in rename_targets.iteritems():
        log[value] = key
    # NOTE(review): 'file' shadows the Python 2 builtin of the same name.
    file = open(episodes_dir + logfile_name, file_write_mode)
    file.write(str(log))
    file.close()
##############
# Begin rename
for key, value in rename_targets.iteritems():
    if debugMode:
        print value
    else:
        os.rename(key, value)
#######################
# Generate Error Report
if len(errors) > 0:
    if debugMode:
        print errors
    else:
        file = open(episodes_dir + errorfile_name, file_write_mode)
        file.write(str(errors))
        file.close()
| {
"repo_name": "adithyap/rename-utils",
"path": "name_appender.py",
"copies": "1",
"size": "5129",
"license": "mit",
"hash": -609845716046894200,
"line_mean": 25.7135416667,
"line_max": 123,
"alpha_frac": 0.6262429323,
"autogenerated": false,
"ratio": 3.6170662905500706,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.967792204548624,
"avg_score": 0.013077435472765951,
"num_lines": 192
} |
"""Appends XML elements specifying optional dependencies to a plugin XML file.
"""
import argparse
from itertools import izip
from xml.dom.minidom import parse
# Command-line interface: --plugin_xml is required; the positional arguments
# are alternating (module name, optional xml file) pairs consumed by pairwise().
parser = argparse.ArgumentParser()
parser.add_argument(
    "--plugin_xml", help="The main plugin xml file", required=True)
parser.add_argument("--output", help="The output file.")
parser.add_argument(
    "optional_xml_files",
    nargs="+",
    help="Sequence of module, module xml... pairs")
def pairwise(t):
    """Group t into consecutive non-overlapping pairs: [a,b,c,d] -> (a,b),(c,d).

    Feeding the same iterator to zip twice advances it two items per pair;
    an odd trailing element is silently dropped (zip stops at the shorter
    side). Uses the builtin zip instead of itertools.izip so the helper
    works unchanged on both Python 2 and Python 3.
    """
    it = iter(t)
    return zip(it, it)
def main():
    """Append one <depends optional="true"> element per (module, xml) pair
    to the plugin XML's root element, then write the document out.
    """
    args = parser.parse_args()
    dom = parse(args.plugin_xml)
    plugin_xml = dom.documentElement
    for module, optional_xml in pairwise(args.optional_xml_files):
        # Produces: <depends optional="true" config-file="...">module</depends>
        depends_element = dom.createElement("depends")
        depends_element.setAttribute("optional", "true")
        depends_element.setAttribute("config-file", optional_xml)
        depends_element.appendChild(dom.createTextNode(module))
        plugin_xml.appendChild(depends_element)
    if args.output:
        # NOTE(review): file() and the print statement below are Python 2 only.
        with file(args.output, "w") as f:
            f.write(dom.toxml(encoding="utf-8"))
    else:
        print dom.toxml(encoding="utf-8")


if __name__ == "__main__":
    main()
| {
"repo_name": "brendandouglas/intellij",
"path": "build_defs/append_optional_xml_elements.py",
"copies": "1",
"size": "1145",
"license": "apache-2.0",
"hash": -4176464733141390000,
"line_mean": 23.8913043478,
"line_max": 78,
"alpha_frac": 0.6943231441,
"autogenerated": false,
"ratio": 3.6234177215189876,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9759290804767442,
"avg_score": 0.011690012170309343,
"num_lines": 46
} |
"""AppEngine Datastore models for the Chrome Experience Sampling backend.
These classes define the data models for form and survey responses.
"""
from google.appengine.ext import ndb
package = 'ChromeExperienceSampling'
class ResponseModel(ndb.Model):
    """A single question/answer pair within a survey submission."""
    question = ndb.TextProperty()
    answer = ndb.TextProperty()

    @staticmethod
    def fromMessage(message):
        """Build a ResponseModel from a message exposing .question/.answer."""
        return ResponseModel(question=message.question,
                             answer=message.answer)
class SurveyModel(ndb.Model):
    """One survey submission, with its nested question/answer responses."""
    survey_type = ndb.StringProperty(indexed=True, required=True)
    participant_id = ndb.StringProperty(indexed=True, required=True)
    date_taken = ndb.DateTimeProperty(required=True)
    # Stamped automatically by the datastore when the entity is first stored.
    date_received = ndb.DateTimeProperty(auto_now_add=True)
    responses = ndb.StructuredProperty(ResponseModel, repeated=True)

    @staticmethod
    def fromMessage(message):
        """Build a SurveyModel from a message; date_received is set on put()."""
        return SurveyModel(survey_type=message.survey_type,
                           participant_id=message.participant_id,
                           date_taken=message.date_taken,
                           responses=map(ResponseModel.fromMessage,
                                         message.responses))
| {
"repo_name": "moduloprime/experience-sampling",
"path": "backend/models.py",
"copies": "4",
"size": "1141",
"license": "apache-2.0",
"hash": 4492196681260452000,
"line_mean": 32.5588235294,
"line_max": 73,
"alpha_frac": 0.6985100789,
"autogenerated": false,
"ratio": 4.305660377358491,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.010098143337592977,
"num_lines": 34
} |
# AppEngine imports
from google.appengine.ext import db
# Django imports
from django.contrib.sites.models import Site
# Python
#import logging
import random
import math
import csv
# Local imports
import models
# Alphabet for user-friendly random strings.
# skip: il1o0, uppercase (visually ambiguous characters removed)
_rchars = "abcdefghjkmnpqrstuvwxyz23456789"
# Return NaN in a portable way for Python 2.5.
# float('NaN') doesn't work until Python 2.6.
# See http://www.gossamer-threads.com/lists/python/python/656960
NaN = 1e1000 / 1e1000
def assertNear(self, doub1, doub2, eps):
    """TestCase mixin helper: assert doub1 and doub2 differ by at most eps."""
    difference = abs(doub1 - doub2)
    self.assertTrue(difference <= eps)
def assertIsNanOrNear(self, doub1, doub2, eps):
    """TestCase mixin helper: assert the values are both NaN, or within eps."""
    detail = ("doub1: %s, doub2: %s, abs(doub1 - doub2): %s, eps: %s"
              % (doub1, doub2, abs(doub1 - doub2), eps))
    self.assertTrue(isNanOrNear(doub1, doub2, eps), detail)
def isNanOrNear(doub1, doub2, eps):
    """Return True when both values are NaN, or when they differ by <= eps."""
    if isNaN(doub1) and isNaN(doub2):
        return True
    return abs(doub1 - doub2) <= eps
def printable_properties(model_class):
    '''Return the subset of model_class's properties we know how to print.

    Only scalar-ish property types (numbers, strings, dates, text, and the
    project's ZscoreAndPercentileProperty) are kept; everything else (blobs,
    references, ...) is silently skipped.
    '''
    # One isinstance call with a type tuple replaces the original chain of
    # or-ed isinstance checks — identical semantics, less noise.
    printable_types = (db.IntegerProperty,
                       db.FloatProperty,
                       db.StringProperty,
                       db.DateProperty,
                       db.DateTimeProperty,
                       db.TextProperty,
                       models.ZscoreAndPercentileProperty)
    properties = model_class.properties()
    props = {}
    for name in properties:
        thing = properties[name]
        if isinstance(thing, printable_types):
            props[name] = thing
        # else: not a printable property type — skip it
    return props
class StringWriter(object):
    '''File-like object whose write() accumulates everything into a string.

    csv.writer only requires a write() method, so this is enough to capture
    its output in memory.
    '''
    def __init__(self):
        self.str = ''

    def write(self, a_str):
        # Fix: the original re-ran `import logging` on every call and never
        # used it; the dead per-call import has been removed.
        self.str += a_str

    def get_str(self):
        '''Return everything written so far.'''
        return self.str
def _csv_string(a_val):
'''Return a string encoded for a csvwriter.
They don't handle unicode, so encode in utf-8.
Set None to empty string.
'''
if a_val is None: a_val = ''
else:
a_val = '%s' % a_val
a_val = a_val.encode('utf-8')
return a_val
def csv_line(prop_names, props, model_class):
    '''Return one unterminated CSV line for the given model_class instance.

    The line is produced by csv.writer (so \r and \n inside values are
    quoted) and the trailing \r\n terminator is stripped. Every property
    named in prop_names is emitted; a ZscoreAndPercentileProperty expands
    into two columns (zscore, percentile).
    '''
    vals = []
    for name in prop_names:
        val = getattr(model_class, name)
        if isinstance(props[name], models.ZscoreAndPercentileProperty):
            # TODO(dan): Remove this hack
            if val:
                vals.append(val.zscore)
                vals.append(val.percentile)
            else:
                vals.append(None)
                vals.append(None)
        else:
            vals.append(val)
    sink = StringWriter()
    # leave lineterminator \r\n so that those chars will be quoted
    writer = csv.writer(sink)
    writer.writerow([_csv_string(v) for v in vals])
    line = sink.get_str()
    # drop the final \r\n
    return line[:-2]
def random_string(len):
    """Generate a user-friendly random string of the given length.

    NOTE(review): the parameter shadows the builtin len(); the name is kept
    for backward compatibility with keyword callers.
    """
    # ''.join over a generator replaces the original quadratic `rstr +=`
    # loop; same distribution of output.
    return ''.join(random.choice(_rchars) for _ in range(len))
def get_domain():
    """Return the current site's base URL, e.g. 'http://example.com'.

    Uses Django's Sites framework for the domain.
    """
    return 'http://%s' % Site.objects.get_current().domain # django caches this
def isNaN(num):
    """Return True iff num is a float nan."""
    # NaN is the only value unequal to itself; the isinstance guard keeps
    # non-floats from ever matching. (Written this way because
    # float("nan") is unavailable on Python 2.5.2/Windows.)
    if not isinstance(num, float):
        return False
    return num != num
def isNaNString(numstr):
    """Return true if numstr is a string whose lower() is 'nan' or '-1.#ind'

    On Windows a printed NaN might be 'nan'; on Linux '-1.#ind'.
    """
    # str branch first; short-circuits before the Python-2-only unicode check.
    if isinstance(numstr, str) and numstr.lower() in ('nan', '-1.#ind'):
        return True
    return (isinstance(numstr, unicode)
            and numstr.lower() in (u'nan', u'-1.#ind'))
def bucket_zscore(val):
    """Put val into a bucket of width 1, return left edge.

    Return NaN if we can't take floor (e.g. val is None or non-numeric).
    0.1 => 0.0 to 1.0
    0.6 => 0.0 to 1.0
    -0.1 => -1.0 to 0.0
    -0.6 => -1.0 to -0.0
    """
    # Fix: the original used the Python-2-only `except TypeError, err:` form
    # and never used `err`; this spelling is valid on both Python 2 and 3.
    try:
        return math.floor(val)
    except TypeError:
        # Non-floorable input: fall back to the module-level NaN sentinel.
        return NaN
def string_eq_nocase(str1, str2):
    '''Compare two strings case-insensitively; None equals None.

    Quirk preserved from the original: an empty str2 matches anything.
    '''
    # Guard clauses replace the original elif ladder; the order of the
    # checks is identical, so every input maps to the same result.
    if str2 == '':
        return True
    if str1 is None and str2 is None:
        return True
    if str1 is None or str2 is None:
        return False
    return str1.lower() == str2.lower()
| {
"repo_name": "avastjohn/maventy_new",
"path": "healthdb/util.py",
"copies": "1",
"size": "4799",
"license": "bsd-3-clause",
"hash": -1376725913665096700,
"line_mean": 27.0848484848,
"line_max": 81,
"alpha_frac": 0.6236715982,
"autogenerated": false,
"ratio": 3.2668481960517357,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4390519794251736,
"avg_score": null,
"num_lines": null
} |
# AppEngine ndb Models for Users/Authentication, etc
from google.appengine.ext import ndb
class AuthUserEntity(ndb.Model):
    """
    Persistence model representing a system user
    # TODO: Consider adding profile img
    """
    username = ndb.StringProperty()
    email = ndb.StringProperty()
    first_name = ndb.StringProperty()
    last_name = ndb.StringProperty()
    is_activated = ndb.BooleanProperty()

    @classmethod
    def _get_kind(cls):
        # Fix: classmethod's first parameter was misnamed `self`.
        # Stores entities under the kind name 'AuthUser', not the class name.
        return 'AuthUser'

    def __repr__(self):
        """
        For easy debugging
        """
        return '<AuthUserEntity username="%s">' % (self.username)
class AuthUserMethodEntity(ndb.Model):
    """
    Storage of the various authentication records for a user
    Note: Entity keys are defined by generate_key() and the user is the
    parent of the entity group
    """
    auth_type = ndb.StringProperty()
    auth_key = ndb.StringProperty()
    auth_data = ndb.TextProperty()
    user_key = ndb.KeyProperty(kind='AuthUser') # TODO: Make this user_resource_id

    @classmethod
    def _get_kind(cls):
        # Fix: classmethod's first parameter was misnamed `self`.
        # Persists under the historical kind name 'AuthLogin'.
        return 'AuthLogin'

    @staticmethod
    def generate_key_name(user_key, auth_type, auth_key):
        """
        Generate a ndb keyname for the given entity
        """
        return "%s:%s:%s" % (user_key.id(), auth_type, auth_key)

    @staticmethod
    def generate_key(user_key, auth_type, auth_key):
        """Build the full ndb.Key for a login record, parented under the user."""
        key_name = AuthUserMethodEntity.generate_key_name(user_key, auth_type, auth_key)
        return ndb.Key('AuthLogin', key_name,
                       parent=user_key)
| {
"repo_name": "digibodies/auth_core",
"path": "auth_core/internal/entities.py",
"copies": "1",
"size": "1574",
"license": "mit",
"hash": 8380421622861622000,
"line_mean": 26.6140350877,
"line_max": 98,
"alpha_frac": 0.6365946633,
"autogenerated": false,
"ratio": 3.935,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.50715946633,
"avg_score": null,
"num_lines": null
} |
# Paths into the bundled Mac App Engine SDK and the vendored libraries it ships.
APPENGINE_PATH = '/Applications/GoogleAppEngineLauncher.app/Contents/Resources/GoogleAppEngine-default.bundle/Contents/Resources/google_appengine/'
APPENGINE_LIB_PATH = APPENGINE_PATH + 'lib/'
APPENGINE_LIBS = [
    APPENGINE_PATH,
    APPENGINE_LIB_PATH + 'antlr3/',
    APPENGINE_LIB_PATH + 'cacerts/',
    APPENGINE_LIB_PATH + 'ipaddr/',
    APPENGINE_LIB_PATH + 'graphy/',
    # NOTE(review): 'ipaddr/' appears twice in this list — harmless on
    # sys.path, but probably unintentional.
    APPENGINE_LIB_PATH + 'ipaddr/',
    APPENGINE_LIB_PATH + 'protorpc/',
    APPENGINE_LIB_PATH + 'simplejson/',
    APPENGINE_LIB_PATH + 'webapp2/',
    APPENGINE_LIB_PATH + 'webob/',
    APPENGINE_LIB_PATH + 'yaml/lib/',
]
import sys
# Insert the SDK paths right after the script directory entry.
sys.path[1:1] = APPENGINE_LIBS
from google.appengine.dist import use_library
use_library('django', '1.2')
from time import sleep
import os
from threading import Thread
from tic.development.admin.api import IAdminCommandProvider
from tic.core import Component, implements, ExtensionPoint
from tic.development.tools.directory_watcher import DirectoryWatcher
from tic.development.tools.api import IRunServerTask
from tic import loader
# NOTE(review): importing from the stdlib 'symbol' module looks like an
# accidental IDE auto-import; except_clause is never used below.
from symbol import except_clause
import logging
class ServerCommand(Component):
    """Admin command provider that boots the App Engine dev server ("runserver")."""
    implements(IAdminCommandProvider)
    # Tasks executed before the dev server starts (e.g. directory watchers).
    pre_tasks = ExtensionPoint(IRunServerTask)

    def get_admin_commands(self):
        """
        Returns a list of commands to execute
        @see tic.admin.api.IAdminCommandProvider
        """
        #(command, args, help, complete, execute)
        return (
            ("runserver", None, "runs the server", None, self._runserver),
        )

    def _runserver(self):
        """Run all pre-tasks, then hand control to dev_appserver_main (never returns)."""
        import sys
        from google.appengine.tools import dev_appserver_main
        from google.appengine.tools import dev_appserver_import_hook
        print dev_appserver_import_hook.FakeFile.NOT_ALLOWED_DIRS
        # NOTE(review): duplicates the module-level APPENGINE_LIBS path setup
        # (including the doubled 'ipaddr' entry); consider reusing those constants.
        root = '/Applications/GoogleAppEngineLauncher.app/Contents/Resources/GoogleAppEngine-default.bundle/Contents/Resources/google_appengine/'
        sys.path.append(root)
        sys.path.append(root + "lib/antlr3/")
        sys.path.append(root + "lib/cacerts/")
        sys.path.append(root + "lib/ipaddr/")
        sys.path.append(root + "lib/graphy/")
        sys.path.append(root + "lib/ipaddr/")
        sys.path.append(root + "lib/protorpc/")
        sys.path.append(root + "lib/simplejson/")
        sys.path.append(root + "lib/webapp2/")
        sys.path.append(root + "lib/webob/")
        sys.path.append(root + "lib/yaml/lib/")
        # sys.path.append(root + "lib/whoosh/")
        from google.appengine.dist import use_library
        use_library('django', '1.2')
        try:
            # Run every registered pre-start task; abort the launch on failure.
            for task in self.pre_tasks:
                task.run()
        except Exception, e:
            logging.error(e)
            sys.exit(1)
        progname = sys.argv[0]
        args = ['--enable_sendmail']
        # hack __main__ so --help in dev_appserver_main works.
        sys.modules['__main__'] = dev_appserver_main
        sys.exit(dev_appserver_main.main([progname] + args + [os.getcwdu()]))
class StartWatchingForDirectoryChangesTask(Component):
    """Pre-server task that starts watching the project root for file changes."""
    implements(IRunServerTask)

    def run(self):
        """Create a DirectoryWatcher and point it at the loader's root path."""
        watcher = DirectoryWatcher(self.compmgr)
        watcher.watch(loader.root_path())
| {
"repo_name": "selkhateeb/tic",
"path": "src/tic/development/appengine/server/__init__.py",
"copies": "1",
"size": "3235",
"license": "apache-2.0",
"hash": 5922542109299983000,
"line_mean": 34.5494505495,
"line_max": 147,
"alpha_frac": 0.657496136,
"autogenerated": false,
"ratio": 3.6553672316384183,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9763431433719512,
"avg_score": 0.009886386783781299,
"num_lines": 91
} |
# App engine platform
import appengine_config
import json
import logging
import quopri
import urllib
import webapp2
from google.appengine.api import urlfetch
from google.appengine.ext import blobstore
from google.appengine.ext.webapp import template, blobstore_handlers
# Sessions
from gaesessions import get_current_session
# Google Cloud Storage
# Application files
import DataModels as models
import GlobalUtilities as tools
class BaseHandler(webapp2.RequestHandler):
    """Base GET handler requiring any authenticated session (admin not required)."""

    def get(self):
        # By default, need admin priveleges to view
        isAdmin, s = tools.checkAuthentication(self, False)
        # Fix: identity comparison with None (`is None`) instead of `== None`.
        if isAdmin is None and s is None:
            # Not authenticated: bounce to the login page.
            self.redirect("/login")
        else:
            return self.task(isAdmin, s)

    def task(self, isAdmin, s):
        """Override point for subclasses; default writes a placeholder body."""
        self.response.out.write("BaseHandler default response out.")
class BaseHandlerAdmin(webapp2.RequestHandler):
    """Base GET handler requiring an authenticated admin session."""

    def get(self):
        # By default, need admin priveleges to view
        isAdmin, s = tools.checkAuthentication(self, True)
        # Fix: identity comparison with None (`is None`) instead of `== None`.
        if isAdmin is None and s is None:
            # Not authenticated (or not admin): bounce to the login page.
            self.redirect("/login")
        else:
            return self.task(isAdmin, s)

    def task(self, isAdmin, s):
        """Override point for subclasses; default writes a placeholder body."""
        self.response.out.write("BaseHandlerAdmin default response out.")
class AllContacts(BaseHandlerAdmin):
    """Admin list of all contacts, with optional ?search= filter."""

    def task(self, isAdmin, s):
        search_query = self.request.get("search")
        template_variables = {"search_query": search_query}
        self.response.write(
            template.render('pages/all_contacts.html', template_variables))


class AllDeposits(BaseHandlerAdmin):
    """Admin list of all deposits, with optional ?search= filter."""

    def task(self, isAdmin, s):
        search_query = self.request.get("search")
        template_variables = {"search_query": search_query}
        self.response.write(
            template.render('pages/all_deposits.html', template_variables))


class AllIndividuals(BaseHandlerAdmin):
    """Admin list of all individuals, with optional ?search= filter."""

    def task(self, isAdmin, s):
        search_query = self.request.get("search")
        template_variables = {"search_query": search_query}
        self.response.write(
            template.render('pages/all_individuals.html', template_variables))


class AllTeams(BaseHandlerAdmin):
    """Admin list of all teams, with optional ?search= filter."""

    def task(self, isAdmin, s):
        search_query = self.request.get("search")
        template_variables = {"search_query": search_query}
        self.response.write(
            template.render('pages/all_teams.html', template_variables))


class Contact(BaseHandlerAdmin):
    """Detail page for a single contact, looked up by its urlsafe key (?c=)."""

    def task(self, isAdmin, s):
        contact_key = self.request.get("c")
        c = tools.getKey(contact_key).get()
        template_variables = {"c": c, "s": s}
        self.response.write(
            template.render('pages/contact.html', template_variables))
class Container(BaseHandler):
    """Serves the main single-page app shell; mobile user agents go to /m."""

    def task(self, isAdmin, s):
        username = tools.getUsername(self)
        logging.info("Loading container for: " + str(username))
        session = get_current_session()
        redirect = False
        mobile_redirect = "none"
        user_agent = str(self.request.headers['User-Agent'])
        mobile_devices = ["android", "blackberry", "googlebot-mobile", "iemobile", "iphone", "ipod", "opera", "mobile",
                          "palmos", "webos"]
        # If user agent matches anything in the list above, redirect
        for m in mobile_devices:
            if user_agent.lower().find(m) != -1:
                redirect = True
        # If user has explicitly requested to be sent to desktop site
        try:
            if session["no-redirect"] == "1":
                redirect = False
                mobile_redirect = "block"
        except:
            # NOTE(review): bare except — presumably guarding a missing
            # session key (KeyError); confirm and narrow.
            pass
        if redirect == True:
            # NOTE(review): no return after redirect; the rest of the body
            # still executes.
            self.redirect("/m")
        try:
            template_variables = {"settings": s.key.urlsafe(), "username": username, "mobile_redirect": mobile_redirect}
        except:
            self.redirect("/login")
        if isAdmin == True:
            self.response.write(
                template.render('pages/container.html', template_variables))
        elif isAdmin == False:
            self.response.write(
                template.render('pages/container_ind.html', template_variables))
class Dashboard(BaseHandlerAdmin):
    """Admin dashboard: open donation count plus the past week's totals."""

    def task(self, isAdmin, s):
        # one_week_history holds (donation count, dollar total) for the week.
        vals = s.data.one_week_history
        past_donations = vals[0]
        past_money = str(vals[1])
        template_variables = {"num_open_donations": s.data.num_open_donations, "past_donations": past_donations,
                              "past_money": past_money}
        self.response.write(
            template.render('pages/dashboard.html', template_variables))


class Deposit(BaseHandlerAdmin):
    """Shows one deposit (?d=key): its donations, totals, and per-team breakout."""

    def task(self, isAdmin, s):
        # WARNING - this is a complicated and kind of a hacked-together
        # solution. I didn't understand it the day after I wrote it.
        # ... But it works.
        deposit_key = self.request.get("d")
        deposit = tools.getKey(deposit_key).get()
        entity_keys = deposit.entity_keys
        gross_amount = tools.toDecimal(0)
        net_amount = tools.toDecimal(0)
        general_fund = tools.toDecimal(0)
        donations = []
        # team name -> [subtotal, ["Member Name ($amount)" entries]]
        team_breakout = {}
        for k in entity_keys:
            d = k.get()
            if d != None:
                donations.append(d)
                gross_amount += d.confirmation_amount
                net_amount += d.amount_donated
                if d.team:
                    t = d.team.get()
                    try:
                        team_breakout[t.name]
                    except:
                        # First donation seen for this team: seed its entry.
                        team_breakout[t.name] = [tools.toDecimal(0), []]
                    team_breakout[t.name][0] += d.amount_donated
                    if d.individual:
                        i = d.individual.get()
                        array = [i.name, d.amount_donated]
                        team_breakout[t.name][1].append(array)
                else:
                    # Add count to general fund
                    general_fund += d.amount_donated
        team_breakout["General Fund"] = [tools.toDecimal(general_fund), []]
        # Re-key the breakout as "Team Name ($subtotal)" -> display strings
        # for the template.
        new_team_breakout = {}
        for k, v in team_breakout.items():
            name = k
            amount_donated = v[0]
            array = v[1]
            new_array = []
            for a in array:
                string = a[0] + " ($" + str(a[1]) + ")"
                new_array.append(string)
            new_team_breakout[unicode(name) + " ($" + str(amount_donated) + ")"] = new_array
        template_variables = {"d": deposit, "donations": donations, "team_breakout": new_team_breakout,
                              "gross_amount": gross_amount, "net_amount": net_amount}
        self.response.write(
            template.render('pages/deposit.html', template_variables))
class DonatePage(webapp2.RequestHandler):
    """Public, CORS-enabled donation page keyed by a settings entity (?s=)."""

    def get(self):
        self.response.headers['Access-Control-Allow-Origin'] = '*'
        settings = self.request.get("s")
        try:
            s = tools.getKey(settings).get()
            template_variables = {"s": s}
            self.response.write(
                template.render('pages/public_pages/pub_donate.html', template_variables))
        except:
            # Bad or missing key in the URL: show a friendly message.
            self.response.write("Sorry, this URL triggered an error. Please try again.")


class DonorReport(BaseHandlerAdmin):
    """Printable annual donor report for one contact (?c=key&y=year)."""

    def task(self, isAdmin, s):
        try:
            contact_key = self.request.get("c")
            year = int(self.request.get("y"))
            if contact_key == "" or year == "" or len(str(year)) != 4:
                # Throw an error if you don't have those two pieces of info or if the year isn't a number
                raise Exception("Don't know contact key or year.")
            c = tools.getKey(contact_key).get()
            s = c.settings.get()
            donations = c.data.annual_donations(year)
            donation_total = tools.toDecimal(0)
            for d in donations:
                donation_total += d.confirmation_amount
            donation_total = "${:,.2f}".format(donation_total)
            template_variables = {"s": s, "c": c, "donations": donations, "year": str(year),
                                  "donation_total": str(donation_total), "street": c.address[0], "city": c.address[1],
                                  "state": c.address[2], "zip": c.address[3]}
            self.response.write(
                template.render("pages/letters/donor_report_print.html", template_variables))
        except:
            # If there's a malformed URL, give a 500 error
            self.error(500)
            self.response.write(
                template.render('pages/letters/thankyou_error.html', {}))


class DonorReportSelect(BaseHandlerAdmin):
    """Page for choosing a contact and year before generating a donor report."""

    def task(self, isAdmin, s):
        template_variables = {"s": s}
        self.response.write(
            template.render('pages/donor_report_select.html', template_variables))


class ExportDonations(BaseHandlerAdmin):
    """Page for exporting donations to a spreadsheet."""

    def task(self, isAdmin, s):
        template_variables = {}
        self.response.write(
            template.render('pages/export_donations.html', template_variables))
class IndividualProfile(BaseHandler):
    """Profile page; admins may view anyone (?i=), others only themselves."""

    def task(self, isAdmin, s):
        if isAdmin == True:
            # If an admin, they can get whatever user they want
            i_key = self.request.get("i")
            # if no key specified, go to admin's personal account
            if i_key == "":
                i_key = tools.getUserKey(self)
            else:
                i_key = tools.getKey(i_key)
        else:
            # If a regular user, they can ONLY get their own profile
            i_key = tools.getUserKey(self)
        i = i_key.get()
        logging.info("Getting profile page for: " + i.name)
        # Creating a blobstore upload url
        upload_url = blobstore.create_upload_url('/ajax/profile/upload')
        template_variables = {"s": s, "i": i, "upload_url": upload_url, "isAdmin": isAdmin}
        self.response.write(
            template.render("pages/profile.html", template_variables))


class IndividualWelcome(BaseHandlerAdmin):
    """Renders the welcome letter for an individual (?m=mode&i=key)."""

    def task(self, isAdmin, s):
        try:
            mode = self.request.get("m")
            individual_key = self.request.get("i")
            if mode == "" or individual_key == "":
                # Throw an error if you don't have those two pieces of info
                raise Exception("Don't know mode or individual_key.")
            i = tools.getKey(individual_key).get()
            s = tools.getKey(i.settings).get()
            template_variables = {"s": s, "i": i}
            self.response.write(
                template.render('pages/letters/individual.html', template_variables))
        except:
            # If there's a malformed URL, give a 500 error
            self.error(500)
            self.response.write(
                template.render('pages/letters/thankyou_error.html', {}))
class Login(webapp2.RequestHandler):
    """Login page: GET renders the form, POST checks credentials."""

    def get(self):
        self.session = get_current_session()
        # Flash message
        message = tools.getFlash(self)
        # Delete existing session if it exists
        self.session.terminate()
        template_variables = {"flash": message}
        self.response.write(
            template.render('pages/login.html', template_variables))

    def post(self):
        self.session = get_current_session()
        email = self.request.get("email")
        password = self.request.get("password")
        if email != "" and password != "":
            authenticated, user = tools.checkCredentials(self, email, password)
        else:
            # Missing field: fail without hitting the credential check.
            authenticated = False
        if authenticated == True:
            logging.info("Authenticated: " + str(authenticated) + " and User: " + str(user.name))
            # Log in
            self.session["key"] = str(user.key.urlsafe())
            self.redirect("/")
        else:
            # Invalid login
            logging.info("Incorrect login.")
            tools.setFlash(self, "Whoops, that didn't get you in. Try again.")
            self.redirect("/login")


class Logout(webapp2.RequestHandler):
    """Terminates the current session and returns to the login page."""

    def get(self):
        self.session = get_current_session()
        self.session.terminate()
        self.redirect("/login")
class MergeContacts(BaseHandlerAdmin):
    """Admin tool page for merging duplicate contacts."""

    def task(self, isAdmin, s):
        template_variables = {}
        self.response.write(
            template.render('pages/merge_contacts.html', template_variables))


class Mobile(BaseHandler):
    """Mobile version of the app shell for the logged-in individual."""

    def task(self, isAdmin, s):
        i_key = tools.getUserKey(self)
        i = i_key.get()
        template_variables = {"i": i}
        self.response.write(
            template.render('pages/mobile.html', template_variables))


class NewContact(BaseHandlerAdmin):
    """Form page for creating a new contact."""

    def task(self, isAdmin, s):
        template_variables = {}
        self.response.write(
            template.render('pages/new_contact.html', template_variables))


class NewIndividual(BaseHandlerAdmin):
    """Form page for creating a new individual; needs the full team list."""

    def task(self, isAdmin, s):
        template_variables = {"teams": s.data.all_teams}
        self.response.write(
            template.render('pages/new_individual.html', template_variables))


class NewTeam(BaseHandlerAdmin):
    """Form page for creating a new team."""

    def task(self, isAdmin, s):
        template_variables = {}
        self.response.write(
            template.render('pages/new_team.html', template_variables))


class MobileRedirectSetting(webapp2.RequestHandler):
    """Stores the user's mobile-redirect preference (?r=) in the session."""

    def get(self):
        session = get_current_session()
        setting = self.request.get("r")
        session["no-redirect"] = setting
        self.redirect("/")


class NotAuthorized(webapp2.RequestHandler):
    """Static page shown when a user lacks permission for a resource."""

    def get(self):
        template_variables = {}
        self.response.write(
            template.render('pages/not_authorized.html', template_variables))
class OfflineDonation(BaseHandlerAdmin):
    """Form for an admin to record a donation received offline."""

    def task(self, isAdmin, s):
        i = tools.getUserKey(self).get()
        # Fix: the original dict literal listed "teams" twice
        # ({"teams": None, ..., "teams": s.data.all_teams}); only the second
        # value ever took effect, so the dead first entry was removed.
        template_variables = {"individual_name": i.name,
                              "individual_key": i.key.urlsafe(),
                              "teams": s.data.all_teams}
        self.response.write(
            template.render('pages/offline.html', template_variables))
class ReviewQueue(BaseHandlerAdmin):
    """Admin queue of donations awaiting review, with optional ?search= filter."""

    def task(self, isAdmin, s):
        search_query = self.request.get("search")
        template_variables = {"search_query": search_query}
        self.response.write(
            template.render('pages/review_queue.html', template_variables))
class ReviewQueueDetails(BaseHandler):
    """Detail/edit page for one donation in the review queue (?id=key)."""

    def task(self, isAdmin, s):
        donation_key = self.request.get("id")
        if donation_key == "":
            # If they didn't type any arguments into the address bar - meaning it didn't come from the app
            tools.giveError(self, 500)
        else:
            # Getting donation by its key (from address bar argument)
            d = tools.getKey(donation_key).get()
            c = d.contact.get()
            i_key = tools.getUserKey(self)
            i = i_key.get()
            # Day/month/year list serialized as JSON for the template's JS.
            donation_date = [d.donation_date.day, d.donation_date.month, d.donation_date.year]
            donation_date = json.dumps(donation_date)
            template_variables = {"d": d, "c": c, "s": s, "i": i, "donation_date": donation_date}
            self.response.write(
                template.render('pages/rq_details.html', template_variables))


class Settings(BaseHandlerAdmin):
    """Admin settings page."""

    def task(self, isAdmin, s):
        template_variables = {"s": s}
        self.response.write(
            template.render('pages/settings.html', template_variables))


class SpreadsheetDownload(blobstore_handlers.BlobstoreDownloadHandler):
    """Streams a previously exported spreadsheet blob to an authenticated admin."""

    def get(self):
        isAdmin, s = tools.checkAuthentication(self, True)
        str_blob_key = urllib.unquote(self.request.get("blob_key"))
        blob_key = blobstore.BlobInfo.get(str_blob_key)
        if not blobstore.get(str_blob_key):
            logging.error("404 on blob key: " + str_blob_key)
            self.error(404)
        else:
            logging.info("Serving blob: " + str_blob_key)
            self.send_blob(blob_key, save_as=True)


class TeamMembers(BaseHandlerAdmin):
    """Lists the members of one team (?t=key)."""

    def task(self, isAdmin, s):
        team_key = self.request.get("t")
        t = tools.getKey(team_key).get()
        template_variables = {"t": t}
        self.response.write(
            template.render('pages/team_members.html', template_variables))
class ThankYou(webapp2.RequestHandler):
    """Renders a thank-you letter for a donation (?id=) in one of three
    modes (?m=): "w" web, "p" print, "e" email."""

    def get(self):
        try:
            mode = self.request.get("m")
            donation_key = self.request.get("id")
            if mode == "" or donation_key == "":
                # Throw an error if you don't have those two pieces of info
                raise Exception("Don't know mode or donation key.")
            d = tools.getKey(donation_key).get()
            date = tools.convertTime(d.donation_date).strftime("%B %d, %Y")
            s = d.settings.get()
            # Display name preference: individual, then team, else nothing.
            if d.individual:
                individual_name = d.individual.get().name
            elif d.team:
                individual_name = d.team.get().name
            else:
                individual_name = None
            template_variables = {"s": s, "d": d, "c": d.contact, "date": date, "individual_name": individual_name}
            if mode == "w":
                template_location = "pages/letters/thanks_live.html"
            elif mode == "p":
                template_location = "pages/letters/thanks_print.html"
            elif mode == "e":
                # Email mode also embeds web/print links for the recipient.
                who = "http://ghidonations.appspot.com"
                template_variables["see_url"] = d.confirmation.see_url(who)
                template_variables["print_url"] = d.confirmation.print_url(who)
                template_location = "pages/letters/thanks_email.html"
            # NOTE(review): an unrecognized mode leaves template_location
            # unset and falls into the except below via NameError.
            self.response.write(
                template.render(template_location, template_variables))
        except:
            # If there's a malformed URL, give a 500 error
            self.error(500)
            self.response.write(
                template.render('pages/letters/thankyou_error.html', {}))
class UndepositedDonations(BaseHandlerAdmin):
    """Admin list of donations not yet assigned to a deposit."""

    def task(self, isAdmin, s):
        template_variables = {"donations": s.data.undeposited_donations}
        self.response.write(
            template.render('pages/undeposited_donations.html', template_variables))


class UpdateProfile(blobstore_handlers.BlobstoreUploadHandler):
    """Handles the profile form POST, including an optional photo upload."""

    def post(self):
        # Has a new image been selected?
        change_image = True
        try:
            upload_files = self.get_uploads('new_photo')
            blob_info = upload_files[0]
        except:
            # No file uploaded: keep the existing photo.
            change_image = False
        individual_key = self.request.get("individual_key")
        name = self.request.get("name")
        email = self.request.get("email")
        # Strip quoted-printable soft line breaks from the team list.
        team = self.request.get("team_list").replace("=\r\n", "")
        description = quopri.decodestring(self.request.get("description"))
        password = self.request.get("password")
        show_donation_page = self.request.get("show_donation_page")
        show_progress_bar = self.request.get("show_progress_bar")
        # Checkboxes post "on" when checked, "" when absent.
        if show_donation_page == "on":
            show_donation_page = True
        elif show_donation_page == "":
            show_donation_page = False
        if show_progress_bar == "on":
            show_progress_bar = True
        elif show_progress_bar == "":
            show_progress_bar = False
        i = tools.getKey(individual_key).get()
        if change_image == True:
            new_blobkey = str(blob_info.key())
        else:
            new_blobkey = None
        logging.info("Saving profile for: " + name)
        i.update(name, email, team, description, new_blobkey, password, show_donation_page, show_progress_bar)
        self.redirect("/#profile?i=" + individual_key)
class IPN(webapp2.RequestHandler):
    """PayPal Instant Payment Notification (IPN) listener.

    PayPal POSTs transaction notifications here.  The handler echoes the
    message back to PayPal for verification, matches the receiver email to a
    known Settings account, then records (or deletes, on refund) a donation.
    """

    def get(self):
        # Human-readable marker so the endpoint can be sanity-checked in a browser.
        self.response.write("GHI Donations - PayPal IPN Handler")

    def post(self):
        # Below URL used for the live version.
        PP_URL = "https://www.paypal.com/cgi-bin/webscr"
        # Below URL used for testing with the sandbox - if this is uncommented, all real
        # donations will not be authenticated. ONLY use with dev versions.
        # PP_URL = "https://www.sandbox.paypal.com/cgi-bin/webscr"
        # Gets all account emails from Settings data models
        # to authenticate PayPal (don't accept payment from unknown)
        all_account_emails = tools.getAccountEmails()
        parameters = None
        if self.request.POST:
            parameters = self.request.POST.copy()
        if self.request.GET:
            # NOTE(review): GET parameters replace POST ones entirely when
            # both are present — confirm that is intended.
            parameters = self.request.GET.copy()
        payment_status = self.request.get("payment_status")
        logging.info("Payment status: " + payment_status)
        # Check payment is completed, not Pending or Failed.
        if payment_status == "Failed" or payment_status == "Pending":
            logging.error("Payment status is " + payment_status + ", so not continuing.")
        else:
            logging.info("All parameters: " + str(parameters))
            # Check the IPN POST request came from real PayPal, not from a fraudster.
            if parameters:
                parameters['cmd'] = '_notify-validate'
                # Encode the parameters in UTF-8 out of Unicode
                str_parameters = {}
                for k, v in parameters.iteritems():
                    str_parameters[k] = unicode(v).encode('utf-8')
                params = urllib.urlencode(str_parameters)
                # Echo the notification back to PayPal; genuine messages get
                # the literal response body "VERIFIED".
                status = urlfetch.fetch(
                    url=PP_URL,
                    method=urlfetch.POST,
                    payload=params,
                ).content
                if not status == "VERIFIED":
                    logging.debug("PayPal returned status:" + str(status))
                    logging.debug('Error. The request could not be verified, check for fraud.')
                    # NOTE(review): failure only flags the dict — processing
                    # still continues below.  Confirm this is intentional.
                    parameters['homemadeParameterValidity'] = False
            # Comparing receiver email to list of allowed email addresses
            try:
                receiver_email = parameters['receiver_email']
                authenticated = False
                settings = None
                # If the receiver_email isn't in the database, this will fail
                settings = all_account_emails[receiver_email]
                authenticated = True
                logging.info("Getting payment to account: " + receiver_email + ", #: " + settings)
            except:
                authenticated = False
                logging.info("No match for incoming payment email address. Not continuing.")
            # Make sure money is going to the correct account - otherwise fraudulent
            if authenticated == True:
                # Currency of the donation
                # currency = parameters['mc_currency']
                s = tools.getKey(settings).get()
                # Raw notification payload, stored verbatim with the donation.
                ipn_data = str(parameters)
                # Email and payer ID numbers
                try:
                    email = parameters['payer_email']
                except:
                    email = None
                try:
                    name = parameters['first_name'] + " " + parameters['last_name']
                except:
                    name = "Anonymous Donor"
                # Check if an address was given by the donor
                try:
                    # Stich all the address stuff together
                    address = [parameters['address_street'], parameters['address_city'], parameters['address_state'],
                               parameters['address_zip']]
                except:
                    address = None
                # Reading designation and notes values encoded in JSON from
                # donate form
                decoded_custom = None
                try:
                    # custom = [team_key, individual_key, special_notes,
                    #           cover_trans, email_subscr] per the donate form.
                    decoded_custom = json.loads(parameters["custom"])
                    team_key = tools.getKeyIfExists(decoded_custom[0])
                    individual_key = tools.getKeyIfExists(decoded_custom[1])
                    special_notes = decoded_custom[2]
                    if s.exists.entity(team_key) == False:
                        team_key = None
                    if s.exists.entity(individual_key) == False:
                        individual_key = None
                except:
                    logging.error("Excepted on designation.")
                    team_key = None
                    individual_key = None
                    special_notes = None
                try:
                    cover_trans = decoded_custom[3]
                    email_subscr = decoded_custom[4]
                except:
                    # Older donate forms sent only three custom fields.
                    cover_trans = False
                    email_subscr = False
                try:
                    phone = parameters['contact_phone']
                    # NOTE(review): if special_notes is None the += below
                    # raises TypeError and the except resets phone to None —
                    # the phone number is then silently dropped.
                    if len(phone) > 10:
                        special_notes += "\nContact phone: " + phone
                        phone = None
                except:
                    logging.info("Excepted on phone number.")
                    phone = None
                confirmation_amount = tools.toDecimal(0)
                amount_donated = tools.toDecimal(0)
                try:
                    # Gross amount for the receipt; net (gross - fee) is what
                    # we record as actually donated.
                    confirmation_amount = parameters['mc_gross']
                    amount_donated = float(parameters['mc_gross']) - float(parameters['mc_fee'])
                except:
                    pass
                # Find out what kind of payment this was - recurring, one-time, etc.
                try:
                    payment_type = parameters['txn_type']
                except:
                    logging.info("Txn_type not available, so continuing with payment status")
                    payment_type = payment_status
                if payment_type == "recurring_payment_profile_created" or payment_type == "subscr_signup":
                    logging.info("This is the start of a recurring payment. Create info object.")
                    payment_id = parameters['subscr_id']
                    # Duration between payments
                    duration = "recurring"
                    # s.create.recurring_donation(payment_id, duration, ipn_data)
                elif payment_type == "recurring_payment" or payment_type == "subscr_payment":
                    logging.info("This is a recurring donation payment.")
                    payment_id = parameters['subscr_id']
                    payment_type = "recurring"
                    # Create a new donation
                    s.create.donation(name, email, amount_donated, payment_type,
                                      confirmation_amount=confirmation_amount,
                                      phone=phone, address=address, team_key=team_key, individual_key=individual_key,
                                      payment_id=payment_id, special_notes=special_notes, email_subscr=email_subscr,
                                      ipn_data=ipn_data)
                elif payment_type == "web_accept":
                    logging.info("This is a one-time donation.")
                    if payment_status == "Completed":
                        payment_id = parameters['txn_id']
                        # Create a new donation
                        s.create.donation(name, email, amount_donated, "one-time",
                                          confirmation_amount=confirmation_amount, address=address,
                                          team_key=team_key, individual_key=individual_key, payment_id=payment_id,
                                          special_notes=special_notes,
                                          email_subscr=email_subscr, ipn_data=ipn_data)
                    else:
                        logging.info("Payment status not complete. Not logging the donation.")
                elif payment_type == "subscr_cancel":
                    logging.info("A subscriber has cancelled.")
                    amount_donated = "N/A"
                elif payment_type == "subscr_failed":
                    logging.info("A subscriber payment has failed.")
                    amount_donated = "N/A"
                elif payment_type == "Refunded":
                    # A refund deletes the original donation record, matched
                    # by its PayPal transaction id.
                    try:
                        donation = models.Donation.gql("WHERE payment_id = :t", t=parameters["txn_id"])
                        donation_key = donation[0].key()
                        donation_key.delete()
                        logging.info("Refund detected and donation deleted. (" + donation_key.urlsafe() + ")")
                    except:
                        logging.info("Donation tried to be deleted, but failed. Most likely already deleted.")
                try:
                    # Summary log; email may be None here, which raises and
                    # lands in the except below.
                    logging.info("Recording IPN")
                    logging.info("Payment type: " + payment_type)
                    logging.info("Name: " + name)
                    logging.info("Email: " + email)
                    logging.info("Amount donated: " + str(amount_donated))
                except:
                    logging.error("Failed somewhere in the logs.")
# URL routing for the public-facing app: page handlers, AJAX endpoints, and
# the PayPal IPN listener.
app = webapp2.WSGIApplication([
    ('/ajax/allcontacts', AllContacts),
    ('/ajax/alldeposits', AllDeposits),
    ('/ajax/allindividuals', AllIndividuals),
    ('/ajax/allteams', AllTeams),
    ('/ajax/contact', Contact),
    ('/', Container),
    ('/ajax/dashboard', Dashboard),
    ('/ajax/deposit', Deposit),
    ('/donate', DonatePage),
    ('/reports/donor', DonorReport),
    ('/ajax/donorreport', DonorReportSelect),
    ('/ajax/exportdonations', ExportDonations),
    ('/ajax/profile', IndividualProfile),
    ('/login', Login),
    ('/logout', Logout),
    ('/ajax/mergecontacts', MergeContacts),
    ('/m', Mobile),
    ('/ajax/newcontact', NewContact),
    ('/ajax/newindividual', NewIndividual),
    ('/ajax/newteam', NewTeam),
    ('/m/redirect', MobileRedirectSetting),
    ('/ajax/notauthorized', NotAuthorized),
    ('/ajax/offline', OfflineDonation),
    ('/ajax/review', ReviewQueue),
    ('/ajax/reviewdetails', ReviewQueueDetails),
    ('/ajax/settings', Settings),
    ('/ajax/spreadsheet/download', SpreadsheetDownload),
    ('/ajax/teammembers', TeamMembers),
    ('/thanks', ThankYou),
    ('/ajax/undeposited', UndepositedDonations),
    ('/ajax/profile/upload', UpdateProfile),
    ('/ipn', IPN)],
    debug=True)
# Wrap the WSGI app with App Engine config and appstats recording middleware.
app = appengine_config.webapp_add_wsgi_middleware(app)
app = appengine_config.recording_add_wsgi_middleware(app)
| {
"repo_name": "rhefner1/ghidonations",
"path": "mooha.py",
"copies": "1",
"size": "30531",
"license": "apache-2.0",
"hash": 8108650885693614000,
"line_mean": 34.2551963048,
"line_max": 120,
"alpha_frac": 0.5611018309,
"autogenerated": false,
"ratio": 4.231014412416852,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5292116243316852,
"avg_score": null,
"num_lines": null
} |
# App engine platform
import appengine_config
import json
import logging
import webapp2
from datetime import datetime, timedelta
import DataModels as models
import GlobalUtilities as tools
from google.appengine.api import mail, taskqueue
from google.appengine.ext import deferred
from google.appengine.ext.webapp import template
# Excel export
# Google Cloud Storage
import cloudstorage as gcs
class AggregateAnnualReport(webapp2.RequestHandler):
    """Builds a year-end donor summary and writes it to a text file in GCS.

    Donors are split into three buckets: those with an email on file, those
    without one who gave enough to warrant a mailed letter, and contact keys
    that no longer resolve to an entity.
    """

    def post(self):
        target_year = int(self.request.get("year"))
        s = tools.getKey(self.request.get("skey"))
        mode = self.request.get("mode")

        # All donations for this settings account within the target year.
        td1 = datetime(target_year, 1, 1, 0, 0)
        td2 = datetime(target_year, 12, 31, 0, 0)
        annual_donations = models.Donation.query(models.Donation.settings == s,
                                                 models.Donation.donation_date >= td1,
                                                 models.Donation.donation_date <= td2)
        all_contacts = set([d.contact for d in annual_donations])
        with_email = []
        without_email = []
        missing_contacts = []
        for c_key in all_contacts:
            c = c_key.get()
            if not c:
                missing_contacts.append(c_key)
            else:
                if c.email != ['']:
                    with_email.append(c)
                else:
                    # Letter-worthy donors only: at least $250 total, or a
                    # single donation of at least $100.
                    donation_total = c.data.donation_total
                    if donation_total >= tools.toDecimal("250"):
                        without_email.append(c)
                    elif c.data.number_donations == 1 and donation_total >= tools.toDecimal("100"):
                        without_email.append(c)

        body = ""
        body += "\n" + "#### " + str(len(with_email)) + " Donors with Email Addresses ####"
        for c in with_email:
            body += "\n" + c.websafe
        body += "\n" + "\n\n\n#### " + str(len(without_email)) + " Donors WITHOUT Email Addresses ####"
        for c in without_email:
            # BUG FIX: the report-year query parameter was hard-coded to
            # 2013; use the requested target_year instead.
            body += "\n" + "https://ghidonations.appspot.com/reports/donor?c=" + c.websafe + "&y=" + str(target_year)
        body += "\n" + "\n\n\n#### " + str(len(missing_contacts)) + " Missing Contacts ####"
        for c in missing_contacts:
            body += "\n" + str(c)

        # Writing text file
        gcs_file_key, gcs_file = tools.newFile("text/plain", "GHI_Donations_" + str(target_year) + ".txt")
        gcs_file.write(body)
        gcs_file.close()
class AnnualReport(webapp2.RequestHandler):
    """Emails a single donor their itemized donation report for a given year."""

    def post(self):
        contact_key = self.request.get("contact_key")
        year = self.request.get("year")
        c = tools.getKey(contact_key).get()
        logging.info("AnnualReport handler hit with contact key " + contact_key + " and year " + year)
        if c.email:
            message = mail.EmailMessage()
            # c.email is normally a list; fall back to the raw value if
            # indexing fails.  (Was a bare except.)
            try:
                email = c.email[0]
            except Exception:
                email = c.email
            message.to = email
            message.sender = "Global Hope India <donate@globalhopeindia.org>"
            message.subject = str(year) + " Global Hope India Donations"
            donations = c.data.annual_donations(year)
            donation_total = tools.toDecimal(0)
            for d in donations:
                donation_total += d.confirmation_amount
            donation_total = "${:,.2f}".format(donation_total)
            template_variables = {"s": c.settings.get(), "c": c, "donations": donations, "year": year,
                                  "donation_total": str(donation_total)}
            message.html = template.render("pages/letters/donor_report.html", template_variables)
            message.send()
            # FIX: log message was missing the space after the colon.
            logging.info("Annual report sent to: " + c.name)
        else:
            # FIX: removed duplicated "sent sent" and added missing spaces.
            logging.info("Annual report not sent because " + c.name + " doesn't have an email.")
class Confirmation(webapp2.RequestHandler):
    """Task-queue retry hook for re-sending a donation confirmation email."""

    def post(self):
        key_str = self.request.get("donation_key")
        donation = tools.getKey(key_str).get()
        logging.info("Retrying confirmation email through task queue for donation: " + key_str)
        donation.confirmation.email()
class DelayIndexing(webapp2.RequestHandler):
    """Indexes one entity for full-text search; 500s on failure so the task
    queue retries the job."""

    def post(self):
        entity_key = self.request.get("e")
        try:
            # FIX: the entity and the caught exception previously shared the
            # name `e`, shadowing each other; renamed for clarity.  Also
            # switched the Py2-only `except X, e` syntax to `except X as e`,
            # valid in both Python 2.6+ and 3.
            entity = tools.getKey(entity_key).get()
            entity.search.index()
        except Exception as err:
            logging.error(str(err))
            self.error(500)
class DeleteSpreadsheet(webapp2.RequestHandler):
    """Best-effort deletion of a temporary spreadsheet file from GCS."""

    def post(self):
        file_key = self.request.get("k")
        try:
            gcs.delete(file_key)
        except Exception:
            # The file may already be gone; deletion is best-effort.
            # FIX: narrowed from a bare except, which would also swallow
            # SystemExit/KeyboardInterrupt.
            pass
class IndexAll(webapp2.RequestHandler):
    """Kicks off a deferred full re-index of every entity of one kind."""

    def post(self):
        mode = self.request.get("mode")
        # Dispatch table replaces the if/elif chain; unknown modes are a
        # no-op, exactly as before.
        kinds = {
            "contacts": models.Contact,
            "deposits": models.DepositReceipt,
            "donations": models.Donation,
            "individuals": models.Individual,
            "teams": models.Team,
        }
        model = kinds.get(mode)
        if model is not None:
            deferred.defer(tools.indexEntitiesFromQuery, model.query(),
                           _queue="backend")
class MailchimpAdd(webapp2.RequestHandler):
    """Task-queue retry hook for subscribing a donor to the Mailchimp list."""

    def post(self):
        email_addr = self.request.get("email")
        donor_name = self.request.get("name")
        settings_key = self.request.get("settings")
        account = tools.getKey(settings_key).get()
        account.mailchimp.add(email_addr, donor_name, True)
        logging.info("Retrying Mailchimp add through task queue for: " + email_addr + " under settings ID: " + settings_key)
class ReindexEntities(webapp2.RequestHandler):
    """Queues a delayed search re-index for every donation tied to an entity."""

    def post(self):
        mode = self.request.get("mode")
        e_key = self.request.get("key")
        base = tools.getKey(e_key).get()
        if mode == "contact":
            donation_query = models.Donation.query(
                models.Donation.settings == base.settings,
                models.Donation.contact == base.key)
            donation_query = tools.qCache(donation_query)
        elif mode in ("individual", "team"):
            # Both kinds expose their donations the same way.
            donation_query = base.data.donations
        # Stagger the indexing work onto its own queue.
        for entity in donation_query:
            taskqueue.add(url="/tasks/delayindexing",
                          params={'e': entity.key.urlsafe()},
                          countdown=2,
                          queue_name="delayindexing")
class UpdateAnalytics(webapp2.RequestHandler):
    """Hourly cron job that refreshes cached donation analytics for every
    settings account."""

    def _run(self):
        all_settings = models.Settings.query()
        for s in all_settings:
            ## Update one_week_history: [donation count, total $] over 7 days.
            last_week = datetime.today() - timedelta(days=7)
            donations = models.Donation.gql(
                "WHERE settings = :s AND donation_date > :last_week ORDER BY donation_date DESC",
                s=s.key, last_week=last_week)
            donation_count = 0
            total_money = tools.toDecimal(0)
            for d in donations:
                # Counting total money
                total_money += d.amount_donated
                # Counting number of donations
                donation_count += 1
            one_week_history = [donation_count, str(total_money)]
            s.one_week_history = json.dumps(one_week_history)

            ## Update one_month_history: per-day totals over the last 30 days.
            # FIX: the 30-day cutoff was misleadingly named `last_week` and
            # the comment claimed "last week"; renamed to last_month.  (The
            # GQL bind parameter name is part of the query string and stays.)
            last_month = datetime.today() - timedelta(days=30)
            donations = models.Donation.gql(
                "WHERE settings = :s AND donation_date > :last_week ORDER BY donation_date DESC",
                s=s.key, last_week=last_month)
            one_month_history = [["Date", "Amount Donated ($)"]]
            donations_dict = {}
            for d in donations:
                d_date = tools.convertTime(d.donation_date)
                day = str(d_date.month).zfill(2) + "/" + str(d_date.day).zfill(2)
                if day in donations_dict:
                    donations_dict[day] += d.amount_donated
                else:
                    donations_dict[day] = d.amount_donated
            for date in sorted(donations_dict.iterkeys()):
                one_month_history.append([date, float(donations_dict[date])])
            s.one_month_history = json.dumps(one_month_history)
            s.put()

    # Cron hits GET; allow POST as well.
    get = post = _run
class UpdateContactsJSON(webapp2.RequestHandler):
    """Rebuilds the cached contact-autocomplete JSON for a settings account."""

    def post(self):
        s_key = self.request.get("s_key")
        s = tools.getKey(s_key).get()
        # One {label, key} entry per contact, serialized for the UI.
        contacts = [{"label": c.name, "key": str(c.websafe)}
                    for c in s.data.all_contacts]
        s.contacts_json = json.dumps(contacts)
        s.put()
# URL routing for the task-queue / cron backend handlers.
app = webapp2.WSGIApplication([
    ('/tasks/annualreport', AnnualReport),
    ('/tasks/aggregateannualreport', AggregateAnnualReport),
    ('/tasks/confirmation', Confirmation),
    ('/tasks/delayindexing', DelayIndexing),
    ('/tasks/deletespreadsheet', DeleteSpreadsheet),
    ('/tasks/indexall', IndexAll),
    ('/tasks/mailchimp', MailchimpAdd),
    ('/tasks/reindex', ReindexEntities),
    ('/tasks/updateanalytics', UpdateAnalytics),
    ('/tasks/contactsjson', UpdateContactsJSON)],
    debug=True)
# Wrap with appstats recording middleware.
app = appengine_config.recording_add_wsgi_middleware(app)
| {
"repo_name": "rhefner1/ghidonations",
"path": "tasks.py",
"copies": "1",
"size": "9878",
"license": "apache-2.0",
"hash": 7980986925675388000,
"line_mean": 31.7086092715,
"line_max": 119,
"alpha_frac": 0.5664102045,
"autogenerated": false,
"ratio": 3.944888178913738,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0014498892686452872,
"num_lines": 302
} |
# App Engine platform
import cStringIO
import csv
import gc
import logging
from google.appengine.api import memcache, taskqueue
# Application files
import GlobalUtilities as tools
# Pipeline API
import pipeline
# Google Cloud Storage
import cloudstorage as gcs
# GCS client retry policy: exponential backoff starting at 0.2s (factor 2,
# capped at 5s per attempt), giving up after 15s total.
my_default_retry_params = gcs.RetryParams(initial_delay=0.2,
                                          max_delay=5.0, backoff_factor=2,
                                          max_retry_period=15)
gcs.set_default_retry_params(my_default_retry_params)
class GenerateReport(pipeline.Pipeline):
    """Root pipeline for a CSV export job.

    Pages through the requested query in key batches, yields a HeaderCSV
    child plus one CreateCSV child per page, then concatenates all chunks
    and records completion under job_id.
    """

    def run(self, settings_key, mode, job_id):
        s = tools.getKey(settings_key).get()
        # Pick the source query and page size for the requested export type.
        if mode == "contacts":
            query = s.data.all_contacts
            num_results = 150
        elif mode == "donations":
            query = s.data.all_donations
            num_results = 250
        elif mode == "individuals":
            query = s.data.all_individuals
            num_results = 150
        else:
            raise Exception("Unidentified mode in GenerateReport")
        blobs = []
        cursor = None
        # Create header CSV file
        header_file_name = job_id + "-0.csv"
        blobs.append((yield HeaderCSV(mode, header_file_name)))
        while True:
            # Fetch one page of entity keys; presumably returns
            # (keys, next_cursor) — cursor is None on the last page.
            results = tools.queryCursorDB(query, cursor, keys_only=True, num_results=num_results)
            keys, cursor = results[0], results[1]
            # Chunk files are named <job_id>-<index>.csv, index 0 being the header.
            file_name = job_id + "-" + str(len(blobs)) + ".csv"
            keys = tools.ndbKeyToUrlsafe(keys)
            blobs.append((yield CreateCSV(mode, file_name, keys)))
            if cursor == None:
                break
        final_file_name = s.name + "-" + mode.title() + ".csv"
        # Stitch the chunks together in order, then publish the result key.
        gcs_file_key = yield ConcatCSV(job_id, final_file_name, *blobs)
        yield ConfirmCompletion(job_id, gcs_file_key)
class HeaderCSV(pipeline.Pipeline):
    """Writes a one-row CSV file containing the column headers for `mode`."""

    # Column headers per export mode.  FIX: the original built these with an
    # inconsistent if/if/elif chain (the "donations" test was `if`, not
    # `elif`); a lookup table removes that fragility.  Unknown modes still
    # produce an empty header row, matching the original behavior.
    _HEADERS = {
        "contacts": ["Name", "Total Donated", "Number Donations", "Phone",
                     "Street", "City", "State", "Zipcode", "Created", "Email"],
        "donations": ["Date", "Name", "Email", "Amount Donated",
                      "Payment Type", "Team", "Individual", "Reviewed",
                      "Phone", "Street", "City", "State", "Zipcode"],
        "individuals": ["Name", "Email", "Teams", "Raised", "Date Created"],
    }

    def run(self, mode, file_name):
        # Open GCS file for writing
        gcs_file_key, gcs_file = tools.newFile("text/csv", file_name)
        si = cStringIO.StringIO()
        writer = csv.writer(si)
        writer.writerow(self._HEADERS.get(mode, []))
        gcs_file.write(si.getvalue())
        gcs_file.close()
        # Schedule cleanup of the temp chunk in 30 minutes.
        taskqueue.add(url="/tasks/deletespreadsheet", params={'k': gcs_file_key}, countdown=1800,
                      queue_name="deletespreadsheet")
        return gcs_file_key
class CreateCSV(pipeline.Pipeline):
    """Writes one CSV chunk with a row per entity key in `keys`."""

    def run(self, mode, file_name, keys):
        # Open GCS file for writing
        gcs_file_key, gcs_file = tools.newFile("text/csv", file_name)
        si = cStringIO.StringIO()
        writer = csv.writer(si)
        for k in keys:
            try:
                # FIX: the entity, the email loop variable, and the caught
                # exception all shared the name `e`; renamed to avoid
                # shadowing.  Also modernized `except X, e` to `except X as e`
                # (valid in Python 2.6+ and 3).
                entity = tools.getKey(k).get()
                row_data = []
                if mode == "contacts":
                    c = entity
                    row_data.append(c.name)
                    row_data.append(c.data.donation_total)
                    row_data.append(c.data.number_donations)
                    row_data.append(c.phone)
                    row_data.append(c.address[0])
                    row_data.append(c.address[1])
                    row_data.append(c.address[2])
                    row_data.append(c.address[3])
                    row_data.append(c.creation_date)
                    # One trailing column per email address on file.
                    for addr in c.email:
                        row_data.append(addr)
                elif mode == "donations":
                    d = entity
                    c = d.contact.get()
                    row_data.append(d.donation_date)
                    row_data.append(d.name)
                    row_data.append(d.given_email)
                    row_data.append(d.amount_donated)
                    row_data.append(d.payment_type)
                    row_data.append(d.designated_team)
                    row_data.append(d.designated_individual)
                    row_data.append(d.reviewed)
                    row_data.append(c.phone)
                    row_data.append(c.address[0])
                    row_data.append(c.address[1])
                    row_data.append(c.address[2])
                    row_data.append(c.address[3])
                elif mode == "individuals":
                    i = entity
                    row_data.append(i.name)
                    row_data.append(i.email)
                    row_data.append(i.data.readable_team_names)
                    row_data.append(i.data.donation_total)
                    row_data.append(i.creation_date)
                writer.writerow(row_data)
            except Exception as err:
                logging.error("Failed on key " + k + " because " + str(err))
            # Call the garbage handler
            gc.collect()
        gcs_file.write(si.getvalue())
        gcs_file.close()
        # Schedule cleanup of this temp chunk in 30 minutes.
        taskqueue.add(url="/tasks/deletespreadsheet", params={'k': gcs_file_key}, countdown=1800,
                      queue_name="deletespreadsheet")
        return gcs_file_key
class ConcatCSV(pipeline.Pipeline):
    """Concatenates the given GCS CSV chunk files into one final file."""

    def run(self, job_id, file_name, *blobs):
        gcs_writer_key, gcs_writer = tools.newFile("text/csv", file_name)
        for blob_name in blobs:
            reader = gcs.open(blob_name)
            gcs_writer.write(reader.read())
            # Release the chunk we just copied before reading the next one.
            gc.collect()
        gcs_writer.close()
        # The combined file self-destructs after 30 minutes.
        taskqueue.add(url="/tasks/deletespreadsheet",
                      params={'k': gcs_writer_key},
                      countdown=1800,
                      queue_name="deletespreadsheet")
        return gcs_writer_key
class ConfirmCompletion(pipeline.Pipeline):
    """Final pipeline stage: publish the finished file's GCS key under the
    job id so the polling frontend can discover it."""

    def run(self, job_id, gcs_file_key):
        memcache.set(job_id, gcs_file_key)
| {
"repo_name": "rhefner1/ghidonations",
"path": "spreadsheet_pipelines.py",
"copies": "1",
"size": "6935",
"license": "apache-2.0",
"hash": 7837743230372926000,
"line_mean": 31.2558139535,
"line_max": 99,
"alpha_frac": 0.5339581831,
"autogenerated": false,
"ratio": 3.960593946316391,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4994552129416391,
"avg_score": null,
"num_lines": null
} |
"""App Engine settings.
Reads app keys and secrets into constants from these files:
dropbox_app_key
dropbox_app_secret
facebook_app_id
facebook_app_secret
facebook_app_id_local
facebook_app_secret_local
google_client_id
google_client_secret
instagram_client_id
instagram_client_secret
instagram_client_id_local
instagram_client_secret_local
twitter_app_key
twitter_app_secret
tumblr_app_key
tumblr_app_secret
wordpress_client_id
wordpress_client_secret
wordpress_client_id_local
wordpress_client_secret_local
"""
import os
import sys
from webutil.appengine_config import *
# default timeout. the G+ and Instagram APIs use httplib2, which honors this.
import socket
socket.setdefaulttimeout(HTTP_TIMEOUT)
# monkey-patch socket.getdefaulttimeout() because it often gets reset, e.g. by
# socket.setblocking() and maybe other operations.
# http://stackoverflow.com/a/8465202/186123
socket.getdefaulttimeout = lambda: HTTP_TIMEOUT

# Add library modules directories to sys.path so they can be imported.
#
# I used to use symlinks and munge sys.modules, but both of those ended up in
# duplicate instances of modules, which caused problems. Background in
# https://github.com/snarfed/bridgy/issues/31
for path in (
        'google-api-python-client',
        'gdata-python-client/src',
        'httplib2_module/python2',
        'oauthlib_module',
        'python-dropbox',
        'requests_module',
        'requests-oauthlib',
        'python-tumblpy',
        'tweepy_module',
):
    path = os.path.join(os.path.dirname(__file__), path)
    if path not in sys.path:
        sys.path.append(path)

# Expose the vendored Dropbox SDK under a stable module name so the rest of
# the codebase can `import python_dropbox` regardless of its on-disk layout.
import python_dropbox
sys.modules['python_dropbox'] = python_dropbox
def read(filename):
    """Return the stripped contents of *filename*, or None if it doesn't exist."""
    if not os.path.exists(filename):
        return None
    with open(filename) as f:
        return f.read().strip()
# When True, Facebook calls are pointed at a local mockfacebook instance.
MOCKFACEBOOK = False

# Load OAuth app ids/secrets from flat files (see module docstring).  Local
# (DEBUG) deployments read the *_local variants; production reads the live
# ones.  read() returns None for any file that is missing.
if DEBUG:
    FACEBOOK_APP_ID = read('facebook_app_id_local')
    FACEBOOK_APP_SECRET = read('facebook_app_secret_local')
    INSTAGRAM_CLIENT_ID = read('instagram_client_id_local')
    INSTAGRAM_CLIENT_SECRET = read('instagram_client_secret_local')
    WORDPRESS_CLIENT_ID = read('wordpress.com_client_id_local')
    WORDPRESS_CLIENT_SECRET = read('wordpress.com_client_secret_local')
    DISQUS_CLIENT_ID = read('disqus_client_id_local')
    DISQUS_CLIENT_SECRET = read('disqus_client_secret_local')
else:
    FACEBOOK_APP_ID = read('facebook_app_id')
    FACEBOOK_APP_SECRET = read('facebook_app_secret')
    INSTAGRAM_CLIENT_ID = read('instagram_client_id')
    INSTAGRAM_CLIENT_SECRET = read('instagram_client_secret')
    WORDPRESS_CLIENT_ID = read('wordpress.com_client_id')
    WORDPRESS_CLIENT_SECRET = read('wordpress.com_client_secret')
    DISQUS_CLIENT_ID = read('disqus_client_id')
    DISQUS_CLIENT_SECRET = read('disqus_client_secret')

# These providers use the same credentials in dev and production.
DROPBOX_APP_KEY = read('dropbox_app_key')
DROPBOX_APP_SECRET = read('dropbox_app_secret')
FLICKR_APP_KEY = read('flickr_app_key')
FLICKR_APP_SECRET = read('flickr_app_secret')
GOOGLE_CLIENT_ID = read('google_client_id')
GOOGLE_CLIENT_SECRET = read('google_client_secret')
TUMBLR_APP_KEY = read('tumblr_app_key')
TUMBLR_APP_SECRET = read('tumblr_app_secret')
TWITTER_APP_KEY = read('twitter_app_key')
TWITTER_APP_SECRET = read('twitter_app_secret')
| {
"repo_name": "kylewm/oauth-dropins",
"path": "appengine_config.py",
"copies": "1",
"size": "3185",
"license": "unlicense",
"hash": 4188830888068553000,
"line_mean": 30.85,
"line_max": 78,
"alpha_frac": 0.7491365777,
"autogenerated": false,
"ratio": 3.053691275167785,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.927705234891461,
"avg_score": 0.005155100790635056,
"num_lines": 100
} |
# APP_ENV should be switched to production
APP_ENV = "production"

# Disable DEBUG
DEBUG = False

# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
# Replace ci.vecnet.org with actual hostname
ALLOWED_HOSTS = ["ci.vecnet.org"]

# Specify django log file.
LOG_FILE = "/var/log/django/ci.vecnet.org.django.log"

# Configure database parameters
#DATAWAREHOUSE_HOST =
#DATAWAREHOUSE_USER =
#DATAWAREHOUSE_PASSWORD =

# Configure login and logout page for SSO
LOGIN_URL = "https://www.vecnet.org/index.php/login-register"
LOGOUT_URL = "https://www.vecnet.org/index.php/log-out"

# The name of the GET field which contains the URL to redirect to after login
# By default, Django uses "next" and auth_pubtkt uses "back"
REDIRECT_FIELD_NAME = "back"

# Email host
EMAIL_HOST = "smtp.nd.edu"
EMAIL_PORT = 25
EMAIL_USE_TLS = True
SERVER_EMAIL = "ci@ci.vecnet.org"

# Enable mod_auth_pubtkt support
AUTHENTICATION_BACKENDS = (
    'django.contrib.auth.backends.RemoteUserBackend',
)
from settings import MIDDLEWARE_CLASSES
MIDDLEWARE_CLASSES += (
    'django.contrib.auth.middleware.RemoteUserMiddleware',
)

# Optional parameters (note: fixed "DATAWAREHOSE" typo in these examples)
#DATAWAREHOUSE_DW_NAME = "dw"
#DATAWAREHOUSE_PORT = 5432
| {
"repo_name": "tph-thuering/vnetsource",
"path": "VECNet/doc/deployment/settings_local_example.py",
"copies": "3",
"size": "1274",
"license": "mpl-2.0",
"hash": 8306415885730285000,
"line_mean": 27.9545454545,
"line_max": 77,
"alpha_frac": 0.751177394,
"autogenerated": false,
"ratio": 3.1534653465346536,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.02844784478080219,
"num_lines": 44
} |
# app/extras/forms.py
# coding: utf-8
from flask_wtf import FlaskForm
from wtforms import StringField, SubmitField
from wtforms.validators import DataRequired
class EstadoCivilForm(FlaskForm):
    """Form to add or edit a marital status (EstadoCivil).

    Only the name is required; the description is optional.
    """
    nombre_ec = StringField(u'Nombre', validators=[DataRequired()])
    descripcion_ec = StringField(u'Descripción')
    submit = SubmitField(u'Aceptar')
class TipoMiembroForm(FlaskForm):
    """Form to add or edit a member type (TipoMiembro).

    Only the name is required; the description is optional.
    """
    nombre_tm = StringField(u'Nombre', validators=[DataRequired()])
    descripcion_tm = StringField(u'Descripción')
    submit = SubmitField(u'Aceptar')
class RolFamiliarForm(FlaskForm):
    """Form to add or edit a family role (RolFamiliar).

    Only the name is required; the description is optional.
    """
    nombre_rf = StringField(u'Nombre', validators=[DataRequired()])
    descripcion_rf = StringField(u'Descripción')
    submit = SubmitField(u'Aceptar')
class TipoFamiliaForm(FlaskForm):
    """Form to add or edit a family type (TipoFamilia).

    Only the name is required; the description is optional.
    """
    nombre_tf = StringField(u'Nombre', validators=[DataRequired()])
    descripcion_tf = StringField(u'Descripción')
    submit = SubmitField(u'Aceptar')
| {
"repo_name": "originaltebas/chmembers",
"path": "app/extras/forms.py",
"copies": "1",
"size": "1223",
"license": "mit",
"hash": 4443949621216292400,
"line_mean": 27.0238095238,
"line_max": 67,
"alpha_frac": 0.6817063167,
"autogenerated": false,
"ratio": 3.2857142857142856,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9467420602414286,
"avg_score": 0,
"num_lines": 42
} |
# app/extras/views.py
# coding: utf-8
from flask import abort, flash, redirect, render_template, url_for
from flask_login import current_user, login_required

from app import db
from app.extras import extras
from app.extras.forms import EstadoCivilForm, TipoMiembroForm, RolFamiliarForm
from app.extras.forms import TipoFamiliaForm
from app.models import EstadoCivil, TipoMiembro, RolFamiliar, TipoFamilia
def check_edit_or_admin():
    """Abort with 403 Forbidden unless the current user is an editor or admin.

    BUG FIX: this helper used to *return* a redirect response, but every
    caller invokes it as a bare statement and discards the return value, so
    non-privileged users were never actually blocked.  Raising via abort()
    makes the check effective without changing any call site.
    """
    if not current_user.get_urole() >= 1:
        abort(403)
# SECCION: *****ESTADOS CIVILES*****
@extras.route('/extras/estadosciviles', methods=['GET'])
@login_required
def ver_estadosciviles():
    """List all marital statuses, with options to add, edit or delete."""
    check_edit_or_admin()
    # Listing view: flag_listar tells the template to show the table.
    estados = EstadoCivil.query.all()
    return render_template('extras/estadosciviles/base_estadosciviles.html',
                           estadosciviles=estados,
                           flag_listar=True)
@extras.route('/extras/estadosciviles/crear', methods=['GET', 'POST'])
@login_required
def crear_estadocivil():
    """Add a new marital status (EstadoCivil) to the database."""
    check_edit_or_admin()
    # Template flags: render the "create" form instead of the listing.
    flag_crear = True
    flag_listar = False
    form = EstadoCivilForm()
    if form.validate_on_submit():
        obj_ecivil = EstadoCivil(nombre_estado=form.nombre_ec.data,
                                 descripcion_estado=form.descripcion_ec.data)
        try:
            db.session.add(obj_ecivil)
            db.session.commit()
            flash('Has guardado los datos correctamente', 'success')
        except Exception as e:
            # BUG FIX: flash() takes (message, category).  The old call
            # flash('Error:', e, 'danger') passed three arguments and raised
            # a TypeError instead of reporting the database error.
            flash('Error: ' + str(e), 'danger')
        return redirect(url_for('extras.ver_estadosciviles'))
    return render_template(
        'extras/estadosciviles/base_estadosciviles.html',
        add_estadocivil=flag_crear, flag_listar=flag_listar, form=form)
@extras.route('/extras/estadosciviles/modif/<int:id>', methods=['GET', 'POST'])
@login_required
def modif_estadocivil(id):
    """Edit an existing marital status."""
    check_edit_or_admin()
    estado = EstadoCivil.query.get_or_404(id)
    form = EstadoCivilForm(obj=estado)
    if form.validate_on_submit():
        estado.nombre_estado = form.nombre_ec.data
        estado.descripcion_estado = form.descripcion_ec.data
        try:
            db.session.commit()
            flash('Has modificado los datos correctamente', 'success')
        except Exception as e:
            flash('Error: ' + str(e), 'danger')
        return redirect(url_for('extras.ver_estadosciviles'))
    # GET (or failed validation): pre-fill the form with current values.
    form.nombre_ec.data = estado.nombre_estado
    form.descripcion_ec.data = estado.descripcion_estado
    return render_template(
        'extras/estadosciviles/base_estadosciviles.html',
        add_estadocivil=False, flag_listar=False,
        form=form, estadocivil=estado)
@extras.route('/extras/estadosciviles/borrar/<int:id>',
              methods=['GET'])
@login_required
def borrar_estadocivil(id):
    """Delete a marital status."""
    check_edit_or_admin()
    estado = EstadoCivil.query.get_or_404(id)
    try:
        db.session.delete(estado)
        db.session.commit()
        flash('Has borrado los datos correctamente', 'success')
    except Exception as e:
        flash('Error: ' + str(e), 'danger')
    return redirect(url_for('extras.ver_estadosciviles'))
# SECCION: *****TIPOS DE MIEMBROS*****
@extras.route('/extras/tiposmiembros', methods=['GET'])
@login_required
def ver_tiposmiembros():
    """List all member types, with options to add, edit or delete."""
    check_edit_or_admin()
    # Listing view: flag_listar tells the template to show the table.
    tipos = TipoMiembro.query.all()
    return render_template('extras/tiposmiembros/base_tiposmiembros.html',
                           tiposmiembros=tipos,
                           flag_listar=True)
@extras.route('/extras/tiposmiembros/crear', methods=['GET', 'POST'])
@login_required
def crear_tipomiembro():
    """Add a new member type (TipoMiembro) to the database."""
    check_edit_or_admin()
    # Template flags: render the "create" form instead of the listing.
    flag_crear = True
    flag_listar = False
    form = TipoMiembroForm()
    if form.validate_on_submit():
        obj_tmiembro = TipoMiembro(
            nombre_tipomiembro=form.nombre_tm.data,
            descripcion_tipomiembro=form.descripcion_tm.data)
        try:
            db.session.add(obj_tmiembro)
            db.session.commit()
            flash('Has guardado los datos correctamente', 'success')
        except Exception as e:
            # BUG FIX: flash() takes (message, category).  The old call
            # flash('Error:', e, 'danger') passed three arguments and raised
            # a TypeError instead of reporting the database error.
            flash('Error: ' + str(e), 'danger')
        return redirect(url_for('extras.ver_tiposmiembros'))
    return render_template(
        'extras/tiposmiembros/base_tiposmiembros.html',
        add_tipomiembro=flag_crear, flag_listar=flag_listar, form=form)
@extras.route('/extras/tiposmiembros/modif/<int:id>', methods=['GET', 'POST'])
@login_required
def modif_tipomiembro(id):
    """Edit an existing member-type record and persist the changes."""
    check_edit_or_admin()
    # Edit view: neither the creation form nor the list is shown.
    flag_crear = False
    flag_listar = False
    tipo_miembro = TipoMiembro.query.get_or_404(id)
    formulario = TipoMiembroForm(obj=tipo_miembro)
    if formulario.validate_on_submit():
        tipo_miembro.nombre_tipomiembro = formulario.nombre_tm.data
        tipo_miembro.descripcion_tipomiembro = formulario.descripcion_tm.data
        try:
            db.session.commit()
            flash('Has modificado los datos correctamente', 'success')
        except Exception as e:
            flash('Error: ' + str(e), 'danger')
        return redirect(url_for('extras.ver_tiposmiembros'))
    # GET (or failed validation): show the stored values in the form.
    formulario.nombre_tm.data = tipo_miembro.nombre_tipomiembro
    formulario.descripcion_tm.data = tipo_miembro.descripcion_tipomiembro
    return render_template(
        'extras/tiposmiembros/base_tiposmiembros.html',
        add_tipomiembro=flag_crear, flag_listar=flag_listar,
        form=formulario, tipomiembro=tipo_miembro)
@extras.route('/extras/tiposmiembros/borrar/<int:id>', methods=['GET'])
@login_required
def borrar_tipomiembro(id):
    """
    Borrar un tipo de miembros

    Deletes the TipoMiembro row with the given id and redirects back to
    the listing, flashing the outcome.
    """
    check_edit_or_admin()
    obj_tmiembro = TipoMiembro.query.get_or_404(id)
    try:
        # CONSISTENCY FIX: delete() moved inside the try block, matching
        # borrar_estadocivil(), so an error raised while flushing the
        # delete is reported to the user instead of propagating.
        db.session.delete(obj_tmiembro)
        db.session.commit()
        flash('Has borrado los datos correctamente', 'success')
    except Exception as e:
        flash('Error: ' + str(e), 'danger')
    return redirect(url_for('extras.ver_tiposmiembros'))
# SECCION: ***** ROLES FAMILIARES *****
@extras.route('/extras/rolesfamiliares', methods=['GET'])
@login_required
def ver_rolesfamiliares():
    """List every family role, with links to add, edit or delete one."""
    check_edit_or_admin()
    flag_listar = True  # the template starts in list mode
    roles = RolFamiliar.query.all()
    return render_template(
        'extras/rolesfamiliares/base_rolesfamiliares.html',
        rolesfamiliares=roles,
        flag_listar=flag_listar)
@extras.route('/extras/rolesfamiliares/crear', methods=['GET', 'POST'])
@login_required
def crear_rolfamiliar():
    """
    Agregar un Rol Familiar a la Base de Datos

    On a valid POST inserts a new RolFamiliar and redirects to the
    listing; on GET renders the empty creation form.
    """
    check_edit_or_admin()
    # Variable para el template. Para decirle si es Alta o Modif
    flag_crear = True
    flag_listar = False
    form = RolFamiliarForm()
    if form.validate_on_submit():
        obj_rfamiliar = RolFamiliar(
            nombre_rolfam=form.nombre_rf.data,
            descripcion_rolfam=form.descripcion_rf.data)
        try:
            db.session.add(obj_rfamiliar)
            db.session.commit()
            flash('Has guardado los datos correctamente', 'success')
        except Exception as e:
            # BUG FIX: flash() takes (message, category); the previous
            # three-argument call raised TypeError on any commit failure.
            flash('Error: ' + str(e), 'danger')
        return redirect(url_for('extras.ver_rolesfamiliares'))
    return render_template(
        'extras/rolesfamiliares/base_rolesfamiliares.html',
        add_rolfamiliar=flag_crear, flag_listar=flag_listar,
        form=form)
@extras.route('/extras/rolesfamiliares/modif/<int:id>',
              methods=['GET', 'POST'])
@login_required
def modif_rolfamiliar(id):
    """Edit an existing family-role record and persist the changes."""
    check_edit_or_admin()
    # Edit view: neither the creation form nor the list is shown.
    flag_crear = False
    flag_listar = False
    rol = RolFamiliar.query.get_or_404(id)
    formulario = RolFamiliarForm(obj=rol)
    if formulario.validate_on_submit():
        rol.nombre_rolfam = formulario.nombre_rf.data
        rol.descripcion_rolfam = formulario.descripcion_rf.data
        try:
            db.session.commit()
            flash('Has modificado los datos correctamente', 'success')
        except Exception as e:
            flash('Error: ' + str(e), 'danger')
        return redirect(url_for('extras.ver_rolesfamiliares'))
    # GET (or failed validation): show the stored values in the form.
    formulario.nombre_rf.data = rol.nombre_rolfam
    formulario.descripcion_rf.data = rol.descripcion_rolfam
    return render_template(
        'extras/rolesfamiliares/base_rolesfamiliares.html',
        add_rolfamiliar=flag_crear, flag_listar=flag_listar,
        form=formulario, rolfamiliar=rol)
@extras.route('/extras/rolesfamiliares/borrar/<int:id>',
              methods=['GET'])
@login_required
def borrar_rolfamiliar(id):
    """
    Borrar un rol familiar

    Deletes the RolFamiliar row with the given id, flashing the outcome.
    """
    check_edit_or_admin()
    obj_rfamiliar = RolFamiliar.query.get_or_404(id)
    try:
        # CONSISTENCY FIX: delete() moved inside the try block, matching
        # borrar_estadocivil(), so flush errors are caught and reported.
        db.session.delete(obj_rfamiliar)
        db.session.commit()
        flash('Has borrado los datos correctamente', 'success')
    except Exception as e:
        # BUG FIX: flash() takes (message, category); the previous call
        # flash('Error:', e, 'danger') passed three arguments and raised
        # TypeError whenever the commit failed.
        flash('Error: ' + str(e), 'danger')
    return redirect(url_for('extras.ver_rolesfamiliares'))
# SECCION: ***** TIPOS DE FAMILIA *****
@extras.route('/extras/tiposfamilias', methods=['GET'])
@login_required
def ver_tiposfamilias():
    """List every family type, with links to add, edit or delete one."""
    check_edit_or_admin()
    flag_listar = True  # the template starts in list mode
    tipos = TipoFamilia.query.all()
    return render_template(
        'extras/tiposfamilias/base_tiposfamilias.html',
        tiposfamilias=tipos,
        flag_listar=flag_listar)
@extras.route('/extras/tiposfamilias/crear', methods=['GET', 'POST'])
@login_required
def crear_tipofamilia():
    """Create a new family type from the submitted form."""
    check_edit_or_admin()
    # Template flags: creation form shown, list hidden.
    flag_crear = True
    flag_listar = False
    formulario = TipoFamiliaForm()
    if formulario.validate_on_submit():
        nuevo_tipo = TipoFamilia(
            tipo_familia=formulario.nombre_tf.data,
            descripcion_tipo_familia=formulario.descripcion_tf.data)
        try:
            db.session.add(nuevo_tipo)
            db.session.commit()
            flash('Has guardado los datos correctamente', 'success')
        except Exception as e:
            flash('Error: ' + str(e), 'danger')
        return redirect(url_for('extras.ver_tiposfamilias'))
    return render_template(
        'extras/tiposfamilias/base_tiposfamilias.html',
        add_tipofamilia=flag_crear, flag_listar=flag_listar,
        form=formulario)
@extras.route('/extras/tiposfamilias/modif/<int:id>',
              methods=['GET', 'POST'])
@login_required
def modif_tipofamilia(id):
    """Edit an existing family-type record and persist the changes."""
    check_edit_or_admin()
    # Edit view: neither the creation form nor the list is shown.
    flag_crear = False
    flag_listar = False
    tipo = TipoFamilia.query.get_or_404(id)
    formulario = TipoFamiliaForm(obj=tipo)
    if formulario.validate_on_submit():
        tipo.tipo_familia = formulario.nombre_tf.data
        tipo.descripcion_tipo_familia = formulario.descripcion_tf.data
        try:
            db.session.commit()
            flash('Has modificado los datos correctamente', 'success')
        except Exception as e:
            flash('Error: ' + str(e), 'danger')
        return redirect(url_for('extras.ver_tiposfamilias'))
    # GET (or failed validation): show the stored values in the form.
    formulario.nombre_tf.data = tipo.tipo_familia
    formulario.descripcion_tf.data = tipo.descripcion_tipo_familia
    return render_template(
        'extras/tiposfamilias/base_tiposfamilias.html',
        add_tipofamilia=flag_crear, flag_listar=flag_listar,
        form=formulario, tipofamilia=tipo)
@extras.route('/extras/tiposfamilias/borrar/<int:id>',
              methods=['GET'])
@login_required
def borrar_tipofamilia(id):
    """
    Borrar un tipo de familia

    Deletes the TipoFamilia row with the given id, flashing the outcome.
    """
    check_edit_or_admin()
    obj_tfamilia = TipoFamilia.query.get_or_404(id)
    try:
        # CONSISTENCY FIX: delete() moved inside the try block, matching
        # borrar_estadocivil(), so flush errors are caught and reported.
        db.session.delete(obj_tfamilia)
        db.session.commit()
        flash('Has borrado los datos correctamente', 'success')
    except Exception as e:
        flash('Error: ' + str(e), 'danger')
    return redirect(url_for('extras.ver_tiposfamilias'))
| {
"repo_name": "originaltebas/chmembers",
"path": "app/extras/views.py",
"copies": "1",
"size": "13995",
"license": "mit",
"hash": 5168303634307323000,
"line_mean": 29.9429223744,
"line_max": 79,
"alpha_frac": 0.61189336,
"autogenerated": false,
"ratio": 3.1250837614473976,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4236977121447397,
"avg_score": null,
"num_lines": null
} |
# app/familias/forms.py
# coding: utf-8
from flask_wtf import FlaskForm
from wtforms import StringField, SubmitField, HiddenField, SelectField
from wtforms.validators import InputRequired, Length
class FamiliasForm(FlaskForm):
    """
    Form backing the Familia model: hidden ids plus the user-editable
    surname, description and phone fields, and a family-type select whose
    choices are filled in by the views before rendering/validation.
    """
    # Primary key of the family being edited (empty on create).
    id = HiddenField("id")
    # Foreign key to the Direccion row chosen in the address modal.
    id_direccion = HiddenField("idDir")
    # Modelo Familia
    apellidos_familia = StringField(u'Apellido de la Familia',
                                    validators=[InputRequired(),
                                                Length(min=1, max=60)])
    descripcion_familia = StringField(u'Descripción de la Familia',
                                      validators=[InputRequired(),
                                                  Length(min=0, max=200)])
    telefono_familia = StringField(u'Teléfono de la Familia',
                                   validators=[InputRequired(),
                                               Length(min=0, max=15)])
    # Choices are (id, tipo_familia) tuples injected by the views.
    TipoFamilia = SelectField(u'Tipo de Familia', coerce=int)
    submit = SubmitField(u'Aceptar')
class DireccionModalForm(FlaskForm):
    """Modal form for creating a Direccion (street address) record."""
    # Modelo Direccion
    tipo_via = StringField(u'Tipo de vía',
                           validators=[InputRequired(),
                                       Length(min=1, max=20)])
    nombre_via = StringField(u'Nombre de la vía',
                             validators=[InputRequired(),
                                         Length(min=1, max=100)])
    nro_via = StringField(u'Nro',
                          validators=[InputRequired(),
                                      Length(min=1, max=10)])
    # Optional free-text qualifiers (no validators).
    portalescalotros_via = StringField(u'Portal/Esc/Otro')
    piso_nroletra_via = StringField(u'Nro/Letra del Piso')
    cp_via = StringField(u'CP',
                         validators=[InputRequired(),
                                     Length(min=1, max=10)])
    ciudad_via = StringField(u'Ciudad',
                             validators=[InputRequired(),
                                         Length(min=1, max=50)])
    provincia_via = StringField(u'Provincia',
                                validators=[InputRequired(),
                                            Length(min=1, max=50)])
    pais_via = StringField(u'País',
                           validators=[InputRequired(),
                                       Length(min=1, max=50)])
    submit = SubmitField(u'Crear Dirección')
class AsignacionMiembrosForm(FlaskForm):
    """
    Form for assigning existing members to a family/group.

    Carries two hidden comma-separated id lists maintained client-side:
    members assigned (ids_in) and members left unassigned (ids_out).
    """
    ids_in = HiddenField('Ids IN')
    ids_out = HiddenField('Ids OUT')
    submit = SubmitField(u'Aceptar')
| {
"repo_name": "originaltebas/chmembers",
"path": "app/familias/forms.py",
"copies": "1",
"size": "2788",
"license": "mit",
"hash": -5743181648455708000,
"line_mean": 35.0933333333,
"line_max": 74,
"alpha_frac": 0.502875629,
"autogenerated": false,
"ratio": 4.091176470588235,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 75
} |
# app/familias/views.py
# coding: utf-8
from flask import flash, jsonify
from flask import redirect, render_template, url_for, request
from flask_login import current_user, login_required
from sqlalchemy import func, or_
from app.familias import familias
from app.familias.forms import FamiliasForm
from app.familias.forms import DireccionModalForm, AsignacionMiembrosForm
from app import db
from app.models import Familia, Direccion, Miembro, TipoFamilia
def check_edit_or_admin():
    """
    Abort request handling for users below editor level (urole < 1) by
    redirecting them to the hub.

    BUG FIX: the old version *returned* a redirect, but every caller in
    this module invokes it as a bare statement and discards the return
    value, so the access check silently did nothing.  Raising the
    redirect through flask.abort() makes it take effect without changing
    any call site.
    """
    if not current_user.get_urole() >= 1:
        # Local import keeps the module-level import list untouched.
        from flask import abort
        abort(redirect(url_for("home.hub")))
@familias.route('/familias', methods=['GET'])
@login_required
def ver_familias():
    """
    List every family joined with its address, its family type and a
    per-family member count.  Outer joins keep families without members
    or without a type in the result.
    """
    check_edit_or_admin()
    flag_listar = True
    # Subquery: number of members per family (id_familia -> contar).
    nro_personas = db.session.query(Miembro.id_familia,
                                    func.count(Miembro.id_familia)
                                    .label('contar'))\
                             .group_by(Miembro.id_familia).subquery()
    # Families + address/type columns + the member count from the subquery.
    query_familias = db.session.query(Familia)\
                               .join(Direccion,
                                     Familia.id_direccion ==
                                     Direccion.id)\
                               .outerjoin(nro_personas,
                                          Familia.id ==
                                          nro_personas.c.id_familia)\
                               .outerjoin(TipoFamilia,
                                          Familia.id_tipofamilia ==
                                          TipoFamilia.id)\
                               .add_columns(
                                    Familia.id,
                                    Familia.apellidos_familia,
                                    Familia.descripcion_familia,
                                    Familia.telefono_familia,
                                    TipoFamilia.tipo_familia,
                                    Direccion.tipo_via,
                                    Direccion.nombre_via,
                                    Direccion.nro_via,
                                    Direccion.portalescalotros_via,
                                    Direccion.cp_via,
                                    Direccion.ciudad_via,
                                    Direccion.provincia_via,
                                    Direccion.pais_via,
                                    nro_personas.c.contar)
    return render_template('familias/base_familias.html',
                           familias=query_familias, flag_listar=flag_listar)
@familias.route('/familias/crear', methods=['GET', 'POST'])
@login_required
def crear_familia():
    """Create a new Familia; answers JSON with the follow-up URL on POST."""
    check_edit_or_admin()
    # Template flags: tell the view whether this is a create or an edit.
    flag_crear = True
    flag_listar = False
    formulario = FamiliasForm()
    # The family-type select is populated from the database at request time.
    formulario.TipoFamilia.choices = [(row.id, row.tipo_familia)
                                      for row in TipoFamilia.query.all()]
    if formulario.validate_on_submit():
        nueva_familia = Familia(
            apellidos_familia=formulario.apellidos_familia.data,
            descripcion_familia=formulario.descripcion_familia.data,
            telefono_familia=formulario.telefono_familia.data,
            id_tipofamilia=formulario.TipoFamilia.data,
            id_direccion=formulario.id_direccion.data)
        try:
            db.session.add(nueva_familia)
            db.session.commit()
            flash('Has guardado los datos correctamente', 'success')
            status = 'ok'
        except Exception as e:
            flash('Error: ' + str(e), 'danger')
            status = 'ko'
        # The client-side form handler expects a JSON payload.
        return jsonify(status=status, url=url_for('familias.ver_familias'))
    return render_template('familias/base_familias.html',
                           flag_crear=flag_crear,
                           flag_listar=flag_listar, form=formulario)
@familias.route('/familias/modificar/<int:id>',
                methods=['GET', 'POST'])
@login_required
def modif_familia(id):
    """
    Modificar un familia

    GET renders the edit form pre-filled; POST validates and commits the
    changes, answering JSON with the redirect target for the client.
    """
    check_edit_or_admin()
    flag_crear = False
    flag_listar = False
    # lo hago por partes para actualizar más facil
    # la dir si se crea una nueva
    obj_familia = Familia.query.get_or_404(id)
    form_familia = FamiliasForm(obj=obj_familia)
    form_familia.TipoFamilia.choices = [(row.id, row.tipo_familia)
                                        for row in TipoFamilia.query.all()]
    # BUG FIX: form_dir used to be created only inside the GET branch, so
    # a POST that failed validation reached render_template() with
    # form_dir unbound and crashed with UnboundLocalError.  Build it
    # unconditionally; the GET-only block now just pre-fills the fields.
    obj_dir = Direccion.query.get_or_404(obj_familia.id_direccion)
    form_dir = DireccionModalForm(obj=obj_dir)
    if request.method == 'GET':
        form_familia.TipoFamilia.data = obj_familia.id_tipofamilia
        form_familia.id_direccion.data = obj_familia.id_direccion
    if form_familia.validate_on_submit():
        form = form_familia
        obj_familia.apellidos_familia = form.apellidos_familia.data
        obj_familia.descripcion_familia = form.descripcion_familia.data
        obj_familia.telefono_familia = form.telefono_familia.data
        obj_familia.id_tipofamilia = form.TipoFamilia.data
        obj_familia.id_direccion = form.id_direccion.data
        try:
            # confirmo todos los datos en la db
            db.session.commit()
            flash('Has guardado los datos correctamente', 'success')
            status = 'ok'
        except Exception as e:
            flash('Error: ' + str(e), 'danger')
            status = 'ko'
        url = url_for('familias.ver_familias')
        return jsonify(status=status, url=url)
    return render_template('familias/base_familias.html',
                           flag_crear=flag_crear,
                           flag_listar=flag_listar,
                           form_familia=form_familia,
                           form_dir=form_dir)
@familias.route('/familias/borrar/<int:id>',
                methods=['GET'])
@login_required
def borrar_familia(id):
    """Delete the family with the given id and return to the listing."""
    check_edit_or_admin()
    familia = Familia.query.get_or_404(id)
    # Delete and commit inside one try so any DB error is reported.
    try:
        db.session.delete(familia)
        db.session.commit()
        flash('Has borrado los datos correctamente.', 'success')
    except Exception as e:
        flash('Error: ' + str(e), 'danger')
    return redirect(url_for('familias.ver_familias'))
@familias.route('/familias/asignar', methods=['GET'])
@login_required
def ver_familias_asignar():
    """
    Landing page for member assignment: lists every family (same joined
    query as ver_familias) so the user can pick one to assign members to.
    """
    check_edit_or_admin()
    flag_listar = True
    # Subquery: number of members per family (id_familia -> contar).
    nro_personas = db.session.query(Miembro.id_familia,
                                    func.count(Miembro.id_familia)
                                    .label('contar'))\
                             .group_by(Miembro.id_familia).subquery()
    # Families + address/type columns + the member count from the subquery.
    query_familias = db.session.query(Familia)\
                               .join(Direccion,
                                     Familia.id_direccion ==
                                     Direccion.id)\
                               .outerjoin(nro_personas,
                                          Familia.id ==
                                          nro_personas.c.id_familia)\
                               .outerjoin(TipoFamilia,
                                          Familia.id_tipofamilia ==
                                          TipoFamilia.id)\
                               .add_columns(
                                    Familia.id,
                                    Familia.apellidos_familia,
                                    Familia.descripcion_familia,
                                    Familia.telefono_familia,
                                    TipoFamilia.tipo_familia,
                                    Direccion.tipo_via,
                                    Direccion.nombre_via,
                                    Direccion.nro_via,
                                    Direccion.portalescalotros_via,
                                    Direccion.cp_via,
                                    Direccion.ciudad_via,
                                    Direccion.provincia_via,
                                    Direccion.pais_via,
                                    nro_personas.c.contar)
    return render_template('familias/base_familias_asignar.html',
                           familias=query_familias,
                           flag_listar=flag_listar)
@familias.route('/familias/asignar/miembros/<int:id>',
                methods=['GET', 'POST'])
@login_required
def asignar_miembros(id):
    """
    Asignar miembros a una Familia

    GET renders the assignment screen with the members currently in the
    family and the unassigned ones; POST applies the new membership and
    answers JSON with the follow-up URL.
    """
    check_edit_or_admin()
    flag_listar = False
    FormMiembros = AsignacionMiembrosForm()
    if request.method == 'GET':
        obj_familia = Familia.query.get_or_404(id)
        form_familia = FamiliasForm(obj=obj_familia)
        obj_miembros_in = db.session.query(Miembro.id, Miembro.nombres,
                                           Miembro.apellidos)\
                                    .filter(Miembro.id_familia == id)\
                                    .all()
        obj_miembros_out = db.session.query(Miembro.id, Miembro.nombres,
                                            Miembro.apellidos)\
                                     .filter(or_(
                                            Miembro.id_familia == 0,
                                            Miembro.id_familia.is_(None)))\
                                     .all()
        # The hidden fields carry comma-separated id lists so the
        # client-side widget can report the final assignment back.
        FormMiembros.ids_in.data = ""
        for idm in obj_miembros_in:
            FormMiembros.ids_in.data += str(idm.id) + ","
        FormMiembros.ids_out.data = ""
        for idm in obj_miembros_out:
            FormMiembros.ids_out.data += str(idm.id) + ","
        return render_template('familias/base_familias_asignar.html',
                               form_familia=form_familia,
                               miembros_in=obj_miembros_in,
                               miembros_out=obj_miembros_out,
                               flag_listar=flag_listar,
                               FormMiembros=FormMiembros)
    elif FormMiembros.validate_on_submit():
        # ROBUSTNESS FIX: drop empty fragments so an empty hidden field
        # ("") no longer yields [''] and a useless IN ('') query.
        ids_in = [i for i in FormMiembros.ids_in.data.split(",") if i]
        ids_out = [i for i in FormMiembros.ids_out.data.split(",") if i]
        obj_in = Miembro.query.filter(Miembro.id.in_(ids_in))
        obj_out = Miembro.query.filter(
                    or_(Miembro.id.in_(ids_out),
                        Miembro.id.is_(None)))
        # Para borrar las relaciones de los antiguos
        for o in obj_out:
            o.id_familia = None
        # Para agregar a los recien asignados
        for m in obj_in:
            m.id_familia = id
        try:
            db.session.commit()
            flash('Has guardado los datos correctamente', 'success')
        except Exception as e:
            # BUG FIX: flash() takes (message, category); the previous call
            # flash('Error: ', e, 'danger') passed three arguments and
            # raised TypeError whenever the commit failed.
            flash('Error: ' + str(e), 'danger')
        url = url_for('familias.ver_familias_asignar')
        return jsonify(url=url)
    else:
        # POST that failed validation.
        flash('Los datos de miembros no han podido modificarse', 'danger')
        return redirect(url_for('familias.ver_familias_asignar'))
    # NOTE: the old trailing jsonify() return was unreachable (every
    # branch above returns) and has been removed.
| {
"repo_name": "originaltebas/chmembers",
"path": "app/familias/views.py",
"copies": "1",
"size": "12305",
"license": "mit",
"hash": 6258288862185953000,
"line_mean": 38.2026143791,
"line_max": 79,
"alpha_frac": 0.4800845391,
"autogenerated": false,
"ratio": 3.6171714201705383,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.45972559592705386,
"avg_score": null,
"num_lines": null
} |
# App for browsing the server.
import os
import time
from stat import *
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse, HttpResponseRedirect, Http404
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.utils import simplejson
from django.utils.simplejson.encoder import JSONEncoder
class ExtJsonEncoder(JSONEncoder):
    """JSON encoder that also serialises iterables and callables."""

    def default(self, c):
        # Anything iterable (generators, iterators) is materialised
        # into a plain list.
        if hasattr(c, '__iter__'):
            return list(c)
        # Callables (closures, functors) are invoked and their result used.
        if hasattr(c, '__call__'):
            return c()
        # Everything else falls back to the base implementation.
        return JSONEncoder.default(self, c)
def entry_info(path):
    """
    Return info on a directory entry.

    Lists *path* and returns a list of (name, type, size, atime, mtime,
    ctime) tuples for each non-hidden entry; on failure returns a dict
    with a single "error" message instead.  (Python 2 `except` syntax.)
    """
    try:
        flist = os.listdir(path)
    except OSError, (errno, strerr):
        return {"error": "%s: %s" % (strerr, path)}
    except Exception, e:
        print e
        return {"error": "%s: %s" % (e.message, path)}
    stats = []
    for entry in flist:
        # Skip hidden (dot-prefixed) entries.
        if entry.startswith("."):
            continue
        type = "unknown"
        st = os.stat(os.path.join(path, entry))
        mode = st[ST_MODE]
        if S_ISDIR(mode):
            type = "dir"
        elif S_ISLNK(mode):
            # NOTE(review): os.stat follows symlinks, so S_ISLNK never
            # matches here; os.lstat would be needed to detect links.
            type = "link"
        elif S_ISREG(mode):
            type = "file"
        stats.append((
            entry,
            type,
            st.st_size,
            st.st_atime,
            st.st_mtime,
            st.st_ctime
        ))
    return stats
@login_required
def ls(request):
    """
    List a directory on the server.

    Reads the "dir" GET parameter, joins it under
    MEDIA_ROOT/USER_FILES_PATH and returns the entry_info() listing
    serialised as JSON.

    NOTE(review): "dir" is user-controlled and joined onto the root path
    without normalisation — a "../" value could escape the intended
    directory; confirm and sanitise.
    """
    dir = request.GET.get("dir", "")
    root = os.path.relpath(os.path.join(
        settings.MEDIA_ROOT,
        settings.USER_FILES_PATH
    ))
    fulldir = os.path.join(root, dir)
    response = HttpResponse(mimetype="application/json")
    simplejson.dump(entry_info(fulldir), response)
    return response
@login_required
def explore(request):
    """
    Browse the server file system.

    Renders the file-browser page with an empty template context.
    """
    return render_to_response("filebrowser/explore.html",
            {}, context_instance=RequestContext(request))
| {
"repo_name": "vitorio/ocropodium",
"path": "ocradmin/filebrowser/views.py",
"copies": "1",
"size": "2187",
"license": "apache-2.0",
"hash": -2592366683541191700,
"line_mean": 24.1379310345,
"line_max": 67,
"alpha_frac": 0.5939643347,
"autogenerated": false,
"ratio": 3.9193548387096775,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00019817677368212446,
"num_lines": 87
} |
# APPFS Ex8
# by Tam Tran
'''This program will generate the steiner tree, which is the minimum subgraph of G spanning all the terminal nodes'''
import sys
import networkx as nx
from sympy import sieve
import time
from resource import getrusage as resource_usage, RUSAGE_SELF
import heapq
def run(graphfile):
    """Read a weighted edge list, build the Steiner tree over the
    prime-numbered vertices, and print size/weight statistics comparing
    the Steiner subgraph with the original graph.

    NOTE(review): written against networkx 1.x semantics — G.neighbors()
    is compared to [] (an iterator under networkx >= 2, so always False)
    and G.edges() is mutated while iterating; confirm the installed
    version before relying on the cleanup loop.
    """
    # generate graph from file
    G = nx.read_edgelist(graphfile, nodetype=int, data=(('weight',float),))
    for u,v in G.edges():
        if not G[u][v]: #removing edges w/o a weight
            G.remove_edge(u,v) # the nodes will still remain tho
            if G.neighbors(u) == []:
                G.remove_node(u)
            if G.neighbors(v) == []:
                G.remove_node(v)
    # set terminal nodes: the prime-numbered vertices (via sympy's sieve)
    V = list(G.nodes())
    n = len(V)
    if set(V) == set(list(range(1,n+1))):
        # Vertices are exactly 1..n: take all primes up to n in one call.
        T = list(sieve.primerange(1, n+1))
    else:
        # Arbitrary vertex labels: test each vertex for primality.
        T = []
        for v in V:
            if v in list(sieve.primerange(v, v+1)):
                T.append(v)
    # generate steiner tree
    steinerG = gen_steiner(G,T)
    print('original G -> no of edges:', G.size(), ', sum of edge weights', G.size(weight='weight'))
    print('steinerG -> no of edges:', steinerG.size(), '(', format(100*steinerG.size()/float(G.size()), '.2f'), '%)', \
        ', sum of edge weights', steinerG.size(weight='weight'), '(', format(100*steinerG.size(weight='weight')/float(G.size(weight='weight')), '.2f'), '%)')
    print('steinerG is connected:', nx.is_connected(steinerG))
    print('steinerG is acyclic:', not bool(nx.cycle_basis(steinerG)))
def gen_steiner(G,T):
    """Build a Steiner subgraph of G spanning the terminal nodes T.

    Greedy construction ("m2" variant): start from the first terminal
    and repeatedly connect one still-unconnected terminal to the partial
    tree via a weighted shortest path until every terminal is attached.

    NOTE(review): loneDistances is filled with heapq.heappush but read
    with heapq.nsmallest(..., key=...), which scans the whole list anyway;
    the heap ordering (by node id, the tuple's first item) is never relied
    on for correctness — a plain list + min() would behave identically.
    """
    # generate steiner tree
    steinerG = nx.Graph()
    steinerG.add_node(T[0]) # set root node
    loneTerminals = list(set(T) - set(steinerG.nodes()))
    while loneTerminals:
        # just selecting an arbitrary loneT since they will all get added to steinerG anyways
        # disclaimer: but then the edges that connect the Tnodes will differ
        # loneT = loneTerminals[0] -> b15.gph 215; loneT = loneTerminals[-1] -> b15.gph 211
        loneT = loneTerminals[-1]
        # this is equivalent to a Dijkstra of loneT as root, to the nodes of steinerG
        loneDistances = [] #unique for each loneT, hence it's ok to be generated each time
        for steinerNode in steinerG.nodes():
            length = nx.shortest_path_length(G, source=steinerNode, target=loneT, weight='weight')
            heapq.heappush(loneDistances, (steinerNode, loneT, length))
        # Pick the closest tree node; ties broken by smaller node id.
        mSrc,mDst, mLength = heapq.nsmallest(1, loneDistances, key=lambda x: (x[2], x[0]))[0] #bc it's a list
        # add path of steinerG to loneT to steinerG
        new_path = nx.shortest_path(G, mSrc, mDst, weight='weight') # mDst should just be loneT
        for u,v in zip(new_path[:-1],new_path[1:]): # parse through list to obtain weight info
            steinerG.add_edge(u,v, weight=G[u][v]['weight'])
        loneTerminals.remove(mDst)
    return steinerG
# Time the run with both wall-clock and CPU measurements.
t0, s0 = time.time(), resource_usage(RUSAGE_SELF)
run(sys.argv[1])
s1, t1 = resource_usage(RUSAGE_SELF), time.time()
print("wall clock time", t1 - t0)
# BUG FIX: this line previously printed ru_stime, which is *system* CPU
# time; ru_utime is the user-mode CPU time the label claims to report.
print("user clock time", s1.ru_utime - s0.ru_utime)
| {
"repo_name": "appfs/appfs",
"path": "Tam/exercise8/ex8documentation/ex8.py",
"copies": "4",
"size": "3423",
"license": "mit",
"hash": 2061045415334643500,
"line_mean": 39.75,
"line_max": 159,
"alpha_frac": 0.6231375986,
"autogenerated": false,
"ratio": 3.1723818350324375,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.019122863058958887,
"num_lines": 84
} |
# APPFS Exercise 2
# by Tam Tran
# in Python3
# 2 goals:
# 1) read XML to extract desired information and write it into an CSV
# 2) validate the XML against its XSD schema
# DEFINE FUNCTIONS HERE
import sys, csv
from bs4 import BeautifulSoup as Soup
from lxml import etree
#import pdb
def read_XML(file_XML):
    """Read the gas-day XML file and extract (date, hour, power) rows.

    Values that do not fit the CSV format (non-numeric hours or power
    values) are skipped.  Returns a list of [date, ' HH', ' value']
    lists, one per amountOfPower element.
    """
    # initialize
    list_data = []
    collect_date = ''
    collect_hour = ''
    collect_powervalue = ''
    with open(file_XML, 'r') as f:
        xmlsoup = Soup(f, 'lxml')
    # BeautifulSoup lower-cases tag names, so search terms must be lower case.
    for gasday in xmlsoup.find_all('gasday'):
        collect_date = gasday.attrs['date']
        # descend gasDay -> boundaryNode -> time -> amountOfPower
        for boundarynode in gasday.find_all('boundarynode'):
            for time in boundarynode.find_all('time'):
                collect_hour = time.attrs['hour'].zfill(2)  # HH format
                if not collect_hour.isdigit():
                    continue
                for power in time.find_all('amountofpower'):
                    collect_powervalue = power.attrs['value']
                    # Accept integers with an optional leading minus sign.
                    # BUG FIX: the old check indexed collect_powervalue[0]
                    # and raised IndexError on an empty attribute value;
                    # startswith() is safe on the empty string.
                    if not (collect_powervalue.isdigit()
                            or (collect_powervalue.startswith('-')
                                and collect_powervalue[1:].isdigit())):
                        continue
                    # One row per branch of the tree, pre-padded to match
                    # the "YYYY-MM-DD; HH; value" CSV layout.
                    list_data.append([collect_date,
                                      str(' ' + collect_hour),
                                      str(' ' + collect_powervalue)])
    return list_data  # list of lists
def write_CSV(data, file_CSV):
    """Write a list of row-lists to *file_CSV* as semicolon-separated CSV.

    Parameters:
        data: list of lists, one inner list per CSV row.
        file_CSV: destination path; an existing file is overwritten.
    """
    # BUG FIX: the file handle was opened but never closed; the with-block
    # guarantees the data is flushed to disk.  newline='' is the documented
    # way to open csv output files (prevents doubled line endings on
    # Windows).
    with open(file_CSV, 'wt', encoding='utf-8', newline='') as f:
        writer = csv.writer(f, delimiter=';')
        writer.writerows(data)
def validate_XML(file_XML, file_XSD):
    """Check that *file_XML* conforms to the XSD schema in *file_XSD*.

    Prints a confirmation on success; raises ValueError otherwise.
    """
    schema = etree.XMLSchema(etree.parse(file_XSD))
    document = etree.parse(file_XML)
    if schema.validate(document):
        print('valid XML schema')
    else:
        raise ValueError('invalid XML schema')
# RUN FUNCTION CALLS HERE
# Usage: python <script> <input.xml> <output.csv> <schema.xsd>
input_XML = sys.argv[1]
output_CSV = sys.argv[2]
input_XSD = sys.argv[3]
data_extracted = read_XML(input_XML)
write_CSV(data_extracted, output_CSV)
validate_XML(input_XML, input_XSD)
# to validate that your written CSV will be read legibly
#ff = open(output_CSV, 'rt')
#reader = csv.reader(ff, delimiter = ';')
#for row in reader:
#    print(row)
| {
"repo_name": "frodo4fingers/appfs",
"path": "Tam/exercise2code.py",
"copies": "3",
"size": "3582",
"license": "mit",
"hash": -1873519125606626300,
"line_mean": 29.1008403361,
"line_max": 143,
"alpha_frac": 0.598548297,
"autogenerated": false,
"ratio": 4.033783783783784,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.028085267270150142,
"num_lines": 119
} |
# APPFS Exercise 5
# by Tam Tran
# in Python3
# 3 goals:
# 1) read in graph as CSV and extract its edge information to distances between vertices
# 2) update the distance between the vertices anytime a shorter distance is found (shortest path)
# 3) return the longest shortest path
import sys
import pandas as pd
import numpy as np
def run(graphfile):
    """Compute single-source shortest paths from vertex 1 over the graph
    in *graphfile* (space-separated "src dst weight" lines; the first
    line is skipped) and print the vertex with the longest shortest path
    together with its distance.
    """
    graph = pd.read_csv(graphfile, sep = " ", names = ['src', 'dst', 'weight']).drop(0)
    # generate distances dataframe: one row per vertex with the tentative
    # distance (inf) and the predecessor on the current best path (None)
    unique_vertices = np.unique(np.append(graph['src'].unique(), graph['dst'].unique()))
    distances = pd.DataFrame(unique_vertices, columns = ['vertex'])
    distances['shortest_distance'] = [float('inf')] * distances.shape[0]
    distances['previous_vertex'] = [None] * distances.shape[0]
    # to keep track of the vertices that have been processed
    visited = []
    unvisited = list(distances['vertex'])
    # setting the vertex 1 as our reference/starting vertex
    s = 1
    unvisited.remove(s)
    visited.append(s)
    distances.loc[distances.vertex == s, 'shortest_distance'] = 0
    next_s = s
    # run shortest path algorithm (Dijkstra-style: one settle per round)
    while unvisited:
        distances, next_s = update_distances(graph, distances, next_s, unvisited)
        unvisited.remove(next_s)
        visited.append(next_s)
    # get longest shortest path
    # NOTE(review): Series.argmax returned the index *label* in old pandas
    # but returns the *position* in modern pandas — confirm the installed
    # version or switch to idxmax when upgrading.
    print("RESULT VERTEX", distances.loc[distances['shortest_distance'].argmax(), 'vertex'])
    print("RESULT DIST", int(distances['shortest_distance'].max()))
def update_distances(graph, distances, s, unvisited):
    """Relax all edges incident to *s* and pick the next vertex to settle.

    Updates shortest_distance/previous_vertex in *distances* for each
    neighbour of *s* whenever a shorter path through *s* is found, then
    returns the updated frame plus the unvisited vertex with the minimal
    tentative distance (the classic Dijkstra selection step).
    """
    # NOTE(review): debug trace left in by the author; it pollutes the
    # "RESULT ..." output lines — consider removing.
    print('s', s)
    # Neighbours in the undirected sense: s may appear as src or as dst.
    s_neighbors = list(graph[graph['src'] == s]['dst']) + list(graph[graph['dst'] == s]['src'])
    for dnode in s_neighbors:
        current_dist = distances[distances.vertex == dnode]['shortest_distance'].iloc[0]
        s_weight = distances.loc[distances.vertex == s, 'shortest_distance']
        # The edge may be stored in either direction; use whichever exists.
        edge_weight1 = graph.loc[(graph['src'] == s) & (graph['dst'] == dnode), 'weight']
        edge_weight2 = graph.loc[(graph['dst'] == s) & (graph['src'] == dnode), 'weight']
        if not edge_weight1.empty: edge_weight = edge_weight1.iloc[0]
        elif not edge_weight2.empty: edge_weight = edge_weight2.iloc[0]
        compare_dist = float(s_weight) + float(edge_weight)
        if current_dist > compare_dist:
            distances.loc[distances.vertex == dnode, ['shortest_distance', 'previous_vertex']] = compare_dist, s
    # get the next_s that is still in unvisited
    indices = distances['shortest_distance'].argsort() #returns list of indices of elements in ascending order
    for i in indices:
        if distances.iloc[i]['vertex'] in unvisited:
            next_s = distances.iloc[i]['vertex']
            break
    return distances, next_s
# Script entry point: the first CLI argument is the path to the graph file.
graphfile = sys.argv[1]
run(graphfile)
| {
"repo_name": "FirstSanny/appfs",
"path": "Tam/ex5.py",
"copies": "2",
"size": "2851",
"license": "mit",
"hash": -3769262805365254700,
"line_mean": 35.0886075949,
"line_max": 112,
"alpha_frac": 0.6373202385,
"autogenerated": false,
"ratio": 3.7612137203166225,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.04255284620462263,
"num_lines": 79
} |
# app/ggcc/forms.py
# coding: utf-8
from flask_wtf import FlaskForm
from wtforms import StringField, SubmitField, HiddenField
from wtforms.validators import InputRequired, Length
class GGCCForm(FlaskForm):
    """
    Form backing the GrupoCasero (home group) model: hidden ids plus the
    editable group name and description.
    """
    # Primary key of the group being edited (empty on create).
    id = HiddenField("id")
    # Foreign key to the Direccion row chosen in the address modal.
    id_direccion = HiddenField("idDir")
    # Modelo GGCC
    nombre_grupo = StringField(u'Nombre del Grupo Casero',
                               validators=[InputRequired(),
                                           Length(min=1, max=60)])
    descripcion_grupo = StringField(u'Descripción del Grupo Casero',
                                    validators=[InputRequired(),
                                                Length(min=0, max=200)])
    submit = SubmitField(u'Aceptar')
class DireccionModalForm(FlaskForm):
    """Modal form for creating a Direccion (street address) record."""
    # Modelo Direccion
    tipo_via = StringField(u'Tipo de vía',
                           validators=[InputRequired(),
                                       Length(min=1, max=20)])
    nombre_via = StringField(u'Nombre de la vía',
                             validators=[InputRequired(),
                                         Length(min=1, max=100)])
    nro_via = StringField(u'Nro',
                          validators=[InputRequired(),
                                      Length(min=1, max=10)])
    # Optional free-text qualifiers (no validators).
    portalescalotros_via = StringField(u'Portal/Esc/Otro')
    piso_nroletra_via = StringField(u'Nro/Letra del Piso')
    cp_via = StringField(u'CP',
                         validators=[InputRequired(),
                                     Length(min=1, max=10)])
    ciudad_via = StringField(u'Ciudad',
                             validators=[InputRequired(),
                                         Length(min=1, max=50)])
    provincia_via = StringField(u'Provincia',
                                validators=[InputRequired(),
                                            Length(min=1, max=50)])
    pais_via = StringField(u'País',
                           validators=[InputRequired(),
                                       Length(min=1, max=50)])
    submit = SubmitField(u'Crear Dirección')
class AsignacionMiembrosForm(FlaskForm):
    """
    Form for assigning existing members to a home group (ggcc).

    Carries two hidden comma-separated id lists maintained client-side:
    members assigned (ids_in) and members left unassigned (ids_out).
    """
    ids_in = HiddenField('Ids IN')
    ids_out = HiddenField('Ids OUT')
    submit = SubmitField(u'Aceptar')
| {
"repo_name": "originaltebas/chmembers",
"path": "app/ggcc/forms.py",
"copies": "1",
"size": "2480",
"license": "mit",
"hash": -5265926113339010000,
"line_mean": 34.9402985075,
"line_max": 72,
"alpha_frac": 0.5050505051,
"autogenerated": false,
"ratio": 4.118136439267887,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5123186944367887,
"avg_score": null,
"num_lines": null
} |
# app/ggcc/views.py
# coding: utf-8
from flask import abort, flash, jsonify
from flask import redirect, render_template, url_for, request
from flask_login import current_user, login_required
from flask_paginate import Pagination, get_page_parameter
from sqlalchemy import func, or_

from app import db
from app.ggcc import ggcc
from app.ggcc.forms import GGCCForm, DireccionModalForm, AsignacionMiembrosForm
from app.models import GrupoCasero, Direccion, Miembro
def check_edit_or_admin():
    """Enforce that the current user is at least an editor (urole >= 1);
    otherwise interrupt the request with a redirect to the hub.

    Fix: the original *returned* the redirect, but every caller invokes
    this as a bare statement and discards the return value, so the
    access check never actually took effect.  ``abort()`` accepts a
    Response object and raises it as an HTTPException, so the redirect
    is delivered without having to change any caller.
    """
    if not current_user.get_urole() >= 1:
        abort(redirect(url_for("home.hub")))
@ggcc.route('/ggcc', methods=['GET'])
@login_required
def ver_ggcc():
    """List every home group with its address and member count."""
    check_edit_or_admin()
    flag_listar = True
    # Subquery: members per group, exposed as column 'contar'.
    nro_personas = db.session.query(Miembro.id_grupocasero,
                                    func.count(Miembro.id_grupocasero)
                                    .label('contar'))\
                             .group_by(Miembro.id_grupocasero).subquery()
    # Groups joined with their address; OUTER join on the count subquery
    # so groups with no members still appear (contar is NULL for them).
    query_ggcc = db.session.query(GrupoCasero)\
                           .join(Direccion,
                                 GrupoCasero.id_direccion ==
                                 Direccion.id)\
                           .outerjoin(nro_personas,
                                      GrupoCasero.id ==
                                      nro_personas.c.id_grupocasero)\
                           .add_columns(
                                GrupoCasero.id,
                                GrupoCasero.nombre_grupo,
                                GrupoCasero.descripcion_grupo,
                                Direccion.tipo_via,
                                Direccion.nombre_via,
                                Direccion.nro_via,
                                Direccion.portalescalotros_via,
                                Direccion.cp_via,
                                Direccion.ciudad_via,
                                Direccion.provincia_via,
                                Direccion.pais_via,
                                nro_personas.c.contar)
    return render_template('ggcc/base_ggcc.html',
                           ggcc=query_ggcc, flag_listar=flag_listar)
@ggcc.route('/ggcc/crear', methods=['GET', 'POST'])
@login_required
def crear_gc():
    """Create a new home group.

    GET renders the creation form; a valid POST inserts the group and
    answers JSON {status, url} for the client-side redirect.

    Fix: rolls back the session after a failed commit so it remains
    usable for the rest of the request.
    """
    check_edit_or_admin()
    # Template flags: create mode, not the listing.
    flag_crear = True
    flag_listar = False
    form = GGCCForm()
    if form.validate_on_submit():
        obj_gc = GrupoCasero(nombre_grupo=form.nombre_grupo.data,
                             descripcion_grupo=form.descripcion_grupo.data,
                             id_direccion=form.id_direccion.data)
        try:
            db.session.add(obj_gc)
            db.session.commit()
            flash('Has guardado los datos correctamente', 'success')
            status = 'ok'
        except Exception as e:
            db.session.rollback()
            flash('Error: ' + str(e), 'danger')
            status = 'ko'
        url = url_for('ggcc.ver_ggcc')
        return jsonify(status=status, url=url)
    return render_template('ggcc/base_ggcc.html',
                           flag_crear=flag_crear,
                           flag_listar=flag_listar, form=form)
@ggcc.route('/ggcc/modificar/<int:id>',
            methods=['GET', 'POST'])
@login_required
def modif_gc(id):
    """Edit an existing home group.

    Fixes: (1) the original ``nombre_grupo`` assignment ended in a
    trailing comma, storing a 1-tuple instead of the string; (2)
    ``form_dir`` was only built on GET, so the re-render after a failed
    POST validation raised NameError; (3) rollback after failed commit.
    """
    check_edit_or_admin()
    flag_crear = False
    flag_listar = False
    obj_gc = GrupoCasero.query.get_or_404(id)
    # Build the address form unconditionally: the template at the end
    # needs it both on GET and on a POST that fails validation.
    obj_dir = Direccion.query.get_or_404(obj_gc.id_direccion)
    form_dir = DireccionModalForm(obj=obj_dir)
    form_gc = GGCCForm(obj=obj_gc)
    if form_gc.validate_on_submit():
        obj_gc.nombre_grupo = form_gc.nombre_grupo.data
        obj_gc.descripcion_grupo = form_gc.descripcion_grupo.data
        obj_gc.id_direccion = form_gc.id_direccion.data
        try:
            # confirm all pending changes in the db
            db.session.commit()
            flash('Has guardado los datos correctamente', 'success')
            status = 'ok'
        except Exception as e:
            db.session.rollback()
            flash('Error: ' + str(e), 'danger')
            status = 'ko'
        url = url_for('ggcc.ver_ggcc')
        return jsonify(status=status, url=url)
    return render_template(
        'ggcc/base_ggcc.html', flag_crear=flag_crear,
        flag_listar=flag_listar, form_gc=form_gc, form_dir=form_dir)
@ggcc.route('/ggcc/borrar/<int:id>',
            methods=['GET'])
@login_required
def borrar_gc(id):
    """Delete the home group *id* and return to the listing.

    Fix: rolls back the session after a failed delete/commit so it
    stays usable for the remainder of the request.
    """
    check_edit_or_admin()
    obj_gc = GrupoCasero.query.get_or_404(id)
    try:
        db.session.delete(obj_gc)
        db.session.commit()
        flash('Has borrado los datos correctamente.', 'success')
    except Exception as e:
        db.session.rollback()
        flash('Error: ' + str(e), 'danger')
    return redirect(url_for('ggcc.ver_ggcc'))
@ggcc.route('/ggcc/asignar', methods=['GET'])
@login_required
def ver_ggcc_asignar():
    """Entry screen for assigning members: lists every home group with
    address and member count.

    NOTE(review): this query duplicates ver_ggcc() verbatim — a shared
    helper would remove the duplication.
    """
    check_edit_or_admin()
    flag_listar = True
    # Subquery: members per group, exposed as column 'contar'.
    nro_personas = db.session.query(Miembro.id_grupocasero,
                                    func.count(Miembro.id_grupocasero)
                                    .label('contar'))\
                             .group_by(Miembro.id_grupocasero).subquery()
    # Outer join keeps groups with no members (contar is NULL).
    query_ggcc = db.session.query(GrupoCasero)\
                           .join(Direccion,
                                 GrupoCasero.id_direccion ==
                                 Direccion.id)\
                           .outerjoin(nro_personas,
                                      GrupoCasero.id ==
                                      nro_personas.c.id_grupocasero)\
                           .add_columns(
                                GrupoCasero.id,
                                GrupoCasero.nombre_grupo,
                                GrupoCasero.descripcion_grupo,
                                Direccion.tipo_via,
                                Direccion.nombre_via,
                                Direccion.nro_via,
                                Direccion.portalescalotros_via,
                                Direccion.cp_via,
                                Direccion.ciudad_via,
                                Direccion.provincia_via,
                                Direccion.pais_via,
                                nro_personas.c.contar)
    return render_template('ggcc/base_ggcc_asignar.html',
                           ggcc=query_ggcc,
                           flag_listar=flag_listar)
@ggcc.route('/ggcc/asignar/miembros/<int:id>',
            methods=['GET', 'POST'])
@login_required
def asignar_miembros(id):
    """Assign members to the home group *id*.

    GET renders two lists (members in the group / members without any
    group) plus hidden fields holding their ids as "1,2,3," strings.
    POST receives the edited lists and rewires ``id_grupocasero``.

    Fixes: the id strings are built with ``join`` instead of quadratic
    concatenation (same wire format, trailing comma included); a
    rollback is issued after a failed commit; the unreachable trailing
    return after the exhaustive if/elif/else was removed.
    """
    check_edit_or_admin()
    flag_listar = False
    FormMiembros = AsignacionMiembrosForm()
    if request.method == 'GET':
        obj_gc = GrupoCasero.query.get_or_404(id)
        form_gc = GGCCForm(obj=obj_gc)
        # Members currently in this group.
        obj_miembros_in = db.session.query(Miembro.id, Miembro.nombres,
                                           Miembro.apellidos)\
                                    .filter(Miembro.id_grupocasero == id)\
                                    .all()
        # Members not assigned to any group (0 or NULL).
        obj_miembros_out = db.session.query(Miembro.id, Miembro.nombres,
                                            Miembro.apellidos)\
                                     .filter(or_(
                                        Miembro.id_grupocasero == 0,
                                        Miembro.id_grupocasero.is_(None)))\
                                     .all()
        # Serialize ids into the hidden fields: each id followed by a
        # comma ("" when empty) — the POST branch strips it with [:-1].
        FormMiembros.ids_in.data = "".join(str(m.id) + ","
                                           for m in obj_miembros_in)
        FormMiembros.ids_out.data = "".join(str(m.id) + ","
                                            for m in obj_miembros_out)
        return render_template('ggcc/base_ggcc_asignar.html',
                               form_gc=form_gc,
                               miembros_in=obj_miembros_in,
                               miembros_out=obj_miembros_out,
                               flag_listar=flag_listar,
                               FormMiembros=FormMiembros)
    elif FormMiembros.validate_on_submit():
        # Drop the trailing comma, then split into individual ids.
        ids_in = FormMiembros.ids_in.data[:-1].split(",")
        ids_out = FormMiembros.ids_out.data[:-1].split(",")
        obj_in = Miembro.query.filter(Miembro.id.in_(ids_in))
        obj_out = Miembro.query.filter(
                        or_(Miembro.id.in_(ids_out),
                            Miembro.id.is_(None)))
        # Unlink the members moved out of the group...
        for o in obj_out:
            o.id_grupocasero = None
        # ...and link the newly assigned ones.
        for m in obj_in:
            m.id_grupocasero = id
        try:
            db.session.commit()
            flash('Has guardado los datos correctamente', 'success')
        except Exception as e:
            db.session.rollback()
            flash('Error: ' + str(e), 'danger')
        url = url_for('ggcc.ver_ggcc_asignar')
        return jsonify(url=url)
    else:
        # POST that failed validation.
        flash('Los datos de miembros no han podido modificarse', 'danger')
        return redirect(url_for('ggcc.ver_ggcc_asignar'))
@ggcc.route('/direcciones/loadFormNueva')
@login_required
def cargarForm_direccionblanco():
    """Serve an empty address form for the 'new address' modal."""
    check_edit_or_admin()
    blank_form = DireccionModalForm(prefix="m_")
    return render_template('ggcc/_modal_direccion_agregar.html',
                           form=blank_form)
@ggcc.route('/direcciones/loadFormUsar')
@login_required
def cargarForm_direcciones():
    """Serve a paginated list of existing addresses for the modal."""
    check_edit_or_admin()
    per_page = 10
    q = request.args.get('q')
    search = bool(q)
    page = request.args.get(get_page_parameter(), type=int, default=1)
    nro_dirs = db.session.query(Direccion).count()
    # Manual offset/limit pagination over the Direccion table.
    query_dir = Direccion.query.offset((page - 1) * per_page).limit(per_page)
    pagination = Pagination(page=page, total=nro_dirs,
                            search=search, record_name='query_dir',
                            css_framework='bootstrap4')
    return render_template('ggcc/_modal_direccion_usar.html',
                           direcciones=query_dir,
                           pagination=pagination)
@ggcc.route('/direcciones/loadFormMisma/<int:id>')
@login_required
def cargar_direccion(id):
    """Serve the modal form pre-filled with the address *id*."""
    check_edit_or_admin()
    direccion = Direccion.query.get_or_404(id)
    return render_template('ggcc/_modal_direccion_misma.html',
                           form=DireccionModalForm(obj=direccion))
@ggcc.route('/direcciones/loadDir/<int:id>')
@login_required
def cargar_Direccion(id):
    """Serve the inline sub-form pre-filled with the address *id*.

    NOTE(review): name differs from cargar_direccion only by case; it
    cannot be renamed here without breaking url_for references.
    """
    check_edit_or_admin()
    direccion = Direccion.query.get_or_404(id)
    return render_template('ggcc/_sub_direccion.html',
                           form=DireccionModalForm(obj=direccion))
@ggcc.route('/direcciones/creardireccion', methods=['POST'])
@login_required
def crear_nuevadir():
    """Create a new address from the modal form (AJAX endpoint).

    Returns JSON: {status:'ok', id} on success, {status:'ko...'} on a
    DB error (shape kept for client compatibility), or
    {status:'v_error', errores:[field names]} on validation failure.

    Fix: rolls back the session after a failed flush/commit.
    """
    check_edit_or_admin()
    form = DireccionModalForm(prefix="m_")
    if form.validate_on_submit():
        obj_dir = Direccion(
                    tipo_via=form.tipo_via.data,
                    nombre_via=form.nombre_via.data,
                    nro_via=form.nro_via.data,
                    portalescalotros_via=form.portalescalotros_via.data,
                    piso_nroletra_via=form.piso_nroletra_via.data,
                    cp_via=form.cp_via.data,
                    ciudad_via=form.ciudad_via.data,
                    provincia_via=form.provincia_via.data,
                    pais_via=form.pais_via.data)
        try:
            db.session.add(obj_dir)
            # flush() assigns the primary key before commit so it can
            # be returned to the caller.
            db.session.flush()
            dirid = obj_dir.id
            db.session.commit()
            return jsonify(status='ok', id=dirid)
        except Exception as e:
            db.session.rollback()
            return jsonify(status='ko'+str(e))
    else:
        # Validation failed: one entry per individual field error, as
        # the client expects.
        errores = []
        for field, errors in form.errors.items():
            for _ in errors:
                errores.append(getattr(form, field).name)
        return jsonify(status='v_error', errores=errores)
@ggcc.route('/direcciones/modifdiractual/<int:id>', methods=['POST'])
@login_required
def modif_diractual(id):
    """Update the address *id* in place from the posted modal form.

    Fixes: every assignment except the last carried a trailing comma in
    the original, storing 1-tuples instead of strings in eight of the
    nine columns; also rolls back the session after a failed commit.
    """
    check_edit_or_admin()
    obj_dir = Direccion.query.get_or_404(id)
    form_dir = DireccionModalForm()
    if form_dir.validate_on_submit():
        obj_dir.tipo_via = form_dir.tipo_via.data
        obj_dir.nombre_via = form_dir.nombre_via.data
        obj_dir.nro_via = form_dir.nro_via.data
        obj_dir.portalescalotros_via = form_dir.portalescalotros_via.data
        obj_dir.piso_nroletra_via = form_dir.piso_nroletra_via.data
        obj_dir.cp_via = form_dir.cp_via.data
        obj_dir.ciudad_via = form_dir.ciudad_via.data
        obj_dir.provincia_via = form_dir.provincia_via.data
        obj_dir.pais_via = form_dir.pais_via.data
        try:
            db.session.commit()
            return jsonify(status='ok', id=id)
        except Exception as e:
            db.session.rollback()
            return jsonify(status='ko'+str(e))
    else:
        # Validation failed: one entry per individual field error.
        errores = []
        for field, errors in form_dir.errors.items():
            for _ in errors:
                errores.append(getattr(form_dir, field).name)
        return jsonify(status='v_error', errores=errores)
| {
"repo_name": "originaltebas/chmembers",
"path": "app/ggcc/views.py",
"copies": "1",
"size": "14704",
"license": "mit",
"hash": -6348897118569564000,
"line_mean": 34.476426799,
"line_max": 79,
"alpha_frac": 0.5153741497,
"autogenerated": false,
"ratio": 3.4571966133584198,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.447257076305842,
"avg_score": null,
"num_lines": null
} |
# app/home/views.py
from flask import Blueprint, render_template
# Blueprint for the public "home" pages; templates and static files
# are resolved relative to this package.
home = Blueprint(
    'home',
    __name__,
    template_folder='templates',
    static_folder='static'
)
@home.route('/')
def index():
    """Render the public landing page."""
    return render_template('home.html')
@home.route('/developers')
def developers():
    """Render the contributors page with name/github/avatar cards."""
    people = [
        ('Brasil', 'https://github.com/raphaabrasil',
         'img/developers/brasil.jpg'),
        ('Darlene', 'https://github.com/darlenedms',
         'img/developers/darlene.jpg'),
        ('Hugo', 'https://github.com/hugoantunes',
         'img/developers/hugo.jpg'),
        ('Lusac', 'https://github.com/lusac/',
         'img/developers/lusac.jpg'),
        ('Tarsis', 'https://github.com/tarsisazevedo',
         'img/developers/tarsis.jpg'),
    ]
    developers = [{'name': name, 'github': github, 'image': image}
                  for name, github, image in people]
    return render_template('developers.html', developers=developers)
| {
"repo_name": "lusac/coders",
"path": "coders/home/views.py",
"copies": "1",
"size": "1164",
"license": "mit",
"hash": 2523670560307702000,
"line_mean": 23.7659574468,
"line_max": 68,
"alpha_frac": 0.5042955326,
"autogenerated": false,
"ratio": 3.5925925925925926,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4596888125192593,
"avg_score": null,
"num_lines": null
} |
# app/home/views.py
# coding: utf-8
from flask_login import current_user, login_required, user_logged_in
from flask import redirect, url_for, render_template
from app.home import home
from app import db
from app.models import Miembro, Familia, GrupoCasero
from sqlalchemy import and_, func
@home.route('/')
def homepage():
    """Landing route: send anonymous visitors to login, everyone else
    to the role-aware hub.

    Fix: the original tested ``if not user_logged_in`` where
    ``user_logged_in`` is the flask-login *signal object* (always
    truthy), so the login branch was unreachable.  Test the real
    authentication state of ``current_user`` instead.
    """
    if not current_user.is_authenticated:
        return redirect(url_for('auth.login'))
    else:
        return redirect(url_for('home.hub'))
@home.route('/noaccess')
@login_required
def noaccess():
    """Page for logged-in users who are neither admin nor editor.

    Currently only shows a message explaining the missing privileges.
    """
    template = 'home/index_noaccess.html'
    return render_template(template)
@home.route('/hub')
@login_required
def hub():
    """Redirect the user to the dashboard matching their role
    (2 = admin, 1 = editor, anything else = no access).
    """
    role = current_user.get_urole()
    if role == 2:
        target = 'home.dashboard_admin'
    elif role == 1:
        target = 'home.dashboard_editor'
    else:
        target = 'home.noaccess'
    return redirect(url_for(target))
@home.route('/dashboard_admin', methods=['GET', 'POST'])
@login_required
def dashboard_admin():
    """Admin control panel — the main page linking to everything else.

    Aggregates the people / family / home-group summary panels.
    """
    # Prevent non-admins from accessing the page.
    if not current_user.is_admin():
        return redirect(url_for('home.noaccess'))
    context = {
        'panel_p': datos_personas(),
        'panel_f': datos_familias(),
        'panel_gc': datos_gruposcaseros(),
    }
    return render_template('home/index_admin.html', **context)
@home.route('/dashboard_editor')
@login_required
def dashboard_editor():
    """Editor control panel.

    Restricts access to private per-person data such as follow-ups.
    """
    if current_user.get_urole() != 1:
        return redirect(url_for('home.noaccess'))
    context = {
        'panel_p': datos_personas(),
        'panel_f': datos_familias(),
        'panel_gc': datos_gruposcaseros(),
    }
    return render_template('home/index_editor.html', **context)
def datos_personas():
    """Return [total people, members, attendees, others] counts.

    Member type ids: 1 = member, 2 = attendee, everything else = other.
    """
    base = db.session.query(Miembro)
    total_personas = base.count()
    total_miembros = base.filter(Miembro.id_tipomiembro == 1).count()
    total_asistentes = base.filter(Miembro.id_tipomiembro == 2).count()
    total_otros = base.filter(and_(Miembro.id_tipomiembro != 1,
                                   Miembro.id_tipomiembro != 2)).count()
    return [total_personas, total_miembros, total_asistentes, total_otros]
def datos_familias():
    """Return [families, adults, youths, children] counts."""
    from datetime import date
    today = date.today()
    total_familias = db.session.query(Familia).count()
    # NOTE(review): datediff() typically yields *days*, yet the
    # thresholds read like ages in years (25 / 13) — verify against the
    # DB dialect in use.
    edad = func.datediff(today, Miembro.fecha_nac)
    total_adultos = db.session.query(Miembro)\
                              .filter(edad > 25)\
                              .count()
    total_jovenes = db.session.query(Miembro)\
                              .filter(and_(edad <= 25, edad >= 13))\
                              .count()
    total_ninios = db.session.query(Miembro)\
                             .filter(edad < 13)\
                             .count()
    return [total_familias, total_adultos, total_jovenes, total_ninios]
def datos_gruposcaseros():
    """Return [group count, people assigned to any group, per-group
    (name, member_count) rows] for the dashboard panels.

    Fix: removed a leftover debug ``print`` of the result set.
    """
    total_gc = db.session.query(GrupoCasero).count()
    total_personas = db.session.query(Miembro)\
                               .filter(Miembro.id_grupocasero.isnot(None)).count()
    # Member count per group; outer join keeps empty groups (count 0).
    grupos = db.session.query(GrupoCasero.nombre_grupo)\
                       .outerjoin(Miembro,
                                  GrupoCasero.id ==
                                  Miembro.id_grupocasero)\
                       .add_columns(func.count(Miembro.id))\
                       .group_by(GrupoCasero.nombre_grupo)\
                       .all()
    return [total_gc, total_personas, grupos]
| {
"repo_name": "originaltebas/chmembers",
"path": "app/home/views.py",
"copies": "1",
"size": "4472",
"license": "mit",
"hash": -5548658753815193000,
"line_mean": 29.7092198582,
"line_max": 83,
"alpha_frac": 0.5752628047,
"autogenerated": false,
"ratio": 3.3069526627218937,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4382215467421894,
"avg_score": null,
"num_lines": null
} |
# app imports
from oweb.models import Supply1, Supply2, Supply3, Supply4, Supply12, Station14, Station15, Civil212, Research113, Research122
from oweb.libs.production import get_metal_production, get_crystal_production, get_deuterium_production, get_plasma_bonus, get_capacity, get_sat_production, get_energy_production
from oweb.libs.costs import costs_onepointfive, costs_onepointsix, costs_two
from oweb.libs.shortcuts import get_object_or_404
def get_mse(resources, trade):
    """Convert a resource tuple to its metal-standard equivalent (MSE).

    :param resources: resource amounts (metal, crystal, deuterium, ...);
        only the first three entries are used
    :type resources: tuple
    :param trade: the account's trading rates (metal, crystal, deut)
    :type trade: tuple
    :returns: int -- combined value expressed in metal units
    """
    metal_rate = trade[0]
    value = resources[0]
    value = value + (metal_rate / float(trade[1]) * resources[1])
    value = value + (metal_rate / float(trade[2]) * resources[2])
    return int(value)
def queue_item(id, name, level,
               next_cost, next_prod, this_prod, trade,
               this_capacity, next_capacity, next_cap_cost, next_cap_time,
               required_energy, required_sats,
               planet):
    """Returns a queue item, which is a tuple of related values in a
    specific order: (score, required_sats, this_build_time,
    need_capacity, details-dict).

    :param id: The ID of this item
    :param name: The name of this item
    :param level: The level of this item
    :param next_cost: The resource tuple with the item's costs
    :param next_prod: This item's production in MSE
    :param this_prod: Production of the item's current level in MSE
    :param trade: A tuple with the account's trading rates
    :param this_capacity: The current capacity of the planet. Does not
        have to be set for researches.
    :param next_capacity: The capacity if Robotics/Nanites are built up
    :param next_cap_cost: The costs for that build-up in MSE
    :param next_cap_time: The time to build up the Robotics/Nanites
    :param required_energy: The required energy of this item
    :param required_sats: Satellites needed to even the energy need
    :param planet: The planet of this item (can be a Research's account)
    :returns: tuple -- a queue item

    Fix vs. original: a local ``amortisation`` value was computed but
    never used (dead code, and an unguarded source of confusion); it
    has been removed.  The returned tuple is unchanged.
    """
    # Defaults also cover the research case (e.g. Plasma), where no
    # capacities are passed in.
    need_capacity = 0
    this_build_time = 0
    # costs of the next level, expressed in MSE
    next_cost_mse = get_mse(next_cost, trade)
    # production gain of the next level over the current one
    gain = next_prod - this_prod
    # Building-time consideration: would upgrading Robotics/Nanites
    # first pay off for this build?
    if this_capacity and next_capacity:
        ress = next_cost[0] + next_cost[1] + next_cost[2]
        this_build_time = ress / float(this_capacity)
        next_build_time = ress / float(next_capacity)
        cap_bonus = (this_build_time - (next_cap_time + next_build_time)) * gain
        if cap_bonus < next_cap_cost:
            need_capacity = 0
        else:
            need_capacity = 1
    # Score = cost per MSE gained (lower is better); items with no gain
    # are pushed to the end with a sentinel score.
    try:
        score = int(next_cost_mse / gain)
    except ZeroDivisionError:
        score = 1000000000000
    # normalize required sats
    if required_sats < 0:
        required_sats = 0
    return (score,
            required_sats,
            this_build_time,
            need_capacity,
            {
                'id': id,
                'name': name,
                'level': level,
                'gain': gain,
                'required_energy': -1 * required_energy,
                'planet': planet,
            })
def get_planet_queue(planet,
                     speed=None,
                     trade=None,
                     supply1=None,
                     supply2=None,
                     supply3=None,
                     supply4=None,
                     supply12=None,
                     station14=None,
                     station15=None,
                     civil212=None,
                     research113=None):
    """Returns the build queue for a given planet.

    :param planet: The planet in question
    :type planet: Planet object
    :param speed: The account's speed (Default: None)
    :type speed: int
    :param trade: The account's trading rates (Default: None)
    :type trade: tuple
    :param supply1: This planet's metal mine (default: None)
    :type supply1: Supply1 object
    :param supply2: This planet's crystal mine (default: None)
    :type supply2: Supply2 object
    :param supply3: This planet's deuterium synthesizer (default: None)
    :type supply3: Supply3 object
    :param supply4: This planet's solar plant (default: None)
    :type supply4: Supply4 object
    :param supply12: This planet's fusion plant (default: None)
    :type supply12: Supply12 object
    :param station14: This planet's robotics factory (default: None)
    :type station14: Station14 object
    :param station15: This planet's nanite factory (default: None)
    :type station15: Station15 object
    :param civil212: This planet's solar satellites (default: None)
    :type civil212: Civil212 object
    :param research113: This account's energy technology (default: None)
    :type research113: Research113 object
    :returns: list -- the build queue of a planet, sorted by score

    Most of the parameters of this function are optional, but
    necessary.  If they are not specified while calling this function,
    they will be fetched.  The queue holds the next 5 levels of each of
    the three mines, scored by cost per production gain (see
    queue_item()).
    """
    # Fetch any model object the caller did not supply.
    # Metal
    if not supply1:
        supply1 = get_object_or_404(Supply1, astro_object=planet.id)
    # Crystal
    if not supply2:
        supply2 = get_object_or_404(Supply2, astro_object=planet.id)
    # Deut
    if not supply3:
        supply3 = get_object_or_404(Supply3, astro_object=planet.id)
    # Solar
    if not supply4:
        supply4 = get_object_or_404(Supply4, astro_object=planet.id)
    # Fusion
    if not supply12:
        supply12 = get_object_or_404(Supply12, astro_object=planet.id)
    # Robo
    if not station14:
        station14 = get_object_or_404(Station14, astro_object=planet.id)
    # Nani
    if not station15:
        station15 = get_object_or_404(Station15, astro_object=planet.id)
    # Sat
    if not civil212:
        civil212 = get_object_or_404(Civil212, astro_object=planet.id)
    # Energy
    if not research113:
        research113 = get_object_or_404(Research113, account=planet.account.id)
    # account speed
    if not speed:
        speed = planet.account.speed
    # trade rates
    if not trade:
        trade = (planet.account.trade_metal, planet.account.trade_crystal, planet.account.trade_deut)
    # calculate current metal production
    this_metal_prod = get_metal_production(supply1.level, speed=speed)
    this_metal_prod_mse = get_mse(
        this_metal_prod,
        trade
    )
    # calculate current crystal production
    this_crystal_prod = get_crystal_production(supply2.level, speed=speed)
    this_crystal_prod_mse = get_mse(
        this_crystal_prod,
        trade
    )
    # calculate current deuterium production
    this_deut_prod = get_deuterium_production(supply3.level, temp=planet.max_temp, speed=speed)
    this_deut_prod_mse = get_mse(
        this_deut_prod,
        trade
    )
    # calculate planet's energy
    sol, fus, sat = get_energy_production(
        supply4.level,
        supply12.level,
        civil212.count,
        temp=planet.max_temp,
        energy=research113.level,
        max_performance=True)
    # Index [3] of each production tuple carries the energy component;
    # sum production minus consumption into the planet's energy balance.
    planet_energy = tuple(sum(x) for x in zip(sol, fus, sat))[3]
    planet_energy += this_metal_prod[3]
    planet_energy += this_crystal_prod[3]
    planet_energy += this_deut_prod[3]
    # how much can one sat produce?
    sat_prod = get_sat_production(1, temp=planet.max_temp)[3]
    # calculate current capacity (ress per hour)
    this_capacity = get_capacity(station14.level, station15.level, speed)
    # Beyond Robotics level 9 the next capacity step is a Nanite level;
    # below that it is another Robotics level.
    if station14.level > 9:
        next_capacity = get_capacity(station14.level, station15.level + 1, speed)
        next_cap_cost = costs_two(station15.base_cost, station15.level, offset=1)
    else:
        next_capacity = get_capacity(station14.level + 1, station15.level, speed)
        next_cap_cost = costs_two(station14.base_cost, station14.level, offset=1)
    next_cap_cost_mse = get_mse(next_cap_cost, trade)
    next_cap_time = (next_cap_cost[0] + next_cap_cost[1] + next_cap_cost[2]) / float(this_capacity)
    queue = []
    # Score the next 5 levels of each mine.
    for i in range(1, 6):
        # *** Metal Mine
        # cost of this level
        next_cost = costs_onepointfive(supply1.base_cost, supply1.level, offset=i)
        # production of this level
        next_metal_prod = get_metal_production(supply1.level + i, speed=speed)
        next_metal_prod_mse = get_mse(
            next_metal_prod,
            trade
        )
        # energy (number of sats to determine the energy consumption)
        this_energy = -1 * (planet_energy + next_metal_prod[3] - this_metal_prod[3])
        required_sats = this_energy / sat_prod
        queue.append(queue_item(
            supply1.id,
            supply1.name,
            supply1.level + i,
            next_cost,
            next_metal_prod_mse,
            this_metal_prod_mse,
            trade,
            this_capacity,
            next_capacity,
            next_cap_cost_mse,
            next_cap_time,
            this_energy,
            required_sats,
            planet))
        this_metal_prod_mse = next_metal_prod_mse
        # *** Crystal Mine
        # cost of this level
        next_cost = costs_onepointsix(supply2.base_cost, supply2.level, offset=i)
        # production of this level
        next_crystal_prod = get_crystal_production(supply2.level + i, speed=speed)
        next_crystal_prod_mse = get_mse(
            next_crystal_prod,
            trade
        )
        # energy (number of sats to determine the energy consumption)
        this_energy = -1 * (planet_energy + next_crystal_prod[3] - this_crystal_prod[3])
        required_sats = this_energy / sat_prod
        queue.append(queue_item(
            supply2.id,
            supply2.name,
            supply2.level + i,
            next_cost,
            next_crystal_prod_mse,
            this_crystal_prod_mse,
            trade,
            this_capacity,
            next_capacity,
            next_cap_cost_mse,
            next_cap_time,
            this_energy,
            required_sats,
            planet))
        this_crystal_prod_mse = next_crystal_prod_mse
        # *** Deuterium Synthesizer
        # cost of this level
        next_cost = costs_onepointfive(supply3.base_cost, supply3.level, offset=i)
        # production of this level
        next_deut_prod = get_deuterium_production(supply3.level + i, temp=planet.max_temp, speed=speed)
        next_deut_prod_mse = get_mse(
            next_deut_prod,
            trade
        )
        # energy (number of sats to determine the energy consumption)
        this_energy = -1 * (planet_energy + next_deut_prod[3] - this_deut_prod[3])
        required_sats = this_energy / sat_prod
        queue.append(queue_item(
            supply3.id,
            supply3.name,
            supply3.level + i,
            next_cost,
            next_deut_prod_mse,
            this_deut_prod_mse,
            trade,
            this_capacity,
            next_capacity,
            next_cap_cost_mse,
            next_cap_time,
            this_energy,
            required_sats,
            planet))
        this_deut_prod_mse = next_deut_prod_mse
    # queue items sort by their leading score element (lowest first).
    queue.sort()
    return queue
def get_plasma_queue(account, research122=None, production=(0, 0, 0, 0)):
    """Returns the queue of plasma technology.

    :param account: The account in question
    :type account: Account object
    :param research122: The Plasma Technology (default: None)
    :type research122: Research122 object
    :param production: The current production (default: (0, 0, 0, 0))
    :type production: tuple
    :returns: list -- the plasma queue (next 5 levels, scored)
    """
    if not research122:
        research122 = get_object_or_404(Research122, account=account.id)
    trade = (account.trade_metal, account.trade_crystal, account.trade_deut)
    # MSE value of the bonus at the current plasma level.
    this_prod = get_mse(
        get_plasma_bonus(research122.level, production[0], production[1]),
        trade
    )
    queue = []
    # Score the next 5 plasma levels; capacity/energy arguments are not
    # applicable to researches and are passed as None/0.
    for i in range(1, 6):
        next_cost = costs_two(research122.base_cost, research122.level, offset=i)
        next_prod = get_mse(
            get_plasma_bonus(research122.level + i, production[0], production[1]),
            trade
        )
        queue.append(queue_item(
            research122.id,
            research122.name,
            research122.level + i,
            next_cost,
            next_prod,
            this_prod,
            trade,
            None,
            None,
            None,
            None,
            0,
            0,
            account))
        this_prod = next_prod
    return queue
| {
"repo_name": "Mischback/django-oweb",
"path": "oweb/libs/queue.py",
"copies": "1",
"size": "13231",
"license": "mit",
"hash": 620875766741333600,
"line_mean": 33.4557291667,
"line_max": 178,
"alpha_frac": 0.6086463608,
"autogenerated": false,
"ratio": 3.589527943570266,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9691557081806228,
"avg_score": 0.0013234445128077193,
"num_lines": 384
} |
# app imports
from oweb.models.planet import Moon
from oweb.models.building import Building, Supply1, Supply2, Supply3, Supply4, Supply12
from oweb.models.defense import Defense
from oweb.models.ship import Ship, Civil202, Civil203, Civil208, Civil210, Civil212
from oweb.models.research import Research
from oweb.libs.shortcuts import get_list_or_404
def get_planet_points(planet):
    """Aggregate a planet's points.

    Returns (planet_points, production_points, other_points,
    defense_points, moon_points, moon_buildings, moon_defense); points
    are the summed metal+crystal+deut costs.
    """
    buildings = get_list_or_404(Building, astro_object=planet)
    defense = get_list_or_404(Defense, astro_object=planet)
    production_classes = (Supply1, Supply2, Supply3, Supply4, Supply12)
    production_points = 0
    other_points = 0
    for building in buildings:
        cost = building.get_total_cost()
        points = cost[0] + cost[1] + cost[2]
        if building.content_type.model_class() in production_classes:
            production_points += points
        else:
            other_points += points
    defense_points = 0
    for item in defense:
        unit = item.as_real_class()
        defense_points += unit.count * (unit.cost[0] + unit.cost[1] + unit.cost[2])
    # Moons are optional; a missing moon contributes zero everywhere.
    try:
        moon = Moon.objects.get(planet=planet)
        moon_points, moon_buildings, moon_defense = get_moon_points(moon)
    except Moon.DoesNotExist:
        moon_points = 0
        moon_buildings = 0
        moon_defense = 0
    planet_points = production_points + other_points + defense_points + moon_points
    return planet_points, production_points, other_points, defense_points, moon_points, moon_buildings, moon_defense
def get_moon_points(moon):
    """Sum a moon's points.

    Returns (total, building_points, defense_points); points are the
    summed metal+crystal+deut costs.
    """
    buildings = get_list_or_404(Building, astro_object=moon)
    defense = get_list_or_404(Defense, astro_object=moon)
    other_points = 0
    for building in buildings:
        cost = building.get_total_cost()
        other_points += cost[0] + cost[1] + cost[2]
    defense_points = 0
    for item in defense:
        unit = item.as_real_class()
        defense_points += unit.count * (unit.cost[0] + unit.cost[1] + unit.cost[2])
    return other_points + defense_points, other_points, defense_points
def get_ship_points(account):
    """Split an account's ship points into civil and military.

    Returns (total, civil_points, military_points).
    """
    ships = get_list_or_404(Ship, account=account)
    civil_classes = (Civil202, Civil203, Civil208, Civil210, Civil212)
    civil_points = 0
    military_points = 0
    for ship in ships:
        real = ship.as_real_class()
        points = real.count * (real.cost[0] + real.cost[1] + real.cost[2])
        if ship.content_type.model_class() in civil_classes:
            civil_points += points
        else:
            military_points += points
    return civil_points + military_points, civil_points, military_points
def get_research_points(account):
    """Total points from all researches of an account (summed
    metal+crystal+deut total costs).
    """
    research = get_list_or_404(Research, account=account)
    points = 0
    for item in research:
        cost = item.get_total_cost()
        points += cost[0] + cost[1] + cost[2]
    return points
| {
"repo_name": "Mischback/django-oweb",
"path": "oweb/libs/points.py",
"copies": "1",
"size": "3265",
"license": "mit",
"hash": 1011170595512014300,
"line_mean": 33.0104166667,
"line_max": 119,
"alpha_frac": 0.6732006126,
"autogenerated": false,
"ratio": 3.1124880838894184,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.42856886964894186,
"avg_score": null,
"num_lines": null
} |
"""App Info handler."""
import logging
import six
from tornado.web import RequestHandler
from tornado.httpclient import AsyncHTTPClient, HTTPError
from tornado.escape import json_encode, json_decode
from tornado import gen
# Py2/3 compatibility: rebind the module-local ``str`` to always mean
# text (unicode) regardless of interpreter version.
str = six.text_type
log = logging.getLogger('tornadoappinfo.handlers')
class InfoHandler(RequestHandler):
    """Serve application info as JSON, including the status of any
    dependent services listed in ``app.info_dependencies``.
    """

    def initialize(self):
        # Async HTTP client used to probe dependent services.
        self.client = AsyncHTTPClient()

    @gen.coroutine
    def get(self):
        """Write ``app.info`` — plus a 'dependencies' map when the
        application declares dependencies — as the JSON response body.
        """
        app = self.application
        info = app.info
        deps = app.info_dependencies
        log.debug("Getting application information from {} collectors "
                  "and {} dependent services.".format(len(app.info),
                                                      len(deps or {})))
        if deps is not None:
            info['dependencies'] = yield self._get_dependencies()
        self.write(json_encode(info))

    @gen.coroutine
    def _get_dependencies(self):
        """Probe every dependent service and collect its info.

        Never raises: HTTP and connection errors are recorded per
        dependency as ``{'error': ..., 'code': ...}`` entries.  Every
        entry additionally carries the probed ``url``.
        """
        app = self.application
        deps = {}
        for dep, url in app.info_dependencies.items():
            # Fix: the original formatted (dep, dep), logging the
            # service name twice instead of its URL.
            log.info(
                "Fetching info about '{}' service @ '{}'".format(dep, url))
            try:
                resp = yield self.client.fetch(url)
            except HTTPError as http_err:
                log.error(
                    "Dependent service '{}' error @ '{}': {}"
                    "".format(dep, url, http_err))
                resp = http_err.response
                if resp:
                    deps[dep] = {
                        'code': resp.code,
                        'error': resp.body.decode()
                    }
                else:
                    deps[dep] = {
                        'error': str(http_err)
                    }
            except Exception as err:
                log.error(
                    "Dependent service '{}' connection error @ '{}': {}"
                    "".format(dep, url, err))
                deps[dep] = {'error': str(err)}
            else:
                deps[dep] = json_decode(resp.body)
            finally:
                deps[dep]['url'] = url
        raise gen.Return(deps)
| {
"repo_name": "adamkal/tornado-app-info",
"path": "tornadoappinfo/handlers.py",
"copies": "1",
"size": "2129",
"license": "mit",
"hash": 9053286779728882000,
"line_mean": 27.0131578947,
"line_max": 75,
"alpha_frac": 0.4899013621,
"autogenerated": false,
"ratio": 4.731111111111111,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5721012473211111,
"avg_score": null,
"num_lines": null
} |
# app/__init__.py
from flask import Flask, render_template, jsonify
from flask_nav import Nav, register_renderer
from flask_nav.elements import *
from flask_bootstrap.nav import BootstrapRenderer
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy import MetaData
from flask_login import LoginManager, current_user
from flask_migrate import Migrate
from flask_bootstrap import Bootstrap
from .default_settings import configs
# from flask_httpauth import HTTPBasicAuth
from .nav import configure_nav
convention = {
"ix": 'ix_%(column_0_label)s',
"uq": "uq_%(table_name)s_%(column_0_name)s",
"ck": "ck_%(table_name)s_%(constraint_name)s",
"fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
"pk": "pk_%(table_name)s"
}
metadata = MetaData(naming_convention=convention)
db = SQLAlchemy(metadata=metadata)
login_manager = LoginManager()
def create_app(config):
    """Application factory: build and configure the Flask app.

    Parameters:
        config: key into ``configs`` naming the settings object to use.

    Returns:
        The fully configured Flask application.

    Raises:
        ValueError: if ``config`` is not a recognised configuration name.
    """
    # BUG FIX: the error handlers below reference `request`, which was
    # never imported at module level; import it here so the nested
    # handlers do not raise NameError at request time.
    from flask import request
    app = Flask(__name__, static_folder='static', template_folder='templates', instance_relative_config=True)
    # Pull config
    if config not in configs.keys():
        raise ValueError('Config does not exists')
    app.config.from_object(configs[config])
    # pull in the instance config.
    if config != 'test':
        app.config.from_pyfile('instance.cfg')  # Should only contain info related to DB credentials and other server specific code that shouldn't go on github
    login_manager.init_app(app)
    login_manager.login_message = "Access Denied: Log in"
    login_manager.login_view = 'auth.login'
    Bootstrap(app)
    configure_nav(app)
    # Import the blueprints for views and register them.
    from .views.admin import admin
    from .views.auth import auth
    from .views.application import application
    from .views.api import api
    from .views.user import user
    app.register_blueprint(admin)
    app.register_blueprint(auth)
    app.register_blueprint(application)
    app.register_blueprint(api)
    app.register_blueprint(user)
    # Initialize database and alembic for data migrations
    db.init_app(app)
    migrate = Migrate(app, db)
    # Custom error routes: JSON for API clients, HTML pages otherwise.
    @app.errorhandler(401)
    def unauthorized(error):
        # BUG FIX: renamed from `internalservererror` (which collided with
        # the 500 handler below) and corrected the copy-pasted message.
        if request.accept_mimetypes.accept_json and not request.accept_mimetypes.accept_html:
            response = jsonify({'error': 'Unauthorized'})
            response.status_code = 401
            return response
        return render_template('errors/401.html'), 401
    @app.errorhandler(403)
    def forbidden(error):
        if request.accept_mimetypes.accept_json and not request.accept_mimetypes.accept_html:
            response = jsonify({'error': 'Action is forbidden, check the syntax of your api request'})
            response.status_code = 403
            return response
        return render_template('errors/403.html'), 403
    @app.errorhandler(404)
    def notfound(error):
        if request.accept_mimetypes.accept_json and not request.accept_mimetypes.accept_html:
            response = jsonify({'error': 'Not found'})
            response.status_code = 404
            return response
        return render_template('errors/404.html'), 404
    @app.errorhandler(500)
    def internalservererror(error):
        if request.accept_mimetypes.accept_json and not request.accept_mimetypes.accept_html:
            response = jsonify({'error': 'Internal server error'})
            response.status_code = 500
            return response
        return render_template('errors/500.html'), 500
    from app import models
    return app
| {
"repo_name": "jacobsky/shopping-aggregator",
"path": "shopping-aggregator/app/__init__.py",
"copies": "1",
"size": "3371",
"license": "apache-2.0",
"hash": -6227787028023402000,
"line_mean": 31.4134615385,
"line_max": 152,
"alpha_frac": 0.7522990211,
"autogenerated": false,
"ratio": 3.436289500509684,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9579684669758057,
"avg_score": 0.0217807703703254,
"num_lines": 104
} |
# app/__init__.py
#################
#### imports ####
#################
import os
from flask import Flask, render_template
from flask_login import LoginManager
from flask_bcrypt import Bcrypt
from flask_debugtoolbar import DebugToolbarExtension
from flask_bootstrap import Bootstrap
bcrypt = Bcrypt()
from models import db
################
#### config ####
################
def create_app():
    """Assemble the Flask application: config, extensions, blueprints,
    login machinery and error pages.
    """
    app = Flask(__name__)
    app.config.from_object(os.environ['APP_SETTINGS'])
    # -- extensions ----------------------------------------------------
    login_manager = LoginManager()
    login_manager.init_app(app)
    bcrypt.init_app(app)
    toolbar = DebugToolbarExtension(app)
    bootstrap = Bootstrap(app)
    db.init_app(app)
    # -- blueprints ----------------------------------------------------
    from mod_main.views import main_blueprint
    from mod_user.views import user_blueprint
    from mod_inventory.views import inventory_blueprint
    from mod_api.resources import api_module
    for blueprint in (user_blueprint, main_blueprint,
                      inventory_blueprint, api_module):
        app.register_blueprint(blueprint)
    # -- flask-login ---------------------------------------------------
    from models import User
    login_manager.login_view = "user.login"
    login_manager.login_message_category = 'danger'
    @login_manager.user_loader
    def load_user(user_id):
        # Resolve the session's user id back into a User row.
        return User.query.filter(User.id == int(user_id)).first()
    # -- error handlers ------------------------------------------------
    @app.errorhandler(403)
    def forbidden_page(error):
        return render_template("errors/403.html"), 403
    @app.errorhandler(404)
    def page_not_found(error):
        return render_template("errors/404.html"), 404
    @app.errorhandler(500)
    def server_error_page(error):
        return render_template("errors/500.html"), 500
    return app
| {
"repo_name": "paris3200/flask-inventory",
"path": "app/__init__.py",
"copies": "2",
"size": "1834",
"license": "mit",
"hash": 4404304111102660600,
"line_mean": 19.606741573,
"line_max": 62,
"alpha_frac": 0.6275899673,
"autogenerated": false,
"ratio": 3.5611650485436894,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5188755015843689,
"avg_score": null,
"num_lines": null
} |
"""App is defined here"""
# pylint: disable=invalid-name
import traceback
from flask import jsonify
from flask_cors import CORS
import connexion
from todo.config import DEBUG, HOST
from todo.database import session, create_tables
from todo.exceptions import NotFoundError, UnauthorizedError
def handle_general_exception(error):
    """Build a generic HTTP 500 JSON response for unhandled exceptions."""
    if DEBUG:
        # Surface the stack trace on the console while developing.
        traceback.print_exc()
    payload = jsonify({'error': str(error), 'status': 500})
    payload.status_code = 500
    return payload
def handle_client_error(error):
    """Serialize a client error (e.g. 404/401) into a JSON response,
    taking the status code from the exception itself.
    """
    body = jsonify(error.to_dict())
    body.status_code = error.status_code
    return body
# Build the connexion application and mount the OpenAPI specification.
app = connexion.FlaskApp(__name__, debug=DEBUG)
app.add_api('todo-api.yaml', arguments={'host': HOST})
# Register error handlers: the catch-all Exception handler plus the
# project's specific client-error types.
app.add_error_handler(Exception, handle_general_exception)
app.add_error_handler(NotFoundError, handle_client_error)
app.add_error_handler(UnauthorizedError, handle_client_error)
# The underlying Flask WSGI app (what WSGI servers should be pointed at).
application = app.app
CORS(application, origins='*')
create_tables()
@application.teardown_appcontext
def remove_sessions(exception=None):  # pylint: disable=unused-argument
    """Remove the scoped DB session when the app context tears down."""
    session.remove()
if __name__ == '__main__':
    app.run(port=9090)
| {
"repo_name": "kokimoribe/todo-api",
"path": "app.py",
"copies": "1",
"size": "1278",
"license": "mit",
"hash": 5678454548221348000,
"line_mean": 25.0816326531,
"line_max": 71,
"alpha_frac": 0.7269170579,
"autogenerated": false,
"ratio": 3.736842105263158,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4963759163163158,
"avg_score": null,
"num_lines": null
} |
"""AppKit helpers.
Exported functions:
* runEventLoop - run NSApplicationMain in a safer way
* runConsoleEventLoop - run NSRunLoop.run() in a stoppable manner
* stopEventLoop - stops the event loop or terminates the application
* endSheetMethod - set correct signature for NSSheet callbacks
* callAfter - call a function on the main thread (async)
* callLater - call a function on the main thread after a delay (async)
"""
__all__ = ( 'runEventLoop', 'runConsoleEventLoop', 'stopEventLoop', 'endSheetMethod', 'callAfter', 'callLater' )
from AppKit import (NSApp, NSRunAlertPanel, NSApplicationMain,
NSApplicationDidFinishLaunchingNotification)
from Foundation import (NSObject, NSRunLoop, NSTimer, NSDefaultRunLoopMode,
NSNotificationCenter, NSLog, NSAutoreleasePool)
import os
import sys
import traceback
import objc
class PyObjCAppHelperCaller(NSObject):
    # Helper NSObject used by callAfter/callLater to marshal a Python
    # callable (plus its arguments) onto the main Cocoa thread.
    def initWithArgs_(self, args):
        # Designated initializer; `args` is a (func, args, kwargs) tuple.
        self = self.init()
        self.args = args
        return self
    def callAfter_(self, sender):
        # Queue call_ on the main thread without waiting for completion.
        self.performSelectorOnMainThread_withObject_waitUntilDone_(
            self.call_, self.args, False)
    def callLater_(self, delay):
        # After `delay` seconds, invoke callAfter_ (which then hops
        # over to the main thread).
        self.performSelector_withObject_afterDelay_(
            self.callAfter_, None, delay)
    def call_(self, func_args_kwargs):
        # Runs on the main thread: unpack and execute the Python callable.
        (func, args, kwargs) = func_args_kwargs
        func(*args, **kwargs)
def callAfter(func, *args, **kwargs):
    """Schedule *func* to run on the main thread (asynchronously)."""
    pool = NSAutoreleasePool.alloc().init()
    caller = PyObjCAppHelperCaller.alloc().initWithArgs_((func, args, kwargs))
    caller.callAfter_(None)
    # Drop our references; the ObjC runtime keeps the queued call alive.
    del caller
    del pool
def callLater(delay, func, *args, **kwargs):
    """Schedule *func* to run on the main thread after *delay* seconds."""
    pool = NSAutoreleasePool.alloc().init()
    caller = PyObjCAppHelperCaller.alloc().initWithArgs_((func, args, kwargs))
    caller.callLater_(delay)
    # Drop our references; the ObjC runtime keeps the queued call alive.
    del caller
    del pool
class PyObjCAppHelperApplicationActivator(NSObject):
    # Observer that brings the app to the foreground once it has
    # finished launching (used when started from a terminal).
    def activateNow_(self, aNotification):
        NSApp().activateIgnoringOtherApps_(True)
class PyObjCAppHelperRunLoopStopper(NSObject):
    # Cooperative stop-flag for an event loop. One stopper may be
    # registered per NSRunLoop; the registry lives in `singletons`.
    singletons = {}
    def currentRunLoopStopper(cls):
        # Stopper registered for this thread's run loop, or None.
        runLoop = NSRunLoop.currentRunLoop()
        return cls.singletons.get(runLoop)
    currentRunLoopStopper = classmethod(currentRunLoopStopper)
    def init(self):
        self = super(PyObjCAppHelperRunLoopStopper, self).init()
        self.shouldStop = False
        return self
    def shouldRun(self):
        # True while the loop should keep iterating.
        return not self.shouldStop
    def addRunLoopStopper_toRunLoop_(cls, runLoopStopper, runLoop):
        # Only one stopper may be attached to a given run loop.
        if runLoop in cls.singletons:
            raise ValueError("Stopper already registered for this runLoop")
        cls.singletons[runLoop] = runLoopStopper
    addRunLoopStopper_toRunLoop_ = classmethod(addRunLoopStopper_toRunLoop_)
    def removeRunLoopStopperFromRunLoop_(cls, runLoop):
        if runLoop not in cls.singletons:
            raise ValueError("Stopper not registered for this runLoop")
        del cls.singletons[runLoop]
    removeRunLoopStopperFromRunLoop_ = classmethod(removeRunLoopStopperFromRunLoop_)
    def stop(self):
        self.shouldStop = True
        # this should go away when/if runEventLoop uses
        # runLoop iteration
        if NSApp() is not None:
            NSApp().terminate_(self)
    def performStop_(self, sender):
        # Selector entry point used by the NSTimer in stopEventLoop().
        self.stop()
def stopEventLoop():
    """Attempt to halt the active event loop.

    Returns True when the stop is expected to succeed, False otherwise.
    """
    current = PyObjCAppHelperRunLoopStopper.currentRunLoopStopper()
    if current is None:
        # No console run loop registered; fall back to terminating NSApp.
        if NSApp() is None:
            return False
        NSApp().terminate_(None)
        return True
    # Fire a zero-delay timer so the stop happens from within the loop.
    NSTimer.scheduledTimerWithTimeInterval_target_selector_userInfo_repeats_(
        0.0, current, 'performStop:', None, False)
    return True
def endSheetMethod(meth):
    """Wrap *meth* in a selector carrying the Objective-C signature
    required for NSSheet did-end delegate callbacks.
    """
    sheet_signature = b'v@:@ii'
    return objc.selector(meth, signature=sheet_signature)
def unexpectedErrorAlertPanel():
    """Show a modal alert summarizing the exception currently being
    handled; returns the panel's button result.
    """
    exc_summary = traceback.format_exception_only(
        *sys.exc_info()[:2])[0].strip()
    return NSRunAlertPanel("An unexpected error has occurred",
                           "%@",
                           "Continue", "Quit", None, "(%s)" % exc_summary)
def unexpectedErrorAlertPdb():
    """Print the active traceback, drop into pdb post-mortem, and
    signal the caller to continue running.
    """
    import pdb
    traceback.print_exc()
    pdb.post_mortem(sys.exc_info()[2])
    return True
def machInterrupt(signum):
    """SIGINT handler: stop the run loop if one is registered,
    otherwise terminate NSApp, otherwise hard-exit.
    """
    active = PyObjCAppHelperRunLoopStopper.currentRunLoopStopper()
    if active is not None:
        active.stop()
    elif NSApp() is not None:
        NSApp().terminate_(None)
    else:
        import os
        os._exit(1)
def installMachInterrupt():
    """Route SIGINT through MachSignals so Ctrl-C stops the event loop.

    Silently does nothing when PyObjCTools.MachSignals is unavailable.
    """
    try:
        import signal
        from PyObjCTools import MachSignals
    except ImportError:
        # BUG FIX: was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit; only a missing module is expected.
        return
    MachSignals.signal(signal.SIGINT, machInterrupt)
def runConsoleEventLoop(argv=None, installInterrupt=False, mode=NSDefaultRunLoopMode):
    """Run the current NSRunLoop (no NSApplication) until stopped.

    A PyObjCAppHelperRunLoopStopper is registered for the loop so that
    stopEventLoop() can end it; the stopper is always unregistered on exit.
    installInterrupt=True additionally routes SIGINT through MachSignals.
    """
    if argv is None:
        argv = sys.argv
    if installInterrupt:
        installMachInterrupt()
    runLoop = NSRunLoop.currentRunLoop()
    stopper = PyObjCAppHelperRunLoopStopper.alloc().init()
    PyObjCAppHelperRunLoopStopper.addRunLoopStopper_toRunLoop_(stopper, runLoop)
    try:
        while stopper.shouldRun():
            # limitDateForMode_ returns the next timer fire date and may
            # itself flip the stop flag, hence the re-check before running.
            nextfire = runLoop.limitDateForMode_(mode)
            if not stopper.shouldRun():
                break
            # runMode_beforeDate_ returns False when no sources/timers
            # remain; treat that as the end of the loop.
            if not runLoop.runMode_beforeDate_(mode, nextfire):
                stopper.stop()
    finally:
        PyObjCAppHelperRunLoopStopper.removeRunLoopStopperFromRunLoop_(runLoop)
RAISETHESE = (SystemExit, MemoryError, KeyboardInterrupt)
def runEventLoop(argv=None, unexpectedErrorAlert=None, installInterrupt=None, pdb=None, main=NSApplicationMain):
    """Run the event loop, ask the user if we should continue if an
    exception is caught. Use this function instead of NSApplicationMain().

    pdb defaults to the presence of USE_PDB in the environment; when
    enabled, exceptions drop into a verbose handler / pdb post-mortem.
    `main` is invoked once on the first pass; later passes re-enter via
    NSApp().run().
    """
    if argv is None:
        argv = sys.argv
    if pdb is None:
        pdb = 'USE_PDB' in os.environ
    if pdb:
        from PyObjCTools import Debugging
        Debugging.installVerboseExceptionHandler()
        # bring it to the front, starting from terminal
        # often won't
        activator = PyObjCAppHelperApplicationActivator.alloc().init()
        NSNotificationCenter.defaultCenter().addObserver_selector_name_object_(
            activator,
            'activateNow:',
            NSApplicationDidFinishLaunchingNotification,
            None,
        )
    else:
        # Sentinel so the finally-block knows no handler was installed.
        Debugging = None
    if installInterrupt is None and pdb:
        installInterrupt = True
    if unexpectedErrorAlert is None:
        if pdb:
            unexpectedErrorAlert = unexpectedErrorAlertPdb
        else:
            unexpectedErrorAlert = unexpectedErrorAlertPanel
    runLoop = NSRunLoop.currentRunLoop()
    stopper = PyObjCAppHelperRunLoopStopper.alloc().init()
    PyObjCAppHelperRunLoopStopper.addRunLoopStopper_toRunLoop_(stopper, runLoop)
    # First pass boots the app via `main`; subsequent passes (after a
    # handled exception) re-enter the already-created NSApp.
    firstRun = NSApp() is None
    try:
        while stopper.shouldRun():
            try:
                if firstRun:
                    firstRun = False
                    if installInterrupt:
                        installMachInterrupt()
                    main(argv)
                else:
                    NSApp().run()
            except RAISETHESE:
                # SystemExit / MemoryError / KeyboardInterrupt: never
                # swallow these -- print and leave the loop.
                traceback.print_exc()
                break
            except:
                exctype, e, tb = sys.exc_info()
                objc_exception = False
                if isinstance(e, objc.error):
                    # NOTE(review): `unicode` exists only on Python 2;
                    # on Python 3 this line would raise NameError.
                    NSLog("%@", unicode(str(e), 'utf-8', 'replace'))
                elif not unexpectedErrorAlert():
                    # User chose not to continue: log and exit cleanly.
                    NSLog("%@", "An exception has occured:")
                    traceback.print_exc()
                    sys.exit(0)
                else:
                    NSLog("%@", "An exception has occured:")
                    traceback.print_exc()
            else:
                # Loop body completed without an exception: normal exit.
                break
    finally:
        if Debugging is not None:
            Debugging.removeExceptionHandler()
        PyObjCAppHelperRunLoopStopper.removeRunLoopStopperFromRunLoop_(runLoop)
| {
"repo_name": "ariabuckles/pyobjc-framework-Cocoa",
"path": "Lib/PyObjCTools/AppHelper.py",
"copies": "3",
"size": "8256",
"license": "mit",
"hash": -6282494759951106000,
"line_mean": 30.391634981,
"line_max": 112,
"alpha_frac": 0.6436531008,
"autogenerated": false,
"ratio": 4.055009823182711,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6198662923982712,
"avg_score": null,
"num_lines": null
} |
# apple/controllers.py
# Brought to you by We Vote. Be good.
# -*- coding: UTF-8 -*-
from apple.models import AppleUser
from voter.controllers import voter_merge_two_accounts_action
from voter.models import VoterManager
from wevote_functions.functions import positive_value_exists
import wevote_functions.admin
from .jwt_apple_signin import retrieve_user
logger = wevote_functions.admin.get_logger(__name__)
def apple_sign_in_save_merge_if_needed(
        email_from_apple='',
        previously_signed_in_apple_voter_found=False,
        previously_signed_in_apple_voter_we_vote_id='',
        voter_device_link=None,
        voter_starting_process=None):
    """Locate any prior voter account for this Apple sign-in and, if one
    exists, merge the current (starting) voter into it.

    Lookup order: (1) the voter previously linked to this Apple id,
    (2) a voter whose email matches the one supplied by Apple.
    Returns a results dict with status/success plus the previously
    signed-in voter (object, found flag, we_vote_id) when one was found.
    """
    status = ''
    success = True
    previously_signed_in_voter = None
    previously_signed_in_voter_found = False
    previously_signed_in_voter_we_vote_id = ''
    voter_manager = VoterManager()
    try:
        # Test to see if we have a valid voter_starting_process object
        voter_starting_process_we_vote_id = voter_starting_process.we_vote_id
    except Exception as e:
        status += "VOTER_STARTING_PROCESS_NOT_FOUND: " + str(e) + ' '
        results = {
            'status':                                   status,
            'success':                                  False,
            'previously_signed_in_voter':               previously_signed_in_voter,
            'previously_signed_in_voter_found':         previously_signed_in_voter_found,
            'previously_signed_in_voter_we_vote_id':    previously_signed_in_voter_we_vote_id,
        }
        return results
    try:
        # Test to see if we have a valid voter_device_link object
        voter_id_in_voter_device_link = voter_device_link.voter_id
    except Exception as e:
        status += "VOTER_DEVICE_LINK_NOT_FOUND: " + str(e) + ' '
        results = {
            'status':                                   status,
            'success':                                  False,
            'previously_signed_in_voter':               previously_signed_in_voter,
            'previously_signed_in_voter_found':         previously_signed_in_voter_found,
            'previously_signed_in_voter_we_vote_id':    previously_signed_in_voter_we_vote_id,
        }
        return results
    status += "EMAIL_FROM_APPLE(" + str(email_from_apple) + ") "
    if previously_signed_in_apple_voter_found and positive_value_exists(previously_signed_in_apple_voter_we_vote_id):
        # A voter was already linked to this Apple id -- retrieve it.
        results = voter_manager.retrieve_voter_by_we_vote_id(
            previously_signed_in_apple_voter_we_vote_id, read_only=False)
        if results['voter_found']:
            # This is the voter account the person is using when they click "Sign in with Apple"
            previously_signed_in_voter = results['voter']
            previously_signed_in_voter_we_vote_id = previously_signed_in_voter.we_vote_id
            previously_signed_in_voter_found = True
            status += "PREVIOUSLY_SIGNED_IN_VOTER_FOUND_BY_APPLE_WE_VOTE_ID "
        else:
            status += results['status']
            status += "PREVIOUSLY_SIGNED_IN_VOTER_NOT_FOUND_BY_APPLE_WE_VOTE_ID "
    elif positive_value_exists(email_from_apple):
        # This is a new sign in, so we want to check to make sure we don't have an account with this email already
        results = voter_manager.retrieve_voter_by_email(email_from_apple, read_only=False)
        if results['voter_found']:
            previously_signed_in_voter = results['voter']
            previously_signed_in_voter_we_vote_id = previously_signed_in_voter.we_vote_id
            previously_signed_in_voter_found = True
            status += "VOTER_WITH_MATCHING_EMAIL_FOUND(" + str(email_from_apple) + ") "
        else:
            status += results['status']
            status += "VOTER_WITH_MATCHING_EMAIL_NOT_FOUND(" + str(email_from_apple) + ") "
    else:
        status += "NO_PRIOR_VOTER_FOUND "
    if previously_signed_in_voter_found:
        # Merge the account the person started with into the prior one.
        status += "PREVIOUSLY_SIGNED_IN_VOTER-" + str(previously_signed_in_voter_we_vote_id) + " "
        merge_results = voter_merge_two_accounts_action(
            voter_starting_process,
            previously_signed_in_voter,
            voter_device_link,
            status=status,
            email_owner_voter_found=False,
            facebook_owner_voter_found=False,
            invitation_owner_voter_found=False)
        status += merge_results['status']
    results = {
        'status':                                   status,
        'success':                                  success,
        'previously_signed_in_voter':               previously_signed_in_voter,
        'previously_signed_in_voter_found':         previously_signed_in_voter_found,
        'previously_signed_in_voter_we_vote_id':    previously_signed_in_voter_we_vote_id,
    }
    return results
# def apple_sign_in_retrieve_voter_id(email, first_name, last_name):
#
# # look for an email match in voters
# voter_results = voter_manager.retrieve_voter_list_with_emails()
# for voter in voter_results['voter_list']:
# if voter.email == email:
# voter_we_vote_id = voter.we_vote_id
# if positive_value_exists(voter_we_vote_id):
# success = True
# results = {
# 'success': success,
# 'status': "APPLE_SIGN_IN_FOUND_A_VOTER_ID_BY_EMAIL ",
# 'voter_device_id': voter_device_id,
# 'voter_we_vote_id': voter_we_vote_id,
# }
# return results
#
# # next look for a name match in voters
# voter_results = voter_manager.retrieve_voter_list_by_name(first_name, last_name)
# for voter in voter_results['voter_list']:
# voter_we_vote_id = voter.we_vote_id
# if positive_value_exists(voter_we_vote_id):
# success = True
# results = {
# 'success': success,
# 'status': "APPLE_SIGN_IN_FOUND_A_VOTER_ID_BY_NAME_MATCH ",
# 'voter_device_id': voter_device_id,
# 'voter_we_vote_id': voter_we_vote_id,
# }
# return results
#
# return True
def delete_apple_user_entries_for_voter(voter_to_delete_we_vote_id):
    """Remove every AppleUser row linked to the given voter.

    Returns a results dict with status text, a success flag and counters
    for deleted / not-deleted entries.
    """
    status = ''
    success = True
    apple_user_entries_deleted = 0
    apple_user_entries_not_deleted = 0
    if not positive_value_exists(voter_to_delete_we_vote_id):
        status += "DELETE_APPLE_USER_ENTRIES-Missing voter_to_delete_we_vote_id "
        return {
            'status':                           status,
            'success':                          False,
            'voter_to_delete_we_vote_id':       voter_to_delete_we_vote_id,
            'apple_user_entries_deleted':       apple_user_entries_deleted,
            'apple_user_entries_not_deleted':   apple_user_entries_not_deleted,
        }
    matching_entries = list(
        AppleUser.objects.all().filter(
            voter_we_vote_id__iexact=voter_to_delete_we_vote_id))
    for apple_user_entry in matching_entries:
        try:
            apple_user_entry.delete()
            apple_user_entries_deleted += 1
        except Exception as e:
            # This might just mean that another entry already exists for the "to" voter
            status += "COULD_NOT_DELETE_APPLE_USER: " + str(e) + ' '
            success = False
            apple_user_entries_not_deleted += 1
    return {
        'status':                           status,
        'success':                          success,
        'voter_to_delete_we_vote_id':       voter_to_delete_we_vote_id,
        'apple_user_entries_deleted':       apple_user_entries_deleted,
        'apple_user_entries_not_deleted':   apple_user_entries_not_deleted,
    }
def move_apple_user_entries_to_another_voter(from_voter_we_vote_id, to_voter_we_vote_id):
    """Re-point every AppleUser row from one voter onto another.

    Returns a results dict with status text, a success flag and counters
    for moved / not-moved entries.
    """
    status = ''
    success = True
    apple_user_entries_moved = 0
    apple_user_entries_not_moved = 0

    def _results():
        # Single place to assemble the response payload.
        return {
            'status':                       status,
            'success':                      success,
            'from_voter_we_vote_id':        from_voter_we_vote_id,
            'to_voter_we_vote_id':          to_voter_we_vote_id,
            'apple_user_entries_moved':     apple_user_entries_moved,
            'apple_user_entries_not_moved': apple_user_entries_not_moved,
        }

    if not positive_value_exists(from_voter_we_vote_id) or not positive_value_exists(to_voter_we_vote_id):
        status += "MOVE_APPLE_USER_ENTRIES_TO_ANOTHER_VOTER-Missing either from_voter_we_vote_id or to_voter_we_vote_id "
        success = False
        return _results()
    if from_voter_we_vote_id == to_voter_we_vote_id:
        status += "MOVE_APPLE_USER_ENTRIES_TO_ANOTHER_VOTER-from_voter_we_vote_id and to_voter_we_vote_id identical "
        success = False
        return _results()
    matching_entries = list(
        AppleUser.objects.all().filter(
            voter_we_vote_id__iexact=from_voter_we_vote_id))
    for apple_user_entry in matching_entries:
        try:
            apple_user_entry.voter_we_vote_id = to_voter_we_vote_id
            apple_user_entry.save()
            apple_user_entries_moved += 1
        except Exception as e:
            # This might just mean that another entry already exists for the "to" voter
            status += "COULD_NOT_SAVE_APPLE_USER: " + str(e) + ' '
            success = False
            apple_user_entries_not_moved += 1
    return _results()
def validate_sign_in_with_apple_token_for_api(apple_oauth_code):
    """Exchange an Apple OAuth code for the associated user details.

    Delegates to apple.jwt_apple_signin.retrieve_user and logs the result.
    """
    apple_user = retrieve_user(apple_oauth_code)
    # BUG FIX: the previous call `logger.debug('appleuser: ', appleUser)`
    # passed the payload as a lazy %-argument with no placeholder, so it
    # was never rendered; also removed the stray debug print().
    logger.debug('appleuser: %s', apple_user)
| {
"repo_name": "wevote/WeVoteServer",
"path": "apple/controllers.py",
"copies": "1",
"size": "10509",
"license": "mit",
"hash": 1746876343021738500,
"line_mean": 42.2469135802,
"line_max": 117,
"alpha_frac": 0.5897801884,
"autogenerated": false,
"ratio": 3.3564356435643563,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.44462158319643563,
"avg_score": null,
"num_lines": null
} |
""" Apple homekit accessory for CO2meter
(c) Vladimir Filimonov, 2018
E-mail: vladimir.a.filimonov@gmail.com
"""
import logging
import signal
from pyhap.accessory_driver import AccessoryDriver
from pyhap.accessory import Accessory, Category
import pyhap.loader as loader
import co2meter as co2
###############################################################################
PORT = 51826
PINCODE = b"800-11-400"
NAME = 'CO2 Monitor'
IDENTIFY = 'co2meter (https://github.com/vfilimonov/co2meter)'
CO2_THRESHOLD = 1200 # iPhone will show warning if the concentration is above
FREQUENCY = 45 # seconds - between consecutive reads from the device
###############################################################################
# Extended from: https://github.com/ikalchev/HAP-python
###############################################################################
class CO2Accessory(Accessory):
    """HomeKit accessory publishing temperature and CO2 readings
    from a co2meter.CO2monitor device.
    """
    category = Category.SENSOR  # This is for the icon in the iOS Home app.
    def __init__(self, mon=None, freq=FREQUENCY, monitoring=True, **kwargs):
        """ Initialize sensor:
        - call parent __init__
        - save references to characteristics
        - (optional) set up callbacks
        If monitor object is not passed, it will be created.
        freq defines interval in seconds between updating the values.
        When monitoring is False the accessory only republishes values
        read elsewhere, so an external monitor object is mandatory.
        """
        if not monitoring and mon is None:
            raise ValueError('For monitoring=False monitor object should be passed')
        self.monitor = co2.CO2monitor() if mon is None else mon
        self.frequency = freq
        self.monitoring = monitoring
        super(CO2Accessory, self).__init__(NAME, **kwargs)
    #########################################################################
    def temperature_changed(self, value):
        """ Dummy callback """
        logging.info("Temperature changed to: %s" % value)
    def co2_changed(self, value):
        """ Dummy callback """
        logging.info("CO2 level is changed to: %s" % value)
    #########################################################################
    def _set_services(self):
        """ Add services to be supported (called from __init__).
        A loader creates Service and Characteristic objects based on json
        representation such as the Apple-defined ones in pyhap/resources/.
        """
        # This call sets AccessoryInformation, so we'll do this below
        # super(CO2Accessory, self)._set_services()
        char_loader = loader.get_char_loader()
        serv_loader = loader.get_serv_loader()
        # Mandatory: Information about device
        info = self.monitor.info
        serv_info = serv_loader.get("AccessoryInformation")
        serv_info.get_characteristic("Name").set_value(NAME, False)
        serv_info.get_characteristic("Manufacturer").set_value(info['manufacturer'], False)
        serv_info.get_characteristic("Model").set_value(info['product_name'], False)
        serv_info.get_characteristic("SerialNumber").set_value(info['serial_no'], False)
        serv_info.get_characteristic("Identify").set_value(IDENTIFY, False)
        # Need to ensure AccessoryInformation is with IID 1
        self.add_service(serv_info)
        # Temperature sensor: only mandatory characteristic
        serv_temp = serv_loader.get("TemperatureSensor")
        self.char_temp = serv_temp.get_characteristic("CurrentTemperature")
        serv_temp.add_characteristic(self.char_temp)
        # CO2 sensor: both mandatory and optional characteristic
        serv_co2 = serv_loader.get("CarbonDioxideSensor")
        self.char_high_co2 = serv_co2.get_characteristic("CarbonDioxideDetected")
        self.char_co2 = char_loader.get("CarbonDioxideLevel")
        serv_co2.add_characteristic(self.char_high_co2)
        serv_co2.add_opt_characteristic(self.char_co2)
        self.char_temp.setter_callback = self.temperature_changed
        self.char_co2.setter_callback = self.co2_changed
        self.add_service(serv_temp)
        self.add_service(serv_co2)
    #########################################################################
    def _read_and_set(self):
        # Push the latest (co2, temperature) readings into the HomeKit
        # characteristics; vals layout follows co2meter's raw tuple.
        if self.monitoring:
            vals = self.monitor.read_data_raw(max_requests=1000)
        else:
            try:
                vals = self.monitor._last_data
            except AttributeError:
                # BUG FIX: was a bare `except:` that hid every error;
                # only "no data read yet" (missing attribute) is expected.
                return
        self.char_co2.set_value(vals[1])
        self.char_high_co2.set_value(vals[1] > CO2_THRESHOLD)
        self.char_temp.set_value(int(vals[2]))
    def run(self):
        """ We override this method to implement what the accessory will do when it is
        started. An accessory is started and stopped from the AccessoryDriver.
        It might be convenient to use the Accessory's run_sentinel, which is a
        threading. Event object which is set when the accessory should stop running.
        """
        self._read_and_set()
        while not self.run_sentinel.wait(self.frequency):
            self._read_and_set()
    def stop(self):
        """ Here we should clean-up resources if necessary.
        It is called by the AccessoryDriver when the Accessory is being stopped
        (it is called right after run_sentinel is set).
        """
        logging.info("Stopping accessory.")
###############################################################################
###############################################################################
def start_homekit(mon=None, port=PORT, host=None, monitoring=True,
                  handle_sigint=True):
    """Create the CO2 accessory, attach it to an AccessoryDriver and
    start serving; returns the driver.
    """
    logging.basicConfig(level=logging.INFO)
    accessory = CO2Accessory(mon=mon, pincode=PINCODE, monitoring=monitoring)
    # Serve the accessory on the requested interface/port.
    driver = AccessoryDriver(accessory, port=port, address=host)
    if handle_sigint:
        # Let the driver shut down the accessory, server and advertising
        # gracefully on Ctrl-C / kill.
        signal.signal(signal.SIGINT, driver.signal_handler)
        signal.signal(signal.SIGTERM, driver.signal_handler)
    driver.start()
    return driver
###############################################################################
if __name__ == '__main__':
start_homekit()
| {
"repo_name": "vfilimonov/co2meter",
"path": "co2meter/homekit.py",
"copies": "1",
"size": "6282",
"license": "mit",
"hash": 4428583365632395300,
"line_mean": 40.6026490066,
"line_max": 91,
"alpha_frac": 0.5842088507,
"autogenerated": false,
"ratio": 4.253215978334461,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5337424829034462,
"avg_score": null,
"num_lines": null
} |
"""Apple HomeKit encryption helper methods.
The variables in this file follow the names given in the SRP paper:
T. Wu, SRP-6: Improvements and Refinements to the Secure Remote Password Protocol,
Submission to the IEEE P1363 Working Group, Oct 2002.
https://tools.ietf.org/html/rfc5054#ref-SRP-RFC
https://tools.ietf.org/html/rfc2945
"""
import logging
import os
import random
from hashlib import sha512
from hmac import compare_digest
from struct import pack
from typing import Any, Dict, List, Tuple, Union, Optional # NOQA pylint: disable=W0611
import cryptography.hazmat
from libnacl import (crypto_aead_chacha20poly1305_ietf_decrypt,
crypto_aead_chacha20poly1305_ietf_encrypt)
import ed25519
from . import constants, utils
logger = logging.getLogger(__name__)
# Constants
N_HEX = """FFFFFFFF FFFFFFFF C90FDAA2 2168C234 C4C6628B 80DC1CD1 29024E08
8A67CC74 020BBEA6 3B139B22 514A0879 8E3404DD EF9519B3 CD3A431B
302B0A6D F25F1437 4FE1356D 6D51C245 E485B576 625E7EC6 F44C42E9
A637ED6B 0BFF5CB6 F406B7ED EE386BFB 5A899FA5 AE9F2411 7C4B1FE6
49286651 ECE45B3D C2007CB8 A163BF05 98DA4836 1C55D39A 69163FA8
FD24CF5F 83655D23 DCA3AD96 1C62F356 208552BB 9ED52907 7096966D
670C354E 4ABC9804 F1746C08 CA18217C 32905E46 2E36CE3B E39E772C
180E8603 9B2783A2 EC07A28F B5C55DF0 6F4C52C9 DE2BCBF6 95581718
3995497C EA956AE5 15D22618 98FA0510 15728E5A 8AAAC42D AD33170D
04507A33 A85521AB DF1CBA64 ECFB8504 58DBEF0A 8AEA7157 5D060C7D
B3970F85 A6E1E4C7 ABF5AE8C DB0933D7 1E8C94E0 4A25619D CEE3D226
1AD2EE6B F12FFA06 D98A0864 D8760273 3EC86A64 521F2B18 177B200C
BBE11757 7A615D6C 770988C0 BAD946E2 08E24FA0 74E5AB31 43DB5BFC
E0FD108E 4B82D120 A93AD2CA FFFFFFFF FFFFFFFF"""
N = int(''.join(N_HEX.split()), 16)
PAD_L = N.bit_length() // 8
g = 5
USERNAME = 'Pair-Setup'
SALT_BITS = 64
RANDOM_BITS = 512
password = ''
def H(*args: Union[int, bytes, str], sep: bytes=b'', pad: bool=False) -> int:
    """SHA-512 hash of the (optionally padded) concatenation of *args*.

    ints are serialized big-endian, strs UTF-8 encoded; with pad=True each
    piece is left-padded with zero bytes to PAD_L. Returns the digest as int.
    """
    pieces = []
    for item in args:
        if isinstance(item, int):
            item = to_bytes(item, False)
        elif isinstance(item, str):
            item = item.encode('utf-8')
        if pad:
            # rjust never truncates, matching the original prefix-pad.
            item = item.rjust(PAD_L, b'\x00')
        pieces.append(item)
    return int(sha512(sep.join(pieces)).hexdigest(), 16)
def random_int(n_bits: int=RANDOM_BITS) -> int:
    """Draw a cryptographically random integer of *n_bits* bits, reduced mod N."""
    rng = random.SystemRandom()
    return rng.getrandbits(n_bits) % N
def to_bytes(value: int, little_endian: bool=False) -> bytes:
    """Serialize *value* into its minimal-length byte representation.

    Big-endian by default; note that 0 serializes to b'' (zero bytes).
    """
    byteorder = 'little' if little_endian else 'big'
    # Ceiling division gives the fewest bytes that hold all the bits.
    return value.to_bytes(-(-value.bit_length() // 8), byteorder)
def from_bytes(value: bytes, little_endian: bool=False) -> int:
"""Transform bytes representation of an int into an int."""
if little_endian:
order = 'little'
else:
order = 'big'
return int.from_bytes(value, order)
k = H(N, g, pad=True)
def derive_session_key(shared_secret: bytes,
salt: bytes=b"Pair-Setup-Controller-Sign-Salt",
info: bytes=b"Pair-Setup-Controller-Sign-Info",
output_size: int=32) -> bytes:
"""Derive X from the SRP shared secret by using HKDF-SHA-512."""
hkdf = cryptography.hazmat.primitives.kdf.hkdf.HKDF(
algorithm=cryptography.hazmat.primitives.hashes.SHA512(),
length=output_size,
salt=salt,
info=info,
backend=cryptography.hazmat.backends.default_backend())
return hkdf.derive(shared_secret)
class SRPPairSetup:
"""Secure Remote Protocol session for pair setup.
This class is used to generate messages for pairing using SRP
and generates the long term cryptographic keys.
Parameters
----------
pairing_id
Unique identifier for the controller. Must be formatted as
XX:XX:XX:XX:XX:XX", where "XX" is a hexadecimal string representing a byte.
setup_code
Code for the pairing, Must be formatted as
XXX-XX-XXX where each X is a 0-9 digit and dashes are required.
This code can be passed when creating the session, or after
starting the pairing (for m3).
storage_folder
Folder path to store the pairing keys.
This folder should be secure to prevent unauthorized access.
"""
def __init__(
self,
pairing_id: bytes,
storage_folder: str,
setup_code: str=None, ) -> None:
self.setup_code = setup_code
self.pairing_id = pairing_id
self.storage_folder = storage_folder
self.g = g
self.N = N
self.k = H(self.N, self.g, pad=True)
self.B = 0 # type: int
self.s = 0 # type: int
self.my_s = 0 # type: int
self.x = 0 # type: int
self.a = 0 # type: int
self.A = 0 # type: int
self.u = 0 # type: int
self.S = 0 # type: int
self.K = 0 # type: int
self.M1 = 0 # type: int
self.M2 = 0 # type: int
self.X = 0 # type: int
self.state = 0
self.signing_key = None # type: Optional[ed25519.SigningKey]
self.verifying_key = None # type: Optional[ed25519.VerifyingKey]
self.device_info = b'' # type: bytes
self.device_signature = b'' # type: bytes
self.accessory_pairing_id = b'' # type: bytes
self.accessory_ltpk = b'' # type: bytes
self.accessory_signature = b'' # type: bytes
@staticmethod
def m1_generate_srp_start_request() -> List[Tuple[int, bytes]]:
"""Generate the SRP Start request message TLVs.
The message contains 2 TLVs:
- Return_Response: 1
- Vale: kTLVs
With the kTLVs:
- kTLVType_State <M1>
- kTLVType_Method <Pair Setup>
"""
ktlvs = [(constants.PairingKTlvValues.kTLVType_State, pack('<B', 1)),
(constants.PairingKTlvValues.kTLVType_Method, pack(
'<B', constants.PairingKTLVMethodValues.Pair_Setup))]
return ktlvs
def m2_receive_srp_start_response(self,
parsed_ktlvs: Dict[str, bytes]) -> None:
"""Update SRP session with m2 response"""
if from_bytes(parsed_ktlvs['kTLVType_State'], False) != 2:
raise ValueError(
"Received wrong message for M2 {}".format(parsed_ktlvs))
self.B = from_bytes(parsed_ktlvs['kTLVType_PublicKey'])
self.s = from_bytes(parsed_ktlvs['kTLVType_Salt'])
if self.B >= N:
raise ValueError("Invalid public key received")
def m3_generate_srp_verify_request(
self, setup_code: str=None) -> List[Tuple[int, bytes]]:
"""Generate the SRP Verify request message TLVs.
The message contains 2 TLVs:
- Return_Response: 1
- Vale: kTLVs
With the kTLVs:
- kTLVType_State <M3>
- kTLVType_PublicKey <iOS device's SRP public key> - A
- kTLVType_Proof <iOS device's SRP proof> - M1
"""
if self.setup_code is None:
self.setup_code = setup_code
if self.setup_code is None:
raise ValueError("No setup code, cannot proceed with M3")
self.x = H(self.s, H(USERNAME, self.setup_code, sep=b":"))
self.a = random_int(RANDOM_BITS)
self.A = pow(self.g, self.a, self.N)
self.u = H(self.A, self.B, pad=True)
self.S = pow(self.B - (self.k * pow(self.g, self.x, self.N)),
self.a + (self.u * self.x), self.N)
self.K = H(self.S)
# self.M1 = H(self.A, self.B, self.S)
self.M1 = H(H(N) ^ H(g), H(USERNAME), self.s, self.A, self.B, self.K)
ktlvs = [(constants.PairingKTlvValues.kTLVType_State, pack('<B', 3)),
(constants.PairingKTlvValues.kTLVType_PublicKey,
to_bytes(self.A)),
(constants.PairingKTlvValues.kTLVType_Proof,
to_bytes(self.M1))]
return ktlvs
def m4_receive_srp_verify_response(self,
parsed_ktlvs: Dict[str, bytes]) -> None:
"""Verify accessory's proof."""
if from_bytes(parsed_ktlvs['kTLVType_State'], False) != 4:
raise ValueError(
"Received wrong message for M4 {}".format(parsed_ktlvs))
self.M2 = from_bytes(parsed_ktlvs['kTLVType_Proof'])
M2_calc = H(self.A, self.M1, self.K)
if not compare_digest(
to_bytes(M2_calc), parsed_ktlvs['kTLVType_Proof']):
raise ValueError("Authentication failed - invalid prood received.")
def m5_generate_exchange_request(self) -> List[Tuple[int, bytes]]:
"""Generate the Request Generation, as well as signing and encryption keys.
The message contains 2 TLVs:
- Return_Response: 1
- Value: kTLVs
With the kTLVs:
- kTLVType_State <M5>
- kTLVType_EncryptedData <encryptedData with authTag appended>
The encrypted data contains the ktlvs:
- kTLVType_Identifier <iOSDevicePairingID>
- kTLVType_PublicKey <iOSDeviceLTPK> - verifying_key
- kTLVType_Signature <iOSDeviceSignature>
"""
# 1. Generate Ed25519 long-term public key, iOSDeviceLTPK,
# and long-term secret key, iOSDeviceLTSK
if self.signing_key is None:
self.signing_key, _ = ed25519.create_keypair()
with open(os.path.join(self.storage_folder, "secret-key"),
"wb") as secret_key_file:
secret_key_file.write(self.signing_key.to_bytes())
self.verifying_key = self.signing_key.get_verifying_key()
# 2. Derive iOSDeviceX from the SRP shared secret by using HKDF-SHA-512
salt = b"Pair-Setup-Controller-Sign-Salt"
info = b"Pair-Setup-Controller-Sign-Info"
output_size = 32
hkdf = cryptography.hazmat.primitives.kdf.hkdf.HKDF(
algorithm=cryptography.hazmat.primitives.hashes.SHA512(),
length=output_size,
salt=salt,
info=info,
backend=cryptography.hazmat.backends.default_backend())
self.X = hkdf.derive(to_bytes(self.K))
# 3. Concatenate iOSDeviceX with the iOS device's Pairing Identifier, iOSDevicePairingID,
# and its long-term public key, iOSDeviceLTPK.
# The concatenated value will be referred to as iOSDeviceInfo.
self.device_info = (
to_bytes(self.X) + session.pairing_id.encode('utf-8') +
self.verifying_key.to_bytes())
# 4. Generate iOSDeviceSignature by signing iOSDeviceInfo with its
# long-term secret key, iOSDeviceLTSK, using Ed25519.
self.device_signature = self.signing_key.sign(self.device_info)
# 5. Construct a sub-TLV
sub_ktlvs = [(constants.PairingKTlvValues.kTLVType_Identifier,
self.pairing_id),
(constants.PairingKTlvValues.kTLVType_PublicKey,
self.verifying_key.to_bytes()),
(constants.PairingKTlvValues.kTLVType_Signature,
self.device_signature)]
prepared_sub_ktlvs = b''.join(
data for ktlv in sub_ktlvs for data in utils.prepare_tlv(*ktlv))
# 6. Encrypt the sub-TLV, encryptedData, and generate the 16 byte auth tag, authTag.
# using the ChaCha20-Poly1305 AEAD algorithm
# this includes the auth_tag appended at the end
encrypted_data = crypto_aead_chacha20poly1305_ietf_encrypt(
key=self.S, nonce="PS-Msg05", aad=None, message=prepared_sub_ktlvs)
ktlvs = [(constants.PairingKTlvValues.kTLVType_State, pack('<B', 5)),
(constants.PairingKTlvValues.kTLVType_EncryptedData,
encrypted_data)]
return ktlvs
def m6_receive_exchange_response(self,
parsed_ktlvs: Dict[str, int]) -> None:
"""Verify accessory and save pairing."""
if parsed_ktlvs['kTLVType_State'] != 6:
raise ValueError(
"Received wrong message for M6 {}".format(parsed_ktlvs))
decrypted_ktlvs = crypto_aead_chacha20poly1305_ietf_decrypt(
parsed_ktlvs['kTLVType_EncryptedData'],
nonce=b"PS-Msg06",
aad=None,
key=self.S)
parsed_decrypted_ktlvs = utils.parse_ktlvs(decrypted_ktlvs)
self.accessory_pairing_id = parsed_decrypted_ktlvs[
'kTLVType_Identifier']
self.accessory_ltpk = parsed_decrypted_ktlvs['kTLVType_PublicKey']
self.accessory_signature = parsed_decrypted_ktlvs['kTLVType_Signature']
with open(
os.path.join(self.storage_folder, "accessory_pairing_id"),
"wb") as accessory_pairing_id_file:
accessory_pairing_id_file.write(self.accessory_pairing_id)
with open(os.path.join(self.storage_folder, "accessory_ltpk"),
"wb") as accessory_ltpk_file:
accessory_ltpk_file.write(self.accessory_ltpk)
logger.debug(
"Successfully saved accessory pairing id and accessory long term public key"
)
class SRPPairVerify:
"""Secure Remote Protocol session for pair verify.
You must already have paired with an accessory.
Parameters
----------
pairing_id
Unique identifier for the controller. Must be formatted as
XX:XX:XX:XX:XX:XX", where "XX" is a hexadecimal string representing a byte.
setup_code
Code for the pairing, Must be formatted as
XXX-XX-XXX where each X is a 0-9 digit and dashes are required.
This code can be passed when creating the session, or after
starting the pairing (for m3).
storage_folder
Folder path to store the pairing keys.
This folder should be secure to prevent unauthorized access.
"""
def __init__(
self,
pairing_id: bytes,
storage_folder: str,
setup_code: str=None, ) -> None:
self.setup_code = setup_code
self.pairing_id = pairing_id
self.storage_folder = storage_folder
self.secret_key = None # type: Optional[ed25519.SigningKey]
self.verifying_key = None # type: Optional[ed25519.VerifyingKey]
self.device_info = b'' # type: bytes
self.device_signature = b'' # type: bytes
self.accessory_pairing_id = b'' # type: bytes
self.accessory_ltpk = b'' # type: bytes
self.accessory_signature = b'' # type: bytes
def m1_generate_verify_start_request(self) -> List[Tuple[int, bytes]]:
"""Generate the SRP Start request message TLVs.
The message contains 2 TLVs:
- Return_Response: 1
- Vale: kTLVs
With the kTLVs:
- kTLVType_State <M1>
- kTLVType_PublicKey <Curve25519 public key>
"""
with open(os.path.join(self.storage_folder, "secret-key"),
"rb") as secret_key_file:
self.secret_key = ed25519.SigningKey(secret_key_file.read())
self.verifying_key = self.secret_key.get_verifying_key()
ktlvs = [(constants.PairingKTlvValues.kTLVType_State, pack('<B', 1)),
(constants.PairingKTlvValues.kTLVType_PublicKey,
self.verifying_key.to_bytes())]
prepared_ktlvs = b''.join(
data for ktlv in ktlvs for data in utils.prepare_tlv(*ktlv))
message_data = [(constants.HapParamTypes.Return_Response, pack(
'<B', 1)), (constants.HapParamTypes.Value, prepared_ktlvs)]
return message_data
@staticmethod
def m2_receive_start_response(parsed_ktlvs: Dict[str, bytes]) -> None:
"""Update SRP session with m2 response"""
if from_bytes(parsed_ktlvs['kTLVType_State']) != 2:
raise ValueError(
"Received wrong message for M2 {}".format(parsed_ktlvs))
proof = from_bytes(parsed_ktlvs['kTLVType_PublicKey'])
encrypted_data = from_bytes(parsed_ktlvs['kTLVType_EncryptedData'])
if proof == encrypted_data:
print("")
def pair() -> None:
"""Pairing SRP protocol"""
# Protocol Summary
# Message 1: Send to accessory
# SRP Start Request
# kTLVType_State <M1>
# kTLVType_Method <Pair Setup>
# Message 2: Receive from accessory
# SRP Start Response
# kTLVType_State <M2>
# kTLVType_PublicKey <Accessory's SRP public key> - s
# kTLVType_Salt <16 byte salt generated in Step 6> - B
parsed_response = {} # type: Dict[str, Any]
B = parsed_response['kTLVType_PublicKey']
s = parsed_response['kTLVType_Salt']
# Message 3: Send to accessory
# SRP Verify Request
# kTLVType_State <M3>
# kTLVType_PublicKey <iOS device's SRP public key> - A
# kTLVType_Proof <iOS device's SRP proof> - M1
my_s = random_int(SALT_BITS)
x = H(my_s, H(USERNAME, password, sep=b":"))
a = random_int(RANDOM_BITS)
A = pow(g, a, N)
u = H(A, B, pad=True)
S = pow(B - (k * pow(g, x, N)), a + (u * x), N)
K = H(S)
M1 = H(A, B, S)
# M1 = H(H(N) | H(g), H(USERNAME), s, A, B, K)
# Message 4: Receive from accessory
# SRP Verify Response
# kTLVType_State <M4>
# kTLVType_Proof <Accessory's SRP proof> - M2
parsed_response = {s: K}
M2 = parsed_response['kTLVType_Proof']
M2_calc = H(A, M1, K)
if M2_calc != M2:
raise ValueError("Authentication failed - invalid prood received.")
# Message 5: Send to accessory
# Request Generation
# kTLVType_State <M5>
# kTLVType_EncryptedData <encryptedData with authTag appended>
# The encrypted data contains
# kTLVType_Identifier <iOSDevicePairingID>
# kTLVType_PublicKey <iOSDeviceLTPK>
# kTLVType_Signature <iOSDeviceSignature>
# signing_key, verifying_key = ed25519.create_keypair()
# # Derive iOSDeviceX
# clientX = ''
# InputKey = S
# Salt = b"Pair-Setup-Controller-Sign-Salt"
# Info = b"Pair-Setup-Controller-Sign-Info"
# OutputSize = 32
| {
"repo_name": "henridwyer/pyhomekit",
"path": "pyhomekit/pairing.py",
"copies": "1",
"size": "18343",
"license": "mit",
"hash": 3388822687376194000,
"line_mean": 35.7595190381,
"line_max": 97,
"alpha_frac": 0.6133129804,
"autogenerated": false,
"ratio": 3.3405572755417956,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9449380399182157,
"avg_score": 0.0008979713519276858,
"num_lines": 499
} |
# apple/models.py
# Brought to you by We Vote. Be good.
# -*- coding: UTF-8 -*-
from datetime import date
from django.db import models
from wevote_functions.functions import positive_value_exists
from exception.models import handle_exception, print_to_log
import wevote_functions.admin
logger = wevote_functions.admin.get_logger(__name__)
class AppleUser(models.Model):
"""
Sign in with Apple returns a unique apple user_code for a Apple ID/iCloud sign-in (8/5/20 is it unique or sb `sub`?)
example user_code "001407.8220c1ff0bf84328bcc85ac1ca25e9aa.0456"
Apple allows alias sign in email addresses, so the reported email address might be an alias that would not be
in our system anywhere else
"""
# objects = None
# voter_device_id = models.CharField(verbose_name='voter device id',
# max_length=255, null=False, blank=False, unique=True, default='DELETE_ME')
voter_we_vote_id = models.CharField(verbose_name="we vote id for the Apple ID owner", max_length=255, unique=True)
user_code = models.CharField(verbose_name="User's apple id code", max_length=255, null=False, unique=False)
email = models.EmailField(verbose_name='apple email address', max_length=255, unique=False,
null=True, blank=True)
first_name = models.CharField(
verbose_name="User's first_name from Apple", max_length=255, null=True, blank=True, unique=False)
middle_name = models.CharField(
verbose_name="User's middle_name from Apple", max_length=255, null=True, blank=True, unique=False)
last_name = models.CharField(
verbose_name="User's last_name from Apple", max_length=255, null=True, blank=True, unique=False)
# The next three are for debugging/statistics are are not necessary for sign in
apple_platform = models.CharField(
verbose_name="Platform of Apple Device", max_length=32, null=True, blank=True, unique=False)
apple_os_version = models.CharField(
verbose_name="Apple OS Version", max_length=32, null=True, blank=True, unique=False)
apple_model = models.CharField(
verbose_name="Apple Device Model", max_length=32, null=True, blank=True, unique=False)
date_created = models.DateTimeField(verbose_name='date created', null=False, auto_now_add=True)
date_last_referenced = models.DateTimeField(verbose_name='date last referenced', null=False, auto_now=True)
def __unicode__(self):
return AppleUser
| {
"repo_name": "wevote/WeVoteServer",
"path": "apple/models.py",
"copies": "1",
"size": "2495",
"license": "mit",
"hash": -5487473651152214000,
"line_mean": 52.085106383,
"line_max": 120,
"alpha_frac": 0.7018036072,
"autogenerated": false,
"ratio": 3.626453488372093,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4828257095572093,
"avg_score": null,
"num_lines": null
} |
# ApplePy - an Apple ][ emulator in Python
# James Tauber / http://jtauber.com/
# originally written 2001, updated 2011
import BaseHTTPServer
import json
import re
import select
import socket
import struct
import sys
bus = None # socket for bus I/O
def signed(x):
if x > 0x7F:
x = x - 0x100
return x
class ROM:
def __init__(self, start, size):
self.start = start
self.end = start + size - 1
self._mem = [0x00] * size
def load(self, address, data):
for offset, datum in enumerate(data):
self._mem[address - self.start + offset] = datum
def load_file(self, address, filename):
with open(filename, "rb") as f:
for offset, datum in enumerate(f.read()):
self._mem[address - self.start + offset] = ord(datum)
def read_byte(self, address):
assert self.start <= address <= self.end
return self._mem[address - self.start]
class RAM(ROM):
def write_byte(self, address, value):
self._mem[address] = value
class Memory:
def __init__(self, options=None, use_bus=True):
self.use_bus = use_bus
self.rom = ROM(0xD000, 0x3000)
if options:
self.rom.load_file(0xD000, options.rom)
self.ram = RAM(0x0000, 0xC000)
if options and options.ram:
self.ram.load_file(0x0000, options.ram)
def load(self, address, data):
if address < 0xC000:
self.ram.load(address, data)
def read_byte(self, cycle, address):
if address < 0xC000:
return self.ram.read_byte(address)
elif address < 0xD000:
return self.bus_read(cycle, address)
else:
return self.rom.read_byte(address)
def read_word(self, cycle, address):
return self.read_byte(cycle, address) + (self.read_byte(cycle + 1, address + 1) << 8)
def read_word_bug(self, cycle, address):
if address % 0x100 == 0xFF:
return self.read_byte(cycle, address) + (self.read_byte(cycle + 1, address & 0xFF00) << 8)
else:
return self.read_word(cycle, address)
def write_byte(self, cycle, address, value):
if address < 0xC000:
self.ram.write_byte(address, value)
if 0x400 <= address < 0x800 or 0x2000 <= address < 0x5FFF:
self.bus_write(cycle, address, value)
def bus_read(self, cycle, address):
if not self.use_bus:
return 0
op = struct.pack("<IBHB", cycle, 0, address, 0)
try:
bus.send(op)
b = bus.recv(1)
if len(b) == 0:
sys.exit(0)
return ord(b)
except socket.error:
sys.exit(0)
def bus_write(self, cycle, address, value):
if not self.use_bus:
return
op = struct.pack("<IBHB", cycle, 1, address, value)
try:
bus.send(op)
except IOError:
sys.exit(0)
class Disassemble:
def __init__(self, cpu, memory):
self.cpu = cpu
self.memory = memory
self.setup_ops()
def setup_ops(self):
self.ops = [(1, "???")] * 0x100
self.ops[0x00] = (1, "BRK", )
self.ops[0x01] = (2, "ORA", self.indirect_x_mode)
self.ops[0x05] = (2, "ORA", self.zero_page_mode)
self.ops[0x06] = (2, "ASL", self.zero_page_mode)
self.ops[0x08] = (1, "PHP", )
self.ops[0x09] = (2, "ORA", self.immediate_mode)
self.ops[0x0A] = (1, "ASL", )
self.ops[0x0D] = (3, "ORA", self.absolute_mode)
self.ops[0x0E] = (3, "ASL", self.absolute_mode)
self.ops[0x10] = (2, "BPL", self.relative_mode)
self.ops[0x11] = (2, "ORA", self.indirect_y_mode)
self.ops[0x15] = (2, "ORA", self.zero_page_x_mode)
self.ops[0x16] = (2, "ASL", self.zero_page_x_mode)
self.ops[0x18] = (1, "CLC", )
self.ops[0x19] = (3, "ORA", self.absolute_y_mode)
self.ops[0x1D] = (3, "ORA", self.absolute_x_mode)
self.ops[0x1E] = (3, "ASL", self.absolute_x_mode)
self.ops[0x20] = (3, "JSR", self.absolute_mode)
self.ops[0x21] = (2, "AND", self.indirect_x_mode)
self.ops[0x24] = (2, "BIT", self.zero_page_mode)
self.ops[0x25] = (2, "AND", self.zero_page_mode)
self.ops[0x26] = (2, "ROL", self.zero_page_mode)
self.ops[0x28] = (1, "PLP", )
self.ops[0x29] = (2, "AND", self.immediate_mode)
self.ops[0x2A] = (1, "ROL", )
self.ops[0x2C] = (3, "BIT", self.absolute_mode)
self.ops[0x2D] = (3, "AND", self.absolute_mode)
self.ops[0x2E] = (3, "ROL", self.absolute_mode)
self.ops[0x30] = (2, "BMI", self.relative_mode)
self.ops[0x31] = (2, "AND", self.indirect_y_mode)
self.ops[0x35] = (2, "AND", self.zero_page_x_mode)
self.ops[0x36] = (2, "ROL", self.zero_page_x_mode)
self.ops[0x38] = (1, "SEC", )
self.ops[0x39] = (3, "AND", self.absolute_y_mode)
self.ops[0x3D] = (3, "AND", self.absolute_x_mode)
self.ops[0x3E] = (3, "ROL", self.absolute_x_mode)
self.ops[0x40] = (1, "RTI", )
self.ops[0x41] = (2, "EOR", self.indirect_x_mode)
self.ops[0x45] = (2, "EOR", self.zero_page_mode)
self.ops[0x46] = (2, "LSR", self.zero_page_mode)
self.ops[0x48] = (1, "PHA", )
self.ops[0x49] = (2, "EOR", self.immediate_mode)
self.ops[0x4A] = (1, "LSR", )
self.ops[0x4C] = (3, "JMP", self.absolute_mode)
self.ops[0x4D] = (3, "EOR", self.absolute_mode)
self.ops[0x4E] = (3, "LSR", self.absolute_mode)
self.ops[0x50] = (2, "BVC", self.relative_mode)
self.ops[0x51] = (2, "EOR", self.indirect_y_mode)
self.ops[0x55] = (2, "EOR", self.zero_page_x_mode)
self.ops[0x56] = (2, "LSR", self.zero_page_x_mode)
self.ops[0x58] = (1, "CLI", )
self.ops[0x59] = (3, "EOR", self.absolute_y_mode)
self.ops[0x5D] = (3, "EOR", self.absolute_x_mode)
self.ops[0x5E] = (3, "LSR", self.absolute_x_mode)
self.ops[0x60] = (1, "RTS", )
self.ops[0x61] = (2, "ADC", self.indirect_x_mode)
self.ops[0x65] = (2, "ADC", self.zero_page_mode)
self.ops[0x66] = (2, "ROR", self.zero_page_mode)
self.ops[0x68] = (1, "PLA", )
self.ops[0x69] = (2, "ADC", self.immediate_mode)
self.ops[0x6A] = (1, "ROR", )
self.ops[0x6C] = (3, "JMP", self.indirect_mode)
self.ops[0x6D] = (3, "ADC", self.absolute_mode)
self.ops[0x6E] = (3, "ROR", self.absolute_mode)
self.ops[0x70] = (2, "BVS", self.relative_mode)
self.ops[0x71] = (2, "ADC", self.indirect_y_mode)
self.ops[0x75] = (2, "ADC", self.zero_page_x_mode)
self.ops[0x76] = (2, "ROR", self.zero_page_x_mode)
self.ops[0x78] = (1, "SEI", )
self.ops[0x79] = (3, "ADC", self.absolute_y_mode)
self.ops[0x7D] = (3, "ADC", self.absolute_x_mode)
self.ops[0x7E] = (3, "ROR", self.absolute_x_mode)
self.ops[0x81] = (2, "STA", self.indirect_x_mode)
self.ops[0x84] = (2, "STY", self.zero_page_mode)
self.ops[0x85] = (2, "STA", self.zero_page_mode)
self.ops[0x86] = (2, "STX", self.zero_page_mode)
self.ops[0x88] = (1, "DEY", )
self.ops[0x8A] = (1, "TXA", )
self.ops[0x8C] = (3, "STY", self.absolute_mode)
self.ops[0x8D] = (3, "STA", self.absolute_mode)
self.ops[0x8E] = (3, "STX", self.absolute_mode)
self.ops[0x90] = (2, "BCC", self.relative_mode)
self.ops[0x91] = (2, "STA", self.indirect_y_mode)
self.ops[0x94] = (2, "STY", self.zero_page_x_mode)
self.ops[0x95] = (2, "STA", self.zero_page_x_mode)
self.ops[0x96] = (2, "STX", self.zero_page_y_mode)
self.ops[0x98] = (1, "TYA", )
self.ops[0x99] = (3, "STA", self.absolute_y_mode)
self.ops[0x9A] = (1, "TXS", )
self.ops[0x9D] = (3, "STA", self.absolute_x_mode)
self.ops[0xA0] = (2, "LDY", self.immediate_mode)
self.ops[0xA1] = (2, "LDA", self.indirect_x_mode)
self.ops[0xA2] = (2, "LDX", self.immediate_mode)
self.ops[0xA4] = (2, "LDY", self.zero_page_mode)
self.ops[0xA5] = (2, "LDA", self.zero_page_mode)
self.ops[0xA6] = (2, "LDX", self.zero_page_mode)
self.ops[0xA8] = (1, "TAY", )
self.ops[0xA9] = (2, "LDA", self.immediate_mode)
self.ops[0xAA] = (1, "TAX", )
self.ops[0xAC] = (3, "LDY", self.absolute_mode)
self.ops[0xAD] = (3, "LDA", self.absolute_mode)
self.ops[0xAE] = (3, "LDX", self.absolute_mode)
self.ops[0xB0] = (2, "BCS", self.relative_mode)
self.ops[0xB1] = (2, "LDA", self.indirect_y_mode)
self.ops[0xB4] = (2, "LDY", self.zero_page_x_mode)
self.ops[0xB5] = (2, "LDA", self.zero_page_x_mode)
self.ops[0xB6] = (2, "LDX", self.zero_page_y_mode)
self.ops[0xB8] = (1, "CLV", )
self.ops[0xB9] = (3, "LDA", self.absolute_y_mode)
self.ops[0xBA] = (1, "TSX", )
self.ops[0xBC] = (3, "LDY", self.absolute_x_mode)
self.ops[0xBD] = (3, "LDA", self.absolute_x_mode)
self.ops[0xBE] = (3, "LDX", self.absolute_y_mode)
self.ops[0xC0] = (2, "CPY", self.immediate_mode)
self.ops[0xC1] = (2, "CMP", self.indirect_x_mode)
self.ops[0xC4] = (2, "CPY", self.zero_page_mode)
self.ops[0xC5] = (2, "CMP", self.zero_page_mode)
self.ops[0xC6] = (2, "DEC", self.zero_page_mode)
self.ops[0xC8] = (1, "INY", )
self.ops[0xC9] = (2, "CMP", self.immediate_mode)
self.ops[0xCA] = (1, "DEX", )
self.ops[0xCC] = (3, "CPY", self.absolute_mode)
self.ops[0xCD] = (3, "CMP", self.absolute_mode)
self.ops[0xCE] = (3, "DEC", self.absolute_mode)
self.ops[0xD0] = (2, "BNE", self.relative_mode)
self.ops[0xD1] = (2, "CMP", self.indirect_y_mode)
self.ops[0xD5] = (2, "CMP", self.zero_page_x_mode)
self.ops[0xD6] = (2, "DEC", self.zero_page_x_mode)
self.ops[0xD8] = (1, "CLD", )
self.ops[0xD9] = (3, "CMP", self.absolute_y_mode)
self.ops[0xDD] = (3, "CMP", self.absolute_x_mode)
self.ops[0xDE] = (3, "DEC", self.absolute_x_mode)
self.ops[0xE0] = (2, "CPX", self.immediate_mode)
self.ops[0xE1] = (2, "SBC", self.indirect_x_mode)
self.ops[0xE4] = (2, "CPX", self.zero_page_mode)
self.ops[0xE5] = (2, "SBC", self.zero_page_mode)
self.ops[0xE6] = (2, "INC", self.zero_page_mode)
self.ops[0xE8] = (1, "INX", )
self.ops[0xE9] = (2, "SBC", self.immediate_mode)
self.ops[0xEA] = (1, "NOP", )
self.ops[0xEC] = (3, "CPX", self.absolute_mode)
self.ops[0xED] = (3, "SBC", self.absolute_mode)
self.ops[0xEE] = (3, "INC", self.absolute_mode)
self.ops[0xF0] = (2, "BEQ", self.relative_mode)
self.ops[0xF1] = (2, "SBC", self.indirect_y_mode)
self.ops[0xF5] = (2, "SBC", self.zero_page_x_mode)
self.ops[0xF6] = (2, "INC", self.zero_page_x_mode)
self.ops[0xF8] = (1, "SED", )
self.ops[0xF9] = (3, "SBC", self.absolute_y_mode)
self.ops[0xFD] = (3, "SBC", self.absolute_x_mode)
self.ops[0xFE] = (3, "INC", self.absolute_x_mode)
def absolute_mode(self, pc):
a = self.cpu.read_word(pc + 1)
return {
"operand": "$%04X" % a,
"memory": [a, 2, self.cpu.read_word(a)],
}
def absolute_x_mode(self, pc):
a = self.cpu.read_word(pc + 1)
e = a + self.cpu.x_index
return {
"operand": "$%04X,X" % a,
"memory": [e, 1, self.cpu.read_byte(e)],
}
def absolute_y_mode(self, pc):
a = self.cpu.read_word(pc + 1)
e = a + self.cpu.y_index
return {
"operand": "$%04X,Y" % a,
"memory": [e, 1, self.cpu.read_byte(e)],
}
def immediate_mode(self, pc):
return {
"operand": "#$%02X" % (self.cpu.read_byte(pc + 1)),
}
def indirect_mode(self, pc):
a = self.cpu.read_word(pc + 1)
return {
"operand": "($%04X)" % a,
"memory": [a, 2, self.cpu.read_word(a)],
}
def indirect_x_mode(self, pc):
z = self.cpu.read_byte(pc + 1)
a = self.cpu.read_word((z + self.cpu.x_index) % 0x100)
return {
"operand": "($%02X,X)" % z,
"memory": [a, 1, self.cpu.read_byte(a)],
}
def indirect_y_mode(self, pc):
z = self.cpu.read_byte(pc + 1)
a = self.cpu.read_word(z) + self.cpu.y_index
return {
"operand": "($%02X),Y" % z,
"memory": [a, 1, self.cpu.read_byte(a)],
}
def relative_mode(self, pc):
return {
"operand": "$%04X" % (pc + signed(self.cpu.read_byte(pc + 1) + 2)),
}
def zero_page_mode(self, pc):
a = self.cpu.read_byte(pc + 1)
return {
"operand": "$%02X" % a,
"memory": [a, 1, self.cpu.read_byte(a)],
}
def zero_page_x_mode(self, pc):
z = self.cpu.read_byte(pc + 1)
a = (z + self.cpu.x_index) % 0x100
return {
"operand": "$%02X,X" % z,
"memory": [a, 1, self.cpu.read_byte(a)],
}
def zero_page_y_mode(self, pc):
z = self.cpu.read_byte(pc + 1)
a = (z + self.cpu.y_index) % 0x100
return {
"operand": "$%02X,Y" % z,
"memory": [a, 1, self.cpu.read_byte(a)],
}
def disasm(self, pc):
op = self.cpu.read_byte(pc)
info = self.ops[op]
r = {
"address": pc,
"bytes": [self.cpu.read_byte(pc + i) for i in range(info[0])],
"mnemonic": info[1],
}
if len(info) > 2:
r.update(info[2](pc))
return r, info[0]
class ControlHandler(BaseHTTPServer.BaseHTTPRequestHandler):
def __init__(self, request, client_address, server, cpu):
self.cpu = cpu
self.disassemble = Disassemble(self.cpu, self.cpu.memory)
self.get_urls = {
r"/disassemble/(\d+)$": self.get_disassemble,
r"/memory/(\d+)(-(\d+))?$": self.get_memory,
r"/memory/(\d+)(-(\d+))?/raw$": self.get_memory_raw,
r"/status$": self.get_status,
}
self.post_urls = {
r"/memory/(\d+)(-(\d+))?$": self.post_memory,
r"/memory/(\d+)(-(\d+))?/raw$": self.post_memory_raw,
r"/quit$": self.post_quit,
r"/reset$": self.post_reset,
}
BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, request, client_address, server)
def log_request(self, code, size=0):
pass
def dispatch(self, urls):
for r, f in urls.items():
m = re.match(r, self.path)
if m is not None:
f(m)
break
else:
self.send_response(404)
self.end_headers()
def response(self, s):
self.send_response(200)
self.send_header("Content-Length", str(len(s)))
self.end_headers()
self.wfile.write(s)
def do_GET(self):
self.dispatch(self.get_urls)
def do_POST(self):
self.dispatch(self.post_urls)
def get_disassemble(self, m):
addr = int(m.group(1))
r = []
n = 20
while n > 0:
dis, length = self.disassemble.disasm(addr)
r.append(dis)
addr += length
n -= 1
self.response(json.dumps(r))
def get_memory_raw(self, m):
addr = int(m.group(1))
e = m.group(3)
if e is not None:
end = int(e)
else:
end = addr
self.response("".join([chr(self.cpu.read_byte(x)) for x in range(addr, end + 1)]))
def get_memory(self, m):
addr = int(m.group(1))
e = m.group(3)
if e is not None:
end = int(e)
else:
end = addr
self.response(json.dumps(list(map(self.cpu.read_byte, range(addr, end + 1)))))
def get_status(self, m):
self.response(json.dumps(dict((x, getattr(self.cpu, x)) for x in (
"accumulator",
"x_index",
"y_index",
"stack_pointer",
"program_counter",
"sign_flag",
"overflow_flag",
"break_flag",
"decimal_mode_flag",
"interrupt_disable_flag",
"zero_flag",
"carry_flag",
))))
def post_memory(self, m):
addr = int(m.group(1))
e = m.group(3)
if e is not None:
end = int(e)
else:
end = addr
data = json.loads(self.rfile.read(int(self.headers["Content-Length"])))
for i, a in enumerate(range(addr, end + 1)):
self.cpu.write_byte(a, data[i])
self.response("")
def post_memory_raw(self, m):
addr = int(m.group(1))
e = m.group(3)
if e is not None:
end = int(e)
else:
end = addr
data = self.rfile.read(int(self.headers["Content-Length"]))
for i, a in enumerate(range(addr, end + 1)):
self.cpu.write_byte(a, data[i])
self.response("")
def post_quit(self, m):
self.cpu.quit = True
self.response("")
def post_reset(self, m):
self.cpu.reset()
self.cpu.running = True
self.response("")
class ControlHandlerFactory:
def __init__(self, cpu):
self.cpu = cpu
def __call__(self, request, client_address, server):
return ControlHandler(request, client_address, server, self.cpu)
class CPU:
    """Emulated MOS 6502 CPU core.

    Fetches and executes opcodes against a Memory object and serves a
    small HTTP control API (via ControlHandlerFactory/ControlHandler)
    on 127.0.0.1:6502 so another process can inspect and modify state.
    """
    STACK_PAGE = 0x100      # the 6502 stack lives in page 1 (0x100-0x1FF)
    RESET_VECTOR = 0xFFFC   # address of the 16-bit reset vector
    def __init__(self, options, memory):
        """Set up registers, flags, the opcode table and the control server.

        options -- parsed command-line options; options.pc, if not None,
                   overrides the PC loaded from the reset vector
        memory  -- Memory object providing byte/word read/write access
        """
        self.memory = memory
        self.control_server = BaseHTTPServer.HTTPServer(("127.0.0.1", 6502), ControlHandlerFactory(self))
        self.accumulator = 0x00
        self.x_index = 0x00
        self.y_index = 0x00
        self.carry_flag = 0
        self.zero_flag = 0
        self.interrupt_disable_flag = 0
        self.decimal_mode_flag = 0
        self.break_flag = 1
        self.overflow_flag = 0
        self.sign_flag = 0
        self.stack_pointer = 0xFF
        self.cycles = 0
        self.setup_ops()
        self.reset()
        if options.pc is not None:
            self.program_counter = options.pc
        self.running = True
        self.quit = False
    def setup_ops(self):
        """Build the 256-entry opcode dispatch table; None marks an
        unimplemented/illegal opcode."""
        self.ops = [None] * 0x100
        self.ops[0x00] = lambda: self.BRK()
        self.ops[0x01] = lambda: self.ORA(self.indirect_x_mode())
        self.ops[0x05] = lambda: self.ORA(self.zero_page_mode())
        self.ops[0x06] = lambda: self.ASL(self.zero_page_mode())
        self.ops[0x08] = lambda: self.PHP()
        self.ops[0x09] = lambda: self.ORA(self.immediate_mode())
        self.ops[0x0A] = lambda: self.ASL()
        self.ops[0x0D] = lambda: self.ORA(self.absolute_mode())
        self.ops[0x0E] = lambda: self.ASL(self.absolute_mode())
        self.ops[0x10] = lambda: self.BPL(self.relative_mode())
        self.ops[0x11] = lambda: self.ORA(self.indirect_y_mode())
        self.ops[0x15] = lambda: self.ORA(self.zero_page_x_mode())
        self.ops[0x16] = lambda: self.ASL(self.zero_page_x_mode())
        self.ops[0x18] = lambda: self.CLC()
        self.ops[0x19] = lambda: self.ORA(self.absolute_y_mode())
        self.ops[0x1D] = lambda: self.ORA(self.absolute_x_mode())
        self.ops[0x1E] = lambda: self.ASL(self.absolute_x_mode(rmw=True))
        self.ops[0x20] = lambda: self.JSR(self.absolute_mode())
        self.ops[0x21] = lambda: self.AND(self.indirect_x_mode())
        self.ops[0x24] = lambda: self.BIT(self.zero_page_mode())
        self.ops[0x25] = lambda: self.AND(self.zero_page_mode())
        self.ops[0x26] = lambda: self.ROL(self.zero_page_mode())
        self.ops[0x28] = lambda: self.PLP()
        self.ops[0x29] = lambda: self.AND(self.immediate_mode())
        self.ops[0x2A] = lambda: self.ROL()
        self.ops[0x2C] = lambda: self.BIT(self.absolute_mode())
        self.ops[0x2D] = lambda: self.AND(self.absolute_mode())
        self.ops[0x2E] = lambda: self.ROL(self.absolute_mode())
        self.ops[0x30] = lambda: self.BMI(self.relative_mode())
        self.ops[0x31] = lambda: self.AND(self.indirect_y_mode())
        self.ops[0x35] = lambda: self.AND(self.zero_page_x_mode())
        self.ops[0x36] = lambda: self.ROL(self.zero_page_x_mode())
        self.ops[0x38] = lambda: self.SEC()
        self.ops[0x39] = lambda: self.AND(self.absolute_y_mode())
        self.ops[0x3D] = lambda: self.AND(self.absolute_x_mode())
        self.ops[0x3E] = lambda: self.ROL(self.absolute_x_mode(rmw=True))
        self.ops[0x40] = lambda: self.RTI()
        self.ops[0x41] = lambda: self.EOR(self.indirect_x_mode())
        self.ops[0x45] = lambda: self.EOR(self.zero_page_mode())
        self.ops[0x46] = lambda: self.LSR(self.zero_page_mode())
        self.ops[0x48] = lambda: self.PHA()
        self.ops[0x49] = lambda: self.EOR(self.immediate_mode())
        self.ops[0x4A] = lambda: self.LSR()
        self.ops[0x4C] = lambda: self.JMP(self.absolute_mode())
        self.ops[0x4D] = lambda: self.EOR(self.absolute_mode())
        self.ops[0x4E] = lambda: self.LSR(self.absolute_mode())
        self.ops[0x50] = lambda: self.BVC(self.relative_mode())
        self.ops[0x51] = lambda: self.EOR(self.indirect_y_mode())
        self.ops[0x55] = lambda: self.EOR(self.zero_page_x_mode())
        self.ops[0x56] = lambda: self.LSR(self.zero_page_x_mode())
        self.ops[0x58] = lambda: self.CLI()
        self.ops[0x59] = lambda: self.EOR(self.absolute_y_mode())
        self.ops[0x5D] = lambda: self.EOR(self.absolute_x_mode())
        self.ops[0x5E] = lambda: self.LSR(self.absolute_x_mode(rmw=True))
        self.ops[0x60] = lambda: self.RTS()
        self.ops[0x61] = lambda: self.ADC(self.indirect_x_mode())
        self.ops[0x65] = lambda: self.ADC(self.zero_page_mode())
        self.ops[0x66] = lambda: self.ROR(self.zero_page_mode())
        self.ops[0x68] = lambda: self.PLA()
        self.ops[0x69] = lambda: self.ADC(self.immediate_mode())
        self.ops[0x6A] = lambda: self.ROR()
        self.ops[0x6C] = lambda: self.JMP(self.indirect_mode())
        self.ops[0x6D] = lambda: self.ADC(self.absolute_mode())
        self.ops[0x6E] = lambda: self.ROR(self.absolute_mode())
        self.ops[0x70] = lambda: self.BVS(self.relative_mode())
        self.ops[0x71] = lambda: self.ADC(self.indirect_y_mode())
        self.ops[0x75] = lambda: self.ADC(self.zero_page_x_mode())
        self.ops[0x76] = lambda: self.ROR(self.zero_page_x_mode())
        self.ops[0x78] = lambda: self.SEI()
        self.ops[0x79] = lambda: self.ADC(self.absolute_y_mode())
        self.ops[0x7D] = lambda: self.ADC(self.absolute_x_mode())
        self.ops[0x7E] = lambda: self.ROR(self.absolute_x_mode(rmw=True))
        self.ops[0x81] = lambda: self.STA(self.indirect_x_mode())
        self.ops[0x84] = lambda: self.STY(self.zero_page_mode())
        self.ops[0x85] = lambda: self.STA(self.zero_page_mode())
        self.ops[0x86] = lambda: self.STX(self.zero_page_mode())
        self.ops[0x88] = lambda: self.DEY()
        self.ops[0x8A] = lambda: self.TXA()
        self.ops[0x8C] = lambda: self.STY(self.absolute_mode())
        self.ops[0x8D] = lambda: self.STA(self.absolute_mode())
        self.ops[0x8E] = lambda: self.STX(self.absolute_mode())
        self.ops[0x90] = lambda: self.BCC(self.relative_mode())
        self.ops[0x91] = lambda: self.STA(self.indirect_y_mode(rmw=True))
        self.ops[0x94] = lambda: self.STY(self.zero_page_x_mode())
        self.ops[0x95] = lambda: self.STA(self.zero_page_x_mode())
        self.ops[0x96] = lambda: self.STX(self.zero_page_y_mode())
        self.ops[0x98] = lambda: self.TYA()
        self.ops[0x99] = lambda: self.STA(self.absolute_y_mode(rmw=True))
        self.ops[0x9A] = lambda: self.TXS()
        self.ops[0x9D] = lambda: self.STA(self.absolute_x_mode(rmw=True))
        self.ops[0xA0] = lambda: self.LDY(self.immediate_mode())
        self.ops[0xA1] = lambda: self.LDA(self.indirect_x_mode())
        self.ops[0xA2] = lambda: self.LDX(self.immediate_mode())
        self.ops[0xA4] = lambda: self.LDY(self.zero_page_mode())
        self.ops[0xA5] = lambda: self.LDA(self.zero_page_mode())
        self.ops[0xA6] = lambda: self.LDX(self.zero_page_mode())
        self.ops[0xA8] = lambda: self.TAY()
        self.ops[0xA9] = lambda: self.LDA(self.immediate_mode())
        self.ops[0xAA] = lambda: self.TAX()
        self.ops[0xAC] = lambda: self.LDY(self.absolute_mode())
        self.ops[0xAD] = lambda: self.LDA(self.absolute_mode())
        self.ops[0xAE] = lambda: self.LDX(self.absolute_mode())
        self.ops[0xB0] = lambda: self.BCS(self.relative_mode())
        self.ops[0xB1] = lambda: self.LDA(self.indirect_y_mode())
        self.ops[0xB4] = lambda: self.LDY(self.zero_page_x_mode())
        self.ops[0xB5] = lambda: self.LDA(self.zero_page_x_mode())
        self.ops[0xB6] = lambda: self.LDX(self.zero_page_y_mode())
        self.ops[0xB8] = lambda: self.CLV()
        self.ops[0xB9] = lambda: self.LDA(self.absolute_y_mode())
        self.ops[0xBA] = lambda: self.TSX()
        self.ops[0xBC] = lambda: self.LDY(self.absolute_x_mode())
        self.ops[0xBD] = lambda: self.LDA(self.absolute_x_mode())
        self.ops[0xBE] = lambda: self.LDX(self.absolute_y_mode())
        self.ops[0xC0] = lambda: self.CPY(self.immediate_mode())
        self.ops[0xC1] = lambda: self.CMP(self.indirect_x_mode())
        self.ops[0xC4] = lambda: self.CPY(self.zero_page_mode())
        self.ops[0xC5] = lambda: self.CMP(self.zero_page_mode())
        self.ops[0xC6] = lambda: self.DEC(self.zero_page_mode())
        self.ops[0xC8] = lambda: self.INY()
        self.ops[0xC9] = lambda: self.CMP(self.immediate_mode())
        self.ops[0xCA] = lambda: self.DEX()
        self.ops[0xCC] = lambda: self.CPY(self.absolute_mode())
        self.ops[0xCD] = lambda: self.CMP(self.absolute_mode())
        self.ops[0xCE] = lambda: self.DEC(self.absolute_mode())
        self.ops[0xD0] = lambda: self.BNE(self.relative_mode())
        self.ops[0xD1] = lambda: self.CMP(self.indirect_y_mode())
        self.ops[0xD5] = lambda: self.CMP(self.zero_page_x_mode())
        self.ops[0xD6] = lambda: self.DEC(self.zero_page_x_mode())
        self.ops[0xD8] = lambda: self.CLD()
        self.ops[0xD9] = lambda: self.CMP(self.absolute_y_mode())
        self.ops[0xDD] = lambda: self.CMP(self.absolute_x_mode())
        self.ops[0xDE] = lambda: self.DEC(self.absolute_x_mode(rmw=True))
        self.ops[0xE0] = lambda: self.CPX(self.immediate_mode())
        self.ops[0xE1] = lambda: self.SBC(self.indirect_x_mode())
        self.ops[0xE4] = lambda: self.CPX(self.zero_page_mode())
        self.ops[0xE5] = lambda: self.SBC(self.zero_page_mode())
        self.ops[0xE6] = lambda: self.INC(self.zero_page_mode())
        self.ops[0xE8] = lambda: self.INX()
        self.ops[0xE9] = lambda: self.SBC(self.immediate_mode())
        self.ops[0xEA] = lambda: self.NOP()
        self.ops[0xEC] = lambda: self.CPX(self.absolute_mode())
        self.ops[0xED] = lambda: self.SBC(self.absolute_mode())
        self.ops[0xEE] = lambda: self.INC(self.absolute_mode())
        self.ops[0xF0] = lambda: self.BEQ(self.relative_mode())
        self.ops[0xF1] = lambda: self.SBC(self.indirect_y_mode())
        self.ops[0xF5] = lambda: self.SBC(self.zero_page_x_mode())
        self.ops[0xF6] = lambda: self.INC(self.zero_page_x_mode())
        self.ops[0xF8] = lambda: self.SED()
        self.ops[0xF9] = lambda: self.SBC(self.absolute_y_mode())
        self.ops[0xFD] = lambda: self.SBC(self.absolute_x_mode())
        self.ops[0xFE] = lambda: self.INC(self.absolute_x_mode(rmw=True))
    def reset(self):
        """Load the program counter from the reset vector."""
        self.program_counter = self.read_word(self.RESET_VECTOR)
    def run(self, bus_port):
        """Main loop: connect to the GUI's bus socket, service control-server
        requests, and execute instructions in batches of 1000 while running."""
        global bus
        bus = socket.socket()
        bus.connect(("127.0.0.1", bus_port))
        while not self.quit:
            # Only block in select() when the CPU is paused.
            timeout = 0
            if not self.running:
                timeout = 1
            # Currently this handler blocks from the moment
            # a connection is accepted until the response
            # is sent. TODO: use an async HTTP server that
            # handles input data asynchronously.
            sockets = [self.control_server]
            rs, _, _ = select.select(sockets, [], [], timeout)
            for s in rs:
                if s is self.control_server:
                    self.control_server._handle_request_noblock()
                else:
                    pass
            count = 1000
            while count > 0 and self.running:
                self.cycles += 2 # all instructions take this as a minimum
                op = self.read_pc_byte()
                func = self.ops[op]
                if func is None:
                    print "UNKNOWN OP"
                    print hex(self.program_counter - 1)
                    print hex(op)
                    break
                else:
                    self.ops[op]()
                count -= 1
    def test_run(self, start, end):
        """Execute from `start` until the PC reaches `end` or an unknown
        opcode is hit (used by the test harness; no control server polling)."""
        self.program_counter = start
        while True:
            self.cycles += 2 # all instructions take this as a minimum
            if self.program_counter == end:
                break
            op = self.read_pc_byte()
            func = self.ops[op]
            if func is None:
                print "UNKNOWN OP"
                print hex(self.program_counter - 1)
                print hex(op)
                break
            else:
                self.ops[op]()
    ####
    def get_pc(self, inc=1):
        """Return the current PC, then advance it by `inc` bytes."""
        pc = self.program_counter
        self.program_counter += inc
        return pc
    def read_byte(self, address):
        """Read one byte through the Memory object (cycle count passed along)."""
        return self.memory.read_byte(self.cycles, address)
    def read_word(self, address):
        """Read a 16-bit little-endian word through the Memory object."""
        return self.memory.read_word(self.cycles, address)
    def read_word_bug(self, address):
        # NOTE(review): presumably emulates the 6502 page-wrap quirk for
        # indirect word fetches; the actual behaviour is implemented in
        # Memory.read_word_bug -- confirm there.
        return self.memory.read_word_bug(self.cycles, address)
    def read_pc_byte(self):
        """Fetch the byte at PC and advance PC by 1."""
        return self.read_byte(self.get_pc())
    def read_pc_word(self):
        """Fetch the word at PC and advance PC by 2."""
        return self.read_word(self.get_pc(2))
    def write_byte(self, address, value):
        """Write one byte through the Memory object."""
        self.memory.write_byte(self.cycles, address, value)
    ####
    def status_from_byte(self, status):
        """Unpack a P-register byte into the individual flag attributes.
        Bit 5 (0x20) is unused on the 6502 and ignored here."""
        self.carry_flag = [0, 1][0 != status & 1]
        self.zero_flag = [0, 1][0 != status & 2]
        self.interrupt_disable_flag = [0, 1][0 != status & 4]
        self.decimal_mode_flag = [0, 1][0 != status & 8]
        self.break_flag = [0, 1][0 != status & 16]
        self.overflow_flag = [0, 1][0 != status & 64]
        self.sign_flag = [0, 1][0 != status & 128]
    def status_as_byte(self):
        """Pack the flags into a P-register byte; unused bit 5 reads as 1."""
        return self.carry_flag | self.zero_flag << 1 | self.interrupt_disable_flag << 2 | self.decimal_mode_flag << 3 | self.break_flag << 4 | 1 << 5 | self.overflow_flag << 6 | self.sign_flag << 7
    ####
    def push_byte(self, byte):
        """Push a byte onto the page-1 stack; SP wraps within the page."""
        self.write_byte(self.STACK_PAGE + self.stack_pointer, byte)
        self.stack_pointer = (self.stack_pointer - 1) % 0x100
    def pull_byte(self):
        """Pop a byte from the page-1 stack; SP wraps within the page."""
        self.stack_pointer = (self.stack_pointer + 1) % 0x100
        return self.read_byte(self.STACK_PAGE + self.stack_pointer)
    def push_word(self, word):
        """Push a 16-bit word (high byte first, so it pops little-endian)."""
        hi, lo = divmod(word, 0x100)
        self.push_byte(hi)
        self.push_byte(lo)
    def pull_word(self):
        # NOTE(review): unlike push_byte/pull_byte this does not wrap the
        # stack pointer modulo 0x100 -- confirm SP can never exceed 0xFF here.
        s = self.STACK_PAGE + self.stack_pointer + 1
        self.stack_pointer += 2
        return self.read_word(s)
    ####
    # Addressing modes: each returns the effective operand address and
    # charges the appropriate extra cycles beyond the base 2.
    def immediate_mode(self):
        """Operand is the byte at PC itself."""
        return self.get_pc()
    def absolute_mode(self):
        """Operand address is the 16-bit word following the opcode."""
        self.cycles += 2
        return self.read_pc_word()
    def absolute_x_mode(self, rmw=False):
        """Absolute address plus X; rmw charges the extra write cycle."""
        if rmw:
            self.cycles += 1
        return self.absolute_mode() + self.x_index
    def absolute_y_mode(self, rmw=False):
        """Absolute address plus Y; rmw charges the extra write cycle."""
        if rmw:
            self.cycles += 1
        return self.absolute_mode() + self.y_index
    def zero_page_mode(self):
        """Operand address is the single byte following the opcode."""
        self.cycles += 1
        return self.read_pc_byte()
    def zero_page_x_mode(self):
        """Zero-page address plus X, wrapping within page zero."""
        self.cycles += 1
        return (self.zero_page_mode() + self.x_index) % 0x100
    def zero_page_y_mode(self):
        """Zero-page address plus Y, wrapping within page zero."""
        self.cycles += 1
        return (self.zero_page_mode() + self.y_index) % 0x100
    def indirect_mode(self):
        """(addr): word fetched via the buggy page-wrapping word read."""
        self.cycles += 2
        return self.read_word_bug(self.absolute_mode())
    def indirect_x_mode(self):
        """(zp,X): pointer in zero page indexed by X before the fetch."""
        self.cycles += 4
        return self.read_word_bug((self.read_pc_byte() + self.x_index) % 0x100)
    def indirect_y_mode(self, rmw=False):
        """(zp),Y: zero-page pointer fetched, then Y added."""
        if rmw:
            self.cycles += 4
        else:
            self.cycles += 3
        return self.read_word_bug(self.read_pc_byte()) + self.y_index
    def relative_mode(self):
        """Branch target: PC after the offset byte plus the signed offset."""
        pc = self.get_pc()
        return pc + 1 + signed(self.read_byte(pc))
    ####
    def update_nz(self, value):
        """Truncate to 8 bits, set N and Z from the result, and return it."""
        value = value % 0x100
        self.zero_flag = [0, 1][(value == 0)]
        self.sign_flag = [0, 1][((value & 0x80) != 0)]
        return value
    def update_nzc(self, value):
        """Set carry from bit 8, then N and Z; returns the 8-bit result."""
        self.carry_flag = [0, 1][(value > 0xFF)]
        return self.update_nz(value)
    ####
    # LOAD / STORE
    def LDA(self, operand_address):
        self.accumulator = self.update_nz(self.read_byte(operand_address))
    def LDX(self, operand_address):
        self.x_index = self.update_nz(self.read_byte(operand_address))
    def LDY(self, operand_address):
        self.y_index = self.update_nz(self.read_byte(operand_address))
    def STA(self, operand_address):
        self.write_byte(operand_address, self.accumulator)
    def STX(self, operand_address):
        self.write_byte(operand_address, self.x_index)
    def STY(self, operand_address):
        self.write_byte(operand_address, self.y_index)
    # TRANSFER
    def TAX(self):
        self.x_index = self.update_nz(self.accumulator)
    def TXA(self):
        self.accumulator = self.update_nz(self.x_index)
    def TAY(self):
        self.y_index = self.update_nz(self.accumulator)
    def TYA(self):
        self.accumulator = self.update_nz(self.y_index)
    def TSX(self):
        self.x_index = self.update_nz(self.stack_pointer)
    def TXS(self):
        # TXS is the one transfer that does not affect N/Z.
        self.stack_pointer = self.x_index
    # SHIFTS / ROTATES
    def ASL(self, operand_address=None):
        """Arithmetic shift left; accumulator form when no address given."""
        if operand_address is None:
            self.accumulator = self.update_nzc(self.accumulator << 1)
        else:
            self.cycles += 2
            self.write_byte(operand_address, self.update_nzc(self.read_byte(operand_address) << 1))
    def ROL(self, operand_address=None):
        """Rotate left through carry; accumulator form when no address given."""
        if operand_address is None:
            a = self.accumulator << 1
            if self.carry_flag:
                a = a | 0x01
            self.accumulator = self.update_nzc(a)
        else:
            self.cycles += 2
            m = self.read_byte(operand_address) << 1
            if self.carry_flag:
                m = m | 0x01
            self.write_byte(operand_address, self.update_nzc(m))
    def ROR(self, operand_address=None):
        """Rotate right through carry: old carry enters bit 7 (via bit 8
        before the shift), old bit 0 becomes the new carry."""
        if operand_address is None:
            if self.carry_flag:
                self.accumulator = self.accumulator | 0x100
            self.carry_flag = self.accumulator % 2
            self.accumulator = self.update_nz(self.accumulator >> 1)
        else:
            self.cycles += 2
            m = self.read_byte(operand_address)
            if self.carry_flag:
                m = m | 0x100
            self.carry_flag = m % 2
            self.write_byte(operand_address, self.update_nz(m >> 1))
    def LSR(self, operand_address=None):
        """Logical shift right; old bit 0 becomes the carry."""
        if operand_address is None:
            self.carry_flag = self.accumulator % 2
            self.accumulator = self.update_nz(self.accumulator >> 1)
        else:
            self.cycles += 2
            self.carry_flag = self.read_byte(operand_address) % 2
            self.write_byte(operand_address, self.update_nz(self.read_byte(operand_address) >> 1))
    # JUMPS / RETURNS
    def JMP(self, operand_address):
        # absolute_mode charged 2 on top of the base 2; JMP absolute is
        # 3 cycles, hence the -1 correction.
        self.cycles -= 1
        self.program_counter = operand_address
    def JSR(self, operand_address):
        # Pushes the address of the last byte of the JSR instruction
        # (PC - 1); RTS compensates by adding 1.
        self.cycles += 2
        self.push_word(self.program_counter - 1)
        self.program_counter = operand_address
    def RTS(self):
        self.cycles += 4
        self.program_counter = self.pull_word() + 1
    # BRANCHES
    def BCC(self, operand_address):
        if not self.carry_flag:
            self.cycles += 1
            self.program_counter = operand_address
    def BCS(self, operand_address):
        if self.carry_flag:
            self.cycles += 1
            self.program_counter = operand_address
    def BEQ(self, operand_address):
        if self.zero_flag:
            self.cycles += 1
            self.program_counter = operand_address
    def BNE(self, operand_address):
        if not self.zero_flag:
            self.cycles += 1
            self.program_counter = operand_address
    def BMI(self, operand_address):
        if self.sign_flag:
            self.cycles += 1
            self.program_counter = operand_address
    def BPL(self, operand_address):
        if not self.sign_flag:
            self.cycles += 1
            self.program_counter = operand_address
    def BVC(self, operand_address):
        if not self.overflow_flag:
            self.cycles += 1
            self.program_counter = operand_address
    def BVS(self, operand_address):
        if self.overflow_flag:
            self.cycles += 1
            self.program_counter = operand_address
    # SET / CLEAR FLAGS
    def CLC(self):
        self.carry_flag = 0
    def CLD(self):
        self.decimal_mode_flag = 0
    def CLI(self):
        self.interrupt_disable_flag = 0
    def CLV(self):
        self.overflow_flag = 0
    def SEC(self):
        self.carry_flag = 1
    def SED(self):
        self.decimal_mode_flag = 1
    def SEI(self):
        self.interrupt_disable_flag = 1
    # INCREMENT / DECREMENT
    def DEC(self, operand_address):
        self.cycles += 2
        self.write_byte(operand_address, self.update_nz(self.read_byte(operand_address) - 1))
    def DEX(self):
        self.x_index = self.update_nz(self.x_index - 1)
    def DEY(self):
        self.y_index = self.update_nz(self.y_index - 1)
    def INC(self, operand_address):
        self.cycles += 2
        self.write_byte(operand_address, self.update_nz(self.read_byte(operand_address) + 1))
    def INX(self):
        self.x_index = self.update_nz(self.x_index + 1)
    def INY(self):
        self.y_index = self.update_nz(self.y_index + 1)
    # PUSH / PULL
    def PHA(self):
        self.cycles += 1
        self.push_byte(self.accumulator)
    def PHP(self):
        self.cycles += 1
        self.push_byte(self.status_as_byte())
    def PLA(self):
        self.cycles += 2
        self.accumulator = self.update_nz(self.pull_byte())
    def PLP(self):
        self.cycles += 2
        self.status_from_byte(self.pull_byte())
    # LOGIC
    def AND(self, operand_address):
        self.accumulator = self.update_nz(self.accumulator & self.read_byte(operand_address))
    def ORA(self, operand_address):
        self.accumulator = self.update_nz(self.accumulator | self.read_byte(operand_address))
    def EOR(self, operand_address):
        self.accumulator = self.update_nz(self.accumulator ^ self.read_byte(operand_address))
    # ARITHMETIC
    def ADC(self, operand_address):
        """Add with carry; computes both a signed and unsigned sum so the
        overflow flag can be derived from the signed result."""
        # @@@ doesn't handle BCD yet
        assert not self.decimal_mode_flag
        a2 = self.accumulator
        a1 = signed(a2)
        m2 = self.read_byte(operand_address)
        m1 = signed(m2)
        # twos complement addition
        result1 = a1 + m1 + self.carry_flag
        # unsigned addition
        result2 = a2 + m2 + self.carry_flag
        self.accumulator = self.update_nzc(result2)
        # perhaps this could be calculated from result2 but result1 is more intuitive
        self.overflow_flag = [0, 1][(result1 > 127) | (result1 < -128)]
    def SBC(self, operand_address):
        """Subtract with borrow; on the 6502 a clear carry means borrow,
        hence the [1, 0][carry] term."""
        # @@@ doesn't handle BCD yet
        assert not self.decimal_mode_flag
        a2 = self.accumulator
        a1 = signed(a2)
        m2 = self.read_byte(operand_address)
        m1 = signed(m2)
        # twos complement subtraction
        result1 = a1 - m1 - [1, 0][self.carry_flag]
        # unsigned subtraction
        result2 = a2 - m2 - [1, 0][self.carry_flag]
        self.accumulator = self.update_nz(result2)
        self.carry_flag = [0, 1][(result2 >= 0)]
        # perhaps this could be calculated from result2 but result1 is more intuitive
        self.overflow_flag = [0, 1][(result1 > 127) | (result1 < -128)]
    # BIT
    def BIT(self, operand_address):
        """N and V come straight from bits 7/6 of memory; Z from A & M."""
        value = self.read_byte(operand_address)
        self.sign_flag = ((value >> 7) % 2) # bit 7
        self.overflow_flag = ((value >> 6) % 2) # bit 6
        self.zero_flag = [0, 1][((self.accumulator & value) == 0)]
    # COMPARISON
    def CMP(self, operand_address):
        result = self.accumulator - self.read_byte(operand_address)
        self.carry_flag = [0, 1][(result >= 0)]
        self.update_nz(result)
    def CPX(self, operand_address):
        result = self.x_index - self.read_byte(operand_address)
        self.carry_flag = [0, 1][(result >= 0)]
        self.update_nz(result)
    def CPY(self, operand_address):
        result = self.y_index - self.read_byte(operand_address)
        self.carry_flag = [0, 1][(result >= 0)]
        self.update_nz(result)
    # SYSTEM
    def NOP(self):
        pass
    def BRK(self):
        """Software interrupt: push PC+1 and status, jump via 0xFFFE."""
        self.cycles += 5
        self.push_word(self.program_counter + 1)
        self.push_byte(self.status_as_byte())
        self.program_counter = self.read_word(0xFFFE)
        self.break_flag = 1
    def RTI(self):
        self.cycles += 4
        self.status_from_byte(self.pull_byte())
        self.program_counter = self.pull_word()
    # @@@ IRQ
    # @@@ NMI
def usage():
    """Print command-line help to stderr and exit with status 1."""
    help_lines = [
        "ApplePy - an Apple ][ emulator in Python",
        "James Tauber / http://jtauber.com/",
        "",
        "Usage: cpu6502.py [options]",
        "",
        " -b, --bus Bus port number",
        " -p, --pc Initial PC value",
        " -R, --rom ROM file to use (default A2ROM.BIN)",
        " -r, --ram RAM file to load (default none)",
    ]
    for line in help_lines:
        sys.stderr.write(line + "\n")
    sys.exit(1)
def get_options():
    """Parse sys.argv into an options object.

    Recognized flags (each takes one value argument): -b/--bus,
    -p/--pc (both ints), -R/--rom, -r/--ram (paths). Anything else
    triggers usage(), which exits.
    """
    class Options:
        def __init__(self):
            self.rom = "A2ROM.BIN"
            self.ram = None
            self.bus = None
            self.pc = None
    options = Options()
    argv = sys.argv
    index = 1
    while index < len(argv):
        arg = argv[index]
        if not arg.startswith("-"):
            usage()
        elif arg in ("-b", "--bus"):
            index += 1
            options.bus = int(argv[index])
        elif arg in ("-p", "--pc"):
            index += 1
            options.pc = int(argv[index])
        elif arg in ("-R", "--rom"):
            index += 1
            options.rom = argv[index]
        elif arg in ("-r", "--ram"):
            index += 1
            options.ram = argv[index]
        else:
            usage()
        index += 1
    return options
if __name__ == "__main__":
    options = get_options()
    if options.bus is None:
        # This process is only the CPU half; without a bus port to the
        # GUI process there is nothing useful to do standalone.
        print "ApplePy cpu core"
        print "Run applepy.py instead"
        sys.exit(0)
    mem = Memory(options)
    cpu = CPU(options, mem)
    cpu.run(options.bus)
| {
"repo_name": "jtauber/applepy",
"path": "cpu6502.py",
"copies": "1",
"size": "43783",
"license": "mit",
"hash": 8670298023766364000,
"line_mean": 34.6248982913,
"line_max": 197,
"alpha_frac": 0.5504191124,
"autogenerated": false,
"ratio": 2.993095433415368,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4043514545815368,
"avg_score": null,
"num_lines": null
} |
# ApplePy - an Apple ][ emulator in Python
# James Tauber / http://jtauber.com/
# originally written 2001, updated 2011
import curses
import socket
import struct
import subprocess
import sys
kbd = 0
def write_screen(win, address, value):
    """Render one byte stored in the Apple II text page (base 0x400)
    at its interleaved row/column position on the curses window."""
    offset = address - 0x400
    group_index, group_offset = divmod(offset, 0x80)
    row_group, column = divmod(group_offset, 0x28)
    # row group 3 is the unused "screen hole" area -- nothing to draw
    if row_group == 3:
        return
    row = group_index + 8 * row_group
    glyph = chr(0x20 + ((value + 0x20) % 0x40))
    if value < 0x40:
        attribute = curses.A_DIM
    elif value < 0x80:
        attribute = curses.A_REVERSE
    elif value < 0xA0:
        attribute = curses.A_UNDERLINE
    else:
        attribute = curses.A_DIM
    try:
        win.addch(row, column, glyph, attribute)
    except curses.error:
        # writes to the bottom-right corner etc. raise; ignore them
        pass
def read(addr, val):
    """Handle a soft-switch read from the CPU bus.

    addr 0xC000 -- keyboard data: bit 7 set while a key is pending
    addr 0xC010 -- keyboard strobe: reading clears bit 7

    Returns an int byte for every address: the original fell through and
    returned None for unhandled addresses, which made the caller's
    chr(read(addr, val)) raise TypeError; unhandled reads now yield 0x00.
    """
    global kbd
    if addr == 0xC000:
        return kbd
    elif addr == 0xC010:
        kbd = kbd & 0x7F
        return 0x00
    # unhandled soft switch / floating bus: return a defined byte
    return 0x00
def write(win, addr, val):
    """Handle a CPU bus write: mirror text-page stores to the curses screen.

    The Apple II primary text page occupies 0x400-0x7FF inclusive; the
    original test used `<= 0x800`, an off-by-one that let a store to
    0x800 (the first byte of the next page) leak onto the display.
    """
    if 0x400 <= addr < 0x800:
        write_screen(win, addr, val)
def run(win):
    """Curses front-end main loop.

    Spawns cpu6502.py as a child process, accepts its bus connection,
    then services bus messages forever: reads are answered from the
    keyboard soft switches, writes are mirrored to the curses screen,
    and pending keystrokes are latched into the keyboard register.

    NOTE(review): reads the module-level `options` set in the __main__
    block -- confirm run() is only entered via curses.wrapper(run)
    after get_options().
    """
    global kbd
    listener = socket.socket()
    listener.bind(("127.0.0.1", 0))  # port 0 = let the OS pick one
    listener.listen(0)
    args = [
        sys.executable,
        "cpu6502.py",
        "--bus", str(listener.getsockname()[1]),
        "--rom", options.rom,
    ]
    subprocess.Popen(args)
    cpu, _ = listener.accept()
    win.clear()
    curses.noecho()
    win.nodelay(True)  # make getkey() non-blocking
    while True:
        op = cpu.recv(8)
        # bus message layout: u32 cycle count, u8 read/write flag,
        # u16 address, u8 value (little-endian)
        cycle, rw, addr, val = struct.unpack("<IBHB", op)
        if rw == 0:
            # Python 2: chr() yields a 1-byte str suitable for send()
            cpu.send(chr(read(addr, val)))
        elif rw == 1:
            write(win, addr, val)
        else:
            # any other flag value ends the session
            break
        try:
            key = ord(win.getkey())
            if key == 0xA:
                key = 0xD  # map LF to CR (Apple II RETURN)
            elif key == 0x7F:
                key = 0x8  # map DEL to backspace
            # win.addstr(15, 50, hex(key))
            kbd = 0x80 | key  # bit 7 = key-pending strobe
        except curses.error:
            pass  # no key waiting
        except TypeError:
            pass  # getkey() returned a multi-char special key; ord() fails
def usage():
    """Print command-line help to stderr and exit with status 1."""
    help_lines = [
        "ApplePy - an Apple ][ emulator in Python",
        "James Tauber / http://jtauber.com/",
        "",
        "Usage: applepy_curses.py [options]",
        "",
        " -R, --rom ROM file to use (default A2ROM.BIN)",
    ]
    for line in help_lines:
        sys.stderr.write(line + "\n")
    sys.exit(1)
def get_options():
    """Parse sys.argv into an options object.

    Only -R/--rom (ROM file path) is recognized; anything else triggers
    usage(), which exits.
    """
    class Options:
        def __init__(self):
            self.rom = "A2ROM.BIN"
    options = Options()
    argv = sys.argv
    index = 1
    while index < len(argv):
        arg = argv[index]
        if not arg.startswith("-"):
            usage()
        elif arg in ("-R", "--rom"):
            index += 1
            options.rom = argv[index]
        else:
            usage()
        index += 1
    return options
if __name__ == "__main__":
    # `options` is read as a module global by run()
    options = get_options()
    curses.wrapper(run)
| {
"repo_name": "jtauber/applepy",
"path": "applepy_curses.py",
"copies": "1",
"size": "2837",
"license": "mit",
"hash": -5945539996763775000,
"line_mean": 20.3308270677,
"line_max": 80,
"alpha_frac": 0.5160380684,
"autogenerated": false,
"ratio": 3.4057623049219687,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9421336250050827,
"avg_score": 0.000092824654228163,
"num_lines": 133
} |
""" applescript -- Easy-to-use Python wrapper for NSAppleScript """
import sys
from Foundation import NSAppleScript, NSAppleEventDescriptor, NSURL, \
NSAppleScriptErrorMessage, NSAppleScriptErrorBriefMessage, \
NSAppleScriptErrorNumber, NSAppleScriptErrorAppName, NSAppleScriptErrorRange
from .aecodecs import Codecs, fourcharcode, AEType, AEEnum
from . import kae
__all__ = ['AppleScript', 'ScriptError', 'AEType', 'AEEnum', 'kMissingValue', 'kae']
######################################################################
class AppleScript:
    """ Represents a compiled AppleScript. The script object is persistent; its handlers may be called multiple times and its top-level properties will retain current state until the script object's disposal.
    """
    # shared packer/unpacker between Python values and NSAppleEventDescriptors
    _codecs = Codecs()
    def __init__(self, source=None, path=None):
        """
        source : str | None -- AppleScript source code
        path : str | None -- full path to .scpt/.applescript file
        Notes:
        - Either the path or the source argument must be provided.
        - If the script cannot be read/compiled, a ScriptError is raised.
        """
        if path:
            # NSURL has no URLWithFilePath: constructor; the Cocoa API for
            # building a file URL from a path is fileURLWithPath: (the
            # original call raised AttributeError under PyObjC).
            url = NSURL.fileURLWithPath_(path)
            self._script, errorinfo = NSAppleScript.alloc().initWithContentsOfURL_error_(url, None)
            if errorinfo:
                raise ScriptError(errorinfo)
        elif source:
            self._script = NSAppleScript.alloc().initWithSource_(source)
        else:
            raise ValueError("Missing source or path argument.")
        # scripts loaded from source still need compiling before use
        if not self._script.isCompiled():
            errorinfo = self._script.compileAndReturnError_(None)[1]
            if errorinfo:
                raise ScriptError(errorinfo)
    def __repr__(self):
        # long sources are elided in the middle to keep the repr readable
        s = self.source
        return 'AppleScript({})'.format(repr(s) if len(s) < 100 else '{}...{}'.format(repr(s)[:80], repr(s)[-17:]))
    ##
    def _newevent(self, suite, code, args):
        """Build an Apple event of the given suite/code with *args* packed
        as its direct parameter."""
        evt = NSAppleEventDescriptor.appleEventWithEventClass_eventID_targetDescriptor_returnID_transactionID_(
                fourcharcode(suite), fourcharcode(code), NSAppleEventDescriptor.nullDescriptor(), 0, 0)
        evt.setDescriptor_forKeyword_(self._codecs.pack(args), fourcharcode(kae.keyDirectObject))
        return evt
    def _unpackresult(self, result, errorinfo):
        """Convert an (NSAppleEventDescriptor, errorinfo) pair into a Python
        value, raising ScriptError when execution failed."""
        if not result:
            raise ScriptError(errorinfo)
        return self._codecs.unpack(result)
    ##
    source = property(lambda self: str(self._script.source()), doc="str -- the script's source code")
    def run(self, *args):
        """ Run the script, optionally passing arguments to its run handler.
        args : anything -- arguments to pass to script, if any; see also supported type mappings documentation
        Result : anything | None -- the script's return value, if any
        Notes:
        - The run handler must be explicitly declared in order to pass arguments.
        - AppleScript will ignore excess arguments. Passing insufficient arguments will result in an error.
        - If execution fails, a ScriptError is raised.
        """
        if args:
            # arguments require an explicit 'open application' style event
            evt = self._newevent(kae.kCoreEventClass, kae.kAEOpenApplication, args)
            return self._unpackresult(*self._script.executeAppleEvent_error_(evt, None))
        else:
            return self._unpackresult(*self._script.executeAndReturnError_(None))
    def call(self, name, *args):
        """ Call the specified user-defined handler.
        name : str -- the handler's name (case-sensitive)
        args : anything -- arguments to pass to script, if any; see documentation for supported types
        Result : anything | None -- the script's return value, if any
        Notes:
        - The handler's name must be a user-defined identifier, not an AppleScript keyword; e.g. 'myCount' is acceptable; 'count' is not.
        - AppleScript will ignore excess arguments. Passing insufficient arguments will result in an error.
        - If execution fails, a ScriptError is raised.
        """
        evt = self._newevent(kae.kASAppleScriptSuite, kae.kASPrepositionalSubroutine, args)
        # the handler name travels in the subroutine-name keyword parameter
        evt.setDescriptor_forKeyword_(NSAppleEventDescriptor.descriptorWithString_(name),
                fourcharcode(kae.keyASSubroutineName))
        return self._unpackresult(*self._script.executeAppleEvent_error_(evt, None))
##
class ScriptError(Exception):
    """ Raised when AppleScript compilation or execution fails; wraps the
    NSAppleScript error-info dictionary. """
    def __init__(self, errorinfo):
        self._errorinfo = dict(errorinfo)
    def __repr__(self):
        return 'ScriptError({})'.format(self._errorinfo)
    @property
    def message(self):
        """ str -- the error message """
        text = self._errorinfo.get(NSAppleScriptErrorMessage)
        return text or self._errorinfo.get(NSAppleScriptErrorBriefMessage, 'Script Error')
    @property
    def number(self):
        """ int | None -- the error number, if given """
        return self._errorinfo.get(NSAppleScriptErrorNumber)
    @property
    def appname(self):
        """ str | None -- the name of the application that reported the error, where relevant """
        return self._errorinfo.get(NSAppleScriptErrorAppName)
    @property
    def range(self):
        """ (int, int) -- the start and end points (1-indexed) within the source code where the error occurred """
        rng = self._errorinfo.get(NSAppleScriptErrorRange)
        if not rng:
            return None
        start = rng.rangeValue().location
        return (start, start + rng.rangeValue().length)
    def __str__(self):
        msg = self.message
        for template, value in ((' ({})', self.number),
                                (' app={!r}', self.appname),
                                (' range={0[0]}-{0[1]}', self.range)):
            if value is not None:
                msg += template.format(value)
        # Python 2 can't always print arbitrary unicode; 3 can
        return msg if sys.version_info.major >= 3 else msg.encode('ascii', 'replace')
##
kMissingValue = AEType(kae.cMissingValue) # convenience constant
| {
"repo_name": "encima/NeuroSocket",
"path": "libs/py-applescript-1.0.0/applescript/__init__.py",
"copies": "1",
"size": "5510",
"license": "mit",
"hash": -1070258348018598500,
"line_mean": 33.012345679,
"line_max": 205,
"alpha_frac": 0.700907441,
"autogenerated": false,
"ratio": 3.5825747724317294,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4783482213431729,
"avg_score": null,
"num_lines": null
} |
# applesingle - a module to decode AppleSingle files
import struct
import MacOS
import sys
Error="applesingle.Error"  # string exception (legacy Python idiom) raised by decode()
verbose=0  # module-level debug switch: non-zero makes decode() print progress
# File header format: magic, version, unused, number of entries
AS_HEADER_FORMAT="ll16sh"
AS_HEADER_LENGTH=26
# The flag words for AppleSingle
AS_MAGIC=0x00051600
AS_VERSION=0x00020000
# Entry header format: id, offset, length
AS_ENTRY_FORMAT="lll"
AS_ENTRY_LENGTH=12
# The id values
AS_DATAFORK=1
AS_RESOURCEFORK=2
AS_IGNORE=(3,4,5,6,8,9,10,11,12,13,14,15)  # entry ids decode() silently skips
def decode(input, output, resonly=0):
    """Decode an AppleSingle file, writing its forks out.

    input   -- file path or an already-open binary file object
    output  -- destination path; the data fork is written as a plain
               file, the resource fork via MacOS.openrf (Mac-specific)
    resonly -- if true, only the resource fork is extracted, as a plain
               data file

    Raises Error (a string exception -- legacy Python) on any malformed
    header, malformed entry, short read, unknown fork id, or when no
    usable fork was found.
    """
    if type(input) == type(''):
        input = open(input, 'rb')
    # Should we also test for FSSpecs or FSRefs?
    header = input.read(AS_HEADER_LENGTH)
    try:
        magic, version, dummy, nentry = struct.unpack(AS_HEADER_FORMAT, header)
    except ValueError, arg:
        raise Error, "Unpack header error: %s"%arg
    if verbose:
        print 'Magic: 0x%8.8x'%magic
        print 'Version: 0x%8.8x'%version
        print 'Entries: %d'%nentry
    if magic != AS_MAGIC:
        raise Error, 'Unknown AppleSingle magic number 0x%8.8x'%magic
    if version != AS_VERSION:
        raise Error, 'Unknown AppleSingle version number 0x%8.8x'%version
    if nentry <= 0:
        raise Error, "AppleSingle file contains no forks"
    # all entry headers immediately follow the file header
    headers = [input.read(AS_ENTRY_LENGTH) for i in range(nentry)]
    didwork = 0
    for hdr in headers:
        try:
            id, offset, length = struct.unpack(AS_ENTRY_FORMAT, hdr)
        except ValueError, arg:
            raise Error, "Unpack entry error: %s"%arg
        if verbose:
            print 'Fork %d, offset %d, length %d'%(id, offset, length)
        input.seek(offset)
        if length == 0:
            data = ''
        else:
            data = input.read(length)
        if len(data) != length:
            raise Error, 'Short read: expected %d bytes got %d'%(length, len(data))
        if id == AS_DATAFORK:
            if verbose:
                print ' (data fork)'
            if not resonly:
                didwork = 1
                fp = open(output, 'wb')
                fp.write(data)
                fp.close()
        elif id == AS_RESOURCEFORK:
            didwork = 1
            if verbose:
                print ' (resource fork)'
            if resonly:
                # resource-only mode: dump the fork as an ordinary file
                fp = open(output, 'wb')
            else:
                # write into the file's real resource fork (Mac-specific)
                fp = MacOS.openrf(output, 'wb')
            fp.write(data)
            fp.close()
        elif id in AS_IGNORE:
            if verbose:
                print ' (ignored)'
        else:
            raise Error, 'Unknown fork type %d'%id
    if not didwork:
        raise Error, 'No useful forks found'
def _test():
    """Command-line driver: applesingle.py [-r] applesinglefile decodedfile"""
    # `and` binds tighter than `or`: the argument-count check of exactly 4
    # only applies when the -r flag is present.
    if len(sys.argv) < 3 or sys.argv[1] == '-r' and len(sys.argv) != 4:
        print 'Usage: applesingle.py [-r] applesinglefile decodedfile'
        sys.exit(1)
    if sys.argv[1] == '-r':
        resonly = 1
        del sys.argv[1]
    else:
        resonly = 0
    decode(sys.argv[1], sys.argv[2], resonly=resonly)
if __name__ == '__main__':
    _test()
| {
"repo_name": "MalloyPower/parsing-python",
"path": "front-end/testsuite-python-lib/Python-2.3/Lib/plat-mac/applesingle.py",
"copies": "1",
"size": "3023",
"license": "mit",
"hash": 8450934487276164000,
"line_mean": 29.24,
"line_max": 83,
"alpha_frac": 0.5616936818,
"autogenerated": false,
"ratio": 3.466743119266055,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9426255800041041,
"avg_score": 0.02043620020500259,
"num_lines": 100
} |
# Applesoft BASIC de-tokenizer
# James Tauber / jtauber.com
import sys
# Applesoft BASIC token bytes (0x80-0xEA) mapped to their keyword text.
# Hoisted to module level so the table is built once instead of on
# every call to token().
_TOKENS = {
    0x80: "END",
    0x81: "FOR",
    0x82: "NEXT",
    0x83: "DATA",
    0x84: "INPUT",
    0x85: "DEL",
    0x86: "DIM",
    0x87: "READ",
    0x88: "GR",
    0x89: "TEXT",
    0x8A: "PR #",
    0x8B: "IN #",
    0x8C: "CALL",
    0x8D: "PLOT",
    0x8E: "HLIN",
    0x8F: "VLIN",
    0x90: "HGR2",
    0x91: "HGR",
    0x92: "HCOLOR=",
    0x93: "HPLOT",
    0x94: "DRAW",
    0x95: "XDRAW",
    0x96: "HTAB",
    0x97: "HOME",
    0x98: "ROT=",
    0x99: "SCALE=",
    0x9A: "SHLOAD",
    0x9B: "TRACE",
    0x9C: "NOTRACE",
    0x9D: "NORMAL",
    0x9E: "INVERSE",
    0x9F: "FLASH",
    0xA0: "COLOR=",
    0xA1: "POP",
    0xA2: "VTAB",
    0xA3: "HIMEM:",
    0xA4: "LOMEM:",
    0xA5: "ONERR",
    0xA6: "RESUME",
    0xA7: "RECALL",
    0xA8: "STORE",
    0xA9: "SPEED=",
    0xAA: "LET",
    0xAB: "GOTO",
    0xAC: "RUN",
    0xAD: "IF",
    0xAE: "RESTORE",
    0xAF: "&",
    0xB0: "GOSUB",
    0xB1: "RETURN",
    0xB2: "REM",
    0xB3: "STOP",
    0xB4: "ON",
    0xB5: "WAIT",
    0xB6: "LOAD",
    0xB7: "SAVE",
    0xB8: "DEF FN",
    0xB9: "POKE",
    0xBA: "PRINT",
    0xBB: "CONT",
    0xBC: "LIST",
    0xBD: "CLEAR",
    0xBE: "GET",
    0xBF: "NEW",
    0xC0: "TAB",
    0xC1: "TO",
    0xC2: "FN",
    0xC3: "SPC(",
    0xC4: "THEN",
    0xC5: "AT",
    0xC6: "NOT",
    0xC7: "STEP",
    0xC8: "+",
    0xC9: "-",
    0xCA: "*",
    0xCB: "/",
    0xCC: ";",
    0xCD: "AND",
    0xCE: "OR",
    0xCF: ">",
    0xD0: "=",
    0xD1: "<",
    0xD2: "SGN",
    0xD3: "INT",
    0xD4: "ABS",
    0xD5: "USR",
    0xD6: "FRE",
    0xD7: "SCRN (",
    0xD8: "PDL",
    0xD9: "POS",
    0xDA: "SQR",
    0xDB: "RND",
    0xDC: "LOG",
    0xDD: "EXP",
    0xDE: "COS",
    0xDF: "SIN",
    0xE0: "TAN",
    0xE1: "ATN",
    0xE2: "PEEK",
    0xE3: "LEN",
    0xE4: "STR$",
    0xE5: "VAL",
    0xE6: "ASC",
    0xE7: "CHR$",
    0xE8: "LEFT$",
    0xE9: "RIGHT$",
    0xEA: "MID$",
}


def token(d):
    """Return the Applesoft keyword text for token byte *d*.

    Raises KeyError for byte values outside the token range
    (same behavior as the original per-call dict lookup).
    """
    return _TOKENS[d]
class Detokenize:
    """Sequentially decode a tokenized Applesoft BASIC program.

    ``data`` is an indexable sequence of byte values (ints); the cursor
    ``index`` advances as bytes are consumed.
    """

    def __init__(self, data):
        self.data = data
        self.index = 0

    def read_byte(self):
        """Return the next byte value and advance the cursor."""
        value = self.data[self.index]
        self.index += 1
        return value

    def read_word(self):
        """Return the next 16-bit little-endian word."""
        low = self.read_byte()
        high = self.read_byte()
        return low + 0x100 * high

    def detokenize(self):
        """Print the de-tokenized program listing to stdout."""
        length = self.read_word()
        while self.index < length:
            self.read_word()  # next-line pointer; value unused here
            line_number = self.read_word()
            sys.stdout.write("{} ".format(line_number))
            while True:
                byte = self.read_byte()
                if byte == 0x00:  # end-of-line marker
                    print()
                    break
                if byte <= 0x7F:  # plain ASCII character
                    sys.stdout.write(chr(byte))
                else:  # token byte -> keyword lookup
                    sys.stdout.write(" {} ".format(token(byte)))
class ApplesoftHandler:
    """Accumulate disk-sector bytes, then de-tokenize them on exit."""

    def __init__(self):
        self.data = []

    def __enter__(self):
        # Hand back the callback that collects sector data.
        return self.receive_sector_data

    def __exit__(self, exc_type, exc_value, traceback):
        Detokenize(self.data).detokenize()

    def receive_sector_data(self, sector_data):
        """Append the bytes of one sector to the accumulated program."""
        self.data.extend(sector_data)
| {
"repo_name": "jtauber/a2disk",
"path": "applesoft.py",
"copies": "1",
"size": "3580",
"license": "mit",
"hash": -7841984743241998000,
"line_mean": 20.696969697,
"line_max": 61,
"alpha_frac": 0.4008379888,
"autogenerated": false,
"ratio": 2.8940986257073567,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.37949366145073565,
"avg_score": null,
"num_lines": null
} |
"""Applet for setting ip options: tcp windows, tcp quickack
Usage:
exec 202-ip-options [host] [initcwnd:[number]] [initrwnd:[number]]
[quickack:[on|off]] [min-rto:[time]]
Check Success:
ip r s
ss -i
Examples:
- exec-applet 202-ip-options alpha initcwnd:12 quickack:on
- exec-applet 202-ip-options beta initrwnd:4
- exec-applet 202-ip-options beta initcwnd:20 initrwnd:20 quickack:on
- exec-applet 202-ip-options alpha min-rto:50ms
Hints:
- [time] can be in sec and ms e.g. (1s, 200ms, 0.5ms -> 1ms)
- min-rto adjustments do not affect ongoing open connections
- if ip options are missing, they probably should be added here
- restoration is covered by 009-network or by setting the device down
"""
def print_usage(x):
x.p.msg("\n 202-ip-options:\n", False)
x.p.msg(" applet for setting ip options: tcp windows, tcp quickack, min "
"rto\n", False)
x.p.msg("\n usage:\n", False)
x.p.msg(" - exec 202-ip-options [host] [initcwnd:[number]] "
"[initrwnd:[number]] [quickack:[on|off]] [min-rto:[time]]\n",
False)
x.p.msg("\n check success:\n", False)
x.p.msg(" - ip r s\n", False)
x.p.msg(" - ss -i\n", False)
x.p.msg("\n examples:\n", False)
x.p.msg(" - exec-applet 202-ip-options alpha initcwnd:12 initrwnd:12 "
"quickack:on\n", False)
x.p.msg(" - exec-applet 202-ip-options alpha min-rto:50ms\n", False)
x.p.msg(" - exec-applet 202-ip-options beta quickack:on initcwnd:12\n\n",
False)
x.p.msg("\n hints:\n", False)
x.p.msg(" - [time] can be in sec and milli-sec e.g. (1s, 200ms, 0.5ms -> "
"1ms)"
"\n\n", False)
def print_wrong_usage(x):
x.p.err("error: wrong usage\n")
x.p.err("use: [host] [initcwnd:[number]] [initrwnd:[number]] "
"[quickack:[on|off]] [min-rto:[time]]\n")
def set_ip_options(x, dic, options):
    """Apply *options* to the default data route of the target host.

    Returns True on success, False if the remote command failed.
    """
    cmd = ("ip route change default via {} proto static dev {}{}"
           .format(dic["default_route_data"], dic["interface_data"], options))
    if x.ssh.exec(dic["ip_control"], dic["user"], cmd) != 0:
        x.p.err("error: ip option(s) could not be set\n")
        x.p.err("failed cmd: {}\n".format(cmd))
        return False
    return True
def main(x, conf, args):
    """Parse arguments, then apply the requested ip options on [host]."""
    if "?" in args:
        print_usage(x)
        return False
    if len(args) < 2:
        print_wrong_usage(x)
        return False
    dic = dict()
    options = ""
    try:
        dic["host_name"] = args[0]
        args.remove(dic["host_name"])
        for argument in args:
            parts = argument.split(":")
            key = parts[0]
            value = parts[1]  # IndexError here means a malformed argument
            if key == "initcwnd":
                options += " initcwnd {}".format(value)
            elif key == "initrwnd":
                options += " initrwnd {}".format(value)
            elif key == "min-rto":
                # value must carry a unit suffix (seconds or milliseconds)
                if not value.endswith("ms") and not value.endswith("s"):
                    print_wrong_usage(x)
                    return False
                options += " rto_min {}".format(value)
            elif key == "quickack":
                if value == "on":
                    options += " quickack 1"
                elif value == "off":
                    options += " quickack 0"
                else:
                    print_wrong_usage(x)
                    return False
            else:
                print_wrong_usage(x)
                return False
    except IndexError:
        print_wrong_usage(x)
        return False
    # retrieve host information from conf
    dic["user"] = conf.get_user(dic["host_name"])
    dic["ip_control"] = conf.get_control_ip(dic["host_name"])
    dic["default_route_data"] = conf.get_data_default_route(dic["host_name"])
    dic["interface_data"] = conf.get_data_iface_name(dic["host_name"])
    # apply the accumulated route options
    if not set_ip_options(x, dic, options):
        return False
    return True
| {
"repo_name": "hgn/net-applet-shuffler",
"path": "applets/202-ip-options/applet.py",
"copies": "2",
"size": "4056",
"license": "mit",
"hash": 250550157202978000,
"line_mean": 35.2142857143,
"line_max": 78,
"alpha_frac": 0.5613905325,
"autogenerated": false,
"ratio": 3.25,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9811390532500001,
"avg_score": 0,
"num_lines": 112
} |
"""Applet for setting tcp sysctl window memory values
Usage:
exec 205-tcp-window-mem [host]
[rmem-max:[byte]]
[rmem-default:[byte]]
[wmem-max:[byte]]
[wmem-default:[byte]]
[window-scaling:[on|off]]
[tcp-rmem-min:[byte]]
[tcp-rmem-default:[byte]]
[tcp-rmem-max:[byte]]
[tcp-wmem-min:[byte]]
[tcp-wmem-default:[byte]]
[tcp-wmem-max:[byte]]
Check Success:
sysctl [/net/core/[option]|/net/ipv4/[option]]
Examples:
- exec-applet 205-tcp-window-mem alpha window-scaling:on rmem-max:12582912
rmem-default:12582912
- exec-applet 205-tcp-window-mem beta window_scaling:on wmem-max:12582912
wmem-default:12582912 tcp-rmem-min:12582912 tcp-rmem-default:12582912
tcp-rmem-max:12582912
Hints:
- restoration is covered by restoring sysctls, e.g. 003-restore-sysctls or a
reboot
"""
def print_usage(x):
x.p.msg("\n 205-tcp-window-mem:\n", False)
x.p.msg(" applet for setting tcp window memory values\n", False)
x.p.msg("\n usage:\n", False)
x.p.msg(" - exec 205-tcp-window-mem [host] "
"[rmem-max:[byte]] "
"[rmem-default:[byte]] "
"[wmem-max:[byte]] "
"[wmem-default:[byte]] "
"[window-scaling:[on|off]] "
"[tcp-rmem-min:[byte]] "
"[tcp-rmem-default:[byte]] "
"[tcp-rmem-max:[byte]] "
"[tcp-wmem-min:[byte]] "
"[tcp-wmem-default:[byte]] "
"[tcp-wmem-max:[byte]]\n", False)
x.p.msg("\n check success:\n", False)
x.p.msg(" - sysctl [/net/core/[option]|/net/ipv4/[option]]\n", False)
x.p.msg("\n examples:\n", False)
x.p.msg(" - exec-applet 205-tcp-window-mem alpha window-scaling:on "
"rmem-max:12582912 rmem-default:12582912\n", False)
x.p.msg(" - exec-applet 205-tcp-window-mem beta window-scaling:on "
"wmem-max:12582912 wmem-default:12582912 tcp-rmem-min:12582912 "
"tcp-rmem-default:12582912 tcp-rmem-max:12582912\n", False)
x.p.msg("\n hints:\n", False)
x.p.msg(" - restoration is covered by restoring sysctls, e.g. "
"003-restore-sysctls or a reboot\n\n", False)
def print_wrong_usage(x):
x.p.err("error: wrong usage\n")
x.p.err("use: [host] "
"[rmem-max:[byte]] "
"[rmem-default:[byte]] "
"[wmem-max:[byte]] "
"[wmem-default:[byte]] "
"[window-scaling:[on|off]] "
"[tcp-rmem-min:[byte]] "
"[tcp-rmem-default:[byte]] "
"[tcp-rmem-max:[byte]] "
"[tcp-wmem-min:[byte]] "
"[tcp-wmem-default:[byte]] "
"[tcp-wmem-max:[byte]]\n")
def construct_tcp_mem_string(x, dic, tcp_mem_str, new_value_dict):
    """Build the "min default max" value string for /net/ipv4/<tcp_mem_str>.

    Reads the current triple from the remote host and overrides the
    positions present in *new_value_dict* (keys "0", "1", "2").
    Returns the assembled string, or False if the remote read failed.
    """
    cmd = "sysctl /net/ipv4/{}".format(tcp_mem_str)
    stdout, _, exit_code = x.ssh.exec_verbose(dic["ip_control"],
                                              dic["user"],
                                              cmd)
    if exit_code != 0:
        x.p.err("error: tcp window values could not retrieved from "
                "{}\n".format(dic["host_name"]))
        x.p.err("failed cmd: {}\n".format(cmd))
        return False
    # Right-hand side of "key = min default max", split into fields.
    current = stdout.decode("utf-8").split("=")[1].split()
    fields = []
    for position in range(3):
        fields.append(new_value_dict.get(str(position), current[position]))
    return " ".join(fields)
def set_options(x, dic, options_dict, tcp_rmem_dict, tcp_wmem_dict):
    """Apply sysctl window-memory settings on the target host.

    *options_dict* maps sysctl paths to plain values; *tcp_rmem_dict* and
    *tcp_wmem_dict* hold positional overrides ("0"/"1"/"2" -> byte value)
    for the tcp_rmem / tcp_wmem triples.  Returns True on success,
    False when any remote command fails.
    """
    for option in options_dict:
        cmd = "sysctl -w {}=\"{}\"".format(option, options_dict[option])
        exit_code = x.ssh.exec(dic["ip_control"], dic["user"], cmd)
        if exit_code != 0:
            x.p.err("error: tcp window value could not be set\n")
            x.p.err("failed cmd: {}\n".format(cmd))
            return False
    if tcp_rmem_dict:
        value = construct_tcp_mem_string(x, dic, "tcp_rmem", tcp_rmem_dict)
        # Propagate a failed remote read instead of writing "False".
        if value is False:
            return False
        cmd = "sysctl -w /net/ipv4/tcp_rmem=\"{}\"".format(value)
        exit_code = x.ssh.exec(dic["ip_control"], dic["user"], cmd)
        if exit_code != 0:
            x.p.err("error: tcp rmem value could not be set\n")
            x.p.err("failed cmd: {}\n".format(cmd))
            return False
    if tcp_wmem_dict:
        # BUG FIX: the original passed tcp_rmem_dict here, so tcp-wmem-*
        # arguments were silently ignored and rmem overrides were written
        # to tcp_wmem instead.
        value = construct_tcp_mem_string(x, dic, "tcp_wmem", tcp_wmem_dict)
        if value is False:
            return False
        cmd = "sysctl -w /net/ipv4/tcp_wmem=\"{}\"".format(value)
        exit_code = x.ssh.exec(dic["ip_control"], dic["user"], cmd)
        if exit_code != 0:
            x.p.err("error: tcp wmem value could not be set\n")
            x.p.err("failed cmd: {}\n".format(cmd))
            return False
    return True
def main(x, conf, args):
    """Parse arguments and apply tcp window/memory sysctls on [host]."""
    if "?" in args:
        print_usage(x)
        return False
    if len(args) < 2:
        print_wrong_usage(x)
        return False
    dic = dict()
    options = dict()
    # tcp triples: positions "0" (min), "1" (default), "2" (max),
    # joined with whitespace when written; handled separately from the
    # plain sysctl options for convenience.
    tcp_rmem = dict()
    tcp_wmem = dict()
    # Dispatch tables for the straightforward key -> target mappings.
    core_keys = {
        "rmem-max": "/net/core/rmem_max",
        "rmem-default": "/net/core/rmem_default",
        "wmem-max": "/net/core/wmem_max",
        "wmem-default": "/net/core/wmem_default",
    }
    rmem_keys = {"tcp-rmem-min": "0", "tcp-rmem-default": "1",
                 "tcp-rmem-max": "2"}
    wmem_keys = {"tcp-wmem-min": "0", "tcp-wmem-default": "1",
                 "tcp-wmem-max": "2"}
    try:
        dic["host_name"] = args[0]
        args.remove(dic["host_name"])
        for argument in args:
            key = argument.split(":")[0]
            value = argument.split(":")[1]
            if key in core_keys:
                options[core_keys[key]] = value
            elif key == "window-scaling":
                if value == "on":
                    options["/net/ipv4/tcp_window_scaling"] = 1
                elif value == "off":
                    options["/net/ipv4/tcp_window_scaling"] = 0
                else:
                    print_wrong_usage(x)
                    return False
            elif key in rmem_keys:
                tcp_rmem[rmem_keys[key]] = value
            elif key in wmem_keys:
                tcp_wmem[wmem_keys[key]] = value
            else:
                print_wrong_usage(x)
                return False
    except IndexError:
        print_wrong_usage(x)
        return False
    # retrieve host information from conf
    dic["user"] = conf.get_user(dic["host_name"])
    dic["ip_control"] = conf.get_control_ip(dic["host_name"])
    # apply all collected settings
    if not set_options(x, dic, options, tcp_rmem, tcp_wmem):
        return False
    return True
| {
"repo_name": "danielmgit/net-applet-shuffler",
"path": "applets/205-tcp-window-mem/applet.py",
"copies": "2",
"size": "7466",
"license": "mit",
"hash": 4860971342705012000,
"line_mean": 37.6839378238,
"line_max": 76,
"alpha_frac": 0.519957139,
"autogenerated": false,
"ratio": 3.3375055878408584,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9857462726840859,
"avg_score": 0,
"num_lines": 193
} |
"""Applet for using ipproof for data connection and transfer.
"""
import os
import time
from threading import Thread
# Name of the controller script shipped alongside this applet.
CONTROLLER_NAME = "ipproof-controller.py"
# fix for exec-applet: flipped to True by controller_thread() once the
# controller has been launched, so main() can block until start-up.
CONTROLLER_STARTED = False
# Directory containing this applet (and the controller script).
LOCAL_PATH = os.path.dirname(os.path.realpath(__file__))
# Working directory used on the remote hosts.
REMOTE_PATH = "/tmp/net-applet-shuffler"
def print_usage(x):
x.p.msg("\n 104-ipproof:\n", False)
x.p.msg(" applet for establishing a connection and transferring data from "
"a source to a destination, only the target port can be "
"specified\n", False)
x.p.msg("\n usage:\n", False)
x.p.msg(" - exec 104-ipproof [host] "
"sink:[host] "
"id:[string] "
"ipproof-client:[file_descriptor] "
"ipproof-server:[file_descriptor] "
"[server-port:[number]] "
"[transfer-size:[byte]] "
"[iterations:[number]] "
"[ack-size:[byte]] "
"[inter-send-interval:[us]]\n", False)
x.p.msg("\n examples:\n", False)
x.p.msg(" - exec-applet 104-ipproof alpha sink:beta id:42 ipproof-client:"
"/home/tcp1/bin/ipproof/unix/ipproof-client ipproof-server:"
"/home/beta/bin/ipproof/unix/ipproof-server\n", False)
x.p.msg(" - exec-applet 104-ipproof beta sink:alpha id:42 ipproof-client:"
"/home/beta/bin/ipproof/unix/ipproof-client ipproof-server:"
"/home/tcp1/bin/ipproof/unix/ipproof-server server-port:30000 "
"transfer-size:30000 iterations:1 ack-size:50 "
"inter-send-interval:25\n", False)
def print_wrong_usage(x):
x.p.err("error: wrong usage\n")
x.p.err("use: [host] "
"sink:[host] "
"id:[string] "
"ipproof-client:[file_descriptor] "
"ipproof-server:[file_descriptor] "
"[server-port:[number]] "
"[transfer-size:[byte]] "
"[iterations:[number]] "
"[ack-size:[byte]] "
"[inter-send-interval:[us]]\n")
def controller_thread(x, dic):
    """Deploy (if needed) and run the ipproof controller on the source host.

    Sets the module flag CONTROLLER_STARTED once the controller invocation
    has returned, which releases the busy-wait in main().
    """
    # Positional argument order expected by ipproof-controller.py:
    # applet_id, user_source, name_dest, user_dest, ip_dest_data,
    # ip_dest_control, ipproof_port, transfer_size, iterations,
    # ack_size, inter_send_interval, ipproof_client_path,
    # ipproof_server_path
    argument_keys = ("applet_id", "user_source", "name_dest", "user_dest",
                     "ip_dest_data", "ip_dest_control", "ipproof_port",
                     "transfer_size", "iterations", "ack_size",
                     "inter_send_interval", "ipproof_client_path",
                     "ipproof_server_path")
    arguments_string = " ".join(dic[key] for key in argument_keys)
    # Copy the controller to the source host unless it is already there.
    present = x.ssh.exec(dic["ip_source_control"], dic["user_source"],
                         "test -f {}/{}".format(REMOTE_PATH,
                                                CONTROLLER_NAME))
    if present != 0:
        x.ssh.copy_to(dic["user_source"], dic["ip_source_control"],
                      LOCAL_PATH, REMOTE_PATH, CONTROLLER_NAME,
                      CONTROLLER_NAME)
    x.ssh.exec(dic["ip_source_control"], dic["user_source"],
               "python3.5 {}/{} {}".format(REMOTE_PATH, CONTROLLER_NAME,
                                           arguments_string))
    global CONTROLLER_STARTED
    CONTROLLER_STARTED = True
def ipproof_path_tests(x, dic):
    """Verify the ipproof client/server binaries exist on their hosts.

    Returns True when both paths check out, False otherwise.
    """
    checks = (
        (dic["ip_source_control"], dic["user_source"],
         dic["ipproof_client_path"], "client"),
        (dic["ip_dest_control"], dic["user_dest"],
         dic["ipproof_server_path"], "server"),
    )
    for ip, user, path, role in checks:
        if x.ssh.exec(ip, user, "test -f {}".format(path)) != 0:
            x.p.err("error: ipproof {} not found\n".format(role))
            x.p.err("failed cmd:\n")
            x.p.err("on {}:{}: test -f {} \n".format(user, ip, path))
            return False
    return True
def main(x, conf, args):
    """Parse arguments and launch an ipproof transfer from [host] to sink.

    Mandatory leading arguments (order matters, each is removed from
    *args* as it is consumed): host, sink:, id:, ipproof-client:,
    ipproof-server:.  Remaining key:value pairs override the defaults
    below.  Returns True once the controller thread has started.
    """
    if "?" in args:
        print_usage(x)
        return False
    if not len(args) >= 5:
        print_wrong_usage(x)
        return False
    # arguments dictionary
    dic = dict()
    # default values
    dic["ipproof_port"] = "13337"
    # this results in one packet
    dic["transfer_size"] = "1448"
    dic["iterations"] = "1"
    dic["ack_size"] = "0"
    dic["inter_send_interval"] = "0"
    try:
        # Mandatory arguments are consumed front-to-back; args.remove()
        # shifts the next one into position args[0].
        dic["name_source"] = args[0]
        args.remove(dic["name_source"])
        dic["name_dest"] = args[0].split(":")[1]
        args.remove("sink:" + dic["name_dest"])
        dic["applet_id"] = args[0].split(":")[1]
        args.remove("id:" + dic["applet_id"])
        dic["ipproof_client_path"] = args[0].split(":")[1]
        args.remove("ipproof-client:" + dic["ipproof_client_path"])
        dic["ipproof_server_path"] = args[0].split(":")[1]
        args.remove("ipproof-server:" + dic["ipproof_server_path"])
        # Optional key:value overrides may appear in any order.
        for argument in args:
            key = argument.split(":")[0]
            value = argument.split(":")[1]
            if key == "server-port":
                dic["ipproof_port"] = value
            elif key == "transfer-size":
                dic["transfer_size"] = value
            elif key == "iterations":
                dic["iterations"] = value
            elif key == "ack-size":
                dic["ack_size"] = value
            elif key == "inter-send-interval":
                dic["inter_send_interval"] = value
            else:
                print_wrong_usage(x)
                return False
    except IndexError:
        # A mandatory argument was missing or lacked its ":" value.
        print_wrong_usage(x)
        return False
    dic["ip_source_control"] = conf.get_control_ip(dic["name_source"])
    dic["user_source"] = conf.get_user(dic["name_source"])
    dic["ip_dest_data"] = conf.get_data_ip(dic["name_dest"])
    dic["ip_dest_control"] = conf.get_control_ip(dic["name_dest"])
    dic["user_dest"] = conf.get_user(dic["name_dest"])
    # check if ipproof paths are valid
    if not ipproof_path_tests(x, dic):
        return False
    # start ipproof thread; the daemon thread runs the remote controller
    x.ssh.exec(dic["ip_source_control"], dic["user_source"],
               "mkdir -p /tmp/net-applet-shuffler")
    x.p.msg("Starting ipproof applet on host {}\n".format(dic["user_source"]))
    contr_thread = Thread(target=controller_thread, args=(x, dic, ))
    contr_thread.daemon = True
    contr_thread.start()
    # Block until the controller thread reports start-up (module flag).
    while not CONTROLLER_STARTED:
        time.sleep(1)
    return True
| {
"repo_name": "hgn/net-applet-shuffler",
"path": "applets/104-ipproof/applet.py",
"copies": "2",
"size": "7348",
"license": "mit",
"hash": 600769702771504900,
"line_mean": 37.4712041885,
"line_max": 79,
"alpha_frac": 0.5129286881,
"autogenerated": false,
"ratio": 3.5949119373776908,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5107840625477691,
"avg_score": null,
"num_lines": null
} |
"""Applet for using netperf for a connection.
Unfortunately it is not possible to transfer a fixed amount of data via
netperf.
"""
import os
import time
from threading import Thread
# Directory containing this applet (and the controller script).
LOCAL_NET_PATH = os.path.dirname(os.path.realpath(__file__))
# Working directory used on the remote hosts.
REMOTE_NET_PATH = "/tmp/net-applet-shuffler"
# fix for exec-applet: flipped to True by controller_thread() once the
# controller has been launched, so main() can block until start-up.
CONTROLLER_STARTED = False
def controller_thread(x, arg_d):
    """Deploy (if needed) and run the netperf controller on the source host.

    Sets the module flag CONTROLLER_STARTED once the controller invocation
    has returned, which releases the busy-wait in main().
    """
    # Positional argument order expected by netperf-controller.py:
    # applet_id, name_source, user_source, ip_source_data, port_source,
    # name_dest, user_dest, ip_dest_data, port_dest, netserver_port,
    # flow_length, flow_offset, ip_source_control, ip_dest_control
    ordered_keys = ("applet_id", "name_source", "user_source",
                    "ip_source_data", "port_source", "name_dest",
                    "user_dest", "ip_dest_data", "port_dest",
                    "netserver_port", "flow_length", "flow_offset",
                    "ip_source_control", "ip_dest_control")
    arguments_string = " ".join(arg_d[key] for key in ordered_keys)
    # Copy the controller to the source host unless it is already there.
    present = x.ssh.exec(arg_d["ip_source_control"], arg_d["user_source"],
                         "test -f {}/netperf-controller.py"
                         .format(REMOTE_NET_PATH))
    if present != 0:
        x.ssh.copy_to(arg_d["user_source"], arg_d["ip_source_control"],
                      LOCAL_NET_PATH, REMOTE_NET_PATH,
                      "netperf-controller.py", "netperf-controller.py")
    x.ssh.exec(arg_d["ip_source_control"], arg_d["user_source"],
               "python3.5 {}/{} {}".format(REMOTE_NET_PATH,
                                           "netperf-controller.py",
                                           arguments_string))
    global CONTROLLER_STARTED
    CONTROLLER_STARTED = True
def main(x, conf, args):
    """Parse arguments and launch a netperf run from [name] to sink:[name]."""
    if len(args) != 8:
        x.p.err("wrong usage. use: [name] sink:[name] id:[id] source-port:"
                "[port] sink-port:[port] length:[bytes|seconds] "
                "flow-offset:[seconds] netserver:[port]\n")
        return False
    arg_d = dict()
    # The seven key:value arguments after the source name, in order.
    positional = ("name_dest", "applet_id", "port_source", "port_dest",
                  "flow_length", "flow_offset", "netserver_port")
    try:
        arg_d["name_source"] = args[0]
        for slot, raw in zip(positional, args[1:]):
            arg_d[slot] = raw.split(":")[1]
    except IndexError:
        x.p.err("error: wrong usage\n")
        return False
    # Resolve data/control addresses and user names for both endpoints.
    for role, name in (("source", arg_d["name_source"]),
                       ("dest", arg_d["name_dest"])):
        arg_d["ip_{}_data".format(role)] = conf.get_data_ip(name)
        arg_d["ip_{}_control".format(role)] = conf.get_control_ip(name)
        arg_d["user_{}".format(role)] = conf.get_user(name)
    # Note: applet distribution happens inside the controller thread,
    # which can cause unwanted time offsets between parallel instances.
    x.p.msg("Starting netperf applet on host {}\n".format(arg_d["user_source"]))
    contr_thread = Thread(target=controller_thread, args=(x, arg_d, ))
    contr_thread.daemon = True
    contr_thread.start()
    # Block until the controller thread reports start-up (module flag).
    while not CONTROLLER_STARTED:
        time.sleep(1)
    return True
| {
"repo_name": "hgn/net-applet-shuffler",
"path": "applets/103-netperf/applet.py",
"copies": "2",
"size": "3913",
"license": "mit",
"hash": -6014018868024090000,
"line_mean": 38.13,
"line_max": 80,
"alpha_frac": 0.553028367,
"autogenerated": false,
"ratio": 3.384948096885813,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9937359179935197,
"avg_score": 0.0001234567901234568,
"num_lines": 100
} |
"""AppleTV control via fireCore's AirControl protocol
This is for jailbroken TVs running fireCore's protocol only.
See their page for details:
http://support.firecore.com/entries/21375902-3rd-Party-Control-API-AirControl-beta-
This file contains library components used to talk to the tv.
You may also be looking for the command line controller, see
papaltvvmd.py instead of this file.
"""
import logging
import plistlib
import urllib2
from xml.parsers.expat import ExpatError
from bs4 import BeautifulSoup
logger = logging.getLogger(__name__)
class AppleTV(object):
    """Client for an AppleTV running fireCore's AirControl HTTP API.

    Commands are plain HTTP GETs against *host*; responses are parsed
    with BeautifulSoup (or plistlib for the now-playing plist).
    NOTE(review): Python 2 only code (urllib2, `except ..., e` syntax).
    """
    def __init__(self,host='apple-tv.local'):
        object.__init__(self)
        # Hostname or IP address of the AppleTV.
        self.host = host
    # ===== commands
    def ls(self,path=''):
        """Return {name: identifier} entries under *path* ('' = root)."""
        return self._get_entries(path)
    def find(self):
        """Recursively list every reachable path on the device."""
        return self._find()
    def load(self,path):
        """Load the plugin addressed by the human-readable *path*."""
        tvpath = self._tvpath(path)
        self._soupify('plugin={0}'.format(tvpath))
    def now_playing(self):
        """Return now-playing info as a plist dict, or None if unparsable."""
        soup = self._soupify('np')
        try:
            return plistlib.readPlistFromString(str(soup))
        except ExpatError:
            return None
    def type(self,args):
        """Send text input (stripped) to the device.

        NOTE(review): shadows the builtin ``type`` within this class.
        """
        txt = args.strip()
        soup = self._soupify('enterText={0}'.format(txt))
    # Remote-control actions below: each sends the numbered remoteAction
    # defined by the AirControl protocol.
    def menu(self):
        self._soupify('remoteAction=1')
    def hold_menu(self):
        self._soupify('remoteAction=2')
    def up(self):
        self._soupify('remoteAction=3')
    def down(self):
        self._soupify('remoteAction=4')
    def select(self):
        self._soupify('remoteAction=5')
    def left(self):
        self._soupify('remoteAction=6')
    def right(self):
        self._soupify('remoteAction=7')
    def toggle(self):
        self._soupify('remoteAction=10')
    def playpause(self):
        # Alias for toggle(): same remote action.
        self._soupify('remoteAction=10')
    def pause(self):
        self._soupify('remoteAction=15')
    def play(self):
        self._soupify('remoteAction=16')
    def stop(self):
        self._soupify('remoteAction=17')
    def ff(self):
        self._soupify('remoteAction=18')
    def rw(self):
        self._soupify('remoteAction=19')
    def skip(self):
        self._soupify('remoteAction=20')
    def skip_back(self):
        self._soupify('remoteAction=21')
    def hold_select(self):
        self._soupify('remoteAction=22')
    # ===== internals
    def _soupify(self,path):
        """GET http://host/<path> and return the parsed BeautifulSoup.

        On connection failure, logs the error and returns a soup wrapping
        an error page instead of raising.
        """
        try:
            url = 'http://{0}/{1}'.format(self.host,path)
            logger.debug('fetching ' + url)
            f = urllib2.urlopen(url)
            return BeautifulSoup(f.read())
        except urllib2.URLError, e:
            logger.error(' failed to contact appletv ({0})'.format(e))
            return BeautifulSoup('<html><body>error {0}</body></html>'.format(e))
    def _tvpath(self,path):
        """Translate a human-readable */a/b* path into device identifiers.

        The first segment is resolved against the appliance list ('apl'),
        subsequent segments against category listings ('appcat=...').
        Raises KeyError when a segment name is unknown.
        """
        realparts = []
        for p in [p for p in path.split('/') if p != '']:
            entries = {}
            if len(realparts) == 0:
                soup = self._soupify('apl')
                for x in soup.find_all('appliance'):
                    entries[x['name']]=x['identifier']
            else:
                soup = self._soupify('appcat={0}'.format('/'.join(realparts)))
                for x in soup.find_all('category'):
                    entries[x['name']]=x['identifier']
            realparts.append(entries[p])
        return '/'.join(realparts)
    def _get_entries(self,path):
        """Return {name: identifier} for the appliances/categories at *path*."""
        entries = {}
        if path == '' or path == '/':
            soup = self._soupify('apl')
            for x in soup.find_all('appliance'):
                entries[x['name']]=x['identifier']
        else:
            soup = self._soupify('appcat={0}'.format(self._tvpath(path)))
            for x in soup.find_all('category'):
                entries[x['name']]=x['identifier']
        return entries
    def _find(self,root=''):
        """Depth-first walk collecting every path below *root*."""
        paths = []
        for p in ['{0}/{1}'.format(root,p) for p in self._get_entries(root)]:
            paths.append(p)
            subpaths = self._find(p)
            paths = paths + subpaths
        return paths
| {
"repo_name": "gdawg/papaltv",
"path": "papaltv/tv.py",
"copies": "1",
"size": "4038",
"license": "mit",
"hash": 757704718788305900,
"line_mean": 28.6911764706,
"line_max": 87,
"alpha_frac": 0.563645369,
"autogenerated": false,
"ratio": 3.7986829727187206,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9748748920608248,
"avg_score": 0.022715884222094446,
"num_lines": 136
} |
"""Application and persistence management."""
# pylint: disable=no-member, import-error, no-init, too-few-public-methods
# pylint: disable=cyclic-import, no-name-in-module, invalid-name
from functools import lru_cache
from ersa_reporting import configure, get_or_create
from ersa_reporting import record_input, commit, app, request
from ersa_reporting import require_auth, Resource, QueryResource
from ersa_reporting import BaseIngestResource
from nectar import get_domain
from .models.keystone import *
# API: thin QueryResource wrappers, one per model; routes are bound in
# setup() below.
class AccountResource(QueryResource):
    """Query endpoint exposing Account records."""
    query_class = Account
class TenantResource(QueryResource):
    """Query endpoint exposing Tenant records."""
    query_class = Tenant
class DomainResource(QueryResource):
    """Query endpoint exposing Domain records."""
    query_class = Domain
class MembershipResource(QueryResource):
    """Query endpoint exposing Membership records."""
    query_class = Membership
class AccountReferenceResource(QueryResource):
    """Query endpoint exposing AccountReference records."""
    query_class = AccountReference
class AccountReferenceMappingResource(QueryResource):
    """Query endpoint exposing AccountReferenceMapping records."""
    query_class = AccountReferenceMapping
class SnapshotResource(QueryResource):
    """Query endpoint exposing Snapshot records."""
    query_class = Snapshot
class IngestResource(BaseIngestResource):
    """Ingest endpoint for Keystone account/tenant snapshot data."""

    def ingest(self):
        """Ingest data.

        Expects a JSON list of messages whose "data" payload carries a
        timestamp plus "users" and "tenants" listings; persists accounts,
        email references, tenants and memberships for that snapshot.
        Returns an empty 204 response.
        """
        @lru_cache(maxsize=10000)
        def cache(model, **kwargs):
            # Memoise get_or_create so repeated entities within one
            # request hit the database only once.
            return get_or_create(model, **kwargs)

        for message in request.get_json(force=True):
            data = message["data"]

            snapshot = cache(Snapshot, ts=data["timestamp"])

            for account_detail in data["users"]:
                account = cache(Account, openstack_id=account_detail["id"])
                if not account_detail["email"]:
                    continue
                # Fix broken emails containing ";"
                email = account_detail["email"].split(";")[0]
                domain_name = get_domain(email)
                domain = cache(Domain,
                               name=domain_name) if domain_name else None
                reference = cache(AccountReference, value=email, domain=domain)
                cache(AccountReferenceMapping,
                      account=account,
                      reference=reference,
                      snapshot=snapshot)

            for tenant_detail in data["tenants"]:
                tenant = cache(Tenant, openstack_id=tenant_detail["id"])
                tenant.name = tenant_detail["name"]
                tenant.description = tenant_detail["description"]

                if "allocation_id" in tenant_detail:
                    try:
                        tenant.allocation = int(tenant_detail["allocation_id"])
                    except (TypeError, ValueError):
                        # BUG FIX: was a bare `except:` which also swallowed
                        # SystemExit/KeyboardInterrupt; int() can only raise
                        # TypeError or ValueError here. Non-numeric ids are
                        # still ignored rather than aborting the ingest.
                        pass

                if "users" not in tenant_detail:
                    continue

                for member in tenant_detail["users"]:
                    account = cache(Account, openstack_id=member["id"])
                    cache(Membership,
                          account=account,
                          tenant=tenant,
                          snapshot=snapshot)

        commit()

        return "", 204
def setup():
    """Let's roll.

    Build the URL -> resource-class map and register it with the shared
    Flask app via configure().
    """
    resources = {
        "/account": AccountResource,
        "/tenant": TenantResource,
        "/domain": DomainResource,
        "/membership": MembershipResource,
        "/reference": AccountReferenceResource,
        "/mapping": AccountReferenceMappingResource,
        "/snapshot": SnapshotResource,
        "/ingest": IngestResource
    }

    configure(resources)

# Configure the application at import time.
setup()
| {
"repo_name": "eResearchSA/reporting-unified",
"path": "ersa_reporting/keystone.py",
"copies": "1",
"size": "3576",
"license": "apache-2.0",
"hash": -8076970927227861000,
"line_mean": 27.157480315,
"line_max": 79,
"alpha_frac": 0.5889261745,
"autogenerated": false,
"ratio": 4.768,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.58569261745,
"avg_score": null,
"num_lines": null
} |
"""Application API.
Use VAPIX® Application API to upload, control and manage applications and their license keys.
"""
from axis.api import APIItem, APIItems
# Base path of the VAPIX applications CGI endpoints.
URL = "/axis-cgi/applications"
URL_CONTROL = f"{URL}/control.cgi"
URL_LICENSE = f"{URL}/license.cgi"
URL_LIST = f"{URL}/list.cgi"
URL_UPLOAD = f"{URL}/upload.cgi"
# Device parameter/minimum value used to detect application support.
PARAM_CGI_KEY = "Properties.EmbeddedDevelopment.Version"
PARAM_CGI_VALUE = "1.20"
# Values of Application.status reported by the device.
APPLICATION_STATE_RUNNING = "Running"
APPLICATION_STATE_STOPPED = "Stopped"
class Applications(APIItems):
    """Applications on Axis devices."""

    def __init__(self, request: object) -> None:
        """Initialize applications manager."""
        super().__init__({}, request, URL, Application)

    async def update(self) -> None:
        """Refresh data."""
        self.process_raw(await self.list())

    @staticmethod
    def pre_process_raw(raw: dict) -> dict:
        """Return a dictionary of applications keyed by name.

        The device returns a single dict when exactly one application is
        installed and a list otherwise; both shapes are normalised here.
        """
        if not raw:
            return {}

        if "application" not in raw.get("reply", {}):
            return {}

        raw_applications = raw["reply"]["application"]

        if isinstance(raw_applications, list):
            return {item["@Name"]: item for item in raw_applications}
        return {raw_applications["@Name"]: raw_applications}

    async def list(self) -> dict:
        """Retrieve information about installed applications."""
        return await self._request("post", URL_LIST)
class Application(APIItem):
    """Application item.

    Thin read-only view over one ``<application>`` entry returned by
    list.cgi; all properties read attributes from ``self.raw``.
    """

    @property
    def application_id(self) -> str:
        """Id of application."""
        return self.raw["@ApplicationID"]

    @property
    def configuration_page(self) -> str:
        """Relative URL to application configuration page."""
        return self.raw["@ConfigurationPage"]

    @property
    def license_name(self) -> str:
        """License name ("" when the device omits the attribute)."""
        return self.raw.get("@LicenseName", "")

    @property
    def license_status(self) -> str:
        """License status of application.

        License status:
            Valid = License is installed and valid.
            Invalid = License is installed but not valid.
            Missing = No license is installed.
            Custom = Custom license is used. License status cannot be retrieved.
            None = Application does not require any license.
        """
        return self.raw["@License"]

    @property
    def license_expiration_date(self) -> str:
        """Date (YYYY-MM-DD) when the license expires ("" if absent)."""
        return self.raw.get("@LicenseExpirationDate", "")

    @property
    def name(self) -> str:
        """Name of application."""
        return self.raw["@Name"]

    @property
    def nice_name(self) -> str:
        """Human-readable display name of application."""
        return self.raw["@NiceName"]

    @property
    def status(self) -> str:
        """Status of application.

        Application status:
            Running = Application is running.
            Stopped = Application is not running.
            Idle = Application is idle.
        """
        return self.raw["@Status"]

    @property
    def validation_result_page(self) -> str:
        """Complete URL to a validation or result page ("" if absent)."""
        return self.raw.get("@ValidationResult", "")

    @property
    def vendor(self) -> str:
        """Vendor of application."""
        return self.raw["@Vendor"]

    @property
    def vendor_page(self) -> str:
        """Home page URL of the application vendor.

        DOC FIX: previously misdescribed as "Vendor of application";
        the property returns the @VendorHomePage attribute.
        """
        return self.raw["@VendorHomePage"]

    @property
    def version(self) -> str:
        """Version of application."""
        return self.raw["@Version"]
| {
"repo_name": "Kane610/axis",
"path": "axis/applications/applications.py",
"copies": "1",
"size": "3771",
"license": "mit",
"hash": -611463505096164500,
"line_mean": 26.9259259259,
"line_max": 93,
"alpha_frac": 0.5984084881,
"autogenerated": false,
"ratio": 4.363425925925926,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5461834414025926,
"avg_score": null,
"num_lines": null
} |
"""Application base class for displaying data about a single object.
"""
import abc
import itertools
import logging
from .display import DisplayCommandBase
LOG = logging.getLogger(__name__)
class ShowOne(DisplayCommandBase):
    """Base class for commands that display the properties of one object.
    """

    __metaclass__ = abc.ABCMeta

    @property
    def formatter_namespace(self):
        return 'cliff.formatter.show'

    @property
    def formatter_default(self):
        return 'table'

    @abc.abstractmethod
    def take_action(self, parsed_args):
        """Return a two-part tuple with a tuple of column names
        and a tuple of values.
        """

    def produce_output(self, parsed_args, column_names, data):
        """Emit the values, limited to any columns the user requested."""
        requested = parsed_args.columns
        if requested:
            columns_to_include = [name for name in column_names
                                  if name in requested]
        else:
            columns_to_include = column_names
        # Boolean mask aligned with column_names; compress() keeps only
        # the values whose column was selected.
        mask = [name in columns_to_include for name in column_names]
        values = list(itertools.compress(data, mask))
        self.formatter.emit_one(columns_to_include,
                                values,
                                self.app.stdout,
                                parsed_args)
        return 0

    def dict2columns(self, data):
        """Implement the common task of converting a dict-based object
        to the two-column output that ShowOne expects.
        """
        if data:
            return zip(*sorted(data.items()))
        return ({}, {})
| {
"repo_name": "ralphwort/chef-repo",
"path": "build/cliff/cliff/show.py",
"copies": "5",
"size": "1635",
"license": "apache-2.0",
"hash": -3313534424571257300,
"line_mean": 28.7272727273,
"line_max": 70,
"alpha_frac": 0.5712538226,
"autogenerated": false,
"ratio": 4.644886363636363,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.7716140186236363,
"avg_score": null,
"num_lines": null
} |
"""Application base class for displaying data about a single object.
"""
import abc
import logging
import six
from .display import DisplayCommandBase
LOG = logging.getLogger(__name__)
@six.add_metaclass(abc.ABCMeta)
class ShowOne(DisplayCommandBase):
    """Base class for commands that display the properties of one object.
    """

    @property
    def formatter_namespace(self):
        return 'cliff.formatter.show'

    @property
    def formatter_default(self):
        return 'table'

    @abc.abstractmethod
    def take_action(self, parsed_args):
        """Return a two-part tuple with a tuple of column names
        and a tuple of values.
        """

    def produce_output(self, parsed_args, column_names, data):
        """Emit the values, limited to any columns the user requested."""
        requested = parsed_args.columns
        if requested:
            columns_to_include = [name for name in column_names
                                  if name in requested]
        else:
            columns_to_include = column_names
        # Boolean mask aligned with column_names; only values whose
        # column was selected survive the compression step.
        mask = [name in columns_to_include for name in column_names]
        values = list(self._compress_iterable(data, mask))
        self.formatter.emit_one(columns_to_include,
                                values,
                                self.app.stdout,
                                parsed_args)
        return 0

    def dict2columns(self, data):
        """Implement the common task of converting a dict-based object
        to the two-column output that ShowOne expects.
        """
        if data:
            return zip(*sorted(data.items()))
        return ({}, {})
| {
"repo_name": "sjsucohort6/openstack",
"path": "python/venv/lib/python2.7/site-packages/cliff/show.py",
"copies": "3",
"size": "1635",
"license": "mit",
"hash": -8892929071878354000,
"line_mean": 28.1964285714,
"line_max": 70,
"alpha_frac": 0.573088685,
"autogenerated": false,
"ratio": 4.554317548746518,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6627406233746518,
"avg_score": null,
"num_lines": null
} |
"""Application base class for displaying data.
"""
import abc
import logging
import pkg_resources
from .command import Command
LOG = logging.getLogger(__name__)
class DisplayCommandBase(Command):
    """Command base class for displaying data about a single object.
    """

    __metaclass__ = abc.ABCMeta

    def __init__(self, app, app_args):
        super(DisplayCommandBase, self).__init__(app, app_args)
        self.load_formatter_plugins()

    @abc.abstractproperty
    def formatter_namespace(self):
        "String specifying the namespace to use for loading formatter plugins."

    @abc.abstractproperty
    def formatter_default(self):
        "String specifying the name of the default formatter."

    def load_formatter_plugins(self):
        """Instantiate every formatter advertised in our entry-point
        namespace, keyed by entry-point name.
        """
        self.formatters = {}
        entry_points = pkg_resources.iter_entry_points(self.formatter_namespace)
        for entry_point in entry_points:
            try:
                # Load the plugin class, then instantiate it.
                self.formatters[entry_point.name] = entry_point.load()()
            except Exception as err:
                LOG.error(err)
                if self.app_args.debug:
                    raise
        return

    def get_parser(self, prog_name):
        """Extend the base parser with the output-formatter options."""
        parser = super(DisplayCommandBase, self).get_parser(prog_name)
        group = parser.add_argument_group(
            title='output formatters',
            description='output formatter options',
        )
        choices = sorted(self.formatters.keys())
        default = self.formatter_default
        if default not in choices:
            # Preferred formatter failed to load; fall back to the
            # first available one.
            default = choices[0]
        group.add_argument(
            '-f', '--format',
            dest='formatter',
            action='store',
            choices=choices,
            default=default,
            help='the output format, defaults to %s' % default,
        )
        group.add_argument(
            '-c', '--column',
            action='append',
            default=[],
            dest='columns',
            metavar='COLUMN',
            help='specify the column(s) to include, can be repeated',
        )
        # Give each formatter the chance to register its own options.
        for name in sorted(self.formatters):
            self.formatters[name].add_argument_group(parser)
        return parser

    @abc.abstractmethod
    def produce_output(self, parsed_args, column_names, data):
        """Use the formatter to generate the output.
        :param parsed_args: argparse.Namespace instance with argument values
        :param column_names: sequence of strings containing names
                             of output columns
        :param data: iterable with values matching the column names
        """

    def run(self, parsed_args):
        self.formatter = self.formatters[parsed_args.formatter]
        column_names, data = self.take_action(parsed_args)
        self.produce_output(parsed_args, column_names, data)
        return 0
| {
"repo_name": "neumerance/cloudloon2",
"path": ".venv/lib/python2.7/site-packages/cliff/display.py",
"copies": "2",
"size": "2893",
"license": "apache-2.0",
"hash": 7279808326036778000,
"line_mean": 32.6395348837,
"line_max": 79,
"alpha_frac": 0.6114759765,
"autogenerated": false,
"ratio": 4.621405750798722,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6232881727298722,
"avg_score": null,
"num_lines": null
} |
"""Application base class for displaying data.
"""
import abc
import logging
import stevedore
from .command import Command
LOG = logging.getLogger(__name__)
class DisplayCommandBase(Command):
    """Command base class for displaying data about a single object.
    """

    __metaclass__ = abc.ABCMeta

    def __init__(self, app, app_args):
        super(DisplayCommandBase, self).__init__(app, app_args)
        self._formatter_plugins = self._load_formatter_plugins()

    @abc.abstractproperty
    def formatter_namespace(self):
        "String specifying the namespace to use for loading formatter plugins."

    @abc.abstractproperty
    def formatter_default(self):
        "String specifying the name of the default formatter."

    def _load_formatter_plugins(self):
        # Here so tests can override
        return stevedore.ExtensionManager(
            self.formatter_namespace,
            invoke_on_load=True,
        )

    def get_parser(self, prog_name):
        """Extend the base parser with the output-formatter options."""
        parser = super(DisplayCommandBase, self).get_parser(prog_name)
        group = parser.add_argument_group(
            title='output formatters',
            description='output formatter options',
        )
        choices = sorted(self._formatter_plugins.names())
        default = self.formatter_default
        if default not in choices:
            # Preferred formatter failed to load; fall back to the
            # first available one.
            default = choices[0]
        group.add_argument(
            '-f', '--format',
            dest='formatter',
            action='store',
            choices=choices,
            default=default,
            help='the output format, defaults to %s' % default,
        )
        group.add_argument(
            '-c', '--column',
            action='append',
            default=[],
            dest='columns',
            metavar='COLUMN',
            help='specify the column(s) to include, can be repeated',
        )
        # Give each formatter the chance to register its own options.
        for extension in self._formatter_plugins:
            extension.obj.add_argument_group(parser)
        return parser

    @abc.abstractmethod
    def produce_output(self, parsed_args, column_names, data):
        """Use the formatter to generate the output.
        :param parsed_args: argparse.Namespace instance with argument values
        :param column_names: sequence of strings containing names
                             of output columns
        :param data: iterable with values matching the column names
        """

    def run(self, parsed_args):
        self.formatter = self._formatter_plugins[parsed_args.formatter].obj
        column_names, data = self.take_action(parsed_args)
        self.produce_output(parsed_args, column_names, data)
        return 0
| {
"repo_name": "citrix-openstack-build/cliff",
"path": "cliff/display.py",
"copies": "3",
"size": "2762",
"license": "apache-2.0",
"hash": -1770389552906677200,
"line_mean": 32.6829268293,
"line_max": 79,
"alpha_frac": 0.6256335988,
"autogenerated": false,
"ratio": 4.6110183639399,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6736651962739899,
"avg_score": null,
"num_lines": null
} |
"""Application base class for displaying data.
"""
import abc
# itertools.compress() only exists on Python 2.7+; emulate it with a
# generator expression on 2.6.
try:
    from itertools import compress
except ImportError:
    # for py26 compat
    from itertools import izip
    def compress(data, selectors):
        # Yield the items of data whose matching selector is truthy.
        return (d for d, s in izip(data, selectors) if s)
import logging
import six
import stevedore
from .command import Command
LOG = logging.getLogger(__name__)
@six.add_metaclass(abc.ABCMeta)
class DisplayCommandBase(Command):
    """Command base class for displaying data about a single object.
    """

    def __init__(self, app, app_args, cmd_name=None):
        super(DisplayCommandBase, self).__init__(app, app_args,
                                                 cmd_name=cmd_name)
        self._formatter_plugins = self._load_formatter_plugins()

    @abc.abstractproperty
    def formatter_namespace(self):
        "String specifying the namespace to use for loading formatter plugins."

    @abc.abstractproperty
    def formatter_default(self):
        "String specifying the name of the default formatter."

    def _load_formatter_plugins(self):
        # Here so tests can override
        return stevedore.ExtensionManager(
            self.formatter_namespace,
            invoke_on_load=True,
        )

    def get_parser(self, prog_name):
        """Extend the base parser with the output-formatter options."""
        parser = super(DisplayCommandBase, self).get_parser(prog_name)
        group = parser.add_argument_group(
            title='output formatters',
            description='output formatter options',
        )
        choices = sorted(self._formatter_plugins.names())
        default = self.formatter_default
        if default not in choices:
            # Preferred formatter failed to load; fall back to the
            # first available one.
            default = choices[0]
        group.add_argument(
            '-f', '--format',
            dest='formatter',
            action='store',
            choices=choices,
            default=default,
            help='the output format, defaults to %s' % default,
        )
        group.add_argument(
            '-c', '--column',
            action='append',
            default=[],
            dest='columns',
            metavar='COLUMN',
            help='specify the column(s) to include, can be repeated',
        )
        # Give each formatter the chance to register its own options.
        for extension in self._formatter_plugins:
            extension.obj.add_argument_group(parser)
        return parser

    @abc.abstractmethod
    def produce_output(self, parsed_args, column_names, data):
        """Use the formatter to generate the output.
        :param parsed_args: argparse.Namespace instance with argument values
        :param column_names: sequence of strings containing names
                             of output columns
        :param data: iterable with values matching the column names
        """

    def run(self, parsed_args):
        self.formatter = self._formatter_plugins[parsed_args.formatter].obj
        column_names, data = self.take_action(parsed_args)
        self.produce_output(parsed_args, column_names, data)
        return 0

    @staticmethod
    def _compress_iterable(iterable, selectors):
        # Indirection point so subclasses share one compress implementation.
        return compress(iterable, selectors)
| {
"repo_name": "enzochiau/cliff",
"path": "cliff/display.py",
"copies": "3",
"size": "3178",
"license": "apache-2.0",
"hash": -5468361583778555000,
"line_mean": 31.4285714286,
"line_max": 79,
"alpha_frac": 0.6255506608,
"autogenerated": false,
"ratio": 4.553008595988539,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 98
} |
"""Application base class for providing a list of data as output.
"""
import abc
import logging
import six
from .display import DisplayCommandBase
LOG = logging.getLogger(__name__)
@six.add_metaclass(abc.ABCMeta)
class Lister(DisplayCommandBase):
    """Command base class for providing a list of data as output.
    """

    @property
    def formatter_namespace(self):
        return 'cliff.formatter.list'

    @property
    def formatter_default(self):
        return 'table'

    @abc.abstractmethod
    def take_action(self, parsed_args):
        """Return a tuple containing the column names and an iterable
        containing the data to be listed.
        """

    def produce_output(self, parsed_args, column_names, data):
        """Emit the rows, limited to any columns the user requested."""
        requested = parsed_args.columns
        if not requested:
            columns_to_include = column_names
            data_gen = data
        else:
            columns_to_include = [name for name in column_names
                                  if name in requested]
            if not columns_to_include:
                raise ValueError('No recognized column names in %s' %
                                 str(parsed_args.columns))
            # Boolean mask aligned with column_names, used to drop
            # unselected values from every row.
            mask = [name in columns_to_include for name in column_names]
            # Lazily trim each row; rows become lists so the table
            # formatter can ask for their length.
            data_gen = (list(self._compress_iterable(row, mask))
                        for row in data)
        self.formatter.emit_list(columns_to_include,
                                 data_gen,
                                 self.app.stdout,
                                 parsed_args,
                                 )
        return 0
| {
"repo_name": "sjsucohort6/openstack",
"path": "python/venv/lib/python2.7/site-packages/cliff/lister.py",
"copies": "3",
"size": "1947",
"license": "mit",
"hash": -349192001055920640,
"line_mean": 32.5689655172,
"line_max": 69,
"alpha_frac": 0.5485362096,
"autogenerated": false,
"ratio": 4.843283582089552,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 58
} |
"""Application base class for providing a list of data as output.
"""
import abc
# itertools.compress() only exists on Python 2.7+; emulate it with a
# generator expression on 2.6.
try:
    from itertools import compress
except ImportError:
    # for py26 compat
    from itertools import izip
    def compress(data, selectors):
        # Yield the items of data whose matching selector is truthy.
        return (d for d, s in izip(data, selectors) if s)
import logging
from .display import DisplayCommandBase
LOG = logging.getLogger(__name__)
class Lister(DisplayCommandBase):
    """Command base class for providing a list of data as output.
    """

    __metaclass__ = abc.ABCMeta

    @property
    def formatter_namespace(self):
        return 'cliff.formatter.list'

    @property
    def formatter_default(self):
        return 'table'

    @abc.abstractmethod
    def take_action(self, parsed_args):
        """Return a tuple containing the column names and an iterable
        containing the data to be listed.
        """

    def produce_output(self, parsed_args, column_names, data):
        """Emit the rows, limited to any columns the user requested."""
        requested = parsed_args.columns
        if not requested:
            columns_to_include = column_names
            data_gen = data
        else:
            columns_to_include = [name for name in column_names
                                  if name in requested]
            if not columns_to_include:
                raise ValueError('No recognized column names in %s' %
                                 str(parsed_args.columns))
            # Boolean mask aligned with column_names, used to drop
            # unselected values from every row.
            mask = [name in columns_to_include for name in column_names]
            # Lazily trim each row; rows become lists so the table
            # formatter can ask for their length.
            data_gen = (list(compress(row, mask))
                        for row in data)
        self.formatter.emit_list(columns_to_include,
                                 data_gen,
                                 self.app.stdout,
                                 parsed_args,
                                 )
        return 0
| {
"repo_name": "ralphwort/chef-repo",
"path": "build/cliff/cliff/lister.py",
"copies": "5",
"size": "2129",
"license": "apache-2.0",
"hash": -8988805463612809000,
"line_mean": 31.2575757576,
"line_max": 69,
"alpha_frac": 0.5551902302,
"autogenerated": false,
"ratio": 4.805869074492099,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 66
} |
"""Application base class.
"""
import argparse
import codecs
import inspect
import locale
import logging
import logging.handlers
import os
import sys
import operator
from .complete import CompleteCommand
from .help import HelpAction, HelpCommand
from .utils import damerau_levenshtein, COST
# Make sure the cliff library has a logging handler
# in case the app developer doesn't set up logging.
# For py26 compat, create a NullHandler
# logging.NullHandler was added in Python 2.7; define an equivalent
# no-op handler when it is missing (Python 2.6).
if hasattr(logging, 'NullHandler'):
    NullHandler = logging.NullHandler
else:
    class NullHandler(logging.Handler):
        """Handler that silently discards every record."""
        def handle(self, record):
            pass
        def emit(self, record):
            pass
        def createLock(self):
            # No I/O is performed, so no lock is needed.
            self.lock = None
logging.getLogger('cliff').addHandler(NullHandler())
class App(object):
    """Application base class.
    :param description: one-liner explaining the program purpose
    :paramtype description: str
    :param version: application version number
    :paramtype version: str
    :param command_manager: plugin loader
    :paramtype command_manager: cliff.commandmanager.CommandManager
    :param stdin: Standard input stream
    :paramtype stdin: readable I/O stream
    :param stdout: Standard output stream
    :paramtype stdout: writable I/O stream
    :param stderr: Standard error output stream
    :paramtype stderr: writable I/O stream
    :param interactive_app_factory: callable to create an
                                    interactive application
    :paramtype interactive_app_factory: cliff.interactive.InteractiveApp
    """
    NAME = os.path.splitext(os.path.basename(sys.argv[0]))[0]
    LOG = logging.getLogger(NAME)
    CONSOLE_MESSAGE_FORMAT = '%(message)s'
    LOG_FILE_MESSAGE_FORMAT = \
        '[%(asctime)s] %(levelname)-8s %(name)s %(message)s'
    DEFAULT_VERBOSE_LEVEL = 1
    DEFAULT_OUTPUT_ENCODING = 'utf-8'

    def __init__(self, description, version, command_manager,
                 stdin=None, stdout=None, stderr=None,
                 interactive_app_factory=None,
                 deferred_help=False):
        """Initialize the application.
        """
        self.command_manager = command_manager
        self.command_manager.add_command('help', HelpCommand)
        self.command_manager.add_command('complete', CompleteCommand)
        self._set_streams(stdin, stdout, stderr)
        self.interactive_app_factory = interactive_app_factory
        self.deferred_help = deferred_help
        self.parser = self.build_option_parser(description, version)
        self.interactive_mode = False
        self.interpreter = None

    def _set_streams(self, stdin, stdout, stderr):
        """Record the I/O streams, wrapping them with encoders on py26."""
        try:
            locale.setlocale(locale.LC_ALL, '')
        except locale.Error:
            pass
        if sys.version_info[:2] == (2, 6):
            # Configure the input and output streams. If a stream is
            # provided, it must be configured correctly by the
            # caller. If not, make sure the versions of the standard
            # streams used by default are wrapped with encodings. This
            # works around a problem with Python 2.6 fixed in 2.7 and
            # later (http://hg.python.org/cpython/rev/e60ef17561dc/).
            lang, encoding = locale.getdefaultlocale()
            encoding = (getattr(sys.stdout, 'encoding', None) or
                        encoding or
                        self.DEFAULT_OUTPUT_ENCODING)
            self.stdin = stdin or codecs.getreader(encoding)(sys.stdin)
            self.stdout = stdout or codecs.getwriter(encoding)(sys.stdout)
            self.stderr = stderr or codecs.getwriter(encoding)(sys.stderr)
        else:
            self.stdin = stdin or sys.stdin
            self.stdout = stdout or sys.stdout
            self.stderr = stderr or sys.stderr

    def build_option_parser(self, description, version,
                            argparse_kwargs=None):
        """Return an argparse option parser for this application.
        Subclasses may override this method to extend
        the parser with more global options.
        :param description: full description of the application
        :paramtype description: str
        :param version: version number for the application
        :paramtype version: str
        :param argparse_kwargs: extra keyword argument passed to the
                                ArgumentParser constructor
        :paramtype extra_kwargs: dict
        """
        argparse_kwargs = argparse_kwargs or {}
        parser = argparse.ArgumentParser(
            description=description,
            add_help=False,
            **argparse_kwargs
        )
        parser.add_argument(
            '--version',
            action='version',
            version='%(prog)s {0}'.format(version),
        )
        parser.add_argument(
            '-v', '--verbose',
            action='count',
            dest='verbose_level',
            default=self.DEFAULT_VERBOSE_LEVEL,
            help='Increase verbosity of output. Can be repeated.',
        )
        parser.add_argument(
            '--log-file',
            action='store',
            default=None,
            help='Specify a file to log output. Disabled by default.',
        )
        parser.add_argument(
            '-q', '--quiet',
            action='store_const',
            dest='verbose_level',
            const=0,
            help='Suppress output except warnings and errors.',
        )
        if self.deferred_help:
            parser.add_argument(
                '-h', '--help',
                dest='deferred_help',
                action='store_true',
                help="Show help message and exit.",
            )
        else:
            parser.add_argument(
                '-h', '--help',
                action=HelpAction,
                nargs=0,
                default=self,  # tricky
                help="Show this help message and exit.",
            )
        parser.add_argument(
            '--debug',
            default=False,
            action='store_true',
            help='Show tracebacks on errors.',
        )
        return parser

    def configure_logging(self):
        """Create logging handlers for any log output.
        """
        root_logger = logging.getLogger('')
        root_logger.setLevel(logging.DEBUG)
        # Set up logging to a file
        if self.options.log_file:
            file_handler = logging.FileHandler(
                filename=self.options.log_file,
            )
            formatter = logging.Formatter(self.LOG_FILE_MESSAGE_FORMAT)
            file_handler.setFormatter(formatter)
            root_logger.addHandler(file_handler)
        # Always send higher-level messages to the console via stderr
        console = logging.StreamHandler(self.stderr)
        console_level = {0: logging.WARNING,
                         1: logging.INFO,
                         2: logging.DEBUG,
                         }.get(self.options.verbose_level, logging.DEBUG)
        console.setLevel(console_level)
        formatter = logging.Formatter(self.CONSOLE_MESSAGE_FORMAT)
        console.setFormatter(formatter)
        root_logger.addHandler(console)
        return

    def print_help_if_requested(self):
        """Print help and exits if deferred help is enabled and requested.
        '--help' shows the help message and exits:
         * without calling initialize_app if not self.deferred_help (default),
         * after initialize_app call if self.deferred_help,
         * during initialize_app call if self.deferred_help and subclass calls
           explicitly this method in initialize_app.
        """
        if self.deferred_help and self.options.deferred_help:
            action = HelpAction(None, None, default=self)
            action(self.parser, self.options, None, None)

    def run(self, argv):
        """Equivalent to the main program for the application.
        :param argv: input arguments and options
        :paramtype argv: list of str
        """
        try:
            self.options, remainder = self.parser.parse_known_args(argv)
            self.configure_logging()
            self.interactive_mode = not remainder
            if self.deferred_help and self.options.deferred_help and remainder:
                # When help is requested and `remainder` has any values disable
                # `deferred_help` and instead allow the help subcommand to
                # handle the request during run_subcommand(). This turns
                # "app foo bar --help" into "app help foo bar". However, when
                # `remainder` is empty use print_help_if_requested() to allow
                # for an early exit.
                # Disabling `deferred_help` here also ensures that
                # print_help_if_requested will not fire if called by a subclass
                # during its initialize_app().
                self.options.deferred_help = False
                remainder.insert(0, "help")
            self.initialize_app(remainder)
            self.print_help_if_requested()
        except Exception as err:
            if hasattr(self, 'options'):
                debug = self.options.debug
            else:
                debug = True
            if debug:
                self.LOG.exception(err)
                raise
            else:
                self.LOG.error(err)
            return 1
        result = 1
        if self.interactive_mode:
            result = self.interact()
        else:
            result = self.run_subcommand(remainder)
        return result

    # FIXME(dhellmann): Consider moving these command handling methods
    # to a separate class.
    def initialize_app(self, argv):
        """Hook for subclasses to take global initialization action
        after the arguments are parsed but before a command is run.
        Invoked only once, even in interactive mode.
        :param argv: List of arguments, including the subcommand to run.
                     Empty for interactive mode.
        """
        return

    def prepare_to_run_command(self, cmd):
        """Perform any preliminary work needed to run a command.
        :param cmd: command processor being invoked
        :paramtype cmd: cliff.command.Command
        """
        return

    def clean_up(self, cmd, result, err):
        """Hook run after a command is done to shutdown the app.
        :param cmd: command processor being invoked
        :paramtype cmd: cliff.command.Command
        :param result: return value of cmd
        :paramtype result: int
        :param err: exception or None
        :paramtype err: Exception
        """
        return

    def interact(self):
        # Defer importing .interactive as cmd2 is a slow import
        from .interactive import InteractiveApp
        if self.interactive_app_factory is None:
            self.interactive_app_factory = InteractiveApp
        self.interpreter = self.interactive_app_factory(self,
                                                        self.command_manager,
                                                        self.stdin,
                                                        self.stdout,
                                                        )
        self.interpreter.cmdloop()
        return 0

    def get_fuzzy_matches(self, cmd):
        """return fuzzy matches of unknown command
        """
        sep = '_'
        if self.command_manager.convert_underscores:
            sep = ' '
        all_cmds = [k[0] for k in self.command_manager]
        dist = []
        for candidate in sorted(all_cmds):
            prefix = candidate.split(sep)[0]
            # Give prefix match a very good score
            if candidate.startswith(cmd):
                dist.append((candidate, 0))
                continue
            # Levenshtein distance
            dist.append((candidate, damerau_levenshtein(cmd, prefix, COST)+1))
        dist = sorted(dist, key=operator.itemgetter(1, 0))
        matches = []
        i = 0
        # Collect every prefix match (score 0). Bound the scan so that a
        # run reaching the end of the list cannot raise IndexError
        # (previously this crashed when all candidates matched, or when
        # there were no candidates at all).
        while i < len(dist) and not dist[i][1]:
            matches.append(dist[i][0])
            i += 1
        # Then collect the group sharing the next-best similarity, again
        # guarding against running off the end of the list.
        if i < len(dist):
            best_similarity = dist[i][1]
            while i < len(dist) and dist[i][1] == best_similarity:
                matches.append(dist[i][0])
                i += 1
        return matches

    def run_subcommand(self, argv):
        """Look up and execute one subcommand, with error handling."""
        try:
            subcommand = self.command_manager.find_command(argv)
        except ValueError as err:
            # If there was no exact match, try to find a fuzzy match
            the_cmd = argv[0]
            fuzzy_matches = self.get_fuzzy_matches(the_cmd)
            if fuzzy_matches:
                article = 'a'
                if self.NAME[0] in 'aeiou':
                    article = 'an'
                self.stdout.write('%s: \'%s\' is not %s %s command. '
                                  'See \'%s --help\'.\n'
                                  % (self.NAME, the_cmd, article,
                                     self.NAME, self.NAME))
                self.stdout.write('Did you mean one of these?\n')
                for match in fuzzy_matches:
                    self.stdout.write(' %s\n' % match)
            else:
                if self.options.debug:
                    raise
                else:
                    self.LOG.error(err)
            return 2
        cmd_factory, cmd_name, sub_argv = subcommand
        kwargs = {}
        if 'cmd_name' in inspect.getargspec(cmd_factory.__init__).args:
            kwargs['cmd_name'] = cmd_name
        cmd = cmd_factory(self, self.options, **kwargs)
        err = None
        result = 1
        try:
            self.prepare_to_run_command(cmd)
            full_name = (cmd_name
                         if self.interactive_mode
                         else ' '.join([self.NAME, cmd_name])
                         )
            cmd_parser = cmd.get_parser(full_name)
            parsed_args = cmd_parser.parse_args(sub_argv)
            result = cmd.run(parsed_args)
        except Exception as err:
            if self.options.debug:
                self.LOG.exception(err)
            else:
                self.LOG.error(err)
            try:
                self.clean_up(cmd, result, err)
            except Exception as err2:
                if self.options.debug:
                    self.LOG.exception(err2)
                else:
                    self.LOG.error('Could not clean up: %s', err2)
            if self.options.debug:
                raise
        else:
            try:
                self.clean_up(cmd, result, None)
            except Exception as err3:
                if self.options.debug:
                    self.LOG.exception(err3)
                else:
                    self.LOG.error('Could not clean up: %s', err3)
        return result
| {
"repo_name": "sjsucohort6/openstack",
"path": "python/venv/lib/python2.7/site-packages/cliff/app.py",
"copies": "3",
"size": "14793",
"license": "mit",
"hash": 9151461036704339000,
"line_mean": 36.2619647355,
"line_max": 79,
"alpha_frac": 0.5569526127,
"autogenerated": false,
"ratio": 4.632947071719387,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 397
} |
"""Application base class.
"""
import argparse
import codecs
import locale
import logging
import logging.handlers
import os
import sys
from .complete import CompleteCommand
from .help import HelpAction, HelpCommand
from .interactive import InteractiveApp
# Make sure the cliff library has a logging handler
# in case the app developer doesn't set up logging.
# For py26 compat, create a NullHandler
# logging.NullHandler was added in Python 2.7; define an equivalent
# no-op handler when it is missing (Python 2.6).
if hasattr(logging, 'NullHandler'):
    NullHandler = logging.NullHandler
else:
    class NullHandler(logging.Handler):
        """Handler that silently discards every record."""
        def handle(self, record):
            pass
        def emit(self, record):
            pass
        def createLock(self):
            # No I/O is performed, so no lock is needed.
            self.lock = None
logging.getLogger('cliff').addHandler(NullHandler())
LOG = logging.getLogger(__name__)
class App(object):
"""Application base class.
:param description: one-liner explaining the program purpose
:paramtype description: str
:param version: application version number
:paramtype version: str
:param command_manager: plugin loader
:paramtype command_manager: cliff.commandmanager.CommandManager
:param stdin: Standard input stream
:paramtype stdin: readable I/O stream
:param stdout: Standard output stream
:paramtype stdout: writable I/O stream
:param stderr: Standard error output stream
:paramtype stderr: writable I/O stream
:param interactive_app_factory: callable to create an
interactive application
:paramtype interactive_app_factory: cliff.interactive.InteractiveApp
"""
NAME = os.path.splitext(os.path.basename(sys.argv[0]))[0]
CONSOLE_MESSAGE_FORMAT = '%(message)s'
LOG_FILE_MESSAGE_FORMAT = \
'[%(asctime)s] %(levelname)-8s %(name)s %(message)s'
DEFAULT_VERBOSE_LEVEL = 1
DEFAULT_OUTPUT_ENCODING = 'utf-8'
def __init__(self, description, version, command_manager,
stdin=None, stdout=None, stderr=None,
interactive_app_factory=InteractiveApp):
"""Initialize the application.
"""
self.command_manager = command_manager
self.command_manager.add_command('help', HelpCommand)
self.command_manager.add_command('complete', CompleteCommand)
self._set_streams(stdin, stdout, stderr)
self.interactive_app_factory = interactive_app_factory
self.parser = self.build_option_parser(description, version)
self.interactive_mode = False
self.interpreter = None
def _set_streams(self, stdin, stdout, stderr):
locale.setlocale(locale.LC_ALL, '')
if sys.version_info[:2] == (2, 6):
# Configure the input and output streams. If a stream is
# provided, it must be configured correctly by the
# caller. If not, make sure the versions of the standard
# streams used by default are wrapped with encodings. This
# works around a problem with Python 2.6 fixed in 2.7 and
# later (http://hg.python.org/cpython/rev/e60ef17561dc/).
lang, encoding = locale.getdefaultlocale()
encoding = (getattr(sys.stdout, 'encoding', None)
or encoding
or self.DEFAULT_OUTPUT_ENCODING
)
self.stdin = stdin or codecs.getreader(encoding)(sys.stdin)
self.stdout = stdout or codecs.getwriter(encoding)(sys.stdout)
self.stderr = stderr or codecs.getwriter(encoding)(sys.stderr)
else:
self.stdin = stdin or sys.stdin
self.stdout = stdout or sys.stdout
self.stderr = stderr or sys.stderr
def build_option_parser(self, description, version,
argparse_kwargs=None):
"""Return an argparse option parser for this application.
Subclasses may override this method to extend
the parser with more global options.
:param description: full description of the application
:paramtype description: str
:param version: version number for the application
:paramtype version: str
:param argparse_kwargs: extra keyword argument passed to the
ArgumentParser constructor
:paramtype extra_kwargs: dict
"""
argparse_kwargs = argparse_kwargs or {}
parser = argparse.ArgumentParser(
description=description,
add_help=False,
**argparse_kwargs
)
parser.add_argument(
'--version',
action='version',
version='%(prog)s {0}'.format(version),
)
parser.add_argument(
'-v', '--verbose',
action='count',
dest='verbose_level',
default=self.DEFAULT_VERBOSE_LEVEL,
help='Increase verbosity of output. Can be repeated.',
)
parser.add_argument(
'--log-file',
action='store',
default=None,
help='Specify a file to log output. Disabled by default.',
)
parser.add_argument(
'-q', '--quiet',
action='store_const',
dest='verbose_level',
const=0,
help='suppress output except warnings and errors',
)
parser.add_argument(
'-h', '--help',
action=HelpAction,
nargs=0,
default=self, # tricky
help="show this help message and exit",
)
parser.add_argument(
'--debug',
default=False,
action='store_true',
help='show tracebacks on errors',
)
return parser
def configure_logging(self):
"""Create logging handlers for any log output.
"""
root_logger = logging.getLogger('')
root_logger.setLevel(logging.DEBUG)
# Set up logging to a file
if self.options.log_file:
file_handler = logging.FileHandler(
filename=self.options.log_file,
)
formatter = logging.Formatter(self.LOG_FILE_MESSAGE_FORMAT)
file_handler.setFormatter(formatter)
root_logger.addHandler(file_handler)
# Always send higher-level messages to the console via stderr
console = logging.StreamHandler(self.stderr)
console_level = {0: logging.WARNING,
1: logging.INFO,
2: logging.DEBUG,
}.get(self.options.verbose_level, logging.DEBUG)
console.setLevel(console_level)
formatter = logging.Formatter(self.CONSOLE_MESSAGE_FORMAT)
console.setFormatter(formatter)
root_logger.addHandler(console)
return
    def run(self, argv):
        """Equivalent to the main program for the application.

        :param argv: input arguments and options
        :paramtype argv: list of str
        """
        try:
            self.options, remainder = self.parser.parse_known_args(argv)
            self.configure_logging()
            # No leftover arguments means the user wants interactive mode.
            self.interactive_mode = not remainder
            self.initialize_app(remainder)
        except Exception as err:
            # Failure may happen before --debug has been parsed, in which
            # case self.options does not exist yet; default to debug
            # behaviour so the traceback is not hidden.
            if hasattr(self, 'options'):
                debug = self.options.debug
            else:
                debug = True
            if debug:
                LOG.exception(err)
                raise
            else:
                LOG.error(err)
            return 1
        result = 1
        if self.interactive_mode:
            result = self.interact()
        else:
            result = self.run_subcommand(remainder)
        return result
# FIXME(dhellmann): Consider moving these command handling methods
# to a separate class.
def initialize_app(self, argv):
"""Hook for subclasses to take global initialization action
after the arguments are parsed but before a command is run.
Invoked only once, even in interactive mode.
:param argv: List of arguments, including the subcommand to run.
Empty for interactive mode.
"""
return
def prepare_to_run_command(self, cmd):
"""Perform any preliminary work needed to run a command.
:param cmd: command processor being invoked
:paramtype cmd: cliff.command.Command
"""
return
def clean_up(self, cmd, result, err):
"""Hook run after a command is done to shutdown the app.
:param cmd: command processor being invoked
:paramtype cmd: cliff.command.Command
:param result: return value of cmd
:paramtype result: int
:param err: exception or None
:paramtype err: Exception
"""
return
def interact(self):
self.interpreter = self.interactive_app_factory(self,
self.command_manager,
self.stdin,
self.stdout,
)
self.interpreter.cmdloop()
return 0
    def run_subcommand(self, argv):
        """Look up and execute a single subcommand, returning its exit code.

        :param argv: arguments naming the command, plus its options
        :paramtype argv: list of str
        """
        try:
            subcommand = self.command_manager.find_command(argv)
        except ValueError as err:
            # Unknown command name.
            if self.options.debug:
                raise
            else:
                LOG.error(err)
            return 2
        cmd_factory, cmd_name, sub_argv = subcommand
        cmd = cmd_factory(self, self.options)
        err = None
        result = 1
        try:
            self.prepare_to_run_command(cmd)
            # Prefix the app name in non-interactive mode so usage/help
            # messages show the full invocation.
            full_name = (cmd_name
                         if self.interactive_mode
                         else ' '.join([self.NAME, cmd_name])
                         )
            cmd_parser = cmd.get_parser(full_name)
            parsed_args = cmd_parser.parse_args(sub_argv)
            result = cmd.run(parsed_args)
        except Exception as err:
            if self.options.debug:
                LOG.exception(err)
            else:
                LOG.error(err)
            # Give the app a chance to clean up after the failed command;
            # a clean-up failure is reported but does not mask the
            # original error.
            try:
                self.clean_up(cmd, result, err)
            except Exception as err2:
                if self.options.debug:
                    LOG.exception(err2)
                else:
                    LOG.error('Could not clean up: %s', err2)
            if self.options.debug:
                raise
        else:
            # Success path: clean up with no error object.
            try:
                self.clean_up(cmd, result, None)
            except Exception as err3:
                if self.options.debug:
                    LOG.exception(err3)
                else:
                    LOG.error('Could not clean up: %s', err3)
        return result
| {
"repo_name": "ralphwort/chef-repo",
"path": "build/cliff/cliff/app.py",
"copies": "3",
"size": "10728",
"license": "apache-2.0",
"hash": 5519229249934228000,
"line_mean": 34.1737704918,
"line_max": 77,
"alpha_frac": 0.5658090977,
"autogenerated": false,
"ratio": 4.721830985915493,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6787640083615493,
"avg_score": null,
"num_lines": null
} |
"""Application base class.
"""
import argparse
import codecs
import locale
import logging
import logging.handlers
import os
import sys
from .help import HelpAction, HelpCommand
from .interactive import InteractiveApp
# Make sure the cliff library has a logging handler
# in case the app developer doesn't set up logging.
# For py26 compat, create a NullHandler
if hasattr(logging, 'NullHandler'):
    NullHandler = logging.NullHandler
else:
    # Minimal stand-in for logging.NullHandler (added in Python 2.7):
    # silently discards every record and never needs a lock.
    class NullHandler(logging.Handler):
        def handle(self, record):
            pass

        def emit(self, record):
            pass

        def createLock(self):
            self.lock = None

logging.getLogger('cliff').addHandler(NullHandler())

# Module-level logger for this file.
LOG = logging.getLogger(__name__)
class App(object):
    """Application base class.

    :param description: one-liner explaining the program purpose
    :paramtype description: str
    :param version: application version number
    :paramtype version: str
    :param command_manager: plugin loader
    :paramtype command_manager: cliff.commandmanager.CommandManager
    :param stdin: Standard input stream
    :paramtype stdin: readable I/O stream
    :param stdout: Standard output stream
    :paramtype stdout: writable I/O stream
    :param stderr: Standard error output stream
    :paramtype stderr: writable I/O stream
    :param interactive_app_factory: callable to create an
                                    interactive application
    :paramtype interactive_app_factory: cliff.interactive.InteractiveApp
    """

    # Program name as shown in usage/help, taken from the invoking script.
    NAME = os.path.splitext(os.path.basename(sys.argv[0]))[0]

    # Log record formats for console and file output respectively.
    CONSOLE_MESSAGE_FORMAT = '%(message)s'
    LOG_FILE_MESSAGE_FORMAT = \
        '[%(asctime)s] %(levelname)-8s %(name)s %(message)s'
    # Default console verbosity (1 == INFO); see configure_logging().
    DEFAULT_VERBOSE_LEVEL = 1
    # Fallback encoding for the py2.6 stream-wrapping workaround.
    DEFAULT_OUTPUT_ENCODING = 'utf-8'

    def __init__(self, description, version, command_manager,
                 stdin=None, stdout=None, stderr=None,
                 interactive_app_factory=InteractiveApp):
        """Initialize the application.
        """
        self.command_manager = command_manager
        # The built-in "help" command is always registered.
        self.command_manager.add_command('help', HelpCommand)
        self._set_streams(stdin, stdout, stderr)
        self.interactive_app_factory = interactive_app_factory
        self.parser = self.build_option_parser(description, version)
        self.interactive_mode = False
        self.interpreter = None

    def _set_streams(self, stdin, stdout, stderr):
        """Record the I/O streams, wrapping defaults with codecs on py2.6."""
        locale.setlocale(locale.LC_ALL, '')
        if sys.version_info[:2] == (2, 6):
            # Configure the input and output streams. If a stream is
            # provided, it must be configured correctly by the
            # caller. If not, make sure the versions of the standard
            # streams used by default are wrapped with encodings. This
            # works around a problem with Python 2.6 fixed in 2.7 and
            # later (http://hg.python.org/cpython/rev/e60ef17561dc/).
            lang, encoding = locale.getdefaultlocale()
            encoding = (getattr(sys.stdout, 'encoding', None)
                        or encoding
                        or self.DEFAULT_OUTPUT_ENCODING
                        )
            self.stdin = stdin or codecs.getreader(encoding)(sys.stdin)
            self.stdout = stdout or codecs.getwriter(encoding)(sys.stdout)
            self.stderr = stderr or codecs.getwriter(encoding)(sys.stderr)
        else:
            self.stdin = stdin or sys.stdin
            self.stdout = stdout or sys.stdout
            self.stderr = stderr or sys.stderr

    def build_option_parser(self, description, version,
                            argparse_kwargs=None):
        """Return an argparse option parser for this application.

        Subclasses may override this method to extend
        the parser with more global options.

        :param description: full description of the application
        :paramtype description: str
        :param version: version number for the application
        :paramtype version: str
        :param argparse_kwargs: extra keyword argument passed to the
                                ArgumentParser constructor
        :paramtype extra_kwargs: dict
        """
        argparse_kwargs = argparse_kwargs or {}
        parser = argparse.ArgumentParser(
            description=description,
            # -h is registered manually below with the custom HelpAction.
            add_help=False,
            **argparse_kwargs
        )
        parser.add_argument(
            '--version',
            action='version',
            version='%(prog)s {0}'.format(version),
        )
        parser.add_argument(
            '-v', '--verbose',
            action='count',
            dest='verbose_level',
            default=self.DEFAULT_VERBOSE_LEVEL,
            help='Increase verbosity of output. Can be repeated.',
        )
        parser.add_argument(
            '--log-file',
            action='store',
            default=None,
            help='Specify a file to log output. Disabled by default.',
        )
        parser.add_argument(
            '-q', '--quiet',
            action='store_const',
            dest='verbose_level',
            const=0,
            help='suppress output except warnings and errors',
        )
        parser.add_argument(
            '-h', '--help',
            action=HelpAction,
            nargs=0,
            default=self,  # tricky
            help="show this help message and exit",
        )
        parser.add_argument(
            '--debug',
            default=False,
            action='store_true',
            help='show tracebacks on errors',
        )
        return parser

    def configure_logging(self):
        """Create logging handlers for any log output.
        """
        root_logger = logging.getLogger('')
        root_logger.setLevel(logging.DEBUG)
        # Set up logging to a file
        if self.options.log_file:
            file_handler = logging.FileHandler(
                filename=self.options.log_file,
            )
            formatter = logging.Formatter(self.LOG_FILE_MESSAGE_FORMAT)
            file_handler.setFormatter(formatter)
            root_logger.addHandler(file_handler)
        # Always send higher-level messages to the console via stderr
        console = logging.StreamHandler(self.stderr)
        # -v / -q set verbose_level; anything above 2 means full DEBUG.
        console_level = {0: logging.WARNING,
                         1: logging.INFO,
                         2: logging.DEBUG,
                         }.get(self.options.verbose_level, logging.DEBUG)
        console.setLevel(console_level)
        formatter = logging.Formatter(self.CONSOLE_MESSAGE_FORMAT)
        console.setFormatter(formatter)
        root_logger.addHandler(console)
        return

    def run(self, argv):
        """Equivalent to the main program for the application.

        :param argv: input arguments and options
        :paramtype argv: list of str
        """
        try:
            self.options, remainder = self.parser.parse_known_args(argv)
            self.configure_logging()
            # No leftover arguments means the user wants interactive mode.
            self.interactive_mode = not remainder
            self.initialize_app(remainder)
        except Exception as err:
            # Failure may happen before --debug has been parsed; in that
            # case self.options does not exist, so default to debug
            # behaviour rather than hiding the traceback.
            if hasattr(self, 'options'):
                debug = self.options.debug
            else:
                debug = True
            if debug:
                LOG.exception(err)
                raise
            else:
                LOG.error(err)
            return 1
        result = 1
        if self.interactive_mode:
            result = self.interact()
        else:
            result = self.run_subcommand(remainder)
        return result

    # FIXME(dhellmann): Consider moving these command handling methods
    # to a separate class.
    def initialize_app(self, argv):
        """Hook for subclasses to take global initialization action
        after the arguments are parsed but before a command is run.
        Invoked only once, even in interactive mode.

        :param argv: List of arguments, including the subcommand to run.
                     Empty for interactive mode.
        """
        return

    def prepare_to_run_command(self, cmd):
        """Perform any preliminary work needed to run a command.

        :param cmd: command processor being invoked
        :paramtype cmd: cliff.command.Command
        """
        return

    def clean_up(self, cmd, result, err):
        """Hook run after a command is done to shutdown the app.

        :param cmd: command processor being invoked
        :paramtype cmd: cliff.command.Command
        :param result: return value of cmd
        :paramtype result: int
        :param err: exception or None
        :paramtype err: Exception
        """
        return

    def interact(self):
        # Build the interactive shell and hand control to its loop;
        # always reports success (0) when the loop exits.
        self.interpreter = self.interactive_app_factory(self,
                                                        self.command_manager,
                                                        self.stdin,
                                                        self.stdout,
                                                        )
        self.interpreter.cmdloop()
        return 0

    def run_subcommand(self, argv):
        # Look up and execute a single subcommand; returns its exit code
        # (2 for an unknown command, 1 when the command raised).
        try:
            subcommand = self.command_manager.find_command(argv)
        except ValueError as err:
            if self.options.debug:
                raise
            else:
                LOG.error(err)
            return 2
        cmd_factory, cmd_name, sub_argv = subcommand
        cmd = cmd_factory(self, self.options)
        err = None
        result = 1
        try:
            self.prepare_to_run_command(cmd)
            # Prefix the app name in non-interactive mode so usage
            # messages show the full invocation.
            full_name = (cmd_name
                         if self.interactive_mode
                         else ' '.join([self.NAME, cmd_name])
                         )
            cmd_parser = cmd.get_parser(full_name)
            parsed_args = cmd_parser.parse_args(sub_argv)
            result = cmd.run(parsed_args)
        except Exception as err:
            if self.options.debug:
                LOG.exception(err)
            else:
                LOG.error(err)
            # Clean-up failures are reported but do not mask the
            # original error.
            try:
                self.clean_up(cmd, result, err)
            except Exception as err2:
                if self.options.debug:
                    LOG.exception(err2)
                else:
                    LOG.error('Could not clean up: %s', err2)
            if self.options.debug:
                raise
        else:
            # Success path: clean up with no error object.
            try:
                self.clean_up(cmd, result, None)
            except Exception as err3:
                if self.options.debug:
                    LOG.exception(err3)
                else:
                    LOG.error('Could not clean up: %s', err3)
        return result
| {
"repo_name": "neumerance/cloudloon2",
"path": ".venv/lib/python2.7/site-packages/cliff/app.py",
"copies": "2",
"size": "10620",
"license": "apache-2.0",
"hash": 2754332525688904000,
"line_mean": 34.0495049505,
"line_max": 77,
"alpha_frac": 0.563653484,
"autogenerated": false,
"ratio": 4.722098710538017,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 303
} |
"""Application base class.
"""
import itertools
import logging
import logging.handlers
import shlex
import sys
import cmd2
LOG = logging.getLogger(__name__)
class InteractiveApp(cmd2.Cmd):
    """Provides "interactive mode" features.

    Refer to the cmd2_ and cmd_ documentation for details
    about subclassing and configuring this class.

    .. _cmd2: http://packages.python.org/cmd2/index.html
    .. _cmd: http://docs.python.org/library/cmd.html

    :param parent_app: The calling application (expected to be derived
                       from :class:`cliff.main.App`).
    :param command_manager: A :class:`cliff.commandmanager.CommandManager`
                            instance.
    :param stdin: Standard input stream
    :param stdout: Standard output stream
    """

    use_rawinput = True
    doc_header = "Shell commands (type help <topic>):"
    app_cmd_header = "Application commands (type help <topic>):"

    def __init__(self, parent_app, command_manager, stdin, stdout):
        self.parent_app = parent_app
        # Only show a prompt when stdin looks like a terminal.
        if not hasattr(sys.stdin, 'isatty') or sys.stdin.isatty():
            self.prompt = '(%s) ' % parent_app.NAME
        else:
            # batch/pipe mode
            self.prompt = ''
        self.command_manager = command_manager
        cmd2.Cmd.__init__(self, 'tab', stdin=stdin, stdout=stdout)

    def default(self, line):
        # Tie in the default command processor to
        # dispatch commands known to the command manager.
        # We send the message through our parent app,
        # since it already has the logic for executing
        # the subcommand.
        line_parts = shlex.split(line.parsed.raw)
        self.parent_app.run_subcommand(line_parts)

    def completenames(self, text, *ignored):
        """Tab-completion for command prefix without completer delimiter.

        This method returns cmd style and cliff style commands matching
        provided command prefix (text).
        """
        completions = cmd2.Cmd.completenames(self, text, *ignored)
        completions += self._complete_prefix(text)
        return completions

    def completedefault(self, text, line, begidx, endidx):
        """Default tab-completion for command prefix with completer delimiter.

        This method filters only cliff style commands matching provided
        command prefix (line) as cmd2 style commands cannot contain spaces.
        This method returns text + missing command part of matching commands.
        This method does not handle options in cmd2/cliff style commands, you
        must define complete_$method to handle them.
        """
        return [x[begidx:] for x in self._complete_prefix(line)]

    def _complete_prefix(self, prefix):
        """Returns cliff style commands with a specific prefix."""
        if not prefix:
            return [n for n, v in self.command_manager]
        return [n for n, v in self.command_manager if n.startswith(prefix)]

    def help_help(self):
        # Use the command manager to get instructions for "help"
        self.default('help help')

    def do_help(self, arg):
        if arg:
            # Check if the arg is a builtin command or something
            # coming from the command manager
            arg_parts = shlex.split(arg)
            method_name = '_'.join(
                itertools.chain(
                    ['do'],
                    itertools.takewhile(lambda x: not x.startswith('-'),
                                        arg_parts)
                )
            )
            # Have the command manager version of the help
            # command produce the help text since cmd and
            # cmd2 do not provide help for "help"
            if hasattr(self, method_name):
                return cmd2.Cmd.do_help(self, arg)
            # Dispatch to the underlying help command,
            # which knows how to provide help for extension
            # commands.
            self.default(self.parsed('help ' + arg))
        else:
            cmd2.Cmd.do_help(self, arg)
            # Append the command-manager commands after cmd2's own topics.
            cmd_names = sorted([n for n, v in self.command_manager])
            self.print_topics(self.app_cmd_header, cmd_names, 15, 80)
        return

    def get_names(self):
        # Override the base class version to filter out
        # things that look like they should be hidden
        # from the user.
        return [n
                for n in cmd2.Cmd.get_names(self)
                if not n.startswith('do__')
                ]

    def precmd(self, statement):
        # Pre-process the parsed command in case it looks like one of
        # our subcommands, since cmd2 does not handle multi-part
        # command names by default.
        line_parts = shlex.split(statement.parsed.raw)
        try:
            the_cmd = self.command_manager.find_command(line_parts)
            cmd_factory, cmd_name, sub_argv = the_cmd
        except ValueError:
            # Not a plugin command
            pass
        else:
            statement.parsed.command = cmd_name
            statement.parsed.args = ' '.join(sub_argv)
        return statement

    def cmdloop(self):
        # Delegate to cmd2's internal loop implementation.
        self._cmdloop()
| {
"repo_name": "enzochiau/cliff",
"path": "cliff/interactive.py",
"copies": "3",
"size": "5137",
"license": "apache-2.0",
"hash": -7876402000540219000,
"line_mean": 35.9568345324,
"line_max": 78,
"alpha_frac": 0.604243722,
"autogenerated": false,
"ratio": 4.368197278911564,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 139
} |
"""Application base class.
"""
import itertools
import logging
import logging.handlers
import shlex
import cmd2
LOG = logging.getLogger(__name__)
class InteractiveApp(cmd2.Cmd):
    """Provides "interactive mode" features.

    Refer to the cmd2_ and cmd_ documentation for details
    about subclassing and configuring this class.

    .. _cmd2: http://packages.python.org/cmd2/index.html
    .. _cmd: http://docs.python.org/library/cmd.html

    :param parent_app: The calling application (expected to be derived
                       from :class:`cliff.main.App`).
    :param command_manager: A :class:`cliff.commandmanager.CommandManager`
                            instance.
    :param stdin: Standard input stream
    :param stdout: Standard output stream
    """

    use_rawinput = True
    doc_header = "Shell commands (type help <topic>):"
    app_cmd_header = "Application commands (type help <topic>):"

    def __init__(self, parent_app, command_manager, stdin, stdout):
        self.parent_app = parent_app
        self.prompt = '(%s) ' % parent_app.NAME
        self.command_manager = command_manager
        cmd2.Cmd.__init__(self, 'tab', stdin=stdin, stdout=stdout)

    def default(self, line):
        # Tie in the default command processor to
        # dispatch commands known to the command manager.
        # We send the message through our parent app,
        # since it already has the logic for executing
        # the subcommand.
        line_parts = shlex.split(line.parsed.raw)
        self.parent_app.run_subcommand(line_parts)

    def completedefault(self, text, line, begidx, endidx):
        # Tab-completion for commands known to the command manager.
        # Does not handle options on the commands.
        if not text:
            completions = sorted(n for n, v in self.command_manager)
        else:
            completions = sorted(n for n, v in self.command_manager
                                 if n.startswith(text)
                                 )
        return completions

    def help_help(self):
        # Use the command manager to get instructions for "help"
        self.default('help help')

    def do_help(self, arg):
        if arg:
            # Check if the arg is a builtin command or something
            # coming from the command manager
            arg_parts = shlex.split(arg)
            method_name = '_'.join(
                itertools.chain(
                    ['do'],
                    itertools.takewhile(lambda x: not x.startswith('-'),
                                        arg_parts)
                )
            )
            # Have the command manager version of the help
            # command produce the help text since cmd and
            # cmd2 do not provide help for "help"
            if hasattr(self, method_name):
                return cmd2.Cmd.do_help(self, arg)
            # Dispatch to the underlying help command,
            # which knows how to provide help for extension
            # commands.
            self.default(self.parsed('help ' + arg))
        else:
            cmd2.Cmd.do_help(self, arg)
            # Append the command-manager commands after cmd2's own topics.
            cmd_names = sorted([n for n, v in self.command_manager])
            self.print_topics(self.app_cmd_header, cmd_names, 15, 80)
        return

    def get_names(self):
        # Override the base class version to filter out
        # things that look like they should be hidden
        # from the user.
        return [n
                for n in cmd2.Cmd.get_names(self)
                if not n.startswith('do__')
                ]

    def precmd(self, statement):
        # Pre-process the parsed command in case it looks like one of
        # our subcommands, since cmd2 does not handle multi-part
        # command names by default.
        line_parts = shlex.split(statement.parsed.raw)
        try:
            the_cmd = self.command_manager.find_command(line_parts)
            cmd_factory, cmd_name, sub_argv = the_cmd
        except ValueError:
            # Not a plugin command
            pass
        else:
            statement.parsed.command = cmd_name
            statement.parsed.args = ' '.join(sub_argv)
        return statement
| {
"repo_name": "neumerance/cloudloon2",
"path": ".venv/lib/python2.7/site-packages/cliff/interactive.py",
"copies": "3",
"size": "4179",
"license": "apache-2.0",
"hash": -3568907046042083000,
"line_mean": 35.3391304348,
"line_max": 74,
"alpha_frac": 0.5824359895,
"autogenerated": false,
"ratio": 4.353125,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.64355609895,
"avg_score": null,
"num_lines": null
} |
# /application/base.py
from flask import Flask
from os.path import abspath, join, split

# Flask application object shared by all routes below.
application = Flask("PowerPortfolio")
# NOTE(review): placeholder secret key committed to source; must be replaced
# with a value loaded from configuration before any deployment.
application.secret_key = "SECRET_KEY" # TODO
application.config["DATABASE"] = "sqlite:///portfolio.db"
application.config.from_pyfile('settings.cfg')
# Project root: two directory levels above this file.
root_dir = split(split(abspath(__file__))[0])[0]
@application.route("/")
@application.route("/index.html")
def main_page():
    """ Returns the main index.html page for the website """
    index_file = join(root_dir, "static", "user", "index.html")
    with open(index_file) as page:
        return page.read()
@application.route("/static/<path>")
def static_route(path):
    """ Route a static resource, refusing paths outside static/user.

    The original implementation joined the user-supplied path directly
    under ``static/user``; a segment such as ``..`` could escape that
    directory and expose arbitrary files (path traversal). The resolved
    name is now normalized and checked to stay inside the base directory.
    """
    # Local imports keep this fix self-contained; flask is already a
    # dependency of this module.
    from os.path import normpath, sep
    from flask import abort
    base = join(root_dir, "static", "user")
    parts = path.split("/")
    # normpath collapses "." and ".." segments before the containment check.
    fname = normpath(join(base, *parts))
    if not fname.startswith(base + sep):
        # Escaped the static directory: report "not found" rather than leak.
        abort(404)
    with open(fname) as f:
        return f.read()
@application.route("/admin/")
@application.route("/admin/index.html")
def main_page_admin():
    """ Returns the main index.html page for the admin """
    index_file = join(root_dir, "static", "admin", "index.html")
    with open(index_file) as page:
        return page.read()
@application.route("/admin/static/<path>")
def static_route_admin(path):
    """ Route a static resource for the admin page, refusing paths
    outside static/admin.

    Mirrors the traversal fix in :func:`static_route`: the user-supplied
    path is normalized and must resolve inside ``static/admin``, so ``..``
    segments cannot reach arbitrary files.
    """
    # Local imports keep this fix self-contained; flask is already a
    # dependency of this module.
    from os.path import normpath, sep
    from flask import abort
    base = join(root_dir, "static", "admin")
    parts = path.split("/")
    # normpath collapses "." and ".." segments before the containment check.
    fname = normpath(join(base, *parts))
    if not fname.startswith(base + sep):
        # Escaped the static directory: report "not found" rather than leak.
        abort(404)
    with open(fname) as f:
        return f.read()
| {
"repo_name": "TumblrCommunity/PowerPortfolio",
"path": "portfolio/application/base.py",
"copies": "1",
"size": "1333",
"license": "mit",
"hash": -8132780515959871000,
"line_mean": 30.7380952381,
"line_max": 61,
"alpha_frac": 0.647411853,
"autogenerated": false,
"ratio": 3.4533678756476682,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4600779728647668,
"avg_score": null,
"num_lines": null
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.