code stringlengths 1 1.72M | language stringclasses 1 value |
|---|---|
import urllib
from django.db import models
from django.contrib.auth.models import User
from django.contrib import admin
from django.conf import settings
from django.core.mail import send_mail, mail_admins
from django.template import loader
from managers import TokenManager, ConsumerManager, ResourceManager, KEY_SIZE, SECRET_SIZE
# Lifecycle states for an API Consumer (used by Consumer.status below).
CONSUMER_STATES = (
    ('pending', 'Pending approval'),
    ('accepted', 'Accepted'),
    ('canceled', 'Canceled'),
)
class Nonce(models.Model):
    """Single-use value recorded to detect replay of signed OAuth requests."""
    token_key = models.CharField(max_length=KEY_SIZE)     # key of the token the nonce arrived with
    consumer_key = models.CharField(max_length=KEY_SIZE)  # key of the consumer that sent it
    key = models.CharField(max_length=255)                # the nonce value itself

    def __unicode__(self):
        return u"Nonce %s for %s" % (self.key, self.consumer_key)

admin.site.register(Nonce)
class Resource(models.Model):
    """A protected resource (named url) that consumers may be granted access to."""
    name = models.CharField(max_length=255)
    url = models.TextField(max_length=2047)
    is_readonly = models.BooleanField(default=True)  # read-only access by default

    objects = ResourceManager()

    def __unicode__(self):
        return u"Resource %s with url %s" % (self.name, self.url)

admin.site.register(Resource)
class Consumer(models.Model):
name = models.CharField(max_length=255)
description = models.TextField()
key = models.CharField(max_length=KEY_SIZE)
secret = models.CharField(max_length=SECRET_SIZE)
status = models.CharField(max_length=16, choices=CONSUMER_STATES, default='pending')
user = models.ForeignKey(User, null=True, blank=True, related_name='consumers')
objects = ConsumerManager()
def __unicode__(self):
return u"Consumer %s with key %s" % (self.name, self.key)
def save(self, **kwargs):
super(Consumer, self).save(**kwargs)
if self.id and self.user:
subject = "API Consumer"
rcpt = [ self.user.email, ]
if self.status == "accepted":
template = "api/mails/consumer_accepted.txt"
subject += " was accepted!"
elif self.status == "canceled":
template = "api/mails/consumer_canceled.txt"
subject += " has been canceled"
else:
template = "api/mails/consumer_pending.txt"
subject += " application received"
for admin in settings.ADMINS:
bcc.append(admin[1])
body = loader.render_to_string(template,
{ 'consumer': self, 'user': self.user })
send_mail(subject, body, settings.DEFAULT_FROM_EMAIL,
rcpt, fail_silently=True)
if self.status == 'pending':
mail_admins(subject, body, fail_silently=True)
if settings.DEBUG:
print "Mail being sent, to=%s" % rcpt
print "Subject: %s" % subject
print body
admin.site.register(Consumer)
class Token(models.Model):
    """OAuth token (request or access) issued to a consumer."""
    REQUEST = 1
    ACCESS = 2
    TOKEN_TYPES = ((REQUEST, u'Request'), (ACCESS, u'Access'))

    key = models.CharField(max_length=KEY_SIZE)
    secret = models.CharField(max_length=SECRET_SIZE)
    token_type = models.IntegerField(choices=TOKEN_TYPES)
    timestamp = models.IntegerField()  # supplied by TokenManager.create_token; presumably seconds since epoch -- TODO confirm
    is_approved = models.BooleanField(default=False)
    user = models.ForeignKey(User, null=True, blank=True, related_name='tokens')
    consumer = models.ForeignKey(Consumer)

    objects = TokenManager()

    def __unicode__(self):
        return u"%s Token %s for %s" % (self.get_token_type_display(), self.key, self.consumer)

    def to_string(self, only_key=False):
        """Serialize as url-encoded oauth_token (and secret, unless only_key)."""
        token_dict = {
            'oauth_token': self.key,
            'oauth_token_secret': self.secret
        }
        if only_key:
            del token_dict['oauth_token_secret']
        return urllib.urlencode(token_dict)

admin.site.register(Token)
| Python |
import cgi
import urllib
import time
import random
import urlparse
import hmac
import base64
VERSION = '1.0' # Hi Blaine!  -- protocol version advertised as oauth_version
HTTP_METHOD = 'GET'             # default method for OAuthRequest
SIGNATURE_METHOD = 'PLAINTEXT'  # default when oauth_signature_method is absent
# Generic exception class
class OAuthError(RuntimeError):
def get_message(self):
return self._message
def set_message(self, message):
self._message = message
message = property(get_message, set_message)
def __init__(self, message='OAuth error occured.'):
self.message = message
# optional WWW-Authenticate header (401 error)
def build_authenticate_header(realm=''):
    """Return the WWW-Authenticate header dict for a 401 OAuth response."""
    header_value = 'OAuth realm="%s"' % realm
    return {'WWW-Authenticate': header_value}
# url escape
def escape(s):
    """Percent-encode *s* for OAuth: everything except '~' is escaped
    (urllib.quote's default safe characters, including '/', are overridden)."""
    # escape '/' too
    return urllib.quote(s, safe='~')
# util function: current timestamp
# seconds since epoch (UTC)
def generate_timestamp():
    """Return the current time as integer seconds since the epoch."""
    return int(time.time())
# util function: nonce
# pseudorandom number
def generate_nonce(length=8):
    """Return a pseudorandom string of *length* decimal digits."""
    digits = [str(random.randint(0, 9)) for _ in range(length)]
    return ''.join(digits)
# OAuthConsumer is a data type that represents the identity of the Consumer
# via its shared secret with the Service Provider.
class OAuthConsumer(object):
    """Consumer identity: the key/secret pair shared with the provider."""
    key = None
    secret = None

    def __init__(self, key, secret):
        self.key, self.secret = key, secret
# OAuthToken is a data type that represents an End User via either an access
# or request token.
class OAuthToken(object):
    """Key/secret pair identifying an End User (request or access token)."""
    key = None      # the token
    secret = None   # the token secret

    def __init__(self, key, secret):
        self.key, self.secret = key, secret

    def to_string(self):
        """Serialize as a url-encoded query fragment."""
        pair = {'oauth_token': self.key, 'oauth_token_secret': self.secret}
        return urllib.urlencode(pair)

    @staticmethod
    def from_string(s):
        """Parse a string like 'oauth_token_secret=digg&oauth_token=digg'."""
        parsed = cgi.parse_qs(s, keep_blank_values=False)
        return OAuthToken(parsed['oauth_token'][0], parsed['oauth_token_secret'][0])

    def __str__(self):
        return self.to_string()
# OAuthRequest represents the request and can be serialized
class OAuthRequest(object):
    """A single OAuth HTTP request: method, url and parameter dict.

    OAuth parameters:
        - oauth_consumer_key
        - oauth_token
        - oauth_signature_method
        - oauth_signature
        - oauth_timestamp
        - oauth_nonce
        - oauth_version
        ... any additional parameters, as defined by the Service Provider.
    """
    parameters = None  # dict of oauth_* and extra parameters
    http_method = HTTP_METHOD
    http_url = None
    version = VERSION

    def __init__(self, http_method=HTTP_METHOD, http_url=None, parameters=None):
        self.http_method = http_method
        self.http_url = http_url
        self.parameters = parameters or {}

    def set_parameter(self, parameter, value):
        self.parameters[parameter] = value

    def get_parameter(self, parameter):
        """Return a parameter value; raise OAuthError when absent."""
        try:
            return self.parameters[parameter]
        except KeyError:
            raise OAuthError('Parameter not found: %s' % parameter)

    def _get_timestamp_nonce(self):
        return self.get_parameter('oauth_timestamp'), self.get_parameter('oauth_nonce')

    def get_nonoauth_parameters(self):
        """Return only parameters that are not oauth_* protocol fields."""
        parameters = {}
        for k, v in self.parameters.iteritems():
            # ignore oauth parameters
            if k.find('oauth_') < 0:
                parameters[k] = v
        return parameters

    def to_header(self, realm=''):
        """Serialize as an Authorization: header for an HTTPAuth request."""
        auth_header = 'OAuth realm="%s"' % realm
        # add the oauth parameters
        if self.parameters:
            for k, v in self.parameters.iteritems():
                auth_header += ', %s="%s"' % (k, escape(str(v)))
        return {'Authorization': auth_header}

    def to_postdata(self):
        """Serialize as post data for a POST request."""
        return '&'.join('%s=%s' % (escape(str(k)), escape(str(v)))
                        for k, v in self.parameters.iteritems())

    def to_url(self):
        """Serialize as a url for a GET request."""
        return '%s?%s' % (self.get_normalized_http_url(), self.to_postdata())

    def get_normalized_parameters(self):
        """Return the sorted, escaped parameter string that gets signed."""
        # BUG FIX: operate on a copy -- the original deleted
        # 'oauth_signature' from self.parameters itself, mutating the
        # request object as a side effect of computing the base string.
        params = self.parameters.copy()
        params.pop('oauth_signature', None)  # the signature never signs itself
        key_values = params.items()
        # sort lexicographically, first after key, then after value
        key_values.sort()
        # combine key value pairs in string and escape
        return '&'.join('%s=%s' % (escape(str(k)), escape(str(v))) for k, v in key_values)

    def get_normalized_http_method(self):
        """Uppercase the http method for the signature base string."""
        return self.http_method.upper()

    def get_normalized_http_url(self):
        """Rebuild the url as scheme://host/path (query/fragment dropped)."""
        parts = urlparse.urlparse(self.http_url)
        url_string = '%s://%s%s' % (parts[0], parts[1], parts[2])  # scheme, netloc, path
        return url_string

    def sign_request(self, signature_method, consumer, token):
        """Set oauth_signature_method and oauth_signature on this request."""
        self.set_parameter('oauth_signature_method', signature_method.get_name())
        self.set_parameter('oauth_signature',
                           self.build_signature(signature_method, consumer, token))

    def build_signature(self, signature_method, consumer, token):
        # delegate to the strategy object
        return signature_method.build_signature(self, consumer, token)

    @staticmethod
    def from_request(http_method, http_url, headers=None, parameters=None, query_string=None):
        """Combine header, query-string and URL parameters into a request.

        Returns None when no parameters at all could be collected.
        """
        if parameters is None:
            parameters = {}
        # Authorization header
        if headers and 'HTTP_AUTHORIZATION' in headers:
            auth_header = headers['HTTP_AUTHORIZATION']
            # BUG FIX: the original used auth_header.index('OAuth') > -1,
            # which raises ValueError (instead of skipping the header)
            # whenever the header is not an OAuth one.
            if 'OAuth' in auth_header:
                try:
                    header_params = OAuthRequest._split_header(auth_header)
                    parameters.update(header_params)
                except Exception:
                    raise OAuthError('Unable to parse OAuth parameters from Authorization header.')
        # GET or POST query string
        if query_string:
            query_params = OAuthRequest._split_url_string(query_string)
            parameters.update(query_params)
        # URL parameters
        param_str = urlparse.urlparse(http_url)[4]  # query component
        url_params = OAuthRequest._split_url_string(param_str)
        parameters.update(url_params)
        if parameters:
            return OAuthRequest(http_method, http_url, parameters)
        return None

    @staticmethod
    def from_consumer_and_token(oauth_consumer, token=None, http_method=HTTP_METHOD, http_url=None, parameters=None):
        """Build an outgoing request, filling in the standard oauth_* defaults."""
        if not parameters:
            parameters = {}
        defaults = {
            'oauth_consumer_key': oauth_consumer.key,
            'oauth_timestamp': generate_timestamp(),
            'oauth_nonce': generate_nonce(),
            'oauth_version': OAuthRequest.version,
        }
        defaults.update(parameters)  # caller-supplied values win
        parameters = defaults
        if token:
            parameters['oauth_token'] = token.key
        return OAuthRequest(http_method, http_url, parameters)

    @staticmethod
    def from_token_and_callback(token, callback=None, http_method=HTTP_METHOD, http_url=None, parameters=None):
        """Build an authorization request for *token*, with optional callback."""
        if not parameters:
            parameters = {}
        parameters['oauth_token'] = token.key
        if callback:
            parameters['oauth_callback'] = escape(callback)
        return OAuthRequest(http_method, http_url, parameters)

    @staticmethod
    def _split_header(header):
        """Turn an Authorization: header into a parameter dict (unescaping values)."""
        params = {}
        header = header.replace('OAuth ', '', 1)
        parts = header.split(',')
        for param in parts:
            # the realm is not a protocol parameter -- skip it
            if param.find('realm') > -1:
                continue
            param = param.strip()
            param_parts = param.split('=', 1)
            # remove surrounding quotes and unescape the value
            params[param_parts[0]] = urllib.unquote(param_parts[1].strip('\"'))
        return params

    @staticmethod
    def _split_url_string(param_str):
        """Turn a url query string into a parameter dict (unescaping values)."""
        parameters = cgi.parse_qs(param_str, keep_blank_values=False)
        for k, v in parameters.iteritems():
            parameters[k] = urllib.unquote(v[0])
        return parameters
# OAuthServer is a worker to check a requests validity against a data store
class OAuthServer(object):
    """Validates incoming OAuth requests against an OAuthDataStore."""
    timestamp_threshold = 300  # maximum allowed clock skew, in seconds (five minutes)
    version = VERSION
    signature_methods = None
    data_store = None

    def __init__(self, data_store=None, signature_methods=None):
        self.data_store = data_store
        self.signature_methods = signature_methods or {}

    def set_data_store(self, oauth_data_store):
        # BUG FIX: previously assigned the undefined name `data_store`,
        # raising NameError whenever this setter was called.
        self.data_store = oauth_data_store

    def get_data_store(self):
        return self.data_store

    def add_signature_method(self, signature_method):
        """Register a signature method strategy under its name."""
        self.signature_methods[signature_method.get_name()] = signature_method
        return self.signature_methods

    def fetch_request_token(self, oauth_request):
        """Process a request_token request; return the request token."""
        try:
            # get the request token for authorization
            token = self._get_token(oauth_request, 'request')
        except OAuthError:
            # no token required for the initial token request
            version = self._get_version(oauth_request)
            consumer = self._get_consumer(oauth_request)
            self._check_signature(oauth_request, consumer, None)
            # fetch a new token
            token = self.data_store.fetch_request_token(consumer)
        return token

    def fetch_access_token(self, oauth_request):
        """Process an access_token request; exchange a request token for an access token."""
        version = self._get_version(oauth_request)
        consumer = self._get_consumer(oauth_request)
        # get the request token
        token = self._get_token(oauth_request, 'request')
        self._check_signature(oauth_request, consumer, token)
        new_token = self.data_store.fetch_access_token(consumer, token)
        return new_token

    def verify_request(self, oauth_request):
        """Verify an api call; return (consumer, token, non-oauth parameters)."""
        version = self._get_version(oauth_request)
        consumer = self._get_consumer(oauth_request)
        # get the access token
        token = self._get_token(oauth_request, 'access')
        self._check_signature(oauth_request, consumer, token)
        parameters = oauth_request.get_nonoauth_parameters()
        return consumer, token, parameters

    def authorize_token(self, token, user):
        """Authorize a request token on behalf of *user*."""
        return self.data_store.authorize_request_token(token, user)

    def get_callback(self, oauth_request):
        """Return the oauth_callback url (OAuthError if absent)."""
        return oauth_request.get_parameter('oauth_callback')

    def build_authenticate_header(self, realm=''):
        """Optional support for the WWW-Authenticate header (401 responses)."""
        return {'WWW-Authenticate': 'OAuth realm="%s"' % realm}

    def _get_version(self, oauth_request):
        """Verify the requested OAuth version; default to ours when absent."""
        try:
            version = oauth_request.get_parameter('oauth_version')
        except OAuthError:
            # parameter missing -> assume our version
            version = VERSION
        if version and version != self.version:
            raise OAuthError('OAuth version %s not supported.' % str(version))
        return version

    def _get_signature_method(self, oauth_request):
        """Resolve the signature method object, defaulting to PLAINTEXT."""
        try:
            signature_method = oauth_request.get_parameter('oauth_signature_method')
        except OAuthError:
            signature_method = SIGNATURE_METHOD
        try:
            # get the signature method object
            signature_method = self.signature_methods[signature_method]
        except KeyError:
            signature_method_names = ', '.join(self.signature_methods.keys())
            raise OAuthError('Signature method %s not supported try one of the following: %s' % (signature_method, signature_method_names))
        return signature_method

    def _get_consumer(self, oauth_request):
        """Look up the consumer named by oauth_consumer_key."""
        consumer_key = oauth_request.get_parameter('oauth_consumer_key')
        if not consumer_key:
            raise OAuthError('Invalid consumer key.')
        consumer = self.data_store.lookup_consumer(consumer_key)
        if not consumer:
            raise OAuthError('Invalid consumer.')
        return consumer

    def _get_token(self, oauth_request, token_type='access'):
        """Look up the token named by oauth_token in the data store."""
        token_field = oauth_request.get_parameter('oauth_token')
        token = self.data_store.lookup_token(token_type, token_field)
        if not token:
            raise OAuthError('Invalid %s token: %s' % (token_type, token_field))
        return token

    def _check_signature(self, oauth_request, consumer, token):
        """Verify timestamp, nonce and signature; raise OAuthError on failure."""
        timestamp, nonce = oauth_request._get_timestamp_nonce()
        self._check_timestamp(timestamp)
        self._check_nonce(consumer, token, nonce)
        signature_method = self._get_signature_method(oauth_request)
        try:
            signature = oauth_request.get_parameter('oauth_signature')
        except OAuthError:
            raise OAuthError('Missing signature.')
        # validate the signature
        valid_sig = signature_method.check_signature(oauth_request, consumer, token, signature)
        if not valid_sig:
            key, base = signature_method.build_signature_base_string(oauth_request, consumer, token)
            raise OAuthError('Invalid signature. Expected signature base string: %s' % base)
        # (removed a dead build_signature call whose result was never used)

    def _check_timestamp(self, timestamp):
        """Reject timestamps more than timestamp_threshold seconds old."""
        timestamp = int(timestamp)
        now = int(time.time())
        lapsed = now - timestamp
        if lapsed > self.timestamp_threshold:
            raise OAuthError('Expired timestamp: given %d and now %s has a greater difference than threshold %d' % (timestamp, now, self.timestamp_threshold))

    def _check_nonce(self, consumer, token, nonce):
        """Reject a nonce the data store has already seen for this consumer/token."""
        nonce = self.data_store.lookup_nonce(consumer, token, nonce)
        if nonce:
            raise OAuthError('Nonce already used: %s' % str(nonce))
# OAuthClient is a worker to attempt to execute a request
class OAuthClient(object):
    """Abstract client that executes OAuth requests; subclasses implement transport."""
    consumer = None
    token = None

    def __init__(self, oauth_consumer, oauth_token):
        self.consumer, self.token = oauth_consumer, oauth_token

    def get_consumer(self):
        return self.consumer

    def get_token(self):
        return self.token

    def fetch_request_token(self, oauth_request):
        # -> OAuthToken
        raise NotImplementedError

    def fetch_access_token(self, oauth_request):
        # -> OAuthToken
        raise NotImplementedError

    def access_resource(self, oauth_request):
        # -> some protected resource
        raise NotImplementedError
# OAuthDataStore is a database abstraction used to lookup consumers and tokens
class OAuthDataStore(object):
    """Abstract persistence interface; subclass and implement each method.

    NOTE(review): these stub signatures do not match how OAuthServer calls
    them -- lookup_token is called as (token_type, token) and lookup_nonce
    as (consumer, token, nonce), i.e. with fewer arguments than declared
    here. Concrete stores appear to follow the caller; confirm before
    relying on the signatures below.
    """
    def lookup_consumer(self, key):
        # -> OAuthConsumer
        raise NotImplementedError
    def lookup_token(self, oauth_consumer, token_type, token_token):
        # -> OAuthToken
        raise NotImplementedError
    def lookup_nonce(self, oauth_consumer, oauth_token, nonce, timestamp):
        # -> OAuthToken
        raise NotImplementedError
    def fetch_request_token(self, oauth_consumer):
        # -> OAuthToken
        raise NotImplementedError
    def fetch_access_token(self, oauth_consumer, oauth_token):
        # -> OAuthToken
        raise NotImplementedError
    def authorize_request_token(self, oauth_token, user):
        # -> OAuthToken
        raise NotImplementedError
# OAuthSignatureMethod is a strategy class that implements a signature method
class OAuthSignatureMethod(object):
    """Strategy interface for computing and checking oauth_signature values."""
    def get_name(self):
        # -> str
        raise NotImplementedError
    def build_signature_base_string(self, oauth_request, oauth_consumer, oauth_token):
        # -> str key, str raw
        raise NotImplementedError
    def build_signature(self, oauth_request, oauth_consumer, oauth_token):
        # -> str
        raise NotImplementedError
    def check_signature(self, oauth_request, consumer, token, signature):
        # Recompute the signature and compare against what the client sent.
        built = self.build_signature(oauth_request, consumer, token)
        return built == signature
class OAuthSignatureMethod_HMAC_SHA1(OAuthSignatureMethod):
    """HMAC-SHA1 signature method (OAuth Core 1.0, section 9.2)."""

    def get_name(self):
        return 'HMAC-SHA1'

    def build_signature_base_string(self, oauth_request, consumer, token):
        """Return (key, raw): the HMAC key and the signature base string."""
        sig = (
            escape(oauth_request.get_normalized_http_method()),
            escape(oauth_request.get_normalized_http_url()),
            escape(oauth_request.get_normalized_parameters()),
        )
        # key is "consumer_secret&token_secret"; token part empty when no token
        key = '%s&' % escape(consumer.secret)
        if token:
            key += escape(token.secret)
        raw = '&'.join(sig)
        return key, raw

    def build_signature(self, oauth_request, consumer, token):
        """Return the base64-encoded HMAC-SHA1 digest of the base string."""
        # build the base signature string
        key, raw = self.build_signature_base_string(oauth_request, consumer, token)
        try:
            import hashlib # 2.5
            hashed = hmac.new(key, raw, hashlib.sha1)
        except ImportError:
            # BUG FIX: was a bare `except:`, which also swallowed real
            # errors raised by hmac.new itself; only the hashlib import
            # failure (Python < 2.5) should trigger the sha fallback.
            import sha # deprecated
            hashed = hmac.new(key, raw, sha)
        # calculate the digest base 64
        return base64.b64encode(hashed.digest())
class OAuthSignatureMethod_PLAINTEXT(OAuthSignatureMethod):
    """PLAINTEXT signature method: the escaped secrets themselves are the signature."""

    def get_name(self):
        return 'PLAINTEXT'

    def build_signature_base_string(self, oauth_request, consumer, token):
        """Concatenate the escaped consumer secret and (optional) token secret."""
        parts = [escape(consumer.secret), '&']
        if token:
            parts.append(escape(token.secret))
        return ''.join(parts)

    def build_signature(self, oauth_request, consumer, token):
        # for PLAINTEXT the base string is the signature
        return self.build_signature_base_string(oauth_request, consumer, token)
| Python |
from django.db import models
from django.contrib.auth.models import User
# Lengths (in characters) of generated key/secret credentials.
KEY_SIZE = 18
SECRET_SIZE = 32
class KeyManager(models.Manager):
    '''Add support for random key/secret generation
    '''
    def generate_random_codes(self):
        """Return a (key, secret) pair not already present in this table."""
        key = User.objects.make_random_password(length=KEY_SIZE)
        secret = User.objects.make_random_password(length=SECRET_SIZE)
        # Re-draw only the secret until the (key, secret) pair is unique.
        # NOTE(review): uniqueness is checked on the pair, not on the key
        # alone -- two rows could share a key with different secrets;
        # confirm that is acceptable for lookups keyed on `key`.
        while self.filter(key__exact=key, secret__exact=secret).count():
            secret = User.objects.make_random_password(length=SECRET_SIZE)
        return key, secret
class ConsumerManager(KeyManager):
    def create_consumer(self, name, description=None, user=None):
        """
        Shortcut to create a consumer with random key/secret.

        Fetches (or creates) the consumer named *name*; new consumers get
        fresh credentials, and any supplied user/description is persisted.
        """
        consumer, created = self.get_or_create(name=name)
        if user:
            consumer.user = user
        if description:
            consumer.description = description
        if created:
            consumer.key, consumer.secret = self.generate_random_codes()
        # BUG FIX: save() previously ran only inside the `if created`
        # branch, so `user`/`description` updates applied to an existing
        # consumer were silently dropped.
        if created or user or description:
            consumer.save()
        return consumer

    # NOTE(review): indentation in the source is ambiguous; this appears
    # to be an (unused) class-level cache slot.
    _default_consumer = None
class ResourceManager(models.Manager):
    _default_resource = None  # memoized Resource instance

    def get_default_resource(self, name):
        """
        Add cache if you use a default resource.
        """
        # NOTE(review): the cache ignores `name` -- once populated, the
        # same Resource is returned even if a different name is requested.
        # Confirm only one default resource name is ever used.
        if not self._default_resource:
            self._default_resource = self.get(name=name)
        return self._default_resource
class TokenManager(KeyManager):
    def create_token(self, consumer, token_type, timestamp, user=None):
        """
        Shortcut to create a token with random key/secret.
        """
        # Reuse an existing row for the same (consumer, type, timestamp,
        # user) tuple; only brand-new tokens get fresh credentials.
        token, created = self.get_or_create(consumer=consumer,
                                            token_type=token_type,
                                            timestamp=timestamp,
                                            user=user)
        if created:
            token.key, token.secret = self.generate_random_codes()
            token.save()
        return token
| Python |
import binascii
import oauth
from django.http import HttpResponse, HttpResponseRedirect
from django.contrib.auth.models import User, AnonymousUser
from django.contrib.auth.decorators import login_required
from django.template import loader
from django.contrib.auth import authenticate
from django.conf import settings
from django.core.urlresolvers import get_callable
from django.core.exceptions import ImproperlyConfigured
from django.shortcuts import render_to_response
from django.template import RequestContext
from piston import forms
class NoAuthentication(object):
    """
    Authentication handler that always returns
    True, so no authentication is needed, nor
    initiated (`challenge` is missing.)
    """
    def is_authenticated(self, request):
        # Every request is accepted as-is.
        return True
class HttpBasicAuthentication(object):
    """
    Basic HTTP authenticater. Synopsis:
    Authentication handlers must implement two methods:
     - `is_authenticated`: Will be called when checking for
        authentication. Receives a `request` object, please
        set your `User` object on `request.user`, otherwise
        return False (or something that evaluates to False.)
     - `challenge`: In cases where `is_authenticated` returns
        False, the result of this method will be returned.
        This will usually be a `HttpResponse` object with
        some kind of challenge headers and 401 code on it.
    """
    def __init__(self, auth_func=authenticate, realm='API'):
        self.auth_func = auth_func
        self.realm = realm

    def is_authenticated(self, request):
        """Validate Basic credentials; sets request.user and returns bool."""
        auth_string = request.META.get('HTTP_AUTHORIZATION', None)
        if not auth_string:
            return False
        try:
            (authmeth, auth) = auth_string.split(" ", 1)
            if not authmeth.lower() == 'basic':
                return False
            # Python 2 str codec: base64-decode "user:password" in one step.
            auth = auth.strip().decode('base64')
            (username, password) = auth.split(':', 1)
        except (ValueError, binascii.Error):
            # Malformed header or invalid base64 padding.
            return False
        request.user = self.auth_func(username=username, password=password) \
            or AnonymousUser()
        # Failed logins leave an AnonymousUser on the request; reject those.
        return not request.user in (False, None, AnonymousUser())

    def challenge(self):
        """Return the 401 response carrying the Basic challenge header."""
        resp = HttpResponse("Authorization Required")
        resp['WWW-Authenticate'] = 'Basic realm="%s"' % self.realm
        resp.status_code = 401
        return resp
def load_data_store():
'''Load data store for OAuth Consumers, Tokens, Nonces and Resources
'''
path = getattr(settings, 'OAUTH_DATA_STORE', 'piston.store.DataStore')
# stolen from django.contrib.auth.load_backend
i = path.rfind('.')
module, attr = path[:i], path[i+1:]
try:
mod = __import__(module, {}, {}, attr)
except ImportError, e:
raise ImproperlyConfigured, 'Error importing OAuth data store %s: "%s"' % (module, e)
try:
cls = getattr(mod, attr)
except AttributeError:
raise ImproperlyConfigured, 'Module %s does not define a "%s" OAuth data store' % (module, attr)
return cls
# Set the datastore here.
oauth_datastore = load_data_store()
def initialize_server_request(request):
    """
    Shortcut for initialization.

    Builds an OAuthRequest from the Django request and, when one could be
    parsed, an OAuthServer wired to the configured data store with both
    PLAINTEXT and HMAC-SHA1 signature methods registered. Returns
    (oauth_server, oauth_request); oauth_server is None when the request
    carried no usable parameters.
    """
    oauth_request = oauth.OAuthRequest.from_request(
        request.method, request.build_absolute_uri(),
        headers=request.META, parameters=dict(request.REQUEST.items()),
        query_string=request.environ.get('QUERY_STRING', ''))
    if oauth_request:
        # The data store is constructed per-request.
        oauth_server = oauth.OAuthServer(oauth_datastore(oauth_request))
        oauth_server.add_signature_method(oauth.OAuthSignatureMethod_PLAINTEXT())
        oauth_server.add_signature_method(oauth.OAuthSignatureMethod_HMAC_SHA1())
    else:
        oauth_server = None
    return oauth_server, oauth_request
def send_oauth_error(err=None):
    """
    Shortcut for sending an error.

    Returns a 401 response whose body is the error message and whose
    WWW-Authenticate header advertises the OAuth realm.
    """
    # BUG FIX: err defaults to None, but err.message was dereferenced
    # unconditionally -- fall back to a generic OAuthError.
    if err is None:
        err = oauth.OAuthError()
    response = HttpResponse(err.message.encode('utf-8'))
    response.status_code = 401
    realm = 'OAuth'
    header = oauth.build_authenticate_header(realm=realm)
    for k, v in header.iteritems():
        response[k] = v
    return response
def oauth_request_token(request):
    """OAuth request_token endpoint: issue an unauthorized request token."""
    oauth_server, oauth_request = initialize_server_request(request)
    if oauth_server is None:
        return INVALID_PARAMS_RESPONSE
    try:
        # Serialized token (key + secret) goes straight into the body.
        token = oauth_server.fetch_request_token(oauth_request)
        response = HttpResponse(token.to_string())
    except oauth.OAuthError, err:
        response = send_oauth_error(err)
    return response
def oauth_auth_view(request, token, callback, params):
    """Default token-authorization page (overridable via settings.OAUTH_AUTH_VIEW).

    `params` is accepted for interface compatibility with custom views but
    is not used here.
    """
    form = forms.OAuthAuthenticationForm(initial={
        'oauth_token': token.key,
        'oauth_callback': callback,
    })
    return render_to_response('piston/authorize_token.html',
        { 'form': form }, RequestContext(request))
@login_required
def oauth_user_auth(request):
oauth_server, oauth_request = initialize_server_request(request)
if oauth_request is None:
return INVALID_PARAMS_RESPONSE
try:
token = oauth_server.fetch_request_token(oauth_request)
except oauth.OAuthError, err:
return send_oauth_error(err)
try:
callback = oauth_server.get_callback(oauth_request)
except:
callback = None
if request.method == "GET":
params = oauth_request.get_normalized_parameters()
oauth_view = getattr(settings, 'OAUTH_AUTH_VIEW', None)
if oauth_view is None:
return oauth_auth_view(request, token, callback, params)
else:
return get_callable(oauth_view)(request, token, callback, params)
elif request.method == "POST":
try:
form = forms.OAuthAuthenticationForm(request.POST)
if form.is_valid():
token = oauth_server.authorize_token(token, request.user)
args = '?'+token.to_string(only_key=True)
else:
args = '?error=%s' % 'Access not granted by user.'
if not callback:
callback = getattr(settings, 'OAUTH_CALLBACK_VIEW')
return get_callable(callback)(request, token)
response = HttpResponseRedirect(callback+args)
except oauth.OAuthError, err:
response = send_oauth_error(err)
else:
response = HttpResponse('Action not allowed.')
return response
def oauth_access_token(request):
    """OAuth access_token endpoint: exchange an authorized request token
    for an access token."""
    oauth_server, oauth_request = initialize_server_request(request)
    if oauth_request is None:
        return INVALID_PARAMS_RESPONSE
    try:
        token = oauth_server.fetch_access_token(oauth_request)
        return HttpResponse(token.to_string())
    except oauth.OAuthError, err:
        return send_oauth_error(err)

# Shared 401 response for requests lacking usable OAuth parameters.
INVALID_PARAMS_RESPONSE = send_oauth_error(oauth.OAuthError('Invalid request parameters.'))
class OAuthAuthentication(object):
    """
    OAuth authentication. Based on work by Leah Culver.
    """
    def __init__(self, realm='API'):
        self.realm = realm
        self.builder = oauth.build_authenticate_header

    def is_authenticated(self, request):
        """
        Checks whether a means of specifying authentication
        is provided, and if so, if it is a valid token.
        Read the documentation on `HttpBasicAuthentication`
        for more information about what goes on here.
        """
        if self.is_valid_request(request):
            try:
                consumer, token, parameters = self.validate_token(request)
            except oauth.OAuthError, err:
                # NOTE(review): debug print of the error response to
                # stdout; consider logging instead.
                print send_oauth_error(err)
                return False
            if consumer and token:
                request.user = token.user
                # Consumed by the throttle decorator for per-app limits.
                request.throttle_extra = token.consumer.id
                return True
        return False

    def challenge(self):
        """
        Returns a 401 response with a small bit on
        what OAuth is, and where to learn more about it.
        When this was written, browsers did not understand
        OAuth authentication on the browser side, and hence
        the helpful template we render. Maybe some day in the
        future, browsers will take care of this stuff for us
        and understand the 401 with the realm we give it.
        """
        response = HttpResponse()
        response.status_code = 401
        realm = 'API'
        for k, v in self.builder(realm=realm).iteritems():
            response[k] = v
        tmpl = loader.render_to_string('oauth/challenge.html',
            { 'MEDIA_URL': settings.MEDIA_URL })
        response.content = tmpl
        return response

    @staticmethod
    def is_valid_request(request):
        """
        Checks whether the required parameters are either in
        the http-authorization header sent by some clients,
        which is by the way the preferred method according to
        OAuth spec, but otherwise fall back to `GET` and `POST`.
        """
        must_have = [ 'oauth_'+s for s in [
            'consumer_key', 'token', 'signature',
            'signature_method', 'timestamp', 'nonce' ] ]
        # Substring test against the raw header string; key-membership
        # test against the combined GET/POST dict.
        is_in = lambda l: all([ (p in l) for p in must_have ])
        auth_params = request.META.get("HTTP_AUTHORIZATION", "")
        req_params = request.REQUEST
        return is_in(auth_params) or is_in(req_params)

    @staticmethod
    def validate_token(request, check_timestamp=True, check_nonce=True):
        # NOTE(review): check_timestamp/check_nonce are accepted but never
        # forwarded -- verify_request always performs both checks.
        oauth_server, oauth_request = initialize_server_request(request)
        return oauth_server.verify_request(oauth_request)
| Python |
from django.middleware.http import ConditionalGetMiddleware
from django.middleware.common import CommonMiddleware
def compat_middleware_factory(klass):
    """
    Wrap a middleware class so `process_response` is skipped for
    responses flagged with a `streaming` attribute. Django's stock
    middleware inspects response content, which would prematurely
    exhaust generator- or buffer-backed responses.
    """
    class compatwrapper(klass):
        def process_response(self, req, resp):
            if hasattr(resp, 'streaming'):
                # Streaming response: hand it back untouched.
                return resp
            return klass.process_response(self, req, resp)
    return compatwrapper
# Streaming-aware drop-in replacements for Django's stock middleware.
ConditionalMiddlewareCompatProxy = compat_middleware_factory(ConditionalGetMiddleware)
CommonMiddlewareCompatProxy = compat_middleware_factory(CommonMiddleware)
| Python |
from django.http import HttpResponseNotAllowed, HttpResponseForbidden, HttpResponse, HttpResponseBadRequest
from django.core.urlresolvers import reverse
from django.core.cache import cache
from django import get_version as django_version
from decorator import decorator
from datetime import datetime, timedelta
# Piston release string, exposed via get_version().
__version__ = '0.2.2'

def get_version():
    """Return the Piston version string."""
    return __version__
def format_error(error):
    """Prefix *error* with a Piston/Django version banner for crash reports."""
    banner = u"Piston/%s (Django %s) crash report:\n\n%s"
    return banner % (get_version(), django_version(), error)
class rc_factory(object):
    """
    Status codes.
    """
    # Canonical (body, status) pairs; attribute access mints a fresh
    # HttpResponse each time so callers may mutate it safely.
    CODES = dict(ALL_OK = ('OK', 200),
                 CREATED = ('Created', 201),
                 DELETED = ('', 204), # 204 says "Don't send a body!"
                 BAD_REQUEST = ('Bad Request', 400),
                 FORBIDDEN = ('Forbidden', 401), # NOTE(review): HTTP Forbidden is 403; 401 kept as-is since callers may depend on it -- confirm intent
                 NOT_FOUND = ('Not Found', 404),
                 DUPLICATE_ENTRY = ('Conflict/Duplicate', 409),
                 NOT_HERE = ('Gone', 410),
                 NOT_IMPLEMENTED = ('Not Implemented', 501),
                 THROTTLED = ('Throttled', 503))

    def __getattr__(self, attr):
        """
        Returns a fresh `HttpResponse` when getting
        an "attribute". This is backwards compatible
        with 0.2, which is important.
        """
        try:
            (r, c) = self.CODES.get(attr)
        except TypeError:
            # CODES.get returned None: unknown code name.
            raise AttributeError(attr)
        return HttpResponse(r, content_type='text/plain', status=c)

# Singleton accessor, e.g. `rc.NOT_FOUND`.
rc = rc_factory()
class FormValidationError(Exception):
    """Raised by the `validate` decorator; carries the invalid form."""
    def __init__(self, form):
        self.form = form
class HttpStatusCode(Exception):
    """Raised to short-circuit handling with a ready-made response."""
    def __init__(self, response):
        self.response = response
def validate(v_form, operation='POST'):
    """Decorator factory: bind request.<operation> data to `v_form` and
    raise FormValidationError when the form does not validate."""
    @decorator
    def wrap(f, self, request, *a, **kwa):
        form = v_form(getattr(request, operation))
        if form.is_valid():
            return f(self, request, *a, **kwa)
        else:
            raise FormValidationError(form)
    return wrap
def throttle(max_requests, timeout=60*60, extra=''):
    """
    Simple throttling decorator, caches
    the amount of requests made in cache.
    If used on a view where users are required to
    log in, the username is used, otherwise the
    IP address of the originating request is used.
    Parameters::
     - `max_requests`: The maximum number of requests
     - `timeout`: The timeout for the cache entry (default: 1 hour)
    """
    @decorator
    def wrap(f, self, request, *args, **kwargs):
        # Identify the caller: username when logged in, else client IP.
        if request.user.is_authenticated():
            ident = request.user.username
        else:
            ident = request.META.get('REMOTE_ADDR', None)
        # NOTE(review): ident may be None here (no REMOTE_ADDR); the
        # `ident +=` below would then raise TypeError -- confirm.
        if hasattr(request, 'throttle_extra'):
            """
            Since we want to be able to throttle on a per-
            application basis, it's important that we realize
            that `throttle_extra` might be set on the request
            object. If so, append the identifier name with it.
            """
            ident += ':%s' % str(request.throttle_extra)
        if ident:
            """
            Preferrably we'd use incr/decr here, since they're
            atomic in memcached, but it's in django-trunk so we
            can't use it yet. If someone sees this after it's in
            stable, you can change it here.
            """
            ident += ':%s' % extra
            now = datetime.now()
            ts_key = 'throttle:ts:%s' % ident
            timestamp = cache.get(ts_key)
            offset = now + timedelta(seconds=timeout)
            # A stored timestamp means the client is currently blocked.
            if timestamp and timestamp < offset:
                t = rc.THROTTLED
                wait = timeout - (offset-timestamp).seconds
                t.content = 'Throttled, wait %d seconds.' % wait
                return t
            # NOTE(review): this counter entry is set without a timeout,
            # so it persists until the throttle trips and resets it.
            count = cache.get(ident, 1)
            cache.set(ident, count+1)
            if count >= max_requests:
                cache.set(ts_key, offset, timeout)
                cache.set(ident, 1)
        return f(self, request, *args, **kwargs)
    return wrap
def coerce_put_post(request):
    """
    Work around Django's REST-unfriendly request parsing: for PUT
    requests, force the body to be parsed (as if it were a POST) and
    expose the parsed data as `request.PUT`.

    The try/except exists because of a mod_python bug where
    `request.method` cannot be reassigned directly; in that case the
    method is flipped via request.META instead.
    """
    if request.method != "PUT":
        return
    try:
        # Masquerade as POST just long enough for Django to load the body.
        request.method = "POST"
        request._load_post_and_files()
        request.method = "PUT"
    except AttributeError:
        request.META['REQUEST_METHOD'] = 'POST'
        request._load_post_and_files()
        request.META['REQUEST_METHOD'] = 'PUT'
    request.PUT = request.POST
class MimerDataException(Exception):
    """Signals that the request body could not be deserialized as the
    declared Content-type (the payload and mimetype disagree)."""
class Mimer(object):
    """
    Deserializes request bodies based on the Content-type header.

    Deserializer callables are registered in the class-level `TYPES`
    mapping ({loader_function: [mimetype, ...]}) via `register`, and
    looked up per request by `loader_for_type`.
    """
    TYPES = dict()

    def __init__(self, request):
        self.request = request

    def is_multipart(self):
        """True when the request declares a multipart content type."""
        content_type = self.content_type()
        if content_type is not None:
            return content_type.lstrip().startswith('multipart')
        return False

    def loader_for_type(self, ctype):
        """
        Gets a function ref to deserialize content
        for a certain mimetype. Returns None when no
        registered mimetype matches `ctype`.
        """
        # .items() instead of the Python-2-only .iteritems() keeps this
        # working on both Python 2 and 3.
        for loadee, mimes in Mimer.TYPES.items():
            for mime in mimes:
                if ctype.startswith(mime):
                    return loadee

    def content_type(self):
        """
        Returns the content type of the request in all cases where it is
        different than a submitted form - application/x-www-form-urlencoded
        """
        type_formencoded = "application/x-www-form-urlencoded"
        ctype = self.request.META.get('CONTENT_TYPE', type_formencoded)
        if ctype.startswith(type_formencoded):
            return None
        return ctype

    def translate(self):
        """
        Will look at the `Content-type` sent by the client, and maybe
        deserialize the contents into the format they sent. This will
        work for JSON, YAML, XML and Pickle. Since the data is not just
        key-value (and maybe just a list), the data will be placed on
        `request.data` instead, and the handler will have to read from
        there.

        It will also set `request.content_type` so the handler has an easy
        way to tell what's going on. `request.content_type` will always be
        None for form-encoded and/or multipart form data (what your browser sends.)

        Raises MimerDataException when the body cannot be deserialized
        (or no deserializer is registered for the declared type).
        """
        ctype = self.content_type()
        self.request.content_type = ctype
        if not self.is_multipart() and ctype:
            loadee = self.loader_for_type(ctype)
            try:
                # `loadee` may be None (unregistered type); calling it then
                # raises TypeError, reported below as undeserializable data.
                self.request.data = loadee(self.request.raw_post_data)
                # Reset both POST and PUT from request, as its
                # misleading having their presence around.
                self.request.POST = self.request.PUT = dict()
            except (TypeError, ValueError):
                raise MimerDataException
        return self.request

    @classmethod
    def register(cls, loadee, types):
        """Register deserializer `loadee` for the given mimetype list."""
        cls.TYPES[loadee] = types

    @classmethod
    def unregister(cls, loadee):
        """Remove a deserializer; returns the mimetypes it handled."""
        return cls.TYPES.pop(loadee)
def translate_mime(request):
    """Deserialize `request`'s body in place via `Mimer.translate`.

    The translated request object is the same instance that was passed
    in, so the return value is deliberately discarded.
    """
    Mimer(request).translate()
def require_mime(*mimes):
    """
    Decorator requiring a certain mimetype. There's a nifty
    helper called `require_extended` below which requires everything
    we support except for post-data via form.

    Accepts either shorthand names ('json', 'yaml', 'xml', 'pickle')
    or full mimetype strings; a request with any other Content-type
    receives rc.BAD_REQUEST.
    """
    @decorator
    def wrap(f, self, request, *args, **kwargs):
        m = Mimer(request)
        # Expand shorthands to full mimetypes; unknown entries pass
        # through unchanged. (The original looped with enumerate() and
        # an unused index to fill the set.)
        rewrite = { 'json': 'application/json',
                    'yaml': 'application/x-yaml',
                    'xml': 'text/xml',
                    'pickle': 'application/python-pickle' }
        realmimes = set(rewrite.get(mime, mime) for mime in mimes)
        if m.content_type() not in realmimes:
            return rc.BAD_REQUEST
        return f(self, request, *args, **kwargs)
    return wrap
# Convenience decorator: accept every supported payload mimetype
# (everything except browser form posts).
require_extended = require_mime('json', 'yaml', 'xml', 'pickle')
| Python |
import inspect, handler
from piston.handler import typemapper
from piston.handler import handler_tracker
from django.core.urlresolvers import get_resolver, get_callable, get_script_prefix
from django.shortcuts import render_to_response
from django.template import RequestContext
def generate_doc(handler_cls):
    """Build a `HandlerDocumentation` for `handler_cls`.

    Raises ValueError if the argument is not a handler class, i.e. one
    created through `handler.HandlerMetaClass`.
    """
    if type(handler_cls) is not handler.HandlerMetaClass:
        raise ValueError("Give me handler, not %s" % type(handler_cls))
    return HandlerDocumentation(handler_cls)
class HandlerMethod(object):
    """Introspection wrapper around a single handler CRUD method
    (read/create/update/delete), used to build API documentation."""

    def __init__(self, method, stale=False):
        # `stale` flags methods inherited unchanged from the base handler.
        self.method = method
        self.stale = stale

    def iter_args(self):
        """Yield (name, default-as-string-or-None) pairs for the method's
        documentable arguments, skipping self/request/form."""
        # inspect.getargspec was removed in Python 3.11; prefer
        # getfullargspec when present (indices 0 and 3 are args/defaults
        # in both result tuples).
        argspec = getattr(inspect, 'getfullargspec', inspect.getargspec)(self.method)
        args, defaults = argspec[0], argspec[3]
        for idx, arg in enumerate(args):
            if arg in ('self', 'request', 'form'):
                continue
            # Distance from the end of the arg list decides whether this
            # argument has a default (defaults align with the tail).
            didx = len(args)-idx
            if defaults and len(defaults) >= didx:
                yield (arg, str(defaults[-didx]))
            else:
                yield (arg, None)

    @property
    def signature(self, parse_optional=True):
        # NOTE: properties cannot receive arguments, so `parse_optional`
        # is effectively always True on attribute access.
        spec = ""
        for argn, argdef in self.iter_args():
            spec += argn
            if argdef:
                spec += '=%s' % argdef
            spec += ', '
        spec = spec.rstrip(", ")
        if parse_optional:
            return spec.replace("=None", "=<optional>")
        return spec

    @property
    def doc(self):
        """The wrapped method's docstring (dedented), or None."""
        return inspect.getdoc(self.method)

    @property
    def name(self):
        return self.method.__name__

    @property
    def http_name(self):
        """Map CRUD method names to HTTP verbs (None when unknown)."""
        if self.name == 'read':
            return 'GET'
        elif self.name == 'create':
            return 'POST'
        elif self.name == 'delete':
            return 'DELETE'
        elif self.name == 'update':
            return 'PUT'

    def __repr__(self):
        return "<Method: %s>" % self.name
class HandlerDocumentation(object):
    """Documentation wrapper around a handler class: exposes its CRUD
    entry points as `HandlerMethod`s plus assorted metadata."""

    def __init__(self, handler):
        self.handler = handler

    def get_methods(self, include_default=False):
        """Yield `HandlerMethod`s for the handler's CRUD entry points.

        A method still defined in the base `handler` module (i.e. not
        overridden) is "stale" and skipped unless `include_default`.
        """
        for method in "read create update delete".split():
            met = getattr(self.handler, method, None)
            if not met:
                continue
            stale = inspect.getmodule(met) is handler
            if not self.handler.is_anonymous:
                if met and (not stale or include_default):
                    yield HandlerMethod(met, stale)
            else:
                # Anonymous handlers only document GET 'read' defaults.
                if not stale or met.__name__ == "read" \
                    and 'GET' in self.allowed_methods:
                    yield HandlerMethod(met, stale)

    def get_all_methods(self):
        return self.get_methods(include_default=True)

    @property
    def is_anonymous(self):
        # Bug fix: consult the wrapped handler class, not the imported
        # `handler` module (which the original referenced by mistake).
        return self.handler.is_anonymous

    def get_model(self):
        # Bug fix: the model lives on the handler class; the original
        # looked it up on this wrapper and therefore always returned None.
        return getattr(self.handler, 'model', None)

    @property
    def has_anonymous(self):
        return self.handler.anonymous

    @property
    def anonymous(self):
        if self.has_anonymous:
            return HandlerDocumentation(self.handler.anonymous)

    @property
    def doc(self):
        return self.handler.__doc__

    @property
    def name(self):
        return self.handler.__name__

    @property
    def allowed_methods(self):
        return self.handler.allowed_methods

    def get_resource_uri_template(self):
        """
        URI template processor.
        See http://bitworking.org/projects/URI-Templates/

        Returns a URI template string for the handler's `resource_uri`,
        or None when no reverse match can be made.
        """
        def _convert(template, params=[]):
            """URI template converter"""
            paths = template % dict([p, "{%s}" % p] for p in params)
            return u'%s%s' % (get_script_prefix(), paths)
        try:
            resource_uri = self.handler.resource_uri()
            # resource_uri may be (view,), (view, args) or (view, args, kwargs).
            components = [None, [], {}]
            for i, value in enumerate(resource_uri):
                components[i] = value
            lookup_view, args, kwargs = components
            lookup_view = get_callable(lookup_view, True)
            possibilities = get_resolver(None).reverse_dict.getlist(lookup_view)
            for possibility, pattern in possibilities:
                for result, params in possibility:
                    if args:
                        if len(args) != len(params):
                            continue
                        return _convert(result, params)
                    else:
                        if set(kwargs.keys()) != set(params):
                            continue
                        return _convert(result, params)
        except Exception:
            # Narrowed from a bare except: any resolution failure simply
            # means there is no usable template.
            return None

    resource_uri_template = property(get_resource_uri_template)

    def __repr__(self):
        return u'<Documentation for "%s">' % self.name
def documentation_view(request):
    """
    Generic documentation view. Generates documentation
    from the handlers you've defined.
    """
    # Loop variable renamed from `handler`, which shadowed the imported
    # `handler` module used elsewhere in this file.
    docs = [generate_doc(handler_cls) for handler_cls in handler_tracker]

    # Sort so handlers and their anonymous counterparts sit next to each
    # other. The key form replaces the Python-2-only cmp()-based
    # comparator (list.sort() lost its cmp argument in Python 3) while
    # producing the same ordering.
    docs.sort(key=lambda doc: doc.name.replace("Anonymous", ""))

    return render_to_response('documentation.html',
        { 'docs': docs }, RequestContext(request))
| Python |
import oauth
from models import Nonce, Token, Consumer
class DataStore(oauth.OAuthDataStore):
    """Layer between Python OAuth and Django database.

    NOTE(review): the oauth library is expected to drive these callbacks
    in a fixed order (lookup_consumer / lookup_token before the fetch_*
    calls); several methods rely on state stashed on `self` by earlier
    calls — confirm against the oauth library in use.
    """
    def __init__(self, oauth_request):
        # Raw values from the incoming OAuth request; the timestamp is
        # reused when stamping newly created tokens.
        self.signature = oauth_request.parameters.get('oauth_signature', None)
        self.timestamp = oauth_request.parameters.get('oauth_timestamp', None)
        self.scope = oauth_request.parameters.get('scope', 'all')

    def lookup_consumer(self, key):
        # Returns the Consumer for `key` (remembered on self for the
        # fetch_* methods), or None when unknown.
        try:
            self.consumer = Consumer.objects.get(key=key)
            return self.consumer
        except Consumer.DoesNotExist:
            return None

    def lookup_token(self, token_type, token):
        # Maps the oauth-level type names onto the model's constants and
        # fetches the matching token; None when not found.
        if token_type == 'request':
            token_type = Token.REQUEST
        elif token_type == 'access':
            token_type = Token.ACCESS
        try:
            self.request_token = Token.objects.get(key=token,
                token_type=token_type)
            return self.request_token
        except Token.DoesNotExist:
            return None

    def lookup_nonce(self, oauth_consumer, oauth_token, nonce):
        # Replay protection: returns None the first time a nonce is seen
        # (recording it as a side effect), the nonce key on a replay.
        if oauth_token is None:
            return None
        nonce, created = Nonce.objects.get_or_create(consumer_key=oauth_consumer.key,
            token_key=oauth_token.key,
            key=nonce)
        if created:
            return None
        else:
            return nonce.key

    def fetch_request_token(self, oauth_consumer):
        # Issues a new request token for the consumer previously found by
        # lookup_consumer; None when the keys disagree.
        if oauth_consumer.key == self.consumer.key:
            self.request_token = Token.objects.create_token(consumer=self.consumer,
                token_type=Token.REQUEST,
                timestamp=self.timestamp)
            return self.request_token
        return None

    def fetch_access_token(self, oauth_consumer, oauth_token):
        # Exchanges a user-approved request token for an access token,
        # carrying the approving user over; None when any check fails.
        if oauth_consumer.key == self.consumer.key \
        and oauth_token.key == self.request_token.key \
        and self.request_token.is_approved:
            self.access_token = Token.objects.create_token(consumer=self.consumer,
                token_type=Token.ACCESS,
                timestamp=self.timestamp,
                user=self.request_token.user)
            return self.access_token
        return None

    def authorize_request_token(self, oauth_token, user):
        # Marks the current request token as approved by `user`.
        if oauth_token.key == self.request_token.key:
            # authorize the request token in the store
            self.request_token.is_approved = True
            self.request_token.user = user
            self.request_token.save()
            return self.request_token
        return None
from M2Crypto import RSA
def getPublicKey(keys):
    """Return the PEM-encoded public half of the RSA key pair `keys`."""
    return RSA.new_pub_key(keys.pub()).as_pem(cipher=None)
def encrypt(plaintext, keys):
    """Encrypt `plaintext` with the private key in `keys` (padding mode 1)."""
    return keys.private_encrypt(plaintext, 1)
def decrypt(encText, pubKey):
    """Decrypt `encText` with the public key `pubKey` (padding mode 1).

    Bug fix: the original returned the undefined name `decl`, raising
    NameError on every call; the decrypted value `dec` is returned now.
    """
    dec = pubKey.public_decrypt(encText, 1)
    return dec
def loadKeys(pem):
    """Load an RSA key pair from the PEM string `pem`."""
    key_pair = RSA.load_key_string(pem)
    return key_pair
| Python |
# jplot2d (Jython) script: S160/S100 flux-ratio vs. redshift model tracks
# for several SED classes (GAL/STARB/COMP/AGN2/AGN1), overlaid with the
# corresponding data points. float('inf') upper error bars mark limits.
from jplot2d.python import *
from org.jplot2d.swing import JPlot2DFrame
# Create plot
p = plot(preferredContentSize=(400, 230), sizeMode=AutoPackSizeMode())
p.legend.visible = 0
pf = JPlot2DFrame(p)
pf.size = (480, 360)
pf.visible = 1
# Axes: index 0 is the labelled main axis, index 1 the unlabelled mirror.
xaxes = axes(2, range=(0, 4), tickInterval=1, minorTicks=9)
xaxes[0].title.text = "z"
xaxes[1].labelVisible = 0
yaxes = axes(2, transform=LOGARITHMIC, range=(0.4, 7), tickInterval=1)
# NOTE(review): in TeX "S_160" subscripts only the "1"; possibly meant
# S_{160}/S_{100} — confirm the rendered title.
yaxes[0].title.text = "$\\mathrm{S_160/S_100}$"
yaxes[1].labelVisible = 0
p.addXAxes(xaxes)
p.addYAxes(yaxes)
# Layer
layer = layer()
p.addLayer(layer, xaxes[0], yaxes[0])
# Common redshift sampling for all model tracks (z = 0 .. 4.9).
x = 0.00000, 0.100000, 0.200000, 0.300000, 0.400000, 0.500000, 0.600000, 0.700000, 0.800000, 0.900000, 1.00000, 1.10000, 1.20000, 1.30000, 1.40000, 1.50000, 1.60000, 1.70000, 1.80000, 1.90000, 2.00000, 2.10000, 2.20000, 2.30000, 2.40000, 2.50000, 2.60000, 2.70000, 2.80000, 2.90000, 3.00000, 3.10000, 3.20000, 3.30000, 3.40000, 3.50000, 3.60000, 3.70000, 3.80000, 3.90000, 4.00000, 4.10000, 4.20000, 4.30000, 4.40000, 4.50000, 4.60000, 4.70000, 4.80000, 4.90000
# GAL model track (green, long-dashed)
galy = 1.03222, 1.25094, 1.49081, 1.74462, 2.01059, 2.28914, 2.57141, 2.84729, 3.10365, 3.33465, 3.53097, 3.68335, 3.78715, 3.83992, 3.84221, 3.79946, 3.71873, 3.61091, 3.48488, 3.34745, 3.20485, 3.06281, 2.92494, 2.79436, 2.67321, 2.56291, 2.46347, 2.37459, 2.29566, 2.22629, 2.16592, 2.11373, 2.06849, 2.02944, 1.99586, 1.96725, 1.94287, 1.92241, 1.90535, 1.89116, 1.87969, 1.87064, 1.86369, 1.85858, 1.85505, 1.85300, 1.85217, 1.85245, 1.85373, 1.85584
gal = xygraph(x, galy, color=(95, 207, 125), lineStroke=stroke(2, [6, 6]))
layer.addGraph(gal)
# STARB model track (light blue, dash-dot)
starby = 0.912616, 1.06472, 1.21351, 1.35387, 1.48253, 1.59905, 1.70259, 1.79352, 1.86953, 1.93058, 1.97602, 2.00531, 2.02072, 2.02515, 2.02118, 2.01191, 2.00092, 1.99176, 1.98598, 1.98427, 1.98757, 1.99676, 2.01133, 2.02998, 2.05105, 2.07340, 2.09615, 2.11933, 2.14282, 2.16651, 2.19010, 2.21347, 2.23635, 2.25868, 2.28044, 2.30127, 2.32095, 2.33888, 2.35458, 2.36749, 2.37679, 2.38218, 2.38364, 2.38127, 2.37430, 2.36275, 2.34753, 2.33303, 2.31973, 2.30573
starb = xygraph(x, starby, color=(131, 213, 227), lineStroke=stroke(2, [1, 3, 6, 3]))
layer.addGraph(starb)
# COMP model track (magenta, dotted)
compy = 0.501706, 0.562159, 0.620910, 0.686251, 0.746177, 0.796718, 0.848579, 0.907463, 0.973662, 1.04519, 1.11828, 1.19289, 1.26902, 1.34396, 1.41677, 1.48817, 1.55816, 1.62519, 1.68898, 1.74984, 1.80912, 1.86740, 1.92393, 1.97873, 2.03165, 2.08255, 2.13060, 2.17504, 2.21671, 2.25848, 2.30272, 2.35204, 2.40766, 2.46871, 2.53245, 2.59683, 2.66079, 2.72436, 2.78559, 2.84441, 2.89868, 2.94720, 2.98878, 3.02225, 3.04582, 3.05892, 3.06194, 3.05405, 3.03615, 3.00939
comp = xygraph(x, compy, color=(200, 99, 184), lineStroke=stroke(2, [1, 3]))
layer.addGraph(comp)
# AGN2 model track (red, solid)
agn2y = 0.760343, 0.857363, 0.938720, 1.00954, 1.09040, 1.19802, 1.33700, 1.50992, 1.71970, 1.96892, 2.25033, 2.53001, 2.79015, 3.02753, 3.24030, 3.42664, 3.58853, 3.72476, 3.83835, 3.92658, 3.99002, 4.03012, 4.05141, 4.06144, 4.06581, 4.06756, 4.06678, 4.06253, 4.05450, 4.04236, 4.02581, 4.00460, 3.97868, 3.94816, 3.91312, 3.87413, 3.83128, 3.78502, 3.73568, 3.68375, 3.62957, 3.57363, 3.51634, 3.45818, 3.39824, 3.33743, 3.27520, 3.21520, 3.15798, 3.10293
agn2 = xygraph(x, agn2y, color=(229, 57, 42), lineStroke=stroke(2))
layer.addGraph(agn2)
# AGN1 model track (dark blue, long-dashed)
agn1y = 0.857343, 0.951230, 1.01367, 1.04761, 1.06091, 1.06239, 1.05883, 1.05450, 1.05286, 1.05524, 1.06168, 1.07179, 1.08492, 1.10048, 1.11786, 1.13652, 1.15561, 1.17397, 1.19142, 1.20795, 1.22359, 1.23840, 1.25238, 1.26561, 1.27814, 1.29005, 1.30133, 1.31218, 1.32266, 1.33314, 1.34376, 1.35463, 1.36581, 1.37737, 1.38929, 1.40153, 1.41405, 1.42675, 1.43960, 1.45255, 1.46554, 1.47853, 1.49147, 1.50432, 1.51704, 1.52957, 1.54183, 1.55370, 1.56498, 1.57554
agn1 = xygraph(x, agn1y, color=(71, 82, 166), lineStroke=stroke(2, [12, 6]))
layer.addGraph(agn1)
# GAL data points (filled circles); inf upper errors denote lower limits.
galdx = 0.458000, 0.253000, 0.210000, 0.189000, 0.278000, 0.120000, 0.437000, 0.200000, 0.0500000, 0.233000, 0.0790000, 0.519000, 0.136000, 0.299000, 0.254000, 0.211000, 0.438000, 0.556000, 0.136000, 0.337000, 0.562000, 0.348000, 0.114000, 0.224000, 0.377000, 0.0870000, 0.456000, 0.0700000, 0.299000, 0.286000, 0.520000, 0.139000, 0.278000, 0.954000, 0.207000, 0.561000, 0.638000, 0.114000, 0.845000, 0.202000, 0.517000, 0.478000, 0.105000, 1.14600, 0.377000, 0.560000, 0.642000, 0.253000, 0.639000, 0.560000, 0.557000, 0.562000, 0.377000, 0.206000, 0.561000, 0.457000, 0.476000, 0.848000, 0.423000, 0.534000, 0.354000, 0.410000, 0.559000
galdy = 1.19983, 1.40916, 3.97851, 1.54511, 1.64740, 1.62750, 2.01484, 1.39483, 1.31159, 2.37443, 1.08603, 1.71949, 1.33610, 1.17804, 1.71285, 1.15963, 1.43642, 5.52177, 2.52000, 1.94232, 3.49881, 3.03669, 1.54654, 4.35791, 1.95578, 1.08363, 1.63426, 0.619898, 1.24222, 2.34942, 2.12971, 0.751623, 2.06348, 2.41457, 1.54825, 1.93624, 2.67497, 0.776319, 2.51536, 0.894211, 1.86535, 2.19948, 1.85952, 2.46762, 2.38978, 1.88730, 2.33775, 2.24698, 2.19522, 2.18212, 1.67340, 1.58139, 2.13363, 1.55927, 1.11779, 1.21026, 1.96498, 1.92929, 1.42285, 1.84178, 1.46110, 1.74453, 1.62121
galdyel = (0,) * 63
galdyeh = 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, float('inf'), 0, 0, 0, 0, 0, float('inf'), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, float('inf'), 0, 0, 0, 0, 0, 0, float('inf'), float('inf'), 0, float('inf'), float('inf'), float('inf'), float('inf'), 0, 0, float('inf'), 0, 0, 0, float('inf'), float('inf'), 0, float('inf'), 0, float('inf'), float('inf')
gald = xygraph(galdx, galdy, None, None, galdyel, galdyeh, lineVisible=0, symbolVisible=1, symbolShape=SymbolShape.FCIRCLE, symbolSize=6, color=(95,207,125))
layer.addGraph(gald)
# STARB data points (stars)
starbdx = 1.22400, 0.276000, 0.792000, 0.971000, 0.638000, 2.07800, 0.276000, 0.837000, 2.00000, 1.14800, 0.678000, 1.44900, 1.76000, 1.27000, 0.965000, 1.01300, 1.52300, 4.42800, 0.556000, 0.817000, 1.24800, 1.54800, 0.590000, 0.534000, 0.634000, 1.01600, 0.835000, 0.937000, 0.839000, 2.23500, 1.46500, 0.846000, 0.472000, 1.54800, 1.36300, 1.01200, 1.67800, 1.15200, 2.49000, 1.79000, 0.761000, 1.57400, 0.821000, 1.42400, 0.835000, 0.845000, 1.73200, 0.914000, 0.678000, 1.22600, 1.91700, 1.15200, 0.486000, 1.52500, 0.935000, 0.711000, 1.70500, 2.20300, 1.22300, 1.54800, 3.15700, 0.784000, 1.60400, 1.47300, 1.01300, 1.02100, 0.855000, 1.44900, 0.850000, 1.02900, 1.40000, 0.940000, 0.959000, 1.22400, 1.01700, 1.57400, 1.03100, 1.02200, 1.01600, 2.68200, 2.53800, 0.796000
starbdy = 1.86449, 1.67325, 1.41945, 1.93981, 1.19289, 3.11257, 1.60635, 1.40437, 4.23058, 1.89618, 0.761027, 1.39534, 3.40534, 1.41488, 2.83191, 2.78433, 2.56756, 3.24429, 3.78861, 2.80002, 3.17892, 1.63983, 1.90686, 1.91320, 1.66525, 1.32813, 4.64642, 1.18533, 1.55998, 3.95584, 3.19948, 1.39701, 3.73524, 3.40942, 3.72253, 0.927855, 3.51171, 3.48882, 3.41758, 1.86501, 1.20016, 3.23265, 1.88261, 3.19712, 1.23492, 2.02955, 2.80465, 0.860281, 1.12129, 2.11778, 2.68806, 2.67497, 1.13344, 2.55834, 2.55175, 2.50633, 2.49022, 2.45448, 1.72822, 2.37990, 2.30865, 1.28932, 2.28265, 2.23398, 2.21146, 2.17891, 1.14953, 1.52855, 1.40540, 2.03310, 2.01353, 0.874178, 1.99085, 1.98116, 1.89042, 1.52757, 1.87413, 1.86122, 1.83856, 1.83197, 1.71521, 1.68282
starbdyel = (0,) * 82
starbdyeh = 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, float('inf'), 0, 0, float('inf'), 0, 0, float('inf'), 0, float('inf'), 0, float('inf'), float('inf'), float('inf'), 0, 0, float('inf'), 0, float('inf'), 0, 0, float('inf'), 0, 0, 0, float('inf'), float('inf'), 0, float('inf'), float('inf'), float('inf'), float('inf'), float('inf'), 0, float('inf'), float('inf'), 0, float('inf'), float('inf'), float('inf'), float('inf'), 0, 0, 0, float('inf'), float('inf'), 0, float('inf'), float('inf'), float('inf'), 0, float('inf'), float('inf'), float('inf'), float('inf'), float('inf'), float('inf')
starbd = xygraph(starbdx, starbdy, None, None, starbdyel, starbdyeh, lineVisible=0, symbolVisible=1, symbolShape=SymbolShape.STAR, symbolSize=8, color=(131, 213, 227))
layer.addGraph(starbd)
# COMP data points (filled squares)
compdx = 2.00200, 2.42000, 2.00500, 2.79400, 3.49300, 2.66000, 3.72200, 1.84300, 3.86500, 2.43400, 2.75600, 1.61000, 0.764000
compdy = 2.11174, 7.80762, 1.83755, 2.77715, 4.02086, 3.82929, 3.75145, 1.64104, 1.02414, 2.04871, 1.82877, 0.580657, 1.71854
compdyel = (0,) * 13
compdyeh = 0, float('inf'), 0, 0, float('inf'), float('inf'), float('inf'), 0, 0, 0, float('inf'), 0, float('inf')
compd = xygraph(compdx, compdy, None, None, compdyel, compdyeh, lineVisible=0, symbolVisible=1, symbolShape=SymbolShape.FSQUARE, symbolSize=6, color=(200, 99, 184))
layer.addGraph(compd)
# AGN2 data points (filled triangles)
agn2dx = 0.279000, 0.473000, 0.410000, 0.640000, 0.423000, 0.489000, 0.433000, 0.638000, 0.639000, 0.507000, 0.858000, 0.475000, 0.306000, 0.555000, 0.458000, 0.438000, 0.934000, 0.903000, 0.946000, 0.817000, 0.694000, 0.975000, 1.21500, 0.799000, 0.460000, 0.557000, 0.529000, 0.489000, 1.19500, 0.849000, 0.202000, 0.566000, 0.840000, 0.851000, 1.01400, 0.475000, 0.559000, 1.02100, 1.33600, 0.841000, 0.935000, 0.271000, 0.557000, 0.839000, 1.00700, 0.679000, 0.840000, 0.489000, 0.847000, 0.763000, 0.508000, 1.92000, 1.70500, 0.746000, 0.683000, 1.26400, 0.975000, 0.936000, 1.01400, 0.940000, 0.836000, 1.01600, 3.02700, 0.417000, 0.502000, 0.556000, 1.14500, 1.14400, 1.67800, 0.517000, 1.75900, 0.612000, 1.30700, 1.01800, 0.454000, 0.484000
agn2dy = 1.30293, 1.32434, 1.16939, 1.83265, 1.32471, 1.26253, 1.17360, 1.41605, 1.20282, 2.31398, 1.78550, 1.56257, 1.21428, 3.12925, 1.06307, 1.44278, 1.76653, 1.59544, 5.95989, 5.93360, 3.30248, 2.34596, 2.13678, 1.75307, 0.878780, 1.12357, 1.12512, 1.08773, 4.48744, 2.08833, 0.688018, 4.21850, 4.14075, 1.61897, 1.98866, 1.71554, 1.25268, 3.74834, 2.78586, 2.09353, 3.37877, 1.42903, 2.39244, 2.39662, 1.60694, 0.881049, 1.53038, 1.33242, 1.46893, 1.79275, 2.96701, 1.91567, 2.85022, 2.58988, 1.58679, 1.33070, 2.65534, 1.54625, 2.72044, 2.66857, 1.38688, 2.60687, 2.59083, 0.953587, 1.11573, 0.937821, 2.24057, 2.07835, 2.07835, 2.05569, 2.00705, 1.99746, 1.93249, 1.89042, 1.06601, 0.988098
agn2dyel = (0,) * 76
agn2dyeh = 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, float('inf'), float('inf'), 0, 0, 0, 0, 0, 0, 0, 0, float('inf'), 0, 0, float('inf'), float('inf'), 0, 0, 0, 0, float('inf'), 0, 0, float('inf'), 0, 0, 0, 0, 0, 0, 0, 0, 0, float('inf'), 0, float('inf'), 0, 0, 0, 0, 0, float('inf'), float('inf'), 0, float('inf'), float('inf'), 0, 0, 0, float('inf'), float('inf'), float('inf'), float('inf'), float('inf'), float('inf'), float('inf'), float('inf'), 0, 0
agn2d = xygraph(agn2dx, agn2dy, None, None, agn2dyel, agn2dyeh, lineVisible=0, symbolVisible=1, symbolShape=SymbolShape.FTRIANGLE, symbolSize=8, color=(229, 57, 42))
layer.addGraph(agn2d)
# AGN1 data points (up-triangles)
agn1dx = 4.16400, 3.23300, 3.58300
agn1dy = 0.608247, 2.42214, 1.41423
agn1dyel = 0, 0, 0
agn1dyeh = 0, float('inf'), 0
agn1d = xygraph(agn1dx, agn1dy, None, None, agn1dyel, agn1dyeh, lineVisible=0, symbolVisible=1, symbolShape=SymbolShape.UTRIANGLE, symbolSize=8, color=(71, 82, 166))
layer.addGraph(agn1d)
| Python |
# jplot2d (Jython) script: three vertically stacked redshift histograms
# (lsp/msp/hsp subplots) sharing x tick manager and y ranges.
from jplot2d.python import *
from org.jplot2d.swing import JPlot2DFrame
# Create plot
p = plot()
p.sizeMode = AutoPackSizeMode()
pf = JPlot2DFrame(p)
pf.size = (480, 480)
pf.visible = 1
# create subplots: grid row 0 is at the bottom (lsp), row 2 at the top (hsp)
lsp = subplot(preferredContentSize=(400, 100))
msp = subplot(preferredContentSize=(400, 100))
hsp = subplot(preferredContentSize=(400, 100))
p.layoutDirector = GridLayoutDirector();
p.addSubplot(lsp, GridConstraint(0, 0))
p.addSubplot(msp, GridConstraint(0, 1))
p.addSubplot(hsp, GridConstraint(0, 2))
# x-axes: only the bottom subplot shows labels and the title
lxaxes = axes(2, transform=LOGARITHMIC, range=(0.1, 6))
lxaxes[0].title.text = "Redshift"
lxaxes[0].title.fontScale = 1
lxaxes[0].tickVisible = 0
lxaxes[1].tickVisible = 0
lxaxes[1].labelVisible = 0
lsp.addXAxes(lxaxes)
hxaxes = axes(2)
hxaxes[0].tickVisible = 0
hxaxes[0].labelVisible = 0
hxaxes[1].tickVisible = 0
hxaxes[1].labelVisible = 0
hsp.addXAxes(hxaxes)
# Sharing the tick manager keeps the top panel aligned with the bottom one.
hxaxes[0].tickManager = lxaxes[0].tickManager
# y-axes: identical "Number" axes on all three panels, tick-linked
lyaxes = axes(2, range=(0, 34))
lyaxes[0].title.text = "Number"
lyaxes[0].title.fontScale = 1
lyaxes[1].labelVisible = 0
lsp.addYAxes(lyaxes)
myaxes = axes(2, range=(0, 34))
myaxes[0].title.text = "Number"
myaxes[0].title.fontScale = 1
myaxes[1].labelVisible = 0
msp.addYAxes(myaxes)
myaxes[0].tickManager = lyaxes[0].tickManager
hyaxes = axes(2, range=(0, 34))
hyaxes[0].title.text = "Number"
hyaxes[0].title.fontScale = 1
hyaxes[1].labelVisible = 0
hsp.addYAxes(hyaxes)
hyaxes[0].tickManager = lyaxes[0].tickManager
# Layer per subplot; the middle panel reuses the top panel's x axis.
hlayer = layer()
hsp.addLayer(hlayer, hxaxes[0], hyaxes[0])
mlayer = layer()
msp.addLayer(mlayer, hxaxes[0], myaxes[0])
llayer = layer()
lsp.addLayer(llayer, lxaxes[0], lyaxes[0])
# Redshift bin edges and per-bin counts for the top (GOODS-N) panel.
z_grid = 0.00000, 0.100000, 0.200000, 0.330000, 0.480000, 0.630000, 0.800000, 1.00000, 1.20000, 1.40000, 1.65000, 1.90000, 2.20000, 2.50000, 2.85000, 3.20000, 3.60000, 4.05000, 4.55000, 5.10000, 5.70000
p1_all = 0, 0, 3, 7, 12, 12, 28, 19, 9, 8, 2, 5, 5, 6, 5, 4, 1, 1, 0, 1, 0
p1_both = 0, 0, 1, 1, 3, 3, 6, 3, 3, 3, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0
p1_100 = 0, 0, 1, 1, 5, 3, 7, 4, 4, 3, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0
p1_160 = 0, 0, 1, 1, 5, 5, 10, 6, 4, 3, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0
hlayer.addGraph(xygraph(z_grid, p1_all, chartType=HISTOGRAM_EDGE, name="GOODS-N AGNs"))
# Bug fix: both hatched histograms were labelled "100$\micro$m detecte
# only"; the p1_160 series is the 160 micron-only detections, and the
# typos "detecte"/"bamds" in the legend strings are corrected.
hlayer.addGraph(xygraph(z_grid, p1_100, chartType=HISTOGRAM_EDGE, fillEnabled=1, fillPaint=LineHatchPaint(0, 45, 4), name="100$\\micro$m detected only"))
hlayer.addGraph(xygraph(z_grid, p1_160, chartType=HISTOGRAM_EDGE, fillEnabled=1, fillPaint=LineHatchPaint(0, -45, 4), name="160$\\micro$m detected only"))
hlayer.addGraph(xygraph(z_grid, p1_both, chartType=HISTOGRAM_EDGE, fillEnabled=1, fillPaint=GRAY, name="Detected in both bands"))
setp(hsp.legend, position=None, location=(390, 90), columns=1, HAlign=HAlign.RIGHT, VAlign=VAlign.TOP, fontScale=0.9, borderVisible=0)
# Middle panel counts (second field).
p2_all = 0, 0, 0, 1, 0, 0, 3, 7, 17, 17, 17, 15, 7, 4, 4, 3, 1, 0, 0, 0, 0
p2_both = 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0
p2_100 = 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0
p2_160 = 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 1, 2, 0, 0, 1, 0, 0, 0, 0, 0, 0
mlayer.addGraph(xygraph(z_grid, p2_all, chartType=HISTOGRAM_EDGE))
mlayer.addGraph(xygraph(z_grid, p2_100, chartType=HISTOGRAM_EDGE, fillEnabled=1, fillPaint=LineHatchPaint(0, 45, 4)))
mlayer.addGraph(xygraph(z_grid, p2_160, chartType=HISTOGRAM_EDGE, fillEnabled=1, fillPaint=LineHatchPaint(0, -45, 4)))
mlayer.addGraph(xygraph(z_grid, p2_both, chartType=HISTOGRAM_EDGE, fillEnabled=1, fillPaint=GRAY))
# Bottom panel counts (combined totals: p3 = p1 + p2 per bin).
p3_all = 0, 0, 3, 8, 12, 12, 31, 26, 26, 25, 19, 20, 12, 10, 9, 7, 2, 1, 0, 1, 0
p3_both = 0, 0, 1, 1, 3, 3, 6, 3, 3, 4, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0
p3_100 = 0, 0, 1, 1, 5, 3, 7, 4, 5, 4, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0
p3_160 = 0, 0, 1, 1, 5, 5, 10, 6, 6, 5, 2, 3, 0, 1, 1, 0, 0, 0, 0, 0, 0
# Bug fix: the bottom panel previously re-plotted the p2_* series,
# leaving the p3_* totals defined above completely unused; plot p3_*.
llayer.addGraph(xygraph(z_grid, p3_all, chartType=HISTOGRAM_EDGE))
llayer.addGraph(xygraph(z_grid, p3_100, chartType=HISTOGRAM_EDGE, fillEnabled=1, fillPaint=LineHatchPaint(0, 45, 4)))
llayer.addGraph(xygraph(z_grid, p3_160, chartType=HISTOGRAM_EDGE, fillEnabled=1, fillPaint=LineHatchPaint(0, -45, 4)))
llayer.addGraph(xygraph(z_grid, p3_both, chartType=HISTOGRAM_EDGE, fillEnabled=1, fillPaint=GRAY))
| Python |
# jplot2d (Jython) minimal example: a single xygraph in a swing frame.
from jplot2d.python import *
# the sizeMode must be set
p=plot(sizeMode=AutoPackSizeMode())
from org.jplot2d.swing import JPlot2DFrame
pf = JPlot2DFrame(p)
pf.size = (480, 480)
pf.visible = 1
# Sample data (comma expressions build tuples).
x = 0,1,2,3
y = 0,2,1,4
graph = xygraph(x, y)
layer = layer()
layer.addGraph(graph)
yaxis=axis()
xaxis=axis()
# the axes must be added before adding layers
p.addYAxis(yaxis)
p.addXAxis(xaxis)
# The layer is attached via the axes' transforms.
p.addLayer(layer,xaxis.tickManager.axisTransform, yaxis.tickManager.axisTransform)
# jplot2d (Jython) script: spectral energy distribution (upper subplot)
# with a model fit and literature photometry, plus fit residuals (lower
# subplot). Both panels are log-x; the upper panel is log-y as well.
from jplot2d.python import *
from org.jplot2d.swing import JPlot2DFrame
# Create plot
p = plot(sizeMode=AutoPackSizeMode(), layoutDirector=GridLayoutDirector())
pf = JPlot2DFrame(p)
pf.size = (480, 480)
pf.visible = 1
# create subplots: usp (SED) above lsp (residuals)
usp = subplot(preferredContentSize=(380, 260))
lsp = subplot(preferredContentSize=(380, 160))
lsp.margin.extraTop = 10
p.addSubplot(usp, GridConstraint(0, 1))
p.addSubplot(lsp, GridConstraint(0, 0))
# upper subplot Axes
uspx = axes(2, range=(10, 2e6), transform=LOGARITHMIC)
uspx[0].title.text = "wavelength $\\mathrm{\\lambda}$ [$\\mathrm{\\micro}$m]"
uspx[0].title.fontSize = 12
uspx[1].labelVisible = 0
uspy = axes(2, range=(0.05, 1200), transform=LOGARITHMIC, labelFormat="%.0f")
uspy[0].title.text = "flux density [Jy]"
uspy[0].title.fontSize = 12
uspy[1].labelVisible = 0
usp.addXAxes(uspx)
usp.addYAxes(uspy)
# lower subplot Axes (linear y for the residuals)
lspx = axes(2, range=(10, 1500), transform=LOGARITHMIC)
lspx[0].title.text = "wavelength $\\mathrm{\\lambda}$ [$\\mathrm{\\micro}$m]"
lspx[0].title.fontSize = 12
lspx[1].labelVisible = 0
lspy = axes(2, range=(-0.7, 0.7), ticks=3)
lspy[0].title.text = "residual [Jy]"
lspy[0].title.fontSize = 12
lspy[1].labelVisible = 0
lsp.addXAxes(lspx)
lsp.addYAxes(lspy)
# Layer
ulayer = layer()
usp.addLayer(ulayer, uspx[0], uspy[0])
llayer = layer()
lsp.addLayer(llayer, lspx[0], lspy[0])
# solid line: power-law model (two endpoints on log-log axes)
solx = 10, 2000000
soly = 0.09, 900
sol = xygraph(solx, soly, color=BLUE)
sol.legendItem.visible = 0
ulayer.addGraph(sol)
# dashed line: alternative model
dlx = 10, 2000000
dly = 0.1, 820
dl = xygraph(dlx, dly, color=BLUE, lineStroke=stroke(1,[1, 3]))
dl.legendItem.visible = 0
ulayer.addGraph(dl)
# ISO photometry point
xx = (15,)
xy = (0.1059,)
xye = (0.0212,)
xl = xygraph(xx, xy, None, None, xye, xye, color=GREEN, lineVisible=0, symbolVisible=1, symbolShape=SymbolShape.SQUARE, name="Xilouris et al. 2004")
ulayer.addGraph(xl)
# IRAS photometry
gx = 24.9, 59.9, 99.8
gy = 0.187, 0.546, 0.559
gye = 0.0281, 0.0819, 0.0839
gl = xygraph(gx, gy, None, None, gye, gye, color=GREEN, lineVisible=0, symbolVisible=1, symbolShape=SymbolShape.FTRIANGLE, name="Golombek et al. 1988")
ulayer.addGraph(gl)
# MIPS photometry
sx = 23.67, 71.3, 156
sy = 0.171, 0.455, 0.582
sye = 0.013, 0.0092, 0.01
sl = xygraph(sx, sy, None, None, sye, sye, color=GREEN, lineVisible=0, symbolVisible=1, symbolShape=SymbolShape.FDIAMOND, name="Shi et al. 2007")
ulayer.addGraph(sl)
# SCUBA photometry
hx = 449, 848
hy = 1.32, 2.48
hye = 0.396, 0.496
hl = xygraph(hx, hy, None, None, hye, hye, color=GREEN, lineVisible=0, symbolVisible=1, symbolShape=SymbolShape.TRIANGLE, name="Haas et al. 2004")
ulayer.addGraph(hl)
# WMAP photometry
wx = 3180, 4910, 7300, 9070, 13000
wy = 6.2, 9.7, 13.3, 15.5, 19.7
wye = 0.4, 0.2, 0.1, 0.09, 0.06
wl = xygraph(wx, wy, None, None, wye, wye, color=GREEN, lineVisible=0, symbolVisible=1, symbolShape=SymbolShape.STAR, name="Wright et al. 2009")
ulayer.addGraph(wl)
# VLA photometry
cx = 20130, 36540, 61730, 180200, 908400
cy = 26.4, 45.8, 70.1, 136.2, 327
cye = 2.643, 3.66, 5.61, 10.89, 16.38
cl = xygraph(cx, cy, None, None, cye, cye, color=GREEN, lineVisible=0, symbolVisible=1, symbolShape=SymbolShape.FOCTAGON, name="Cotton et al. 2009")
ulayer.addGraph(cl)
# HERSCHEL photometry (this work, highlighted in red)
tx = 100, 160, 250, 350, 500
ty = 0.517, 0.673, 0.86, 1.074, 1.426
tye = 0.129, 0.168, 0.215, 0.267, 0.375
tl = xygraph(tx, ty, None, None, tye, tye, color=RED, lineVisible=0, symbolVisible=1, symbolShape=SymbolShape.FOCTAGON, name="this paper")
ulayer.addGraph(tl)
# legend for the upper panel only
setp(usp.legend, position=None, columns=1, location=(-10, 250), HAlign=HAlign.LEFT, VAlign=VAlign.TOP, borderVisible=0, fontSize=12)
# residual panel: zero line plus per-dataset residuals (same symbols)
slrx = 10, 1000
slry = 0, 0
slrl = xygraph(slrx, slry, color=BLUE)
slrl.legendItem.visible=0
llayer.addGraph(slrl)
xry = (-0.01,)
xrl = xygraph(xx, xry, None, None, xye, xye, color=GREEN, lineVisible=0, symbolVisible=1, symbolShape=SymbolShape.SQUARE)
xrl.legendItem.visible = 0
llayer.addGraph(xrl)
gry = 0.01, 0.2, 0.06
grl = xygraph(gx, gry, None, None, gye, gye, color=GREEN, lineVisible=0, symbolVisible=1, symbolShape=SymbolShape.FTRIANGLE)
grl.legendItem.visible = 0
llayer.addGraph(grl)
sry = 0.0, 0.07, -0.11
# NOTE(review): unlike its siblings, this residual series is plotted
# without error bars — confirm whether sye was meant to be passed here.
srl = xygraph(sx, sry, color=GREEN, lineVisible=0, symbolVisible=1, symbolShape=SymbolShape.FDIAMOND)
srl.legendItem.visible = 0
llayer.addGraph(srl)
hry = -0.23, -0.03
hrl = xygraph(hx, hry, None, None, hye, hye, color=GREEN, lineVisible=0, symbolVisible=1, symbolShape=SymbolShape.TRIANGLE)
hrl.legendItem.visible = 0
llayer.addGraph(hrl)
trry = 0.01, -0.03, -0.13, -0.21, -0.26
trl = xygraph(tx, trry, None, None, tye, tye, color=RED, lineVisible=0, symbolVisible=1, symbolShape=SymbolShape.FOCTAGON)
trl.legendItem.visible = 0
llayer.addGraph(trl)
| Python |
# jplot2d (Jython) minimal example: render a 2x2 matrix as an imagegraph.
from jplot2d.python import *
# the sizeMode must be set
p=plot(sizeMode=AutoPackSizeMode())
from org.jplot2d.swing import JPlot2DFrame
pf = JPlot2DFrame(p)
pf.size = (480, 480)
pf.visible = 1
# 2x2 intensity matrix
z = [[0,1],[2,3.1]]
graph = imagegraph(z)
layer = layer()
layer.addGraph(graph)
yaxis=axis()
xaxis=axis()
# the axes must be added before adding layers
p.addYAxis(yaxis)
p.addXAxis(xaxis)
# The layer is attached via the axes' transforms.
p.addLayer(layer,xaxis.tickManager.axisTransform, yaxis.tickManager.axisTransform)
# jplot2d (Jython) script: molecular abundance (left log axis, red) and
# kinetic temperature T_K (right linear axis, blue) vs. radius, with a
# secondary top x-axis in arcseconds and in-plot text annotations.
from jplot2d.python import *
from org.jplot2d.swing import JPlot2DFrame
# Create plot
p = plot(preferredContentSize=(300, 200), sizeMode=AutoPackSizeMode(), fontSize = 16)
p.legend.visible = 0
pf = JPlot2DFrame(p)
pf.size = (480, 360)
pf.visible = 1
# Axes
xaxis = axis(transform=LOGARITHMIC, range=(5e13, 2.5e17))
xaxis.title.text = "r (cm)"
xaxis.title.fontScale = 1
# Top axis: same radius expressed in arcseconds (independent range).
xaxisTop = axis(position=POSITIVE_SIDE, transform=LOGARITHMIC, range=(2.8e-2, 2e2), labelFormat="%.0m")
xaxisTop.title.text = u"r(\u2033)"
xaxisTop.title.fontScale = 1
yaxisLeft = axis(color=RED, transform=LOGARITHMIC, range=(3e-10, 7e-7))
yaxisLeft.title.text = "X"
yaxisLeft.title.fontScale = 1
yaxisRight = axis(color=BLUE, position=POSITIVE_SIDE, transform=LINEAR, range=(0, 1200), tickInterval=500)
yaxisRight.title.text = "$\\mathrm{T_K}$"
yaxisRight.title.fontScale = 1
p.addXAxis(xaxis)
p.addXAxis(xaxisTop)
p.addYAxis(yaxisLeft)
p.addYAxis(yaxisRight)
# Tk Layer (blue, plotted against the right axis)
tkx = 1.1e14, 1e15, 1e16, 1e17
tky = 1200, 360, 80, 0
tkl = layer(xygraph(tkx, tky), color=BLUE)
tka = symbolannotation(5e14, 600, "T$\\mathrm{_K}$", fontScale=1.2)
tkl.addAnnotation(tka)
p.addLayer(tkl, xaxis, yaxisRight)
# SiC2 abundance profile (red, left axis)
scx = 5e13, 1e16, 2e16, 7e16, 1e17, 2.1e17
scy = 2e-7, 2e-7, 5e-7, 2e-7, 5e-8, 3e-10
scl = layer(xygraph(scx, scy), color=RED)
sca = symbolannotation(2e16, 1e-7, "SiC$\\mathrm{_2}$", fontScale=1.2)
scl.addAnnotation(sca)
p.addLayer(scl, xaxis, yaxisLeft)
# SiC2 LTE abundance profile (green, left axis)
ltex = 5e13, 6e13, 9e13, 1.3e14, 2e14, 2.1e14, 2.7e14
ltey = 5e-8, 3e-7, 1.8e-7, 3e-7, 4e-8, 6e-8, 3e-10
ltel = layer(xygraph(ltex, ltey), color=GREEN)
ltea = symbolannotation(7e13, 3e-9, "SiC$\\mathrm{_2}$\nLTE", fontScale = 1.2)
ltel.addAnnotation(ltea)
p.addLayer(ltel, xaxis, yaxisLeft)
| Python |
from jplot2d.python import *
from org.jplot2d.swing import JPlot2DFrame
# Example: scatter plot with symmetric x/y error bars for four data groups,
# mirrored axes (labels hidden on the top/right pair), a dashed reference
# line, a shaded horizontal strip, and a custom-placed legend.
# Create plot
p = plot(preferredContentSize=(300, 300), sizeMode=AutoPackSizeMode())
pf = JPlot2DFrame(p)
pf.size = (480, 480)
pf.visible = 1
# Axes: axes(2, ...) creates a bottom/top (or left/right) pair.
xaxes = axes(2, tickSide=AxisTickSide.OUTWARD, range=(0.8, 2.2))
xaxes[0].title.text = "S$_100$/S$_24$"
xaxes[1].labelVisible = 0
yaxes = axes(2, range=(0, 5.2), tickSide=AxisTickSide.OUTWARD, tickInterval=1, autoMinorTicks=0)
yaxes[0].title.text = "SFR$_\\mathrm{FIR}$/SFR$_\\mathrm{24\\microm}$"
yaxes[1].labelVisible = 0
p.addXAxes(xaxes)
p.addYAxes(yaxes)
# Layer
layer = layer()
p.addLayer(layer, xaxes[0], yaxes[1])
# horizontal dash line
hdl = hlineannotation(1, color=GRAY, stroke=stroke(1, [6, 6]))
layer.addAnnotation(hdl);
# horizontal strip; ZOrder=-1 draws it behind the data points.
strip = hstripannotation(0.5, 1.5, fillPaint=(251, 232, 189), ZOrder=-1)
layer.addAnnotation(strip);
# Each group below: _1 = x, _3 = y, _2 = x error (+/-), _4 = y error (+/-).
b1_1 = 1.419899, 1.619893, 1.419907, 1.319923, 1.810285, 1.841473, 1.608309
b1_2 = 0.017994, 0.017103, 0.015887, 0.017436, 0.037015, 0.036288, 0.023305
b1_3 = 1.194547, 2.334788, 0.975215, 1.046990, 4.026402, 4.353637, 1.914610
b1_4 = 0.106746, 0.670998, 0.114467, 0.120429, 0.592627, 0.566388, 0.199998
b1 = xygraph(b1_1, b1_3, b1_2, b1_2, b1_4, b1_4, color=GRAY, lineVisible=0, symbolVisible=1, symbolShape=SymbolShape.FTRIANGLE, symbolColor=RED, name="Bullet Cluster in PACS field")
layer.addGraph(b1)
#
bg1_1 = 1.391505, 1.442690, 1.467886, 1.139075, 0.959461, 1.393919, 1.505945, 1.506306, 1.671487, 1.336313, 1.272283, 0.941866, 1.857896
bg1_2 = 0.014170, 0.018310, 0.015610, 0.054709, 0.046394, 0.033695, 0.038559, 0.023530, 0.016676, 0.022353, 0.035198, 0.027152, 0.011652
bg1_3 = 1.005709, 1.128696, 1.483923, 0.824007, 0.430120, 1.229956, 2.137710, 1.136676, 2.438485, 0.910690, 0.965880, 0.409521, 3.091505
bg1_4 = 0.091131, 0.102801, 0.118804, 0.139412, 0.058699, 0.147821, 0.326636, 0.105967, 0.280889, 0.211824, 0.112502, 0.052559, 0.246040
bg1 = xygraph(bg1_1, bg1_3, bg1_2, bg1_2, bg1_4, bg1_4, color=GRAY, lineVisible=0, symbolVisible=1, symbolShape=SymbolShape.FCIRCLE, symbolSize=6, symbolColor=RED, name="BG system in PACS field")
layer.addGraph(bg1)
#
b0_1 = 1.674486, 1.069313, 1.584537, 1.654299, 1.365276, 1.578115, 1.196919, 1.942886, 1.144155, 1.609685, 0.374725, 0.304246, 1.245910, 1.659333, 1.379772
b0_2 = 0.046158, 0.045599, 0.044985, 0.046158, 0.044663, 0.045469, 0.044663, 0.049357, 0.045100, 0.045599, 0.045342, 0.045342, 0.044663, 0.045219, 0.046013
b0_3 = 1.931079, 0.439483, 1.661472, 1.946264, 1.213043, 1.480344, 1.051577, 4.586347, 0.561341, 1.994514, 0.245011, 0.367738, 1.145222, 2.320756, 1.071991
b0_4 = 0.618866, 0.085960, 0.291826, 0.681112, 0.230290, 0.260769, 0.264749, 0.856806, 0.111786, 0.390115, 0.114122, 0.144550, 0.593335, 0.443113, 0.816351
b0 = xygraph(b0_1, b0_3, b0_2, b0_2, b0_4, b0_4, color=GRAY, lineVisible=0, symbolVisible=1, symbolShape=SymbolShape.TRIANGLE, symbolColor=DARK_GRAY, name="Bullet Cluster OutSide PACS (100$\\micro$m estimated)")
layer.addGraph(b0)
#
bg0_1 = 1.548598, 2.066779, 1.143054, 1.648204, 1.721049, 1.417007, 1.807384, 1.491405
bg0_2 = 0.045469, 0.045219, 0.044985, 0.044873, 0.045219, 0.045733, 0.049990, 0.045342
bg0_3 = 1.552600, 3.422128, 0.459055, 1.807932, 2.435197, 0.955189, 3.364770, 1.068040
bg0_4 = 0.312489, 0.465107, 0.080527, 0.324565, 0.495231, 0.165624, 0.684698, 0.183726
bg0 = xygraph(bg0_1, bg0_3, bg0_2, bg0_2, bg0_4, bg0_4, color=GRAY, lineVisible=0, symbolVisible=1, symbolShape=SymbolShape.CIRCLE, symbolSize=6, symbolColor=DARK_GRAY, name="BG system OutSide PACS (100$\\micro$m estimated)")
layer.addGraph(bg0)
# legend: fixed location inside the plot, single column, no border.
setp(p.legend, position=None, columns=1, location=(-20,300), HAlign=HAlign.LEFT, VAlign=VAlign.TOP, borderVisible=0, fontSize=9)
| Python |
from jplot2d.python import *
# the sizeMode must be set
p=plot(sizeMode=AutoPackSizeMode())
from org.jplot2d.swing import JPlot2DFrame
pf = JPlot2DFrame(p)
pf.size = (480, 480)
pf.visible = 1
# 2x2 red/green/blue component arrays combined into one RGB image graph.
r = [[0,1],[2,3]]
g = [[3,0],[1,2]]
b = [[2,3],[0,1]]
graph = rgbimagegraph(r,g,b)
# NOTE: rebinds the name of the layer() factory function in this script scope.
layer = layer()
layer.addGraph(graph)
yaxis=axis()
xaxis=axis()
# the axes must be added before adding layers
p.addYAxis(yaxis)
p.addXAxis(xaxis)
p.addLayer(layer,xaxis.tickManager.axisTransform, yaxis.tickManager.axisTransform) | Python |
#
from org.jplot2d.env import InterfaceInfo
from org.jplot2d.axtype import *
from org.jplot2d.data import *
from org.jplot2d.element import *
from org.jplot2d.element.AxisPosition import *
from org.jplot2d.element.XYGraph.ChartType import *
from org.jplot2d.layout import *
from org.jplot2d.sizing import *
from org.jplot2d.transform.TransformType import *
from org.jplot2d.util import *
from java.awt import Color
from java.awt.Color import *
from java.awt import Paint
from java.awt.geom import Dimension2D
from java.awt.geom import Point2D
from org.python.core import PyArray
from jarray import array
# Shared ElementFactory used by all of the factory helper functions below.
jplot2d_default_element_factory = ElementFactory.getInstance()
def jplot2d_set_prop(iinfo, obj, name, v):
    """Set property *name* on *obj* to *v*.

    Python tuples are converted to the Java value type declared by the
    property's setter: Dimension2D, Point2D, Range, or (for a 3-tuple
    assigned to a Paint property) an RGB Color.
    """
    value = v
    if isinstance(value, tuple):
        expected = iinfo.getPropWriteMethodType(name)
        if expected == Dimension2D:
            value = DoubleDimension2D(value[0], value[1])
        elif expected == Point2D:
            value = Point2D.Double(value[0], value[1])
        elif expected == Range:
            value = Range.Double(value[0], value[1])
        elif expected == Paint and len(value) == 3:
            value = Color(value[0], value[1], value[2])
    setattr(obj, name, value)
def plot(*args, **kwargs):
p = jplot2d_default_element_factory.createPlot()
plotinfo = InterfaceInfo.loadInterfaceInfo(Plot)
for key in kwargs:
if plotinfo.isWritableProp(key):
jplot2d_set_prop(plotinfo, p, key, kwargs[key])
else:
raise AttributeError, "Plot has no attribute " + key
return p
def subplot(*args, **kwargs):
p = jplot2d_default_element_factory.createSubplot()
plotinfo = InterfaceInfo.loadInterfaceInfo(Plot)
for key in kwargs:
if plotinfo.isWritableProp(key):
jplot2d_set_prop(plotinfo, p, key, kwargs[key])
else:
raise AttributeError, "Plot has no attribute " + key
return p
def title(text, *args, **kwargs):
title = jplot2d_default_element_factory.createTitle(text)
iinfo = InterfaceInfo.loadInterfaceInfo(Title)
for key in kwargs:
if iinfo.isWritableProp(key):
jplot2d_set_prop(iinfo, title, key, kwargs[key])
else:
raise AttributeError, "Title has no attribute " + key
return title
def axis(*args, **kwargs):
    """Create a single Axis; convenience wrapper around axes(1, ...)."""
    created = axes(1, *args, **kwargs)
    return created[0]
def axes(n, *args, **kwargs):
axes = jplot2d_default_element_factory.createAxes(n)
axisinfo = InterfaceInfo.loadInterfaceInfo(Axis)
tminfo = InterfaceInfo.loadInterfaceInfo(AxisTickManager)
txfinfo = InterfaceInfo.loadInterfaceInfo(AxisTransform)
for key in kwargs:
if axisinfo.isWritableProp(key):
for axis in axes:
jplot2d_set_prop(axisinfo, axis, key, kwargs[key])
elif tminfo.isWritableProp(key):
jplot2d_set_prop(tminfo, axes[0].tickManager, key, kwargs[key])
elif txfinfo.isWritableProp(key):
jplot2d_set_prop(tminfo, axes[0].tickManager.axisTransform, key, kwargs[key])
else:
raise AttributeError, "Axis has no attribute " + key
return axes
def layer(*args, **kwargs):
layer = jplot2d_default_element_factory.createLayer()
iinfo = InterfaceInfo.loadInterfaceInfo(Layer)
for key in kwargs:
if iinfo.isWritableProp(key):
jplot2d_set_prop(iinfo, layer, key, kwargs[key])
else:
raise AttributeError, "Layer has no attribute " + key
for arg in args:
if isinstance(arg, Graph):
layer.addGraph(arg)
else:
raise TypeError, "Cannot add " + str(type(arg)) + " to layer."
return layer
def xygraph(*args, **kwargs):
graph = jplot2d_default_element_factory.createXYGraph(*args);
ginfo = InterfaceInfo.loadInterfaceInfo(XYGraph)
for key in kwargs:
if ginfo.isWritableProp(key):
jplot2d_set_prop(ginfo, graph, key, kwargs[key])
else:
raise AttributeError, "XYGraph has no attribute " + key
return graph
def imagegraph(*args, **kwargs):
graph = jplot2d_default_element_factory.createImageGraph(*args)
ginfo = InterfaceInfo.loadInterfaceInfo(ImageGraph)
for key in kwargs:
if ginfo.isWritableProp(key):
jplot2d_set_prop(ginfo, graph, key, kwargs[key])
else:
raise AttributeError, "ImageGraph has no attribute " + key
return graph
def rgbimagegraph(*args, **kwargs):
graph = jplot2d_default_element_factory.createRGBImageGraph(*args);
ginfo = InterfaceInfo.loadInterfaceInfo(RGBImageGraph)
for key in kwargs:
if ginfo.isWritableProp(key):
jplot2d_set_prop(ginfo, graph, key, kwargs[key])
else:
raise AttributeError, "RGBImageGraph has no attribute " + key
return graph
def hlineannotation(y, *args, **kwargs):
ann = jplot2d_default_element_factory.createHLineAnnotation(y)
anninfo = InterfaceInfo.loadInterfaceInfo(HLineAnnotation)
for key in kwargs:
if anninfo.isWritableProp(key):
jplot2d_set_prop(anninfo, ann, key, kwargs[key])
else:
raise AttributeError, "HLineAnnotation has no attribute " + key
return ann
def vlineannotation(x, *args, **kwargs):
ann = jplot2d_default_element_factory.createVLineAnnotation(x)
anninfo = InterfaceInfo.loadInterfaceInfo(VLineAnnotation)
for key in kwargs:
if anninfo.isWritableProp(key):
jplot2d_set_prop(anninfo, ann, key, kwargs[key])
else:
raise AttributeError, "VLineAnnotation has no attribute " + key
return ann
def hstripannotation(start, end, *args, **kwargs):
ann = jplot2d_default_element_factory.createHStripAnnotation(start, end)
anninfo = InterfaceInfo.loadInterfaceInfo(HStripAnnotation)
for key in kwargs:
if anninfo.isWritableProp(key):
jplot2d_set_prop(anninfo, ann, key, kwargs[key])
else:
raise AttributeError, "HStripAnnotation has no attribute " + key
return ann
def vstripannotation(start, end, *args, **kwargs):
ann = jplot2d_default_element_factory.createVStripAnnotation(start, end)
anninfo = InterfaceInfo.loadInterfaceInfo(VStripAnnotation)
for key in kwargs:
if anninfo.isWritableProp(key):
jplot2d_set_prop(anninfo, ann, key, kwargs[key])
else:
raise AttributeError, "VStripAnnotation has no attribute " + key
return ann
def rectangleannotation(x1, x2, y1, y2, *args, **kwargs):
ann = jplot2d_default_element_factory.createRectangleAnnotation(x1, x2, y1, y2)
anninfo = InterfaceInfo.loadInterfaceInfo(RectangleAnnotation)
for key in kwargs:
if anninfo.isWritableProp(key):
jplot2d_set_prop(anninfo, ann, key, kwargs[key])
else:
raise AttributeError, "RectangleAnnotation has no attribute " + key
return ann
def symbolannotation(*args, **kwargs):
ann = jplot2d_default_element_factory.createSymbolAnnotation(*args)
anninfo = InterfaceInfo.loadInterfaceInfo(SymbolAnnotation)
for key in kwargs:
if anninfo.isWritableProp(key):
jplot2d_set_prop(anninfo, ann, key, kwargs[key])
else:
raise AttributeError, "SymbolAnnotation has no attribute " + key
return ann
def stroke(width, dash=None):
    """Create a line stroke of the given width.

    dash -- optional dash pattern (sequence of on/off lengths); None = solid.
    """
    return jplot2d_default_element_factory.createStroke(width, dash)
# set property for the given obj
def setp(obj, *args, **kwargs):
if isinstance(obj, Legend):
iinfo = InterfaceInfo.loadInterfaceInfo(Legend)
for key in kwargs:
if iinfo.isWritableProp(key):
jplot2d_set_prop(iinfo, obj, key, kwargs[key])
else:
raise AttributeError, obj + " has no attribute " + key
| Python |
#!/usr/bin/python2.6
#
# Simple http server to emulate api.playfoursquare.com
import logging
import shutil
import sys
import urlparse
import SimpleHTTPServer
import BaseHTTPServer
class RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
  """Handle playfoursquare.com requests, for testing.

  Every known API path is answered with a canned capture file; unknown
  paths get a 404. POST is handled identically to GET.
  """

  # Request path -> capture file served as the response body.
  # (Replaces the previous if/elif ladder in handle_url.)
  PATHS = {
      '/v1/venue': '../captures/api/v1/venue.xml',
      '/v1/addvenue': '../captures/api/v1/venue.xml',
      '/v1/venues': '../captures/api/v1/venues.xml',
      '/v1/user': '../captures/api/v1/user.xml',
      '/v1/checkcity': '../captures/api/v1/checkcity.xml',
      '/v1/checkins': '../captures/api/v1/checkins.xml',
      '/v1/cities': '../captures/api/v1/cities.xml',
      '/v1/switchcity': '../captures/api/v1/switchcity.xml',
      '/v1/tips': '../captures/api/v1/tips.xml',
      '/v1/checkin': '../captures/api/v1/checkin.xml',
      '/history/12345.rss': '../captures/api/v1/feed.xml',
  }

  def do_GET(self):
    """Serve the canned response for self.path (404 for unknown paths)."""
    logging.warn('do_GET: %s, %s', self.command, self.path)
    url = urlparse.urlparse(self.path)
    logging.warn('do_GET: %s', url)
    # (Removed unused query parsing; the old query_keys computation also
    # iterated the parse_qs dict incorrectly.)
    response = self.handle_url(url)
    if response is not None:
      self.send_200()
      try:
        shutil.copyfileobj(response, self.wfile)
      finally:
        # BUG FIX: the capture file was never closed.
        response.close()
      self.wfile.close()

  do_POST = do_GET

  def handle_url(self, url):
    """Return an open capture file for url.path, or None after sending 404."""
    path = self.PATHS.get(url.path)
    if path is None:
      self.send_error(404)
      return None
    logging.warn('Using: %s' % path)
    return open(path)

  def send_200(self):
    """Send a 200 response with an XML content type."""
    self.send_response(200)
    self.send_header('Content-type', 'text/xml')
    self.end_headers()
def main():
if len(sys.argv) > 1:
port = int(sys.argv[1])
else:
port = 8080
server_address = ('0.0.0.0', port)
httpd = BaseHTTPServer.HTTPServer(server_address, RequestHandler)
sa = httpd.socket.getsockname()
print "Serving HTTP on", sa[0], "port", sa[1], "..."
httpd.serve_forever()
# Script entry point.
if __name__ == '__main__':
  main()
| Python |
#!/usr/bin/python
import datetime
import sys
import textwrap
import common
from xml.dom import pulldom
PARSER = """\
/**
* Copyright 2009 Joe LaPenna
*/
package com.joelapenna.foursquare.parsers;
import com.joelapenna.foursquare.Foursquare;
import com.joelapenna.foursquare.error.FoursquareError;
import com.joelapenna.foursquare.error.FoursquareParseException;
import com.joelapenna.foursquare.types.%(type_name)s;
import org.xmlpull.v1.XmlPullParser;
import org.xmlpull.v1.XmlPullParserException;
import java.io.IOException;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Auto-generated: %(timestamp)s
*
* @author Joe LaPenna (joe@joelapenna.com)
* @param <T>
*/
public class %(type_name)sParser extends AbstractParser<%(type_name)s> {
private static final Logger LOG = Logger.getLogger(%(type_name)sParser.class.getCanonicalName());
private static final boolean DEBUG = Foursquare.PARSER_DEBUG;
@Override
public %(type_name)s parseInner(XmlPullParser parser) throws XmlPullParserException, IOException,
FoursquareError, FoursquareParseException {
parser.require(XmlPullParser.START_TAG, null, null);
%(type_name)s %(top_node_name)s = new %(type_name)s();
while (parser.nextTag() == XmlPullParser.START_TAG) {
String name = parser.getName();
%(stanzas)s
} else {
// Consume something we don't understand.
if (DEBUG) LOG.log(Level.FINE, "Found tag that we don't recognize: " + name);
skipSubTree(parser);
}
}
return %(top_node_name)s;
}
}"""
BOOLEAN_STANZA = """\
} else if ("%(name)s".equals(name)) {
%(top_node_name)s.set%(camel_name)s(Boolean.valueOf(parser.nextText()));
"""
GROUP_STANZA = """\
} else if ("%(name)s".equals(name)) {
%(top_node_name)s.set%(camel_name)s(new GroupParser(new %(sub_parser_camel_case)s()).parse(parser));
"""
COMPLEX_STANZA = """\
} else if ("%(name)s".equals(name)) {
%(top_node_name)s.set%(camel_name)s(new %(parser_name)s().parse(parser));
"""
STANZA = """\
} else if ("%(name)s".equals(name)) {
%(top_node_name)s.set%(camel_name)s(parser.nextText());
"""
def main():
  """Generate a parser class for the xml type description named on argv[1]."""
  parsed = common.WalkNodesForAttributes(sys.argv[1])
  type_name, top_node_name, attributes = parsed
  GenerateClass(type_name, top_node_name, attributes)
def GenerateClass(type_name, top_node_name, attributes):
"""generate it.
type_name: the type of object the parser returns
top_node_name: the name of the object the parser returns.
per common.WalkNodsForAttributes
"""
stanzas = []
for name in sorted(attributes):
typ, children = attributes[name]
replacements = Replacements(top_node_name, name, typ, children)
if typ == common.BOOLEAN:
stanzas.append(BOOLEAN_STANZA % replacements)
elif typ == common.GROUP:
stanzas.append(GROUP_STANZA % replacements)
elif typ in common.COMPLEX:
stanzas.append(COMPLEX_STANZA % replacements)
else:
stanzas.append(STANZA % replacements)
if stanzas:
# pop off the extranious } else for the first conditional stanza.
stanzas[0] = stanzas[0].replace('} else ', '', 1)
replacements = Replacements(top_node_name, name, typ, [None])
replacements['stanzas'] = '\n'.join(stanzas).strip()
print PARSER % replacements
def Replacements(top_node_name, name, typ, children):
  """Build the template-substitution dict for one attribute stanza."""
  # CamelCaseClassName derived from the top node, e.g. "top_node" -> "TopNode".
  type_name = ''.join(word.capitalize() for word in top_node_name.split('_'))
  # CamelCase form of the attribute name.
  camel_name = ''.join(word.capitalize() for word in name.split('_'))
  # Capitalized-lowercase form, e.g. "MyName" -> "Myname".
  attribute_name = camel_name.lower().capitalize()
  # mFieldName
  field_name = 'm' + camel_name
  sub_type = children[0]
  if sub_type:
    sub_parser_camel_case = sub_type + 'Parser'
  else:
    # No declared child type: drop the trailing character (depluralize).
    sub_parser_camel_case = camel_name[:-1] + 'Parser'
  return {
      'type_name': type_name,
      'name': name,
      'top_node_name': top_node_name,
      'camel_name': camel_name,
      'parser_name': typ + 'Parser',
      'attribute_name': attribute_name,
      'field_name': field_name,
      'typ': typ,
      'timestamp': datetime.datetime.now(),
      'sub_parser_camel_case': sub_parser_camel_case,
      'sub_type': sub_type,
  }
# Script entry point.
if __name__ == '__main__':
  main()
| Python |
#!/usr/bin/python
"""
Pull a oAuth protected page from foursquare.
Expects ~/.oget to contain (one on each line):
CONSUMER_KEY
CONSUMER_KEY_SECRET
USERNAME
PASSWORD
Don't forget to chmod 600 the file!
"""
import httplib
import os
import re
import sys
import urllib
import urllib2
import urlparse
import user
from xml.dom import pulldom
from xml.dom import minidom
import oauth
"""From: http://groups.google.com/group/foursquare-api/web/oauth
@consumer = OAuth::Consumer.new("consumer_token","consumer_secret", {
:site => "http://foursquare.com",
:scheme => :header,
:http_method => :post,
:request_token_path => "/oauth/request_token",
:access_token_path => "/oauth/access_token",
:authorize_path => "/oauth/authorize"
})
"""
# Foursquare API endpoint and oAuth plumbing shared by the functions below.
SERVER = 'api.foursquare.com:80'
CONTENT_TYPE_HEADER = {'Content-Type' :'application/x-www-form-urlencoded'}
SIGNATURE_METHOD = oauth.OAuthSignatureMethod_HMAC_SHA1()
AUTHEXCHANGE_URL = 'http://api.foursquare.com/v1/authexchange'
def parse_auth_response(auth_response):
  """Extract (oauth_token, oauth_token_secret) from an authexchange XML reply."""
  token = re.search('<oauth_token>(.*)</oauth_token>', auth_response).group(1)
  secret = re.search('<oauth_token_secret>(.*)</oauth_token_secret>',
                     auth_response).group(1)
  return (token, secret)
def create_signed_oauth_request(username, password, consumer):
  """Build and HMAC-SHA1-sign a POST authexchange request for the credentials."""
  params = dict(fs_username=username, fs_password=password)
  request = oauth.OAuthRequest.from_consumer_and_token(
      consumer, http_method='POST', http_url=AUTHEXCHANGE_URL,
      parameters=params)
  request.sign_request(SIGNATURE_METHOD, consumer, None)
  return request
def main():
url = urlparse.urlparse(sys.argv[1])
# Nevermind that the query can have repeated keys.
parameters = dict(urlparse.parse_qsl(url.query))
password_file = open(os.path.join(user.home, '.oget'))
lines = [line.strip() for line in password_file.readlines()]
if len(lines) == 4:
cons_key, cons_key_secret, username, password = lines
access_token = None
else:
cons_key, cons_key_secret, username, password, token, secret = lines
access_token = oauth.OAuthToken(token, secret)
consumer = oauth.OAuthConsumer(cons_key, cons_key_secret)
if not access_token:
oauth_request = create_signed_oauth_request(username, password, consumer)
connection = httplib.HTTPConnection(SERVER)
headers = {'Content-Type' :'application/x-www-form-urlencoded'}
connection.request(oauth_request.http_method, AUTHEXCHANGE_URL,
body=oauth_request.to_postdata(), headers=headers)
auth_response = connection.getresponse().read()
token = parse_auth_response(auth_response)
access_token = oauth.OAuthToken(*token)
open(os.path.join(user.home, '.oget'), 'w').write('\n'.join((
cons_key, cons_key_secret, username, password, token[0], token[1])))
oauth_request = oauth.OAuthRequest.from_consumer_and_token(consumer,
access_token, http_method='POST', http_url=url.geturl(),
parameters=parameters)
oauth_request.sign_request(SIGNATURE_METHOD, consumer, access_token)
connection = httplib.HTTPConnection(SERVER)
connection.request(oauth_request.http_method, oauth_request.to_url(),
body=oauth_request.to_postdata(), headers=CONTENT_TYPE_HEADER)
print connection.getresponse().read()
#print minidom.parse(connection.getresponse()).toprettyxml(indent=' ')
if __name__ == '__main__':
main()
| Python |
#!/usr/bin/python
import os
import subprocess
import sys
BASEDIR = '../main/src/com/joelapenna/foursquare'
TYPESDIR = '../captures/types/v1'
captures = sys.argv[1:]
if not captures:
captures = os.listdir(TYPESDIR)
for f in captures:
basename = f.split('.')[0]
javaname = ''.join([c.capitalize() for c in basename.split('_')])
fullpath = os.path.join(TYPESDIR, f)
typepath = os.path.join(BASEDIR, 'types', javaname + '.java')
parserpath = os.path.join(BASEDIR, 'parsers', javaname + 'Parser.java')
cmd = 'python gen_class.py %s > %s' % (fullpath, typepath)
print cmd
subprocess.call(cmd, stdout=sys.stdout, shell=True)
cmd = 'python gen_parser.py %s > %s' % (fullpath, parserpath)
print cmd
subprocess.call(cmd, stdout=sys.stdout, shell=True)
| Python |
#!/usr/bin/python
import logging
from xml.dom import minidom
from xml.dom import pulldom
# Attribute type names used by the capture-file schema.
BOOLEAN = "boolean"
STRING = "String"
GROUP = "Group"
# Interfaces that all FoursquareTypes implement.
DEFAULT_INTERFACES = ['FoursquareType']
# Interfaces that specific FoursqureTypes implement.
INTERFACES = {
}
# Imports added to every generated class.
DEFAULT_CLASS_IMPORTS = [
]
# Extra per-class imports, keyed by generated class name.
CLASS_IMPORTS = {
    # 'Checkin': DEFAULT_CLASS_IMPORTS + [
    #     'import com.joelapenna.foursquare.filters.VenueFilterable'
    # ],
    # 'Venue': DEFAULT_CLASS_IMPORTS + [
    #     'import com.joelapenna.foursquare.filters.VenueFilterable'
    # ],
    # 'Tip': DEFAULT_CLASS_IMPORTS + [
    #     'import com.joelapenna.foursquare.filters.VenueFilterable'
    # ],
}
# Attribute types parsed as nested complex objects (their sub-trees are
# skipped by WalkNodesForAttributes rather than walked).
COMPLEX = [
    'Group',
    'Badge',
    'Beenhere',
    'Checkin',
    'CheckinResponse',
    'City',
    'Credentials',
    'Data',
    'Mayor',
    'Rank',
    'Score',
    'Scoring',
    'Settings',
    'Stats',
    'Tags',
    'Tip',
    'User',
    'Venue',
]
# All recognized attribute type names; anything else is treated as String.
TYPES = COMPLEX + ['boolean']
def WalkNodesForAttributes(path):
  """Parse the xml file getting all attributes.

  <venue>
    <attribute>value</attribute>
  </venue>

  Returns:
    type_name - The java-style name the top node will have. "Venue"
    top_node_name - unadulterated name of the xml stanza, probably the type of
        java class we're creating. "venue"
    attributes - {'attribute': 'value'}
  """
  doc = pulldom.parse(path)
  type_name = None
  top_node_name = None
  attributes = {}
  # level > 0 means we are inside a complex element whose sub-tree is skipped.
  level = 0
  for event, node in doc:
    # For skipping parts of a tree.
    if level > 0:
      if event == pulldom.END_ELEMENT:
        level-=1
        logging.warn('(%s) Skip end: %s' % (str(level), node))
        continue
      elif event == pulldom.START_ELEMENT:
        logging.warn('(%s) Skipping: %s' % (str(level), node))
        level+=1
        continue
    if event == pulldom.START_ELEMENT:
      logging.warn('Parsing: ' + node.tagName)
      # Get the type name to use.
      if type_name is None:
        # First START_ELEMENT is the top node; derive its CamelCase type name.
        type_name = ''.join([word.capitalize()
                             for word in node.tagName.split('_')])
        top_node_name = node.tagName
        logging.warn('Found Top Node Name: ' + top_node_name)
        continue
      typ = node.getAttribute('type')
      child = node.getAttribute('child')
      # We don't want to walk complex types.
      if typ in COMPLEX:
        logging.warn('Found Complex: ' + node.tagName)
        level = 1
      elif typ not in TYPES:
        logging.warn('Found String: ' + typ)
        typ = STRING
      else:
        logging.warn('Found Type: ' + typ)
      logging.warn('Adding: ' + str((node, typ)))
      # setdefault: the first occurrence of a tag name wins.
      attributes.setdefault(node.tagName, (typ, [child]))
  logging.warn('Attr: ' + str((type_name, top_node_name, attributes)))
  return type_name, top_node_name, attributes
| Python |
import platform
import sys
# Display the user's OS two ways: the detailed string from the platform
# module and the short interpreter identifier from sys.platform.
print "OS:",platform.platform()
print "Platform:",sys.platform
| Python |
#!/usr/bin/python2.4
#
# Copyright 2007 The Python-Twitter Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
# parse_qsl moved to urlparse module in v2.6
try:
from urlparse import parse_qsl
except:
from cgi import parse_qsl
import oauth2 as oauth
REQUEST_TOKEN_URL = 'https://api.twitter.com/oauth/request_token'
ACCESS_TOKEN_URL = 'https://api.twitter.com/oauth/access_token'
AUTHORIZATION_URL = 'https://api.twitter.com/oauth/authorize'
SIGNIN_URL = 'https://api.twitter.com/oauth/authenticate'
consumer_key = None
consumer_secret = None
if consumer_key is None or consumer_secret is None:
print 'You need to edit this script and provide values for the'
print 'consumer_key and also consumer_secret.'
print ''
print 'The values you need come from Twitter - you need to register'
print 'as a developer your "application". This is needed only until'
print 'Twitter finishes the idea they have of a way to allow open-source'
print 'based libraries to have a token that can be used to generate a'
print 'one-time use key that will allow the library to make the request'
print 'on your behalf.'
print ''
sys.exit(1)
signature_method_hmac_sha1 = oauth.SignatureMethod_HMAC_SHA1()
oauth_consumer = oauth.Consumer(key=consumer_key, secret=consumer_secret)
oauth_client = oauth.Client(oauth_consumer)
print 'Requesting temp token from Twitter'
resp, content = oauth_client.request(REQUEST_TOKEN_URL, 'GET')
if resp['status'] != '200':
print 'Invalid respond from Twitter requesting temp token: %s' % resp['status']
else:
request_token = dict(parse_qsl(content))
print ''
print 'Please visit this Twitter page and retrieve the pincode to be used'
print 'in the next step to obtaining an Authentication Token:'
print ''
print '%s?oauth_token=%s' % (AUTHORIZATION_URL, request_token['oauth_token'])
print ''
pincode = raw_input('Pincode? ')
token = oauth.Token(request_token['oauth_token'], request_token['oauth_token_secret'])
token.set_verifier(pincode)
print ''
print 'Generating and signing request for an access token'
print ''
oauth_client = oauth.Client(oauth_consumer, token)
resp, content = oauth_client.request(ACCESS_TOKEN_URL, method='POST', body='oauth_verifier=%s' % pincode)
access_token = dict(parse_qsl(content))
if resp['status'] != '200':
print 'The request for a Token did not succeed: %s' % resp['status']
print access_token
else:
print 'Your Twitter Access Token key: %s' % access_token['oauth_token']
print ' Access Token secret: %s' % access_token['oauth_token_secret']
print ''
| Python |
#!/usr/bin/python2.4
# -*- coding: utf-8 -*-#
#
# Copyright 2007 The Python-Twitter Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Unit tests for the twitter.py library'''
__author__ = 'python-twitter@googlegroups.com'
import os
import simplejson
import time
import calendar
import unittest
import urllib
import twitter
class StatusTest(unittest.TestCase):
SAMPLE_JSON = '''{"created_at": "Fri Jan 26 23:17:14 +0000 2007", "id": 4391023, "text": "A l\u00e9gp\u00e1rn\u00e1s haj\u00f3m tele van angoln\u00e1kkal.", "user": {"description": "Canvas. JC Penny. Three ninety-eight.", "id": 718443, "location": "Okinawa, Japan", "name": "Kesuke Miyagi", "profile_image_url": "https://twitter.com/system/user/profile_image/718443/normal/kesuke.png", "screen_name": "kesuke", "url": "https://twitter.com/kesuke"}}'''
def _GetSampleUser(self):
return twitter.User(id=718443,
name='Kesuke Miyagi',
screen_name='kesuke',
description=u'Canvas. JC Penny. Three ninety-eight.',
location='Okinawa, Japan',
url='https://twitter.com/kesuke',
profile_image_url='https://twitter.com/system/user/pro'
'file_image/718443/normal/kesuke.pn'
'g')
def _GetSampleStatus(self):
return twitter.Status(created_at='Fri Jan 26 23:17:14 +0000 2007',
id=4391023,
text=u'A légpárnás hajóm tele van angolnákkal.',
user=self._GetSampleUser())
def testInit(self):
'''Test the twitter.Status constructor'''
status = twitter.Status(created_at='Fri Jan 26 23:17:14 +0000 2007',
id=4391023,
text=u'A légpárnás hajóm tele van angolnákkal.',
user=self._GetSampleUser())
def testGettersAndSetters(self):
'''Test all of the twitter.Status getters and setters'''
status = twitter.Status()
status.SetId(4391023)
self.assertEqual(4391023, status.GetId())
created_at = calendar.timegm((2007, 1, 26, 23, 17, 14, -1, -1, -1))
status.SetCreatedAt('Fri Jan 26 23:17:14 +0000 2007')
self.assertEqual('Fri Jan 26 23:17:14 +0000 2007', status.GetCreatedAt())
self.assertEqual(created_at, status.GetCreatedAtInSeconds())
status.SetNow(created_at + 10)
self.assertEqual("about 10 seconds ago", status.GetRelativeCreatedAt())
status.SetText(u'A légpárnás hajóm tele van angolnákkal.')
self.assertEqual(u'A légpárnás hajóm tele van angolnákkal.',
status.GetText())
status.SetUser(self._GetSampleUser())
self.assertEqual(718443, status.GetUser().id)
def testProperties(self):
'''Test all of the twitter.Status properties'''
status = twitter.Status()
status.id = 1
self.assertEqual(1, status.id)
created_at = calendar.timegm((2007, 1, 26, 23, 17, 14, -1, -1, -1))
status.created_at = 'Fri Jan 26 23:17:14 +0000 2007'
self.assertEqual('Fri Jan 26 23:17:14 +0000 2007', status.created_at)
self.assertEqual(created_at, status.created_at_in_seconds)
status.now = created_at + 10
self.assertEqual('about 10 seconds ago', status.relative_created_at)
status.user = self._GetSampleUser()
self.assertEqual(718443, status.user.id)
def _ParseDate(self, string):
return calendar.timegm(time.strptime(string, '%b %d %H:%M:%S %Y'))
def testRelativeCreatedAt(self):
'''Test various permutations of Status relative_created_at'''
status = twitter.Status(created_at='Fri Jan 01 12:00:00 +0000 2007')
status.now = self._ParseDate('Jan 01 12:00:00 2007')
self.assertEqual('about a second ago', status.relative_created_at)
status.now = self._ParseDate('Jan 01 12:00:01 2007')
self.assertEqual('about a second ago', status.relative_created_at)
status.now = self._ParseDate('Jan 01 12:00:02 2007')
self.assertEqual('about 2 seconds ago', status.relative_created_at)
status.now = self._ParseDate('Jan 01 12:00:05 2007')
self.assertEqual('about 5 seconds ago', status.relative_created_at)
status.now = self._ParseDate('Jan 01 12:00:50 2007')
self.assertEqual('about a minute ago', status.relative_created_at)
status.now = self._ParseDate('Jan 01 12:01:00 2007')
self.assertEqual('about a minute ago', status.relative_created_at)
status.now = self._ParseDate('Jan 01 12:01:10 2007')
self.assertEqual('about a minute ago', status.relative_created_at)
status.now = self._ParseDate('Jan 01 12:02:00 2007')
self.assertEqual('about 2 minutes ago', status.relative_created_at)
status.now = self._ParseDate('Jan 01 12:31:50 2007')
self.assertEqual('about 31 minutes ago', status.relative_created_at)
status.now = self._ParseDate('Jan 01 12:50:00 2007')
self.assertEqual('about an hour ago', status.relative_created_at)
status.now = self._ParseDate('Jan 01 13:00:00 2007')
self.assertEqual('about an hour ago', status.relative_created_at)
status.now = self._ParseDate('Jan 01 13:10:00 2007')
self.assertEqual('about an hour ago', status.relative_created_at)
status.now = self._ParseDate('Jan 01 14:00:00 2007')
self.assertEqual('about 2 hours ago', status.relative_created_at)
status.now = self._ParseDate('Jan 01 19:00:00 2007')
self.assertEqual('about 7 hours ago', status.relative_created_at)
status.now = self._ParseDate('Jan 02 11:30:00 2007')
self.assertEqual('about a day ago', status.relative_created_at)
status.now = self._ParseDate('Jan 04 12:00:00 2007')
self.assertEqual('about 3 days ago', status.relative_created_at)
status.now = self._ParseDate('Feb 04 12:00:00 2007')
self.assertEqual('about 34 days ago', status.relative_created_at)
def testAsJsonString(self):
    '''Test the twitter.Status AsJsonString method'''
    # Serializing the sample status must reproduce the canonical JSON.
    expected = StatusTest.SAMPLE_JSON
    actual = self._GetSampleStatus().AsJsonString()
    self.assertEqual(expected, actual)
def testAsDict(self):
    '''Test the twitter.Status AsDict method'''
    data = self._GetSampleStatus().AsDict()
    # Spot-check each top-level field against the canonical sample values.
    for key, expected in (('id', 4391023),
                          ('created_at', 'Fri Jan 26 23:17:14 +0000 2007'),
                          ('text', u'A légpárnás hajóm tele van angolnákkal.')):
        self.assertEqual(expected, data[key])
    # The embedded user is serialized as a nested dict.
    self.assertEqual(718443, data['user']['id'])
def testEq(self):
    '''Test the twitter.Status __eq__ method'''
    # A status built field-by-field must compare equal to the sample.
    other = twitter.Status()
    other.created_at = 'Fri Jan 26 23:17:14 +0000 2007'
    other.id = 4391023
    other.text = u'A légpárnás hajóm tele van angolnákkal.'
    other.user = self._GetSampleUser()
    self.assertEqual(other, self._GetSampleStatus())
def testNewFromJsonDict(self):
    '''Test the twitter.Status NewFromJsonDict method'''
    # Deserializing the sample JSON must yield the sample status.
    parsed = simplejson.loads(StatusTest.SAMPLE_JSON)
    built = twitter.Status.NewFromJsonDict(parsed)
    self.assertEqual(self._GetSampleStatus(), built)
class UserTest(unittest.TestCase):
    """Unit tests for twitter.User construction, accessors and serialization."""

    SAMPLE_JSON = '''{"description": "Indeterminate things", "id": 673483, "location": "San Francisco, CA", "name": "DeWitt", "profile_image_url": "https://twitter.com/system/user/profile_image/673483/normal/me.jpg", "screen_name": "dewitt", "status": {"created_at": "Fri Jan 26 17:28:19 +0000 2007", "id": 4212713, "text": "\\"Select all\\" and archive your Gmail inbox. The page loads so much faster!"}, "url": "http://unto.net/"}'''

    def _GetSampleStatus(self):
        # The status embedded in SAMPLE_JSON above.
        return twitter.Status(created_at='Fri Jan 26 17:28:19 +0000 2007',
                              id=4212713,
                              text='"Select all" and archive your Gmail inbox. '
                                   ' The page loads so much faster!')

    def _GetSampleUser(self):
        # A fully-populated user matching SAMPLE_JSON.
        return twitter.User(id=673483,
                            name='DeWitt',
                            screen_name='dewitt',
                            description=u'Indeterminate things',
                            location='San Francisco, CA',
                            url='http://unto.net/',
                            profile_image_url='https://twitter.com/system/user/prof'
                                              'ile_image/673483/normal/me.jpg',
                            status=self._GetSampleStatus())

    def testInit(self):
        '''Test the twitter.User constructor'''
        user = twitter.User(id=673483,
                            name='DeWitt',
                            screen_name='dewitt',
                            description=u'Indeterminate things',
                            url='https://twitter.com/dewitt',
                            profile_image_url='https://twitter.com/system/user/prof'
                                              'ile_image/673483/normal/me.jpg',
                            status=self._GetSampleStatus())

    def testGettersAndSetters(self):
        '''Test all of the twitter.User getters and setters'''
        user = twitter.User()
        user.SetId(673483)
        self.assertEqual(673483, user.GetId())
        user.SetName('DeWitt')
        self.assertEqual('DeWitt', user.GetName())
        user.SetScreenName('dewitt')
        self.assertEqual('dewitt', user.GetScreenName())
        user.SetDescription('Indeterminate things')
        self.assertEqual('Indeterminate things', user.GetDescription())
        user.SetLocation('San Francisco, CA')
        self.assertEqual('San Francisco, CA', user.GetLocation())
        user.SetProfileImageUrl('https://twitter.com/system/user/profile_im'
                                'age/673483/normal/me.jpg')
        self.assertEqual('https://twitter.com/system/user/profile_image/673'
                         '483/normal/me.jpg', user.GetProfileImageUrl())
        user.SetStatus(self._GetSampleStatus())
        self.assertEqual(4212713, user.GetStatus().id)

    def testProperties(self):
        '''Test all of the twitter.User properties'''
        user = twitter.User()
        user.id = 673483
        self.assertEqual(673483, user.id)
        user.name = 'DeWitt'
        self.assertEqual('DeWitt', user.name)
        user.screen_name = 'dewitt'
        self.assertEqual('dewitt', user.screen_name)
        user.description = 'Indeterminate things'
        self.assertEqual('Indeterminate things', user.description)
        user.location = 'San Francisco, CA'
        self.assertEqual('San Francisco, CA', user.location)
        user.profile_image_url = 'https://twitter.com/system/user/profile_i' \
                                 'mage/673483/normal/me.jpg'
        self.assertEqual('https://twitter.com/system/user/profile_image/6734'
                         '83/normal/me.jpg', user.profile_image_url)
        # BUG FIX: the original assigned to self.status (an attribute of the
        # TestCase itself), so the User.status property was never exercised.
        user.status = self._GetSampleStatus()
        self.assertEqual(4212713, user.status.id)

    def testAsJsonString(self):
        '''Test the twitter.User AsJsonString method'''
        self.assertEqual(UserTest.SAMPLE_JSON,
                         self._GetSampleUser().AsJsonString())

    def testAsDict(self):
        '''Test the twitter.User AsDict method'''
        user = self._GetSampleUser()
        data = user.AsDict()
        self.assertEqual(673483, data['id'])
        self.assertEqual('DeWitt', data['name'])
        self.assertEqual('dewitt', data['screen_name'])
        self.assertEqual('Indeterminate things', data['description'])
        self.assertEqual('San Francisco, CA', data['location'])
        self.assertEqual('https://twitter.com/system/user/profile_image/6734'
                         '83/normal/me.jpg', data['profile_image_url'])
        self.assertEqual('http://unto.net/', data['url'])
        self.assertEqual(4212713, data['status']['id'])

    def testEq(self):
        '''Test the twitter.User __eq__ method'''
        user = twitter.User()
        user.id = 673483
        user.name = 'DeWitt'
        user.screen_name = 'dewitt'
        user.description = 'Indeterminate things'
        user.location = 'San Francisco, CA'
        user.profile_image_url = 'https://twitter.com/system/user/profile_image/67' \
                                 '3483/normal/me.jpg'
        user.url = 'http://unto.net/'
        user.status = self._GetSampleStatus()
        self.assertEqual(user, self._GetSampleUser())

    def testNewFromJsonDict(self):
        '''Test the twitter.User NewFromJsonDict method'''
        data = simplejson.loads(UserTest.SAMPLE_JSON)
        user = twitter.User.NewFromJsonDict(data)
        self.assertEqual(self._GetSampleUser(), user)
class TrendTest(unittest.TestCase):
    """Unit tests for the twitter.Trend model object."""

    SAMPLE_JSON = '''{"name": "Kesuke Miyagi", "query": "Kesuke Miyagi"}'''

    # Field values shared by every test in this case.
    _NAME = 'Kesuke Miyagi'
    _QUERY = 'Kesuke Miyagi'
    _TIMESTAMP = 'Fri Jan 26 23:17:14 +0000 2007'

    def _GetSampleTrend(self):
        return twitter.Trend(name=self._NAME,
                             query=self._QUERY,
                             timestamp=self._TIMESTAMP)

    def testInit(self):
        '''Test the twitter.Trend constructor'''
        twitter.Trend(name=self._NAME,
                      query=self._QUERY,
                      timestamp=self._TIMESTAMP)

    def testProperties(self):
        '''Test all of the twitter.Trend properties'''
        trend = twitter.Trend()
        # Each property must read back exactly what was assigned.
        for attr, value in (('name', self._NAME),
                            ('query', self._QUERY),
                            ('timestamp', self._TIMESTAMP)):
            setattr(trend, attr, value)
            self.assertEqual(value, getattr(trend, attr))

    def testNewFromJsonDict(self):
        '''Test the twitter.Trend NewFromJsonDict method'''
        data = simplejson.loads(TrendTest.SAMPLE_JSON)
        trend = twitter.Trend.NewFromJsonDict(data, timestamp=self._TIMESTAMP)
        self.assertEqual(self._GetSampleTrend(), trend)

    def testEq(self):
        '''Test the twitter.Trend __eq__ method'''
        trend = twitter.Trend()
        trend.name = self._NAME
        trend.query = self._QUERY
        trend.timestamp = self._TIMESTAMP
        self.assertEqual(trend, self._GetSampleTrend())
class FileCacheTest(unittest.TestCase):
    """Unit tests for the twitter._FileCache on-disk cache."""

    def testInit(self):
        """Test the twitter._FileCache constructor"""
        cache = twitter._FileCache()
        self.assert_(cache is not None, 'cache is None')

    def testSet(self):
        """Test the twitter._FileCache.Set method"""
        cache = twitter._FileCache()
        cache.Set("foo", 'Hello World!')
        # BUG FIX: verify the value was actually stored; the original test
        # never read the entry back, so a silently-failing Set would pass.
        self.assertEqual('Hello World!', cache.Get("foo"))
        cache.Remove("foo")

    def testRemove(self):
        """Test the twitter._FileCache.Remove method"""
        cache = twitter._FileCache()
        cache.Set("foo", 'Hello World!')
        cache.Remove("foo")
        # A removed key must read back as None.
        data = cache.Get("foo")
        self.assertEqual(data, None, 'data is not None')

    def testGet(self):
        """Test the twitter._FileCache.Get method"""
        cache = twitter._FileCache()
        cache.Set("foo", 'Hello World!')
        data = cache.Get("foo")
        self.assertEqual('Hello World!', data)
        cache.Remove("foo")

    def testGetCachedTime(self):
        """Test the twitter._FileCache.GetCachedTime method"""
        now = time.time()
        cache = twitter._FileCache()
        cache.Set("foo", 'Hello World!')
        cached_time = cache.GetCachedTime("foo")
        # The cache timestamp should be within a second of the wall clock.
        delta = cached_time - now
        self.assert_(delta <= 1,
                     'Cached time differs from clock time by more than 1 second.')
        cache.Remove("foo")
class ApiTest(unittest.TestCase):
    # Exercises twitter.Api against canned JSON fixtures served through
    # MockUrllib, so no network access takes place.  Each test registers the
    # exact URL the Api call is expected to request; MockOpener raises on any
    # unregistered URL, so the URL strings double as assertions.

    def setUp(self):
        # Swap the Api's urllib for the mock before every test.
        self._urllib = MockUrllib()
        api = twitter.Api(consumer_key='CONSUMER_KEY',
                          consumer_secret='CONSUMER_SECRET',
                          access_token_key='OAUTH_TOKEN',
                          access_token_secret='OAUTH_SECRET',
                          cache=None)
        api.SetUrllib(self._urllib)
        self._api = api

    def testTwitterError(self):
        '''Test that twitter responses containing an error message are wrapped.'''
        self._AddHandler('https://api.twitter.com/1/statuses/public_timeline.json',
                         curry(self._OpenTestData, 'public_timeline_error.json'))
        # Manually try/catch so we can check the exception's value
        try:
            statuses = self._api.GetPublicTimeline()
        except twitter.TwitterError, error:
            # If the error message matches, the test passes
            self.assertEqual('test error', error.message)
        else:
            self.fail('TwitterError expected')

    def testGetPublicTimeline(self):
        '''Test the twitter.Api GetPublicTimeline method'''
        self._AddHandler('https://api.twitter.com/1/statuses/public_timeline.json?since_id=12345',
                         curry(self._OpenTestData, 'public_timeline.json'))
        statuses = self._api.GetPublicTimeline(since_id=12345)
        # This is rather arbitrary, but spot checking is better than nothing
        self.assertEqual(20, len(statuses))
        self.assertEqual(89497702, statuses[0].id)

    def testGetUserTimeline(self):
        '''Test the twitter.Api GetUserTimeline method'''
        self._AddHandler('https://api.twitter.com/1/statuses/user_timeline/kesuke.json?count=1',
                         curry(self._OpenTestData, 'user_timeline-kesuke.json'))
        statuses = self._api.GetUserTimeline('kesuke', count=1)
        # This is rather arbitrary, but spot checking is better than nothing
        self.assertEqual(89512102, statuses[0].id)
        self.assertEqual(718443, statuses[0].user.id)

    def testGetFriendsTimeline(self):
        '''Test the twitter.Api GetFriendsTimeline method'''
        self._AddHandler('https://api.twitter.com/1/statuses/friends_timeline/kesuke.json',
                         curry(self._OpenTestData, 'friends_timeline-kesuke.json'))
        statuses = self._api.GetFriendsTimeline('kesuke')
        # This is rather arbitrary, but spot checking is better than nothing
        self.assertEqual(20, len(statuses))
        self.assertEqual(718443, statuses[0].user.id)

    def testGetStatus(self):
        '''Test the twitter.Api GetStatus method'''
        self._AddHandler('https://api.twitter.com/1/statuses/show/89512102.json',
                         curry(self._OpenTestData, 'show-89512102.json'))
        status = self._api.GetStatus(89512102)
        self.assertEqual(89512102, status.id)
        self.assertEqual(718443, status.user.id)

    def testDestroyStatus(self):
        '''Test the twitter.Api DestroyStatus method'''
        self._AddHandler('https://api.twitter.com/1/statuses/destroy/103208352.json',
                         curry(self._OpenTestData, 'status-destroy.json'))
        status = self._api.DestroyStatus(103208352)
        self.assertEqual(103208352, status.id)

    def testPostUpdate(self):
        '''Test the twitter.Api PostUpdate method'''
        self._AddHandler('https://api.twitter.com/1/statuses/update.json',
                         curry(self._OpenTestData, 'update.json'))
        # Posts UTF-8 encoded non-ASCII text to exercise encoding handling.
        status = self._api.PostUpdate(u'Моё судно на воздушной подушке полно угрей'.encode('utf8'))
        # This is rather arbitrary, but spot checking is better than nothing
        self.assertEqual(u'Моё судно на воздушной подушке полно угрей', status.text)

    def testGetReplies(self):
        '''Test the twitter.Api GetReplies method'''
        self._AddHandler('https://api.twitter.com/1/statuses/replies.json?page=1',
                         curry(self._OpenTestData, 'replies.json'))
        statuses = self._api.GetReplies(page=1)
        self.assertEqual(36657062, statuses[0].id)

    def testGetFriends(self):
        '''Test the twitter.Api GetFriends method'''
        self._AddHandler('https://api.twitter.com/1/statuses/friends.json?cursor=123',
                         curry(self._OpenTestData, 'friends.json'))
        users = self._api.GetFriends(cursor=123)
        buzz = [u.status for u in users if u.screen_name == 'buzz']
        self.assertEqual(89543882, buzz[0].id)

    def testGetFollowers(self):
        '''Test the twitter.Api GetFollowers method'''
        self._AddHandler('https://api.twitter.com/1/statuses/followers.json?page=1',
                         curry(self._OpenTestData, 'followers.json'))
        users = self._api.GetFollowers(page=1)
        # This is rather arbitrary, but spot checking is better than nothing
        alexkingorg = [u.status for u in users if u.screen_name == 'alexkingorg']
        self.assertEqual(89554432, alexkingorg[0].id)

    def testGetFeatured(self):
        '''Test the twitter.Api GetFeatured method'''
        self._AddHandler('https://api.twitter.com/1/statuses/featured.json',
                         curry(self._OpenTestData, 'featured.json'))
        users = self._api.GetFeatured()
        # This is rather arbitrary, but spot checking is better than nothing
        stevenwright = [u.status for u in users if u.screen_name == 'stevenwright']
        self.assertEqual(86991742, stevenwright[0].id)

    def testGetDirectMessages(self):
        '''Test the twitter.Api GetDirectMessages method'''
        self._AddHandler('https://api.twitter.com/1/direct_messages.json?page=1',
                         curry(self._OpenTestData, 'direct_messages.json'))
        statuses = self._api.GetDirectMessages(page=1)
        self.assertEqual(u'A légpárnás hajóm tele van angolnákkal.', statuses[0].text)

    def testPostDirectMessage(self):
        '''Test the twitter.Api PostDirectMessage method'''
        self._AddHandler('https://api.twitter.com/1/direct_messages/new.json',
                         curry(self._OpenTestData, 'direct_messages-new.json'))
        status = self._api.PostDirectMessage('test', u'Моё судно на воздушной подушке полно угрей'.encode('utf8'))
        # This is rather arbitrary, but spot checking is better than nothing
        self.assertEqual(u'Моё судно на воздушной подушке полно угрей', status.text)

    def testDestroyDirectMessage(self):
        '''Test the twitter.Api DestroyDirectMessage method'''
        self._AddHandler('https://api.twitter.com/1/direct_messages/destroy/3496342.json',
                         curry(self._OpenTestData, 'direct_message-destroy.json'))
        status = self._api.DestroyDirectMessage(3496342)
        # This is rather arbitrary, but spot checking is better than nothing
        self.assertEqual(673483, status.sender_id)

    def testCreateFriendship(self):
        '''Test the twitter.Api CreateFriendship method'''
        self._AddHandler('https://api.twitter.com/1/friendships/create/dewitt.json',
                         curry(self._OpenTestData, 'friendship-create.json'))
        user = self._api.CreateFriendship('dewitt')
        # This is rather arbitrary, but spot checking is better than nothing
        self.assertEqual(673483, user.id)

    def testDestroyFriendship(self):
        '''Test the twitter.Api DestroyFriendship method'''
        self._AddHandler('https://api.twitter.com/1/friendships/destroy/dewitt.json',
                         curry(self._OpenTestData, 'friendship-destroy.json'))
        user = self._api.DestroyFriendship('dewitt')
        # This is rather arbitrary, but spot checking is better than nothing
        self.assertEqual(673483, user.id)

    def testGetUser(self):
        '''Test the twitter.Api GetUser method'''
        self._AddHandler('https://api.twitter.com/1/users/show/dewitt.json',
                         curry(self._OpenTestData, 'show-dewitt.json'))
        user = self._api.GetUser('dewitt')
        self.assertEqual('dewitt', user.screen_name)
        self.assertEqual(89586072, user.status.id)

    def _AddHandler(self, url, callback):
        # Register a canned response for an exact URL on the mock urllib.
        self._urllib.AddHandler(url, callback)

    def _GetTestDataPath(self, filename):
        # Fixtures live in a 'testdata' directory next to this file.
        directory = os.path.dirname(os.path.abspath(__file__))
        test_data_dir = os.path.join(directory, 'testdata')
        return os.path.join(test_data_dir, filename)

    def _OpenTestData(self, filename):
        f = open(self._GetTestDataPath(filename))
        # make sure that the returned object contains an .info() method:
        # headers are set to {}
        return urllib.addinfo(f, {})
class MockUrllib(object):
    '''A mock replacement for urllib that hardcodes specific responses.'''

    def __init__(self):
        # Maps URL -> zero-argument callable producing the canned response.
        self._handlers = {}
        self.HTTPBasicAuthHandler = MockHTTPBasicAuthHandler

    def AddHandler(self, url, callback):
        # Register *callback* as the canned response for *url*.
        self._handlers[url] = callback

    def build_opener(self, *handlers):
        # Any extra handlers are ignored; the mock opener serves only the
        # URLs registered via AddHandler.
        return MockOpener(self._handlers)

    def HTTPHandler(self, *args, **kwargs):
        # No-op stand-in; the mock opener needs no real protocol handlers.
        return None

    def HTTPSHandler(self, *args, **kwargs):
        # No-op stand-in; the mock opener needs no real protocol handlers.
        return None

    def OpenerDirector(self):
        # Mirrors urllib2's interface by delegating to build_opener.
        return self.build_opener()
class MockOpener(object):
    '''A mock opener for urllib'''

    def __init__(self, handlers):
        # Maps URL -> zero-argument callable producing the canned response.
        self._handlers = handlers
        self._opened = False

    def open(self, url, data=None):
        '''Return the canned response registered for *url*.

        OAuth query parameters are stripped before lookup so tests do not
        depend on oauth signing details.

        Raises:
          Exception: if the opener is already open or no handler matches.
        '''
        if self._opened:
            raise Exception('MockOpener already opened.')
        # Remove parameters from URL - they're only added by oauth and we
        # don't want to test oauth
        if '?' in url:
            # BUG FIX: split at most once; a literal '?' inside the query
            # string previously made the 2-tuple unpack raise ValueError.
            (url, qs) = url.split('?', 1)
            # We split using & and filter on the beginning of each key
            # This is crude but we have to keep the ordering for now
            tokens = [token for token in qs.split('&')
                      if not token.startswith('oauth')]
            if len(tokens) > 0:
                url = "%s?%s" % (url, '&'.join(tokens))
        if url in self._handlers:
            self._opened = True
            return self._handlers[url]()
        else:
            raise Exception('Unexpected URL %s (Checked: %s)' % (url, self._handlers))

    def add_handler(self, *args, **kwargs):
        # Accepted for interface compatibility; handlers are preset.
        pass

    def close(self):
        '''Mark the opener reusable again.

        Raises:
          Exception: if the opener was never opened.
        '''
        if not self._opened:
            raise Exception('MockOpener closed before it was opened.')
        self._opened = False
class MockHTTPBasicAuthHandler(object):
    '''A mock replacement for HTTPBasicAuthHandler'''

    def add_password(self, realm, uri, user, passwd):
        # Credentials are accepted and discarded; nothing is recorded yet.
        # TODO(dewitt): Add verification that the proper args are passed
        pass
class curry:
    # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52549
    '''Freeze leading positional and keyword arguments of a callable.'''

    def __init__(self, fun, *args, **kwargs):
        self.fun = fun
        self.pending = args[:]
        self.kwargs = kwargs.copy()

    def __call__(self, *args, **kwargs):
        # Merge frozen keywords with call-time keywords; call-time wins.
        if kwargs and self.kwargs:
            merged = dict(self.kwargs)
            merged.update(kwargs)
        else:
            merged = kwargs or self.kwargs
        return self.fun(*(self.pending + args), **merged)
def suite():
    """Assemble every test case class into a single unittest suite."""
    all_tests = unittest.TestSuite()
    for case in (FileCacheTest, StatusTest, UserTest, ApiTest):
        all_tests.addTests(unittest.makeSuite(case))
    return all_tests
if __name__ == '__main__':
unittest.main()
| Python |
#!/usr/bin/python2.4
'''Load the latest update for a Twitter user and leave it in an XHTML fragment'''
__author__ = 'dewitt@google.com'
import codecs
import getopt
import sys
import twitter
TEMPLATE = """
<div class="twitter">
<span class="twitter-user"><a href="http://twitter.com/%s">Twitter</a>: </span>
<span class="twitter-text">%s</span>
<span class="twitter-relative-created-at"><a href="http://twitter.com/%s/statuses/%s">Posted %s</a></span>
</div>
"""
def Usage():
print 'Usage: %s [options] twitterid' % __file__
print
print ' This script fetches a users latest twitter update and stores'
print ' the result in a file as an XHTML fragment'
print
print ' Options:'
print ' --help -h : print this help'
print ' --output : the output file [default: stdout]'
def FetchTwitter(user, output):
assert user
statuses = twitter.Api().GetUserTimeline(user=user, count=1)
s = statuses[0]
xhtml = TEMPLATE % (s.user.screen_name, s.text, s.user.screen_name, s.id, s.relative_created_at)
if output:
Save(xhtml, output)
else:
print xhtml
def Save(xhtml, output):
    """Write *xhtml* to the file *output* as pure ASCII.

    Non-ASCII characters are replaced with XML character references
    (xmlcharrefreplace), so the fragment is valid regardless of content.
    """
    out = codecs.open(output, mode='w', encoding='ascii',
                      errors='xmlcharrefreplace')
    try:
        out.write(xhtml)
    finally:
        # Close even if the write fails so the handle is never leaked.
        out.close()
def main():
    """Parse command-line options and fetch the requested user's update."""
    try:
        # BUG FIX: '-o' takes an argument (the output file), so it needs a
        # trailing ':' — with plain 'ho' the -o flag would set output to ''.
        opts, args = getopt.gnu_getopt(sys.argv[1:], 'ho:', ['help', 'output='])
    except getopt.GetoptError:
        Usage()
        sys.exit(2)
    try:
        user = args[0]
    except IndexError:
        # No twitterid positional argument was supplied.
        Usage()
        sys.exit(2)
    output = None
    for o, a in opts:
        if o in ("-h", "--help"):
            Usage()
            sys.exit(2)
        if o in ("-o", "--output"):
            output = a
    FetchTwitter(user, output)
if __name__ == "__main__":
main()
| Python |
#!/usr/bin/python2.4
'''Post a message to twitter'''
__author__ = 'dewitt@google.com'
import ConfigParser
import getopt
import os
import sys
import twitter
USAGE = '''Usage: tweet [options] message
This script posts a message to Twitter.
Options:
-h --help : print this help
--consumer-key : the twitter consumer key
--consumer-secret : the twitter consumer secret
--access-key : the twitter access token key
--access-secret : the twitter access token secret
--encoding : the character set encoding used in input strings, e.g. "utf-8". [optional]
Documentation:
If either of the command line flags are not present, the environment
variables TWEETUSERNAME and TWEETPASSWORD will then be checked for your
consumer_key or consumer_secret, respectively.
If neither the command line flags nor the enviroment variables are
present, the .tweetrc file, if it exists, can be used to set the
default consumer_key and consumer_secret. The file should contain the
following three lines, replacing *consumer_key* with your consumer key, and
*consumer_secret* with your consumer secret:
A skeletal .tweetrc file:
[Tweet]
consumer_key: *consumer_key*
consumer_secret: *consumer_password*
access_key: *access_key*
access_secret: *access_password*
'''
def PrintUsageAndExit():
print USAGE
sys.exit(2)
def GetConsumerKeyEnv():
    """Return the consumer key from $TWEETUSERNAME, or None if unset."""
    return os.environ.get("TWEETUSERNAME")


def GetConsumerSecretEnv():
    """Return the consumer secret from $TWEETPASSWORD, or None if unset."""
    return os.environ.get("TWEETPASSWORD")


def GetAccessKeyEnv():
    """Return the access token key from $TWEETACCESSKEY, or None if unset."""
    return os.environ.get("TWEETACCESSKEY")


def GetAccessSecretEnv():
    """Return the access token secret from $TWEETACCESSSECRET, or None if unset."""
    return os.environ.get("TWEETACCESSSECRET")
class TweetRc(object):
    """Read default OAuth credentials from the user's ~/.tweetrc file."""

    def __init__(self):
        # The parsed config is created lazily on first option lookup.
        self._config = None

    def GetConsumerKey(self):
        """Return the consumer_key option, or None if absent."""
        return self._GetOption('consumer_key')

    def GetConsumerSecret(self):
        """Return the consumer_secret option, or None if absent."""
        return self._GetOption('consumer_secret')

    def GetAccessKey(self):
        """Return the access_key option, or None if absent."""
        return self._GetOption('access_key')

    def GetAccessSecret(self):
        """Return the access_secret option, or None if absent."""
        return self._GetOption('access_secret')

    def _GetOption(self, option):
        """Look up *option* in the [Tweet] section; None if unavailable."""
        try:
            return self._GetConfig().get('Tweet', option)
        except ConfigParser.Error:
            # BUG FIX: only swallow config-file problems (missing file,
            # section or option); the original bare except hid every error,
            # including typos in this very class.
            return None

    def _GetConfig(self):
        """Parse ~/.tweetrc once and cache the resulting ConfigParser."""
        if not self._config:
            self._config = ConfigParser.ConfigParser()
            self._config.read(os.path.expanduser('~/.tweetrc'))
        return self._config
def main():
try:
shortflags = 'h'
longflags = ['help', 'consumer-key=', 'consumer-secret=',
'access-key=', 'access-secret=', 'encoding=']
opts, args = getopt.gnu_getopt(sys.argv[1:], shortflags, longflags)
except getopt.GetoptError:
PrintUsageAndExit()
consumer_keyflag = None
consumer_secretflag = None
access_keyflag = None
access_secretflag = None
encoding = None
for o, a in opts:
if o in ("-h", "--help"):
PrintUsageAndExit()
if o in ("--consumer-key"):
consumer_keyflag = a
if o in ("--consumer-secret"):
consumer_secretflag = a
if o in ("--access-key"):
access_keyflag = a
if o in ("--access-secret"):
access_secretflag = a
if o in ("--encoding"):
encoding = a
message = ' '.join(args)
if not message:
PrintUsageAndExit()
rc = TweetRc()
consumer_key = consumer_keyflag or GetConsumerKeyEnv() or rc.GetConsumerKey()
consumer_secret = consumer_secretflag or GetConsumerSecretEnv() or rc.GetConsumerSecret()
access_key = access_keyflag or GetAccessKeyEnv() or rc.GetAccessKey()
access_secret = access_secretflag or GetAccessSecretEnv() or rc.GetAccessSecret()
if not consumer_key or not consumer_secret or not access_key or not access_secret:
PrintUsageAndExit()
api = twitter.Api(consumer_key=consumer_key, consumer_secret=consumer_secret,
access_token_key=access_key, access_token_secret=access_secret,
input_encoding=encoding)
try:
status = api.PostUpdate(message)
except UnicodeDecodeError:
print "Your message could not be encoded. Perhaps it contains non-ASCII characters? "
print "Try explicitly specifying the encoding with the --encoding flag"
sys.exit(2)
print "%s just posted: %s" % (status.user.name, status.text)
if __name__ == "__main__":
main()
| Python |
#!/usr/bin/python2.4
#
# Copyright 2007 The Python-Twitter Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''A class that defines the default URL Shortener.
TinyURL is provided as the default and as an example.
'''
import urllib
# Change History
#
# 2010-05-16
# TinyURL example and the idea for this comes from a bug filed by
# acolorado with patch provided by ghills. Class implementation
# was done by bear.
#
# Issue 19 http://code.google.com/p/python-twitter/issues/detail?id=19
#
class ShortenURL(object):
    '''Helper class to make URL Shortener calls if/when required'''

    def __init__(self,
                 userid=None,
                 password=None):
        '''Instantiate a new ShortenURL object

        Args:
          userid: userid for any required authorization call [optional]
          password: password for any required authorization call [optional]
        '''
        self.userid = userid
        self.password = password

    def Shorten(self,
                longURL):
        '''Call TinyURL API and returned shortened URL result

        Args:
          longURL: URL string to shorten

        Returns:
          The shortened URL as a string

        Note:
          longURL is required and no checks are made to ensure completeness
        '''
        result = None
        # BUG FIX: percent-encode longURL so '&', '?' and '#' inside it do
        # not corrupt the TinyURL query string.
        f = urllib.urlopen("http://tinyurl.com/api-create.php?url=%s" %
                           urllib.quote(longURL, safe=''))
        try:
            result = f.read()
        finally:
            f.close()
        return result
| Python |
#!/usr/bin/python2.4
#
# Copyright 2007 The Python-Twitter Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''The setup and build script for the python-twitter library.'''
__author__ = 'python-twitter@googlegroups.com'
__version__ = '0.8.3'
# The base package metadata to be used by both distutils and setuptools
METADATA = dict(
name = "python-twitter",
version = __version__,
py_modules = ['twitter'],
author='The Python-Twitter Developers',
author_email='python-twitter@googlegroups.com',
description='A python wrapper around the Twitter API',
license='Apache License 2.0',
url='http://code.google.com/p/python-twitter/',
keywords='twitter api',
)
# Extra package metadata to be used only if setuptools is installed
SETUPTOOLS_METADATA = dict(
install_requires = ['setuptools', 'simplejson', 'oauth2'],
include_package_data = True,
classifiers = [
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Communications :: Chat',
'Topic :: Internet',
],
test_suite = 'twitter_test.suite',
)
def Read(file):
    """Return the full contents of *file* as a string.

    Uses a context manager so the descriptor is released even if read()
    raises — the original leaked the open handle.
    """
    with open(file) as f:
        return f.read()
def BuildLongDescription():
    """Concatenate README and CHANGES into the package long_description."""
    sections = [Read(name) for name in ('README', 'CHANGES')]
    return '\n'.join(sections)
def Main():
    # Entry point for the build: finish assembling metadata, then run setup.
    # Build the long_description from the README and CHANGES
    METADATA['long_description'] = BuildLongDescription()
    # Use setuptools if available, otherwise fallback and use distutils
    try:
        import setuptools
        METADATA.update(SETUPTOOLS_METADATA)
        setuptools.setup(**METADATA)
    except ImportError:
        # NOTE(review): an ImportError raised from inside setuptools.setup
        # itself would also land here and retry with distutils — confirm
        # that is intended before narrowing the try block.
        import distutils.core
        distutils.core.setup(**METADATA)
if __name__ == '__main__':
Main()
| Python |
"""Implementation of JSONDecoder
"""
import re
import sys
import struct
from simplejson.scanner import make_scanner
try:
from simplejson._speedups import scanstring as c_scanstring
except ImportError:
c_scanstring = None
__all__ = ['JSONDecoder']
FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
def _floatconstants():
    # Raw IEEE-754 big-endian byte patterns for NaN and +Infinity
    # (Python 2-only str.decode('hex')).
    _BYTES = '7FF80000000000007FF0000000000000'.decode('hex')
    # On little-endian machines each 8-byte double must be byte-reversed
    # before struct can unpack it with the native 'd' format.
    if sys.byteorder != 'big':
        _BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1]
    nan, inf = struct.unpack('dd', _BYTES)
    # -Infinity is simply the negation of +Infinity.
    return nan, inf, -inf
NaN, PosInf, NegInf = _floatconstants()
def linecol(doc, pos):
    """Return the 1-based line number and column of offset *pos* in *doc*."""
    lineno = doc.count('\n', 0, pos) + 1
    # Column is measured from the most recent newline, or from the start of
    # the document when we are still on the first line.
    if lineno > 1:
        colno = pos - doc.rindex('\n', 0, pos)
    else:
        colno = pos
    return lineno, colno
def errmsg(msg, doc, pos, end=None):
    """Format *msg* with line/column context for a position (or span) in *doc*."""
    # Note that this function is called from _speedups
    lineno, colno = linecol(doc, pos)
    if end is None:
        return '%s: line %d column %d (char %d)' % (msg, lineno, colno, pos)
    endlineno, endcolno = linecol(doc, end)
    fmt = '%s: line %d column %d - line %d column %d (char %d - %d)'
    return fmt % (msg, lineno, colno, endlineno, endcolno, pos, end)
_CONSTANTS = {
'-Infinity': NegInf,
'Infinity': PosInf,
'NaN': NaN,
}
STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS)
BACKSLASH = {
'"': u'"', '\\': u'\\', '/': u'/',
'b': u'\b', 'f': u'\f', 'n': u'\n', 'r': u'\r', 't': u'\t',
}
DEFAULT_ENCODING = "utf-8"
def py_scanstring(s, end, encoding=None, strict=True, _b=BACKSLASH, _m=STRINGCHUNK.match):
    """Scan the string s for a JSON string. End is the index of the
    character in s after the quote that started the JSON string.
    Unescapes all valid JSON string escape sequences and raises ValueError
    on attempt to decode an invalid string. If strict is False then literal
    control characters are allowed in the string.

    Returns a tuple of the decoded string and the index of the character in s
    after the end quote."""
    if encoding is None:
        encoding = DEFAULT_ENCODING
    chunks = []
    _append = chunks.append
    begin = end - 1
    while 1:
        chunk = _m(s, end)
        if chunk is None:
            raise ValueError(
                errmsg("Unterminated string starting at", s, begin))
        end = chunk.end()
        content, terminator = chunk.groups()
        # Content contains zero or more unescaped string characters
        if content:
            if not isinstance(content, unicode):
                content = unicode(content, encoding)
            _append(content)
        # Terminator is the end of string, a literal control character,
        # or a backslash denoting that an escape sequence follows
        if terminator == '"':
            break
        elif terminator != '\\':
            if strict:
                msg = "Invalid control character %r at" % (terminator,)
                # BUG FIX: wrap with errmsg() so the error carries
                # line/column context, consistent with every other
                # ValueError raised in this function.
                raise ValueError(errmsg(msg, s, end))
            else:
                _append(terminator)
            continue
        try:
            esc = s[end]
        except IndexError:
            raise ValueError(
                errmsg("Unterminated string starting at", s, begin))
        # If not a unicode escape sequence, must be in the lookup table
        if esc != 'u':
            try:
                char = _b[esc]
            except KeyError:
                raise ValueError(
                    errmsg("Invalid \\escape: %r" % (esc,), s, end))
            end += 1
        else:
            # Unicode escape sequence
            esc = s[end + 1:end + 5]
            next_end = end + 5
            if len(esc) != 4:
                msg = "Invalid \\uXXXX escape"
                raise ValueError(errmsg(msg, s, end))
            uni = int(esc, 16)
            # Check for surrogate pair on UCS-4 systems
            if 0xd800 <= uni <= 0xdbff and sys.maxunicode > 65535:
                msg = "Invalid \\uXXXX\\uXXXX surrogate pair"
                if not s[end + 5:end + 7] == '\\u':
                    raise ValueError(errmsg(msg, s, end))
                esc2 = s[end + 7:end + 11]
                if len(esc2) != 4:
                    raise ValueError(errmsg(msg, s, end))
                uni2 = int(esc2, 16)
                uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00))
                next_end += 6
            char = unichr(uni)
            end = next_end
        # Append the unescaped character
        _append(char)
    return u''.join(chunks), end
# Use speedup if available
scanstring = c_scanstring or py_scanstring
WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS)
WHITESPACE_STR = ' \t\n\r'
def JSONObject((s, end), encoding, strict, scan_once, object_hook, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
    # Parse a JSON object.  `s` is the document and `end` is the index just
    # past the opening '{'.  Returns (pairs, index-past-'}'), where pairs is
    # the dict or the object_hook's transformation of it.
    # (Python 2 tuple-parameter syntax.)
    pairs = {}
    # Use a slice to prevent IndexError from being raised, the following
    # check will raise a more specific ValueError if the string is empty
    nextchar = s[end:end + 1]
    # Normally we expect nextchar == '"'
    if nextchar != '"':
        if nextchar in _ws:
            end = _w(s, end).end()
            nextchar = s[end:end + 1]
        # Trivial empty object
        if nextchar == '}':
            return pairs, end + 1
        elif nextchar != '"':
            raise ValueError(errmsg("Expecting property name", s, end))
    end += 1
    while True:
        key, end = scanstring(s, end, encoding, strict)
        # To skip some function call overhead we optimize the fast paths where
        # the JSON key separator is ": " or just ":".
        if s[end:end + 1] != ':':
            end = _w(s, end).end()
            if s[end:end + 1] != ':':
                raise ValueError(errmsg("Expecting : delimiter", s, end))
        end += 1
        try:
            # Fast path: skip one or two whitespace chars without a regex.
            if s[end] in _ws:
                end += 1
                if s[end] in _ws:
                    end = _w(s, end + 1).end()
        except IndexError:
            pass
        try:
            value, end = scan_once(s, end)
        except StopIteration:
            raise ValueError(errmsg("Expecting object", s, end))
        pairs[key] = value
        try:
            nextchar = s[end]
            if nextchar in _ws:
                end = _w(s, end + 1).end()
                nextchar = s[end]
        except IndexError:
            nextchar = ''
        end += 1
        if nextchar == '}':
            break
        elif nextchar != ',':
            raise ValueError(errmsg("Expecting , delimiter", s, end - 1))
        try:
            nextchar = s[end]
            if nextchar in _ws:
                end += 1
                nextchar = s[end]
                if nextchar in _ws:
                    end = _w(s, end + 1).end()
                    nextchar = s[end]
        except IndexError:
            nextchar = ''
        end += 1
        if nextchar != '"':
            raise ValueError(errmsg("Expecting property name", s, end - 1))
    if object_hook is not None:
        # Let the caller convert the finished dict into another object.
        pairs = object_hook(pairs)
    return pairs, end
def JSONArray((s, end), scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
    # Parse a JSON array from ``s`` starting just past the opening '['
    # (index ``end``).  Returns ``(values, index_past_closing_bracket)``.
    # NOTE: Python 2 tuple-parameter syntax, matching JSONObject above.
    values = []
    nextchar = s[end:end + 1]
    if nextchar in _ws:
        end = _w(s, end + 1).end()
        nextchar = s[end:end + 1]
    # Look-ahead for trivial empty array
    if nextchar == ']':
        return values, end + 1
    # Bind the bound method once; avoids an attribute lookup per element.
    _append = values.append
    while True:
        try:
            value, end = scan_once(s, end)
        except StopIteration:
            raise ValueError(errmsg("Expecting object", s, end))
        _append(value)
        # Skip whitespace, then read the delimiter (',' or ']').
        nextchar = s[end:end + 1]
        if nextchar in _ws:
            end = _w(s, end + 1).end()
            nextchar = s[end:end + 1]
        end += 1
        if nextchar == ']':
            break
        elif nextchar != ',':
            raise ValueError(errmsg("Expecting , delimiter", s, end))
        # Skip whitespace after the comma before the next value.
        try:
            if s[end] in _ws:
                end += 1
                if s[end] in _ws:
                    end = _w(s, end + 1).end()
        except IndexError:
            pass
    return values, end
class JSONDecoder(object):
    """Simple JSON <http://json.org> decoder

    Performs the following translations in decoding by default:

    +---------------+-------------------+
    | JSON          | Python            |
    +===============+===================+
    | object        | dict              |
    +---------------+-------------------+
    | array         | list              |
    +---------------+-------------------+
    | string        | unicode           |
    +---------------+-------------------+
    | number (int)  | int, long         |
    +---------------+-------------------+
    | number (real) | float             |
    +---------------+-------------------+
    | true          | True              |
    +---------------+-------------------+
    | false         | False             |
    +---------------+-------------------+
    | null          | None              |
    +---------------+-------------------+

    It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as
    their corresponding ``float`` values, which is outside the JSON spec.
    """
    def __init__(self, encoding=None, object_hook=None, parse_float=None,
            parse_int=None, parse_constant=None, strict=True):
        """``encoding`` determines the encoding used to interpret any ``str``
        objects decoded by this instance (utf-8 by default). It has no
        effect when decoding ``unicode`` objects.

        Note that currently only encodings that are a superset of ASCII work,
        strings of other encodings should be passed in as ``unicode``.

        ``object_hook``, if specified, will be called with the result
        of every JSON object decoded and its return value will be used in
        place of the given ``dict``. This can be used to provide custom
        deserializations (e.g. to support JSON-RPC class hinting).

        ``parse_float``, if specified, will be called with the string
        of every JSON float to be decoded. By default this is equivalent to
        float(num_str). This can be used to use another datatype or parser
        for JSON floats (e.g. decimal.Decimal).

        ``parse_int``, if specified, will be called with the string
        of every JSON int to be decoded. By default this is equivalent to
        int(num_str). This can be used to use another datatype or parser
        for JSON integers (e.g. float).

        ``parse_constant``, if specified, will be called with one of the
        following strings: -Infinity, Infinity, NaN.
        This can be used to raise an exception if invalid JSON numbers
        are encountered.
        """
        self.encoding = encoding
        self.object_hook = object_hook
        self.parse_float = parse_float or float
        self.parse_int = parse_int or int
        self.parse_constant = parse_constant or _CONSTANTS.__getitem__
        self.strict = strict
        # The scanner reads the parse_* attributes above, so they must be
        # fully configured before make_scanner() is called.
        self.parse_object = JSONObject
        self.parse_array = JSONArray
        self.parse_string = scanstring
        self.scan_once = make_scanner(self)

    def decode(self, s, _w=WHITESPACE.match):
        """Return the Python representation of ``s`` (a ``str`` or ``unicode``
        instance containing a JSON document)
        """
        obj, end = self.raw_decode(s, idx=_w(s, 0).end())
        # Trailing whitespace is allowed, but any other trailing text is an
        # error -- a complete document must consume the whole string.
        end = _w(s, end).end()
        if end != len(s):
            raise ValueError(errmsg("Extra data", s, end, len(s)))
        return obj

    def raw_decode(self, s, idx=0):
        """Decode a JSON document from ``s`` (a ``str`` or ``unicode`` beginning
        with a JSON document) and return a 2-tuple of the Python
        representation and the index in ``s`` where the document ended.

        This can be used to decode a JSON document from a string that may
        have extraneous data at the end.
        """
        try:
            obj, end = self.scan_once(s, idx)
        except StopIteration:
            raise ValueError("No JSON object could be decoded")
        return obj, end
| Python |
"""JSON token scanner
"""
import re
try:
from simplejson._speedups import make_scanner as c_make_scanner
except ImportError:
c_make_scanner = None
__all__ = ['make_scanner']
NUMBER_RE = re.compile(
r'(-?(?:0|[1-9]\d*))(\.\d+)?([eE][-+]?\d+)?',
(re.VERBOSE | re.MULTILINE | re.DOTALL))
def py_make_scanner(context):
    """Build and return a ``_scan_once(string, idx)`` closure that parses a
    single JSON value starting at ``idx`` and returns ``(value, end_index)``,
    raising StopIteration when no value starts there.

    ``context`` is a JSONDecoder-like object; its attributes are bound to
    locals up front so the recursive closure avoids attribute lookups.
    """
    parse_object = context.parse_object
    parse_array = context.parse_array
    parse_string = context.parse_string
    match_number = NUMBER_RE.match
    encoding = context.encoding
    strict = context.strict
    parse_float = context.parse_float
    parse_int = context.parse_int
    parse_constant = context.parse_constant
    object_hook = context.object_hook
    def _scan_once(string, idx):
        # Dispatch on the first character of the next token.  The order of
        # these tests is significant: numbers are tried before the NaN /
        # Infinity constants so '-' prefixed numbers match first.
        try:
            nextchar = string[idx]
        except IndexError:
            raise StopIteration
        if nextchar == '"':
            return parse_string(string, idx + 1, encoding, strict)
        elif nextchar == '{':
            # parse_object takes a (string, idx) tuple -- Python 2 style.
            return parse_object((string, idx + 1), encoding, strict, _scan_once, object_hook)
        elif nextchar == '[':
            return parse_array((string, idx + 1), _scan_once)
        elif nextchar == 'n' and string[idx:idx + 4] == 'null':
            return None, idx + 4
        elif nextchar == 't' and string[idx:idx + 4] == 'true':
            return True, idx + 4
        elif nextchar == 'f' and string[idx:idx + 5] == 'false':
            return False, idx + 5
        m = match_number(string, idx)
        if m is not None:
            integer, frac, exp = m.groups()
            if frac or exp:
                res = parse_float(integer + (frac or '') + (exp or ''))
            else:
                res = parse_int(integer)
            return res, m.end()
        elif nextchar == 'N' and string[idx:idx + 3] == 'NaN':
            return parse_constant('NaN'), idx + 3
        elif nextchar == 'I' and string[idx:idx + 8] == 'Infinity':
            return parse_constant('Infinity'), idx + 8
        elif nextchar == '-' and string[idx:idx + 9] == '-Infinity':
            return parse_constant('-Infinity'), idx + 9
        else:
            raise StopIteration
    return _scan_once
# Use the C scanner when the speedups extension is available.
make_scanner = c_make_scanner or py_make_scanner
| Python |
"""Implementation of JSONEncoder
"""
import re
try:
from simplejson._speedups import encode_basestring_ascii as c_encode_basestring_ascii
except ImportError:
c_encode_basestring_ascii = None
try:
from simplejson._speedups import make_encoder as c_make_encoder
except ImportError:
c_make_encoder = None
ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]')
ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
HAS_UTF8 = re.compile(r'[\x80-\xff]')
ESCAPE_DCT = {
'\\': '\\\\',
'"': '\\"',
'\b': '\\b',
'\f': '\\f',
'\n': '\\n',
'\r': '\\r',
'\t': '\\t',
}
for i in range(0x20):
ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))
# Assume this produces an infinity on all machines (probably not guaranteed)
INFINITY = float('1e66666')
FLOAT_REPR = repr
def encode_basestring(s):
    """Return *s* rendered as a JSON string literal: surrounded by double
    quotes, with control characters, quotes and backslashes escaped.
    """
    def _substitute(match):
        # Every character the ESCAPE pattern can match has an entry
        # in ESCAPE_DCT, so no KeyError handling is needed here.
        return ESCAPE_DCT[match.group(0)]
    escaped = ESCAPE.sub(_substitute, s)
    return '"' + escaped + '"'
def py_encode_basestring_ascii(s):
    """Return an ASCII-only JSON representation of a Python string
    """
    # Python 2: promote UTF-8 byte strings to unicode so that multibyte
    # characters are escaped as code points rather than raw bytes.
    if isinstance(s, str) and HAS_UTF8.search(s) is not None:
        s = s.decode('utf-8')
    def replace(match):
        s = match.group(0)
        try:
            return ESCAPE_DCT[s]
        except KeyError:
            # Not a short escape: emit a \uXXXX (or surrogate pair) escape.
            n = ord(s)
            if n < 0x10000:
                return '\\u%04x' % (n,)
            else:
                # surrogate pair
                n -= 0x10000
                s1 = 0xd800 | ((n >> 10) & 0x3ff)
                s2 = 0xdc00 | (n & 0x3ff)
                return '\\u%04x\\u%04x' % (s1, s2)
    # str() is safe: after substitution the result is pure ASCII.
    return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'
encode_basestring_ascii = c_encode_basestring_ascii or py_encode_basestring_ascii
class JSONEncoder(object):
    """Extensible JSON <http://json.org> encoder for Python data structures.

    Supports the following objects and types by default:

    +-------------------+---------------+
    | Python            | JSON          |
    +===================+===============+
    | dict              | object        |
    +-------------------+---------------+
    | list, tuple       | array         |
    +-------------------+---------------+
    | str, unicode      | string        |
    +-------------------+---------------+
    | int, long, float  | number        |
    +-------------------+---------------+
    | True              | true          |
    +-------------------+---------------+
    | False             | false         |
    +-------------------+---------------+
    | None              | null          |
    +-------------------+---------------+

    To extend this to recognize other objects, subclass and implement a
    ``.default()`` method with another method that returns a serializable
    object for ``o`` if possible, otherwise it should call the superclass
    implementation (to raise ``TypeError``).
    """
    # Class-level defaults; overridden per-instance when ``separators``
    # is passed to __init__.
    item_separator = ', '
    key_separator = ': '
    def __init__(self, skipkeys=False, ensure_ascii=True,
            check_circular=True, allow_nan=True, sort_keys=False,
            indent=None, separators=None, encoding='utf-8', default=None):
        """Constructor for JSONEncoder, with sensible defaults.

        If skipkeys is False, then it is a TypeError to attempt
        encoding of keys that are not str, int, long, float or None.  If
        skipkeys is True, such items are simply skipped.

        If ensure_ascii is True, the output is guaranteed to be str
        objects with all incoming unicode characters escaped.  If
        ensure_ascii is false, the output will be unicode object.

        If check_circular is True, then lists, dicts, and custom encoded
        objects will be checked for circular references during encoding to
        prevent an infinite recursion (which would cause an OverflowError).
        Otherwise, no such check takes place.

        If allow_nan is True, then NaN, Infinity, and -Infinity will be
        encoded as such.  This behavior is not JSON specification compliant,
        but is consistent with most JavaScript based encoders and decoders.
        Otherwise, it will be a ValueError to encode such floats.

        If sort_keys is True, then the output of dictionaries will be
        sorted by key; this is useful for regression tests to ensure
        that JSON serializations can be compared on a day-to-day basis.

        If indent is a non-negative integer, then JSON array
        elements and object members will be pretty-printed with that
        indent level.  An indent level of 0 will only insert newlines.
        None is the most compact representation.

        If specified, separators should be a (item_separator, key_separator)
        tuple.  The default is (', ', ': ').  To get the most compact JSON
        representation you should specify (',', ':') to eliminate whitespace.

        If specified, default is a function that gets called for objects
        that can't otherwise be serialized.  It should return a JSON encodable
        version of the object or raise a ``TypeError``.

        If encoding is not None, then all input strings will be
        transformed into unicode using that encoding prior to JSON-encoding.
        The default is UTF-8.
        """
        self.skipkeys = skipkeys
        self.ensure_ascii = ensure_ascii
        self.check_circular = check_circular
        self.allow_nan = allow_nan
        self.sort_keys = sort_keys
        self.indent = indent
        if separators is not None:
            self.item_separator, self.key_separator = separators
        if default is not None:
            # The instance attribute shadows the default() method below.
            self.default = default
        self.encoding = encoding
    def default(self, o):
        """Implement this method in a subclass such that it returns
        a serializable object for ``o``, or calls the base implementation
        (to raise a ``TypeError``).

        For example, to support arbitrary iterators, you could
        implement default like this::

            def default(self, o):
                try:
                    iterable = iter(o)
                except TypeError:
                    pass
                else:
                    return list(iterable)
                return JSONEncoder.default(self, o)
        """
        raise TypeError("%r is not JSON serializable" % (o,))
    def encode(self, o):
        """Return a JSON string representation of a Python data structure.

        >>> JSONEncoder().encode({"foo": ["bar", "baz"]})
        '{"foo": ["bar", "baz"]}'
        """
        # This is for extremely simple cases and benchmarks.
        if isinstance(o, basestring):
            if isinstance(o, str):
                _encoding = self.encoding
                if (_encoding is not None
                        and not (_encoding == 'utf-8')):
                    # Non-UTF-8 byte strings are decoded before escaping;
                    # the escape helpers handle UTF-8 themselves.
                    o = o.decode(_encoding)
            if self.ensure_ascii:
                return encode_basestring_ascii(o)
            else:
                return encode_basestring(o)
        # This doesn't pass the iterator directly to ''.join() because the
        # exceptions aren't as detailed.  The list call should be roughly
        # equivalent to the PySequence_Fast that ''.join() would do.
        chunks = self.iterencode(o, _one_shot=True)
        if not isinstance(chunks, (list, tuple)):
            chunks = list(chunks)
        return ''.join(chunks)
    def iterencode(self, o, _one_shot=False):
        """Encode the given object and yield each string
        representation as available.

        For example::

            for chunk in JSONEncoder().iterencode(bigobject):
                mysocket.write(chunk)
        """
        if self.check_circular:
            markers = {}
        else:
            markers = None
        if self.ensure_ascii:
            _encoder = encode_basestring_ascii
        else:
            _encoder = encode_basestring
        if self.encoding != 'utf-8':
            # Wrap the chosen escape helper so byte strings are first
            # decoded from the configured encoding.
            def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding):
                if isinstance(o, str):
                    o = o.decode(_encoding)
                return _orig_encoder(o)
        def floatstr(o, allow_nan=self.allow_nan, _repr=FLOAT_REPR, _inf=INFINITY, _neginf=-INFINITY):
            # Check for specials.  Note that this type of test is processor- and/or
            # platform-specific, so do tests which don't depend on the internals.
            # (o != o) is the NaN test: NaN is the only value unequal to itself.
            if o != o:
                text = 'NaN'
            elif o == _inf:
                text = 'Infinity'
            elif o == _neginf:
                text = '-Infinity'
            else:
                return _repr(o)
            if not allow_nan:
                raise ValueError("Out of range float values are not JSON compliant: %r"
                    % (o,))
            return text
        # Prefer the C encoder only in the one-shot, unsorted, compact case;
        # otherwise fall back to the pure-Python generator factory.
        if _one_shot and c_make_encoder is not None and not self.indent and not self.sort_keys:
            _iterencode = c_make_encoder(
                markers, self.default, _encoder, self.indent,
                self.key_separator, self.item_separator, self.sort_keys,
                self.skipkeys, self.allow_nan)
        else:
            _iterencode = _make_iterencode(
                markers, self.default, _encoder, self.indent, floatstr,
                self.key_separator, self.item_separator, self.sort_keys,
                self.skipkeys, _one_shot)
        return _iterencode(o, 0)
def _make_iterencode(markers, _default, _encoder, _indent, _floatstr, _key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
        ## HACK: hand-optimized bytecode; turn globals into locals
        False=False,
        True=True,
        ValueError=ValueError,
        basestring=basestring,
        dict=dict,
        float=float,
        id=id,
        int=int,
        isinstance=isinstance,
        list=list,
        long=long,
        str=str,
        tuple=tuple,
    ):
    # Factory returning the pure-Python _iterencode generator.  The three
    # nested generators below recurse into each other; ``markers`` maps
    # id(container) -> container while that container is being encoded,
    # which is how circular references are detected.
    def _iterencode_list(lst, _current_indent_level):
        # Yield the JSON chunks for a list or tuple.
        if not lst:
            yield '[]'
            return
        if markers is not None:
            markerid = id(lst)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = lst
        buf = '['
        if _indent is not None:
            _current_indent_level += 1
            newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
            separator = _item_separator + newline_indent
            buf += newline_indent
        else:
            newline_indent = None
            separator = _item_separator
        first = True
        for value in lst:
            # ``buf`` carries '[' (plus indent) for the first element and
            # the separator for every later one; it is emitted together
            # with scalar values to cut the number of yields.
            if first:
                first = False
            else:
                buf = separator
            if isinstance(value, basestring):
                yield buf + _encoder(value)
            elif value is None:
                yield buf + 'null'
            elif value is True:
                yield buf + 'true'
            elif value is False:
                yield buf + 'false'
            elif isinstance(value, (int, long)):
                yield buf + str(value)
            elif isinstance(value, float):
                yield buf + _floatstr(value)
            else:
                yield buf
                if isinstance(value, (list, tuple)):
                    chunks = _iterencode_list(value, _current_indent_level)
                elif isinstance(value, dict):
                    chunks = _iterencode_dict(value, _current_indent_level)
                else:
                    chunks = _iterencode(value, _current_indent_level)
                for chunk in chunks:
                    yield chunk
        if newline_indent is not None:
            _current_indent_level -= 1
            yield '\n' + (' ' * (_indent * _current_indent_level))
        yield ']'
        if markers is not None:
            del markers[markerid]
    def _iterencode_dict(dct, _current_indent_level):
        # Yield the JSON chunks for a dict.
        if not dct:
            yield '{}'
            return
        if markers is not None:
            markerid = id(dct)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = dct
        yield '{'
        if _indent is not None:
            _current_indent_level += 1
            newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
            item_separator = _item_separator + newline_indent
            yield newline_indent
        else:
            newline_indent = None
            item_separator = _item_separator
        first = True
        if _sort_keys:
            items = dct.items()
            items.sort(key=lambda kv: kv[0])
        else:
            items = dct.iteritems()
        for key, value in items:
            # Coerce non-string keys to their JSON string form, or skip /
            # reject them depending on _skipkeys.
            if isinstance(key, basestring):
                pass
            # JavaScript is weakly typed for these, so it makes sense to
            # also allow them.  Many encoders seem to do something like this.
            elif isinstance(key, float):
                key = _floatstr(key)
            elif isinstance(key, (int, long)):
                key = str(key)
            elif key is True:
                key = 'true'
            elif key is False:
                key = 'false'
            elif key is None:
                key = 'null'
            elif _skipkeys:
                continue
            else:
                raise TypeError("key %r is not a string" % (key,))
            if first:
                first = False
            else:
                yield item_separator
            yield _encoder(key)
            yield _key_separator
            if isinstance(value, basestring):
                yield _encoder(value)
            elif value is None:
                yield 'null'
            elif value is True:
                yield 'true'
            elif value is False:
                yield 'false'
            elif isinstance(value, (int, long)):
                yield str(value)
            elif isinstance(value, float):
                yield _floatstr(value)
            else:
                if isinstance(value, (list, tuple)):
                    chunks = _iterencode_list(value, _current_indent_level)
                elif isinstance(value, dict):
                    chunks = _iterencode_dict(value, _current_indent_level)
                else:
                    chunks = _iterencode(value, _current_indent_level)
                for chunk in chunks:
                    yield chunk
        if newline_indent is not None:
            _current_indent_level -= 1
            yield '\n' + (' ' * (_indent * _current_indent_level))
        yield '}'
        if markers is not None:
            del markers[markerid]
    def _iterencode(o, _current_indent_level):
        # Top-level dispatch: scalars are yielded directly, containers are
        # delegated, anything else goes through _default() and is retried.
        if isinstance(o, basestring):
            yield _encoder(o)
        elif o is None:
            yield 'null'
        elif o is True:
            yield 'true'
        elif o is False:
            yield 'false'
        elif isinstance(o, (int, long)):
            yield str(o)
        elif isinstance(o, float):
            yield _floatstr(o)
        elif isinstance(o, (list, tuple)):
            for chunk in _iterencode_list(o, _current_indent_level):
                yield chunk
        elif isinstance(o, dict):
            for chunk in _iterencode_dict(o, _current_indent_level):
                yield chunk
        else:
            if markers is not None:
                markerid = id(o)
                if markerid in markers:
                    raise ValueError("Circular reference detected")
                markers[markerid] = o
            o = _default(o)
            for chunk in _iterencode(o, _current_indent_level):
                yield chunk
            if markers is not None:
                del markers[markerid]
    return _iterencode
| Python |
r"""JSON (JavaScript Object Notation) <http://json.org> is a subset of
JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data
interchange format.
:mod:`simplejson` exposes an API familiar to users of the standard library
:mod:`marshal` and :mod:`pickle` modules. It is the externally maintained
version of the :mod:`json` library contained in Python 2.6, but maintains
compatibility with Python 2.4 and Python 2.5 and (currently) has
significant performance advantages, even without using the optional C
extension for speedups.
Encoding basic Python object hierarchies::
>>> import simplejson as json
>>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
'["foo", {"bar": ["baz", null, 1.0, 2]}]'
>>> print json.dumps("\"foo\bar")
"\"foo\bar"
>>> print json.dumps(u'\u1234')
"\u1234"
>>> print json.dumps('\\')
"\\"
>>> print json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True)
{"a": 0, "b": 0, "c": 0}
>>> from StringIO import StringIO
>>> io = StringIO()
>>> json.dump(['streaming API'], io)
>>> io.getvalue()
'["streaming API"]'
Compact encoding::
>>> import simplejson as json
>>> json.dumps([1,2,3,{'4': 5, '6': 7}], separators=(',',':'))
'[1,2,3,{"4":5,"6":7}]'
Pretty printing::
>>> import simplejson as json
>>> s = json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4)
>>> print '\n'.join([l.rstrip() for l in s.splitlines()])
{
"4": 5,
"6": 7
}
Decoding JSON::
>>> import simplejson as json
>>> obj = [u'foo', {u'bar': [u'baz', None, 1.0, 2]}]
>>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]') == obj
True
>>> json.loads('"\\"foo\\bar"') == u'"foo\x08ar'
True
>>> from StringIO import StringIO
>>> io = StringIO('["streaming API"]')
>>> json.load(io)[0] == 'streaming API'
True
Specializing JSON object decoding::
>>> import simplejson as json
>>> def as_complex(dct):
... if '__complex__' in dct:
... return complex(dct['real'], dct['imag'])
... return dct
...
>>> json.loads('{"__complex__": true, "real": 1, "imag": 2}',
... object_hook=as_complex)
(1+2j)
>>> import decimal
>>> json.loads('1.1', parse_float=decimal.Decimal) == decimal.Decimal('1.1')
True
Specializing JSON object encoding::
>>> import simplejson as json
>>> def encode_complex(obj):
... if isinstance(obj, complex):
... return [obj.real, obj.imag]
... raise TypeError("%r is not JSON serializable" % (o,))
...
>>> json.dumps(2 + 1j, default=encode_complex)
'[2.0, 1.0]'
>>> json.JSONEncoder(default=encode_complex).encode(2 + 1j)
'[2.0, 1.0]'
>>> ''.join(json.JSONEncoder(default=encode_complex).iterencode(2 + 1j))
'[2.0, 1.0]'
Using simplejson.tool from the shell to validate and pretty-print::
$ echo '{"json":"obj"}' | python -msimplejson.tool
{
"json": "obj"
}
$ echo '{ 1.2:3.4}' | python -msimplejson.tool
Expecting property name: line 1 column 2 (char 2)
"""
__version__ = '2.0.7'
__all__ = [
    'dump', 'dumps', 'load', 'loads',
    'JSONDecoder', 'JSONEncoder',
]
from decoder import JSONDecoder
from encoder import JSONEncoder
# Shared encoder used by dump()/dumps() when every argument is at its
# default; avoids constructing a new JSONEncoder per call.
_default_encoder = JSONEncoder(
    skipkeys=False,
    ensure_ascii=True,
    check_circular=True,
    allow_nan=True,
    indent=None,
    separators=None,
    encoding='utf-8',
    default=None,
)
def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
        allow_nan=True, cls=None, indent=None, separators=None,
        encoding='utf-8', default=None, **kw):
    """Serialize ``obj`` as a JSON formatted stream to ``fp`` (a
    ``.write()``-supporting file-like object).

    The keyword arguments mirror ``dumps()``: ``skipkeys`` drops
    non-basic dict keys instead of raising ``TypeError``;
    ``ensure_ascii=False`` may write ``unicode`` chunks to ``fp``;
    ``check_circular=False`` skips the circular-reference check (a cycle
    then raises ``OverflowError`` or worse); ``allow_nan=False`` makes
    out-of-range floats a ``ValueError``; ``indent`` enables
    pretty-printing; ``separators`` overrides ``(', ', ': ')``;
    ``encoding`` is the character encoding for ``str`` inputs;
    ``default(obj)`` supplies a serializable version of otherwise
    unserializable objects; ``cls`` selects a custom ``JSONEncoder``
    subclass.
    """
    # Fast path: with every argument at its default the shared
    # module-level encoder can be reused.
    using_defaults = (skipkeys is False and ensure_ascii is True and
        check_circular is True and allow_nan is True and
        cls is None and indent is None and separators is None and
        encoding == 'utf-8' and default is None and not kw)
    if using_defaults:
        iterable = _default_encoder.iterencode(obj)
    else:
        if cls is None:
            cls = JSONEncoder
        encoder = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
            check_circular=check_circular, allow_nan=allow_nan,
            indent=indent, separators=separators, encoding=encoding,
            default=default, **kw)
        iterable = encoder.iterencode(obj)
    # could accelerate with writelines in some versions of Python, at
    # a debuggability cost
    for piece in iterable:
        fp.write(piece)
def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
        allow_nan=True, cls=None, indent=None, separators=None,
        encoding='utf-8', default=None, **kw):
    """Serialize ``obj`` to a JSON formatted ``str``.

    The keyword arguments mirror ``dump()``: ``skipkeys`` drops non-basic
    dict keys instead of raising ``TypeError``; ``ensure_ascii=False``
    returns a ``unicode`` instance instead of an escaped ASCII ``str``;
    ``check_circular=False`` skips the circular-reference check (a cycle
    then raises ``OverflowError`` or worse); ``allow_nan=False`` makes
    out-of-range floats a ``ValueError``; ``indent`` enables
    pretty-printing; ``separators`` overrides ``(', ', ': ')``;
    ``encoding`` is the character encoding for ``str`` inputs;
    ``default(obj)`` supplies a serializable version of otherwise
    unserializable objects; ``cls`` selects a custom ``JSONEncoder``
    subclass.
    """
    # Fast path: with every argument at its default the shared
    # module-level encoder can be reused.
    using_defaults = (skipkeys is False and ensure_ascii is True and
        check_circular is True and allow_nan is True and
        cls is None and indent is None and separators is None and
        encoding == 'utf-8' and default is None and not kw)
    if using_defaults:
        return _default_encoder.encode(obj)
    if cls is None:
        cls = JSONEncoder
    encoder = cls(
        skipkeys=skipkeys, ensure_ascii=ensure_ascii,
        check_circular=check_circular, allow_nan=allow_nan, indent=indent,
        separators=separators, encoding=encoding, default=default,
        **kw)
    return encoder.encode(obj)
# Shared decoder used by loads() on the all-defaults fast path.
_default_decoder = JSONDecoder(encoding=None, object_hook=None)
def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None,
        parse_int=None, parse_constant=None, **kw):
    """Deserialize ``fp`` (a ``.read()``-supporting file-like object
    containing a JSON document) to a Python object.

    If the contents of ``fp`` use an ASCII-based encoding other than
    utf-8 (e.g. latin-1), pass its name as ``encoding``.  Encodings that
    are not ASCII based (such as UCS-2) are not allowed; wrap the file
    with ``codecs.getreader(fp)(encoding)`` or decode to ``unicode``
    and call ``loads()`` instead.

    ``object_hook`` is called with the result of every object literal
    decode (a ``dict``) and its return value is used in place of the
    ``dict`` -- useful for custom decoders such as JSON-RPC class
    hinting.  A custom ``JSONDecoder`` subclass may be given via ``cls``.
    """
    # Read the entire stream up front; the decoder works on one string.
    document = fp.read()
    return loads(document,
        encoding=encoding, cls=cls, object_hook=object_hook,
        parse_float=parse_float, parse_int=parse_int,
        parse_constant=parse_constant, **kw)
def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
        parse_int=None, parse_constant=None, **kw):
    """Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a
    JSON document) to a Python object.

    If ``s`` is a ``str`` in an ASCII-based encoding other than utf-8
    (e.g. latin-1), pass its name as ``encoding``; non-ASCII-based
    encodings (such as UCS-2) must be decoded to ``unicode`` first.

    ``object_hook`` is called with every decoded object literal (a
    ``dict``) and its return value replaces the ``dict``.  ``parse_float``
    / ``parse_int`` are called with the string of every JSON float / int
    (defaults are equivalent to ``float(num_str)`` / ``int(num_str)``).
    ``parse_constant`` is called with one of ``-Infinity``, ``Infinity``,
    ``NaN``, ``null``, ``true``, ``false``.  A custom ``JSONDecoder``
    subclass may be given via ``cls``.
    """
    # Fast path: with every argument at its default the shared
    # module-level decoder can be reused.
    if (cls is None and encoding is None and object_hook is None and
            parse_int is None and parse_float is None and
            parse_constant is None and not kw):
        return _default_decoder.decode(s)
    if cls is None:
        cls = JSONDecoder
    # Forward only the hooks the caller actually supplied, so the
    # decoder's own defaults apply to the rest.
    optional_hooks = (
        ('object_hook', object_hook),
        ('parse_float', parse_float),
        ('parse_int', parse_int),
        ('parse_constant', parse_constant),
    )
    for name, hook in optional_hooks:
        if hook is not None:
            kw[name] = hook
    return cls(encoding=encoding, **kw).decode(s)
| Python |
r"""Using simplejson from the shell to validate and
pretty-print::
$ echo '{"json":"obj"}' | python -msimplejson.tool
{
"json": "obj"
}
$ echo '{ 1.2:3.4}' | python -msimplejson.tool
Expecting property name: line 1 column 2 (char 2)
"""
import simplejson
def main():
import sys
if len(sys.argv) == 1:
infile = sys.stdin
outfile = sys.stdout
elif len(sys.argv) == 2:
infile = open(sys.argv[1], 'rb')
outfile = sys.stdout
elif len(sys.argv) == 3:
infile = open(sys.argv[1], 'rb')
outfile = open(sys.argv[2], 'wb')
else:
raise SystemExit("%s [infile [outfile]]" % (sys.argv[0],))
try:
obj = simplejson.load(infile)
except ValueError, e:
raise SystemExit(e)
simplejson.dump(obj, outfile, sort_keys=True, indent=4)
outfile.write('\n')
if __name__ == '__main__':
main()
| Python |
#!/usr/bin/python2.4
#
# Copyright 2007 The Python-Twitter Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''A library that provides a Python interface to the Twitter API'''
__author__ = 'python-twitter@googlegroups.com'
__version__ = '0.8.3'
import base64
import calendar
import datetime
import httplib
import os
import rfc822
import sys
import tempfile
import textwrap
import time
import calendar
import urllib
import urllib2
import urlparse
import gzip
import StringIO
try:
# Python >= 2.6
import json as simplejson
except ImportError:
try:
# Python < 2.6
import simplejson
except ImportError:
try:
# Google App Engine
from django.utils import simplejson
except ImportError:
raise ImportError, "Unable to load a json library"
# parse_qsl moved to urlparse module in v2.6
try:
from urlparse import parse_qsl, parse_qs
except ImportError:
from cgi import parse_qsl, parse_qs
try:
from hashlib import md5
except ImportError:
from md5 import md5
import oauth2 as oauth
# Maximum tweet length enforced by this library, in characters.
CHARACTER_LIMIT = 140
# A singleton representing a lazily instantiated FileCache.
DEFAULT_CACHE = object()
# OAuth 1.0a endpoints for the Twitter API.
REQUEST_TOKEN_URL = 'https://api.twitter.com/oauth/request_token'
ACCESS_TOKEN_URL  = 'https://api.twitter.com/oauth/access_token'
AUTHORIZATION_URL = 'https://api.twitter.com/oauth/authorize'
SIGNIN_URL        = 'https://api.twitter.com/oauth/authenticate'
class TwitterError(Exception):
  '''Base class for Twitter errors'''

  @property
  def message(self):
    '''Returns the first argument used to construct this error.'''
    # NOTE(review): presumably provided because Exception.message was
    # deprecated/removed in newer Python 2.x -- confirm against callers.
    return self.args[0]
class Status(object):
  '''A class representing the Status structure used by the twitter API.

  The Status structure exposes the following properties:

    status.created_at
    status.created_at_in_seconds # read only
    status.favorited
    status.in_reply_to_screen_name
    status.in_reply_to_user_id
    status.in_reply_to_status_id
    status.truncated
    status.source
    status.id
    status.text
    status.location
    status.relative_created_at # read only
    status.user
    status.urls
    status.user_mentions
    status.hashtags
    status.geo
    status.place
    status.coordinates
    status.contributors
  '''
  def __init__(self,
               created_at=None,
               favorited=None,
               id=None,
               text=None,
               location=None,
               user=None,
               in_reply_to_screen_name=None,
               in_reply_to_user_id=None,
               in_reply_to_status_id=None,
               truncated=None,
               source=None,
               now=None,
               urls=None,
               user_mentions=None,
               hashtags=None,
               geo=None,
               place=None,
               coordinates=None,
               contributors=None,
               retweeted=None,
               retweeted_status=None,
               retweet_count=None):
    '''An object to hold a Twitter status message.

    This class is normally instantiated by the twitter.Api class and
    returned in a sequence.

    Note: Dates are posted in the form "Sat Jan 27 04:17:38 +0000 2007"

    Args:
      created_at:
        The time this status message was posted. [Optional]
      favorited:
        Whether this is a favorite of the authenticated user. [Optional]
      id:
        The unique id of this status message. [Optional]
      text:
        The text of this status message. [Optional]
      location:
        the geolocation string associated with this message. [Optional]
      user:
        A twitter.User instance representing the person posting the
        message. [Optional]
      now:
        The current time, if the client choses to set it.
        Defaults to the wall clock time. [Optional]
      in_reply_to_screen_name:
        The screen name of the user this status replies to. [Optional]
      in_reply_to_user_id:
        The id of the user this status replies to. [Optional]
      in_reply_to_status_id:
        The id of the status this status replies to. [Optional]
      truncated:
        True if the API truncated the text of this status. [Optional]
      source:
        The client application used to post this status. [Optional]
      urls:
        A list of url entities extracted from the text. [Optional]
      user_mentions:
        A list of user entities mentioned in the text. [Optional]
      hashtags:
        A list of hashtag entities extracted from the text. [Optional]
      geo:
        Geo metadata as returned by the API. [Optional]
      place:
        Place metadata as returned by the API. [Optional]
      coordinates:
        Coordinates metadata as returned by the API. [Optional]
      contributors:
        The contributors associated with this status. [Optional]
      retweeted:
        Whether the authenticated user retweeted this status. [Optional]
      retweeted_status:
        A twitter.Status for the original status when this status is a
        retweet. [Optional]
      retweet_count:
        The number of times this status was retweeted. [Optional]
    '''
    self.created_at = created_at
    self.favorited = favorited
    self.id = id
    self.text = text
    self.location = location
    self.user = user
    self.now = now
    self.in_reply_to_screen_name = in_reply_to_screen_name
    self.in_reply_to_user_id = in_reply_to_user_id
    self.in_reply_to_status_id = in_reply_to_status_id
    self.truncated = truncated
    self.retweeted = retweeted
    self.source = source
    self.urls = urls
    self.user_mentions = user_mentions
    self.hashtags = hashtags
    self.geo = geo
    self.place = place
    self.coordinates = coordinates
    self.contributors = contributors
    self.retweeted_status = retweeted_status
    self.retweet_count = retweet_count

  def GetCreatedAt(self):
    '''Get the time this status message was posted.

    Returns:
      The time this status message was posted
    '''
    return self._created_at

  def SetCreatedAt(self, created_at):
    '''Set the time this status message was posted.

    Args:
      created_at:
        The time this status message was created
    '''
    self._created_at = created_at

  created_at = property(GetCreatedAt, SetCreatedAt,
                        doc='The time this status message was posted.')

  def GetCreatedAtInSeconds(self):
    '''Get the time this status message was posted, in seconds since the epoch.

    Returns:
      The time this status message was posted, in seconds since the epoch.
    '''
    return calendar.timegm(rfc822.parsedate(self.created_at))

  created_at_in_seconds = property(GetCreatedAtInSeconds,
                                   doc="The time this status message was "
                                       "posted, in seconds since the epoch")

  def GetFavorited(self):
    '''Get the favorited setting of this status message.

    Returns:
      True if this status message is favorited; False otherwise
    '''
    return self._favorited

  def SetFavorited(self, favorited):
    '''Set the favorited state of this status message.

    Args:
      favorited:
        boolean True/False favorited state of this status message
    '''
    self._favorited = favorited

  favorited = property(GetFavorited, SetFavorited,
                       doc='The favorited state of this status message.')

  def GetId(self):
    '''Get the unique id of this status message.

    Returns:
      The unique id of this status message
    '''
    return self._id

  def SetId(self, id):
    '''Set the unique id of this status message.

    Args:
      id:
        The unique id of this status message
    '''
    self._id = id

  id = property(GetId, SetId,
                doc='The unique id of this status message.')

  def GetInReplyToScreenName(self):
    '''Get the screen name of the user this status replies to.'''
    return self._in_reply_to_screen_name

  def SetInReplyToScreenName(self, in_reply_to_screen_name):
    '''Set the screen name of the user this status replies to.'''
    self._in_reply_to_screen_name = in_reply_to_screen_name

  in_reply_to_screen_name = property(GetInReplyToScreenName, SetInReplyToScreenName,
                                     doc='The screen name of the replied-to user.')

  def GetInReplyToUserId(self):
    '''Get the id of the user this status replies to.'''
    return self._in_reply_to_user_id

  def SetInReplyToUserId(self, in_reply_to_user_id):
    '''Set the id of the user this status replies to.'''
    self._in_reply_to_user_id = in_reply_to_user_id

  in_reply_to_user_id = property(GetInReplyToUserId, SetInReplyToUserId,
                                 doc='The id of the replied-to user.')

  def GetInReplyToStatusId(self):
    '''Get the id of the status this status replies to.'''
    return self._in_reply_to_status_id

  def SetInReplyToStatusId(self, in_reply_to_status_id):
    '''Set the id of the status this status replies to.'''
    self._in_reply_to_status_id = in_reply_to_status_id

  in_reply_to_status_id = property(GetInReplyToStatusId, SetInReplyToStatusId,
                                   doc='The id of the replied-to status.')

  def GetTruncated(self):
    '''Get whether the API truncated the text of this status.'''
    return self._truncated

  def SetTruncated(self, truncated):
    '''Set whether the API truncated the text of this status.'''
    self._truncated = truncated

  truncated = property(GetTruncated, SetTruncated,
                       doc='Whether the text of this status was truncated.')

  def GetRetweeted(self):
    '''Get whether the authenticated user retweeted this status.'''
    return self._retweeted

  def SetRetweeted(self, retweeted):
    '''Set whether the authenticated user retweeted this status.'''
    self._retweeted = retweeted

  retweeted = property(GetRetweeted, SetRetweeted,
                       doc='Whether the authenticated user retweeted this status.')

  def GetSource(self):
    '''Get the client application used to post this status.'''
    return self._source

  def SetSource(self, source):
    '''Set the client application used to post this status.'''
    self._source = source

  source = property(GetSource, SetSource,
                    doc='The client application used to post this status.')

  def GetText(self):
    '''Get the text of this status message.

    Returns:
      The text of this status message.
    '''
    return self._text

  def SetText(self, text):
    '''Set the text of this status message.

    Args:
      text:
        The text of this status message
    '''
    self._text = text

  text = property(GetText, SetText,
                  doc='The text of this status message')

  def GetLocation(self):
    '''Get the geolocation associated with this status message

    Returns:
      The geolocation string of this status message.
    '''
    return self._location

  def SetLocation(self, location):
    '''Set the geolocation associated with this status message

    Args:
      location:
        The geolocation string of this status message
    '''
    self._location = location

  location = property(GetLocation, SetLocation,
                      doc='The geolocation string of this status message')

  def GetRelativeCreatedAt(self):
    '''Get a human readable string representing the posting time

    Returns:
      A human readable string representing the posting time
    '''
    # fudge widens each bucket slightly so times near a boundary read as the
    # friendlier singular form ("about a minute ago" rather than "60 seconds").
    fudge = 1.25
    # NOTE: long() is Python 2; delta is whole seconds between "now" and post time.
    delta = long(self.now) - long(self.created_at_in_seconds)

    if delta < (1 * fudge):
      return 'about a second ago'
    elif delta < (60 * (1/fudge)):
      return 'about %d seconds ago' % (delta)
    elif delta < (60 * fudge):
      return 'about a minute ago'
    elif delta < (60 * 60 * (1/fudge)):
      return 'about %d minutes ago' % (delta / 60)
    elif delta < (60 * 60 * fudge) or delta / (60 * 60) == 1:
      return 'about an hour ago'
    elif delta < (60 * 60 * 24 * (1/fudge)):
      return 'about %d hours ago' % (delta / (60 * 60))
    elif delta < (60 * 60 * 24 * fudge) or delta / (60 * 60 * 24) == 1:
      return 'about a day ago'
    else:
      return 'about %d days ago' % (delta / (60 * 60 * 24))

  relative_created_at = property(GetRelativeCreatedAt,
                                 doc='Get a human readable string representing '
                                     'the posting time')

  def GetUser(self):
    '''Get a twitter.User representing the entity posting this status message.

    Returns:
      A twitter.User representing the entity posting this status message
    '''
    return self._user

  def SetUser(self, user):
    '''Set a twitter.User representing the entity posting this status message.

    Args:
      user:
        A twitter.User representing the entity posting this status message
    '''
    self._user = user

  user = property(GetUser, SetUser,
                  doc='A twitter.User representing the entity posting this '
                      'status message')

  def GetNow(self):
    '''Get the wallclock time for this status message.

    Used to calculate relative_created_at.  Defaults to the time
    the object was instantiated.

    Returns:
      Whatever the status instance believes the current time to be,
      in seconds since the epoch.
    '''
    # Lazily default to the wall clock the first time it is read.
    if self._now is None:
      self._now = time.time()
    return self._now

  def SetNow(self, now):
    '''Set the wallclock time for this status message.

    Used to calculate relative_created_at.  Defaults to the time
    the object was instantiated.

    Args:
      now:
        The wallclock time for this instance.
    '''
    self._now = now

  now = property(GetNow, SetNow,
                 doc='The wallclock time for this status instance.')

  def GetGeo(self):
    '''Get the geo metadata of this status, as returned by the API.'''
    return self._geo

  def SetGeo(self, geo):
    '''Set the geo metadata of this status.'''
    self._geo = geo

  geo = property(GetGeo, SetGeo,
                 doc='The geo metadata of this status.')

  def GetPlace(self):
    '''Get the place metadata of this status, as returned by the API.'''
    return self._place

  def SetPlace(self, place):
    '''Set the place metadata of this status.'''
    self._place = place

  place = property(GetPlace, SetPlace,
                   doc='The place metadata of this status.')

  def GetCoordinates(self):
    '''Get the coordinates metadata of this status, as returned by the API.'''
    return self._coordinates

  def SetCoordinates(self, coordinates):
    '''Set the coordinates metadata of this status.'''
    self._coordinates = coordinates

  coordinates = property(GetCoordinates, SetCoordinates,
                         doc='The coordinates metadata of this status.')

  def GetContributors(self):
    '''Get the contributors associated with this status.'''
    return self._contributors

  def SetContributors(self, contributors):
    '''Set the contributors associated with this status.'''
    self._contributors = contributors

  contributors = property(GetContributors, SetContributors,
                          doc='The contributors associated with this status.')

  def GetRetweeted_status(self):
    '''Get the original status if this status is a retweet.'''
    return self._retweeted_status

  def SetRetweeted_status(self, retweeted_status):
    '''Set the original status for this retweet.'''
    self._retweeted_status = retweeted_status

  retweeted_status = property(GetRetweeted_status, SetRetweeted_status,
                              doc='The original status, when this is a retweet.')

  def GetRetweetCount(self):
    '''Get the number of times this status has been retweeted.'''
    return self._retweet_count

  def SetRetweetCount(self, retweet_count):
    '''Set the number of times this status has been retweeted.'''
    self._retweet_count = retweet_count

  retweet_count = property(GetRetweetCount, SetRetweetCount,
                           doc='The number of times this status was retweeted.')

  def __ne__(self, other):
    return not self.__eq__(other)

  def __eq__(self, other):
    # NOTE: urls, user_mentions and hashtags are deliberately not compared,
    # matching the historical equality contract of this class.
    try:
      return other and \
             self.created_at == other.created_at and \
             self.id == other.id and \
             self.text == other.text and \
             self.location == other.location and \
             self.user == other.user and \
             self.in_reply_to_screen_name == other.in_reply_to_screen_name and \
             self.in_reply_to_user_id == other.in_reply_to_user_id and \
             self.in_reply_to_status_id == other.in_reply_to_status_id and \
             self.truncated == other.truncated and \
             self.retweeted == other.retweeted and \
             self.favorited == other.favorited and \
             self.source == other.source and \
             self.geo == other.geo and \
             self.place == other.place and \
             self.coordinates == other.coordinates and \
             self.contributors == other.contributors and \
             self.retweeted_status == other.retweeted_status and \
             self.retweet_count == other.retweet_count
    except AttributeError:
      return False

  def __str__(self):
    '''A string representation of this twitter.Status instance.

    The return value is the same as the JSON string representation.

    Returns:
      A string representation of this twitter.Status instance.
    '''
    return self.AsJsonString()

  def AsJsonString(self):
    '''A JSON string representation of this twitter.Status instance.

    Returns:
      A JSON string representation of this twitter.Status instance
    '''
    return simplejson.dumps(self.AsDict(), sort_keys=True)

  def AsDict(self):
    '''A dict representation of this twitter.Status instance.

    The return value uses the same key names as the JSON representation.

    Return:
      A dict representing this twitter.Status instance
    '''
    data = {}
    if self.created_at:
      data['created_at'] = self.created_at
    if self.id:
      data['id'] = self.id
    if self.text:
      data['text'] = self.text
    if self.location:
      data['location'] = self.location
    if self.user:
      data['user'] = self.user.AsDict()
    if self.in_reply_to_screen_name:
      data['in_reply_to_screen_name'] = self.in_reply_to_screen_name
    if self.in_reply_to_user_id:
      data['in_reply_to_user_id'] = self.in_reply_to_user_id
    if self.in_reply_to_status_id:
      data['in_reply_to_status_id'] = self.in_reply_to_status_id
    if self.truncated is not None:
      data['truncated'] = self.truncated
    if self.retweeted is not None:
      data['retweeted'] = self.retweeted
    # Fixed: favorited was previously added twice (a truthiness check followed
    # by this one); the 'is not None' form is kept so an explicit False is
    # still serialized.
    if self.favorited is not None:
      data['favorited'] = self.favorited
    if self.source:
      data['source'] = self.source
    if self.geo:
      data['geo'] = self.geo
    if self.place:
      data['place'] = self.place
    if self.coordinates:
      data['coordinates'] = self.coordinates
    if self.contributors:
      data['contributors'] = self.contributors
    if self.hashtags:
      data['hashtags'] = [h.text for h in self.hashtags]
    if self.retweeted_status:
      data['retweeted_status'] = self.retweeted_status.AsDict()
    if self.retweet_count:
      data['retweet_count'] = self.retweet_count
    if self.urls:
      data['urls'] = dict([(url.url, url.expanded_url) for url in self.urls])
    if self.user_mentions:
      data['user_mentions'] = [um.AsDict() for um in self.user_mentions]
    return data

  @staticmethod
  def NewFromJsonDict(data):
    '''Create a new instance based on a JSON dict.

    Args:
      data: A JSON dict, as converted from the JSON in the twitter API

    Returns:
      A twitter.Status instance
    '''
    if 'user' in data:
      user = User.NewFromJsonDict(data['user'])
    else:
      user = None
    if 'retweeted_status' in data:
      retweeted_status = Status.NewFromJsonDict(data['retweeted_status'])
    else:
      retweeted_status = None
    urls = None
    user_mentions = None
    hashtags = None
    if 'entities' in data:
      if 'urls' in data['entities']:
        urls = [Url.NewFromJsonDict(u) for u in data['entities']['urls']]
      if 'user_mentions' in data['entities']:
        user_mentions = [User.NewFromJsonDict(u) for u in data['entities']['user_mentions']]
      if 'hashtags' in data['entities']:
        hashtags = [Hashtag.NewFromJsonDict(h) for h in data['entities']['hashtags']]
    return Status(created_at=data.get('created_at', None),
                  favorited=data.get('favorited', None),
                  id=data.get('id', None),
                  text=data.get('text', None),
                  location=data.get('location', None),
                  in_reply_to_screen_name=data.get('in_reply_to_screen_name', None),
                  in_reply_to_user_id=data.get('in_reply_to_user_id', None),
                  in_reply_to_status_id=data.get('in_reply_to_status_id', None),
                  truncated=data.get('truncated', None),
                  retweeted=data.get('retweeted', None),
                  source=data.get('source', None),
                  user=user,
                  urls=urls,
                  user_mentions=user_mentions,
                  hashtags=hashtags,
                  geo=data.get('geo', None),
                  place=data.get('place', None),
                  coordinates=data.get('coordinates', None),
                  contributors=data.get('contributors', None),
                  retweeted_status=retweeted_status,
                  retweet_count=data.get('retweet_count', None))
class User(object):
  '''A class representing the User structure used by the twitter API.

  The User structure exposes the following properties:

    user.id
    user.name
    user.screen_name
    user.location
    user.description
    user.profile_image_url
    user.profile_background_tile
    user.profile_background_image_url
    user.profile_sidebar_fill_color
    user.profile_background_color
    user.profile_link_color
    user.profile_text_color
    user.protected
    user.utc_offset
    user.time_zone
    user.url
    user.status
    user.statuses_count
    user.followers_count
    user.friends_count
    user.favourites_count
    user.geo_enabled
    user.verified
    user.lang
    user.notifications
    user.contributors_enabled
    user.created_at
    user.listed_count
  '''
  def __init__(self,
               id=None,
               name=None,
               screen_name=None,
               location=None,
               description=None,
               profile_image_url=None,
               profile_background_tile=None,
               profile_background_image_url=None,
               profile_sidebar_fill_color=None,
               profile_background_color=None,
               profile_link_color=None,
               profile_text_color=None,
               protected=None,
               utc_offset=None,
               time_zone=None,
               followers_count=None,
               friends_count=None,
               statuses_count=None,
               favourites_count=None,
               url=None,
               status=None,
               geo_enabled=None,
               verified=None,
               lang=None,
               notifications=None,
               contributors_enabled=None,
               created_at=None,
               listed_count=None):
    '''An object to hold a Twitter user.

    This class is normally instantiated by the twitter.Api class and
    returned in a sequence.  All arguments are optional and default to
    None; each one maps directly to the attribute of the same name.
    '''
    self.id = id
    self.name = name
    self.screen_name = screen_name
    self.location = location
    self.description = description
    self.profile_image_url = profile_image_url
    self.profile_background_tile = profile_background_tile
    self.profile_background_image_url = profile_background_image_url
    self.profile_sidebar_fill_color = profile_sidebar_fill_color
    self.profile_background_color = profile_background_color
    self.profile_link_color = profile_link_color
    self.profile_text_color = profile_text_color
    self.protected = protected
    self.utc_offset = utc_offset
    self.time_zone = time_zone
    self.followers_count = followers_count
    self.friends_count = friends_count
    self.statuses_count = statuses_count
    self.favourites_count = favourites_count
    self.url = url
    self.status = status
    self.geo_enabled = geo_enabled
    self.verified = verified
    self.lang = lang
    self.notifications = notifications
    self.contributors_enabled = contributors_enabled
    self.created_at = created_at
    self.listed_count = listed_count

  def GetId(self):
    '''Get the unique id of this user.

    Returns:
      The unique id of this user
    '''
    return self._id

  def SetId(self, id):
    '''Set the unique id of this user.

    Args:
      id: The unique id of this user.
    '''
    self._id = id

  id = property(GetId, SetId,
                doc='The unique id of this user.')

  def GetName(self):
    '''Get the real name of this user.

    Returns:
      The real name of this user
    '''
    return self._name

  def SetName(self, name):
    '''Set the real name of this user.

    Args:
      name: The real name of this user
    '''
    self._name = name

  name = property(GetName, SetName,
                  doc='The real name of this user.')

  def GetScreenName(self):
    '''Get the short twitter name of this user.

    Returns:
      The short twitter name of this user
    '''
    return self._screen_name

  def SetScreenName(self, screen_name):
    '''Set the short twitter name of this user.

    Args:
      screen_name: the short twitter name of this user
    '''
    self._screen_name = screen_name

  screen_name = property(GetScreenName, SetScreenName,
                         doc='The short twitter name of this user.')

  def GetLocation(self):
    '''Get the geographic location of this user.

    Returns:
      The geographic location of this user
    '''
    return self._location

  def SetLocation(self, location):
    '''Set the geographic location of this user.

    Args:
      location: The geographic location of this user
    '''
    self._location = location

  location = property(GetLocation, SetLocation,
                      doc='The geographic location of this user.')

  def GetDescription(self):
    '''Get the short text description of this user.

    Returns:
      The short text description of this user
    '''
    return self._description

  def SetDescription(self, description):
    '''Set the short text description of this user.

    Args:
      description: The short text description of this user
    '''
    self._description = description

  description = property(GetDescription, SetDescription,
                         doc='The short text description of this user.')

  def GetUrl(self):
    '''Get the homepage url of this user.

    Returns:
      The homepage url of this user
    '''
    return self._url

  def SetUrl(self, url):
    '''Set the homepage url of this user.

    Args:
      url: The homepage url of this user
    '''
    self._url = url

  url = property(GetUrl, SetUrl,
                 doc='The homepage url of this user.')

  def GetProfileImageUrl(self):
    '''Get the url of the thumbnail of this user.

    Returns:
      The url of the thumbnail of this user
    '''
    return self._profile_image_url

  def SetProfileImageUrl(self, profile_image_url):
    '''Set the url of the thumbnail of this user.

    Args:
      profile_image_url: The url of the thumbnail of this user
    '''
    self._profile_image_url = profile_image_url

  profile_image_url = property(GetProfileImageUrl, SetProfileImageUrl,
                               doc='The url of the thumbnail of this user.')

  def GetProfileBackgroundTile(self):
    '''Boolean for whether to tile the profile background image.

    Returns:
      True if the background is to be tiled, False if not, None if unset.
    '''
    return self._profile_background_tile

  def SetProfileBackgroundTile(self, profile_background_tile):
    '''Set the boolean flag for whether to tile the profile background image.

    Args:
      profile_background_tile: Boolean flag for whether to tile or not.
    '''
    self._profile_background_tile = profile_background_tile

  profile_background_tile = property(GetProfileBackgroundTile, SetProfileBackgroundTile,
                                     doc='Boolean for whether to tile the background image.')

  def GetProfileBackgroundImageUrl(self):
    '''Get the url of the profile background image of this user.'''
    return self._profile_background_image_url

  def SetProfileBackgroundImageUrl(self, profile_background_image_url):
    '''Set the url of the profile background image of this user.'''
    self._profile_background_image_url = profile_background_image_url

  profile_background_image_url = property(GetProfileBackgroundImageUrl, SetProfileBackgroundImageUrl,
                                          doc='The url of the profile background of this user.')

  def GetProfileSidebarFillColor(self):
    '''Get the profile sidebar fill color of this user.'''
    return self._profile_sidebar_fill_color

  def SetProfileSidebarFillColor(self, profile_sidebar_fill_color):
    '''Set the profile sidebar fill color of this user.'''
    self._profile_sidebar_fill_color = profile_sidebar_fill_color

  profile_sidebar_fill_color = property(GetProfileSidebarFillColor, SetProfileSidebarFillColor)

  def GetProfileBackgroundColor(self):
    '''Get the profile background color of this user.'''
    return self._profile_background_color

  def SetProfileBackgroundColor(self, profile_background_color):
    '''Set the profile background color of this user.'''
    self._profile_background_color = profile_background_color

  profile_background_color = property(GetProfileBackgroundColor, SetProfileBackgroundColor)

  def GetProfileLinkColor(self):
    '''Get the profile link color of this user.'''
    return self._profile_link_color

  def SetProfileLinkColor(self, profile_link_color):
    '''Set the profile link color of this user.'''
    self._profile_link_color = profile_link_color

  profile_link_color = property(GetProfileLinkColor, SetProfileLinkColor)

  def GetProfileTextColor(self):
    '''Get the profile text color of this user.'''
    return self._profile_text_color

  def SetProfileTextColor(self, profile_text_color):
    '''Set the profile text color of this user.'''
    self._profile_text_color = profile_text_color

  profile_text_color = property(GetProfileTextColor, SetProfileTextColor)

  def GetProtected(self):
    '''Get whether this user's updates are protected.'''
    return self._protected

  def SetProtected(self, protected):
    '''Set whether this user's updates are protected.'''
    self._protected = protected

  protected = property(GetProtected, SetProtected)

  def GetUtcOffset(self):
    '''Get this user's UTC offset, in seconds.'''
    return self._utc_offset

  def SetUtcOffset(self, utc_offset):
    '''Set this user's UTC offset, in seconds.'''
    self._utc_offset = utc_offset

  utc_offset = property(GetUtcOffset, SetUtcOffset)

  def GetTimeZone(self):
    '''Returns the current time zone string for the user.

    Returns:
      The descriptive time zone string for the user.
    '''
    return self._time_zone

  def SetTimeZone(self, time_zone):
    '''Sets the user's time zone string.

    Args:
      time_zone:
        The descriptive time zone to assign for the user.
    '''
    self._time_zone = time_zone

  time_zone = property(GetTimeZone, SetTimeZone)

  def GetStatus(self):
    '''Get the latest twitter.Status of this user.

    Returns:
      The latest twitter.Status of this user
    '''
    return self._status

  def SetStatus(self, status):
    '''Set the latest twitter.Status of this user.

    Args:
      status:
        The latest twitter.Status of this user
    '''
    self._status = status

  status = property(GetStatus, SetStatus,
                    doc='The latest twitter.Status of this user.')

  def GetFriendsCount(self):
    '''Get the friend count for this user.

    Returns:
      The number of users this user has befriended.
    '''
    return self._friends_count

  def SetFriendsCount(self, count):
    '''Set the friend count for this user.

    Args:
      count:
        The number of users this user has befriended.
    '''
    self._friends_count = count

  friends_count = property(GetFriendsCount, SetFriendsCount,
                           doc='The number of friends for this user.')

  def GetListedCount(self):
    '''Get the listed count for this user.

    Returns:
      The number of lists this user belongs to.
    '''
    return self._listed_count

  def SetListedCount(self, count):
    '''Set the listed count for this user.

    Args:
      count:
        The number of lists this user belongs to.
    '''
    self._listed_count = count

  listed_count = property(GetListedCount, SetListedCount,
                          doc='The number of lists this user belongs to.')

  def GetFollowersCount(self):
    '''Get the follower count for this user.

    Returns:
      The number of users following this user.
    '''
    return self._followers_count

  def SetFollowersCount(self, count):
    '''Set the follower count for this user.

    Args:
      count:
        The number of users following this user.
    '''
    self._followers_count = count

  followers_count = property(GetFollowersCount, SetFollowersCount,
                             doc='The number of users following this user.')

  def GetStatusesCount(self):
    '''Get the number of status updates for this user.

    Returns:
      The number of status updates for this user.
    '''
    return self._statuses_count

  def SetStatusesCount(self, count):
    '''Set the status update count for this user.

    Args:
      count:
        The number of updates for this user.
    '''
    self._statuses_count = count

  statuses_count = property(GetStatusesCount, SetStatusesCount,
                            doc='The number of updates for this user.')

  def GetFavouritesCount(self):
    '''Get the number of favourites for this user.

    Returns:
      The number of favourites for this user.
    '''
    return self._favourites_count

  def SetFavouritesCount(self, count):
    '''Set the favourite count for this user.

    Args:
      count:
        The number of favourites for this user.
    '''
    self._favourites_count = count

  favourites_count = property(GetFavouritesCount, SetFavouritesCount,
                              doc='The number of favourites for this user.')

  def GetGeoEnabled(self):
    '''Get the setting of geo_enabled for this user.

    Returns:
      True/False if Geo tagging is enabled
    '''
    return self._geo_enabled

  def SetGeoEnabled(self, geo_enabled):
    '''Set the latest twitter.geo_enabled of this user.

    Args:
      geo_enabled:
        True/False if Geo tagging is to be enabled
    '''
    self._geo_enabled = geo_enabled

  geo_enabled = property(GetGeoEnabled, SetGeoEnabled,
                         doc='The value of twitter.geo_enabled for this user.')

  def GetVerified(self):
    '''Get the setting of verified for this user.

    Returns:
      True/False if user is a verified account
    '''
    return self._verified

  def SetVerified(self, verified):
    '''Set twitter.verified for this user.

    Args:
      verified:
        True/False if user is a verified account
    '''
    self._verified = verified

  verified = property(GetVerified, SetVerified,
                      doc='The value of twitter.verified for this user.')

  def GetLang(self):
    '''Get the setting of lang for this user.

    Returns:
      language code of the user
    '''
    return self._lang

  def SetLang(self, lang):
    '''Set twitter.lang for this user.

    Args:
      lang:
        language code for the user
    '''
    self._lang = lang

  lang = property(GetLang, SetLang,
                  doc='The value of twitter.lang for this user.')

  def GetNotifications(self):
    '''Get the setting of notifications for this user.

    Returns:
      True/False for the notifications setting of the user
    '''
    return self._notifications

  def SetNotifications(self, notifications):
    '''Set twitter.notifications for this user.

    Args:
      notifications:
        True/False notifications setting for the user
    '''
    self._notifications = notifications

  notifications = property(GetNotifications, SetNotifications,
                           doc='The value of twitter.notifications for this user.')

  def GetContributorsEnabled(self):
    '''Get the setting of contributors_enabled for this user.

    Returns:
      True/False contributors_enabled of the user
    '''
    return self._contributors_enabled

  def SetContributorsEnabled(self, contributors_enabled):
    '''Set twitter.contributors_enabled for this user.

    Args:
      contributors_enabled:
        True/False contributors_enabled setting for the user
    '''
    self._contributors_enabled = contributors_enabled

  contributors_enabled = property(GetContributorsEnabled, SetContributorsEnabled,
                                  doc='The value of twitter.contributors_enabled for this user.')

  def GetCreatedAt(self):
    '''Get the setting of created_at for this user.

    Returns:
      created_at value of the user
    '''
    return self._created_at

  def SetCreatedAt(self, created_at):
    '''Set twitter.created_at for this user.

    Args:
      created_at:
        created_at value for the user
    '''
    self._created_at = created_at

  created_at = property(GetCreatedAt, SetCreatedAt,
                        doc='The value of twitter.created_at for this user.')

  def __ne__(self, other):
    return not self.__eq__(other)

  def __eq__(self, other):
    try:
      return other and \
             self.id == other.id and \
             self.name == other.name and \
             self.screen_name == other.screen_name and \
             self.location == other.location and \
             self.description == other.description and \
             self.profile_image_url == other.profile_image_url and \
             self.profile_background_tile == other.profile_background_tile and \
             self.profile_background_image_url == other.profile_background_image_url and \
             self.profile_sidebar_fill_color == other.profile_sidebar_fill_color and \
             self.profile_background_color == other.profile_background_color and \
             self.profile_link_color == other.profile_link_color and \
             self.profile_text_color == other.profile_text_color and \
             self.protected == other.protected and \
             self.utc_offset == other.utc_offset and \
             self.time_zone == other.time_zone and \
             self.url == other.url and \
             self.statuses_count == other.statuses_count and \
             self.followers_count == other.followers_count and \
             self.favourites_count == other.favourites_count and \
             self.friends_count == other.friends_count and \
             self.status == other.status and \
             self.geo_enabled == other.geo_enabled and \
             self.verified == other.verified and \
             self.lang == other.lang and \
             self.notifications == other.notifications and \
             self.contributors_enabled == other.contributors_enabled and \
             self.created_at == other.created_at and \
             self.listed_count == other.listed_count
    except AttributeError:
      return False

  def __str__(self):
    '''A string representation of this twitter.User instance.

    The return value is the same as the JSON string representation.

    Returns:
      A string representation of this twitter.User instance.
    '''
    return self.AsJsonString()

  def AsJsonString(self):
    '''A JSON string representation of this twitter.User instance.

    Returns:
      A JSON string representation of this twitter.User instance
    '''
    return simplejson.dumps(self.AsDict(), sort_keys=True)

  def AsDict(self):
    '''A dict representation of this twitter.User instance.

    The return value uses the same key names as the JSON representation.

    Return:
      A dict representing this twitter.User instance
    '''
    data = {}
    if self.id:
      data['id'] = self.id
    if self.name:
      data['name'] = self.name
    if self.screen_name:
      data['screen_name'] = self.screen_name
    if self.location:
      data['location'] = self.location
    if self.description:
      data['description'] = self.description
    if self.profile_image_url:
      data['profile_image_url'] = self.profile_image_url
    if self.profile_background_tile is not None:
      data['profile_background_tile'] = self.profile_background_tile
    # Fixed: profile_background_image_url was previously stored under the
    # wrong key ('profile_sidebar_fill_color'), and the sidebar fill color
    # itself was never serialized.
    if self.profile_background_image_url:
      data['profile_background_image_url'] = self.profile_background_image_url
    if self.profile_sidebar_fill_color:
      data['profile_sidebar_fill_color'] = self.profile_sidebar_fill_color
    if self.profile_background_color:
      data['profile_background_color'] = self.profile_background_color
    if self.profile_link_color:
      data['profile_link_color'] = self.profile_link_color
    if self.profile_text_color:
      data['profile_text_color'] = self.profile_text_color
    if self.protected is not None:
      data['protected'] = self.protected
    if self.utc_offset:
      data['utc_offset'] = self.utc_offset
    if self.time_zone:
      data['time_zone'] = self.time_zone
    if self.url:
      data['url'] = self.url
    if self.status:
      data['status'] = self.status.AsDict()
    if self.friends_count:
      data['friends_count'] = self.friends_count
    if self.followers_count:
      data['followers_count'] = self.followers_count
    if self.statuses_count:
      data['statuses_count'] = self.statuses_count
    if self.favourites_count:
      data['favourites_count'] = self.favourites_count
    if self.geo_enabled:
      data['geo_enabled'] = self.geo_enabled
    if self.verified:
      data['verified'] = self.verified
    if self.lang:
      data['lang'] = self.lang
    if self.notifications:
      data['notifications'] = self.notifications
    if self.contributors_enabled:
      data['contributors_enabled'] = self.contributors_enabled
    if self.created_at:
      data['created_at'] = self.created_at
    if self.listed_count:
      data['listed_count'] = self.listed_count
    return data

  @staticmethod
  def NewFromJsonDict(data):
    '''Create a new instance based on a JSON dict.

    Args:
      data:
        A JSON dict, as converted from the JSON in the twitter API

    Returns:
      A twitter.User instance
    '''
    if 'status' in data:
      status = Status.NewFromJsonDict(data['status'])
    else:
      status = None
    return User(id=data.get('id', None),
                name=data.get('name', None),
                screen_name=data.get('screen_name', None),
                location=data.get('location', None),
                description=data.get('description', None),
                statuses_count=data.get('statuses_count', None),
                followers_count=data.get('followers_count', None),
                favourites_count=data.get('favourites_count', None),
                friends_count=data.get('friends_count', None),
                profile_image_url=data.get('profile_image_url', None),
                profile_background_tile=data.get('profile_background_tile', None),
                profile_background_image_url=data.get('profile_background_image_url', None),
                profile_sidebar_fill_color=data.get('profile_sidebar_fill_color', None),
                profile_background_color=data.get('profile_background_color', None),
                profile_link_color=data.get('profile_link_color', None),
                profile_text_color=data.get('profile_text_color', None),
                protected=data.get('protected', None),
                utc_offset=data.get('utc_offset', None),
                time_zone=data.get('time_zone', None),
                url=data.get('url', None),
                status=status,
                geo_enabled=data.get('geo_enabled', None),
                verified=data.get('verified', None),
                lang=data.get('lang', None),
                notifications=data.get('notifications', None),
                contributors_enabled=data.get('contributors_enabled', None),
                created_at=data.get('created_at', None),
                listed_count=data.get('listed_count', None))
class List(object):
  '''A class representing the List structure used by the twitter API.

  The List structure exposes the following properties:

    list.id
    list.name
    list.slug
    list.description
    list.full_name
    list.mode
    list.uri
    list.member_count
    list.subscriber_count
    list.following
  '''
  def __init__(self,
               id=None,
               name=None,
               slug=None,
               description=None,
               full_name=None,
               mode=None,
               uri=None,
               member_count=None,
               subscriber_count=None,
               following=None,
               user=None):
    # Each assignment routes through the matching property below, which
    # stores the value on the underscore-prefixed attribute.
    self.id = id
    self.name = name
    self.slug = slug
    self.description = description
    self.full_name = full_name
    self.mode = mode
    self.uri = uri
    self.member_count = member_count
    self.subscriber_count = subscriber_count
    self.following = following
    self.user = user

  def GetId(self):
    '''Return the unique id of this list.'''
    return self._id

  def SetId(self, id):
    '''Store the unique id of this list.'''
    self._id = id

  id = property(GetId, SetId, doc='The unique id of this list.')

  def GetName(self):
    '''Return the real name of this list.'''
    return self._name

  def SetName(self, name):
    '''Store the real name of this list.'''
    self._name = name

  name = property(GetName, SetName, doc='The real name of this list.')

  def GetSlug(self):
    '''Return the slug of this list.'''
    return self._slug

  def SetSlug(self, slug):
    '''Store the slug of this list.'''
    self._slug = slug

  slug = property(GetSlug, SetSlug, doc='The slug of this list.')

  def GetDescription(self):
    '''Return the description of this list.'''
    return self._description

  def SetDescription(self, description):
    '''Store the description of this list.'''
    self._description = description

  description = property(GetDescription, SetDescription,
                         doc='The description of this list.')

  def GetFull_name(self):
    '''Return the full_name of this list.'''
    return self._full_name

  def SetFull_name(self, full_name):
    '''Store the full_name of this list.'''
    self._full_name = full_name

  full_name = property(GetFull_name, SetFull_name,
                       doc='The full_name of this list.')

  def GetMode(self):
    '''Return the mode of this list.'''
    return self._mode

  def SetMode(self, mode):
    '''Store the mode of this list.'''
    self._mode = mode

  mode = property(GetMode, SetMode, doc='The mode of this list.')

  def GetUri(self):
    '''Return the uri of this list.'''
    return self._uri

  def SetUri(self, uri):
    '''Store the uri of this list.'''
    self._uri = uri

  uri = property(GetUri, SetUri, doc='The uri of this list.')

  def GetMember_count(self):
    '''Return the member_count of this list.'''
    return self._member_count

  def SetMember_count(self, member_count):
    '''Store the member_count of this list.'''
    self._member_count = member_count

  member_count = property(GetMember_count, SetMember_count,
                          doc='The member_count of this list.')

  def GetSubscriber_count(self):
    '''Return the subscriber_count of this list.'''
    return self._subscriber_count

  def SetSubscriber_count(self, subscriber_count):
    '''Store the subscriber_count of this list.'''
    self._subscriber_count = subscriber_count

  subscriber_count = property(GetSubscriber_count, SetSubscriber_count,
                              doc='The subscriber_count of this list.')

  def GetFollowing(self):
    '''Return the following status of this list.'''
    return self._following

  def SetFollowing(self, following):
    '''Store the following status of this list.'''
    self._following = following

  following = property(GetFollowing, SetFollowing,
                       doc='The following status of this list.')

  def GetUser(self):
    '''Return the owner of this list.'''
    return self._user

  def SetUser(self, user):
    '''Store the owner of this list.'''
    self._user = user

  user = property(GetUser, SetUser, doc='The owner of this list.')

  def __ne__(self, other):
    return not self.__eq__(other)

  def __eq__(self, other):
    # Falsy `other` (e.g. None) short-circuits; a truthy object lacking the
    # expected attributes raises AttributeError and compares unequal.
    try:
      return other and \
             (self.id, self.name, self.slug, self.description,
              self.full_name, self.mode, self.uri, self.member_count,
              self.subscriber_count, self.following, self.user) == \
             (other.id, other.name, other.slug, other.description,
              other.full_name, other.mode, other.uri, other.member_count,
              other.subscriber_count, other.following, other.user)
    except AttributeError:
      return False

  def __str__(self):
    '''A string representation of this twitter.List instance.

    The return value is the same as the JSON string representation.

    Returns:
      A string representation of this twitter.List instance.
    '''
    return self.AsJsonString()

  def AsJsonString(self):
    '''A JSON string representation of this twitter.List instance.

    Returns:
      A JSON string representation of this twitter.List instance
    '''
    return simplejson.dumps(self.AsDict(), sort_keys=True)

  def AsDict(self):
    '''A dict representation of this twitter.List instance.

    The return value uses the same key names as the JSON representation.

    Return:
      A dict representing this twitter.List instance
    '''
    data = {}
    # These keys are only emitted for truthy values...
    for key in ('id', 'name', 'slug', 'description', 'full_name',
                'mode', 'uri'):
      value = getattr(self, key)
      if value:
        data[key] = value
    # ...while these only need to be non-None (0 and False are meaningful).
    for key in ('member_count', 'subscriber_count', 'following', 'user'):
      value = getattr(self, key)
      if value is not None:
        data[key] = value
    return data

  @staticmethod
  def NewFromJsonDict(data):
    '''Create a new instance based on a JSON dict.

    Args:
      data:
        A JSON dict, as converted from the JSON in the twitter API

    Returns:
      A twitter.List instance
    '''
    # The embedded owner (when present) needs its own deserialization pass.
    if 'user' in data:
      owner = User.NewFromJsonDict(data['user'])
    else:
      owner = None
    fields = ('id', 'name', 'slug', 'description', 'full_name', 'mode',
              'uri', 'member_count', 'subscriber_count', 'following')
    kwargs = dict((field, data.get(field, None)) for field in fields)
    return List(user=owner, **kwargs)
class DirectMessage(object):
  '''A class representing the DirectMessage structure used by the twitter API.

  The DirectMessage structure exposes the following properties:

    direct_message.id
    direct_message.created_at
    direct_message.created_at_in_seconds # read only
    direct_message.sender_id
    direct_message.sender_screen_name
    direct_message.recipient_id
    direct_message.recipient_screen_name
    direct_message.text
  '''
  def __init__(self,
               id=None,
               created_at=None,
               sender_id=None,
               sender_screen_name=None,
               recipient_id=None,
               recipient_screen_name=None,
               text=None):
    '''An object to hold a Twitter direct message.

    This class is normally instantiated by the twitter.Api class and
    returned in a sequence.

    Note: Dates are posted in the form "Sat Jan 27 04:17:38 +0000 2007"

    Args:
      id:
        The unique id of this direct message. [Optional]
      created_at:
        The time this direct message was posted. [Optional]
      sender_id:
        The id of the twitter user that sent this message. [Optional]
      sender_screen_name:
        The name of the twitter user that sent this message. [Optional]
      recipient_id:
        The id of the twitter that received this message. [Optional]
      recipient_screen_name:
        The name of the twitter that received this message. [Optional]
      text:
        The text of this direct message. [Optional]
    '''
    # Each assignment routes through the matching property below.
    self.id = id
    self.created_at = created_at
    self.sender_id = sender_id
    self.sender_screen_name = sender_screen_name
    self.recipient_id = recipient_id
    self.recipient_screen_name = recipient_screen_name
    self.text = text

  def GetId(self):
    '''Return the unique id of this direct message.'''
    return self._id

  def SetId(self, id):
    '''Store the unique id of this direct message.'''
    self._id = id

  id = property(GetId, SetId, doc='The unique id of this direct message.')

  def GetCreatedAt(self):
    '''Return the time this direct message was posted.'''
    return self._created_at

  def SetCreatedAt(self, created_at):
    '''Store the time this direct message was posted.'''
    self._created_at = created_at

  created_at = property(GetCreatedAt, SetCreatedAt,
                        doc='The time this direct message was posted.')

  def GetCreatedAtInSeconds(self):
    '''Return the posting time as seconds since the epoch.

    Parses the RFC-822 style date string held in created_at.
    '''
    return calendar.timegm(rfc822.parsedate(self.created_at))

  created_at_in_seconds = property(GetCreatedAtInSeconds,
                                   doc="The time this direct message was "
                                       "posted, in seconds since the epoch")

  def GetSenderId(self):
    '''Return the unique sender id of this direct message.'''
    return self._sender_id

  def SetSenderId(self, sender_id):
    '''Store the unique sender id of this direct message.'''
    self._sender_id = sender_id

  sender_id = property(GetSenderId, SetSenderId,
                       doc='The unique sender id of this direct message.')

  def GetSenderScreenName(self):
    '''Return the unique sender screen name of this direct message.'''
    return self._sender_screen_name

  def SetSenderScreenName(self, sender_screen_name):
    '''Store the unique sender screen name of this direct message.'''
    self._sender_screen_name = sender_screen_name

  sender_screen_name = property(GetSenderScreenName, SetSenderScreenName,
                                doc='The unique sender screen name of this direct message.')

  def GetRecipientId(self):
    '''Return the unique recipient id of this direct message.'''
    return self._recipient_id

  def SetRecipientId(self, recipient_id):
    '''Store the unique recipient id of this direct message.'''
    self._recipient_id = recipient_id

  recipient_id = property(GetRecipientId, SetRecipientId,
                          doc='The unique recipient id of this direct message.')

  def GetRecipientScreenName(self):
    '''Return the unique recipient screen name of this direct message.'''
    return self._recipient_screen_name

  def SetRecipientScreenName(self, recipient_screen_name):
    '''Store the unique recipient screen name of this direct message.'''
    self._recipient_screen_name = recipient_screen_name

  recipient_screen_name = property(GetRecipientScreenName, SetRecipientScreenName,
                                   doc='The unique recipient screen name of this direct message.')

  def GetText(self):
    '''Return the text of this direct message.'''
    return self._text

  def SetText(self, text):
    '''Store the text of this direct message.'''
    self._text = text

  text = property(GetText, SetText,
                  doc='The text of this direct message')

  def __ne__(self, other):
    return not self.__eq__(other)

  def __eq__(self, other):
    # Falsy `other` short-circuits; a truthy object missing the expected
    # attributes raises AttributeError and compares unequal.
    try:
      return other and \
             (self.id, self.created_at, self.sender_id,
              self.sender_screen_name, self.recipient_id,
              self.recipient_screen_name, self.text) == \
             (other.id, other.created_at, other.sender_id,
              other.sender_screen_name, other.recipient_id,
              other.recipient_screen_name, other.text)
    except AttributeError:
      return False

  def __str__(self):
    '''A string representation of this twitter.DirectMessage instance.

    The return value is the same as the JSON string representation.

    Returns:
      A string representation of this twitter.DirectMessage instance.
    '''
    return self.AsJsonString()

  def AsJsonString(self):
    '''A JSON string representation of this twitter.DirectMessage instance.

    Returns:
      A JSON string representation of this twitter.DirectMessage instance
    '''
    return simplejson.dumps(self.AsDict(), sort_keys=True)

  def AsDict(self):
    '''A dict representation of this twitter.DirectMessage instance.

    The return value uses the same key names as the JSON representation.

    Return:
      A dict representing this twitter.DirectMessage instance
    '''
    data = {}
    # Only truthy fields are serialized.
    for key in ('id', 'created_at', 'sender_id', 'sender_screen_name',
                'recipient_id', 'recipient_screen_name', 'text'):
      value = getattr(self, key)
      if value:
        data[key] = value
    return data

  @staticmethod
  def NewFromJsonDict(data):
    '''Create a new instance based on a JSON dict.

    Args:
      data:
        A JSON dict, as converted from the JSON in the twitter API

    Returns:
      A twitter.DirectMessage instance
    '''
    fields = ('id', 'created_at', 'sender_id', 'sender_screen_name',
              'recipient_id', 'recipient_screen_name', 'text')
    kwargs = dict((field, data.get(field, None)) for field in fields)
    return DirectMessage(**kwargs)
class Hashtag(object):
  '''A class representing a twitter hashtag.'''
  def __init__(self, text=None):
    # The hashtag text, without any leading '#'
    # (assumption based on the JSON field name -- TODO confirm upstream).
    self.text = text

  @staticmethod
  def NewFromJsonDict(data):
    '''Create a new instance based on a JSON dict.

    Args:
      data:
        A JSON dict, as converted from the JSON in the twitter API

    Returns:
      A twitter.Hashtag instance
    '''
    return Hashtag(text=data.get('text', None))
class Trend(object):
  '''A class representing a trending topic.'''
  def __init__(self, name=None, query=None, timestamp=None):
    self.name = name            # display name of the topic
    self.query = query          # search query that produces the topic
    self.timestamp = timestamp  # when the topic was trending
  def __str__(self):
    return 'Name: %s\nQuery: %s\nTimestamp: %s\n' % (self.name, self.query, self.timestamp)
  def __ne__(self, other):
    return not self.__eq__(other)
  def __eq__(self, other):
    # Falsy `other` short-circuits; objects missing the expected
    # attributes raise AttributeError and compare unequal.
    try:
      return other and \
             (self.name, self.query, self.timestamp) == \
             (other.name, other.query, other.timestamp)
    except AttributeError:
      return False
  @staticmethod
  def NewFromJsonDict(data, timestamp=None):
    '''Create a new instance based on a JSON dict.

    Args:
      data:
        A JSON dict
      timestamp:
        Gets set as the timestamp property of the new object

    Returns:
      A twitter.Trend object
    '''
    return Trend(name=data.get('name', None),
                 query=data.get('query', None),
                 timestamp=timestamp)
class Url(object):
  '''A class representing an URL contained in a tweet.'''
  def __init__(self, url=None, expanded_url=None):
    self.url = url                    # the link as it appears in the tweet
    self.expanded_url = expanded_url  # the resolved destination, if provided
  @staticmethod
  def NewFromJsonDict(data):
    '''Create a new instance based on a JSON dict.

    Args:
      data:
        A JSON dict, as converted from the JSON in the twitter API

    Returns:
      A twitter.Url instance
    '''
    return Url(url=data.get('url', None),
               expanded_url=data.get('expanded_url', None))
class Api(object):
'''A python interface into the Twitter API
By default, the Api caches results for 1 minute.
Example usage:
To create an instance of the twitter.Api class, with no authentication:
>>> import twitter
>>> api = twitter.Api()
To fetch the most recently posted public twitter status messages:
>>> statuses = api.GetPublicTimeline()
>>> print [s.user.name for s in statuses]
[u'DeWitt', u'Kesuke Miyagi', u'ev', u'Buzz Andersen', u'Biz Stone'] #...
To fetch a single user's public status messages, where "user" is either
a Twitter "short name" or their user id.
>>> statuses = api.GetUserTimeline(user)
>>> print [s.text for s in statuses]
To use authentication, instantiate the twitter.Api class with a
consumer key and secret; and the oAuth key and secret:
>>> api = twitter.Api(consumer_key='twitter consumer key',
consumer_secret='twitter consumer secret',
access_token_key='the_key_given',
access_token_secret='the_key_secret')
To fetch your friends (after being authenticated):
>>> users = api.GetFriends()
>>> print [u.name for u in users]
To post a twitter status message (after being authenticated):
>>> status = api.PostUpdate('I love python-twitter!')
>>> print status.text
I love python-twitter!
There are many other methods, including:
>>> api.PostUpdates(status)
>>> api.PostDirectMessage(user, text)
>>> api.GetUser(user)
>>> api.GetReplies()
>>> api.GetUserTimeline(user)
>>> api.GetStatus(id)
>>> api.DestroyStatus(id)
>>> api.GetFriendsTimeline(user)
>>> api.GetFriends(user)
>>> api.GetFollowers()
>>> api.GetFeatured()
>>> api.GetDirectMessages()
>>> api.PostDirectMessage(user, text)
>>> api.DestroyDirectMessage(id)
>>> api.DestroyFriendship(user)
>>> api.CreateFriendship(user)
>>> api.GetUserByEmail(email)
>>> api.VerifyCredentials()
'''
DEFAULT_CACHE_TIMEOUT = 60 # cache for 1 minute
_API_REALM = 'Twitter API'
def __init__(self,
             consumer_key=None,
             consumer_secret=None,
             access_token_key=None,
             access_token_secret=None,
             input_encoding=None,
             request_headers=None,
             cache=DEFAULT_CACHE,
             shortner=None,
             base_url=None,
             use_gzip_compression=False,
             debugHTTP=False):
  '''Instantiate a new twitter.Api object.

  Args:
    consumer_key:
      Your Twitter user's consumer_key.
    consumer_secret:
      Your Twitter user's consumer_secret.
    access_token_key:
      The oAuth access token key value you retrieved
      from running get_access_token.py.
    access_token_secret:
      The oAuth access token's secret, also retrieved
      from the get_access_token.py run.
    input_encoding:
      The encoding used to encode input strings. [Optional]
    request_header:
      A dictionary of additional HTTP request headers. [Optional]
    cache:
      The cache instance to use. Defaults to DEFAULT_CACHE.
      Use None to disable caching. [Optional]
    shortner:
      The shortner instance to use. Defaults to None.
      See shorten_url.py for an example shortner. [Optional]
    base_url:
      The base URL to use to contact the Twitter API.
      Defaults to https://api.twitter.com. [Optional]
    use_gzip_compression:
      Set to True to enable gzip compression for any call
      made to Twitter. Defaults to False. [Optional]
    debugHTTP:
      Set to True to enable debug output from urllib2 when performing
      any HTTP requests. Defaults to False. [Optional]

  Raises:
    TwitterError: if a consumer_key is supplied without a complete
      oAuth access token (key and secret).
  '''
  # NOTE(review): `shortner` is accepted but not stored here -- presumably a
  # SetUrlShortner-style setter exists elsewhere in the file; confirm.
  self.SetCache(cache)
  self._urllib = urllib2
  self._cache_timeout = Api.DEFAULT_CACHE_TIMEOUT
  self._input_encoding = input_encoding
  self._use_gzip = use_gzip_compression
  self._debugHTTP = debugHTTP
  self._oauth_consumer = None
  # Assumed length of a shortened link, used when sizing status text.
  self._shortlink_size = 19
  self._InitializeRequestHeaders(request_headers)
  self._InitializeUserAgent()
  self._InitializeDefaultParameters()
  if base_url is None:
    self.base_url = 'https://api.twitter.com/1'
  else:
    self.base_url = base_url
  # A consumer key without a full access token is a configuration error:
  # Twitter no longer offers unauthenticated API access for such clients.
  if consumer_key is not None and (access_token_key is None or
                                   access_token_secret is None):
    print >> sys.stderr, 'Twitter now requires an oAuth Access Token for API calls.'
    print >> sys.stderr, "If you're using this library from a command line utility, please"
    print >> sys.stderr, 'run the included get_access_token.py tool to generate one.'
    raise TwitterError('Twitter requires oAuth Access Token for all API access')
  self.SetCredentials(consumer_key, consumer_secret, access_token_key, access_token_secret)
def SetCredentials(self,
                   consumer_key,
                   consumer_secret,
                   access_token_key=None,
                   access_token_secret=None):
  '''Set the consumer_key and consumer_secret for this instance.

  Args:
    consumer_key:
      The consumer_key of the twitter account.
    consumer_secret:
      The consumer_secret for the twitter account.
    access_token_key:
      The oAuth access token key value you retrieved
      from running get_access_token.py.
    access_token_secret:
      The oAuth access token's secret, also retrieved
      from the get_access_token.py run.
  '''
  self._consumer_key = consumer_key
  self._consumer_secret = consumer_secret
  self._access_token_key = access_token_key
  self._access_token_secret = access_token_secret
  self._oauth_consumer = None
  credentials = (consumer_key, consumer_secret,
                 access_token_key, access_token_secret)
  # Only build the oAuth machinery when every credential is present;
  # otherwise the instance stays unauthenticated.
  if all(value is not None for value in credentials):
    self._signature_method_plaintext = oauth.SignatureMethod_PLAINTEXT()
    self._signature_method_hmac_sha1 = oauth.SignatureMethod_HMAC_SHA1()
    self._oauth_token = oauth.Token(key=access_token_key, secret=access_token_secret)
    self._oauth_consumer = oauth.Consumer(key=consumer_key, secret=consumer_secret)
def ClearCredentials(self):
  '''Clear any credentials held by this instance, leaving it unauthenticated.'''
  for attr in ('_consumer_key', '_consumer_secret',
               '_access_token_key', '_access_token_secret',
               '_oauth_consumer'):
    setattr(self, attr, None)
def GetPublicTimeline(self,
                      since_id=None,
                      include_rts=None,
                      include_entities=None):
  '''Fetch the sequence of public twitter.Status message for all users.

  Args:
    since_id:
      Returns results with an ID greater than (that is, more recent
      than) the specified ID. There are limits to the number of
      Tweets which can be accessed through the API. If the limit of
      Tweets has occured since the since_id, the since_id will be
      forced to the oldest ID available. [Optional]
    include_rts:
      If True, the timeline will contain native retweets (if they
      exist) in addition to the standard stream of tweets. [Optional]
    include_entities:
      If True, each tweet will include a node called "entities,".
      This node offers a variety of metadata about the tweet in a
      discreet structure, including: user_mentions, urls, and
      hashtags. [Optional]

  Returns:
    An sequence of twitter.Status instances, one for each message
  '''
  parameters = {}
  if since_id:
    parameters['since_id'] = since_id
  # Boolean flags are sent to the API as the literal value 1.
  for flag_name, flag in (('include_rts', include_rts),
                          ('include_entities', include_entities)):
    if flag:
      parameters[flag_name] = 1
  url = '%s/statuses/public_timeline.json' % self.base_url
  payload = self._FetchUrl(url, parameters=parameters)
  data = self._ParseAndCheckTwitter(payload)
  return [Status.NewFromJsonDict(entry) for entry in data]
def FilterPublicTimeline(self,
                         term,
                         since_id=None):
  '''Filter the public twitter timeline by a given search term on
  the local machine.

  Args:
    term:
      term to search by.
    since_id:
      Returns results with an ID greater than (that is, more recent
      than) the specified ID. There are limits to the number of
      Tweets which can be accessed through the API. If the limit of
      Tweets has occured since the since_id, the since_id will be
      forced to the oldest ID available. [Optional]

  Returns:
    A sequence of twitter.Status instances, one for each message
    containing the term
  '''
  # Case-insensitive substring match, performed locally on the fetched
  # public timeline rather than via the search API.
  needle = term.lower()
  return [status for status in self.GetPublicTimeline(since_id)
          if needle in status.text.lower()]
def GetSearch(self,
              term=None,
              geocode=None,
              since_id=None,
              per_page=15,
              page=1,
              lang="en",
              show_user="true",
              query_users=False):
  '''Return twitter search results for a given term.

  Args:
    term:
      term to search by. Optional if you include geocode.
    since_id:
      Returns results with an ID greater than (that is, more recent
      than) the specified ID. There are limits to the number of
      Tweets which can be accessed through the API. If the limit of
      Tweets has occured since the since_id, the since_id will be
      forced to the oldest ID available. [Optional]
    geocode:
      geolocation information in the form (latitude, longitude, radius)
      [Optional]
    per_page:
      number of results to return. Default is 15 [Optional]
    page:
      Specifies the page of results to retrieve.
      Note: there are pagination limits. [Optional]
    lang:
      language for results. Default is English [Optional]
    show_user:
      prefixes screen name in status
    query_users:
      If set to False, then all users only have screen_name and
      profile_image_url available.
      If set to True, all information of users are available,
      but it uses lots of request quota, one per status.

  Returns:
    A sequence of twitter.Status instances, one for each message containing
    the term
  '''
  parameters = {}
  if since_id:
    parameters['since_id'] = since_id
  # The search API requires a term and/or a geocode; with neither there
  # is nothing to ask for.
  if term is None and geocode is None:
    return []
  if term is not None:
    parameters['q'] = term
  if geocode is not None:
    parameters['geocode'] = ','.join(map(str, geocode))
  parameters['show_user'] = show_user
  parameters['lang'] = lang
  parameters['rpp'] = per_page
  parameters['page'] = page
  payload = self._FetchUrl('http://search.twitter.com/search.json',
                           parameters=parameters)
  data = self._ParseAndCheckTwitter(payload)
  results = []
  for entry in data['results']:
    status = Status.NewFromJsonDict(entry)
    if query_users:
      # Full user lookup costs one extra API request per status.
      status.user = self.GetUser(urllib.quote(entry['from_user']))
    else:
      # Search results only carry a minimal user record.
      status.user = User(screen_name=entry['from_user'],
                         profile_image_url=entry['profile_image_url'])
    results.append(status)
  return results
def GetTrendsCurrent(self, exclude=None):
  '''Get the current top trending topics.

  Args:
    exclude:
      Appends the exclude parameter as a request parameter.
      Currently only exclude=hashtags is supported. [Optional]

  Returns:
    A list with 10 entries. Each entry contains the twitter.
  '''
  parameters = {}
  if exclude:
    parameters['exclude'] = exclude
  url = '%s/trends/current.json' % self.base_url
  payload = self._FetchUrl(url, parameters=parameters)
  data = self._ParseAndCheckTwitter(payload)
  # The reply maps each timestamp to a list of trend dicts; flatten it,
  # tagging every Trend with the timestamp it was reported under.
  return [Trend.NewFromJsonDict(item, timestamp=stamp)
          for stamp in data['trends']
          for item in data['trends'][stamp]]
def GetTrendsWoeid(self, woeid, exclude=None):
  '''Return the top 10 trending topics for a specific WOEID, if trending
  information is available for it.

  Args:
    woeid:
      the Yahoo! Where On Earth ID for a location.
    exclude:
      Appends the exclude parameter as a request parameter.
      Currently only exclude=hashtags is supported. [Optional]

  Returns:
    A list with 10 entries. Each entry contains a Trend.
  '''
  parameters = {}
  if exclude:
    parameters['exclude'] = exclude
  url = '%s/trends/%s.json' % (self.base_url, woeid)
  payload = self._FetchUrl(url, parameters=parameters)
  data = self._ParseAndCheckTwitter(payload)
  # The reply is a one-element list; every trend shares its as_of stamp.
  as_of = data[0]['as_of']
  return [Trend.NewFromJsonDict(item, timestamp=as_of)
          for item in data[0]['trends']]
def GetTrendsDaily(self, exclude=None, startdate=None):
  '''Get the current top trending topics for each hour in a given day.

  Args:
    startdate:
      The start date for the report.
      Should be in the format YYYY-MM-DD. [Optional]
    exclude:
      Appends the exclude parameter as a request parameter.
      Currently only exclude=hashtags is supported. [Optional]

  Returns:
    A list with 24 entries. Each entry contains the twitter.
    Trend elements that were trending at the corresponding hour of the day.
  '''
  parameters = {}
  if exclude:
    parameters['exclude'] = exclude
  # Default to today's date (UTC) when no explicit start date was given.
  parameters['date'] = startdate or time.strftime('%Y-%m-%d', time.gmtime())
  url = '%s/trends/daily.json' % self.base_url
  payload = self._FetchUrl(url, parameters=parameters)
  data = self._ParseAndCheckTwitter(payload)
  # One slot per hour of the day; hours missing from the reply stay None.
  trends = [None] * 24
  for stamp in data['trends']:
    hour = int(time.strftime('%H', time.strptime(stamp, '%Y-%m-%d %H:%M')))
    trends[hour] = [Trend.NewFromJsonDict(item, timestamp=stamp)
                    for item in data['trends'][stamp]]
  return trends
def GetTrendsWeekly(self, exclude=None, startdate=None):
  '''Get the top 30 trending topics for each day in a given week.

  Args:
    startdate:
      The start date for the report.
      Should be in the format YYYY-MM-DD. [Optional]
    exclude:
      Appends the exclude parameter as a request parameter.
      Currently only exclude=hashtags is supported. [Optional]

  Returns:
    A list with each entry contains the twitter.
    Trend elements of trending topics for the corrsponding day of the week
  '''
  parameters = {}
  if exclude:
    parameters['exclude'] = exclude
  # Default to today's date (UTC) when no explicit start date was given.
  parameters['date'] = startdate or time.strftime('%Y-%m-%d', time.gmtime())
  url = '%s/trends/weekly.json' % self.base_url
  payload = self._FetchUrl(url, parameters=parameters)
  data = self._ParseAndCheckTwitter(payload)
  trends = [None] * 7
  # Map each reported date string to its epoch so the days can be
  # emitted in chronological order.
  epochs = dict([(calendar.timegm(time.strptime(day, '%Y-%m-%d')), day)
                 for day in data['trends']])
  for slot, epoch in enumerate(sorted(epochs.keys())):
    day = epochs[epoch]
    trends[slot] = [Trend.NewFromJsonDict(item, timestamp=day)
                    for item in data['trends'][day]]
  return trends
def GetFriendsTimeline(self,
                       user=None,
                       count=None,
                       page=None,
                       since_id=None,
                       retweets=None,
                       include_entities=None):
  '''Fetch the sequence of twitter.Status messages for a user's friends.

  The twitter.Api instance must be authenticated if the user is private.

  Args:
    user:
      Specifies the ID or screen name of the user for whom to return
      the friends_timeline. If not specified then the authenticated
      user set in the twitter.Api instance will be used. [Optional]
    count:
      Specifies the number of statuses to retrieve. May not be
      greater than 100. [Optional]
    page:
      Specifies the page of results to retrieve.
      Note: there are pagination limits. [Optional]
    since_id:
      Returns results with an ID greater than (that is, more recent
      than) the specified ID. There are limits to the number of
      Tweets which can be accessed through the API. If the limit of
      Tweets has occured since the since_id, the since_id will be
      forced to the oldest ID available. [Optional]
    retweets:
      If True, the timeline will contain native retweets. [Optional]
    include_entities:
      If True, each tweet will include a node called "entities,".
      This node offers a variety of metadata about the tweet in a
      discreet structure, including: user_mentions, urls, and
      hashtags. [Optional]

  Returns:
    A sequence of twitter.Status instances, one for each message

  Raises:
    TwitterError: when unauthenticated with no user, or when count/page
      are not valid integers, or count exceeds 100.
  '''
  if not user and not self._oauth_consumer:
    raise TwitterError("User must be specified if API is not authenticated.")
  base = '%s/statuses/friends_timeline' % self.base_url
  if user:
    url = '%s/%s.json' % (base, user)
  else:
    url = '%s.json' % base
  parameters = {}
  if count is not None:
    # Validate before sending: the API caps this endpoint at 100.
    try:
      if int(count) > 100:
        raise TwitterError("'count' may not be greater than 100")
    except ValueError:
      raise TwitterError("'count' must be an integer")
    parameters['count'] = count
  if page is not None:
    try:
      parameters['page'] = int(page)
    except ValueError:
      raise TwitterError("'page' must be an integer")
  if since_id:
    parameters['since_id'] = since_id
  if retweets:
    parameters['include_rts'] = True
  if include_entities:
    parameters['include_entities'] = True
  payload = self._FetchUrl(url, parameters=parameters)
  data = self._ParseAndCheckTwitter(payload)
  return [Status.NewFromJsonDict(entry) for entry in data]
def GetUserTimeline(self,
                    id=None,
                    user_id=None,
                    screen_name=None,
                    since_id=None,
                    max_id=None,
                    count=None,
                    page=None,
                    include_rts=None,
                    include_entities=None):
  '''Fetch the sequence of public Status messages for a single user.

  The twitter.Api instance must be authenticated if the user is private.

  Args:
    id:
      Specifies the ID or screen name of the user for whom to return
      the user_timeline. [Optional]
    user_id:
      Specifies the numeric ID of the user. Helpful for disambiguating
      when a valid user ID is also a valid screen name. [Optional]
    screen_name:
      Specifies the screen name of the user. Helpful for disambiguating
      when a valid screen name is also a user ID. [Optional]
    since_id:
      Returns results with an ID greater than (that is, more recent
      than) the specified ID. [Optional]
    max_id:
      Returns only statuses with an ID less than (that is, older
      than) or equal to the specified ID. [Optional]
    count:
      Specifies the number of statuses to retrieve. May not be
      greater than 200. [Optional]
    page:
      Specifies the page of results to retrieve.
      Note: there are pagination limits. [Optional]
    include_rts:
      If True, the timeline will contain native retweets (if they
      exist) in addition to the standard stream of tweets. [Optional]
    include_entities:
      If True, each tweet will include a node called "entities",
      offering metadata such as user_mentions, urls, and hashtags.
      [Optional]

  Returns:
    A sequence of Status instances, one for each message up to count

  Raises:
    TwitterError: if no user is given while unauthenticated, or a
      numeric parameter cannot be converted.
  '''
  parameters = {}
  if id:
    url = '%s/statuses/user_timeline/%s.json' % (self.base_url, id)
  elif user_id:
    url = '%s/statuses/user_timeline.json?user_id=%d' % (self.base_url, user_id)
  elif screen_name:
    url = ('%s/statuses/user_timeline.json?screen_name=%s' % (self.base_url,
           screen_name))
  elif not self._oauth_consumer:
    raise TwitterError("User must be specified if API is not authenticated.")
  else:
    url = '%s/statuses/user_timeline.json' % self.base_url
  # FIX: the conversions below previously used bare "except:" clauses,
  # which also swallowed KeyboardInterrupt/SystemExit and masked
  # unrelated bugs; narrowed to the exceptions long()/int() can raise
  # for bad values.
  if since_id:
    try:
      parameters['since_id'] = long(since_id)
    except (ValueError, TypeError):
      raise TwitterError("since_id must be an integer")
  if max_id:
    try:
      parameters['max_id'] = long(max_id)
    except (ValueError, TypeError):
      raise TwitterError("max_id must be an integer")
  if count:
    try:
      parameters['count'] = int(count)
    except (ValueError, TypeError):
      raise TwitterError("count must be an integer")
  if page:
    try:
      parameters['page'] = int(page)
    except (ValueError, TypeError):
      raise TwitterError("page must be an integer")
  if include_rts:
    parameters['include_rts'] = 1
  if include_entities:
    parameters['include_entities'] = 1
  json = self._FetchUrl(url, parameters=parameters)
  data = self._ParseAndCheckTwitter(json)
  return [Status.NewFromJsonDict(x) for x in data]
def GetStatus(self, id, include_entities=None):
  '''Returns a single status message.

  The twitter.Api instance must be authenticated if the
  status message is private.

  Args:
    id:
      The numeric ID of the status you are trying to retrieve.
    include_entities:
      If True, each tweet will include a node called "entities",
      offering metadata such as user_mentions, urls, and hashtags.
      [Optional]

  Returns:
    A twitter.Status instance representing that status message

  Raises:
    TwitterError: if id is not convertible to a long integer.
  '''
  # FIX: previously a bare "except:" which also caught
  # KeyboardInterrupt/SystemExit; narrowed to conversion failures.
  try:
    if id:
      long(id)
  except (ValueError, TypeError):
    raise TwitterError("id must be an long integer")
  parameters = {}
  if include_entities:
    parameters['include_entities'] = 1
  url = '%s/statuses/show/%s.json' % (self.base_url, id)
  json = self._FetchUrl(url, parameters=parameters)
  data = self._ParseAndCheckTwitter(json)
  return Status.NewFromJsonDict(data)
def DestroyStatus(self, id):
  '''Destroys the status specified by the required ID parameter.

  The twitter.Api instance must be authenticated and the
  authenticating user must be the author of the specified status.

  Args:
    id:
      The numerical ID of the status you're trying to destroy.

  Returns:
    A twitter.Status instance representing the destroyed status message

  Raises:
    TwitterError: if id is not convertible to a long integer.
  '''
  # FIX: previously a bare "except:" which also caught
  # KeyboardInterrupt/SystemExit; narrowed to conversion failures.
  try:
    if id:
      long(id)
  except (ValueError, TypeError):
    raise TwitterError("id must be an integer")
  url = '%s/statuses/destroy/%s.json' % (self.base_url, id)
  json = self._FetchUrl(url, post_data={'id': id})
  data = self._ParseAndCheckTwitter(json)
  return Status.NewFromJsonDict(data)
@classmethod
def _calculate_status_length(cls, status, linksize=19):
  # Measure a status as Twitter would: every http(s) link is assumed to
  # be shortened to a t.co URL of `linksize` characters, so each one is
  # swapped for a dummy token of exactly that length before counting.
  dummy_link_replacement = 'https://-%d-chars%s/' % (linksize, '-' * (linksize - 18))
  words = []
  for word in status.split(' '):
    if word.startswith('http://') or word.startswith('https://'):
      words.append(dummy_link_replacement)
    else:
      words.append(word)
  return len(' '.join(words))
def PostUpdate(self, status, in_reply_to_status_id=None):
  '''Post a twitter status message from the authenticated user.

  The twitter.Api instance must be authenticated.

  Args:
    status:
      The message text to be posted. Must be less than or equal to
      140 characters.
    in_reply_to_status_id:
      The ID of an existing status being replied to. This implicitly
      sets the in_reply_to_user_id attribute of the resulting status
      to the user ID of the message being replied to. Invalid or
      missing status IDs are ignored. [Optional]

  Returns:
    A twitter.Status instance representing the message posted.
  '''
  if not self._oauth_consumer:
    raise TwitterError("The twitter.Api instance must be authenticated.")
  url = '%s/statuses/update.json' % self.base_url
  # Normalize to unicode (honoring the configured input encoding)
  # before measuring the length.
  u_status = (status
              if isinstance(status, unicode) or self._input_encoding is None
              else unicode(status, self._input_encoding))
  if self._calculate_status_length(u_status, self._shortlink_size) > CHARACTER_LIMIT:
    raise TwitterError("Text must be less than or equal to %d characters. "
                       "Consider using PostUpdates." % CHARACTER_LIMIT)
  post_data = {'status': status}
  if in_reply_to_status_id:
    post_data['in_reply_to_status_id'] = in_reply_to_status_id
  json = self._FetchUrl(url, post_data=post_data)
  data = self._ParseAndCheckTwitter(json)
  return Status.NewFromJsonDict(data)
def PostUpdates(self, status, continuation=None, **kwargs):
  '''Post one or more twitter status messages from the authenticated user.

  Unlike api.PostUpdate, this method will post multiple status updates
  if the message is longer than 140 characters.

  The twitter.Api instance must be authenticated.

  Args:
    status:
      The message text to be posted.
      May be longer than 140 characters.
    continuation:
      The character string, if any, to be appended to all but the
      last message. Note that Twitter strips trailing '...' strings
      from messages. Consider using the unicode \u2026 character
      (horizontal ellipsis) instead. [Defaults to None]
    **kwargs:
      See api.PostUpdate for a list of accepted parameters.

  Returns:
    A list of twitter.Status instances representing the messages posted.
  '''
  results = list()
  if continuation is None:
    continuation = ''
  line_length = CHARACTER_LIMIT - len(continuation)
  lines = textwrap.wrap(status, line_length)
  # FIX: textwrap.wrap('') returns [], so an empty or whitespace-only
  # status previously crashed with IndexError on lines[-1]. There is
  # nothing to post in that case; return the (empty) result list.
  if not lines:
    return results
  for line in lines[0:-1]:
    results.append(self.PostUpdate(line + continuation, **kwargs))
  results.append(self.PostUpdate(lines[-1], **kwargs))
  return results
def GetUserRetweets(self, count=None, since_id=None, max_id=None, include_entities=False):
  '''Fetch the sequence of retweets made by the authenticated user.

  The twitter.Api instance must be authenticated.

  Args:
    count:
      The number of status messages to retrieve; may not be greater
      than 100. [Optional]
    since_id:
      Returns results with an ID greater than (that is, more recent
      than) the specified ID. [Optional]
    max_id:
      Returns results with an ID less than (that is, older than) or
      equal to the specified ID. [Optional]
    include_entities:
      If True, each tweet will include a node called "entities",
      offering metadata such as user_mentions, urls, and hashtags.
      [Optional]

  Returns:
    A sequence of twitter.Status instances, one for each message up to count
  '''
  url = '%s/statuses/retweeted_by_me.json' % self.base_url
  if not self._oauth_consumer:
    raise TwitterError("The twitter.Api instance must be authenticated.")
  parameters = {}
  if count is not None:
    try:
      if int(count) > 100:
        raise TwitterError("'count' may not be greater than 100")
    except ValueError:
      raise TwitterError("'count' must be an integer")
  if count:
    parameters['count'] = count
  if since_id:
    parameters['since_id'] = since_id
  if include_entities:
    parameters['include_entities'] = True
  # FIX: previously a bare "except:" which also caught
  # KeyboardInterrupt/SystemExit; narrowed to conversion failures.
  if max_id:
    try:
      parameters['max_id'] = long(max_id)
    except (ValueError, TypeError):
      raise TwitterError("max_id must be an integer")
  json = self._FetchUrl(url, parameters=parameters)
  data = self._ParseAndCheckTwitter(json)
  return [Status.NewFromJsonDict(x) for x in data]
def GetReplies(self, since=None, since_id=None, page=None):
  '''Get a sequence of status messages representing the 20 most
  recent replies (status updates prefixed with @twitterID) to the
  authenticating user.

  Args:
    since:
      Narrows the returned results to statuses created after the
      specified HTTP-formatted date. [Optional]
    since_id:
      Returns results with an ID greater than (that is, more recent
      than) the specified ID. [Optional]
    page:
      Specifies the page of results to retrieve.
      Note: there are pagination limits. [Optional]

  Returns:
    A sequence of twitter.Status instances, one for each reply to the user.
  '''
  url = '%s/statuses/replies.json' % self.base_url
  if not self._oauth_consumer:
    raise TwitterError("The twitter.Api instance must be authenticated.")
  # Only forward the query parameters the caller actually supplied.
  parameters = {}
  for name, value in (('since', since), ('since_id', since_id), ('page', page)):
    if value:
      parameters[name] = value
  json = self._FetchUrl(url, parameters=parameters)
  data = self._ParseAndCheckTwitter(json)
  return [Status.NewFromJsonDict(x) for x in data]
def GetRetweets(self, statusid):
  '''Returns up to 100 of the first retweets of the tweet identified
  by statusid

  The twitter.Api instance must be authenticated.

  Args:
    statusid:
      The ID of the tweet for which retweets should be searched for

  Returns:
    A list of twitter.Status instances, which are retweets of statusid
  '''
  if not self._oauth_consumer:
    # FIX: error message previously misspelled "instance" as "instsance".
    raise TwitterError("The twitter.Api instance must be authenticated.")
  url = '%s/statuses/retweets/%s.json?include_entities=true&include_rts=true' % (self.base_url, statusid)
  parameters = {}
  json = self._FetchUrl(url, parameters=parameters)
  data = self._ParseAndCheckTwitter(json)
  return [Status.NewFromJsonDict(s) for s in data]
def GetFriends(self, user=None, cursor=-1):
  '''Fetch the sequence of twitter.User instances, one for each friend.

  The twitter.Api instance must be authenticated.

  Args:
    user:
      The twitter name or id of the user whose friends you are fetching.
      If not specified, defaults to the authenticated user. [Optional]
    cursor:
      The Twitter API cursor to start fetching from. [Optional]

  Returns:
    A sequence of twitter.User instances, one for each friend
  '''
  if not user and not self._oauth_consumer:
    raise TwitterError("twitter.Api instance must be authenticated")
  if user:
    url = '%s/statuses/friends/%s.json' % (self.base_url, user)
  else:
    url = '%s/statuses/friends.json' % self.base_url
  json = self._FetchUrl(url, parameters={'cursor': cursor})
  data = self._ParseAndCheckTwitter(json)
  return [User.NewFromJsonDict(x) for x in data['users']]
def GetFriendIDs(self, user=None, cursor=-1):
  '''Returns a list of twitter user id's for every person
  the specified user is following.

  Args:
    user:
      The id or screen_name of the user to retrieve the id list for
      [Optional]
    cursor:
      The Twitter API cursor to start fetching from. [Optional]

  Returns:
    A list of integers, one for each user id.
  '''
  if not user and not self._oauth_consumer:
    raise TwitterError("twitter.Api instance must be authenticated")
  if user:
    url = '%s/friends/ids/%s.json' % (self.base_url, user)
  else:
    url = '%s/friends/ids.json' % self.base_url
  json = self._FetchUrl(url, parameters={'cursor': cursor})
  data = self._ParseAndCheckTwitter(json)
  return data
def GetFollowerIDs(self, userid=None, cursor=-1):
  '''Fetch the sequence of user ids, one for each follower.

  The twitter.Api instance must be authenticated.

  Args:
    userid:
      The id of the user to retrieve follower ids for. [Optional]
    cursor:
      The Twitter API cursor to start fetching from. [Optional]

  Returns:
    The parsed response data (a sequence of follower ids).
  '''
  url = '%s/followers/ids.json' % self.base_url
  parameters = {'cursor': cursor}
  if userid:
    parameters['user_id'] = userid
  json = self._FetchUrl(url, parameters=parameters)
  data = self._ParseAndCheckTwitter(json)
  return data
def GetFollowers(self, cursor=-1):
  '''Fetch the sequence of twitter.User instances, one for each follower

  The twitter.Api instance must be authenticated.

  Args:
    cursor:
      Specifies the Twitter API Cursor location to start at. [Optional]
      Note: there are pagination limits.

  Returns:
    A sequence of twitter.User instances, one for each follower
  '''
  if not self._oauth_consumer:
    raise TwitterError("twitter.Api instance must be authenticated")
  url = '%s/statuses/followers.json' % self.base_url
  result = []
  while True:
    parameters = { 'cursor': cursor }
    json = self._FetchUrl(url, parameters=parameters)
    data = self._ParseAndCheckTwitter(json)
    result += [User.NewFromJsonDict(x) for x in data['users']]
    if 'next_cursor' in data:
      if data['next_cursor'] == 0 or data['next_cursor'] == data['previous_cursor']:
        break
      # BUG FIX: the cursor was never advanced, so whenever the result
      # spanned more than one page the loop re-fetched the same page
      # forever. Follow the server-supplied next_cursor instead.
      cursor = data['next_cursor']
    else:
      break
  return result
def GetFeatured(self):
  '''Fetch the sequence of twitter.User instances featured on twitter.com

  The twitter.Api instance must be authenticated.

  Returns:
    A sequence of twitter.User instances
  '''
  json = self._FetchUrl('%s/statuses/featured.json' % self.base_url)
  data = self._ParseAndCheckTwitter(json)
  return [User.NewFromJsonDict(entry) for entry in data]
def UsersLookup(self, user_id=None, screen_name=None, users=None):
  '''Fetch extended information for the specified users.

  Users may be specified either as lists of either user_ids,
  screen_names, or twitter.User objects. The list of users that
  are queried is the union of all specified parameters.

  The twitter.Api instance must be authenticated.

  Args:
    user_id:
      A list of user_ids to retrieve extended information.
      [Optional]
    screen_name:
      A list of screen_names to retrieve extended information.
      [Optional]
    users:
      A list of twitter.User objects to retrieve extended information.
      [Optional]

  Returns:
    A list of twitter.User objects for the requested users

  Raises:
    TwitterError: if unauthenticated or no selector is supplied.
  '''
  if not self._oauth_consumer:
    raise TwitterError("The twitter.Api instance must be authenticated.")
  if not user_id and not screen_name and not users:
    # FIX: error message previously read "at least on of".
    raise TwitterError("Specify at least one of user_id, screen_name, or users.")
  url = '%s/users/lookup.json' % self.base_url
  parameters = {}
  uids = list()
  if user_id:
    uids.extend(user_id)
  if users:
    uids.extend([u.id for u in users])
  if uids:
    parameters['user_id'] = ','.join(["%s" % u for u in uids])
  if screen_name:
    parameters['screen_name'] = ','.join(screen_name)
  json = self._FetchUrl(url, parameters=parameters)
  data = self._ParseAndCheckTwitter(json)
  return [User.NewFromJsonDict(u) for u in data]
def GetUser(self, user):
  '''Returns a single user.

  The twitter.Api instance must be authenticated.

  Args:
    user: The twitter name or id of the user to retrieve.

  Returns:
    A twitter.User instance representing that user
  '''
  json = self._FetchUrl('%s/users/show/%s.json' % (self.base_url, user))
  data = self._ParseAndCheckTwitter(json)
  return User.NewFromJsonDict(data)
def GetDirectMessages(self, since=None, since_id=None, page=None):
  '''Returns a list of the direct messages sent to the authenticating user.

  The twitter.Api instance must be authenticated.

  Args:
    since:
      Narrows the returned results to just those statuses created
      after the specified HTTP-formatted date. [Optional]
    since_id:
      Returns results with an ID greater than (that is, more recent
      than) the specified ID. [Optional]
    page:
      Specifies the page of results to retrieve.
      Note: there are pagination limits. [Optional]

  Returns:
    A sequence of twitter.DirectMessage instances
  '''
  url = '%s/direct_messages.json' % self.base_url
  if not self._oauth_consumer:
    raise TwitterError("The twitter.Api instance must be authenticated.")
  # Only forward the query parameters the caller actually supplied.
  parameters = {}
  for name, value in (('since', since), ('since_id', since_id), ('page', page)):
    if value:
      parameters[name] = value
  json = self._FetchUrl(url, parameters=parameters)
  data = self._ParseAndCheckTwitter(json)
  return [DirectMessage.NewFromJsonDict(x) for x in data]
def PostDirectMessage(self, user, text):
  '''Post a twitter direct message from the authenticated user

  The twitter.Api instance must be authenticated.

  Args:
    user: The ID or screen name of the recipient user.
    text: The message text to be posted. Must be less than 140 characters.

  Returns:
    A twitter.DirectMessage instance representing the message posted
  '''
  if not self._oauth_consumer:
    raise TwitterError("The twitter.Api instance must be authenticated.")
  url = '%s/direct_messages/new.json' % self.base_url
  json = self._FetchUrl(url, post_data={'text': text, 'user': user})
  data = self._ParseAndCheckTwitter(json)
  return DirectMessage.NewFromJsonDict(data)
def DestroyDirectMessage(self, id):
  '''Destroys the direct message specified in the required ID parameter.

  The twitter.Api instance must be authenticated, and the
  authenticating user must be the recipient of the specified direct
  message.

  Args:
    id: The id of the direct message to be destroyed

  Returns:
    A twitter.DirectMessage instance representing the message destroyed
  '''
  destroy_url = '%s/direct_messages/destroy/%s.json' % (self.base_url, id)
  json = self._FetchUrl(destroy_url, post_data={'id': id})
  data = self._ParseAndCheckTwitter(json)
  return DirectMessage.NewFromJsonDict(data)
def CreateFriendship(self, user):
  '''Befriends the user specified in the user parameter as the authenticating user.

  The twitter.Api instance must be authenticated.

  Args:
    user: The ID or screen name of the user to befriend.

  Returns:
    A twitter.User instance representing the befriended user.
  '''
  json = self._FetchUrl('%s/friendships/create/%s.json' % (self.base_url, user),
                        post_data={'user': user})
  data = self._ParseAndCheckTwitter(json)
  return User.NewFromJsonDict(data)
def DestroyFriendship(self, user):
  '''Discontinues friendship with the user specified in the user parameter.

  The twitter.Api instance must be authenticated.

  Args:
    user: The ID or screen name of the user with whom to discontinue friendship.

  Returns:
    A twitter.User instance representing the discontinued friend.
  '''
  json = self._FetchUrl('%s/friendships/destroy/%s.json' % (self.base_url, user),
                        post_data={'user': user})
  data = self._ParseAndCheckTwitter(json)
  return User.NewFromJsonDict(data)
def CreateFavorite(self, status):
  '''Favorites the status specified in the status parameter as the authenticating user.

  Returns the favorite status when successful.

  The twitter.Api instance must be authenticated.

  Args:
    status: The twitter.Status instance to mark as a favorite.

  Returns:
    A twitter.Status instance representing the newly-marked favorite.
  '''
  favorite_url = '%s/favorites/create/%s.json' % (self.base_url, status.id)
  json = self._FetchUrl(favorite_url, post_data={'id': status.id})
  data = self._ParseAndCheckTwitter(json)
  return Status.NewFromJsonDict(data)
def DestroyFavorite(self, status):
  '''Un-favorites the status specified in the ID parameter as the authenticating user.

  Returns the un-favorited status in the requested format when successful.

  The twitter.Api instance must be authenticated.

  Args:
    status: The twitter.Status to unmark as a favorite.

  Returns:
    A twitter.Status instance representing the newly-unmarked favorite.
  '''
  favorite_url = '%s/favorites/destroy/%s.json' % (self.base_url, status.id)
  json = self._FetchUrl(favorite_url, post_data={'id': status.id})
  data = self._ParseAndCheckTwitter(json)
  return Status.NewFromJsonDict(data)
def GetFavorites(self,
                 user=None,
                 page=None):
  '''Return a list of Status objects representing favorited tweets.

  By default, returns the (up to) 20 most recent tweets for the
  authenticated user.

  Args:
    user:
      The twitter name or id of the user whose favorites you are fetching.
      If not specified, defaults to the authenticated user. [Optional]
    page:
      Specifies the page of results to retrieve.
      Note: there are pagination limits. [Optional]

  Returns:
    A sequence of twitter.Status instances.
  '''
  parameters = {'page': page} if page else {}
  if user:
    url = '%s/favorites/%s.json' % (self.base_url, user)
  else:
    # No explicit user: fall back to the authenticated account.
    if not self._oauth_consumer:
      raise TwitterError("User must be specified if API is not authenticated.")
    url = '%s/favorites.json' % self.base_url
  json = self._FetchUrl(url, parameters=parameters)
  data = self._ParseAndCheckTwitter(json)
  return [Status.NewFromJsonDict(x) for x in data]
def GetMentions(self,
                since_id=None,
                max_id=None,
                page=None):
  '''Returns the 20 most recent mentions (status containing @twitterID)
  for the authenticating user.

  Args:
    since_id:
      Returns results with an ID greater than (that is, more recent
      than) the specified ID. [Optional]
    max_id:
      Returns only statuses with an ID less than
      (that is, older than) the specified ID. [Optional]
    page:
      Specifies the page of results to retrieve.
      Note: there are pagination limits. [Optional]

  Returns:
    A sequence of twitter.Status instances, one for each mention of the user.
  '''
  url = '%s/statuses/mentions.json' % self.base_url
  if not self._oauth_consumer:
    raise TwitterError("The twitter.Api instance must be authenticated.")
  parameters = {}
  if since_id:
    parameters['since_id'] = since_id
  # FIX: previously a bare "except:" which also caught
  # KeyboardInterrupt/SystemExit; narrowed to conversion failures.
  if max_id:
    try:
      parameters['max_id'] = long(max_id)
    except (ValueError, TypeError):
      raise TwitterError("max_id must be an integer")
  if page:
    parameters['page'] = page
  json = self._FetchUrl(url, parameters=parameters)
  data = self._ParseAndCheckTwitter(json)
  return [Status.NewFromJsonDict(x) for x in data]
def CreateList(self, user, name, mode=None, description=None):
  '''Creates a new list with the give name

  The twitter.Api instance must be authenticated.

  Args:
    user:
      Twitter name to create the list for
    name:
      New name for the list
    mode:
      'public' or 'private'.
      Defaults to 'public'. [Optional]
    description:
      Description of the list. [Optional]

  Returns:
    A twitter.List instance representing the new list
  '''
  url = '%s/%s/lists.json' % (self.base_url, user)
  post_fields = {'name': name}
  # Omit optional fields entirely rather than sending null values.
  if mode is not None:
    post_fields['mode'] = mode
  if description is not None:
    post_fields['description'] = description
  json = self._FetchUrl(url, post_data=post_fields)
  data = self._ParseAndCheckTwitter(json)
  return List.NewFromJsonDict(data)
def DestroyList(self, user, id):
  '''Destroys the list from the given user

  The twitter.Api instance must be authenticated.

  Args:
    user:
      The user to remove the list from.
    id:
      The slug or id of the list to remove.

  Returns:
    A twitter.List instance representing the removed list.
  '''
  # Twitter's v1 API tunnels DELETE through POST via the _method field.
  json = self._FetchUrl('%s/%s/lists/%s.json' % (self.base_url, user, id),
                        post_data={'_method': 'DELETE'})
  data = self._ParseAndCheckTwitter(json)
  return List.NewFromJsonDict(data)
def CreateSubscription(self, owner, list):
  '''Creates a subscription to a list by the authenticated user

  The twitter.Api instance must be authenticated.

  Args:
    owner:
      User name or id of the owner of the list being subscribed to.
    list:
      The slug or list id to subscribe the user to
      (note: parameter name shadows the builtin, kept for API
      compatibility).

  Returns:
    A twitter.List instance representing the list subscribed to
  '''
  subscribe_url = '%s/%s/%s/subscribers.json' % (self.base_url, owner, list)
  json = self._FetchUrl(subscribe_url, post_data={'list_id': list})
  data = self._ParseAndCheckTwitter(json)
  return List.NewFromJsonDict(data)
def DestroySubscription(self, owner, list):
  '''Destroys the subscription to a list for the authenticated user

  The twitter.Api instance must be authenticated.

  Args:
    owner:
      The user id or screen name of the user that owns the
      list that is to be unsubscribed from
    list:
      The slug or list id of the list to unsubscribe from
      (note: parameter name shadows the builtin, kept for API
      compatibility).

  Returns:
    A twitter.List instance representing the removed list.
  '''
  subscribe_url = '%s/%s/%s/subscribers.json' % (self.base_url, owner, list)
  # Twitter's v1 API tunnels DELETE through POST via the _method field.
  json = self._FetchUrl(subscribe_url,
                        post_data={'_method': 'DELETE', 'list_id': list})
  data = self._ParseAndCheckTwitter(json)
  return List.NewFromJsonDict(data)
def GetSubscriptions(self, user, cursor=-1):
  '''Fetch the sequence of Lists that the given user is subscribed to

  The twitter.Api instance must be authenticated.

  Args:
    user:
      The twitter name or id of the user
    cursor:
      "page" value that Twitter will use to start building the
      list sequence from. -1 to start at the beginning.
      Twitter will return in the result the values for next_cursor
      and previous_cursor. [Optional]

  Returns:
    A sequence of twitter.List instances, one for each list
  '''
  if not self._oauth_consumer:
    raise TwitterError("twitter.Api instance must be authenticated")
  url = '%s/%s/lists/subscriptions.json' % (self.base_url, user)
  json = self._FetchUrl(url, parameters={'cursor': cursor})
  data = self._ParseAndCheckTwitter(json)
  return [List.NewFromJsonDict(entry) for entry in data['lists']]
def GetLists(self, user, cursor=-1):
  '''Fetch the sequence of lists for a user.

  The twitter.Api instance must be authenticated.

  Args:
    user:
      The twitter name or id of the user whose friends you are fetching.
      If the passed in user is the same as the authenticated user
      then you will also receive private list data.
    cursor:
      "page" value that Twitter will use to start building the
      list sequence from. -1 to start at the beginning.
      Twitter will return in the result the values for next_cursor
      and previous_cursor. [Optional]

  Returns:
    A sequence of twitter.List instances, one for each list
  '''
  if not self._oauth_consumer:
    raise TwitterError("twitter.Api instance must be authenticated")
  url = '%s/%s/lists.json' % (self.base_url, user)
  json = self._FetchUrl(url, parameters={'cursor': cursor})
  data = self._ParseAndCheckTwitter(json)
  return [List.NewFromJsonDict(entry) for entry in data['lists']]
def GetUserByEmail(self, email):
  '''Returns a single user by email address.

  Args:
    email:
      The email of the user to retrieve.

  Returns:
    A twitter.User instance representing that user
  '''
  json = self._FetchUrl('%s/users/show.json?email=%s' % (self.base_url, email))
  data = self._ParseAndCheckTwitter(json)
  return User.NewFromJsonDict(data)
def VerifyCredentials(self):
  '''Returns a twitter.User instance if the authenticating user is valid.

  Returns:
    A twitter.User instance representing that user if the
    credentials are valid, None otherwise.

  Raises:
    TwitterError: if the Api instance has no OAuth consumer configured.
    urllib2.HTTPError: re-raised for any HTTP failure other than 401.
  '''
  if not self._oauth_consumer:
    raise TwitterError("Api instance must first be given user credentials.")
  url = '%s/account/verify_credentials.json' % self.base_url
  try:
    # no_cache: a stale cached response must not mask revoked credentials.
    json = self._FetchUrl(url, no_cache=True)
  except urllib2.HTTPError, http_error:
    # 401 means "bad credentials" and is part of this method's contract
    # (signalled to the caller as None); anything else is unexpected.
    if http_error.code == httplib.UNAUTHORIZED:
      return None
    else:
      raise http_error
  data = self._ParseAndCheckTwitter(json)
  return User.NewFromJsonDict(data)
def SetCache(self, cache):
  '''Override the default cache. Set to None to prevent caching.

  Args:
    cache:
      An instance that supports the same API as the twitter._FileCache
  '''
  # The DEFAULT_CACHE sentinel means "build the standard file cache".
  self._cache = _FileCache() if cache == DEFAULT_CACHE else cache
def SetUrllib(self, urllib):
  '''Override the default urllib implementation.

  Args:
    urllib:
      An instance that supports the same API as the urllib2 module
  '''
  # Stored for use by _FetchUrl; handy for injecting mocks in tests.
  self._urllib = urllib
def SetCacheTimeout(self, cache_timeout):
  '''Override the default cache timeout.

  Args:
    cache_timeout:
      Time, in seconds, that responses should be reused.
  '''
  self._cache_timeout = cache_timeout
def SetUserAgent(self, user_agent):
  '''Override the default user agent

  Args:
    user_agent:
      A string that should be send to the server as the User-agent
  '''
  # Replaces any agent previously configured in the header map.
  self._request_headers['User-Agent'] = user_agent
def SetXTwitterHeaders(self, client, url, version):
  '''Set the X-Twitter HTTP headers that will be sent to the server.

  Args:
    client:
      The client name as a string. Will be sent to the server as
      the 'X-Twitter-Client' header.
    url:
      The URL of the meta.xml as a string. Will be sent to the server
      as the 'X-Twitter-Client-URL' header.
    version:
      The client version as a string. Will be sent to the server
      as the 'X-Twitter-Client-Version' header.
  '''
  headers = self._request_headers
  headers['X-Twitter-Client'] = client
  headers['X-Twitter-Client-URL'] = url
  headers['X-Twitter-Client-Version'] = version
def SetSource(self, source):
  '''Suggest the "from source" value to be displayed on the Twitter web site.

  The value of the 'source' parameter must be first recognized by
  the Twitter server. New source values are authorized on a case by
  case basis by the Twitter development team.

  Args:
    source:
      The source name as a string. Will be sent to the server as
      the 'source' parameter.
  '''
  # Merged into every request's parameters by the fetch layer.
  self._default_params['source'] = source
def GetRateLimitStatus(self):
  '''Fetch the rate limit status for the currently authorized user.

  Returns:
    A dictionary containing the time the limit will reset (reset_time),
    the number of remaining hits allowed before the reset (remaining_hits),
    the number of hits allowed in a 60-minute period (hourly_limit), and
    the time of the reset in seconds since The Epoch (reset_time_in_seconds).
  '''
  # no_cache: rate-limit data is only useful when fresh.
  json = self._FetchUrl('%s/account/rate_limit_status.json' % self.base_url,
                        no_cache=True)
  return self._ParseAndCheckTwitter(json)
def MaximumHitFrequency(self):
  '''Determines the minimum number of seconds that a program must wait
  before hitting the server again without exceeding the rate_limit
  imposed for the currently authenticated user.

  Returns:
    The minimum second interval that a program must use so as to not
    exceed the rate_limit imposed for the user.
  '''
  rate_status = self.GetRateLimitStatus()
  reset_time = rate_status.get('reset_time', None)
  limit = rate_status.get('remaining_hits', None)
  if not reset_time:
    # No reset information available: fall back to a conservative
    # one-minute interval.
    return 60
  # Parse the RFC822 reset timestamp into a datetime.
  reset = datetime.datetime(*rfc822.parsedate(reset_time)[:7])
  # Seconds between now and the reset time plus one hour.
  delta = reset + datetime.timedelta(hours=1) - datetime.datetime.utcnow()
  if not limit:
    return int(delta.seconds)
  # Spread the remaining allowance evenly over the remaining window.
  return int(delta.seconds / limit) + 1
def _BuildUrl(self, url, path_elements=None, extra_params=None):
  '''Rebuild *url* with extra path segments and query parameters.

  Args:
    url: The base URL to extend.
    path_elements: Optional sequence of path segments; falsy entries
      are dropped.
    extra_params: Optional dict of query parameters to append.

  Returns:
    The rebuilt URL string.
  '''
  (scheme, netloc, path, params, query, fragment) = urlparse.urlparse(url)
  if path_elements:
    # Drop empty/None segments before joining.
    kept = [segment for segment in path_elements if segment]
    if not path.endswith('/'):
      path += '/'
    path += '/'.join(kept)
  if extra_params and len(extra_params) > 0:
    extra_query = self._EncodeParameters(extra_params)
    query = query + '&' + extra_query if query else extra_query
  return urlparse.urlunparse((scheme, netloc, path, params, query, fragment))
def _InitializeRequestHeaders(self, request_headers):
  # Seed the header map from the caller-supplied dict, or start empty.
  self._request_headers = request_headers if request_headers else {}
def _InitializeUserAgent(self):
  # Advertise both the urllib implementation and this library's version.
  agent = 'Python-urllib/%s (python-twitter/%s)' % (
      self._urllib.__version__, __version__)
  self.SetUserAgent(agent)
def _InitializeDefaultParameters(self):
  # Parameters merged into every request (e.g. 'source'); starts empty.
  self._default_params = dict()
def _DecompressGzippedResponse(self, response):
  # Return the response body, transparently un-gzipping it when the
  # server declared gzip content-encoding.
  raw_data = response.read()
  if response.headers.get('content-encoding', None) != 'gzip':
    return raw_data
  return gzip.GzipFile(fileobj=StringIO.StringIO(raw_data)).read()
def _Encode(self, s):
  # Decode using the configured input encoding (if any), then always
  # emit UTF-8 bytes for the wire.
  source_encoding = self._input_encoding
  decoded = unicode(s, source_encoding) if source_encoding else unicode(s)
  return decoded.encode('utf-8')
def _EncodeParameters(self, parameters):
'''Return a string in key=value&key=value form
Values of None are not included in the output string.
Args:
parameters:
A dict of (key, value) tuples, where value is encoded as
specified by self._encoding
Returns:
A URL-encoded string in "key=value&key=value" form
'''
if parameters is None:
return None
else:
return urllib.urlencode(dict([(k, self._Encode(v)) for k, v in parameters.items() if v is not None]))
def _EncodePostData(self, post_data):
'''Return a string in key=value&key=value form
Values are assumed to be encoded in the format specified by self._encoding,
and are subsequently URL encoded.
Args:
post_data:
A dict of (key, value) tuples, where value is encoded as
specified by self._encoding
Returns:
A URL-encoded string in "key=value&key=value" form
'''
if post_data is None:
return None
else:
return urllib.urlencode(dict([(k, self._Encode(v)) for k, v in post_data.items()]))
def _ParseAndCheckTwitter(self, json):
    """Try and parse the JSON returned from Twitter and return
    an empty dictionary if there is any error. This is a purely
    defensive check because during some Twitter network outages
    it will return an HTML failwhale page."""
    # NOTE(review): despite the docstring, parse failures raise
    # TwitterError rather than returning an empty dict.
    try:
        data = simplejson.loads(json)
        self._CheckForTwitterError(data)
    except ValueError:
        # Not JSON at all -- Twitter served an HTML error page instead.
        # Distinguish the known outage pages from a generic decode failure.
        if "<title>Twitter / Over capacity</title>" in json:
            raise TwitterError("Capacity Error")
        if "<title>Twitter / Error</title>" in json:
            raise TwitterError("Technical Error")
        raise TwitterError("json decoding")
    # Only reached when parsing succeeded and no error payload was found.
    return data
def _CheckForTwitterError(self, data):
"""Raises a TwitterError if twitter returns an error message.
Args:
data:
A python dict created from the Twitter json response
Raises:
TwitterError wrapping the twitter error message if one exists.
"""
# Twitter errors are relatively unlikely, so it is faster
# to check first, rather than try and catch the exception
if 'error' in data:
raise TwitterError(data['error'])
if 'errors' in data:
raise TwitterError(data['errors'])
def _FetchUrl(self,
              url,
              post_data=None,
              parameters=None,
              no_cache=None,
              use_gzip_compression=None):
    '''Fetch a URL, optionally caching for a specified time.
    Args:
      url:
        The URL to retrieve
      post_data:
        A dict of (str, unicode) key/value pairs.
        If set, POST will be used.
      parameters:
        A dict whose key/value pairs should encoded and added
        to the query string. [Optional]
      no_cache:
        If true, overrides the cache on the current request
      use_gzip_compression:
        If True, tells the server to gzip-compress the response.
        It does not apply to POST requests.
        Defaults to None, which will get the value to use from
        the instance variable self._use_gzip [Optional]
    Returns:
      A string containing the body of the response.
    '''
    # Build the extra parameters dict
    extra_params = {}
    if self._default_params:
        extra_params.update(self._default_params)
    if parameters:
        extra_params.update(parameters)
    # The presence of a POST body decides the HTTP verb.
    if post_data:
        http_method = "POST"
    else:
        http_method = "GET"
    if self._debugHTTP:
        _debug = 1
    else:
        _debug = 0
    # Private opener so the debug level and extra headers don't leak into
    # a process-global urllib opener.
    http_handler = self._urllib.HTTPHandler(debuglevel=_debug)
    https_handler = self._urllib.HTTPSHandler(debuglevel=_debug)
    opener = self._urllib.OpenerDirector()
    opener.add_handler(http_handler)
    opener.add_handler(https_handler)
    if use_gzip_compression is None:
        use_gzip = self._use_gzip
    else:
        use_gzip = use_gzip_compression
    # Set up compression; only advertised for GET requests.
    if use_gzip and not post_data:
        opener.addheaders.append(('Accept-Encoding', 'gzip'))
    if self._oauth_consumer is not None:
        # OAuth-signed request: sign over the POST params (if any) and let
        # the oauth request object rebuild the URL / body.
        if post_data and http_method == "POST":
            parameters = post_data.copy()
        req = oauth.Request.from_consumer_and_token(self._oauth_consumer,
                                                    token=self._oauth_token,
                                                    http_method=http_method,
                                                    http_url=url, parameters=parameters)
        req.sign_request(self._signature_method_hmac_sha1, self._oauth_consumer, self._oauth_token)
        # NOTE(review): 'headers' is computed but never attached to the
        # request below -- confirm whether the OAuth header is meant to be
        # sent, or whether signing via URL/body alone is intended.
        headers = req.to_header()
        if http_method == "POST":
            encoded_post_data = req.to_postdata()
        else:
            encoded_post_data = None
            url = req.to_url()
    else:
        # Unsigned request: encode parameters into the URL / POST body.
        url = self._BuildUrl(url, extra_params=extra_params)
        encoded_post_data = self._EncodePostData(post_data)
    # Open and return the URL immediately if we're not going to cache
    if encoded_post_data or no_cache or not self._cache or not self._cache_timeout:
        response = opener.open(url, encoded_post_data)
        url_data = self._DecompressGzippedResponse(response)
        opener.close()
    else:
        # Unique keys are a combination of the url and the oAuth Consumer Key
        if self._consumer_key:
            key = self._consumer_key + ':' + url
        else:
            key = url
        # See if it has been cached before
        last_cached = self._cache.GetCachedTime(key)
        # If the cached version is outdated then fetch another and store it
        if not last_cached or time.time() >= last_cached + self._cache_timeout:
            try:
                response = opener.open(url, encoded_post_data)
                url_data = self._DecompressGzippedResponse(response)
                self._cache.Set(key, url_data)
            except urllib2.HTTPError, e:
                # NOTE(review): on HTTPError the error is printed and
                # swallowed, leaving url_data unbound -- the return below
                # would then raise NameError.  Confirm intended behavior.
                print e
            opener.close()
        else:
            url_data = self._cache.Get(key)
    # Always return the latest version
    return url_data
# Raised by _FileCache for directory/path problems (see Set/Remove below).
class _FileCacheError(Exception):
    '''Base exception class for FileCache related errors'''
class _FileCache(object):
    """Filesystem-backed cache mapping string keys to response bodies.

    Keys are md5-hashed and sharded DEPTH directory levels deep under the
    root directory to keep individual directories small.
    """

    # Number of hash characters used as nested shard directories.
    DEPTH = 3

    def __init__(self,root_directory=None):
        self._InitializeRootDirectory(root_directory)

    def Get(self,key):
        # Return the cached contents for *key*, or None when not cached.
        path = self._GetPath(key)
        if os.path.exists(path):
            return open(path).read()
        else:
            return None

    def Set(self,key,data):
        # Store *data* under *key* using write-to-temp-then-rename so
        # concurrent readers never observe a partially written file.
        path = self._GetPath(key)
        directory = os.path.dirname(path)
        if not os.path.exists(directory):
            os.makedirs(directory)
        if not os.path.isdir(directory):
            raise _FileCacheError('%s exists but is not a directory' % directory)
        temp_fd, temp_path = tempfile.mkstemp()
        temp_fp = os.fdopen(temp_fd, 'w')
        temp_fp.write(data)
        temp_fp.close()
        # Safety check: never write outside the cache root.
        if not path.startswith(self._root_directory):
            raise _FileCacheError('%s does not appear to live under %s' %
                                  (path, self._root_directory))
        if os.path.exists(path):
            os.remove(path)
        os.rename(temp_path, path)

    def Remove(self,key):
        # Delete the cache entry for *key*, if present.
        path = self._GetPath(key)
        if not path.startswith(self._root_directory):
            raise _FileCacheError('%s does not appear to live under %s' %
                                  (path, self._root_directory ))
        if os.path.exists(path):
            os.remove(path)

    def GetCachedTime(self,key):
        # The file's mtime doubles as the "time cached" stamp; None if absent.
        path = self._GetPath(key)
        if os.path.exists(path):
            return os.path.getmtime(path)
        else:
            return None

    def _GetUsername(self):
        '''Attempt to find the username in a cross-platform fashion.'''
        try:
            return os.getenv('USER') or \
                os.getenv('LOGNAME') or \
                os.getenv('USERNAME') or \
                os.getlogin() or \
                'nobody'
        except (AttributeError, IOError, OSError), e:
            # os.getlogin() can fail without a controlling terminal.
            return 'nobody'

    def _GetTmpCachePath(self):
        # Per-user cache directory under the system temp directory.
        username = self._GetUsername()
        cache_directory = 'python.cache_' + username
        return os.path.join(tempfile.gettempdir(), cache_directory)

    def _InitializeRootDirectory(self, root_directory):
        # Default to the per-user temp cache path; create it if needed.
        if not root_directory:
            root_directory = self._GetTmpCachePath()
        root_directory = os.path.abspath(root_directory)
        if not os.path.exists(root_directory):
            os.mkdir(root_directory)
        if not os.path.isdir(root_directory):
            raise _FileCacheError('%s exists but is not a directory' %
                                  root_directory)
        self._root_directory = root_directory

    def _GetPath(self,key):
        # Support both the hashlib-style md5(key) and the legacy
        # md5-module md5.new(key) APIs.
        try:
            hashed_key = md5(key).hexdigest()
        except TypeError:
            hashed_key = md5.new(key).hexdigest()
        return os.path.join(self._root_directory,
                            self._GetPrefix(hashed_key),
                            hashed_key)

    def _GetPrefix(self,hashed_key):
        # First DEPTH hex digits of the hash become nested directory names.
        return os.path.sep.join(hashed_key[0:_FileCache.DEPTH])
| Python |
import socket
import struct
import time
import select
import os
# Multicast group and UDP port this announcer speaks on.
MULTICAST_GROUP_ADDR="239.255.42.42"
UDP_PORT=10042
# Largest datagram accepted by a single recvfrom() call.
MAX_PACKET=2048
# Protocol magic prefixing every packet.
MAGIC_BYTES="FCst"
TICK_TIME=3.0 # seconds
# A node unseen for longer than this is marked down.
MAX_NODE_AGE=12.0 # seconds
class NodeInfo:
    """Record of a single node seen on the multicast group."""
    def __init__(self):
        self.nodeid = "not set yet"  # unique node id (see NodeMetadata.newid)
        self.lastseen = 0  # epoch
        self.addr = '0.0.0.0'  # last IP address the node announced from
        self.isme = 0  # 1 only on the record describing this process
        self.up = 1  # cleared by MultiAnnouncer.expirenodes() when stale
class NodeMetadata:
# nodeid
def __init__(self):
self.nodeid="not set yet"
self.nodesById = { }
self.nodesByAddr = { }
pass
def startup(self):
# Load the node id from a file...
try:
f = open("nodeid.dat")
self.nodeid = f.read()
f.close
except IOError:
self.newid()
# Chop the \n off the nodeid, if present
if (self.nodeid[-1] == "\n"):
self.nodeid = self.nodeid[0:-1]
print "Node ID=[%s]" % self.nodeid
# Add our own node.
mynode = NodeInfo()
mynode.isme=1
mynode.nodeid=self.nodeid
self.addNode(mynode)
def newid(self):
p = os.popen("uuidgen")
self.nodeid = p.read()
p.close()
f = open("nodeid.dat", mode="w")
f.write(self.nodeid)
f.close()
def addNode(self,node):
self.nodesByAddr[node.addr] = node
self.nodesById[node.nodeid] = node
# Module-level singleton shared by MultiAnnouncer and main().
metadata = NodeMetadata()
class MultiAnnouncer:
def makesock(self):
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, 0)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR,1)
sock.bind(('', UDP_PORT))
mreq = struct.pack("4sl", socket.inet_aton(MULTICAST_GROUP_ADDR), socket.INADDR_ANY)
sock.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)
self.sock = sock
def run(self):
while (1):
for i in xrange(1,4):
self.sendpacket()
self.tickwait()
self.expirenodes()
def tickwait(self):
tickstart = time.time()
tickend = tickstart + TICK_TIME
now = tickstart
while (now < tickend):
(readysocks,junk1,junk2) = select.select( [self.sock], [], [], (tickend - now))
if (len(readysocks) > 0):
self.handlepacket()
now = time.time()
def sendpacket(self):
data=":".join(
[MAGIC_BYTES,"hello",metadata.nodeid])
flags=0
self.sock.sendto(data, flags, (MULTICAST_GROUP_ADDR, UDP_PORT))
def handlepacket(self):
(data,addr) = self.sock.recvfrom(MAX_PACKET)
# Check magic.
if (data[0:len(MAGIC_BYTES)] != MAGIC_BYTES):
return
# Check colon.
if (data[len(MAGIC_BYTES)] != ':'):
return
# split into fields
(magic, command, cmddata) = data.split(":",3)
# Decide what to do depending on command
if (command == 'hello'):
self.handlehello(addr, cmddata)
# unknown cmd
def handlehello(self,addr,cmddata):
(ipaddr,port) = addr
nodeid = cmddata
#if (nodeid == metadata.nodeid) :
# # It's us.
# return
# Find the node...
if (nodeid in metadata.nodesById):
# Already got it.
# Update IP address and last seen time
existingnode = metadata.nodesById[nodeid]
if (existingnode.addr != ipaddr):
print "Node %s changed IP address" % nodeid
# Update IP addr
# Remove old pointer
del metadata.nodesByAddr[existingnode.addr]
existingnode.addr = ipaddr
# add new pointer
metadata.nodesByAddr[ipaddr] = existingnode
# Update last seen time
existingnode.lastseen = time.time()
existingnode.up = 1
else:
newnode = NodeInfo()
newnode.nodeid = nodeid
newnode.addr = ipaddr
newnode.lastseen = time.time()
metadata.addNode(newnode)
print "New node: %s" % nodeid
def expirenodes(self):
now = time.time()
totalnodes=0
upnodes=0
for id in metadata.nodesById:
node = metadata.nodesById[id]
if (id == metadata.nodeid):
# it's me
pass
else:
age = now - node.lastseen
# check expiry
if (age > MAX_NODE_AGE):
# Mark node down.
node.up=0
totalnodes = totalnodes + 1
if (node.up):
upnodes = upnodes + 1
print "Nodes up: %d / %d " % (upnodes, totalnodes)
def main():
    """Initialize node metadata, then run the announcer forever."""
    metadata.startup()
    announcer = MultiAnnouncer()
    announcer.makesock()
    announcer.run()
if __name__ == '__main__':
main()
| Python |
#!/usr/bin/env python
#-*- coding:utf-8 -*-
#
# Copyright (C) 2010 - Jens Knutson <jens.knutson at gmail dot com>
# This software is licensed under the GNU General Public License
# version 3 or later (see the file COPYING).
import os
import fluidity
from distutils.core import setup
from textwrap import dedent
# Install locations, relative to the installation prefix (e.g. /usr).
DATA_DIR = os.path.join("share", "fluidity")
DOCS_DIR = os.path.join("share", "doc", "fluidity-" + fluidity.__version__)
def build_data_files():
    """Assemble setup()'s data_files list: desktop file, docs, misc data, icons."""
    files = [(os.path.join("share", "applications"), ["fluidity.desktop"]), ]
    docs = ['AUTHORS', 'ChangeLog', 'FAIL-METER', 'LICENSE', 'NEWS',
            'COPYING', 'README', 'INSTALL', 'THANKS']
    files.append((DOCS_DIR, docs))
    # Everything in ./data ships verbatim into the shared data dir.
    files.append((DATA_DIR, [os.path.join('data', fname)
                             for fname in os.listdir('data')]))
    for size in ('16', '24', '32', '48'):
        files.append(
            ('share/icons/hicolor/{0}x{0}/apps'.format(size),
             ['icons/hicolor/{0}x{0}/apps/fluidity.png'.format(size)]))
    return files
setup(
name = 'Fluidity',
description = "Black belt GTD for Linux",
long_description = (dedent("""\
Black belt GTD for Linux
Before you get your hopes up, I should warn you, Fluidity is not
really for The Cool Kids:
* It's not Web-based (2.0 or otherwise). At all.
* It doesn't sync with Remember The Milk.
* It doesn't support arbitrary tagging.
* It doesn't do Javascript plugins.
* It doesn't integrate with Twitter.
On the other hand, if...:
* you are serious about getting to the "mind like water" state,
through a complete, painless-as-possible GTD system
* you have ever spent an hour or more trying to process ALL your
inboxes (like the sign says, this is for *black belts*, people!)
* you have gotten frustrated with the nitpicky details of shuffling
projects between your Active and Someday/Maybe lists while still
keeping your system current and air-tight
* you have actually read/listened to all of David Allen's "Getting
Things Done" and have a reasonable understanding of it, or are
working on getting there (if you don't understand what a project
or next action really mean in GTD terms, Fluidity might seem a bit
overwhelming.)
...then Fluidity might just be what you're looking for. If you like it,
or have any constructive feedback, I would love to hear it.""")),
url = 'http://fluidity.googlecode.com',
author = 'Jens Knutson',
author_email = 'jens.knutson@gmail.com',
license = "GPLv3+",
version = fluidity.__version__,
keywords = "GTD gnome gtk pygtk productivity office organization",
classifiers = ['Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: POSIX',
'Operating System :: Unix',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Desktop Environment :: File Managers',
'Topic :: Desktop Environment :: Gnome',
'Topic :: Office/Business',],
packages = ['fluidity', 'fluidity.ui', 'fluidity.tools'],
scripts = ["bin/fluidity", "bin/slider"],
provides = ["fluidity"],
data_files = build_data_files()
)
| Python |
#!/usr/bin/env python
#-*- coding:utf-8 -*-
#
# Copyright (C) 2009 - Jens Knutson <jens.knutson at gmail dot com>
# This software is licensed under the GNU General Public License
# version 3 or later (see the file COPYING).
__author__ = 'Jens Knutson'
import os
# Development convenience: when a .pathhack.py file is present in the
# working directory, load it before importing the real application --
# presumably to adjust sys.path.  TODO(review): confirm intent; the
# module is imported only for its side effects.
if os.path.exists('.pathhack.py'):
    import imp
    imp.load_module('pathhack', *imp.find_module('.pathhack'))
from fluidity import main_app
main_app.run()
| Python |
#!/usr/bin/env python
#-*- coding:utf-8 -*-
#
# Copyright (C) 2010 - Jens Knutson <jens.knutson at gmail dot com>
# This software is licensed under the GNU General Public License
# version 3 or later (see the file COPYING).
"""Misc. functions for use throughout Fluidity."""
from __future__ import absolute_import, division, print_function
__author__ = 'Jens Knutson'
import datetime
import os
import string #pylint: disable-msg:W0402
from fluidity import defs
def format_for_dict_key(orig_string):
    """Return a cleaned up version of `orig_string` which will work as a dict key.

    In line with Python's rules & guidelines about dict key naming, orig_string
    is stripped of everything but lowercase ascii characters a-z & digits 0-9,
    with spaces converted to underscores.  A result starting with a digit is
    prefixed with an underscore.
    """
    new = str(orig_string).decode('ascii', 'ignore').lower()
    new = "".join([i for i in new if i in defs.SANITARY_CHARS])
    new = new.replace(' ', '_')
    # Bug fix: guard against an empty result -- new[0] previously raised
    # IndexError when every character was stripped out.
    new = "_" + new if new and new[0] in string.digits else new
    return new
def invert_dict(orig):
    """Return a new dict mapping each value of *orig* back to its key."""
    return dict((value, key) for key, value in orig.items())
def log_line(message, msg_datetime=None, path=defs.LOG_FILE_PATH, debug=False):
    """Append *message* to the log file at *path* with a timestamp prefix.

    Debug messages are written only for the developers' own accounts.
    Not intended for recording tracebacks; that's what ABRT is for. ;P
    """
    # Debug-only messages are skipped for everyone but the developers.
    if debug and os.getenv('USER') not in ('jknutson', 'jensck'):
        return
    stamp_source = msg_datetime if msg_datetime else datetime.datetime.now()
    # Drop the microseconds from the timestamp.
    timestamp = str(stamp_source).split('.')[0]
    with open(path, 'a') as log_file:
        log_file.write(timestamp + " -- " + message + "\n")
def validate_paths():
    """Ensure every Fluidity data folder exists, creating any that are missing.

    defs.ALL_DATA_FOLDERS must list the main project-support folder first so
    it exists before its children are created.
    """
    for folder in defs.ALL_DATA_FOLDERS:
        if os.path.exists(folder):
            continue
        os.mkdir(folder)
        if not os.path.exists(folder):
            # FIXME: LAME! Should define a proper exception instead
            raise Exception("Could not create folder {0}".format(folder))
| Python |
#!/usr/bin/env python
#-*- coding:utf-8 -*-
#
# Copyright (C) 2009 - Jens Knutson <jens.knutson at gmail dot com>
# This software is licensed under the GNU General Public License
# version 3 or later (see the file COPYING).
from __future__ import absolute_import, division, print_function
__author__ = 'Jens Knutson'
try:
import cPickle as pickle
except ImportError:
import pickle
import os
import shutil
import sys
import time
import yaml
from textwrap import dedent
ORIGINAL_WORKING_DIR = os.path.abspath(os.curdir)
# no idea why these are both needed, but I have seen both of them break,
# depending on the situation. bah.
sys.path.append("/home/jensck/workspace/Fluidity")
import fluidity.defs as defs
import fluidity.gee_tee_dee # pylint: disable-msg=W0611
# Base name of the converter's default input/output files.
DEFAULT_FILE_BASE_NAME = 'fity_data'
DEFAULT_YAML_FNAME = DEFAULT_FILE_BASE_NAME + '.yaml'
DEFAULT_PKL_FNAME = DEFAULT_FILE_BASE_NAME + '.pkl'
# Default paths in the directory the script was launched from.
# (Python 2 only: the bare tuple after 'in' is a syntax error on Python 3.)
DEFAULT_YAML_PATH, DEFAULT_PKL_PATH = [os.path.join(ORIGINAL_WORKING_DIR, name)
                                       for name in
                                       DEFAULT_YAML_FNAME, DEFAULT_PKL_FNAME]
COWARDLY_REFUSAL = ("{0} found in current folder: "
                    "cowardly refusing to overwrite it.")
USAGE_MESSAGE = dedent("""\
Usage:
fity_data_debug_converter.py [fity_data.ext]
If fity_data.ext is a yaml file, you'll get back a pkl in your current working
directory, or vice versa.
SPECIAL CASES:
If you feed it a yaml file, and $HOME/.local/share/fluidity/fluidity.pkl
doesn't exist, the resulting pickle file will get dumped to that latter
path instead of the current working directory.
If you give it no args, you'll get $HOME/.local/share/fluidity/fluidity.pkl
copied as a yaml file to your current working directory (provided there
isn't already one there).
""")
def back_up_fity_data_file():
    """Copy the main data file into the backups folder, stamped with time.time()."""
    stamped_name = defs.USER_DATA_MAIN_FNAME + str(time.time())
    backup_path = os.path.join(defs.BACKUPS_PATH, stamped_name)
    shutil.copy(defs.USER_DATA_MAIN_FILE, backup_path)
def fail():
    """Print usage help, then abort with exit status 1."""
    sys.stdout.write(USAGE_MESSAGE + "\n")
    raise SystemExit(1)
def convert_fity_data_file(orig_path, new_path, delete_original=False):
    """Convert orig_path YAML to pickle or vice-versa, unless new_path exists.

    Args:
        orig_path: source file ending in .yaml or .pkl
        new_path: destination file with the opposite extension
        delete_original: when True, remove orig_path after converting
    """
    if os.path.exists(new_path):
        print(COWARDLY_REFUSAL.format(new_path))
        # Bug fix: previously execution fell through and overwrote
        # new_path despite the refusal message just printed.
        return
    if orig_path.endswith('.yaml') and new_path.endswith('.pkl'):
        with open(orig_path, 'r') as orig_file:
            fity_data = yaml.load(orig_file, Loader=yaml.CLoader)
        with open(new_path, 'w') as new_file:
            pickle.dump(fity_data, new_file, protocol=pickle.HIGHEST_PROTOCOL)
    elif orig_path.endswith('.pkl') and new_path.endswith('.yaml'):
        with open(orig_path, 'r') as orig_file:
            fity_data = pickle.load(orig_file)
        with open(new_path, 'w') as new_file:
            yaml.dump(fity_data, new_file, Dumper=yaml.CDumper,
                      default_flow_style=False)
    else:
        # Extensions don't form a yaml<->pkl pair.
        fail()
    if delete_original:
        os.remove(orig_path)
def main():
    """Decide source/destination paths from argv and run the conversion.

    See USAGE_MESSAGE for the special-case rules.
    """
    # Python 2 only: the bare tuple after 'in' is invalid syntax on Python 3.
    yaml_exists, fity_data_exists = [os.path.exists(p) for p in
                                     DEFAULT_YAML_PATH, defs.USER_DATA_MAIN_FILE]
    orig, new = None, None
    delete_orig = False
    if len(sys.argv) == 1:
        # No args: dump the main data file to ./fity_data.yaml -- but only
        # when there is something to dump and no yaml already in the way.
        if not fity_data_exists or yaml_exists:
            fail()
        else:
            orig = defs.USER_DATA_MAIN_FILE
            new = DEFAULT_YAML_PATH
    elif len(sys.argv) == 2:
        orig = sys.argv[1]
        # File name without its extension.
        base = "".join(orig.split('.')[:-1])
        orig = os.path.join(ORIGINAL_WORKING_DIR, orig)
        if orig.endswith('.yaml'):
            if not fity_data_exists:
                # Special case: install the yaml as the main data pickle
                # and consume the source file.
                new = defs.USER_DATA_MAIN_FILE
                delete_orig = True
            ext = '.pkl'
        else:
            ext = '.yaml'
        if not new:
            new = os.path.join(ORIGINAL_WORKING_DIR, base + ext)
    else:
        fail()
    if fity_data_exists:
        back_up_fity_data_file()
    # print orig, new, delete_orig
    convert_fity_data_file(orig, new, delete_orig)
if __name__ == '__main__':
main() | Python |
#!/usr/bin/python -O
#-*- coding:utf-8 -*-
# ZOMG, a time when -O might not be total rice!
from __future__ import absolute_import, division, print_function, unicode_literals
__author__ = "Jens Knutson"
try:
import cPickle as pickle
except ImportError:
import pickle
import os
import sys
QUERY_STRINGS = ['.', 'nvr', 'doug', 'mcc', 'jason', 'parking', 'camera']
RESULTS_REPORT_PATH = "data_hunter_report.txt"
def find_pickles(root):
    """Yield the path of every .pkl file under *root*, skipping 'singletons' files."""
    for dirpath, _dirnames, filenames in os.walk(root):
        for fname in filenames:
            if '.pkl' in fname and 'singletons' not in fname:
                yield os.path.join(dirpath, fname)
def pull_notes_from_pickle(pkl_path):
    """Yield normalized note summaries from the pickle at *pkl_path*.

    NOTE(review): assumes the pickle holds a dict whose 'single_notes'
    entry is a list of dicts carrying a 'summary' string -- confirm
    against the writer of these files.
    """
    with open(pkl_path, 'r') as pkl:
        notes = pickle.load(pkl)['single_notes']
    # SOOOOooo not in the mood for case sensitivity issues
    return (n['summary'].lower().strip('.') for n in notes)
def search_notes(notes, queries):
    """Return the subset of *notes* containing any of the query substrings.

    Prints one '|' per (note, query) hit as a crude progress meter.
    """
    matched = set()
    for note in notes:
        for query in queries:
            if query in note:
                matched.add(note)
                print("|", end="")
    return matched
def main(search_root):
    """Search every unique pkl under *search_root* for the query strings.

    Deduplicates pickles by basename, pools all note summaries, then
    writes the sorted matches to RESULTS_REPORT_PATH.
    """
    pickles_to_search = []
    basenames = []
    print("Finding pkl files")
    for pklpath in find_pickles(search_root):
        base = os.path.basename(pklpath)
        # Only the first pickle seen with a given file name is searched.
        if base not in basenames:
            pickles_to_search.append(pklpath)
            basenames.append(base)
            print(".", end="")
    print("\nReading in data from pkl files...")
    notes = set()
    for search_pickle in pickles_to_search:
        print(".", end="")
        for pnote in pull_notes_from_pickle(search_pickle):
            notes.add(pnote)
    print("\nNow searching notes...\nMatches found: ", end="")
    results = search_notes(notes, QUERY_STRINGS)
    print("\nDone -- writing report.")
    with open(RESULTS_REPORT_PATH, 'w') as report:
        report.write("\n".join(sorted(results)))
if __name__ == "__main__":
    # Require exactly one argument: the root directory to search.
    if len(sys.argv) != 2:
        sys.exit("fity_data_hunter.py takes 1 and only 1 argument: a root "
                 "path to be searched for pkl files.")
    else:
        # import cProfile
        # cProfile.run("main(sys.argv[1])", 'profile.out')
        main(sys.argv[1])
#!/usr/bin/env python
#-*- coding:utf-8 -*-
#
# Copyright (C) 2010 - Jens Knutson <jens.knutson at gmail dot com>
# This software is licensed under the GNU General Public License
# version 3 or later (see the file COPYING).
"""Resets the age attribute on each Next Action for all active Projects."""
from __future__ import absolute_import, division, print_function, unicode_literals
__author__ = 'Jens Knutson'
try:
import cPickle as pickle
except ImportError:
import pickle
import datetime
from fluidity import defs
def main():
    """Reset the creation date of every incomplete NA in active projects.

    Rewrites the main data pickle in place.
    """
    with open(defs.USER_DATA_MAIN_FILE, 'r') as pkl:
        top_data = pickle.load(pkl)
    for prj in [p for p in top_data['projects'].values() if p.status == 'active']:
        for na in prj.next_actions:
            if not na.complete:
                # Per the module docstring, "age" derives from the creation
                # date, so stamping it with now makes the NA appear new.
                na.creation_date = datetime.datetime.now()
    with open(defs.USER_DATA_MAIN_FILE, 'w') as pkl:
        pickle.dump(top_data, pkl, pickle.HIGHEST_PROTOCOL)
if __name__ == '__main__':
main()
| Python |
#!/usr/bin/env python
from __future__ import absolute_import, division, print_function
# include in this a total of the time estimates!
try:
import cPickle as pickle
except ImportError:
import pickle
import datetime
import operator
import os
import sys
import gtk
from fluidity import defs
from fluidity import gee_tee_dee #pylint: disable-msg=W0611
def copy_to_clipboard(text):
    """Place *text* on the GTK clipboard."""
    clipboard = gtk.clipboard_get()
    clipboard.set_text(text)
    # store() hands the content to the clipboard manager so it can outlive
    # this process -- presumably why it's called here; confirm on target DE.
    clipboard.store()
    clipboard = None
def create_overall_summary(nas_completed_today):
    """Build a text report of completed NAs plus a total time estimate.

    Sorts the list in place by summary, then returns one line per action
    followed by a "Total (estimated) time" line -- or a placeholder string
    when the list is empty.
    """
    nas_completed_today.sort(key=operator.attrgetter('summary'))
    if not nas_completed_today:
        return "(No completed NAs found for this date)"
    lines = []
    total_minutes = 0
    for action in nas_completed_today:
        lines.append("{0} -- {1} mins\n".format(action.summary, action.time_est))
        total_minutes += action.time_est
    leftover = int(total_minutes % 60)
    leftover_text = "" if leftover == 0 else ", {0} mins".format(leftover)
    hours_text = "{0} hours{1}".format(int(total_minutes // 60), leftover_text)
    return "".join(lines) + "Total (estimated) time: " + hours_text
def get_nas_completed_on_date(archived_nas, completion_date):
    """Return the NAs in *archived_nas* completed exactly on *completion_date*."""
    return [na for na in archived_nas if na.completion_date == completion_date]
def get_parsed_date(date):
    """Parse 'y'/'yest'/'yesterday' or a YYYY-MM-DD string into a date.

    Exits the process with an error message when the string doesn't have
    three dash-separated fields.
    """
    if date in ('y', 'yest', 'yesterday'):
        return datetime.date.today() - datetime.timedelta(1)
    pieces = [int(chunk) for chunk in date.split('-')]
    if len(pieces) != 3:
        print("You screwed up, boss. I need dates as YYYY-MM-DD.")
        sys.exit(1)
    year, month, day = pieces
    return datetime.date(year, month, day)
def load_data_files(path):
    """Unpickle and return the object stored at *path*."""
    with open(path, 'r') as pkl:
        return pickle.load(pkl)
def main():
    """Collect NAs completed today (or on the date given as argv[1]) and
    copy a formatted summary of them to the clipboard."""
    if len(sys.argv) == 2:
        today = get_parsed_date(sys.argv[1])
    else:
        today = datetime.date.today()
    # The .format("") yields the archive's base (un-timestamped) file name.
    archived_path = os.path.join(defs.USER_DATA_PATH,
                                 defs.ARCHIVED_SINGLETONS_FNAME.format(""))
    top = load_data_files(defs.USER_DATA_MAIN_FILE)
    archived = load_data_files(archived_path)
    nas_completed_today = []
    # Pool completions from every project plus the archived singletons.
    for prj in top['projects'].values():
        nas_completed_today += get_nas_completed_on_date(prj.next_actions, today)
    nas_completed_today += get_nas_completed_on_date(archived, today)
    na_summaries = create_overall_summary(nas_completed_today)
    copy_to_clipboard(na_summaries)
    #print na_summaries
#!/usr/bin/env python
#-*- coding:utf-8 -*-
#
# Copyright (C) 2010 - Jens Knutson <jens.knutson at gmail dot com>
# This software is licensed under the GNU General Public License
# version 3 or later (see the file COPYING).
from __future__ import absolute_import, division, print_function, unicode_literals
__author__ = 'Jens Knutson'
import cPickle as pickle
import datetime
import os
import sys
import uuid
import yaml
import fluidity
from collections import namedtuple
# (default, expected type, converter) triple describing how one persisted
# attribute should be normalized by _update_obj_attrs().
GTD_NT = namedtuple("GTDDefaults", "default proper_type convert_func")
# Attributes shared by NextActions and Projects.  Date-like values stored
# as ordinals are rebuilt with the matching from-ordinal constructor.
# (Python 2 only: these tables use the `basestring` builtin.)
_DEFAULTS = {
    '_completion_date': GTD_NT(None, datetime.datetime,
                               datetime.datetime.fromordinal),
    '_creation_date': GTD_NT(datetime.datetime(2010, 1, 1, 0, 0), datetime.datetime,
                             datetime.datetime.fromordinal),
    '_due_date': GTD_NT(None, datetime.date, datetime.date.fromordinal),
    '_queue_date': GTD_NT(None, datetime.date, datetime.date.fromordinal),
    '_priority': GTD_NT(2, int, None)}
# NextAction-specific attributes.
NA_DEFAULTS = {'_complete': GTD_NT(False, bool, None),
               '_context': GTD_NT("", basestring, None),
               '_energy_est': GTD_NT(1, int, None),
               '_notes': GTD_NT(None, basestring, None),
               '_time_est': GTD_NT(10.0, float, float),
               '_url': GTD_NT(None, basestring, None)}
# Project-specific attributes.
PRJ_DEFAULTS = {
    '_aofs': GTD_NT([], list, None),
    '_incubating_next_actions': GTD_NT([], list, None),
    '_next_actions': GTD_NT([], list, None),
    '_status': GTD_NT('active', str, None),
    '_subprojects': GTD_NT([], list, None),
    '_waiting_for_since': GTD_NT(None, datetime.date,
                                 datetime.date.fromordinal),
    'waiting_for_text': GTD_NT(None, str, None)}
# Both specific tables also include the shared defaults.
NA_DEFAULTS.update(_DEFAULTS)
PRJ_DEFAULTS.update(_DEFAULTS)
def dump(data, path, overwrite=False):
    """Serialize *data* to *path*; the extension ('.yaml' or '.pkl') picks the format.

    Raises IOError when *path* exists and *overwrite* is False.
    """
    ext = os.path.splitext(path)[1].strip('.')
    if os.path.exists(path) and not overwrite:
        raise IOError("File already exists; cowardly refusing to overwrite")
    writers = {
        'yaml': lambda d, s: yaml.dump(d, s, yaml.CDumper, default_flow_style=False),
        'pkl': lambda d, s: pickle.dump(d, s, protocol=pickle.HIGHEST_PROTOCOL)}
    print("preparing to dump file in format {0}...".format(ext))
    with open(path, 'w') as dfile:
        writers[ext](data, dfile)
def get_dump_path(orig_path):
    """Map a .yaml path to its .pkl twin, or vice versa."""
    base, ext = os.path.splitext(orig_path)
    flipped = {'.yaml': '.pkl', '.pkl': '.yaml'}
    return base + flipped[ext]
def load(path):
    """Deserialize and return the object at *path* ('.yaml' or '.pkl')."""
    ext = os.path.splitext(path)[1].strip('.')
    readers = {'yaml': lambda stream: yaml.load(stream, Loader=yaml.CLoader),
               'pkl': lambda stream: pickle.load(stream)}
    with open(path, 'r') as lfile:
        return readers[ext](lfile)
def update_attrs(top):
    """Normalize attributes on every NA and project in the *top* data dict.

    Applies NA_DEFAULTS to queued singletons and each project's active +
    incubating next actions, and PRJ_DEFAULTS to the projects themselves.
    """
    for na in top['queued_singletons']:
        _update_obj_attrs(na, NA_DEFAULTS)
    print("Done processing queued_singletons...")
    for i, prj in enumerate(top['projects'].values()):
        # Progress marker every 30 projects.
        if not i % 30:
            print("Currently processing prj #{0}".format(i))
        _update_obj_attrs(prj, PRJ_DEFAULTS)
        big_na_list = prj.next_actions + prj.incubating_next_actions
        for na in big_na_list:
            _update_obj_attrs(na, NA_DEFAULTS)
def _update_obj_attrs(obj, defaults):
    """Coerce or default each attribute of *obj* named in *defaults*.

    Existing truthy values of the wrong type are converted with the
    tuple's convert_func; missing attributes get the recorded default.
    """
    for attr, ntuple in defaults.items():
        if hasattr(obj, attr):
            value = obj.__getattribute__(attr)
            if value and not isinstance(value, ntuple.proper_type):
                obj.__setattr__(attr, ntuple.convert_func(value))
        else:
            obj.__setattr__(attr, ntuple.default)
    # and give it a UUID
    # NOTE(review): placed after the attribute loop so each object is
    # stamped once -- confirm against the original (flattened) indentation.
    _uuid_me(obj)
def _uuid_me(obj):
"""UUID me.
You want me to.... give you a UUID?
UUID me.
Uhm, I'm sorry sir, I'm not sur...
UUID me.
"""
obj.__setattr__('uuid', str(uuid.uuid4()))
def main():
    """Load the file named in argv[1], normalize its attrs, and dump the
    opposite format next to it."""
    if len(sys.argv) < 2:
        sys.exit("I need a file to operate on...")
    else:
        path = sys.argv[1]
    # Resolve relative paths against the current working directory.
    path = path if path.startswith('/') else os.path.realpath(path)
    top_data = load(path)
    update_attrs(top_data)
    dump(top_data, get_dump_path(path))
    print("DONE.")
if __name__ == '__main__':
main()
| Python |
#-*- coding:utf-8 -*-
#
# Copyright (C) 2009 - Jens Knutson <jens.knutson at gmail dot com>
# This software is licensed under the GNU General Public License
# version 3 or later (see the file COPYING).
#pylint: disable-msg=W0201
"""Collection of "manager" classes, which handle disparate aspects of Fluidity."""
from __future__ import absolute_import, division, print_function
__author__ = 'Jens Knutson'
try:
import cPickle as pickle
except ImportError:
import pickle
import datetime
import glob
import operator
import os
import shutil
import time
import gio
import gobject
import pango
import yaml
from kiwi.ui.objectlist import Column
from xdg import BaseDirectory
from fluidity import defs
from fluidity import gee_tee_dee
from fluidity import inbox_items
from fluidity import magic_machine
from fluidity import utils
from fluidity.first_time import FirstTimeBot
from fluidity.note import ProjectNote
class DataManager(object):
def __init__(self):
    # Path of the main pickled data file holding the whole GTD data set.
    self.pickle_path = defs.USER_DATA_MAIN_FILE
    #first, make sure we have our data file - if not, invoke FirstTimeBot
    if (not os.path.exists(self.pickle_path) or
            not os.path.exists(defs.NOTE_SLIDER_FOLDER)):
        bot = FirstTimeBot()
        bot.create_initial_files_and_paths()
        del(bot) # Thank you for your service, bot. Rest in peace.
    try:
        with open(self.pickle_path, 'r') as pfile:
            self.top_data = pickle.load(pfile)
    except EOFError:
        # probably the main app in the middle of saving its file.
        # Wait a couple seconds, then try again.
        time.sleep(2)
        # If it _still_ fails, something is really screwed - not
        # accommodating this, at least not yet.
        with open(self.pickle_path, 'r') as pfile:
            self.top_data = pickle.load(pfile)
    # Convenience aliases into top_data's sub-structures.
    self.aofs = self.top_data['areas_of_focus']
    self.prjs = self.top_data['projects']
    self.single_notes = self.top_data['single_notes']
    self.queued_singletons = self.top_data['queued_singletons']
    # I would have called it _file_lickspittle, but that's too verbose
    # even for *me*.
    self._file_toady = FileSystemManager()
    self._magic_maker = magic_machine.MagicMachine()
    self.rebuild_aof_cache()
# PUBLIC METHODS
def activate_due_queued(self):
    """Promote queued projects and singleton NAs whose queue date has arrived."""
    utils.log_line("Running activate_due_queued()", datetime.datetime.now())
    today = datetime.date.today()
    for p in self.prjs:
        prj = self.prjs[p]
        if prj.status == "queued":
            # FIXME: if the status is queued, we should /always/ have a
            # queue date. What's the fallback?
            if prj.queue_date and today >= prj.queue_date:
                self.change_project_status(prj, "active")
    # Bug fix: iterate over a copy -- removing from queued_singletons while
    # iterating it (as before) silently skipped the element after each hit.
    for na in list(self.queued_singletons):
        if na.queue_date <= today:
            self.prjs['singletons'].next_actions.append(na)
            self.queued_singletons.remove(na)
def activate_nas(self, nas, prj_key):
    """Move the given NextActions to the Project's next_actions list"""
    project = self.prjs[prj_key]
    for action in nas:
        project.next_actions.append(action)
        project.incubating_next_actions.remove(action)
def add_na_to_prj(self, na, prj_key):
    """Append *na* to the named project's active next-action list."""
    target = self.prjs[prj_key]
    target.next_actions.append(na)
def add_queued_singleton(self, na):
    """Register a queued singleton NextAction, then persist the data file."""
    queued = self.queued_singletons
    queued.append(na)
    self.save_data()
#FIXME: total crap. fix later.
    def archive_completed_singletons(self):
        """Move completed singleton NAs out of the live data and into the
        archive pickle.

        Reads the existing archive (if any), makes a timestamped backup copy
        of it, appends today's completed singletons, rewrites the archive,
        and saves the main data file.
        """
        # the .format("") below is on purpose - look at the path for
        # defs.USER_DATA_PATH in your filesystem, it'll make more sense.
        pkl_path = os.path.join(defs.USER_DATA_PATH,
                                defs.ARCHIVED_SINGLETONS_FNAME.format(""))
        try:
            with open(pkl_path, 'r') as pkl_read:
                nas_to_archive = pickle.load(pkl_read)
            now = datetime.datetime.now().strftime(
                defs.ARCHIVED_SINGLETONS_TIME_TMPLT)
            # back up the old data file, just in case...
            backup_file_name = defs.ARCHIVED_SINGLETONS_FNAME.format(now)
            shutil.copy2(pkl_path, os.path.join(defs.BACKUPS_PATH, backup_file_name))
        except IOError:
            # no archive file yet -- start a fresh one
            nas_to_archive = []
        singletons = self.prjs['singletons'].next_actions
        # collect first, then remove -- avoids mutating the list mid-iteration
        for na in singletons:
            if na.complete:
                nas_to_archive.append(na)
        for na in nas_to_archive:
            if na in singletons:
                singletons.remove(na)
        with open(pkl_path, 'wb') as pkl_write:
            pickle.dump(nas_to_archive, pkl_write, pickle.HIGHEST_PROTOCOL)
        self.save_data()
def autosave(self):
# FIXME: ZOMG this is so ghetto-tastic. fix it. srsly.
self.save_data()
return True
def change_project_status(self, prj, new_status, queue_date=None):
self._file_toady.move_project_folder(prj.summary, prj.status, new_status)
prj_ = prj
note = ProjectNote(prj=prj_)
note.change_prj_status(new_status)
if new_status == "queued":
prj.queue_date = queue_date
prj.status = new_status
self.save_data()
def cleanup_before_exit(self):
self.save_data()
def copy_to_project_folder(self, file_name, prj):
self._file_toady.copy_to_project_folder(file_name, prj.summary, prj.status)
def create_new_aof(self, new_name):
key_name = utils.format_for_dict_key(new_name)
self.aofs[key_name] = {'name': new_name, 'projects': []}
self.rebuild_aof_cache()
self.save_data()
return self.aofs
def delete_na(self, na, prj):
prj.next_actions.remove(na)
self.save_data()
def delete_prj(self, prj):
utils.log_line("Deleting project: " + str(prj), datetime.datetime.now())
# trash the folders first
self._file_toady.trash_project_folder(prj.summary, prj.status)
# then ditch the project notes
prj_ = prj
ProjectNote(prj=prj_).delete()
#this is /almost certainly/ The Hard Way...
for a in self.aofs.keys():
matches = []
# Welcome to my entry in the "Obfuscated Python" contest!
for p in xrange(len(self.aofs[a]['projects'])):
if self.aofs[a]['projects'][p] == prj.key_name:
matches.append({'aof': a, 'p_index': p})
for i in matches:
del(self.aofs[i['aof']]['projects'][i['p_index']])
del(self.prjs[prj.key_name])
self.save_data()
def delete_stuff_note(self, note_obj):
DUHLETED = False
i = 0
while not DUHLETED and i < len(self.single_notes):
if self.single_notes[i]['summary'] == note_obj.summary:
del(self.single_notes[i])
DUHLETED = True
i += 1
    def dump_processed_stuff_notes(self, stuff_list):
        """Pickle the processed non-file stuff items to a timestamped file,
        then immediately send that file to the trash.

        NOTE(review): the dump-then-trash sequence presumably exists so the
        data survives (recoverably) without cluttering the data dir — confirm.
        """
        # cull out the InboxFile items - unneeded.
        real_list = []
        for stuff in stuff_list:
            if not isinstance(stuff, inbox_items.InboxFile):
                real_list.append(stuff)
        processed_path = \
            os.path.join(defs.USER_DATA_PATH,
                         defs.PROCESSED_STUFF_FILE_NAME + str(time.time()))
        with open(processed_path, 'wb') as pfile:
            pickle.dump(real_list, pfile, pickle.HIGHEST_PROTOCOL)
        gf = gio.File(processed_path)
        gf.trash()
def file_stuff_as_read_review(self, stuff, rr_path):
stuff_path = os.path.split(stuff.path)[1]
shutil.move(stuff.path, os.path.join(rr_path, stuff_path))
def get_contexts(self):
contexts = []
for pk in self.prjs.keys():
p = self.prjs[pk]
if p.status == "active":
for na in p.next_actions:
if na.context != "" and na.context != None:
if not na.context in contexts:
contexts.append(na.context)
contexts.sort()
return contexts
def get_file_list_for_prj(self, prj):
return self._file_toady.get_file_list_for_prj(prj.summary, prj.status)
def get_inbox_files(self):
hiddens = os.path.join(defs.INBOX_FOLDER, ".hidden")
if os.path.exists(hiddens):
with open(hiddens, 'r') as dot_hidden:
hidden = dot_hidden.read()
else:
hidden = ""
hidden += "\n".join(defs.IGNORED_INBOX_PATHS)
for file_ in os.listdir(defs.INBOX_FOLDER):
if file_ not in hidden and not file_.startswith('.'):
yield inbox_items.InboxFile(os.path.join(defs.INBOX_FOLDER,
file_))
def get_inbox_notes(self):
return self.single_notes
def get_na_for_each_active_prj(self):
active_nas = []
for p in self.prjs.keys():
prj = self.prjs[p]
if prj.status == "active" and prj.summary != 'singletons':
for na in prj.next_actions:
if not na.complete:
active_nas.append(na)
break
for na in self.prjs['singletons'].next_actions:
if not na.complete:
active_nas.append(na)
return active_nas
def get_nas_for_prj(self, prj_key):
try:
return self.prjs[prj_key].next_actions
except AttributeError:
return None
def get_prj_aof_names(self, prj):
aof_list = []
if len(prj.aofs) == 0:
aof_list.append(defs.NO_AOF_ASSIGNED)
else:
for a in prj.aofs:
aof_list.append(self.aofs[a]['name'])
return sorted(aof_list)
def get_prjs_by_aof(self, area, review_filter):
prj_list = []
# "incomplete" is just used by Slider, so far"
if review_filter == "incomplete":
for p in sorted(self.prjs.keys()):
prj = self.prjs[p]
if prj.status != "completed":
prj_list.append(prj)
else:
if area == "All":
[prj_list.append(prj) for prj in self.prjs.values()
if prj.status == review_filter]
elif area == defs.NO_AOF_ASSIGNED:
for p in sorted(self.prjs.keys()):
prj = self.prjs[p]
if prj.status == review_filter and len(prj.aofs) == 0:
prj_list.append(prj)
else:
area_key = utils.format_for_dict_key(area)
if self.aofs[area_key]['projects']:
prj_keys = self.aofs[area_key]['projects']
# FIXME: this is hideous.
[prj_list.append(prj) for prj in self.prjs.values()
if prj.status == review_filter
and prj.key_name in prj_keys]
return sorted(prj_list, key=operator.attrgetter('summary'))
def get_project_folder_uri(self, prj):
return self._file_toady.get_project_folder_uri(prj.summary, prj.status)
def incubate_nas(self, nas, prj_key):
"""Move the given NextActions to the Project's incubating_next_actions."""
for na in nas:
self.prjs[prj_key].incubating_next_actions.append(na)
self.prjs[prj_key].next_actions.remove(na)
def queue_singleton_na(self, na, queue_date_str):
try:
self.prjs['singletons'].next_actions.remove(na)
na.queue_date = self._magic_maker.get_magic_date(queue_date_str)
self.add_queued_singleton(na)
except ValueError:
# don't freak out if someone tries queuing a NA that isn't in singletons
pass
def rebuild_aof_cache(self):
for aof in self.aofs:
del(self.aofs[aof]['projects'][:])
for prj in self.prjs.keys():
for aof_key in self.prjs[prj].aofs:
if prj not in self.aofs[aof_key]['projects']:
self.aofs[aof_key]['projects'].append(prj)
def remove_file_from_prj(self, file_name, prj):
self._file_toady.remove_from_project_folder(file_name, prj.summary,
prj.status)
def reparent_project(self, prj, new_parent):
"""Make `new_parent` the parent object of `prj`."""
new_parent.subprojects.append(prj.uuid)
prj.parent_project = new_parent.uuid
def save_data(self):
# utils.log_line("Saving main data file.", datetime.datetime.now())
backup_path = os.path.join(defs.BACKUPS_PATH,
defs.USER_DATA_MAIN_FNAME + str(time.time()))
shutil.copy(self.pickle_path, backup_path)
with open(self.pickle_path, 'wb') as pfile:
pickle.dump(self.top_data, pfile, pickle.HIGHEST_PROTOCOL)
return True
def search(self, query, include_completed=False, include_nas=False):
query = query.lower()
formatter = lambda x: "<b>{0}</b>".format(x) # pylint: disable-msg=W0108
results = []
for prj in self.prjs.values():
if include_nas and (include_completed or prj.status != 'completed'):
for na in prj.next_actions:
score = magic_machine.score(na.summary, query)
if score > 0.4:
# fuck me, this is ugly: "flat is better than nested."
summary_formatted = magic_machine.format_common_substrings(
na.summary, query,
format_match=formatter)
results.append(
SearchResult(na.summary, summary_formatted,
prj.key_name, score, na.uuid))
if include_completed:
score = magic_machine.score(prj.summary, query)
if score > 0.4:
formatted = magic_machine.format_common_substrings(
prj.summary, query, format_match=formatter)
results.append(SearchResult(prj.summary, formatted,
prj.key_name, score))
else:
if prj.status != 'completed':
score = magic_machine.score(prj.summary, query)
if score > 0.4:
formatted = magic_machine.format_common_substrings(
prj.summary, query,
format_match=formatter)
results.append(SearchResult(prj.summary, formatted,
prj.key_name, score))
results.sort(key=operator.attrgetter('score'), reverse=True)
return results
def set_prj_aofs(self, prj, aof_text):
if aof_text == defs.NO_AOF_ASSIGNED:
del(prj.aofs[:])
else:
for aof in self._parse_aof_text(aof_text):
del(prj.aofs[:])
if prj.key_name not in self.aofs[aof]['projects']:
self.aofs[aof]['projects'].append(prj.key_name)
prj.aofs.append(aof)
self.save_data()
# return self.get_prj_aof_names(prj)
def take_slider_items(self, na_list, note_list, queued_list):
self._take_these_fucking_nas(na_list)
self._take_these_fucking_notes(note_list)
self._take_these_fucking_queues(queued_list)
#Confirm that we made it to the step of saving
return self.save_data()
def _take_these_fucking_nas(self, na_list):
na_objs = []
for na_file in na_list:
na_objs.append(self._ploader(na_file))
for na in na_objs:
self.prjs[na['prj_key']].next_actions.append(na['na_obj'])
def _take_these_fucking_notes(self, note_list):
note_objs = []
for note in note_list:
note_objs.append(self._ploader(note))
for notey in note_objs:
self.single_notes.append(notey)
def _take_these_fucking_queues(self, queued_list):
q_objs = []
for q_file in queued_list:
q_objs.append(self._ploader(q_file))
for obj in q_objs:
self.queued_singletons.append(obj['na_obj'])
self.activate_due_queued()
def _parse_aof_text(self, atext):
if atext == '':
return [utils.format_for_dict_key(defs.NO_AOF_ASSIGNED)]
else:
return [utils.format_for_dict_key(atext)]
def _ploader(self, pfile_path):
with open(pfile_path, 'r') as pfile:
pcontent = pickle.load(pfile)
return pcontent
# PROPERTIES
def _get_aof_names(self):
# FIXME: now that I think about it... this is a stupid idea. change this
# from a property to a regular method.
names = []
for i in self.aofs.keys():
names.append(self.aofs[i]['name'])
return names
aof_names = property(_get_aof_names)
def engage_na_deleter(self, uuid):
"""Find the NA with the UID of uid arg, and delete it."""
for prj in self.prjs.values():
# only look at active projects, since this is for Engage
if prj.status == "active":
for na in prj.next_actions:
if na.uuid == uuid:
prj.next_actions.remove(na)
return True
# uh-oh. we REALLY shouldn't have gotten here.
# FIXME: this ought to throw an exception, really
return False
class FileSystemManager(object):
    """Filesystem manager for Fluidity"""
    def __init__(self):
        pass
    def copy_to_project_folder(self, fname, prj_summary, prj_status):
        """Copy (or, for inbox items, move) `fname` into the project's folder.

        Returns False if the project folder couldn't be created, True otherwise.
        """
        full_path = self._get_path_for_type(prj_status) + \
            self._sanitize_path(prj_summary)
        # Does the project folder exist yet? If not, create it. If that fails,
        # return False right away.
        if not os.path.exists(full_path):
            # try creating the right folder. if it fails, return False
            if not self._create_project_folder(full_path):
                return False
        if fname.startswith('/'):
            base_name = os.path.split(fname)[1]
        else:
            base_name = fname
        # We got this far; now we can try the copy or move operation - which
        # path will need to depend on if fname is a folder or not
        if os.path.isdir(fname):
            if fname.startswith(defs.INBOX_FOLDER):
                shutil.move(fname, os.path.join(full_path, base_name))
            else:
                shutil.copytree(fname, os.path.join(full_path, base_name))
        else:
            if fname.startswith(defs.INBOX_FOLDER):
                # more Evil(TM)... to be fixed with the signals rewrite
                try:
                    shutil.move(fname, os.path.join(full_path, base_name))
                except IOError:
                    # this might have "completed processing" already,
                    # so maybe it's in the trash...
                    base_name = os.path.split(fname)[1]
                    trash_path = BaseDirectory.xdg_data_home + "/Trash/files"
                    fname = os.path.join(trash_path, base_name)
                    shutil.move(fname, os.path.join(full_path, base_name))
            else:
                shutil.copy(fname, os.path.join(full_path, base_name))
        return True
    def move_project_folder(self, prj_summary, old_status, new_status):
        """Relocate a project's folder when its status (parent dir) changes."""
        sanitized_summary = self._sanitize_path(prj_summary)
        full_path = self._get_path_for_type(old_status) + sanitized_summary
        new_path = self._get_path_for_type(new_status) + sanitized_summary
        if os.path.exists(full_path):
            if full_path != new_path:
                shutil.move(full_path, new_path)
    def remove_from_project_folder(self, fname, prj_summary, prj_status):
        """Send `fname` (inside the project's folder) to the trash via gio."""
        full_path = os.path.join(self._get_path_for_type(prj_status),
                                 self._sanitize_path(prj_summary), fname)
        gf = gio.File(full_path)
        gf.trash()
        gf = None
        del(gf)
    def get_project_folder_uri(self, prj_summary, prj_status, create=True):
        """Return a file:// URI for the project's folder.

        Returns "" if the folder had to be created and creation failed.
        """
        # this method assumes that if you're asking for the URI, you must want
        # there to be a prj folder, so if there isn't one yet, just make one.
        # However, if you don't want that, just set 'create' to False
        full_path = self._get_path_for_type(prj_status) + \
            self._sanitize_path(prj_summary)
        if create:
            if not os.path.exists(full_path):
                # try creating the right folder. if it fails, return False
                if not self._create_project_folder(full_path):
                    return ""
        uri = "file://" + full_path
        return uri
    def get_file_list_for_prj(self, prj_summary, prj_status):
        """Return full paths of the files in the project's folder ([] if none)."""
        path = self.get_project_folder_uri(prj_summary, prj_status, create=False)
        path = path.replace("file://", '')
        path += os.sep
        if os.path.exists(path):
            return [path + f for f in os.listdir(path)]
        else:
            return []
    def get_slider_filenames(self, path):
        """Return full paths of all .pkl files (Slider drop-offs) in `path`."""
        fnames = []
        for f in os.listdir(path):
            if f.endswith('.pkl'):
                fnames.append(os.path.join(path, f))
        return fnames
    def trash_project_folder(self, prj_summary, prj_status):
        """Send the project's whole folder to the trash via gio."""
        full_path = self._get_path_for_type(prj_status) + \
            self._sanitize_path(prj_summary)
        if os.path.exists(full_path):
            gf = gio.File(full_path)
            gf.trash()
            gf = None
            del(gf)
    def _create_project_folder(self, path):
        """Create `path`; return True if it exists afterward, else False."""
        os.mkdir(path)
        if os.path.exists(path):
            return True
        else:
            return False
    def _sanitize_path(self, fname):
        """Make `fname` usable as a single path segment (no '/')."""
        # I might want to extend this behavior later, which is why I made a custom
        # method instead of just doing the raw replacement below each time
        return fname.replace('/', '-')
    def _get_path_for_type(self, prj_status):
        """Map a project status string to its parent folder, with trailing sep.

        NOTE(review): falls off the end (returns None) for an unknown status --
        presumably only the five known values are ever passed; confirm.
        """
        if prj_status == "active":
            return defs.ACTIVE_FOLDER + os.sep
        elif prj_status == "queued":
            return defs.QUEUED_FOLDER + os.sep
        elif prj_status == "waiting_for":
            return defs.WAITING_FOR_FOLDER + os.sep
        elif prj_status == 'incubating':
            return defs.INCUBATING_FOLDER + os.sep
        elif prj_status == 'completed':
            return defs.COMPLETED_FOLDER + os.sep
class InboxManager(object):
    """Controller for the "stuff to process" tree: inbox files, single notes,
    and items imported from Slider."""
    # CHOCK FULL OF PROFANITY! I'm a juvenile, easily frustrated asshole.
    # Get used to it.
    def __init__(self, caller, obj_tree, fucdkingdatamanager):
        # I also write shitty code, get used to that, too.
        self._caller = caller
        self._tree = obj_tree
        self._fsm = FileSystemManager()
        self.dm = fucdkingdatamanager
        col = [Column('summary', data_type=str, searchable=True,
                      ellipsize=pango.ELLIPSIZE_END, expand=True),]
        self._tree.set_columns(col)
        self._fill_rows()
        self._tree.set_headers_visible(False)
        # automagically import new Slider items
        self._slider_inbox_monitor = \
            gio.File(defs.NOTE_SLIDER_FOLDER).monitor_directory()
        self._slider_inbox_monitor.connect('changed',
                                           self.process_slider_inbox_changes)
    def _fill_rows(self):
        """Create the top-level category rows of the tree."""
        # FIXME: fix this FFS, use some actual polymorphism
        #FIXME: reenable these later
        self._row_inbox_folder = CategoryRow("Inbox Folder")
        # self._row_email_inbox = CategoryRow("Emails")
        # i.e.: Tomboy, e-d-s inbox "tasks", & collected items from Slider
        self._row_single_notes = CategoryRow("Single notes")
        self._row_processed_stuff = CategoryRow("Processed Stuff")
        #FIXME: and re-enable these , too.
        self._tree.append(None, self._row_inbox_folder)
        # self._tree.append(None, self._row_email_inbox)
        self._tree.append(None, self._row_single_notes)
        self._tree.append(None, self._row_processed_stuff)
    def add_actual_shit_to_columns(self):
        """(Re)populate the tree with the current notes and inbox files."""
        notes = self.dm.get_inbox_notes()
        notes.sort(key=operator.itemgetter('summary'))
        # FIXME: this clears everything in "Processed Stuff", and it probably
        # shouldn't - that should live in its own method.
        self._tree.clear()
        self._fill_rows()
        for note in notes:
            self._tree.append(self._row_single_notes,
                              inbox_items.InboxNote(note['summary'], note['details']))
        for file_ in sorted(self.dm.get_inbox_files(),
                            key=operator.attrgetter('summary')):
            self._tree.append(self._row_inbox_folder, file_)
    # def add_inbox_files_to_clarify(self):
    #     note, file_, files = None, None, None
    #     for file_ in files:
    #         self._tree.append(self._row_single_notes,
    #                           inbox_items.InboxNote(note['summary'], note['details']))
    def complete_processing(self, obj):
        """Move a processed item into "Processed Stuff" and dispose of its
        backing data (delete the note, or trash the file)."""
        #FIXME: wtf is this doing in here? this is GUI shit!
        if isinstance(obj, inbox_items.InboxStuff):
            selected_row = self._tree.get_selected_row_number()
            self._tree.remove(obj)
            self._tree.append(self._row_processed_stuff, obj)
            if isinstance(obj, inbox_items.InboxNote):
                self.dm.delete_stuff_note(obj)
            elif isinstance(obj, inbox_items.InboxFile):
                try:
                    obj.trash()
                except gio.Error as error:
                    msg = ("Can't trash file (called from InboxManager."
                           "complete_processing): {0} -- error: {1}")
                    utils.log_line(msg.format(obj.summary, error))
            self._tree.refresh()
            self._tree.select_paths((selected_row, 0))
            gobject.idle_add(self._tree.grab_focus)
    def gather_slider_items(self):
        """Import the NA/note/queued-NA pickles left by Slider, then trash
        the files once the data manager confirms they were saved."""
        na_list = []
        note_list = []
        queued_list = []
        for n in self._fsm.get_slider_filenames(defs.NOTE_SLIDER_FOLDER):
            if n.endswith('-note.pkl'):
                note_list.append(n)
            elif n.endswith('-na.pkl'):
                na_list.append(n)
            elif n.endswith("-queued_na.pkl"):
                queued_list.append(n)
        # only delete the actual files if we got confirmation that
        # the data from them was saved successfully
        if self.dm.take_slider_items(na_list, note_list, queued_list):
            for f in note_list + na_list + queued_list:
                gio.File(f).trash()
    def process_slider_inbox_changes(self, gfile_mon, gfile, other_file, event): # pylint: disable-msg=W0613
        """gio directory-monitor callback: re-import once writes settle."""
        if event.value_nick == 'changes-done-hint':
            self.gather_slider_items()
            self.add_actual_shit_to_columns()
class RecurrenceManager(object):
    """Places recurring tasks (daily / weekly / monthly, from the recurrence
    YAML file) into the 'singletons' project via the data manager."""
    def __init__(self, dm):
        self._data_lumbergh = dm
    def place_recurring_tasks(self):
        """Entry point: load recurrence data, place due tasks, persist."""
        utils.log_line("Running place_recurring_tasks()", datetime.datetime.now())
        self._load_data(defs.RECURRENCE_DATA)
        data = self._recur_data
        today = datetime.date.today()
        # run at most once per calendar day
        if self._recur_data['last_run'] < today:
            self._place_daily_tasks(today, data)
            self._place_monthly_tasks(today, data)
            self._place_weekly_tasks(today, data)
            self._recur_data['last_run'] = today
            self._save_data(defs.RECURRENCE_DATA)
    def _create_na(self, task):
        """Build a NextAction from a recurrence-task dict (schema below)."""
        na = gee_tee_dee.NextAction(task['summary'])
        na_attrs = ('priority', 'context', 'notes', 'url', 'time_est',
                    'energy_est')
        for attr in na_attrs:
            if attr in task:
                na.__setattr__(attr, task[attr])
        if 'due_date' in task:
            # 'due_date' in the YAML is an int: days after placement
            na.due_date = datetime.date.today() + \
                datetime.timedelta(task['due_date'])
        return na
    # everyXDays: 1 # integer
    #- summary: # the task's description in e-d-s
    # priority: # "gnite syntax": ! and + are high, - is low, blank is normal
    # context: # String, enclosed in quotes
    # notes: # probably ought to be a block I guess. until then, string.
    # url: # url, enclosed in quotes
    # due_date: # integer - X days after placement
    def _load_data(self, data_file_path):
        """Load the recurrence YAML and snapshot the current singleton NAs."""
        self._recur_data = None
        self._recur_data = self._yloader(data_file_path)
        # FIXME: datamanager is a fucking mess. clean it up.
        self._singleton_nas = self._data_lumbergh.get_nas_for_prj('singletons')
    def _place_daily_tasks(self, today, data):
        """Place every-X-days tasks not already present and incomplete."""
        for t in data['daily']:
            if 'last_seen' not in t:
                # never placed before: place it now and start the clock
                na = self._create_na(t)
                self._data_lumbergh.add_na_to_prj(na, 'singletons')
                t['last_seen'] = today
            else:
                delta = datetime.timedelta(t['everyXDays'])
                # linear scan for an incomplete NA with the same summary
                found = False
                index = 0
                while found == False and index < len(self._singleton_nas):
                    if self._singleton_nas[index].summary == t['summary']:
                        if not self._singleton_nas[index].complete:
                            found = True
                            t['last_seen'] = today
                    index += 1
                if found == False and today >= t['last_seen'] + delta:
                    na = self._create_na(t)
                    self._data_lumbergh.add_na_to_prj(na, 'singletons')
                    t['last_seen'] = today
    def _place_monthly_tasks(self, today, data):
        """Place day-of-month tasks that came due since the last run."""
        last = data['last_run']
        for t in data['monthly']:
            for day in t['days']:
                # FIXME: make more generic wrt weekly tasks, too.
                task_date = datetime.date(today.year, today.month, day)
                if last < task_date <= today:
                    # skip if an incomplete copy is already in singletons
                    found = False
                    index = 0
                    while found == False and index < len(self._singleton_nas):
                        if self._singleton_nas[index].summary == t['summary']:
                            if not self._singleton_nas[index].complete:
                                found = True
                        index += 1
                    if found == False:
                        na = self._create_na(t)
                        self._data_lumbergh.add_na_to_prj(na, 'singletons')
    def _place_weekly_tasks(self, today, data):
        """Place weekday-based tasks whose weekday is today."""
        for t in data['weekly']:
            for day in t['weekdays']:
                # FIXME: make more generic wrt weekly tasks, too.
                if day == today.weekday():
                    # FIXME: bah, I suck. make this work properly when we haven't run
                    # on a given day, make it run everything since the last time we ran.
                    # the following should help I guess...
                    # (today + datetime.timedelta(7 - (today - last_day).days)).weekday()
                    found = False
                    index = 0
                    while found == False and index < len(self._singleton_nas):
                        if self._singleton_nas[index].summary == t['summary']:
                            if not self._singleton_nas[index].complete:
                                found = True
                        index += 1
                    if found == False:
                        na = self._create_na(t)
                        self._data_lumbergh.add_na_to_prj(na, 'singletons')
    def _save_data(self, data_file_path):
        """Dump the (updated) recurrence data back to YAML."""
        #FIXME: create a backup copy?
        with open(data_file_path, 'w') as yfile:
            print("Saving recurrence data")
            yaml.dump(self._recur_data, yfile, Dumper=yaml.CDumper,
                      default_flow_style=False)
    def _yloader(self, yfile_path):
        """Parse and return the YAML document at `yfile_path`."""
        with open(yfile_path, 'r') as yfile:
            print("calling yaml.load()")
            ycontent = yaml.load(yfile, Loader=yaml.CLoader)
            return ycontent
class BackupJesus(object):
    """BackupJesus saaaaaaaaaaaves the righteous among thy backup files from the
    fiery damnation of the void which is /dev/null!

    (Actually, /dev/null has nothing to do with this code actually, I just
    use gio.File.delete(), but that wouldn't be as funny. ;P)
    """
    BACKUPS_PATH = defs.BACKUPS_PATH
    FITY_EPOCH = defs.FITY_EPOCH
    def __init__(self):
        self.now = datetime.datetime.now()
        # I'm lazy.
        delta = datetime.timedelta
        # Retention policies, newest window first; 'interval' is the minimum
        # spacing between two backups kept inside [start_time, end_time).
        self.backup_policies = (# First four hours of *all* backups
                                {'start_time': self.now - delta(hours=4),
                                 'end_time': self.now,
                                 'interval': delta(0)},
                                # every hour of the past week
                                {'start_time': self.now - delta(weeks=1),
                                 'end_time': self.now - delta(hours=4),
                                 'interval': delta(hours=1)},
                                # every day of the past month
                                {'start_time': self.now - delta(weeks=4),
                                 'end_time': self.now - delta(weeks=1),
                                 'interval': delta(1)},
                                # every month since Fluidity's "epoch"
                                {'start_time': datetime.datetime.fromtimestamp(
                                    defs.FITY_EPOCH),
                                 'end_time': self.now - delta(weeks=4),
                                 'interval': delta(weeks=4)})
    def kill_stale_backups(self, dry_run=False):
        """Trash every backup not retained by some policy; log a summary.

        With dry_run=True nothing is trashed, only counted.
        """
        pattern = os.path.join(defs.BACKUPS_PATH, 'fluidity*.pkl*')
        kill_list = sorted(glob.glob(pattern))
        the_book_of_life = []
        for policy in self.backup_policies:
            the_book_of_life += self._find_saved_indexes(kill_list, **policy)
        the_book_of_life.sort()
        doomed = self._delete_doomed_files(kill_list, the_book_of_life, dry_run)
        elderly = [d for d in sorted(doomed) if self._is_senior_citizen(d)]
        message = "Damned {0} backups to the void; {1} were senior citizens."
        utils.log_line(message.format(len(doomed), len(elderly)),
                       datetime.datetime.now())
    def _delete_doomed_files(self, klist, saved_indexes, keep_the_safety_on):
        """Trash files whose index isn't saved; return their creation times."""
        doomed = []
        for idx, victim in enumerate(klist):
            if idx not in saved_indexes:
                doomed.append(self._get_creation_time(victim))
                if not keep_the_safety_on:
                    gfile = gio.File(victim)
                    gfile.trash()
        return doomed
    def _find_saved_indexes(self, klist, start_time, end_time, interval):
        """Return indexes (into sorted `klist`) kept by one policy window."""
        saved = []
        for idx, backup_file in enumerate(klist):
            creation_time = self._get_creation_time(backup_file)
            if start_time < creation_time < end_time:
                saved.append(idx)
                # slide the window forward so the next kept file is at
                # least `interval` newer than this one
                start_time = creation_time + interval
        return saved
    def _get_creation_time(self, path):
        """Recover the creation datetime embedded in a backup's file name."""
        file_name = path.replace(defs.BACKUPS_PATH + '/', '')
        time_float = float(file_name.replace('fluidity.pkl', ''))
        return datetime.datetime.fromtimestamp(time_float)
    def _is_senior_citizen(self, dt):
        """True when `dt` is more than nine weeks in the past."""
        return dt < datetime.datetime.now() - datetime.timedelta(weeks=9)
# pylint: disable-msg=R0903
class CategoryRow(object):
    """Minimal row object holding only a summary, for tree category headers."""
    def __init__(self, summary):
        self.summary = summary
class SearchResult(object):
    """Simple "row" class for use with Kiwi's ObjectList"""
    def __init__(self, summary, summary_formatted, prj_key, score, na_uuid=None):
        """Initialize this SearchResult.

        Args:
            summary: a plain-text string of the result content
            summary_formatted: a string formatted with pango markup
            prj_key: key of the project this result belongs to
            score: the 'score' returned by the relevance module
            na_uuid: if this is a NextAction, give its uuid so we can jump
                to it; defaults to None
        """
        self.summary = summary
        self.summary_formatted = summary_formatted
        self.prj_key = prj_key
        self.score = score
        self.na_uuid = na_uuid
        # a uuid means this row represents a NextAction; otherwise a Project
        if na_uuid:
            self.result_type = "na"
            self.result_type_formatted = "<i>Next Action</i>"
        else:
            self.result_type = "prj"
            self.result_type_formatted = "<i>Project</i>"
| Python |
#!/usr/bin/env python
#-*- coding:utf-8 -*-
#
# Copyright (C) 2010 - Jens Knutson <jens.knutson at gmail dot com>
# This software is licensed under the GNU General Public License
# version 3 or later (see the file COPYING).
"""GeeTeeDee-related data objects"""
from __future__ import absolute_import, division, print_function, unicode_literals
__author__ = 'Jens Knutson'
import abc
import datetime
import os
import sys
import uuid
# FIXME: ewww. this should NOT be here.
import gtk
from xml.sax import saxutils
from fluidity import defs
from fluidity import utils
# Sentinel uuid used as Project.parent_project for projects with no parent.
TOP_LEVEL_PROJECT = '00000000-0000-0000-0000-000000000000'
# FIXME: use dependency injection to handle this for now.
ICON_THEME = gtk.icon_theme_get_for_screen(gtk.gdk.Screen())
ALERT_ICON_PIXBUF = ICON_THEME.load_icon('gtk-dialog-warning', 16,
                                         gtk.ICON_LOOKUP_USE_BUILTIN)
# presumably a fully transparent 16x16 placeholder (see the png name), used
# where a row has no icon to show
FAKE_ICON_PIXBUF = gtk.gdk.pixbuf_new_from_file(
    os.path.join(defs.APP_DATA_PATH, '16x16_trans.png'))
NOTE_ICON_PIXBUF = ICON_THEME.load_icon('text-x-generic', 16,
                                        gtk.ICON_LOOKUP_USE_BUILTIN)
URL_ICON_PIXBUF = ICON_THEME.load_icon('emblem-web', 16,
                                       gtk.ICON_LOOKUP_USE_BUILTIN)
# UI label <-> numeric value maps; note energy and priority use opposite
# orderings (energy: High=2 ... Low=0; priority: High=1 ... Low=3)
ENERGY_LABELS_TO_VALUES = {"High": 2, "Normal": 1, "Low": 0}
ENERGY_VALUES_TO_LABELS = utils.invert_dict(ENERGY_LABELS_TO_VALUES)
PRIORITY_LABELS_TO_VALUES = {"High": 1, "Normal": 2, "Low": 3}
PRIORITY_VALUES_TO_LABELS = utils.invert_dict(PRIORITY_LABELS_TO_VALUES)
class GeeTeeDeeData(object):
    """Abstract base for GTD data objects (NextAction, Project): summary,
    creation date, priority, uuid, and completion/queue/due dates."""
    # Not currently being used -- consider removing.
    __metaclass__ = abc.ABCMeta
    def __init__(self, summary):
        self.summary = summary
        self.creation_date = datetime.datetime.now()
        self.priority = 2
        self.uuid = str(uuid.uuid4()) # uuid4() -- i.e.: just a random UUID
        self._completion_date = None
        self._queue_date = None
        self._due_date = None
    @property
    def summary(self):
        """One-line, plain-text description of this item."""
        return self._summary
    @summary.setter
    def summary(self, value):
        type_error = "Summary must be a str"
        assert isinstance(value, basestring), type_error
        self._summary = value
    @property
    def age(self):
        """Whole days elapsed since this item was created."""
        return (datetime.datetime.now() - self.creation_date).days
    @property
    def completion_date(self):
        """datetime this item was completed, or None."""
        return self._completion_date
    @completion_date.setter
    def completion_date(self, value):
        type_error = "completion_date must be a datetime.datetime or None"
        assert isinstance(value, datetime.datetime) or value is None, type_error
        self._completion_date = value
    @property
    def creation_date(self):
        # Missing _creation_date presumably means the object came from an
        # older data file -- fall back to the app-wide "creation epoch".
        try:
            return self._creation_date
        except AttributeError:
            return datetime.datetime.fromtimestamp(defs.CREATION_EPOCH)
    @creation_date.setter
    def creation_date(self, value):
        type_error = "creation_date must be a datetime.datetime or None"
        assert isinstance(value, datetime.datetime) or value is None, type_error
        self._creation_date = value
    @property
    def due_date(self):
        """Deadline as a datetime.date, or None."""
        return self._due_date
    @due_date.setter
    def due_date(self, value):
        type_error = "due_date must be a datetime.date or None"
        assert isinstance(value, datetime.date) or value is None, type_error
        self._due_date = value
    @property
    def priority(self):
        """1, 2, or 3 -- i.e. High, Normal, or Low."""
        return self._priority
    @priority.setter
    def priority(self, value):
        error = "priority must be 1, 2, or 3, representing 'High', " + \
                "'Normal', & 'Low', respectively."
        assert value in [1, 2, 3], error
        self._priority = value
    @property
    def queue_date(self):
        # An int here is presumably an ordinal date from an older data file;
        # convert it lazily to a datetime.date on first access.
        if isinstance(self._queue_date, int):
            self._queue_date = datetime.date.fromordinal(self._queue_date)
        return self._queue_date
    @queue_date.setter
    def queue_date(self, value):
        type_error = "queue_date must be a datetime.date or None"
        assert isinstance(value, datetime.date) or value is None, type_error
        self._queue_date = value
    def _mark_complete(self):
        # stamps *when* completion happened; subclasses decide what triggers it
        self.completion_date = datetime.datetime.now()
class NextAction(GeeTeeDeeData):
    """A single, concrete GTD "next action"."""
    def __init__(self, summary):
        super(NextAction, self).__init__(summary)
        self.complete = False
        self.energy_est = 1
        self.time_est = 10.0
        self._context = ""
        self._notes = None
        self._url = None
    def __str__(self):
        return "NextAction: {0}, uuid: {1}".format(self.summary, self.uuid)
    # PROPERTIES-A-GO-GO!
    @property
    def complete(self):
        """Done flag; setting it to True also stamps completion_date."""
        return self._complete
    @complete.setter
    def complete(self, value):
        assert isinstance(value, bool), "'complete' property must be a bool."
        self._complete = value
        if self._complete:
            self._mark_complete()
    @property
    def context(self):
        """GTD context; stored normalized as '@Capitalized'."""
        return self._context
    @context.setter
    def context(self, value):
        type_error = "Context must be a str"
        space_error = "Contexts must not contain spaces"
        assert isinstance(value, basestring), type_error
        assert " " not in value, space_error
        # normalize: strip any leading '@', capitalize, re-prepend the '@'
        value = '@' + value.lstrip('@').capitalize()
        self._context = value
    @property
    def energy_est(self):
        """0, 1, or 2 -- i.e. Low, Normal, or High energy required."""
        return self._energy_est
    @energy_est.setter
    def energy_est(self, value):
        error = "energy_est must be 0, 1, or 2, representing 'Low', " + \
                "'Normal', & 'High', respectively."
        assert value in [0, 1, 2], error
        self._energy_est = value
    @property
    def notes(self):
        """Free-form notes string, or None."""
        return self._notes
    @notes.setter
    def notes(self, value):
        # an empty string is normalized to None
        value = None if value == "" else value
        self._notes = value
    @property
    def time_est(self):
        """Estimated minutes to complete, stored as a float in [1, 360)."""
        return self._time_est
    @time_est.setter
    def time_est(self, value):
        error = ("time_est must be a float between 1 and 360 - a float "
                 "because that's what gtk.Spinbutton likes...")
        assert (1 <= value < 360), error
        self._time_est = float(value)
    @property
    def url(self):
        """Associated URL, or None / empty string."""
        return self._url
    @url.setter
    def url(self, value):
        url_error = "WTF kind of URL is that?"
        assert value in (None, "", str("")) or "://" in value, url_error
        self._url = value
    # DISPLAY/UI-RELATED BITS
    # FIXME: this is gheeeeeeeeetttoooooooooooooo. icky icky icky icky icky.
    # I think this can be done properly with Kiwi's column format setting?
    @property
    def formatted_summary(self):
        """Summary escaped and wrapped in pango markup for priority and
        (if applicable) completed/strikethrough state."""
        fs = saxutils.escape(self.summary)
        formats = {1: '<b>{0}</b>', 3: '<span weight="light">{0}</span>',
                   'complete': '<span strikethrough="true">{0}</span>'}
        if self.priority in (1, 3):
            fs = formats[self.priority].format(fs)
        if self.complete:
            fs = formats['complete'].format(fs)
        return fs
    @property
    def energy_est_word(self):
        """energy_est as its UI label ('Low'/'Normal'/'High')."""
        return ENERGY_VALUES_TO_LABELS[self.energy_est]
    @property
    def notes_icon(self):
        """Notes icon when notes exist, else the transparent placeholder."""
        icon = NOTE_ICON_PIXBUF if self.notes else FAKE_ICON_PIXBUF
        return icon
    # FIXME: this should really be sort_due_date or something, shouldn't it?
    @property
    def sort_date(self):
        """due_date, or a far-future date so undated NAs sort last."""
        # FIXME: this is pretty lame...
        # i.e.: we don't have a due date...
        due = self.due_date if self.due_date else datetime.date.fromordinal(1000000)
        return due
    @property
    def url_icon(self):
        """Web icon when a URL is set, else the transparent placeholder."""
        icon = URL_ICON_PIXBUF if self.url else FAKE_ICON_PIXBUF
        return icon
class Project(GeeTeeDeeData):
    """A GTD project: a status plus its NextActions and Areas of Focus."""
    def __init__(self, summary):
        super(Project, self).__init__(summary)
        #self.subprojects = []
        self.status = 'active'
        self.waiting_for_text = None
        self._aofs = []
        self._incubating_next_actions = []
        self._next_actions = []
        self.parent_project = TOP_LEVEL_PROJECT
        self.subprojects = []
        self._waiting_for_since = None
    def __str__(self):
        return "Project: {0}, uuid: {1}".format(self.summary, self.uuid)
    # PROPERTIES-A-GO-GO!
    #FIXME: not sure this one should live on indefinitely...
    @property
    def aofs(self):
        """Keys of the Areas of Focus this project belongs to (mutable list)."""
        return self._aofs
    @property
    def incubating_next_actions(self):
        # FIXME: lame -- replace this property with a regular attribute
        return self._incubating_next_actions
    @property
    def key_name(self):
        """The summary normalized for use as a dict key."""
        return utils.format_for_dict_key(self.summary)
    @property
    def next_actions(self):
        # FIXME: lame -- replace this property with a regular attribute
        return self._next_actions
    @property
    def status(self):
        """One of: active, incubating, waiting_for, queued, completed."""
        return self._status
    @status.setter
    def status(self, value):
        valid = ['active', 'incubating', 'waiting_for', 'queued', 'completed']
        assert value in valid, "status must be 'active', 'incubating'," + \
                "'waiting_for', 'queued', or 'completed'"
        self._status = value
        # completing a project stamps its completion_date
        if value == "completed":
            self._mark_complete()
    @property
    def waiting_for_since(self):
        """Date the project entered waiting_for status."""
        return self._waiting_for_since
    @waiting_for_since.setter
    def waiting_for_since(self, value):
        type_error = "waiting_for_since must be a datetime.date"
        assert isinstance(value, datetime.date), type_error
        self._waiting_for_since = value
    # DISPLAY/UI-ONLY -- i.e.: these should be in a different class...
    @property
    def alert(self):
        """Indicate if the project is in an "alert" status."""
        # an *active* project with no incomplete NA gets the warning icon;
        # everything else gets the transparent placeholder
        if self._status != "active":
            return FAKE_ICON_PIXBUF
        else:
            for na in self.next_actions:
                if not na.complete:
                    return FAKE_ICON_PIXBUF
            return ALERT_ICON_PIXBUF
    @property
    def formatted_summary(self):
        """Summary escaped and wrapped in pango markup for its priority."""
        formats = {1: '<b>{0}</b>', 2: '{0}', 3: '<span weight="light">{0}</span>'}
        fs = saxutils.escape(self.summary)
        return formats[self.priority].format(fs)
| Python |
#-*- coding:utf-8 -*-
#
# Copyright (C) 2009 - Jens Knutson <jens.knutson at gmail dot com>
# This software is licensed under the GNU General Public License
# version 3 or later (see the file COPYING).
"""OH NOES."""
from __future__ import absolute_import, division, print_function
__author__ = 'Jens Knutson'
import abc
import datetime
import os
import shutil
import time
import glib
import gio
from lxml import etree
from xml.sax import saxutils
from xdg import BaseDirectory
from fluidity import defs
from fluidity import dbus_misc
from fluidity import gio_fml
from fluidity import utils
# Secondary inboxes to be consolidated into the main one (GIO-style URIs).
INBOXES = ('obex://[EC:9B:5B:C1:EE:8C]/Data/Inbox',)
# 'sftp://jensck@anvil.solemnsilence.org/home/jensck/Inbox')
# INBOXES MUST BE A SET OF URIs/URLs, NOT JUST PLAIN PATHS
def testimate():
    """Manual test hook: run consolidate() once, a few seconds after start."""
    import gtk
    import gobject
    # Delay so the GTK main loop is up before consolidation kicks off.
    gobject.timeout_add_seconds(3, consolidate)
    gtk.main()
def consolidate():
    """Drain every configured secondary inbox into the main inbox."""
    collectors = []
    for uri in INBOXES:
        # Local folders can be drained directly; everything else may need
        # mounting first.
        klass = LocalFilesystemInbox if uri.startswith('file://') else MountableInbox
        collectors.append(klass(uri))
    # Tomboy notes are handled last, after the filesystem inboxes.
    collectors.append(TomboyInbox())
    for collector in collectors:
        collector.consolidate()
class Error(Exception):
    """Module-level base exception (per Google's Python style guidance)."""
class ConsolidateInboxError(Error):
    """Raised when an inbox cannot be consolidated (e.g. mount failures)."""
    def __init__(self, message):
        super(ConsolidateInboxError, self).__init__(message)
        self._message = message
    def __str__(self):
        return "ConsolidateInboxError: " + self._message
class Inbox(object):
    """Abstract base for an inbox that can be drained into the main inbox.

    Concrete subclasses implement consolidate() taking no arguments beyond
    self -- the stray `mcs` parameter the abstract signature used to carry
    matched no implementation and has been removed for consistency.
    """
    __metaclass__ = abc.ABCMeta
    @abc.abstractmethod
    def consolidate(self):
        """Consolidate this inbox into the main inbox for this inbox type."""
        pass
class LocalFilesystemInbox(Inbox):
    """Inbox living on a local (already mounted) filesystem."""
    MAIN_INBOX = gio_fml.FityFile(defs.INBOX_FOLDER)
    SLIDER_INBOX = gio_fml.FityFile(defs.NOTE_SLIDER_FOLDER)
    def __init__(self, secondary_inbox):
        """Wrap `secondary_inbox`, the URI of a folder to be drained into
        the main Fity inbox."""
        self._this_inbox = gio_fml.FityFile(secondary_inbox)
    def consolidate(self):
        """Move each child file into the appropriate main inbox, logging
        (but not re-raising) individual move failures."""
        for entry in self._this_inbox.get_children():
            try:
                utils.log_line("Moving file {0} to inbox...".format(entry.path),
                               datetime.datetime.now())
                # .pkl files are Note Slider payloads; everything else is
                # ordinary inbox "stuff".
                if entry.ext == ".pkl":
                    target = self.SLIDER_INBOX
                else:
                    target = self.MAIN_INBOX
                entry.move(target.get_child(entry.basename))
            except gio_fml.MoveError as move_err:
                utils.log_line(str(move_err), datetime.datetime.now())
class MountableInbox(LocalFilesystemInbox):
    """Inbox on a volume that may need mounting first (obex, sftp, ...)."""
    def consolidate(self):
        """Mount the volume if needed, then drain this inbox and unmount.

        When the volume is not yet mounted, _mount_volume() starts an async
        mount whose completion callback re-invokes this method via
        cb_user_data -- so returning without consolidating is not a failure.
        """
        cb_user_data = {'cb': self.consolidate}
        if self._mount_volume(user_data=cb_user_data):
            try:
                super(MountableInbox, self).consolidate()
            except (gio.Error, ConsolidateInboxError) as error:
                msg = "Unable to consolidate from {0} - message: {1}".format(
                        self._this_inbox, error)
                utils.log_line(msg, datetime.datetime.now())
            self._unmount_volume()
    def _mount_volume(self, obj=None, async_result=None, user_data=None):
        """Tricky crap to get around having to do proper async I/O. ;-P

        Dual-role method.  Called directly (async_result is None) it checks
        for an existing mount and, if absent, starts an async mount with
        itself as the callback.  Called back by gio (async_result set) it
        finishes the mount and re-runs user_data['cb'].

        Returns:
            True when the volume was already mounted; False when an async
            mount was started instead (direct-call mode only).
        Raises:
            ConsolidateInboxError: if finishing the mount fails.
        """
        if async_result is None: # i.e.: we're not being called as a callback
            try:
                self._this_inbox.find_enclosing_mount()
                return True
            except gio.Error:
                # location not mounted; let's try fixing that.
                self._this_inbox.mount_enclosing_volume(
                        callback=self._mount_volume, user_data=user_data)
                return False
        else:
            try:
                obj.mount_enclosing_volume_finish(async_result)
            except (gio.Error, glib.GError) as err:
                msg = "unable to mount: {0}. Error: {1}".format(self._this_inbox,
                                                                err)
                utils.log_line(msg, datetime.datetime.now())
                raise ConsolidateInboxError("Unable to mount the requested volume: "
                                            + msg)
            user_data['cb']()
    def _unmount_volume(self, *args):
        """Unmount this inbox's volume; also serves as its own async callback.

        With no args, start the async unmount.  As a callback, args are
        (mount, async_result): finish the unmount and log any error.
        """
        # FIXME: do I care about the fact that I'm not paying any additional
        # attention to unmounts?
        if not args:
            # NOTE(review): reaches into FityFile's private _gfile -- consider
            # exposing find_enclosing_mount() on the wrapper instead.
            mounty = self._this_inbox._gfile.find_enclosing_mount()
            mounty.unmount(self._unmount_volume)
        else:
            try:
                args[0].unmount_finish(args[1])
            except gio.Error as err:
                msg = "Unable to UNmount: {0}. Error: {1}".format(self._this_inbox,
                                                                  err)
                utils.log_line(msg, datetime.datetime.now())
class TomboyInbox(Inbox):
    """Inbox that merges stray Tomboy notes (plus two hardcoded text-file
    inboxes) into the top of Tomboy's "Start Here" note, via D-Bus."""
    #shit I might actually want to change at some point
    MAIN_INBOX = defs.INBOX_FOLDER
    TOMBOY_NOTE_FOLDER = BaseDirectory.save_data_path("tomboy")
    STARTHERE_BACKUPS = os.path.join(BaseDirectory.save_data_path("boxidate"),
                                     "start_here_backups")
    NOTE_SUFFIX = ".note"
    PADDING = "\n\n\n"
    # Tomboy "magic" - these really aren't the Right Way, but ElementTree
    # pissed me off, and seemed like overkill when this is all that's needed.
    # what to put inbetween the chunks of content grabbed from each NOTD
    #
    # Also: THE NEXT THREE LINES GET WRITTEN INTO START HERE, so don't screw with it
    # unless you know what you're doing!
    TB_CONTENT_START = "<note-content version=\"0.1\">"
    TB_CONTENT_END = "</note-content>"
    SH_CONTENT_START = TB_CONTENT_START + "Start Here"
    def __init__(self):
        super(TomboyInbox, self).__init__()
        # Set everything up - path names, mostly, a few connections to dbus, etc
        self.tbus = dbus_misc.notes_proxy
    def consolidate(self):
        """Merge all inbox notes into Start Here, then delete the originals."""
        notelist = self.build_note_list(self.MAIN_INBOX)
        agg_xml = self.build_aggregate_note(notelist)
        # Back up Start Here first -- set_SH_xml() below is destructive.
        self._back_up_SH()
        new_sh_xml = self.build_SH_replacement_xml(agg_xml)
        msg = "".join(("Boxidate is adding this to your Start Here note:\n",
                       new_sh_xml, "\n\n\n"))
        utils.log_line(msg, datetime.datetime.now(),
                       '/home/jensck/fity-data/boxidate.log')
        self.set_SH_xml(new_sh_xml)
        self.delete_notes(notelist)
    def build_aggregate_note(self, notelist):
        """Concatenate the <note-content> body of each note in notelist,
        padded apart, plus the contents of the two hardcoded text inboxes."""
        aggregate = self.PADDING
        el = len(self.TB_CONTENT_START)
        for n in notelist:
            n = os.path.join(self.MAIN_INBOX, n)
            c = open(n, 'r').read()
            # Slice out just the body between the content start/end tags.
            c_begin = c.find(self.TB_CONTENT_START) + el
            c_end = c.find(self.TB_CONTENT_END)
            aggregate += c[c_begin:c_end] + self.PADDING
        aggregate += self._horrible_hack_to_get_PlainText_inbox()
        aggregate += self. _horrible_hack_to_get_NV_inbox()
        return aggregate
    def build_note_list(self, folder):
        """Return the file names of all Tomboy .note files in folder."""
        notes = []
        for f in os.listdir(folder):
            if f.endswith(self.NOTE_SUFFIX):
                notes.append(f)
                print(f)
        return notes
    def build_SH_replacement_xml(self, xml):
        """Splice `xml` into Start Here, right after its title marker."""
        sh_xml = self.tbus.GetNoteContentsXml(self.sh_uri)
        marker = len(self.SH_CONTENT_START)
        return self.SH_CONTENT_START + xml + sh_xml[marker:]
    def delete_notes(self, notes):
        """Delete the given note files from the main inbox folder."""
        for n in notes:
            n = os.path.join(self.MAIN_INBOX, n)
            os.remove(n)
    def set_SH_xml(self, xml):
        """Replace the full contents of the Start Here note over D-Bus."""
        self.tbus.SetNoteContentsXml(self.sh_uri, xml)
    @property
    def sh_uri(self):
        # URI of the Start Here note, fetched fresh from Tomboy each time.
        return self.tbus.FindStartHereNote()
    def _back_up_SH(self):
        """Copy the current Start Here .note into the dated backups folder."""
        sh_raw = self.tbus.FindStartHereNote()
        sh_name = sh_raw.split('/')[-1] + self.NOTE_SUFFIX
        sh_file_path = os.path.join(self.TOMBOY_NOTE_FOLDER, sh_name)
        backup_file_path = os.path.join(self.STARTHERE_BACKUPS,
                                        str(datetime.date.today()) + "_" + sh_name)
        shutil.copy2(sh_file_path, backup_file_path)
    def _horrible_hack_to_get_PlainText_inbox(self):
        """Horrible, lame hack to help me for the short-term as my workflow changes..."""
        plaintext_inbox = '/home/jensck/Dropbox/PlainText/Fluidity Inbox.txt'
        with open(plaintext_inbox) as inboxfile:
            contents = inboxfile.read()
        # empty the inbox, since we're done with it
        with open(plaintext_inbox, 'w') as inboxfile_again:
            inboxfile_again.write("\n")
        return saxutils.escape(contents)
    def _horrible_hack_to_get_NV_inbox(self):
        """Horrible, lame hack #2 to help me for the short-term as my workflow changes..."""
        nv_inbox = '/home/jensck/Dropbox/Notational Data/Start Here.html'
        with open(nv_inbox, 'r') as inboxfile:
            contents = inboxfile.read()
        personal_header = '<b>Personal_Inbox_FTW</b></p>'
        doc_footer_base = '\n</body>\n</html>'
        personal_start = contents.find(personal_header) + len(personal_header)
        personal_end = contents.find(doc_footer_base)
        personal_content_raw = contents[personal_start:personal_end]
        # prevent annoying-albeit-correct complaints from XML libs about unclosed tags...
        personal_content = personal_content_raw.replace('<br>', '<br/>')
        # add a fake header and footer back so it can be parsed as XML (well, HTML5)
        fake_header = '<!DOCTYPE HTML><html><body>'
        personal_content = fake_header + personal_content + doc_footer_base
        personal_content = saxutils.escape(_convert_xml_to_text(personal_content, 'body'))
        new_contents = contents.replace(personal_content_raw, '')
        print(new_contents)
        # empty the inbox, since we're done with it
        # NOTE(review): the write-back is disabled, so unlike the PlainText
        # hack this inbox is NOT actually emptied -- confirm intent.
        # with open(nv_inbox, 'w') as inboxfile_again:
        # inboxfile_again.write(new_contents)
        return personal_content
class BoxidatorOld(object):
    """Consolidate my inboxes, including content from an external Tomboy note."""
    # NOTE(review): legacy implementation apparently superseded by the Inbox
    # classes above; consolidate() references self.SIDEARM_SFTP_INBOX_URI,
    # which is never defined on this class, so that code path would raise
    # AttributeError if this class were revived.
    #shit I might actually want to change at some point
    MAIN_INBOX = defs.INBOX_FOLDER
    TOMBOY_NOTE_FOLDER = BaseDirectory.save_data_path("tomboy")
    FITY_SLIDER_INBOX = defs.NOTE_SLIDER_FOLDER
    STARTHERE_BACKUPS = os.path.join(BaseDirectory.save_data_path("boxidate"),
                                     "start_here_backups")
    #Tomboy "magic" - these really aren't the Right Way, but ElementTree
    #pissed me off, and seemed like overkill when this is all that's needed.
    #Also note: THE NEXT THREE LINES GET WRITTEN INTO START HERE,
    #so don't screw with it unless you know what you're doing!
    TB_CONTENT_START = "<note-content version=\"0.1\">"
    TB_CONTENT_END = "</note-content>"
    SH_CONTENT_START = TB_CONTENT_START + "Start Here"
    NOTE_SUFFIX = ".note"
    #what to put inbetween the chunks of content grabbed from each NOTD
    PADDING = "\n\n\n"
    def __init__(self):
        # Set everything up - path names, mostly, a few connections to dbus, etc
        self.tbus = dbus_misc.notes_proxy
        #get the URI for Start Here
        self.sh_uri = self.tbus.FindStartHereNote()
    ##Tomboy - importing .note files into Start Here
    def _back_up_SH(self):
        """Copy the current Start Here .note into the dated backups folder."""
        sh_raw = self.tbus.FindStartHereNote()
        sh_name = sh_raw.split('/')[-1] + self.NOTE_SUFFIX
        sh_file_path = os.path.join(self.TOMBOY_NOTE_FOLDER, sh_name)
        backup_file_path = os.path.join(self.STARTHERE_BACKUPS,
                                        str(datetime.date.today()) + "_" + sh_name)
        shutil.copy2(sh_file_path, backup_file_path)
    def build_aggregate_note(self, notelist):
        """Concatenate the <note-content> bodies of every note in notelist."""
        aggregate = self.PADDING
        el = len(self.TB_CONTENT_START)
        for n in notelist:
            n = os.path.join(self.MAIN_INBOX, n)
            c = open(n, 'r').read()
            # Slice out just the body between the content start/end tags.
            c_begin = c.find(self.TB_CONTENT_START) + el
            c_end = c.find(self.TB_CONTENT_END)
            aggregate += c[c_begin:c_end] + self.PADDING
        return aggregate
    def build_note_list(self, folder):
        """Return the names of all .note files directly inside folder."""
        notes = []
        for f in os.listdir(folder):
            if f.endswith(self.NOTE_SUFFIX):
                notes.append(f)
                print(f)
        return notes
    def build_SH_replacement_xml(self, xml):
        """Splice `xml` into Start Here, right after its title marker."""
        sh_xml = self.tbus.GetNoteContentsXml(self.sh_uri)
        marker = len(self.SH_CONTENT_START)
        return self.SH_CONTENT_START + xml + sh_xml[marker:]
    def delete_notes(self, notes):
        """Delete the given note files from the main inbox folder."""
        for n in notes:
            n = os.path.join(self.MAIN_INBOX, n)
            os.remove(n)
    def set_SH_xml(self, xml):
        """Replace the full contents of the Start Here note over D-Bus."""
        self.tbus.SetNoteContentsXml(self.sh_uri, xml)
    def import_sidearm_inbox(self, sidearm_inbox, main_slider_inbox, main_inbox):
        """Move files off the 'sidearm' device inbox, then unmount it.

        .pkl files go to the Note Slider inbox; everything else to the main
        inbox.  Moves use a fire-and-forget callback.
        """
        for gf_info in sidearm_inbox.enumerate_children('*'):
            file_name = gf_info.get_name()
            gfile = sidearm_inbox.get_child(file_name)
            if file_name.endswith(".pkl"):
                gfile.move(main_slider_inbox.get_child(file_name),
                           self.totally_irresponsible_callback,
                           user_data="calling irresponsible_callback from "
                                     "import_sidearm_inbox for a pickle")
            else:
                gfile.move(main_inbox.get_child(file_name),
                           self.totally_irresponsible_callback,
                           user_data="calling irresponsible_callback from "
                                     "import_sidearm_inbox for a regular file")
        try:
            sidearm_inbox.find_enclosing_mount().unmount(
                self.totally_irresponsible_callback,
                user_data="calling irresponsible_callback from "
                          "import_sidearm_inbox for unmounting sftp_inbox")
        except gio.Error as error:
            print("Problem unmounting an inbox. Error: ", error)
    def consolidate(self):
        """Merge notes into Start Here, then pull in the sidearm inbox."""
        utils.log_line("Running boxidate.Boxidator.consolidate()",
                       datetime.datetime.now())
        #put us in the right folder to start off with, just in case...
        os.chdir(self.MAIN_INBOX)
        # Disabled for now
        # self.move_contents_to_main_inbox(self.FS_INBOXES, self.MAIN_INBOX)
        # print "Contents of external filesystem inboxes moved."
        notelist = self.build_note_list(self.MAIN_INBOX)
        agg_xml = self.build_aggregate_note(notelist)
        self._back_up_SH()
        new_sh_xml = self.build_SH_replacement_xml(agg_xml)
        #FIXME: still required?
        time.sleep(2)
        msg = "".join(("Boxidate is adding this to your Start Here note:\n",
                       new_sh_xml, "\n\n\n"))
        utils.log_line(msg, datetime.datetime.now(),
                       '/home/jensck/fity-data/boxidate.log')
        self.set_SH_xml(new_sh_xml)
        self.delete_notes(notelist)
        # handle the stuff on Sidearm
        main_slider_inbox = gio.File(self.FITY_SLIDER_INBOX)
        main_inbox = gio.File(self.MAIN_INBOX)
        # NOTE(review): SIDEARM_SFTP_INBOX_URI is undefined -- see class note.
        sidearm_sftp_inbox = gio.File(uri=self.SIDEARM_SFTP_INBOX_URI)
        try:
            # sidearm_sftp_inbox.find_enclosing_mount()
            sidearm_sftp_inbox.mount_enclosing_volume(gio.MountOperation(),
                self.import_inbox_async_cb, user_data=(sidearm_sftp_inbox,
                                                       main_slider_inbox,
                                                       main_inbox))
        except gio.Error as error:
            print("Error while trying to mount sftp inbox: " + str(error))
    def import_inbox_async_cb(self, obj=None, result=None, user_data=None):
        """Async-mount callback: forward to import_sidearm_inbox."""
        if user_data is not None:
            other_inbox, main_slider_inbox, main_inbox = user_data
            self.import_sidearm_inbox(other_inbox, main_slider_inbox, main_inbox)
def _convert_xml_to_text(xml_str, content_element_name, namespaces=None):
def _convert_xml(element):
chunks = []
if element.text:
chunks.append(element.text)
children = element.getchildren()
if len(children) > 0:
for child in children:
chunks.append(_convert_xml(child))
if element.tail:
chunks.append(element.tail)
return "".join(chunks)
root = etree.fromstring(xml_str).getroottree().getroot()
content_element = root.find(content_element_name)
return _convert_xml(content_element)
# NONE OF THIS WORKS, IT'S JUST COPY/PASTE WORK TO SERVE AS A STUB FOR LATER ON...
#class FTPInbox(LocalFilesystemInbox):
#
# def __init__(self):
# ftp = self.get_ftp_conn(self.INBOX_SERVERS, self.FTP_USERNAME)
# if ftp:
# self.get_ftp_inbox_files(ftp, self.REMOTE_INBOX_FOLDER)
# self.delete_ftp_files(ftp)
# self.tear_down_ftp_conn(ftp)
#
# def delete_ftp_files(self, ftp):
# for f in ftp.nlst():
# ftp.delete(f)
#
# def get_ftp_conn(self, servers, username):
# success = False
# ftp_conn = None
# for hostname in servers:
# if not success:
# try:
# ftp_conn = ftplib.FTP(hostname)
# password = self.get_ftp_password(hostname)
# ftp_conn.login(username, password)
# success = True
# except socket.error as e:
# print(("Problem connecting to '{0}', "
# "error given was: {1}").format(hostname, e))
# return ftp_conn
#
# def get_ftp_inbox_files(self, ftp, remote_inbox):
# ftp.cwd(remote_inbox)
# for f in ftp.nlst():
# print("Trying to RETR: " + f)
# ftp.retrbinary('RETR ' + f,
# open(os.path.join(self.MAIN_INBOX, f), 'wb').write)
#
# def tear_down_ftp_conn(self, ftp):
# ftp.quit()
#
# def get_ftp_password(self, server):
# attrs = {"server": server, "protocol": 'ftp', 'user': 'jensck'}
# items = gnomekeyring.find_items_sync(gnomekeyring.ITEM_NETWORK_PASSWORD,
# attrs)
# return items[0].secret
#class SSHInbox(NetworkInbox):
#
# def __init__(self, remote_inbox, hostname, port=22, username=None, password=None):
# """Initialize this SSHInbox.
#
# See superclass for docstring.
# """
#
# # well, that interface got kinda bloaty real fast... bah.
# import paramiko
#
# super(SSHInbox, self).__init__(remote_inbox, hostname, port, username,
# password)
# self.pkey = None
# self.key_filename = None
# self.timeout = None
# self.allow_agent = True
# self.look_for_keys = True
# self.connection_client = paramiko.SSHClient()
#
# # FIXME: bad security... bleh.
# self.connection_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
#
# def consolidate(self):
# self._connect()
# self._move_files_to_inbox()
# self._close_connection()
#
# def _close_connection(self):
# self.connection_client.close()
#
# def _connect(self):
# try:
# self.connection_client.connect(self.hostname, self.username,
# self.password)
# except Exception as err:
# print("Error while trying to connect to", self.hostname + ":", str(err))
#
# def _move_files_to_inbox(self):
# fuckyou = self.connection_client.open_sftp()
# print(fuckyou)
##
#################===================================================================
# # basename = os.path.basename(to_upload)
# # print("Downloading " + basename)
# # sftp.put(to_upload, remote_path + basename)
# # sftp.close()
#===================================================================================
| Python |
#!/usr/bin/env python
#-*- coding:utf-8 -*-
#
# Copyright (C) 2009 - Jens Knutson <jens.knutson at gmail dot com>
# This software is licensed under the GNU General Public License
# version 3 or later (see the file COPYING).
"""The main Fluidity app module."""
from __future__ import absolute_import, division, print_function
__author__ = 'Jens Knutson'
import datetime
import operator
import os
import shutil
import sys
import time
import dbus #@UnusedImport
import dbus.mainloop.glib
import dbus.service
import gobject
import gtk
import kiwi
import pango
import fluidity.ui.dialogs as dialogs
from kiwi.ui.objectlist import Column, ObjectList #pylint: disable-msg=W0611
from kiwi.ui.widgets.combobox import ProxyComboBox #pylint: disable-msg=W0611
from kiwi.ui.widgets.textview import ProxyTextView #pylint: disable-msg=W0611
from fluidity import boxidate
from fluidity import defs
from fluidity import gee_tee_dee
from fluidity import inbox_items
from fluidity import managers
from fluidity import task_export
from fluidity import ui
from fluidity import utils
from fluidity.magic_machine import MagicMachine
from fluidity.note import ProjectNote
class Fluidity(object):
"""Main Fluidity application class."""
    def __init__(self):
        """Build the app: enforce singleton, load the UI, start managers/timers."""
        # first things first...
        utils.log_line("Launching Fluidity", datetime.datetime.now())
        self._enforce_running_as_singleton(defs.DBUS_BUS_NAME,
                                           defs.DBUS_OBJECT_PATH)
        # SAY MY NAME!
        gobject.set_prgname(defs.APP_NAME)
        gobject.set_application_name(defs.APP_NAME)
        self.data_lumbergh = managers.DataManager()
        self.data_lumbergh.activate_due_queued()
        # Load the GtkBuilder UI definition and wire its signals to self.
        self.b = gtk.Builder()
        self.b.add_from_file(os.path.join(defs.APP_DATA_PATH, 'fluidity.ui'))
        self.b.connect_signals(self)
        self.map_fields_to_instance_names()
        utils.validate_paths()
        self._magical = MagicMachine()
        self._inbox_manager = managers.InboxManager(self, self.stuff_tree_w,
                                                    self.data_lumbergh)
        self._inbox_manager.gather_slider_items()
        self._rec_manager = managers.RecurrenceManager(self.data_lumbergh)
        gobject.idle_add(self._rec_manager.place_recurring_tasks)
        # One-shot idle job to prune stale backups; the manager itself is
        # not needed afterwards.
        jesus = managers.BackupJesus()
        gobject.idle_add(jesus.kill_stale_backups)
        del(jesus)
        self._search_window = dialogs.JumptoSearchDialog(self.data_lumbergh, self)
        self.clipboard = gtk.clipboard_get()
        self.init_ui()
        # Autosave periodically so a crash can't lose much work.
        gobject.timeout_add_seconds(defs.AUTOSAVE_INTERVAL,
                                    self.data_lumbergh.autosave)
        self._run_daily_tasks(False)
        gtk.gdk.notify_startup_complete()
def _enforce_running_as_singleton(self, bus_name, obj_path):
"""Ensure this app is a singleton; register a 'well-known' D-Bus bus name.
No object paths are currently defined - the sole purpose of registering
the bus name is to ensure that we only ever run one copy of the app.
The main purpose here is to avoid having two copies of the app trying to
modify the data file at once. Possibly hackish, but better than the
stupid thing I had it doing before.
"""
dbus.mainloop.glib.DBusGMainLoop(set_as_default=True)
session_bus = dbus.SessionBus()
try:
name = dbus.service.BusName(bus_name, session_bus,
allow_replacement=False,
replace_existing=False,
do_not_queue=True)
dbus.service.Object(name, obj_path)
except dbus.exceptions.NameExistsException:
#no good. we're bailing.
dialog = gtk.Dialog("Error",
flags=gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT,
buttons=(gtk.STOCK_OK, gtk.RESPONSE_ACCEPT))
error_label = gtk.Label()
error_label.set_use_markup(True)
error_msg = "Fluidity is already running."
error_label.set_markup("<big><b>" + error_msg + "</b></big>")
fuck_you_gtk = gtk.Alignment()
fuck_you_gtk.set_padding(12, 24, 12, 12)
fuck_you_gtk.add(error_label)
dialog.get_content_area().pack_start(fuck_you_gtk)
dialog.get_content_area().set_border_width(12)
error_label.show()
fuck_you_gtk.show()
dialog.run()
utils.log_line("Exiting -- found another process with the same "
"D-Bus bus name.", datetime.datetime.now())
sys.exit("Another process has that bus name; " + error_msg)
def add_file_to_prj(self, prj):
chooser = gtk.FileChooserDialog(action=gtk.FILE_CHOOSER_ACTION_OPEN,
buttons=(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL,
gtk.STOCK_ADD, gtk.RESPONSE_OK))
chooser.set_property("select-multiple", True)
response = chooser.run()
if response == gtk.RESPONSE_OK:
file_names = chooser.get_filenames()
for f in file_names:
self.data_lumbergh.copy_to_project_folder(f, prj)
#now re-fill the project's files ObjectList
self.fill_prj_support_files_w(prj)
chooser.destroy()
chooser = None
def attach_stuff_to_prj(self, prj_key, stuff):
prj = self.data_lumbergh.prjs[prj_key]
if isinstance(stuff, inbox_items.InboxFile):
self.data_lumbergh.copy_to_project_folder(stuff.path, prj)
elif isinstance(stuff, inbox_items.InboxNote):
prj_ = prj
note = ProjectNote(prj=prj_)
note.add_stuff(stuff)
    def complete_project(self, prj):
        """Mark prj as completed and refresh the project list."""
        self.data_lumbergh.change_project_status(prj, "completed")
        self.fill_prj_list_w()
def consolidate_inboxes(self, widget):
# FIXME: this shit really belongs in DataManager
# Also, it needs to actually work for other people... *cough*
if os.environ.get("USER") != "jensck":
pass
else:
self.temporarily_disable_widget(widget)
boxidate.consolidate()
self._inbox_manager.gather_slider_items()
self._inbox_manager.add_actual_shit_to_columns()
self.temporarily_disable_widget(widget)
def create_new_aof(self):
d = self.b.get_object("new_aof_dialog")
e = self.b.get_object("new_aof_name_w")
if d.run() == gtk.RESPONSE_APPLY:
aof_name = e.get_text()
e.set_text("")
self.data_lumbergh.create_new_aof(aof_name)
self.fill_aofs_w(self.aof_filter_w, self.data_lumbergh.aof_names)
self.fill_aofs_w(self.prj_details_aofs_w,
self.data_lumbergh.aof_names, False)
d.hide()
def delete_na(self, na):
prj = self.prj_list_w.get_selected()
na_index = self.prj_details_na_list_w.index(na)
self.data_lumbergh.delete_na(na, prj)
self.fill_prj_details_na_list_w(self.prj_list_w.get_selected())
self.prj_details_na_list_w.select_paths([na_index - 1])
    def delete_prj(self, prj):
        """Remove prj entirely and refresh the project list."""
        self.data_lumbergh.delete_prj(prj)
        self.fill_prj_list_w()
    def display_prj_notes(self, prj_):
        """Open the notes window for the given project."""
        ProjectNote(prj=prj_).show()
def edit_extant_na(self, na):
#FIXME: review this later - is it doing what we intend?
# also, in case I forget - this isn't a datamanager issue - the NAD should
# take care of the actual data question with
nad = dialogs.NewNextActionDialog(self, self.data_lumbergh)
nad.edit_extant_na(na)
nad = None
    def file_stuff_as_reference(self, stuff):
        """Move an inbox file into a user-chosen reference folder, then mark
        the item as processed."""
        title_text = "Please select the folder where you would like to move this file to"
        chooser = gtk.FileChooserDialog(title=title_text,
                                        action=gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER,
                                        buttons=(gtk.STOCK_CANCEL,
                                                 gtk.RESPONSE_CANCEL,
                                                 gtk.STOCK_SAVE_AS,
                                                 gtk.RESPONSE_OK))
        # HACK: digs through the dialog's internal widget tree to reach the
        # stock OK button and relabel it -- fragile across GTK versions.
        ok_button = chooser.get_child().get_children()[1].get_children()[0]
        ok_button.set_label("Move file")
        home_dir = os.getenv("HOME")
        chooser.set_current_folder_uri("file://" + home_dir)
        response = chooser.run()
        if response == gtk.RESPONSE_OK:
            new_location = chooser.get_filename()
            try:
                shutil.move(stuff.path, os.path.join(new_location, stuff.summary))
            except IOError:
                # Best-effort: a failed move is only reported to stdout.
                print("Something has gone wrong. Oops.")
        chooser.destroy()
        chooser = None
        self._inbox_manager.complete_processing(stuff)
def fill_aofs_w(self, widget, aofs, all_item=True):
widget.clear()
if all_item:
widget.append_item('All')
for i in sorted(aofs):
widget.append_item(i)
widget.append_item(defs.NO_AOF_ASSIGNED)
widget.select_item_by_position(0)
    def fill_engage_context_w(self, widget):
        """Populate the Engage-tab context selector ('Any' + known contexts).

        NOTE(review): `widget` is only used for clear(); items are appended
        to self.engage_context_w directly -- presumably they are always the
        same widget, but confirm before calling with anything else.
        """
        widget.clear()
        self.engage_context_w.append_item('Any')
        contexts = self.data_lumbergh.get_contexts()
        for i in contexts:
            self.engage_context_w.append_item(i)
        self.engage_context_w.set_active(0)
    def fill_engage_na_list(self):
        """Rebuild the Engage list from the current filter-widget settings.

        Filters active next actions by context, time estimate, energy level
        and (optionally) due-today, sorts them, repopulates the list, and
        updates the totals label.
        """
        energy = self.engage_energy_level_w.get_selected_label()
        max_time = self.engage_time_available_w.get_selected_label()
        due_only = self.engage_due_today_filter_w.props.active
        today = datetime.date.today()
        # FIXME: when things are loading up for the first time, we get
        # NoneType problems - i need to eventually fix this properly.
        if max_time:
            if "+" in max_time:
                # catch-all for "60+" minutes
                # FIXME: stop using a "magic" value here?
                max_time = 1000.0
            else:
                max_time = float(max_time)
        context = self.engage_context_w.get_selected()
        candidate_nas = []
        active_nas = self.data_lumbergh.get_na_for_each_active_prj()
        # and of those, get all the ones which meet our filter criteria
        for n in active_nas:
            # `and` binds tighter than `or`: this is (context == "Any" and
            # not an agenda item) or an exact context match -- agenda items
            # are deliberately excluded from the "Any" view.
            if (context == "Any" and 'agenda' not in n.context.lower() or
                n.context == context):
                if n.time_est > max_time:
                    continue
                na_energy = ui.ENERGY_VALUES_TO_LABELS[n.energy_est]
                if energy != "Any" and na_energy != energy:
                    continue
                if due_only and (n.due_date is None or n.due_date > today):
                    continue
                candidate_nas.append(n)
        # next, sort the filtered candidates
        # Stable multi-pass sort: last key (sort_date, priority) dominates.
        candidate_nas.sort(key=operator.attrgetter('context'))
        candidate_nas.sort(key=operator.attrgetter('age', 'time_est', 'energy_est'),
                           reverse=True)
        candidate_nas.sort(key=operator.attrgetter('sort_date', 'priority'))
        # Clear the list and re-populate it appropriately
        self.engage_na_list.clear()
        for na in candidate_nas:
            self.engage_na_list.append(na)
        total_time, tasks_count = 0, 0
        # FIXME: review this - should I really be hardcoding the exclusion
        # of Agendas?
        for na in self.engage_na_list:
            context = na.context
            if context is not None and "agenda" not in context.lower():
                total_time = int(total_time + na.time_est)
                tasks_count += 1
        # Render "N tasks, H hours M minutes"-style totals for non-agenda NAs.
        hours = total_time // 60
        mins = total_time % 60
        self.engage_current_totals_w.set_text(
            defs.ENGAGE_TOTALS_TEMPLATE.format(tasks_count, hours, mins))
        self.engage_na_list.select_paths([0])
    def fill_na_list_w(self, prj=None):
        """Refresh the NA list for prj (defaults to the selected project)."""
        if not prj:
            prj = self.prj_list_w.get_selected()
        self.fill_prj_details_na_list_w(prj)
    def fill_prj_details(self, prj):
        """Populate every project-details widget from the selected project."""
        self.prj_details_na_list_w.clear()
        # Blank all of the date/waiting-for text fields before refilling.
        for widget in (self.prj_details_due_date_w,
                       self.prj_details_queue_date_w,
                       self.prj_details_waiting_for_w,
                       self.prj_details_waiting_for_since_w):
            widget.props.text = ""
        # NOTE(review): the `prj` argument is immediately overwritten by the
        # current list selection -- confirm whether the parameter is needed.
        prj = self.prj_list_w.get_selected()
        if prj:
            self.fill_prj_details_na_list_w(prj)
            self.fill_prj_support_files_w(prj)
            if prj.due_date:
                self.prj_details_due_date_w.set_text(
                    prj.due_date.strftime(defs.GTK_DATE_TEXT_TEMPLATE))
            if prj.queue_date:
                self.prj_details_queue_date_w.set_text(
                    prj.queue_date.strftime(defs.GTK_DATE_TEXT_TEMPLATE))
            if prj.waiting_for_text:
                self.prj_details_waiting_for_w.set_text(prj.waiting_for_text)
            if prj.waiting_for_since:
                self.prj_details_waiting_for_since_w.set_text(
                    prj.waiting_for_since.strftime(defs.GTK_DATE_TEXT_TEMPLATE))
            # Show the project's first AOF and its translated priority label.
            self.prj_details_aofs_w.select_item_by_label(
                self.data_lumbergh.get_prj_aof_names(prj)[0])
            translated = ui.translate_priority(prj.priority)
            self.prj_details_priority_w.select_item_by_label(translated)
#FIXME: put this in dm??? does this need fixing?
def fill_prj_details_na_list_w(self, prj):
self.prj_details_na_list_w.clear()
for n in prj.next_actions:
self.prj_details_na_list_w.append(n)
self.prj_details_incubating_na_list_w.clear()
for n in prj.incubating_next_actions:
self.prj_details_incubating_na_list_w.append(n)
def fill_prj_list_w(self, area_name=None, rfilter=None):
if area_name == None:
area_name = self.aof_filter_w.get_selected()
if rfilter == None:
rfilter = self.get_prj_review_status_filter()
# when aof selector contents are getting shuffled around, 'area_name'
# will be None, which of course causes problems.
if area_name:
self.prj_list_w.clear()
prjs = self.data_lumbergh.get_prjs_by_aof(area_name, rfilter)
self.prj_list_w.extend(prjs)
self.prj_list_w.select_paths([0])
pdbox = self.b.get_object("project_details_vbox")
if len(self.prj_list_w) <= 0:
pdbox.hide()
else:
pdbox.show()
def fill_prj_support_files_w(self, prj):
self.prj_support_files_w.clear()
file_list = self.data_lumbergh.get_file_list_for_prj(prj)
for f in file_list:
row = ui.ProjectSupportFileRow(f)
self.prj_support_files_w.append(row)
self.prj_support_files_w.sort_by_attribute("isdir", order=gtk.SORT_ASCENDING)
self.prj_support_files_w.sort_by_attribute("name_lowercase",
order=gtk.SORT_ASCENDING)
    def fill_stuff_details(self, obj):
        """Show the details pane appropriate to the type of inbox item."""
        if isinstance(obj, inbox_items.InboxNote):
            self.clarify_stuff_details_notebook.set_current_page(0)
            self.clarify_notes_details_summary_w.set_text(obj.summary)
            if obj.details:
                self.clarify_notes_details_details_w.update(obj.details)
            else:
                self.clarify_notes_details_details_w.update("")
            self.clarify_stuff_details_notebook.show()
            # Notes cannot be filed as reference or read/review material.
            self.clarify_file_as_reference_w.props.sensitive = False
            self.clarify_add_to_read_review_w.props.sensitive = False
        elif isinstance(obj, inbox_items.InboxFile):
            self.clarify_stuff_details_notebook.set_current_page(2)
            self.clarify_file_details_name_w.set_text(obj.summary)
            self.clarify_file_details_type_w.set_text(obj.mime_type)
            self.clarify_file_details_size_w.set_text(obj.size)
            self.clarify_file_details_path_w.set_text(obj.parent.path)
            self.clarify_file_details_notes_w.update(obj.notes)
            self.clarify_file_details_icon_w.set_from_pixbuf(obj.icon)
            self.clarify_stuff_details_notebook.show()
            self.clarify_file_as_reference_w.props.sensitive = True
            self.clarify_add_to_read_review_w.props.sensitive = True
            # Disabled preview support, kept for future reference:
    #        self.clarify_file_details_mime_nb_w.show()
    #        if obj.generic_type == 'text':
    #            self.clarify_file_info_text_preview_w.update(obj.get_preview())
    #            self.clarify_file_details_mime_nb_w.set_current_page(1)
    #        if obj.generic_type == 'image':
    #            self.clarify_file_info_image_thumbnail_w.set_from_pixbuf(obj.get_preview())
    #            self.clarify_file_details_mime_nb_w.set_current_page(2)
    #        else:
    #            self.clarify_file_details_mime_nb_w.hide()
        else:
            # Unknown item type: hide the details pane entirely.
            self.clarify_stuff_details_notebook.hide()
def get_prj_review_status_filter(self):
group = self.b.get_object("review_active_w").get_group()
for b in group:
if b.get_active():
if gtk.Buildable.get_name(b) == 'review_active_w':
return "active"
elif gtk.Buildable.get_name(b) == 'review_incubating_w':
return "incubating"
elif gtk.Buildable.get_name(b) == 'review_waiting_for_w':
return "waiting_for"
elif gtk.Buildable.get_name(b) == 'review_queued_w':
return "queued"
elif gtk.Buildable.get_name(b) == 'review_completed_w':
return "completed"
    def incubate_project(self, prj):
        """Move prj to 'incubating' status and refresh the project list."""
        self.data_lumbergh.change_project_status(prj, "incubating")
        self.fill_prj_list_w()
def mark_project_as_waiting_for(self, prj):
# throw out a dialog to ask for the waiting_for_text and waiting_for_since
wfd = dialogs.WaitingForDialog()
wf_results = wfd.get_waiting_for_info()
prj.waiting_for_since = wf_results[0]
prj.waiting_for_text = wf_results[1]
self.data_lumbergh.change_project_status(prj, "waiting_for")
self.fill_prj_list_w()
    def init_engage_na_list(self, obj_list):
        """Define the columns of the Engage tab's next-action ObjectList.

        Each Column attribute name matches an attribute on the next-action
        objects later placed in `obj_list` (a kiwi ObjectList).
        """
        obj_list.set_columns(
            [Column('uuid', data_type=str, visible=False),
            # editable checkbox -- toggling it marks the NA complete
            Column('complete', title=' ', data_type=bool, editable=True),
            Column('formatted_summary', title="Summary", data_type=str,
                   use_markup=True, searchable=True, expand=True,
                   ellipsize=pango.ELLIPSIZE_END),
            # clickable icon columns; see _open_na_url_or_note()
            Column('url_icon', title=' ', data_type=gtk.gdk.Pixbuf),
            Column('notes_icon', title=' ', data_type=gtk.gdk.Pixbuf),
            Column('context', title="Context", data_type=str),
            Column('due_date', title='Due date', data_type=datetime.date),
            Column('time_est', title='Time', data_type=float),
            Column('energy_est_word', title='Energy', data_type=str),
            Column('age', title='Age', data_type=str)])
    def init_prj_details_na_list_w(self, obj_list):
        """Define the columns for a project-details next-action list.

        Used for both the active and the incubating NA lists; headers are
        hidden since the lists are small and self-explanatory.
        """
        obj_list.set_columns([Column('uuid', data_type=str, visible=False),
                              Column('complete', data_type=bool, editable=True),
                              Column('context', data_type=str),
                              Column('formatted_summary', data_type=str,
                                     use_markup=True, searchable=True),
                              Column('due_date', data_type=str)])
        obj_list.set_headers_visible(False)
    def init_prj_list_w(self, obj_list):
        """Define the columns for the Review tab's project list.

        Only the alert icon and summary are shown; key_name and priority are
        carried along invisibly for sorting/lookup.
        """
        obj_list.set_columns([Column('alert', data_type=gtk.gdk.Pixbuf,
                                     visible=True),
                              Column('summary', data_type=str, searchable=True),
                              Column('key_name', data_type=str, visible=False),
                              Column('priority', data_type=int, visible=False)])
        obj_list.set_headers_visible(False)
    def init_prj_support_files_w(self, obj_list):
        """Define the columns for a project's support-files list."""
        # I have no idea why 23 worked best.  (Empirically-tuned icon width.)
        obj_list.set_columns([Column('icon', width=23, data_type=gtk.gdk.Pixbuf),
                              Column('file_name', data_type=str, searchable=True,
                                     expand=True),
                              Column('full_path', data_type=str, visible=False),
                              Column('name_lowercase', data_type=str,
                                     visible=False),
                              Column('isdir', data_type=bool, visible=False)])
        obj_list.set_headers_visible(False)
    def init_ui(self):
        """Collection of mostly one-liners to set some UI details."""
        # NOTE(review): third arg presumably suppresses the "All" entry in the
        # project-details AOF combo -- confirm against fill_aofs_w().
        self.fill_aofs_w(self.prj_details_aofs_w, self.data_lumbergh.aof_names,
                         False)
        self.init_prj_list_w(self.prj_list_w)
        self.fill_aofs_w(self.aof_filter_w, self.data_lumbergh.aof_names)
        self.init_prj_details_na_list_w(self.prj_details_na_list_w)
        self.init_prj_details_na_list_w(self.prj_details_incubating_na_list_w)
        self.workflow_nb.set_show_tabs(False)
        self.init_prj_support_files_w(self.prj_support_files_w)
        self.init_engage_na_list(self.engage_na_list)
        self.show_correct_project_action_buttons()
        self.engage_energy_level_w.select_item_by_position(0)
        # FIXME: wow, this blows.
        self._inbox_manager.add_actual_shit_to_columns()
        self.clarify_stuff_details_notebook.set_show_tabs(False)
        self.stuff_tree_w.get_treeview().props.enable_search = False
        # show Review tab by default when starting up
        self.b.get_object("show_review_tab").set_active(True)
        # clicks on the Engage NA list can hit the URL/notes icon columns
        self.engage_na_list.get_treeview().connect('button-press-event',
                                                   self.engage_na_list_click_cb)
        self.clarify_file_details_mime_nb_w.set_show_tabs(False)
        self.clarify_file_details_mime_nb_w.hide()
    def jump_to_search_result(self, prj_key, na_uuid=None):
        """Navigate the Review tab to the given project (and optionally an NA).

        Resets the AOF filter and the status radio buttons so the project is
        actually visible, selects it, and -- when `na_uuid` is given -- also
        selects that next action in the project-details list.
        """
        # project status -> the Review radio button that shows that status
        status_widget_map = (("active", "review_active_w"),
                             ("incubating", "review_incubating_w"),
                             ("waiting_for", "review_waiting_for_w"),
                             ("queued", "review_queued_w"),
                             ("completed", "review_completed_w"))
        # First, "clear" AOF selector, select the right prj status,
        # so we can actually see the project/na
        self.aof_filter_w.select_item_by_position(0)
        self.b.get_object("show_review_tab").set_active(True)
        prj = self.data_lumbergh.prjs[prj_key]
        for status, widget in status_widget_map:
            if prj.status == status:
                self.b.get_object(widget).set_active(True)
                break
        self.prj_list_w.select(prj, scroll=True)
        if na_uuid:
            self.b.get_object("prj_details_notebook").set_current_page(0)
            for na in self.prj_details_na_list_w:
                if na.uuid == na_uuid:
                    self.prj_details_na_list_w.select(na, scroll=True)
                    # grab focus only after pending GTK events settle
                    gobject.idle_add(self.prj_details_na_list_w.grab_focus)
                    break
def map_fields_to_instance_names(self):
"""Collection of one-liners to set up convenient names for UI elements"""
self.window = self.b.get_object("main_window")
self.aof_filter_w = self.b.get_object("aof_filter_w")
self.clarify_add_to_read_review_w = \
self.b.get_object("clarify_add_to_read_review_w")
self.clarify_file_as_reference_w = \
self.b.get_object("clarify_file_as_reference_w")
self.clarify_image_preview = self.b.get_object("clarify_image_preview")
self.clarify_nb = self.b.get_object("clarify_notebook")
self.clarify_notes_copy_summary_w = \
self.b.get_object("clarify_notes_copy_summary_w")
self.clarify_notes_details_details_w = \
self.b.get_object("clarify_notes_details_details_w")
self.clarify_notes_details_summary_w = \
self.b.get_object("clarify_notes_details_summary_w")
self.clarify_stuff_details_notebook = \
self.b.get_object("clarify_stuff_details_notebook")
self.engage_context_w = self.b.get_object("engage_context_w")
self.engage_current_totals_w = \
self.b.get_object("engage_current_totals_w")
self.engage_energy_level_w = self.b.get_object("engage_energy_level_w")
self.engage_na_list = self.b.get_object("engage_na_list")
self.engage_due_today_filter_w = \
self.b.get_object("engage_due_today_filter_w")
self.engage_time_available_w = \
self.b.get_object("engage_time_available_w")
self.new_prj_d = self.b.get_object("new_prj_dialog")
self.prj_details_aofs_w = self.b.get_object("prj_details_aofs_w")
self.prj_details_due_date_w = self.b.get_object("prj_details_due_date_w")
self.prj_details_incubating_na_list_w = \
self.b.get_object("prj_details_incubating_na_list_w")
self.prj_details_na_list_w = self.b.get_object("prj_details_na_list_w")
self.prj_details_priority_w = self.b.get_object("prj_details_priority_w")
self.prj_details_queue_date_w = \
self.b.get_object("prj_details_queue_date_w")
self.prj_list_w = self.b.get_object("prj_list_w")
self.prj_queue_date_hbox = self.b.get_object("prj_queue_date_hbox")
self.prj_support_files_w = self.b.get_object("prj_support_files_w")
self.prj_details_waiting_for_w = \
self.b.get_object("prj_details_waiting_for_w")
self.prj_details_waiting_for_since_w = \
self.b.get_object("prj_details_waiting_for_since_w")
self.review_project_status_filter_w = \
self.b.get_object("review_project_status_filter_w")
self.stuff_tree_w = self.b.get_object("stuff_tree_w")
self.waiting_for_table = self.b.get_object("waiting_for_table")
self.workflow_nb = self.b.get_object("workflow_notebook")
self.clarify_file_details_name_w = self.b.get_object("clarify_file_details_name_w")
self.clarify_file_details_type_w = self.b.get_object("clarify_file_details_type_w")
self.clarify_file_details_size_w = self.b.get_object("clarify_file_details_size_w")
self.clarify_file_details_path_w = self.b.get_object("clarify_file_details_path_w")
self.clarify_file_details_notes_w = self.b.get_object("clarify_file_details_notes_w")
self.clarify_file_details_mime_nb_w = self.b.get_object("clarify_file_details_mime_nb_w")
self.clarify_file_details_icon_w = self.b.get_object("clarify_file_details_icon_w")
self.clarify_file_info_text_preview_w = self.b.get_object("clarify_file_info_text_preview_w")
self.clarify_file_info_image_thumbnail_w = self.b.get_object("clarify_file_info_image_thumbnail_w")
def move_na_position(self, objlist, prj, position):
nas = objlist.get_selected_rows()
if len(nas) == 1:
na = nas[0]
old_index = objlist.index(na)
if position == "up":
if old_index > 0:
del(prj.next_actions[old_index])
prj.next_actions.insert(old_index - 1, na)
elif position == "down":
if old_index + 1 < len(objlist):
del(prj.next_actions[old_index])
prj.next_actions.insert(old_index + 1, na)
elif position == "first":
if old_index > 0:
del(prj.next_actions[old_index])
prj.next_actions.insert(0, na)
elif position == "last":
if old_index + 1 < len(objlist):
del(prj.next_actions[old_index])
prj.next_actions.append(na)
self.fill_prj_details_na_list_w(prj)
self.prj_details_na_list_w.select(na)
    def process_stuff_as_na(self, selected_stuff, stuff_summary=None,
                            stuff_details=None, incubate_=False):
        """Turn inbox stuff into a next action on the "singletons" project."""
        # FIXME: ugh. this is so NOT ok. fix this also. .... *sigh*
        prj = self.data_lumbergh.prjs['singletons']
        # kiwi's TextView.read() yields kiwi.ValueUnset (not "") when empty,
        # so anything that isn't a plain str is treated as "no details".
        # NOTE(review): on Python 2 this also discards unicode details --
        # confirm whether that is intended.
        if not isinstance(stuff_details, str):
            stuff_details = None
        self.start_new_na(stuff_summary, prj, stuff_details, incubate=incubate_,
                          stuff=selected_stuff)
        self._inbox_manager.complete_processing(selected_stuff)
def process_stuff_as_prj(self, stuff, summary=None, details=None,
status="active"):
# FIXME: there's no WAY this is the right way to do this - why are we
# getting kiwi.ValueUnset when the kiwi TextView is empty, instead of
# an empty string?
if isinstance(stuff, inbox_items.InboxStuff):
self.start_new_prj(summary, status, notes=details,
stuff_obj=stuff)
self._inbox_manager.complete_processing(stuff)
    def queue_project(self, prj):
        """Queue *prj* until a user-chosen date.

        Re-prompts (passing True to show the "invalid" variant) until the
        dialog reports a valid date.  A falsy date -- presumably what a
        cancelled dialog returns; confirm in QueueProjectDialog -- leaves
        the project untouched.
        """
        qdialog = dialogs.QueueProjectDialog()
        qdate = qdialog.get_datetime()
        # and in case the magic date we got was invalid...
        while not qdialog.valid:
            qdialog = dialogs.QueueProjectDialog(True)
            qdate = qdialog.get_datetime()
        # i.e.: did we hit cancel?
        if qdate:
            self.data_lumbergh.change_project_status(prj, "queued", qdate)
            self.fill_prj_list_w()
    def quit(self):
        """Persist state and shut down the application.

        Dumps notes for already-processed inbox stuff, lets the data manager
        clean up, logs the exit, releases our D-Bus name, and finally stops
        the GTK main loop.
        """
        # FIXME: when I'm finally using signals (like I should be... *cough*),
        # this will be deprecated
        processed = self._inbox_manager._tree.get_descendants(
            self._inbox_manager._row_processed_stuff)
        self.data_lumbergh.dump_processed_stuff_notes(processed)
        self.data_lumbergh.cleanup_before_exit()
        utils.log_line("Exiting normally.", datetime.datetime.now())
        dbus.SessionBus().release_name(defs.DBUS_BUS_NAME)
        gtk.main_quit()
    def remove_file_from_prj(self, prj):
        """Confirm, then delete the selected support file(s) from *prj*.

        The confirmation dialog's labels switch between singular and plural
        wording depending on how many rows are selected.
        """
        file_list = self.prj_support_files_w.get_selected_rows()
        label = self.b.get_object("file_name_label")
        title_label = self.b.get_object("delete_file_header_label")
        if len(file_list) == 1:
            file_name = os.path.split(file_list[0].full_path)[1]
            template = self.b.get_object("stupid_file_template_label").get_text()
            label.set_text(template % file_name)
            title_label_text = """<span weight="bold"
                              size="x-large">Delete this file?</span>"""
        else:
            template = self.b.get_object(
                "stupid_multiple_files_template_label").get_text()
            label.set_text(template)
            #FIXME: this is waaaaaaaaaaaay too ghetto, even for me.
            title_label_text = """<span weight="bold"
                              size="x-large">Delete these files?</span>"""
        title_label.set_text(title_label_text)
        title_label.set_use_markup(True)
        d = self.b.get_object("delete_prj_file_dialog")
        # Default focus on Cancel so a stray Enter doesn't delete anything.
        d.set_focus(self.b.get_object("cancel_delete_prj_file_w"))
        response = d.run()
        if response == gtk.RESPONSE_OK:
            d.hide()
            for f in file_list:
                self.data_lumbergh.remove_file_from_prj(f.file_name, prj)
            #now re-fill the project's files ObjectList
            self.fill_prj_support_files_w(prj)
        else:
            d.hide()
    def search(self, query):
        # Delegate to the dedicated search window.
        self._search_window.search(query)
    def select_clarify_tab(self, widget):
        # Show whichever Clarify notebook page contains *widget*.
        self.clarify_nb.set_current_page(self.clarify_nb.page_num(widget))
    def set_aof_w_text(self, widget, prj):
        # Show the project's first AOF name in the given combo.
        aof_text = self.data_lumbergh.get_prj_aof_names(prj)[0]
        widget.select_item_by_label(aof_text)
    def set_clipboard_text(self, text):
        # store() hands the contents to the clipboard manager so they can
        # outlive this process.
        self.clipboard.set_text(text)
        self.clipboard.store()
    def _set_date_w_values(self, dt, widget):
        """Write *dt* into the date widget and the matching project field.

        Which attribute of the selected project is updated is inferred from
        the widget's Glade name ("due_date" / "queue_date" / "waiting_for").
        Raises AttributeError if `dt` is None (strftime on None) -- the
        disabled try/except below used to swallow that.
        """
        # UPDATE: disabling the try/except so I can find out what exceptions
        # actually get thrown, ffs
        # try:
        #this will fail and thus short-circuit if the date is 'None'
        date_text = dt.strftime(defs.GTK_DATE_TEXT_TEMPLATE)
        widget.set_text(date_text)
        # FIXME: more ghetto garbage from me, because I'm tired and slothful
        if "due_date" in gtk.Buildable.get_name(widget):
            self.prj_list_w.get_selected().due_date = dt
        elif "queue_date" in gtk.Buildable.get_name(widget):
            self.prj_list_w.get_selected().queue_date = dt
        elif "waiting_for" in gtk.Buildable.get_name(widget):
            self.prj_list_w.get_selected().waiting_for_since = dt
        widget.date = dt
        # except:
        #     widget.date = None
def set_prj_waiting_for(self, prj, wf_status_text):
prj.waiting_for_text = wf_status_text
def _set_valid_date_w(self, widget):
if widget.get_text() == "":
widget.date = None
else:
# get_magic_date() returns None on failure, so we're safe either way here
widget.date = self._magical.get_magic_date(widget.get_text())
# get_magic_date() didn't understand the mystery meat you fed it.
if widget.date == None:
widget.set_text(defs.UNRECOGNIZED_DATE_TEXT)
else:
self._set_date_w_values(widget.date, widget)
def show_correct_project_action_buttons(self):
for widget in self.b.get_object("project_actions_bbox").get_children():
widget.show()
status = self.get_prj_review_status_filter()
if status == "active":
self.b.get_object("activate_prj_w").hide()
# self.prj_queue_date_hbox.hide()
self.waiting_for_table.hide()
elif status == "incubating":
self.b.get_object("incubate_prj_w").hide()
# self.prj_queue_date_hbox.hide()
self.waiting_for_table.hide()
elif status == "waiting_for":
self.b.get_object("prj_waiting_for_w").hide()
self.waiting_for_table.show()
elif status == "queued":
self.b.get_object("queue_prj_w").hide()
# self.prj_queue_date_hbox.show()
self.waiting_for_table.hide()
elif status == "completed":
self.b.get_object("mark_prj_complete_w").hide()
# self.prj_queue_date_hbox.hide()
self.waiting_for_table.hide()
    def start_new_na(self, new_na_summary, prj, na_notes=None, incubate=False,
                     stuff=None):
        """Open the New Next Action dialog for *prj*, optionally from stuff.

        When *stuff* is an inbox file its own summary overrides the one
        passed in, and -- for the "singletons" pseudo-project only -- the
        file is also copied into the project folder.
        """
        if isinstance(stuff, inbox_items.InboxFile):
            new_na_summary = stuff.summary
        nad = dialogs.NewNextActionDialog(self, self.data_lumbergh)
        nad.start_new_na(new_na_summary, prj, na_notes, incubate=incubate,
                         stuff=stuff)
        # jesus this is so not the right place for this. Fity needs a rewrite.
        # ...
        # ...reeeaaaal bad.
        if prj.summary == 'singletons' and isinstance(stuff,
                                                      inbox_items.InboxFile):
            self.data_lumbergh.copy_to_project_folder(stuff.path, prj)
    def start_new_prj(self, new_prj_name, status=None, notes=None, stuff_obj=None):
        """Open the New Project dialog for a project named *new_prj_name*.

        `status` defaults to whatever the Review status filter shows.  The
        new project inherits the AOF filter selection only when the Review
        tab is active and a specific AOF (not "All") is chosen.  The
        waiting_for/queued statuses register dialog exit hooks so their
        extra fields get collected; inbox files are attached to the new
        project's file list.
        """
        if not status:
            status = self.get_prj_review_status_filter()
        if self.aof_filter_w.get_selected() != "All" and \
                self.b.get_object("show_review_tab").get_active():
            aofs = self.aof_filter_w.get_selected()
        else:
            aofs = defs.NO_AOF_ASSIGNED
        project = gee_tee_dee.Project(new_prj_name)
        pd = dialogs.NewProjectDialog(self, self.data_lumbergh)
        exit_hooks = []
        if status == "waiting_for":
            exit_hooks.append('waiting_for')
        elif status == "queued":
            exit_hooks.append('queued')
        pd.open_new_prj_dialog(project, status, aofs, notes, exit_hooks)
        if isinstance(stuff_obj, inbox_items.InboxFile):
            pd.add_files_to_files_list(stuff_obj.path)
    def sync_nas_and_notes(self):
        """Export each active project's next actions to a Tomboy note.

        Two sort passes plus Python's stable sort yield a multi-key order:
        primary key (sort_date, priority, context) ascending, with ties
        broken by (time_est, energy_est) descending.
        """
        na_list = self.data_lumbergh.get_na_for_each_active_prj()
        # Secondary keys first; the stable second sort preserves this order
        # within equal primary keys.
        na_list.sort(key=operator.attrgetter('time_est', 'energy_est'),
                     reverse=True)
        na_list.sort(key=operator.attrgetter('sort_date', 'priority', 'context'))
        note_maker = task_export.NoteMaker()
        note_xml = note_maker.create_final_xml(na_list)
        note_maker.set_tomboy_xml(note_xml)
def temporarily_disable_widget(self, widget):
if widget.get_property('sensitive'):
widget.set_sensitive(False)
# FIXME: horrible, cheap hack to get around the fact that I don't
# know how to make & use my own signals yet. *sigh*
else:
widget.set_sensitive(True)
    def _open_na_url_or_note(self, treeview, event):
        """Handle clicks on the URL / notes icon columns of an NA list.

        Clicking the URL icon opens the NA's URL; clicking the notes icon
        opens the NA editor.  Clicks that hit no row are just logged.
        """
        path = treeview.get_path_at_pos(int(event.x), int(event.y))
        if not path:
            msg = "got bad 'url clicked' event: {0}\nEvent x: {1}\nEvent y: {2}\n"
            msg = msg.format(event, event.x, event.y)
            utils.log_line(msg, debug=True)
        else:
            # get_path_at_pos() result: [0] is the tree path tuple (first
            # element = row), [1] is the column that was hit.
            row_num = path[0][0]
            col = path[1]
            obj_list = treeview.get_parent()
            if col.attribute == 'url_icon':
                obj_list.select_paths([row_num])
                url = obj_list.get_selected().url
                if url is not None:
                    gtk.show_uri(gtk.gdk.Screen(), url, event.time)
            if col.attribute == 'notes_icon':
                obj_list.select_paths([row_num])
                if obj_list.get_selected().notes is not None:
                    self.edit_extant_na(obj_list.get_selected())
def _run_daily_tasks(self, run_now=True):
if run_now:
self.data_lumbergh.activate_due_queued()
self._rec_manager.place_recurring_tasks()
self.data_lumbergh.save_data()
jesus = managers.BackupJesus()
jesus.kill_stale_backups()
# technically "midnight tonight" is really "00:00" tomorrow
tomorrow = datetime.date.today() + datetime.timedelta(days=1)
midnight = time.mktime(tomorrow.timetuple())
# fudge on the time a little bit; make sure we always run after midnight
seconds_to_midnight = int(midnight - time.time() + 30)
# make sure we don't run again after our same interval - instead,
# just call ourselves again and keep creating new intervals.
log_msg = ("Running _run_daily_tasks(), and I am under the "
"impression believe there are %s seconds to midnight.")
utils.log_line(log_msg % seconds_to_midnight, datetime.datetime.now())
gobject.timeout_add_seconds(seconds_to_midnight, self._run_daily_tasks)
return False
    def _type_ahead_combo(self, combo, gdk_keyval):
        # Thin wrapper so the Glade-wired key-press callbacks share one
        # type-ahead implementation.
        ui.type_ahead_combo(combo, gdk_keyval)
#CALLBACKS
    def activate_na_w_clicked_cb(self, widget, data=None):
        # Promote the selected incubating NAs of the current project to active.
        nas = self.prj_details_incubating_na_list_w.get_selected_rows()
        if len(nas) > 0:
            prj = self.prj_list_w.get_selected()
            self.data_lumbergh.activate_nas(nas, prj.key_name)
            self.fill_prj_details_na_list_w(prj)
    def activate_prj_w_clicked_cb(self, widget, data=None):
        # Flip the selected project back to "active" and refresh the list.
        self.data_lumbergh.change_project_status(self.prj_list_w.get_selected(),
                                                 "active")
        self.fill_prj_list_w()
    def archive_completed_singletons_w_clicked_cb(self, widget, data=None):
        # Move completed singleton NAs to the archive, then redraw the list.
        self.data_lumbergh.archive_completed_singletons()
        self.fill_prj_details_na_list_w(self.prj_list_w.get_selected())
    def areas_of_focus_w_changed_cb(self, widget, data=None):
        # AOF filter changed: re-filter the project list.
        self.fill_prj_list_w()
    def areas_of_focus_w_key_press_event_cb(self, widget, data=None):
        # Type-ahead support for the AOF combo.
        self._type_ahead_combo(widget, data)
    def can_has_quit_box_grab_focus_cb(self, widget, data=None):
        #FIXME someday..
        # oh dear. this isn't a good thing. I forsee a lot of ghetto-tastic
        # action like this in my future though, because I can't seem to set up
        # an accel group in Glade that actually WORKS
        self.quit()
    def clarify_notes_copy_both_w_clicked_cb(self, widget, data=None):
        # Copy "summary\n\ndetails" to the clipboard.
        text = self.clarify_notes_details_summary_w.get_text() + "\n\n" + \
               self.clarify_notes_details_details_w.read()
        self.set_clipboard_text(text)
    def clarify_notes_copy_summary_w_clicked_cb(self, widget, data=None):
        # Copy just the summary line to the clipboard.
        self.set_clipboard_text(self.clarify_notes_details_summary_w.get_text())
    def clarify_create_new_na_clicked_cb(self, widget, data=None):
        # Turn the stuff being clarified into a next action.
        self.process_stuff_as_na(self.stuff_tree_w.get_selected(),
                                 self.clarify_notes_details_summary_w.get_text(),
                                 self.clarify_notes_details_details_w.read())
def clarify_create_prj_clicked_cb(self, widget, data=None):
selected_stuff = self.stuff_tree_w.get_selected()
details = None
if isinstance(selected_stuff, inbox_items.InboxNote):
summary = self.clarify_notes_details_summary_w.get_text()
maybe = self.clarify_notes_details_details_w.read()
if maybe != kiwi.ValueUnset:
details = maybe
else:
summary = selected_stuff.summary
self.process_stuff_as_prj(selected_stuff, summary, details)
def clarify_file_as_reference_w_clicked_cb(self, widget, data=None):
stuff = self.stuff_tree_w.get_selected()
self.file_stuff_as_reference(stuff)
def clarify_incubate_na_w_clicked_cb(self, widget, data=None):
self.process_stuff_as_na(self.stuff_tree_w.get_selected(),
self.clarify_notes_details_summary_w.get_text(),
self.clarify_notes_details_details_w.read(),
incubate_=True)
def clarify_incubation_prj_w_clicked_cb(self, widget, data=None):
selected_stuff = self.stuff_tree_w.get_selected()
details = None
if isinstance(selected_stuff, inbox_items.InboxNote):
summary = self.clarify_notes_details_summary_w.get_text()
maybe = self.clarify_notes_details_details_w.read()
if maybe != kiwi.ValueUnset:
details = maybe
else:
summary = selected_stuff.summary
self.process_stuff_as_prj(selected_stuff, summary, details,
status="incubating")
def clarify_queue_project_w_clicked_cb(self, widget, data=None):
selected_stuff = self.stuff_tree_w.get_selected()
details = None
if isinstance(selected_stuff, inbox_items.InboxNote):
summary = self.clarify_notes_details_summary_w.get_text()
maybe = self.clarify_notes_details_details_w.read()
if maybe != kiwi.ValueUnset:
details = maybe
else:
summary = selected_stuff.summary
self.process_stuff_as_prj(selected_stuff, summary, details,
status="queued")
def clarify_stuff_details_open_w_clicked_cb(self, widget, data=None):
gtk.show_uri(gtk.gdk.Screen(), self.stuff_tree_w.get_selected().uri,
int(time.time()))
def clarify_trash_stuff_w_clicked_cb(self, widget, data=None):
self._inbox_manager.complete_processing(self.stuff_tree_w.get_selected())
def clarify_waiting_for_prj_w_clicked_cb(self, widget, data=None):
selected_stuff = self.stuff_tree_w.get_selected()
details = None
if isinstance(selected_stuff, inbox_items.InboxNote):
summary = self.clarify_notes_details_summary_w.get_text()
maybe = self.clarify_notes_details_details_w.read()
if maybe != kiwi.ValueUnset:
details = maybe
else:
summary = selected_stuff.summary
self.process_stuff_as_prj(selected_stuff, summary, details,
status="waiting_for")
    def edit_na_w_clicked_cb(self, widget, data=None):
        # Edit the first selected next action.
        na = self.prj_details_na_list_w.get_selected_rows()[0]
        self.edit_extant_na(na)
    def edit_project_notes_w_clicked_cb(self, widget, data=None):
        # Open the notes editor for the selected project.
        prj = self.prj_list_w.get_selected()
        self.display_prj_notes(prj)
    def engage_context_w_changed_cb(self, widget, data=None):
        # Any Engage filter change re-fills the NA list.
        self.fill_engage_na_list()
    def engage_context_w_key_press_event_cb(self, widget, data=None):
        self._type_ahead_combo(widget, data)
    def engage_energy_level_w_changed_cb(self, widget, data=None):
        self.fill_engage_na_list()
    def engage_energy_level_w_key_press_event_cb(self, widget, data=None):
        self._type_ahead_combo(widget, data)
    def engage_na_list_cell_edited_cb(self, widget, obj=None, attribute=None):
        # Redraw after inline edits (e.g. toggling the "complete" checkbox).
        self.engage_na_list.refresh()
    def engage_na_list_focus_in_event_cb(self, widget, data=None):
        index = widget.index(widget.get_selected())
        # this is a bit absurd, but it's required to get the right behavior... strange.
        widget.select_paths([index])
        gobject.idle_add(widget.grab_focus)
    def engage_na_list_row_activated_cb(self, widget, obj=None):
        # Double-click / Enter opens the NA editor.
        self.edit_extant_na(obj)
    # FIXME: ZOMG FIXME!! (fix WHAT?)
    def engage_sync_w_clicked_cb(self, widget, data=None):
        # Disable the button during the sync, then re-enable (toggle hack).
        self.temporarily_disable_widget(widget)
        self.sync_nas_and_notes()
        self.temporarily_disable_widget(widget)
    def engage_time_available_w_changed_cb(self, widget, data=None):
        self.fill_engage_na_list()
    def engage_time_available_w_key_press_event_cb(self, widget, data=None):
        self._type_ahead_combo(widget, data)
    def enter_new_na_w_activate_cb(self, widget, data=None):
        # Entry activated: create an NA from its text, then clear the entry.
        text = widget.get_text() # strange. if I don't do this, somehow 'widget'
                                 # gets reassigned after calling start_new_na??
        widget.set_text("")
        self.start_new_na(text, self.prj_list_w.get_selected())
    def enter_new_prj_w_activate_cb(self, widget, data=None):
        # Entry activated: create a project from its text, then clear it.
        t = widget.get_text()
        widget.set_text("")
        self.start_new_prj(t)
    def fidy_window_accelgroup_accel_activate_cb(self, window, data1=None,
                                                 data2=None, data3=None):
        # Intentionally a no-op; kept while experimenting with accel groups.
        #FIXME: just testing.
        # print "\n\n\n\n\n"
        # for d in [data1, data2, data3]: print d
        # print window
        # print "\n\n\n\n\n"
        pass
    def clarify_add_stuff_to_prj_w_clicked_cb(self, widget, data=None):
        # Attach the selected stuff to a project chosen via the search dialog.
        stuff = self.stuff_tree_w.get_selected()
        reference_search_box = dialogs.ReferenceAttacherSearchDialog(
            self.data_lumbergh, self, stuff)
        reference_search_box.search("")
        self._inbox_manager.complete_processing(stuff)
    def clarify_add_to_read_review_w_clicked_cb(self, widget, data=None):
        # File the selected stuff into the read/review folder.
        stuff = self.stuff_tree_w.get_selected()
        self.data_lumbergh.file_stuff_as_read_review(stuff, defs.READ_REVIEW_PATH)
        self._inbox_manager.complete_processing(stuff)
    def incubate_na_w_clicked_cb(self, widget, data=None):
        # Demote the selected active NAs of the current project to incubating.
        nas = self.prj_details_na_list_w.get_selected_rows()
        if len(nas) > 0:
            prj = self.prj_list_w.get_selected()
            self.data_lumbergh.incubate_nas(nas, prj.key_name)
            self.fill_prj_details_na_list_w(prj)
    def incubate_prj_w_clicked_cb(self, widget, data=None):
        self.incubate_project(self.prj_list_w.get_selected())
    def incubate_stuff_clicked_cb(self, widget, data=None):
        # Jump to the incubation page of the Clarify notebook.
        self.select_clarify_tab(self.b.get_object("incubation_frame"))
    def main_window_destroy_cb(self, widget, data=None):
        self.quit()
    def mark_prj_complete_w_clicked_cb(self, widget, data=None):
        self.complete_project(self.prj_list_w.get_selected())
    def new_aof_w_clicked_cb(self, widget, data=None):
        self.create_new_aof()
    def open_project_support_folder_w_clicked_cb(self, widget, data=None):
        # Open the project's support folder in the default file manager.
        prj = self.prj_list_w.get_selected()
        uri = self.data_lumbergh.get_project_folder_uri(prj)
        gtk.show_uri(gtk.gdk.Screen(), uri, int(time.time()))
    def prj_add_file_w_clicked_cb(self, widget, data=None):
        self.add_file_to_prj(self.prj_list_w.get_selected())
    #FIXME: all this shouldn't be in a callback - put into a proper method
    def prj_delete_w_clicked_cb(self, widget, data=None):
        """Confirm, then delete the selected project.

        Keeps the selection sensible afterwards: if the last row was
        selected, the previous row gets selected instead.
        """
        selected_row_num = self.prj_list_w.get_selected_row_number()
        if selected_row_num + 1 == len(self.prj_list_w):
            selected_row_num = selected_row_num - 1
        prj = self.prj_list_w.get_selected()
        label = self.b.get_object("project_name_label")
        template = self.b.get_object("stupid_template_label").get_text()
        label.set_text(template % prj.summary)
        d = self.b.get_object("delete_prj_dialog")
        # Default focus on Cancel so a stray Enter doesn't destroy data.
        d.set_focus(self.b.get_object("cancel_delete_prj_w"))
        if d.run() == gtk.RESPONSE_OK:
            d.hide()
            self.delete_prj(prj)
            self.prj_list_w.select_paths([selected_row_num])
            gobject.idle_add(self.prj_list_w.grab_focus)
        else:
            d.hide()
    def prj_details_aofs_w_content_changed_cb(self, widget, data=None):
        prj = self.prj_list_w.get_selected()
        if prj != None:
            # don't try to set anything if the AOF combo was just changed by
            # selecting a new prj, instead of the user clicking the AOF combo
            aof = self.data_lumbergh.get_prj_aof_names(prj)[0]
            if widget.get_selected_label() != aof:
                self.data_lumbergh.set_prj_aofs(prj, widget.get_selected_label())
    def prj_details_na_list_w_cell_edited_cb(self, widget, data=None, wtf=None):
        self.prj_details_na_list_w.refresh()
    def prj_details_na_list_w_key_press_event_cb(self, widget, data=None):
        # Delete key removes the (single) selected NA.
        if gtk.gdk.keyval_name(data.keyval) == "Delete":
            nas = self.prj_details_na_list_w.get_selected_rows()
            if len(nas) == 1:
                self.delete_na(nas[0])
    def prj_details_na_list_w_row_activated_cb(self, widget, obj=None, data=None):
        na = self.prj_details_na_list_w.get_selected_rows()[0]
        self.edit_extant_na(na)
    def prj_details_priority_w_content_changed_cb(self, widget, data=None):
        # Write the combo's label straight back to the project's priority.
        prj = self.prj_list_w.get_selected()
        prj.priority = ui.translate_priority(widget.get_selected_label())
    def prj_details_priority_w_key_press_event_cb(self, widget, data=None):
        self._type_ahead_combo(widget, data)
    def prj_details_set_waiting_for_cb(self, widget, data=None):
        prj = self.prj_list_w.get_selected()
        self.set_prj_waiting_for(prj, widget.get_text())
    def prj_list_w_focus_in_event_cb(self, widget, data=None):
        index = self.prj_list_w.index(self.prj_list_w.get_selected())
        #this seems sort of absurd, but it's required to get the right behavior...
        self.prj_list_w.select_paths([index])
        gobject.idle_add(self.prj_list_w.grab_focus)
    def prj_list_w_selection_changed_cb(self, widget, data=None):
        # Refresh the details pane whenever the project selection moves.
        if len(self.prj_list_w) > 0:
            self.fill_prj_details(self.prj_list_w.get_selected())
    def prj_remove_file_w_clicked_cb(self, widget, data=None):
        self.remove_file_from_prj(self.prj_list_w.get_selected())
    def prj_support_files_w_key_press_event_cb(self, widget, data=None):
        # Delete key removes the selected support files (with confirmation).
        if gtk.gdk.keyval_name(data.keyval) == "Delete":
            self.remove_file_from_prj(self.prj_list_w.get_selected())
    def prj_support_files_w_row_activated_cb(self, widget, data=None):
        # Open the activated support file with the default handler.
        header = "file://"
        selected = widget.get_selected_rows()
        # don't do anything if multiple files are selected
        if len(selected) == 1:
            path = selected[0].full_path
            gtk.show_uri(gtk.gdk.Screen(), header + path, int(time.time()))
    def prj_waiting_for_w_clicked_cb(self, widget, data=None):
        self.mark_project_as_waiting_for(self.prj_list_w.get_selected())
    def queue_prj_w_clicked_cb(self, widget, data=None):
        self.queue_project(self.prj_list_w.get_selected())
    #FIXME: push to separate method
    def review_filter_clicked_cb(self, widget, data=None):
        # Status radio changed: re-filter projects, fix up buttons/selection.
        self.fill_prj_list_w()
        self.show_correct_project_action_buttons()
        self.prj_list_w.select_paths([0])
        gobject.idle_add(self.prj_list_w.grab_focus)
    def remove_na_w_clicked_cb(self, widget, data=None):
        nas = self.prj_details_na_list_w.get_selected_rows()
        if len(nas) == 1:
            self.delete_na(nas[0])
    def set_prj_date_cb(self, widget, data=None):
        self._set_valid_date_w(widget)
    def show_clarify_tab_toggled_cb(self, widget, data=None):
        self.workflow_nb.set_current_page(0)
    #FIXME: push to separate method
    def show_engage_tab_toggled_cb(self, widget, data=None):
        # Reset the Engage filters to permissive defaults on tab entry.
        self.engage_time_available_w.select_item_by_label("60+")
        self.engage_energy_level_w.select_item_by_label("Any")
        self.fill_engage_context_w(self.engage_context_w)
        self.workflow_nb.set_current_page(2)
        gobject.idle_add(self.engage_context_w.grab_focus)
    #FIXME: push to separate method
    def show_review_tab_toggled_cb(self, widget, data=None):
        self.workflow_nb.set_current_page(1)
        #focus the prj list
        gobject.idle_add(self.prj_list_w.grab_focus)
    def stuff_tree_w_key_press_event_cb(self, widget, data=None):
        # Delete key trashes the selected inbox stuff.
        if gtk.gdk.keyval_name(data.keyval) == "Delete":
            self._inbox_manager.complete_processing(
                self.stuff_tree_w.get_selected())
    def stuff_tree_w_selection_changed_cb(self, widget, data=None):
        self.fill_stuff_details(widget.get_selected())
    # FIXME: remove when not needed
    def ohnoes_w_clicked_cb(self, widget, data=None):
        self.fill_engage_na_list()
    def achanged_cb(self, widget, data=None):
        self.fill_engage_na_list()
    def hbox10_grab_focus_cb(self, widget, data=None):
        # FIXME: testing (kind of) - fix it for godssakes
        # this is a hack to open URLs in the Engage tab, because I don't yet
        # understand how AccelGroups work. ;-P
        na = self.engage_na_list.get_selected()
        if na.url:
            gtk.show_uri(gtk.gdk.Screen(), na.url, int(time.time()))
    def clarify_consolidate_inboxes_w_clicked_cb(self, widget, data=None):
        self.consolidate_inboxes(widget)
    # The four move_na_* callbacks differ only in the direction argument
    # passed to move_na_position().
    def move_na_down_w_clicked_cb(self, widget, data=None):
        prj = self.prj_list_w.get_selected()
        self.move_na_position(self.prj_details_na_list_w, prj, 'down')
    def move_na_first_w_clicked_cb(self, widget, data=None):
        prj = self.prj_list_w.get_selected()
        self.move_na_position(self.prj_details_na_list_w, prj, 'first')
    def move_na_last_w_clicked_cb(self, widget, data=None):
        prj = self.prj_list_w.get_selected()
        self.move_na_position(self.prj_details_na_list_w, prj, 'last')
    def move_na_up_w_clicked_cb(self, widget, data=None):
        prj = self.prj_list_w.get_selected()
        self.move_na_position(self.prj_details_na_list_w, prj, 'up')
    def search_w_activate_cb(self, widget, data=None):
        # Run the search, then clear the entry for next time.
        self.search(widget.get_text())
        widget.set_text("")
    def stuff_tree_w_mnemonic_activate_cb(self, widget, data=None):
        gobject.idle_add(widget.grab_focus)
    def search_w_grab_focus_cb(self, widget, data=None):
        pass
    def na_reset_age_menuitem_activate_cb(self, widget, data=None):
        # "Reset age": pretend the NA was created just now.
        selected = self.engage_na_list.get_selected()
        selected.creation_date = datetime.datetime.now()
    def engage_na_list_right_click_cb(self, widget, item=None, event=None):
        # Pop up the NA context menu at the pointer.
        menu = self.b.get_object('na_context_menu')
        menu.popup(None, None, None, event.button, event.get_time())
    def engage_na_list_click_cb(self, widget, event):
        # Clicks may hit the URL/notes icon columns; delegate the check.
        self._open_na_url_or_note(widget, event)
        return False
    def na_queue_to_cb(self, menu_item):
        # Queue the selected singleton NA until the date on the menu item.
        self.data_lumbergh.queue_singleton_na(self.engage_na_list.get_selected(),
                                              menu_item.get_label())
        self.fill_engage_na_list()
    def engage_na_list_key_press_event_cb(self, widget, data=None):
        # Delete key removes the selected NA outright.
        if gtk.gdk.keyval_name(data.keyval) == "Delete":
            na = widget.get_selected()
            self.data_lumbergh.engage_na_deleter(na.uuid)
            self.fill_engage_na_list()
    def delete_na_menuitem_activate_cb(self, widget, data=None):
        na = self.engage_na_list.get_selected()
        # FIXME: Add in a confirmation dialog here?
        self.data_lumbergh.engage_na_deleter(na.uuid)
        self.fill_engage_na_list()
    def add_stuff_w_activate_cb(self, widget, data=None):
        # Launch the quick-capture "Slider" window (imported lazily to keep
        # startup light).
        from fluidity import slider
        slider_app = slider.Slider()
        slider_app.window.show()
        slider_app.fill_prj_list_w()
        del(slider_app)
    def _engage_due_today_filter_w_toggled_cb(self, widget, data=None):
        self.fill_engage_na_list()
def _run():
    """Build the main Fluidity app, show its window, and run the GTK loop."""
    g = Fluidity()
    g.window.show()
    gtk.main()
def _run_profiled():
    """Run the app under cProfile, writing stats to ~/profile.out."""
    profile_path = os.path.join(os.path.expanduser('~'), 'profile.out')
    import cProfile
    cProfile.run("_run()", profile_path)
def run():
    """Public entry point: launch Fluidity (swap in _run_profiled to profile)."""
    _run()
    # _run_profiled()
if __name__ == "__main__":
    # Discourage direct execution: the packaged launcher scripts are the
    # supported way to start the app.  We print the warning but still run.
    print("""HEY YOU: Yes, you, the user -- DON'T RUN THIS DIRECTLY! Use the
launching script 'fluidity' in your system path (e.g.: in /usr/bin/), or if
you're running straight out of the folder from the .tar.gz file you grabbed,
then look for the script in the "bin" folder.
...or you could do the *really* wacky thing and click on the Fluidity item in
your GNOME/KDE menu. But that's not worth any nerd points, now is it?""")
    run()
| Python |
#-*- coding:utf-8 -*-
#
# Copyright (C) 2009 - Jens Knutson <jens.knutson at gmail dot com>
# This software is licensed under the GNU General Public License
# version 3 or later (see the file COPYING).
"""Total hack to export Fity tasks to a Tomboy note."""
from __future__ import absolute_import, division, print_function
__author__ = "Jens Knutson"
import time
from xml.sax import saxutils
from fluidity import dbus_misc
from fluidity import utils
class NoteMaker(object):
    """Create a Tomboy-format .note file to be sent to Conboy on a Maemo device

    Yep, it's that specific. ;P
    """
    # set up some templates. Hooray for hardcoded naughtiness!
    NOTE_TITLE = "@Next Actions from Fluidity__"
    # Note title, list
    NOTE_CONTENT_T = '<note-content version="0.1">{0}\n\n{1}\n\n\n</note-content>'
    SECTION_T = "\n\n<bold><size:large>{0}</size:large></bold>\n<list>{1}</list>"
    # BUG FIX: this template used to be '{0} --- {1} mins . {1} . {2}\n',
    # which printed the time estimate twice and silently dropped the due
    # date.  _create_task_string fills it with (summary, time, energy, due).
    NA_STRING_T = '{0} --- {1} mins . {2} . {3}\n'
    BULLET_T = '<list-item dir="ltr">{0}</list-item>'
    BULLET_LIST_T = '<list>{0}</list>'

    def create_final_xml(self, na_list):
        """Build the full <note-content> XML for na_list: one section per
        context, sections sorted by context name."""
        sections_xml = ""
        context_sections = self._create_context_sections(na_list)
        for section in sorted(context_sections):
            section_title = "@" + section.capitalize()
            # wtf? where are the newlines coming from?
            section_title = section_title.replace('\n', '')
            sections_xml += self.SECTION_T.format(section_title,
                                                  context_sections[section])
        return self.NOTE_CONTENT_T.format(self.NOTE_TITLE, sections_xml)

    def set_tomboy_xml(self, final_xml):
        """Push final_xml into the Tomboy note over D-Bus."""
        uri = dbus_misc.notes_proxy.FindNote(self.NOTE_TITLE)
        dbus_misc.notes_proxy.SetNoteContentsXml(uri, final_xml)
        # The display/hide dance is cargo-culted but apparently necessary:
        # otherwise Tomboy never writes the new XML to disk.
        time.sleep(4)
        dbus_misc.notes_proxy.DisplayNote(uri)
        time.sleep(4)
        dbus_misc.notes_proxy.HideNote(uri)

    def _create_context_sections(self, na_list):
        """Return {context dict-key: concatenated bullet XML} for na_list."""
        sections = {}
        for na in na_list:
            task_str = self.BULLET_T.format(self._create_task_string(na))
            # just use the context name as the dict key
            section_key = utils.format_for_dict_key(na.context)
            if section_key not in sections:
                sections[section_key] = ""
            sections[section_key] += task_str
        return sections

    def _create_task_string(self, na):
        """Format one NA as 'summary --- N mins . energy . due-date\\n',
        with the summary wrapped per its priority and any URL/notes appended
        as a nested bullet list."""
        clean_summary = saxutils.escape(na.summary)
        formatted_summary = self._set_priority(na, clean_summary)
        # save some space...  (the dict lookup also avoids the old if/elif
        # chain's UnboundLocalError for unexpected energy words)
        energy_est = {"Normal": "e= ",
                      "High": "e! ",
                      "Low": "e- "}.get(na.energy_est_word, "e? ")
        # we want it to look like this:
        #    summary - time-est - energy-est - due date
        # where summary has been formatted according to priority
        task_str = self.NA_STRING_T.format(formatted_summary,
                                           str(int(na.time_est)),
                                           energy_est,
                                           str(na.due_date))
        task_str += self._set_notes_and_url(na, task_str)
        return task_str

    def _set_notes_and_url(self, na, task_str):
        """Return nested-bullet XML for the NA's URL and/or notes, or ""."""
        if na.url or na.notes:
            sub_list = (self.BULLET_T.format("URL: " + saxutils.escape(na.url))
                        if na.url else "")
            if na.notes:
                notes = saxutils.escape(na.notes)
                # Tomboy/Conboy only keep in-bullet line breaks when they are
                # U+2028 LINE SEPARATOR characters; the replacement string
                # below contains a literal U+2028 between the quotes.
                notes = notes.replace("\n", " ")
                sub_list += self.BULLET_T.format(notes)
            return self.BULLET_LIST_T.format(sub_list) + '\n'
        else:
            return ""

    def _set_priority(self, na, task_str):
        """Wrap task_str in bold (priority 1) or italic (priority 3) tags."""
        if na.priority == 1:
            task_str = "<bold>" + task_str + "</bold>"
        elif na.priority == 3:
            task_str = "<italic>" + task_str + "</italic>"
        return task_str
#class TomboyTest(object):
# def __init__(self):
# self.tb_remote = self._bus.get_object('org.gnome.Tomboy',
# '/org/gnome/Tomboy/RemoteControl')
# self.dm = managers.DataManager()
#
# # set up some templates. Hooray for hardcoded naughtiness!
# self.NOTE_TITLE = "@Next Actions from Fluidity__"
# # Note title, list
# self.NOTE_CONTENT_T = ('<note-content version="0.1">{0}\n\n{1}\n\n\n'
# '</note-content>')
# self.SECTION_T = ("\n\n<bold><size:large>{0}</size:large></bold>\n"
# "<list>{1}</list>")
# self.NA_STRING_T = '{0} --- {1} mins . {2} . {3}\n'
# self.BULLET_T = '<list-item dir="ltr">{0}</list-item>'
# self.BULLET_LIST_T = '<list>{0}</list>'
#
# #FIXME: REMOVE THIS LATER?
# self.MAIN_INBOX = "/home/jensck/Inbox"
# self.TOMBOY_NOTE_FOLDER = "/home/jensck/.local/share/tomboy"
# self.FITY_SLIDER_INBOX = "/home/jensck/Inbox/.fity_note-slider"
# self.SIDEARM_INBOX = "/media/FAMILIAR/sidearm-inbox"
# self.CONBOY_NOTE_FOLDER = "/media/FAMILIAR/.conboy"
# self.STARTHERE_BACKUP_FOLDER = ("/home/jensck/.local/share/boxidate/"
# "start_here_backups")
# # Tomboy "magic" - these really aren't the Right Way, but ElementTree
# # was pissing me off, and seemed like overkill for something this small
# #Also note: THE NEXT THREE LINES GET WRITTEN INTO START HERE,
# #so don't screw with it unless you know what you're doing!
# self.TB_CONTENT_START = "<note-content version=\"0.1\">"
# self.TB_CONTENT_END = "</note-content>"
# self.SH_CONTENT_START = self.TB_CONTENT_START + "Start Here"
# self.NOTE_SUFFIX = ".note"
# #what to put inbetween the chunks of content grabbed from each NOTD
# self.PADDING = "\n\n\n"
#
# def _back_up_note(self, uri, backup_path, use_unix_timestamp=False):
# filename = uri.split('/')[-1] + self.NOTE_SUFFIX
# full_path = self.TOMBOY_NOTE_FOLDER + os.sep + filename
# if use_unix_timestamp:
# timestamp = str(time.time())
# else:
# timestamp = str(datetime.date.today())
# backup_file_path = backup_path + os.sep + timestamp + "_" + filename
# shutil.copy2(full_path, backup_file_path)
#
# def main(self):
# note_maker = NoteMaker(self.tb_remote)
# print "getting note URI"
# note_uri = self.tb_remote.FindNote(self.NOTE_TITLE)
# print "URI for " + self.NOTE_TITLE + " is: " + note_uri
# print "Getting na_list"
# na_list = self.dm.get_na_for_each_active_prj()
# na_list.sort(key=operator.attrgetter('time_est', 'energy_est'),
# reverse=True)
# na_list.sort(key=operator.attrgetter('sort_date', 'priority', 'context'))
#
# print "backing up file"
# self._back_up_note(note_uri, '/home/jensck/tomboy-test', True)
# new_note_xml = note_maker.create_final_xml(na_list)
# print "setting new XML"
# note_maker.set_tomboy_xml(new_note_xml)
# print "backing up again, after."
# self._back_up_note(note_uri, '/home/jensck/tomboy-test', True)
#if __name__ == '__main__':
# tt = TomboyTest()
# tt.main()
| Python |
#!/usr/bin/env python
#-*- coding:utf-8 -*-
#
# Copyright (C) 2010 - Jens Knutson <jens.knutson at gmail dot com>
# This software is licensed under the GNU General Public License
# version 3 or later (see the file COPYING).
"""Misc. D-Bus-related functions & an app-wide note proxy"""
from __future__ import absolute_import, division, print_function
__author__ = 'Jens Knutson'
import dbus
import dbus.mainloop.glib
from fluidity import defs
# Install the GLib main loop as the default so D-Bus proxies can receive
# signals; this must happen before the SessionBus below is constructed.
dbus.mainloop.glib.DBusGMainLoop(set_as_default=True)
_session_bus = dbus.SessionBus()
# D-Bus setup - making sure we don't lose our damned Tomboy/Gnote connection ;P
# Module-global proxy for the notes app's remote-control object; rebound by
# set_notes_proxy() whenever the notes bus name changes owner.
notes_proxy = None
# see dbus.SessionBus.watch_name_owner for why this is needed.
def set_notes_proxy(bus_name):
    """Rebind the module-global notes_proxy for `bus_name`.

    watch_name_owner passes "" when the name loses its owner (notes app
    quit); in that case the stale proxy is deliberately left in place.
    """
    global notes_proxy
    if bus_name != "":
        notes_proxy = _session_bus.get_object(bus_name, defs.NOTES_OBJECT_PATH)
_session_bus.watch_name_owner(defs.NOTES_BUS_NAME, set_notes_proxy)
# Prime the proxy right away in case the notes app is already running.
set_notes_proxy(defs.NOTES_BUS_NAME)
| Python |
#!/usr/bin/env python
#-*- coding:utf-8 -*-
#
# Copyright (C) 2009 - Jens Knutson <jens.knutson at gmail dot com>
# This software is licensed under the GNU General Public License
# version 3 or later (see the file COPYING).
from __future__ import absolute_import, division, print_function
__author__ = 'Jens Knutson'
import datetime
import os
import shutil
import yaml
from fluidity import defs
class FirstTimeBot(object):
    """First-run bootstrapper: lays down Fluidity's initial data files and
    folders if they don't exist yet."""

    def __init__(self):
        pass

    def create_initial_files_and_paths(self):
        """Create any missing data files and folders needed for a first run."""
        # Seed the main data file from the app's packaged default.
        if not os.path.exists(defs.USER_DATA_MAIN_FILE):
            packaged_default = os.path.join(defs.APP_DATA_PATH,
                                            defs.USER_DATA_MAIN_FNAME)
            shutil.copy(packaged_default, defs.USER_DATA_MAIN_FILE)
        # Seed an empty recurrence schedule if there isn't one yet.
        if not os.path.exists(defs.RECURRENCE_DATA):
            self._create_initial_recurrence_file(defs.RECURRENCE_DATA)
        # Make sure every expected data folder exists.
        for folder in defs.ALL_DATA_FOLDERS:
            if not os.path.exists(folder):
                os.makedirs(folder)
        # Drop a "don't delete this folder" warning into the Projects folder.
        warning_path = os.path.join(
            defs.MAIN_PRJ_SUPPORT_FOLDER,
            defs.PROJECT_FOLDER_DELETION_WARNING_FILE_NAME)
        if not os.path.exists(warning_path):
            shutil.copy(defs.PROJECT_FOLDER_DELETION_WARNING_PATH,
                        warning_path)
        # FIXME: once it's needed, copy in a note to the new Inbox folder

    def _create_initial_recurrence_file(self, full_path):
        """Write a fresh recurrence YAML file with empty daily/weekly/monthly
        schedules and today's date as last_run."""
        initial_data = {'daily': [], 'weekly': [], 'monthly': [],
                        'last_run': datetime.date.today()}
        print("Creating initial recurrence file...")
        with open(full_path, 'w') as yfile:
            yaml.dump(initial_data, yfile, Dumper=yaml.CDumper,
                      default_flow_style=False)
| Python |
#-*- coding:utf-8 -*-
# 'relevance' code:
#
# Copyright (C) 2009 Ulrik Sverdrup <ulrik.sverdrup@gmail.com>
# 2008 Christian Hergert <chris@dronelabs.com>
# 2007 Chris Halse Rogers, DR Colkitt
# David Siegel, James Walker
# Jason Smith, Miguel de Icaza
# Rick Harding, Thomsen Anders
# Volker Braun, Jonathon Anderson
#
# All the rest:
# Copyright (C) 2009 - Jens Knutson <jens.knutson at gmail dot com>
# This software is licensed under the GNU General Public License
# version 3 or later (see the file COPYING).
"""This module provides Fluidity "magic", including smart(ish) text parsing for rapid
input fields, as well as matching/formatting of related strings
based on relevance. The code originates from Gnome-Do (in C#).
* Python port by Christian Hergert
* Module updated by Ulrik Sverdrup to clean up and dramatically speed up
the code, by using more pythonic constructs as well as doing less work.
* Lots of spazzy PEP8 fixes by Jens Knutson
Compatibility: Python 2.4 and later, including Python 3
"""
from __future__ import absolute_import, division, print_function
__author__ = 'Jens Knutson'
import datetime
import string #pylint: disable-msg=W0402
import time
from parsedatetime import parsedatetime as pdt
# Py3k compat
# Py3k compat: on Python 2, alias `range` to the lazy xrange; on Python 3,
# xrange is undefined (NameError) and the builtin range is already lazy.
try:
    range = xrange #pylint: disable-msg=W0622
except NameError:
    pass
def format_common_substrings(s, query, format_clean=None, format_match=None):
    """Creates a new string highlighting matching substrings.

    Returns: a formatted string

    >>> format_common_substrings('hi there dude', 'hidude',
    ...                          format_match=lambda m: "<b>%s</b>" % m)
    '<b>hi</b> there <b>dude</b>'

    >>> format_common_substrings('parallelism', 'lsm', format_match=str.upper)
    'paralleLiSM'
    """
    format_clean = format_clean or (lambda x: x)
    format_match = format_match or (lambda x: x)
    # only format non-empty fragments ("" stays "")
    format_me = lambda x: x and format_clean(x)
    if not query:
        return format_me(s)
    ls = s.lower()
    # find overall range of match
    first, last = _find_best_match(ls, query)
    if first == -1:
        return format_me(s)
    # find longest perfect match, put in slc
    for slc in range(len(query), 0, -1):
        if query[:slc] == ls[first:first + slc]:
            break
    # recurse on the rest of the query against the rest of the matched span
    nextkey = query[slc:]
    head = s[:first]
    match = s[first: first + slc]
    matchtail = s[first + slc: last]
    tail = s[last:]
    # we use s[0:0], which is "" or u""
    result = s[0:0].join((format_me(head), format_match(match),
                          format_common_substrings(matchtail, nextkey,
                                                   format_clean, format_match),
                          format_me(tail)))
    # NOTE(review): the first replace() below is a no-op and the second
    # rewrites "&amp;" to "&".  This looks like HTML-entity mangling of the
    # original source (plausibly markup-escaping of "&" with double-escape
    # collapsing) -- verify against upstream before relying on this line.
    result = result.replace("&", "&").replace("&amp;", "&")
    return result
def score(s, query):
    """A relevance score for the string ranging from 0 to 1

    @s: a string to be scored
    @query: a string query to score against

    `s' is treated case-insensitively while `query' is interpreted literally,
    including case and whitespace.

    Returns: a float between 0 and 1

    >>> print(score('terminal', 'trml'))
    0.735098684211
    >>> print(score('terminal', 'term'))
    0.992302631579
    >>> print(score('terminal', 'try'))
    0.0
    >>> print(score('terminal', ''))
    1.0
    """
    if not query:
        return 1.0
    lowered = s.lower()
    # Base score: how tightly the query packs into the shortest matching
    # span of `s`...
    span_start, span_end = _find_best_match(lowered, query)
    if span_start == -1:
        return .0
    relevance = len(query) / (span_end - span_start)
    # ...weighted so that shorter candidate strings score higher.
    relevance *= .7 + len(query) / len(s) * .3
    # Bonus points for query characters that begin words in the span.
    good = 0
    bad = 1
    word_starts = 0
    for idx in range(span_start, span_end - 1):
        if lowered[idx] in " -":
            if lowered[idx + 1] in query:
                word_starts += 1
            else:
                bad += 1
    # A first-character match counts extra.
    if query[0] == lowered[0]:
        word_starts += 2
    # The longer the acronym, the better it scores.
    good += word_starts * word_starts * 4
    # Better yet if the match itself started there.
    if span_start == 0:
        good += 2
    # Super duper bonus if it is a perfect match.
    if query == lowered:
        good += span_end * 2 + 4
    relevance = (relevance + 3 * good / (good + bad)) / 4
    # Contiguous matches own the .9-1.0 band; split matches sit below it,
    # so a perfect match always outranks any split match.
    if span_end - span_start == len(query):
        return .9 + .1 * relevance
    return .9 * relevance
def _find_best_match(s, query):
"""Find the shortest substring of @s containing all the characters
of the query, in order.
@s: a string to be searched
@query: a string query to search for in @s
Returns: a two-item tuple containing the start and end indicies of
the match. No match returns (-1,-1).
>>> _find_best_match('terminal', 'trml')
(0, 8)
>>> _find_best_match('total told', 'tl')
(2, 5)
>>> _find_best_match('terminal', 'yl')
(-1, -1)
"""
best_match = -1, -1
# Find the last instance of the last character of the query
# since we never need to search beyond that
last_char = s.rfind(query[-1])
# No instance of the character?
if last_char == -1:
return best_match
# Loop through each instance of the first character in query
index = s.find(query[0])
query_length = len(query)
last_index = last_char - len(query) + 1
while 0 <= index <= last_index:
# See if we can fit the whole query in the tail
# We know the first char matches, so we don't check it.
cur = index + 1
qcur = 1
while qcur < query_length:
# find where in the string the next query character is
# if not found, we are done
cur = s.find(query[qcur], cur, last_char + 1)
if cur == -1:
return best_match
cur += 1
qcur += 1
# Take match if it is shorter. If it's a perfect match, we are done.
if best_match[0] == -1 or (cur - index) < (best_match[1] -
best_match[0]):
best_match = (index, cur)
if cur - index == query_length:
break
index = s.find(query[0], index + 1)
return best_match
class MagicMachine(object):
    """Smart(ish) parser that turns rapid-input text into task fields."""
    # because it makes the magics!
    # (later addition) ...and it makes it from the blood of my labors! YAY

    def __init__(self, datamgr=None):
        # parsedatetime Calendar, used for natural-language date parsing
        self.pdtCal = pdt.Calendar()
        if datamgr:
            # data manager; used by get_magic_context() to look up contexts
            self.data_lumbergh = datamgr

    def get_magic_date(self, muggle_text):
        """Parse natural-language `muggle_text` into a datetime.date.

        Returns None for empty or unparseable input.
        """
        if muggle_text != "":
            parse_results = self.pdtCal.parse(muggle_text)
            #pdtCal.parse returns a tuple: the second item is an int, 1 or 0,
            #indicating if it could make sense of the input it was fed
            if parse_results[1]:
                ts = time.mktime(parse_results[0])
                magic_date = datetime.date.fromtimestamp(ts)
                return magic_date
        return None

    def get_magic_context(self, text):
        """Expand `text` to the first known context that matches by prefix;
        otherwise synthesize '@Capitalized'.  Empty input returns unchanged."""
        if text == "":
            return text
        t_cmp = self._prepare_for_context_comparison(text)
        contexts = self.data_lumbergh.get_contexts()
        for c in contexts:
            c_cmp = self._prepare_for_context_comparison(c)
            if c_cmp.startswith(t_cmp):
                return c
        # nothing matched...
        text = '@' + t_cmp.capitalize()
        return text

    def get_magic_task(self, muggle_text):
        """Parse `muggle_text` into a dict of task fields.

        Recognized tokens (energy, time, priority, context, due date, URL)
        are stripped from the text; whatever remains becomes 'summary'.
        Returns None for empty input.
        """
        if not muggle_text:
            return None
        else:
            magic_task = {}
            token_marker = None
            muggle_text = self._strip_dupe_spaces(muggle_text)
            # NOTE: the order below is important! don't change it unless you
            # understand what you're doing!
            method_list = (self._set_energy_est, self._set_time_est,
                           self._set_high_priority, self._set_context,
                           self._set_due_date, self._set_low_priority,
                           self._set_url)
            for method in method_list:
                muggle_text = self._strip_dupe_spaces(muggle_text)
                muggle_text, token_marker = method(muggle_text, magic_task,
                                                   token_marker)
            # FIXME: this is probably stupid somehow - should probably make this
            # part of setting the context in the first place...
            if 'context' in magic_task:
                magic_task['context'] = self.get_magic_context(magic_task['context'])
            # set summary with what's left
            magic_task['summary'] = muggle_text
            return magic_task

    def _prepare_for_context_comparison(self, text):
        # compare contexts case-insensitively and without the '@' sigil
        text = text.replace('@', '')
        return text.lower()

    def _strip_dupe_spaces(self, dirty_string):
        """Collapse runs of whitespace into single spaces."""
        d_list = dirty_string.split()
        # NOTE(review): str.split() with no arguments never yields '' items,
        # so this remove() loop always breaks on its first pass -- dead code.
        while True:
            try:
                d_list.remove('')
            except ValueError:
                break
        clean_string = " ".join(d_list)
        return clean_string

    def _set_due_date(self, text, task, marker):
        """Strip a trailing ' due <date>' token and set task['due_date']."""
        #print "*** entering _set_due_date ", text
        dmark = " due "
        if marker != None:
            # only look at-or-after the last recognized token position
            m = text.rfind(dmark, marker - 1)
        else:
            m = text.rfind(dmark)
        if m > 0:
            date_text = text[m + len(dmark):]
            magic_date = self.get_magic_date(date_text)
            if magic_date:
                text = text[:m]
                task['due_date'] = magic_date
        return text, marker

    def _set_energy_est(self, text, task, marker):
        """' e[!-]' - mark the position of this token if it's lower than the
        previous, then process, then remove it"""
        temp_text = text.lower()
        if " e!" in temp_text:
            new_marker = temp_text.rfind(' e!')
            task['energy_est'] = 2
            text = text[:new_marker] + text[new_marker + 3:]
        # first test is so we don't catch "e-mail", etc
        elif text.endswith(" e-") or " e- " in temp_text:
            new_marker = temp_text.rfind(' e-')
            task['energy_est'] = 0
            text = text[:new_marker] + text[new_marker + 3:]
        # NOTE(review): if neither branch above ran, `new_marker` is unbound
        # and a truthy `marker` would raise UnboundLocalError here.  Latent
        # in practice: this method runs before _set_context in
        # get_magic_task's method_list, so `marker` is always None here --
        # but verify before calling this from anywhere else.
        if marker:
            if - 1 < new_marker < marker:
                marker = new_marker
        return text, marker

    def _set_time_est(self, text, task, marker):
        """' [0-1]m' - mark the position of this token into "global" var, then
        process it, then remove it"""
        #print "*** entering _set_time_est ", text
        time_token = None
        #apparently single quotes fuck things up??
        tlist = text.split()
        for w in tlist:
            if w.endswith("m"):
                for c in w[:-1]:
                    if c not in string.digits:
                        break
                # NOTE(review): this runs even after the `break` above, so a
                # non-numeric word like "them" is accepted as time_token and
                # then crashes float() below -- presumably the inner loop was
                # meant to use for/else (or to `continue` the outer loop).
                time_token = w
        if time_token:
            task['time_est'] = float(time_token[:-1])
            new_marker = text.rfind(time_token)
            tlen = len(time_token)
            text = text[:new_marker] + text[new_marker + tlen:]
        # NOTE(review): same latent unbound-`new_marker` pattern as
        # _set_energy_est; harmless today because `marker` is still None when
        # this runs (before _set_context).
        if marker:
            if - 1 < new_marker < marker:
                marker = new_marker
        return text, marker

    def _set_high_priority(self, text, task, marker):
        """This is separate from low priority because low priority is much more
        difficult to parse. If we can just get this one out of the way, it
        makes things much easier."""
        #print "*** entering _set_high_priority ", text
        if " !" in text:
            new_marker = text.rfind(' !')
            task['priority'] = 1
            text = text[:new_marker] + text[new_marker + 2:]
        # NOTE(review): same latent unbound-`new_marker` pattern as above.
        if marker:
            if - 1 < new_marker < marker:
                marker = new_marker
        return text, marker

    def _set_low_priority(self, text, task, marker):
        """This is separate from high priority because... see comments re:
        setting high priority."""
        #print "*** entering _set_low_priority ", text
        # FIXME: make it actually work for " - "
        if 'priority' not in task:
            if text.endswith(" -") or text.endswith(" - "):
                task['priority'] = 3
                # NOTE(review): for the " - " ending this strips only 2 of 3
                # trailing chars; the next _strip_dupe_spaces pass cleans up.
                text = text[:-2]
        return text, marker

    def _set_url(self, text, task, marker):
        """Record the last URL-looking word as task['url']; text unchanged."""
        #print "*** entering _set_url ", text
        tlist = text.split()
        for i in tlist:
            if "://" in i:
                task['url'] = i
        return text, marker

    # sweet twitching fuck this needs cleaning up... ew.
    def _set_context(self, text, task, marker):
        """Find and strip a '@context' token; set task['context']."""
        #print "*** entering _set_context ", text
        #context - the last instance of a substring that starts with ' @[a-z]'
        #first, find our best candidate - if we don't find one, just bail out
        candidate = None
        urls = []
        for w in text.split():
            if w.startswith('@'):
                candidate = w
            if "://" in w:
                urls.append(w)
        # is the candidate also the last item in the list? if so, we have to
        # assume that this is the correct item
        # FIXME: bah. don't do this.
        # UPDATE: disabling the try/except so I can find out what exceptions
        # actually get thrown, ffs
        # try:
        if candidate == text.split()[-1]:
            task['context'] = text.split()[-1]
            marker = text.rfind(candidate)
            text = text[:marker]
            return text, marker
        # except:
        #     print "your awesome maintainer is not, in fact, awesome."
        # and if we have nothing, or if we end in a punctuation character:
        if not candidate or candidate[-1] in ['!', '?', '.']:
            return text, marker
        # context not yet found..
        candidate_index = text.rfind(candidate)
        # if we came after another marker at this point, we know we're safe
        if marker:
            if candidate_index >= marker:
                marker = candidate_index
                right = text.find(' ', marker)
                context = text[marker:right]
                task['context'] = context
                # NOTE(review): str.replace() needs a *string* replacement but
                # `marker` is an int, so this line raises TypeError if ever
                # reached (probably meant text.replace(context, '', 1)).
                # Unreachable via get_magic_task today: `marker` is always
                # None when _set_context runs.
                text = text.replace(context, marker, 1)
                return text, marker
        # context still not found yet, moving on...
        # strip URLs, we don't want to consider them for this
        temp_text = text
        for u in urls:
            temp_text = temp_text.replace(u, '')
        # don't accidentally pick up low priority for the later "between due
        # and us" test
        # NOTE(review): strings are immutable, so this replace()'s result is
        # discarded -- presumably `temp_text = temp_text.replace(...)` was
        # intended.
        temp_text.replace(" - ", "")
        due_index = text.rfind(" due ")
        if due_index > candidate_index:
            if candidate_index + len(candidate) != due_index:
                # at this point we should never have anything between us and
                # the due_index
                return text, marker
        right = candidate_index + len(candidate)
        remaining_after_due = text[candidate_index:right]
        if remaining_after_due == "":
            # candidate can't be a context
            return text, marker
        # FINALLY. christ... we have to assume we're a context here
        tlist = text.split()
        tlist.reverse()
        tlist.remove(candidate)
        tlist.reverse()
        text = " ".join(tlist)
        task['context'] = candidate
        marker = candidate_index
        return text, marker
| Python |
# pylint: disable-msg=C0103
from collections import namedtuple
_MAJOR = 0
_MINOR = 3
_MICRO = 0
_vers_ntuple = namedtuple('version_info', 'description major minor micro')
_vers_nums = _MAJOR, _MINOR, _MICRO
# BUG FIX: the old cleanup did `del ..., i`, relying on the list-comprehension
# variable `i` leaking into module scope -- that only works on Python 2 and
# raises NameError at import time on Python 3.  map(str, ...) never creates
# the temporary name in the first place.
__version_info__ = _vers_ntuple(".".join(map(str, _vers_nums)), *_vers_nums)
__version__ = __version_info__.description
# clean up the namespace now that we're done...
del namedtuple, _MAJOR, _MINOR, _MICRO, _vers_ntuple, _vers_nums
# thank you, Fluendo!
def fuck_you_too_pygobject():
    """GObject introspection: you can use any dynamic language you want, as long
    as it's Javascript.

    Monkey-patches GObjectMeta._type_register on pygobject > 2.26.0 to
    restore the pre-84d614 registration behavior that Kiwi depends on.
    """
    if gobject.pygobject_version > (2, 26, 0):
        # Kiwi is not compatible yet with the changes introduced in
        # http://git.gnome.org/browse/pygobject/commit/?id=84d614
        # Basically, what we do is to revert the changes in _type_register of
        # GObjectMeta at least until kiwi works properly with new pygobject
        from gobject._gobject import type_register #@UnresolvedImport
        def _type_register(cls, namespace):
            ## don't register the class if already registered
            if '__gtype__' in namespace:
                return
            if not ('__gproperties__' in namespace or
                    '__gsignals__' in namespace or
                    '__gtype_name__' in namespace):
                return
            # Do not register a new GType for the overrides, as this would sort
            # of defeat the purpose of overrides...
            if cls.__module__.startswith('gi.overrides.'):
                return
            type_register(cls, namespace.get('__gtype_name__'))
        gobject.GObjectMeta._type_register = _type_register
    # NOTE(review): placed at function level here, so the function appears to
    # return True whether or not the patch was applied -- confirm intent.
    return True
# Apply the patch at import time; a missing gobject just means pygobject
# isn't installed, in which case there is nothing to patch.
try:
    import gobject
    fuck_you_too_pygobject()
except ImportError:
    pass
| Python |
#-*- coding:utf-8 -*-
#
# Copyright (C) 2009 - Jens Knutson <jens.knutson at gmail dot com>
# This software is licensed under the GNU General Public License
# version 3 or later (see the file COPYING).
"""Miscellaneous classes for Fluidity's needs
...and OMG, look: it's code I'm not totally ashamed of, huzzah!
"""
from __future__ import absolute_import, division, print_function
__author__ = 'Jens Knutson'
from xml.sax import saxutils
from fluidity import dbus_misc
from fluidity import defs
class Error(Exception):
    """Base class for Fluidity-specific errors.

    Subclasses override `message`; `value` holds the offending item and is
    appended to `message` when the exception is printed.
    """
    def __init__(self, value):
        super(Error, self).__init__()
        self.message = "Undefined error: "
        self.value = value

    def __str__(self):
        return "{0}{1}".format(self.message, self.value)
class NoteAlreadyExistsError(Error):
    """Raised when creating a note whose title is already taken."""
    def __init__(self, note_title):
        super(NoteAlreadyExistsError, self).__init__(note_title)
        self.message = "Note with this title already exists: "
class NoUriSetError(Error):
    """Raised when an operation needs a note URI but none is set."""
    def __init__(self, value=""):
        super(NoUriSetError, self).__init__(value)
        self.message = "No URI set; a uri must be specified."
class NoteNotFoundError(Error):
    """Raised when no note exists at the given URI."""
    def __init__(self, value):
        super(NoteNotFoundError, self).__init__(value)
        self.message = "Could not find note with uri: "
class Note(object):
"""Tomboy Note object representation."""
# This is REALLY not the right way to do this, but I didn't understand how
# to do it with ElementTree until /after/ I created this hack. I'll fix
# it later.
BULLET_LIST_SINGLE_ITEM = '<list><list-item dir="ltr">{0}\n</list-item></list>'
BULLET_LIST_WITH_SUB_ITEM = ('<list><list-item dir="ltr">{0}\n<list>'
'<list-item dir="ltr">{1}</list-item></list>'
'</list-item></list>')
# NOTE: THE NEXT THREE LINES GET WRITTEN INTO YOUR NOTE,
# so don't screw with it unless you know what you're doing!
CONTENT_START_MARKER_NO_NS = """<note-content version="0.1">"""
TB_NAMESPACE = 'http://beatniksoftware.com/tomboy'
CONTENT_START_MARKER_WITH_NS = \
'<note-content xmlns="' + TB_NAMESPACE + '" version="0.1">'
CONTENT_END_MARKER = "</note-content>"
NOTE_SUFFIX = ".note"
def __init__(self, title=None, uri=None):
"""Initializes the Note object.
One, and only one, in (uri, title) must be specified.
Args:
uri: complete URI of the note
title: title of the note
"""
super(Note, self).__init__()
specify_msg = "'title' or 'uri' must be specified."
assert (title or uri) and not (title and uri), specify_msg
if uri:
assert "://" in uri, "URI format is wrong -- received: " + uri
remote = self._get_note_remote()
if uri:
title = remote.GetNoteTitle(uri)
elif title:
uri = remote.FindNote(title)
self.uri = uri
self.title = title
def delete(self):
"""Delete note from Tomboy/Gnote... PERMANENTLY. There is undo!"""
remote = self._get_note_remote()
remote.DeleteNote(self.uri)
def insert_xml_content(self, new_content, start_marker=None):
"""Insert new_content into note's XML content.
Args:
new_content: string with the Tomboy-formatted XML to be inserted
start_marker: string containing Tomboy-formatted XML to use as a
marker for the insertion point for new_content
If start_marker is not specified, new_content will be inserted at the
beginning of the note, immediately following the newline after the
note's title.
Returns: (nothing - method acts directly on the note's content)
"""
remote = self._get_note_remote()
note_xml = remote.GetNoteContentsXml(self.uri)
if self.CONTENT_START_MARKER_WITH_NS in note_xml:
content_start_marker = self.CONTENT_START_MARKER_WITH_NS
elif self.CONTENT_START_MARKER_NO_NS in note_xml:
content_start_marker = self.CONTENT_START_MARKER_NO_NS
else:
fail_msg = ("TOMBOY FAIL: they changed the note contents tag "
"format... AGAIN.")
raise Exception(fail_msg)
if not start_marker:
start_marker = "".join(content_start_marker,
saxutils.escape(self.title), "\n")
start_index = note_xml.find(start_marker)
if start_index != -1:
insertion_point = start_index + len(start_marker)
new_xml = (note_xml[:insertion_point] +
new_content +
note_xml[insertion_point:])
remote.SetNoteContentsXml(self.uri, new_xml)
else:
raise Exception("FAIL. Somehow we had another problem with "
"the <note content> tag. Again.")
def show(self):
"""Display this Note."""
if self.uri:
if not self._get_note_remote().DisplayNote(self.uri):
raise NoteNotFoundError(self.uri)
else:
raise NoUriSetError()
def create_note(self):
"""Create a new Tomboy note with self.title as the title"""
remote = self._get_note_remote()
if remote.FindNote(self.title):
raise NoteAlreadyExistsError(self.title)
self.uri = remote.CreateNamedNote(self.title)
return self.uri
# def set_note_dimensions(self, width, height):
# """Set note XML width and height values to `width` and `height`."""
# remote = self._get_note_remote()
# dimensions = {'width': str(width), 'height': str(height)}
# # all this fucking around with unicode and the document header is to get
# # lxml to STFU. Yeah, this is probably The Wrong Way(TM), once again,
# # but that's how I roll, y0: ghetto-tastic shitball code. If you want
# # to understand why each of these changes was made, remove this
# # jiggery pokery and you'll see what I mean. ;-P
# note_xml = remote.GetNoteCompleteXml(self.uri).encode('utf-8').split('\n')
# if note_xml[0].startswith('<?xml'):
# doc_header = note_xml[0:2] + '\n' #.replace("utf-16", "utf-8") + '\n'
# note_xml = "\n".join(note_xml[1:])
## # fix up the xml so lxml.etree doesn't bitch about the content being
## # unicode with an encoding declaration, or that the declaration is wrong
## note_xml = str(note_xml.replace('<?xml version="1.0" encoding="utf-16"?>',
## '<?xml version="1.0" encoding="utf-8"?>'))
# with open('/home/jensck/pre_processing.xml', 'w') as prefile:
# prefile.write(note_xml)
# ntree = etree.fromstring(note_xml)
# for dim in dimensions:
# dimension_element = ntree.findall('.//{%s}%s' % (self.TB_NAMESPACE,
# dim))[0]
# dimension_element.text = dimensions[dim]
# removeme = doc_header + etree.tostring(ntree)
# remote.SetNoteCompleteXml(self.uri, removeme)
# with open('/home/jensck/post_processing.xml', 'w') as postfile:
# postfile.write(removeme)
# removeme = remote.GetNoteCompleteXml(self.uri)
# with open('/home/jensck/post_change.xml', 'w') as changefile:
# changefile.write(removeme)
# remote = self._get_note_remote()
# old_width_tags = "<width>450</width>"
# new_width_tags = "<width>{0}</width>".format(width)
# fuck_you_all = remote.GetNoteCompleteXml(self.uri)
# removeme = fuck_you_all.replace(old_width_tags, new_width_tags)
# removeme = unicode(removeme.replace("utf-16", "utf-8"))
# removeme.replace('<note-content version="0.1">Ya mamma smokes crack!',
# '<note-content version="0.1">Ya mamma smokes crack!\n'
# 'She got a burnin yearnin and theres no goin back!')
# remote.SetNoteCompleteXml(self.uri, removeme)
# with open('/home/jensck/post_change.xml', 'w') as changefile:
# changefile.write(removeme)
def _build_bullets(self, stuff):
"""Returns a Tomboy XML-formatted bullet list from a str."""
stuff = saxutils.escape(stuff)
return self.BULLET_LIST_SINGLE_ITEM.format(stuff.replace("\n", "
"))
def _get_note_remote(self):
"""Return a dbus "RemoteControl" proxy for talking to Tomboy/GNote.
This is a method instead of just an attribute to avoid any possible
pickling issues.
"""
return dbus_misc.notes_proxy
class ProjectNote(Note):
    """A Tomboy/GNote note bound to a single Fluidity Project."""

    # XML header marking the section of the note that holds raw,
    # still-unprocessed inbox content.
    NOTES_FROM_INBOX_HEADER = ("<bold><size:huge>Raw/Unprocessed notes"
                               "</size:huge></bold>\n")
    # Prefix for every project note title.
    PROJECT_NOTE_TITLE_TEMPLATE = "Prj: "

    def __init__(self, uri=None, title=None, prj=None, notes_for_new_prj=None):
        """Initializes the ProjectNote object.

        One of (uri, title, prj) must be specified.

        Args:
            uri: complete URI of the requested note.
            title: title of the note to open - NOTE: title will automatically
                have ProjectNote.PROJECT_NOTE_TITLE_TEMPLATE pre-pended to it.
            prj: a Project.
            notes_for_new_prj: Optional; used as the note contents for a new
                Project, to be initially inserted into the new note. Can be
                either as Tomboy XML or plain text.
        """
        # The prefix added to the note title is to prevent note name collisions
        # with pre-existing notes.
        if prj:
            title = self.PROJECT_NOTE_TITLE_TEMPLATE + prj.summary
            self.prj = prj
        elif not title.startswith(self.PROJECT_NOTE_TITLE_TEMPLATE):
            title = self.PROJECT_NOTE_TITLE_TEMPLATE + title
        # FIXME: I've no idea where this problem is being introduced, but I
        # have approximately zero desire to figure it out right now. bah!
        # NOTE(review): as written this replace is a no-op; the first
        # argument was probably a double space originally -- confirm
        # against upstream history.
        title = title.replace(" ", " ")
        try:
            super(ProjectNote, self).__init__(title, uri)
        except AssertionError:
            # Note.__init__ asserts it received a title or uri; re-assert
            # here with a message that mentions 'prj' as a third option.
            msg = "One of: 'title', 'uri', or 'prj' must be specified."
            assert prj is not None, msg
        remote = self._get_note_remote()
        self._new_prj_note_template_uri = \
            remote.FindNote(defs.NEW_PROJECT_NOTE_TEMPLATE_NOTE_TITLE)
        if notes_for_new_prj:
            assert prj is not None, ("You must specify a Project object in "
                                     "ProjectNote.__init__ to use "
                                     "notes_for_new_prj")
            self.title = title
            self.create_note()
            # Tag the fresh note into the notebook matching the project's
            # current status.
            remote.AddTagToNote(self.uri,
                                self._get_prj_status_tags(prj.status)[0])
            # newline padding added just to make it look a bit nicer.
            self.add_stuff(notes_for_new_prj)

    def add_stuff(self, stuff):
        """Add content from an InboxStuff object to the note, as bullets.

        Creates a new note with self.title if a note w/that title can't be found.

        Args:
            stuff: any inbox_stuff.InboxStuff object (or anything with
                string 'summary' and 'details' attributes).
        """
        remote = self._get_note_remote()
        if not remote.FindNote(self.title):
            self.create_note()
        stuff_bullets = self._build_bullets(stuff)
        note_xml = remote.GetNoteContentsXml(self.uri)
        if note_xml.find(self.NOTES_FROM_INBOX_HEADER) == -1:
            # we didn't find the header, so add it to the content before
            # handing it down to be inserted.
            # make it the starting marker as an arg
            stuff_bullets = ("\n" + self.NOTES_FROM_INBOX_HEADER +
                             stuff_bullets + "\n")
            self.insert_xml_content(stuff_bullets)
        else:
            # we already have the header, so don't insert it, but do pass it
            # as the start marker.
            self.insert_xml_content(stuff_bullets, self.NOTES_FROM_INBOX_HEADER)

    def change_prj_status(self, new_status):
        """Move Note to the correct notebook based on new_status."""
        remote = self._get_note_remote()
        old_tag, new_tag = self._get_prj_status_tags(self.prj.status, new_status)
        remote.RemoveTagFromNote(self.uri, old_tag)
        remote.AddTagToNote(self.uri, new_tag)

    def create_note(self, use_template=True):
        """Create the backing note; optionally fill it from the Prj template.

        Returns:
            The URI of the newly created note.
        """
        self.uri = super(ProjectNote, self).create_note()
        if use_template:
            self._replace_note_contents_with_prj_template()
        return self.uri

    def show(self):
        """Display the note; if not found, create new note using self.title."""
        # Explicitly NOT making this recursive -- if something fails here, I
        # don't want storms of exceptions or floods of dbus traffic.
        try:
            super(ProjectNote, self).show()
        except (NoteNotFoundError, NoUriSetError):
            self.uri = self.create_note()
            super(ProjectNote, self).show()

    def _build_bullets(self, stuff):
        """Returns a Tomboy XML-formatted bullet list from a Stuff note or str."""
        if not hasattr(stuff, 'summary'):
            # Plain string (or string-like) content: let the parent handle it.
            bullets_xml = super(ProjectNote, self)._build_bullets(stuff)
        else:
            # Escape XML special characters -- these values are spliced
            # straight into the note's XML body.
            stuff.summary = saxutils.escape(stuff.summary)
            stuff.details = saxutils.escape(stuff.details)
            if stuff.details:
                # NOTE(review): these newline replaces look like no-ops in
                # this copy of the source; the replacement string may have
                # been mangled -- confirm against upstream history.
                stuff.summary = stuff.summary.replace("\n", "\n")
                stuff.details = stuff.details.replace("\n", "\n")
                bullets_xml = self.BULLET_LIST_WITH_SUB_ITEM.format(stuff.summary,
                                                                    stuff.details)
            else:
                stuff.summary = stuff.summary.replace("\n", "\n")
                bullets_xml = self.BULLET_LIST_SINGLE_ITEM.format(stuff.summary)
        return bullets_xml

    def _get_prj_status_tags(self, *statuses):
        """Return notebook tags for project status(es)."""
        tag_results = []
        tags = {"active": "system:notebook:Projects - active",
                "queued": "system:notebook:Projects - queued",
                "incubating": "system:notebook:Projects - incubating",
                "waiting_for": "system:notebook:Projects - waiting for",
                "completed": "system:notebook:Projects - completed"}
        for status in statuses:
            tag_results.append(tags[status])
        return tag_results

    def _replace_note_contents_with_prj_template(self):
        """REPLACES a note's contents with the Project template note's contents.

        USE WITH CARE: when called, this method will *replace* the
        contents of your note without any notice or warning.
        """
        remote = self._get_note_remote()
        template_content = \
            remote.GetNoteContentsXml(self._new_prj_note_template_uri)
        # Swap the template's title for this note's (escaped) title.
        note_contents = template_content.replace(
            defs.NEW_PROJECT_NOTE_TEMPLATE_NOTE_TITLE,
            saxutils.escape(self.title))
        remote.SetNoteContentsXml(self.uri, note_contents)
| Python |
#!/usr/bin/python -O
#-*- coding:utf-8 -*-
#
# Copyright (C) 2009 - Jens Knutson <jens.knutson at gmail dot com>
# This software is licensed under the GNU General Public License
# version 3 or later (see the file COPYING).
"""Contains the Slider app class"""
from __future__ import absolute_import, division, print_function
__author__ = 'Jens Knutson'
try:
import cPickle as pickle
except ImportError:
import pickle
import datetime
import operator
import os
import time
import gtk
from collections import namedtuple
from kiwi.ui.objectlist import Column, ObjectList #pylint: disable-msg=W0611
from kiwi.ui.widgets.combobox import ProxyComboBox #pylint: disable-msg=W0611
from fluidity import defs
from fluidity import gee_tee_dee
from fluidity import ui
from fluidity import utils
from fluidity.magic_machine import MagicMachine
from fluidity.managers import DataManager
class Slider(object):
    """The quick-capture ("Slider") window: grabs NextActions and notes
    into the slider inbox folder for later processing."""

    def __init__(self, separate_process=False):
        """Initialize this Slider.

        Args:
            separate_process: If True, gtk.main_quit() will be called when the
                window closes.
        """
        utils.log_line("Starting Slider", datetime.datetime.now())
        self._is_separate_process = separate_process
        self._b = gtk.Builder()
        self._b.add_from_file(os.path.join(defs.APP_DATA_PATH, 'slider.ui'))
        self._b.connect_signals(self)
        self.data_lumbergh = DataManager()
        self._magical = MagicMachine(self.data_lumbergh)
        # Tracks whether the summary text has already been run through
        # MagicMachine parsing, so we only parse it once per session.
        self._magic_checked = False
        self._map_fields_to_instance_names()
        self._init_ui()

    def fill_prj_list_w(self):
        """Populate the project list with all incomplete projects and
        pre-select the special 'singletons' project."""
        prj_list = self.data_lumbergh.get_prjs_by_aof("All", "incomplete")
        prj_list.sort(key=operator.attrgetter('status', 'summary'))
        for prj in prj_list:
            pr = ProjectRow(prj.formatted_summary, prj.status, prj.key_name)
            if prj.summary == 'singletons':
                singletons_prj = pr
            self.prj_list_w.append(pr)
        # NOTE(review): raises UnboundLocalError if no project named
        # 'singletons' exists -- presumably the data always has one.
        self.prj_list_w.select(singletons_prj)

    def _add_note_to_prj(self):
        # FIXME: make this actually do something.
        print("CAN HAS PRJ NOTE? YES! CAN HAZ!")

    def _autocomplete_context(self, widget):
        """Replace the widget's text with MagicMachine's best context match."""
        context = self._magical.get_magic_context(widget.get_text())
        widget.set_text(context)

    def _create_inbox_note(self):
        """Pickle the summary/details fields as an inbox note file, then quit."""
        note = {'summary': self.summary_w.get_text(),
                'details': self.note_details_w.get_buffer().props.text}
        # os.sep is stripped so the summary can't smuggle path separators
        # into the generated file name.
        file_name = (note['summary'][:50].replace(os.sep, '') +
                     str(time.time()) + "-note.pkl")
        file_path = os.path.join(defs.NOTE_SLIDER_FOLDER, file_name)
        with open(file_path, 'wb') as pickle_file:
            pickle.dump(note, pickle_file, pickle.HIGHEST_PROTOCOL)
        self._quit()

    def _create_incoming_na(self, na):
        """Create an incoming NextAction, if we have valid data for one in the form."""
        if self._validate_fields():
            prj = self.prj_list_w.get_selected()
            if prj and prj.key_name != "singletons":
                # NA belongs to a real project.
                self._write_na(na, prj.key_name, "-na.pkl")
            elif self.queue_to_w.date:
                print(self.queue_to_w.date)
                if not na.queue_date:
                    na.queue_date = self.queue_to_w.date
                self._write_na(na, 'queued_singletons', "-queued_na.pkl")
            elif prj.key_name == "singletons":
                self._write_na(na, 'singletons', "-na.pkl")

    def _create_incoming_obj(self):
        """Dispatch the form contents to the right handler for the UI mode."""
        self.DO_IT_w.grab_focus()
        if not self.expandotron.props.expanded:
            # Collapsed mode: decide note-vs-NextAction by whether the
            # summary parses as a "magic" NextAction.
            if not self._magic_checked:
                magic_na = self._get_magic_na(self.summary_w.props.text)
                if magic_na:
                    self._fill_fields_from_magic(magic_na)
                    self._create_incoming_na(magic_na)
                else:
                    self._create_inbox_note()
        else:
            # Expanded mode: the toggle buttons pick the behavior.
            if self.create_inbox_note_w.get_active():
                self._create_inbox_note()
            elif self.add_note_to_prj_w.get_active():
                self._add_note_to_prj()
            elif self.create_na_w.get_active():
                na = self._create_na_from_fields()
                if na:
                    self._create_incoming_na(na)

    def _create_na_from_fields(self):
        """Build a gee_tee_dee.NextAction from the expanded form.

        Returns:
            A NextAction, or None when the summary field is empty.
        """
        na = None
        summary = self.summary_w.get_text()
        if summary:
            na = gee_tee_dee.NextAction(summary)             # set summary
            na.context = self.context_w.get_text()           # set context
            na.time_est = self.time_est_w.get_value()        # set time_est
            na.energy_est = self._na_energy_est              # set energy_est
            na.priority = self._get_priority()               # set priority
            if self.due_date_w.date:
                na.due_date = self.due_date_w.date           # set due_date
            if self.queue_to_w.date:
                na.queue_date = self.queue_to_w.date         # set queue date
            url = self.url_w.get_text()                      # set url
            if url != "":
                na.url = url
            notes = self.na_notes_w.get_buffer().props.text  # set notes
            na.notes = notes if notes != "" else None
        return na

    def _fill_fields_from_magic(self, magic_na):
        """Copy every populated attribute of `magic_na` into the form widgets."""
        self.summary_w.props.text = magic_na.summary
        # Table mapping each NextAction attribute to the (object, attribute)
        # pair backing the corresponding form field.
        p_o_a = namedtuple("PropObjAttr", "property object attribute")
        poas = [p_o_a(prop, obj, attr) for prop, obj, attr in
                (("context", self.context_w.props, 'text'),
                 ("time_est", self.time_est_w.props, 'value'),
                 ("energy_est", self, '_na_energy_est'),
                 ("priority", self, '_na_priority_est'),
                 # NOTE(review): "due_date" is wired to time_est_w here;
                 # that looks like it should be due_date_w -- confirm.
                 ("due_date", self.time_est_w, 'date'),
                 ("queue_date", self.queue_to_w, 'date'),
                 ("url", self.url_w.props, 'text'),
                 ("notes", self.na_notes_w.get_buffer().props, 'text'),)]
        for poa in poas:
            value = magic_na.__getattribute__(poa.property)
            # "or value == 0" keeps legitimate zero values (e.g. low energy).
            if value or value == 0:
                poa.object.__setattr__(poa.attribute, value)
        self._validate_fields()

    def _get_magic_na(self, summary_text):
        """Parse `summary_text` with MagicMachine.

        Returns:
            A NextAction populated from the parsed fields, or None when the
            text contained no recognizable "magic" markup.
        """
        self._magic_checked = True
        mt = self._magical.get_magic_task(summary_text)
        has_magic = False
        # if our dict doesn't have these keys, our magic failed, and we
        # should show the dialog instead
        magic_keys = ['context', 'time_est', 'energy_est', 'priority', 'due_date']
        for key in mt.keys():
            if key in magic_keys:
                has_magic = True
        # 'url' left out of magic_keys prior b/c it's not an integral part of a
        # NextAction; adding it back in now so we can use it as a generic attr list
        magic_keys.append('url')
        if has_magic:
            na = gee_tee_dee.NextAction(mt['summary'])
            for key in mt.keys():
                if key in magic_keys:
                    na.__setattr__(key, mt[key])
            return na
        else:
            return None

    def _get_priority(self):
        """Return the numeric priority for the currently-selected label."""
        label = self.priority_w.get_selected_label()
        return ui.PRIORITY_LABELS_TO_VALUES[label]

    def _init_prj_list_w(self, obj_list):
        """Define the columns of the kiwi ObjectList of projects."""
        obj_list.set_columns([Column('formatted_summary', title='Summary',
                                     data_type=str, use_markup=True,
                                     searchable=True, expand=True),
                              Column('status', data_type=str)])

    def _init_ui(self):
        """Give the widgets their startup state."""
        self._init_prj_list_w(self.prj_list_w)
        self._b.get_object("energy_w").select_item_by_position(1)
        self._b.get_object("priority_w").select_item_by_position(1)
        self._b.get_object("time_est_w").set_value(defs.DEFAULT_TIME_EST)
        # set "Create Next Action" as the default mode
        self.create_na_w.clicked()
        # give the date fields their date name, as None
        self.due_date_w.date = None
        self.queue_to_w.date = None
        self._set_ui_mode(self.create_na_w)

    def _map_fields_to_instance_names(self):
        """Bind the GtkBuilder objects to like-named instance attributes."""
        self.add_na_label = self._b.get_object("add_na_label")
        self.add_note_label = self._b.get_object("add_note_label")
        self.add_note_to_prj_w = self._b.get_object("add_note_to_prj_w")
        self.context_w = self._b.get_object("context_w")
        self.create_inbox_note_w = self._b.get_object("create_inbox_note_w")
        self.create_na_w = self._b.get_object("create_na_w")
        self.DO_IT_w = self._b.get_object("DO_IT_w")
        self.due_date_w = self._b.get_object("due_date_w")
        self.energy_est_w = self._b.get_object("energy_w")
        self.expandotron = self._b.get_object("expandotron")
        self.na_notes_w = self._b.get_object("na_notes_w")
        self.na_table = self._b.get_object("na_table")
        self.note_details_box = self._b.get_object("note_details_box")
        self.note_details_w = self._b.get_object("note_details_w")
        self.priority_w = self._b.get_object("priority_w")
        self.prj_list_box = self._b.get_object("prj_list_box")
        self.prj_list_w = self._b.get_object("prj_list_w")
        self.queue_to_w = self._b.get_object("queue_to_w")
        self.summary_w = self._b.get_object("summary_w")
        self.time_est_w = self._b.get_object("time_est_w")
        self.url_w = self._b.get_object("url_w")
        self.window = self._b.get_object("ntng_dialog")

    @property
    def _na_energy_est(self):
        # Numeric energy estimate backing the energy combo.
        label = self.energy_est_w.get_selected_label()
        return ui.ENERGY_LABELS_TO_VALUES[label]

    @_na_energy_est.setter
    def _na_energy_est(self, value):  #pylint: disable-msg=E0102
        label = ui.ENERGY_VALUES_TO_LABELS[value]
        self.energy_est_w.select_item_by_label(label)

    @property
    def _na_priority_est(self):
        # Numeric priority backing the priority combo.
        label = self.priority_w.get_selected_label()
        return ui.PRIORITY_LABELS_TO_VALUES[label]

    @_na_priority_est.setter
    def _na_priority_est(self, value):  #pylint: disable-msg=E0102
        label = ui.PRIORITY_VALUES_TO_LABELS[value]
        self.priority_w.select_item_by_label(label)

    def _quit(self):
        """Log the exit and tear down: quit gtk or just destroy the window."""
        utils.log_line("Exiting Slider normally.", datetime.datetime.now())
        # don't quit if we're not actually running as a separate process.. heh.
        if self._is_separate_process:
            gtk.main_quit()
        else:
            self.window.destroy()

    def _set_ui_mode(self, widget):
        """Show/hide panes to match the mode-toggle `widget`."""
        summary_text = self.summary_w.get_text()
        if gtk.Buildable.get_name(widget) == "create_na_w":
            self.na_table.show()
            self.note_details_box.hide()
            self.prj_list_box.show()
            self.add_na_label.show()
            self.add_note_label.hide()
            if summary_text != "":
                self.context_w.grab_focus()
            if not self._magic_checked and summary_text:
                magic = self._get_magic_na(summary_text)
                if magic:
                    self._fill_fields_from_magic(magic)
        elif gtk.Buildable.get_name(widget) == "create_inbox_note_w":
            self.na_table.hide()
            self.note_details_box.show()
            self.prj_list_box.hide()
            if summary_text != "":
                self.note_details_w.grab_focus()
        elif gtk.Buildable.get_name(widget) == "add_note_to_prj_w":
            self.na_table.hide()
            self.note_details_box.show()
            self.prj_list_box.show()
            self.add_na_label.hide()
            self.add_note_label.show()
            if summary_text != "":
                self.note_details_w.grab_focus()

    def _set_valid_date_w(self, widget):
        """Parse the widget's text as a date and normalize its display."""
        if widget.get_text() == "":
            widget.date = None
        else:
            # We'll get None on failure here, so we're safe either way
            widget.date = self._magical.get_magic_date(widget.get_text())
            # get_magic_date() didn't understand the mystery meat you fed it.
            if widget.date == None:
                widget.set_text(defs.UNRECOGNIZED_DATE_TEXT)
            else:
                date_text = widget.date.strftime(defs.GTK_DATE_TEXT_TEMPLATE)
                widget.set_text(date_text)

    def _validate_fields(self):
        """Check the summary/context/date fields; focus the first bad one.

        Returns:
            True when every field is usable, False otherwise.
        """
        # fuck it. I don't care how unreadable, unmaintainable, or otherwise
        # shameful this is. i just want it done.
        if self.summary_w.get_text() == "":
            self.summary_w.grab_focus()
            self.expandotron.set_expanded(True)
            return False
        context = self.context_w.get_text()
        # if we have an @ at the beginning, don't make that the "capitalize" char
        if context != "":
            context = context[0] + context[1:].capitalize()
            self.context_w.set_text(context)
        if " " in context or not context.startswith('@'):
            self.expandotron.set_expanded(True)
            self.context_w.grab_focus()
            return False
        if self.due_date_w.get_text() == defs.UNRECOGNIZED_DATE_TEXT:
            self.expandotron.set_expanded(True)
            self.due_date_w.grab_focus()
            return False
        if self.queue_to_w.get_text() == defs.UNRECOGNIZED_DATE_TEXT:
            self.expandotron.set_expanded(True)
            self.queue_to_w.grab_focus()
            return False
        return True

    def _write_na(self, na, prj_key, ext):
        """Pickle `na` (tagged with `prj_key`) into the slider inbox, then quit."""
        to_dump = {'prj_key': prj_key, 'na_obj': na}
        fname = "".join((utils.format_for_dict_key(na.summary)[:50],
                         str(time.time()), ext))
        with open(os.path.join(defs.NOTE_SLIDER_FOLDER, fname), 'wb') as pfile:
            pickle.dump(to_dump, pfile, pickle.HIGHEST_PROTOCOL)
        self._quit()

    # CALLBACKS
    def context_w_focus_out_event_cb(self, widget, data=None):
        self._autocomplete_context(widget)

    def date_w_focus_out_event_cb(self, widget, data=None):
        self._set_valid_date_w(widget)

    def DO_IT_w_clicked_cb(self, widget, data=None):
        self._create_incoming_obj()

    def expandotron_activate_cb(self, widget, data=None):
        """Ensures the UI is set up correctly & the best widget is focused."""
        if not widget.props.expanded:
            self.summary_w.grab_focus()
        else:
            for w in (self.create_inbox_note_w, self.create_na_w,
                      self.add_note_to_prj_w):
                if w.get_active():
                    self._set_ui_mode(w)
                    break

    # hml, i.e.: "high, medium, low"
    def hml_combo_key_press_event_cb(self, widget, data=None):
        ui.type_ahead_combo(widget, data)

    # "mode toggles" here meaning "the toggle buttons effecting app mode"
    def mode_toggles_clicked_cb(self, widget, data=None):
        if widget.get_active() and self.expandotron.props.expanded:
            self._set_ui_mode(widget)

    def na_notes_expander_activate_cb(self, widget, data=None):
        # Remember/restore focus as the notes expander opens and closes.
        if widget.get_expanded():
            self._previously_focused = self.window.get_focus()
            self.na_notes_w.grab_focus()
        else:
            self._previously_focused.grab_focus()

    def prj_list_w_focus_in_event_cb(self, widget, data=None):
        # Why do we have to grab focus after we're... focused? bleh.
        # BIZZARRO! I LOVE YOu@
        selected_row = widget.get_selected_row_number()
        if not selected_row:
            selected_row = 0
        widget.grab_focus()
        widget.select_paths([selected_row])
        # NOTE(review): _select_and_focus_row is private kiwi API -- may
        # break on kiwi upgrades.
        widget._select_and_focus_row(selected_row)

    def prj_list_w_row_activated_cb(self, widget, data=None):
        self._create_incoming_obj()

    def quit_cb(self, widget, data=None):
        self._quit()

    def summary_w_key_press_event_cb(self, widget, data=None):
        # if the keypress is "down arrow", expand the Expand-o-tron
        if (gtk.gdk.keyval_name(data.keyval) == "Down"
                and self.expandotron.props.expanded == False):
            self.expandotron.set_expanded(True)

    def url_w_focus_out_event_cb(self, widget, data=None):
        # Prepend a scheme so bare host names still open in the browser.
        url = widget.get_text()
        if "://" not in url and url != "":
            widget.set_text("http://" + url)

    def url_w_icon_press_cb(self, widget, icon=None, event=None):
        self.url_w_focus_out_event_cb(widget)
        gtk.show_uri(gtk.gdk.Screen(), widget.get_text(), int(time.time()))
class ProjectRow(object):
    """Lightweight row model for the project ObjectList."""
    # pylint: disable-msg=R0903

    def __init__(self, formatted_summary, status, key_name):
        self.key_name = key_name
        self.formatted_summary = formatted_summary
        # The status column displays capitalized labels.
        self.status = status.capitalize()
def run():
    """Launch Slider as its own process and enter the gtk main loop."""
    app = Slider(True)
    app.window.show()
    app.fill_prj_list_w()
    gtk.main()
if __name__ == "__main__":
    # Slider is normally launched via the 'slider' wrapper script; yell at
    # anyone executing this module directly, then run anyway.
    print("""HEY YOU: Yes, you, the user -- DON'T RUN THIS DIRECTLY! Use the
launching script 'slider' in your system path (e.g.: in /usr/bin/), or if you're
running straight out of the folder from the .tar.gz file you grabbed, then look
for the script in the "bin" folder.""")
    run()
| Python |
#-*- coding:utf-8 -*-
#
# Copyright (C) 2009 - Jens Knutson <jens.knutson at gmail dot com>
# This software is licensed under the GNU General Public License
# version 3 or later (see the file COPYING).
"""All the Inbox item classes."""
from __future__ import absolute_import, division, print_function
__author__ = 'Jens Knutson'
import gio
import glib
import gtk
from fluidity import gio_fml
class InboxStuff(object):
    """Marker base class: lets callers 'isinstance'-check inbox items."""
    # FIXME: icky and unpythonic.]
    pass
class InboxFile(gio_fml.FityFile, InboxStuff):
    """A file sitting in the inbox, with an icon and a display summary."""

    def __init__(self, file_name):
        super(InboxFile, self).__init__(file_name)
        self.icon = self._init_icon()
        # The displayed summary is just the file's basename.
        self.summary = self.basename

    def _init_icon(self):
        """Return an icon pixbuf for this Stuff."""
        pixbuf = None
        # first, if it's an image, try to get a thumbnail.
        if self.generic_type == "image":
            thumbnail_path = \
                self._gfile_info.get_attribute_as_string('thumbnail::path')
            if thumbnail_path:
                # Prefer the large thumbnail over the normal-sized one.
                thumbnail_path = thumbnail_path.replace('/normal/', '/large/')
                if gio.File(thumbnail_path).query_exists():
                    thumbnail = gtk.Image()
                    thumbnail.set_from_file(thumbnail_path)
                    pixbuf = thumbnail.get_pixbuf()
        if not pixbuf:
            # thumbnail FAIL
            icon_theme = gtk.icon_theme_get_for_screen(gtk.gdk.Screen())
            names = self._gfile_info.get_icon().props.names
            for stock_name in names:
                try:
                    pixbuf = icon_theme.load_icon(stock_name, 48,
                                                  gtk.ICON_LOOKUP_USE_BUILTIN)
                    break
                except glib.GError:
                    # Name not in the theme; try the next candidate.
                    pass
            if not pixbuf:
                # just do what's guaranteed to work
                pixbuf = icon_theme.load_icon('text-x-generic', 48,
                                              gtk.ICON_LOOKUP_USE_BUILTIN)
        return pixbuf

    def get_preview(self):
        """Return a "preview" of the file's contents. WARNING: NASTY HACK.

        Return type will vary based on content type: a pixbuf for images,
        a str for text files, None for audio (and anything else).
        """
        if self.generic_type == "image":
#            thumbnail_path = \
#                self._gfile_info.get_attribute_as_string('thumbnail::path')
#            thumbnail_path = thumbnail_path.replace('/normal/', '/large/')
            thumbnail = gtk.Image()
            thumbnail.set_from_file(self._gfile.get_path())
            return thumbnail.get_pixbuf()
        elif self.generic_type == "audio":
            return None
        elif self.generic_type == "text":
            with open(self._gfile.get_path(), 'r') as content:
                # Cap previews at 2KB so huge files aren't slurped in whole.
                if self._gfile_info.get_size() <= 2048:
                    return content.read()
                else:
                    return content.read(2048)
# pylint: disable-msg=R0903
class InboxNote(InboxStuff):
    """A free-form note in the inbox: a summary plus optional details."""

    def __init__(self, summary, details=None):
        self.details = details
        self.summary = summary
class InboxEmail(InboxStuff):
    """Placeholder for email-based inbox items (capture not built yet)."""

    def __init__(self, summary, details=None):
        # Stub: constructing one always fails until email capture exists.
        raise NotImplementedError("Not yet implemented")
| Python |
#!/usr/bin/env python
#-*- coding:utf-8 -*-
#
# Copyright (C) 2010 - Jens Knutson <jens.knutson at gmail dot com>
# This software is licensed under the GNU General Public License
# version 3 or later (see the file COPYING).
"""GIO: makes I/O less painful. Except when it doesn't."""
from __future__ import absolute_import, division, print_function, unicode_literals
__author__ = 'Jens Knutson'
import os
import shutil
import gio
from xdg import BaseDirectory
class MoveError(Exception):
    """Raised when a file could not be moved to its destination.

    Args:
        message: text of the underlying (e.g. gio) error.
        path: path of the file that failed to move.
    """

    def __init__(self, message, path):
        msg = "Could not move the file {0}. Inner exception: {1}"
        self._message = msg.format(path, message)
        # Previously Exception.__init__ was never called, leaving .args
        # empty -- which breaks repr(), pickling, and generic handlers.
        super(MoveError, self).__init__(self._message)

    def __str__(self):
        return self._message
class FityFileProxy(object):
    """Proxy base class masquerading as gio.File.

    Know what would be great, though? It'd be great if I could ACTUALLY SUBCLASS
    gio.File! NOPE, WAIT, THAT'S IMPOSSIBLE, SORRY.
    """
    # FIXME: remove preceding temper tantrum

    def __init__(self, gio_file):
        # dup() so we hold our own reference, independent of the caller's.
        self._gfile = gio_file.dup()

    def __getattr__(self, attr):
        # Fall through to the wrapped gio.File for anything not defined here.
        return getattr(self._gfile, attr)
class FityFile(FityFileProxy):
    """Pythonic wrapper for icky, stanky, eat-da-poopoo gio.File & gio.FileInfo."""
    # I would have just subclassed this instead of wrapping, but that's somehow
    # not possible.
    #
    # Seriously, try it. FREE BEERS here in Minneapolis with me for the first
    # person to email me with how to subclass gio.File without resorting to
    # Total Evil(TM). Actually, there might still be beer in it for them if
    # it's a particularly clever or cute flavor of Evil.

    def __init__(self, file_name=None, gio_file=None):
        """Initialize this obj, using full path `file_name` or gio.File `gio_file`.

        Either file_name or gio_file must be provided, but not both.

        Args:
            file_name: optional if you specify gio_file. The full path name of the
                file to represent with this object.
            gio_file: optional if you specify file_name. A gio.File instance for
                the file to represent with this object.
        """
        if file_name and gio_file:
            raise Exception("Only ONE of 'file_name' or 'gio_file' should be set.")
        elif file_name:
            gio_file = gio.File(file_name)
        super(FityFile, self).__init__(gio_file)
        self._gfile_info_real = None  # lazy-load this stuff as a property later
        self._icon = None

    @property
    def basename(self):
        # Final path component, e.g. "foo.txt".
        return self._gfile.get_basename()

    @property
    def exists(self):
        return self._gfile.query_exists()

    @property
    def ext(self):
        # File extension including the dot, or "" when there is none.
        return os.path.splitext(self.basename)[1]

    @property
    def generic_type(self):
        # The major half of the MIME type, e.g. "image" for "image/png".
        return self.mime_type.split('/')[0]

    @property
    def is_dir(self):
        filetype = self._gfile.query_file_type(gio.FILE_MONITOR_NONE,
                                               gio.Cancellable())
        return filetype == gio.FILE_TYPE_DIRECTORY

    @property
    def mime_type(self):
        return self._gfile_info.get_content_type()

    @property
    def notes(self):
        # Free-form user annotation stored in GIO metadata; "" when unset.
        notes_ = self._gfile_info.get_attribute_string('metadata::annotation')
        if notes_ is None:
            return ""
        else:
            return notes_

    @property
    def path(self):
        return self._gfile.get_path()

    @property
    def parent(self):
        return FityFile(gio_file=self._gfile.get_parent())

    @property
    def size(self):
        # Human-readable size string, e.g. "1.5 MB".
        return self._get_human_file_size()

    @property
    def uri(self):
        return self._gfile.get_uri()

    @property
    def _gfile_info(self):
        # Lazily query (and cache) the gio.FileInfo -- the query is a
        # blocking I/O call, so only do it when first needed.
        if not self._gfile_info_real:
            self._gfile_info_real = self._gfile.query_info('*')
        return self._gfile_info_real

    def find_enclosing_mount(self, cancellable=None):
        return self._gfile.find_enclosing_mount(cancellable)

    def get_child(self, fname):
        """Return a FityFile from `self`'s child file/folder, `fname`."""
        if self.is_dir:
            return FityFile(gio_file=self._gfile.get_child(fname))
        else:
            # FIXME: write a real exception for this
            raise ValueError("I am not a directory, I can't have children.")

    def get_children(self):
        """Yield a FityFile for each direct child of this directory."""
        # FIXME: this can timeout under some conditions which I can't currently
        # identify. Exception info:
        #   gio.Error: DBus error org.freedesktop.DBus.Error.NoReply:
        #   Did not receive a reply. Possible causes include:
        #   the remote application did not send a reply,
        #   the message bus security policy blocked the reply,
        #   the reply timeout expired,
        #   or the network connection was broken.
        for info in self._gfile.enumerate_children('*'):
            gfile = self._gfile.get_child(info.get_name())
            yield FityFile(gio_file=gfile)

    def copy(self, destination, create_parent_dirs=False):
        """Copy this file/folder to FityFile instance `destination`.

        Args:
            destination: FityFile instance with the desired path
            create_parent_dirs: if True, create any missing parent
                directories of the destination first.
        """
#        orig_path = self.path
        if create_parent_dirs:
            if not destination.parent.exists:
                print("Making dir(s):", destination.parent)
                destination.parent.make_directory_with_parents()
        self._gfile.copy(destination._gfile)

    def make_directory_with_parents(self):
        self._gfile.make_directory_with_parents(gio.Cancellable())

    def move(self, destination):
        """Move this file/folder to FityFile instance `destination`.

        Args:
            destination: FityFile instance with the desired path

        Raises:
            MoveError: if destination.path already exists
        """
        orig_path = self.path
        try:
            if self.is_dir:
                # FIXME: I'm pretty sure the following comment is actually wrong re:
                # *moving* files...
                # gotta use shutil because fucking gio still doesn't do
                # do recursion. pathetic...
                shutil.move(self.path, destination.path)
            else:
                self._gfile.move(destination._gfile)
            # Repoint ourselves at the new location.
            self._gfile = destination._gfile
        except gio.Error as g_err:
            raise MoveError(g_err.message, orig_path)

    def mount_enclosing_volume(self, mount_operation=None, callback=None,
                               flags=gio.FILE_COPY_NONE, cancellable=None,
                               user_data=None):
        """Mount the enclosing volume for `self`.

        Stolen from the PyGObject docs:
            "The mount_enclosing_volume() method starts a mount_operation, mounting
            the volume that contains the file location. When this operation has
            completed, callback will be called with user_data, and the operation
            can be finalized with gio.File.mount_enclosing_volume_finish().

            If cancellable is not None, then the operation can be cancelled by
            triggering the cancellable object from another thread. If the operation
            was cancelled, the error gio.ERROR_CANCELLED will be returned."

        Args:
            mount_operation: a gio.MountOperation - one will be created for you if
                you don't pass one in.
            callback: a function to call when the operation is complete - if None,
                'lambda *args: None' will irresponsibly be used instead.
            flags: optional -- gio file copy flags - defaults to gio.FILE_COPY_NONE
            cancellable: optional -- a gio.Cancellable. Defaults to NONE
            user_data: optional -- any data to pass to `callback` when the mount
                operation completes. Defaults to None.
        """
        mount_operation = mount_operation if mount_operation else gio.MountOperation()
        callback = callback if callback else lambda *args: None
        self._gfile.mount_enclosing_volume(mount_operation, callback, flags,
                                           cancellable, user_data)

    def trash(self):
        """Send the file to the XDG trash and repoint self at the trashed copy."""
        # FIXME: what if there are dupe files in the trash... what about the
        # file name mangling??
        # stupid uninheritable BS *grumblegrumble*
        trash_path = BaseDirectory.xdg_data_home + "/Trash/files/"
        self._gfile.trash()
        # NOTE(review): assumes the trashed file keeps its basename; GIO can
        # rename on collision -- see FIXME above.
        self._gfile = gio.File(trash_path + self._gfile.get_basename())

    def _get_human_file_size(self):
        """Return the size as e.g. "1.5 MB" (divides by 1024 per unit step)."""
        size_names = ("bytes", "KB", "MB", "GB", "TB")
        raw_size = self._gfile_info.get_size()
        # NOTE(review): falls off the end (returns None) for sizes beyond
        # the TB range.
        for name in size_names:
            if raw_size > 1024:
                raw_size = raw_size / 1024
            else:
                return "{0} {1}".format(round(raw_size, 1), name)

    def __repr__(self):
        # Append the wrapped gio.File's description to the default repr.
        oldrepr = super(FityFile, self).__repr__().rstrip('>')
        things = oldrepr, ": ", str(self._gfile).strip('<>'), '>'
        return "".join(things)
| Python |
#!/usr/bin/env python
#-*- coding:utf-8 -*-
#
# Copyright (C) 2010 - Jens Knutson <jens.knutson at gmail dot com>
# This software is licensed under the GNU General Public License
# version 3 or later (see the file COPYING).
"""Misc. app-wide constants."""
from __future__ import absolute_import, division, print_function
__author__ = 'Jens Knutson'
import os
import string
import sys
from xdg import BaseDirectory
# Py3K compat.
# string.lowercase was removed in Python 3; alias it to ascii_lowercase so
# code below that reads string.lowercase works on both 2.x and 3.x.
if not hasattr(string, 'lowercase'):
    string.lowercase = string.ascii_lowercase
def _find_app_data_path():
"""Determine (hackishly) if we're running from a proper install or not."""
data_path = ""
exec_path = os.path.dirname(os.path.realpath(sys.modules[__name__].__file__))
uninstalled_data_path = os.path.join(os.path.dirname(exec_path), 'data')
if os.path.exists(uninstalled_data_path):
data_path = uninstalled_data_path
else:
data_path = os.path.join(sys.prefix, "share", "fluidity")
return data_path
def _get_read_review_path():
    """Return the "Read-Review" folder inside the user's XDG documents dir.

    This is rather primitive, but it'll work fine in 90%+ of cases.

    Returns:
        Full path to "<XDG_DOCUMENTS_DIR>/Read-Review". Falls back to
        "$HOME/Documents/Read-Review" when user-dirs.dirs is missing or
        has no XDG_DOCUMENTS_DIR entry (previously that case crashed with
        IndexError/IOError).
    """
    home = os.getenv('HOME')
    dirs_file = os.path.join(home, BaseDirectory.xdg_config_dirs[0],
                             'user-dirs.dirs')
    doc_dir_line = ""
    try:
        with open(dirs_file, 'r') as dirs:
            for line in dirs:
                if "XDG_DOCUMENTS_DIR" in line:
                    doc_dir_line = line
    except IOError:
        # No user-dirs.dirs at all; use the default below.
        pass
    if doc_dir_line:
        # Line looks like: XDG_DOCUMENTS_DIR="$HOME/Documents"
        path = doc_dir_line.strip()
        path = path.replace("$HOME", home)
        path = path.replace('"', '')
        path = path.split('=')[1]
    else:
        path = os.path.join(home, "Documents")
    return os.path.join(path, "Read-Review")
APP_NAME = 'Fluidity'
DBUS_BUS_NAME = 'org.solemnsilence.Fluidity'
DBUS_OBJECT_PATH = '/org/solemnsilence/Fluidity'
FITY_EPOCH = 1230768000.0
CREATION_EPOCH = 1262325600.0
### NOTES APP STUFF ###
# FIXME: this is lame. Figure out /real/ Tomboy vs. Gnote handling later
# For now, the value below must be either "tomboy" or "gnote" (all in lowercase)
NOTES_APP = "Tomboy"
NOTES_BUS_NAME = 'org.gnome.' + NOTES_APP
NOTES_OBJECT_PATH = '/org/gnome/' + NOTES_APP + '/RemoteControl'
NEW_PROJECT_NOTE_TEMPLATE_NOTE_TITLE = "Projects - active Notebook Template"
### MISC TEXT FIELD VALUES AND TEMPLATES ###
# FIXME: almost all of these should go somewhere else as I refactor Fity
AUTOSAVE_INTERVAL = int(60 * 1) # minutes between autosaves of the data file
GTK_DATE_TEXT_TEMPLATE = "%B %d, %Y"
DEFAULT_TIME_EST = 10.0
UNRECOGNIZED_DATE_TEXT = "(date unrecognized)"
# represents "there is no AOF assigned to this project", i.e.: "unfiled"
NO_AOF_ASSIGNED = "No AOF Assigned"
ENGAGE_TOTALS_TEMPLATE = "Tasks shown: {0} Total time: {1}h:{2}m"
ARCHIVED_SINGLETONS_TIME_TMPLT = '-%Y-%m-%d-%H:%M'
SANITARY_CHARS = string.lowercase + string.digits + " "
### PATHS ###
HOME_DIR = os.path.expanduser("~")
APP_DATA_PATH = _find_app_data_path()
USER_DATA_PATH = BaseDirectory.save_data_path("fluidity")
LOG_FILE_PATH = os.path.join(USER_DATA_PATH, 'fluidity_debug.log')
RECURRENCE_DATA = os.path.join(USER_DATA_PATH, 'recurring_tasks.yaml')
USER_DATA_MAIN_FNAME = 'fluidity.pkl'
USER_DATA_MAIN_FILE = os.path.join(USER_DATA_PATH, USER_DATA_MAIN_FNAME)
PROCESSED_STUFF_FILE_NAME = 'processed_stuff.pkl'
BACKUPS_PATH = os.path.join(USER_DATA_PATH, "backups")
ARCHIVED_SINGLETONS_FNAME = 'archived_singletons{0}.pkl'
# PROJECT SUPPORT FILE PATHS
READ_REVIEW_PATH = _get_read_review_path()
INBOX_FOLDER = os.path.join(HOME_DIR, "Inbox")
NOTE_SLIDER_FOLDER = os.path.join(USER_DATA_PATH, 'slider-inbox')
MAIN_PRJ_SUPPORT_FOLDER = os.path.join(HOME_DIR, "Projects")
ACTIVE_FOLDER = os.path.join(MAIN_PRJ_SUPPORT_FOLDER, "Active")
COMPLETED_FOLDER = os.path.join(MAIN_PRJ_SUPPORT_FOLDER, "Completed")
INCUBATING_FOLDER = os.path.join(MAIN_PRJ_SUPPORT_FOLDER, "Incubating")
QUEUED_FOLDER = os.path.join(MAIN_PRJ_SUPPORT_FOLDER, "Queued")
WAITING_FOR_FOLDER = os.path.join(MAIN_PRJ_SUPPORT_FOLDER, "Waiting For")
SINGLETON_FILES = os.path.join(ACTIVE_FOLDER, "singletons")
PROJECT_FOLDER_DELETION_WARNING_FILE_NAME = "DO_NOT_DELETE_THIS_FOLDER.txt"
PROJECT_FOLDER_DELETION_WARNING_PATH = \
os.path.join(APP_DATA_PATH, PROJECT_FOLDER_DELETION_WARNING_FILE_NAME)
# doesn't include USER_DATA_PATH since BaseDirectory.save_data_path takes
# care of ensuring that path exists
# FIXME: once a global Inbox folder is implemented for people other than me
ALL_DATA_FOLDERS = [NOTE_SLIDER_FOLDER,
MAIN_PRJ_SUPPORT_FOLDER,
ACTIVE_FOLDER,
COMPLETED_FOLDER,
INCUBATING_FOLDER,
QUEUED_FOLDER,
WAITING_FOR_FOLDER,
BACKUPS_PATH]
IGNORED_INBOX_PATHS = ["0 - Eventually sort. bah",
"1 - To be processed when Fity is ready",
"3 - Torrents",
"2 - Receipts to process",
"90 Day Storage"]
| Python |
#!/usr/bin/env python
#-*- coding:utf-8 -*-
#
# Copyright (C) 2010 - Jens Knutson <jens.knutson at gmail dot com>
# This software is licensed under the GNU General Public License
# version 3 or later (see the file COPYING).
"""View/Display objects for use with Kiwi ObjectList/ObjectTrees, etc."""
from __future__ import absolute_import, division, print_function, unicode_literals
__author__ = 'Jens Knutson'
import abc
import datetime
import os
import gio
import glib
import gtk
from xml.sax import saxutils
from fluidity import defs
from fluidity import utils
# Shared GTK icon resources, loaded once at import time.
ICON_THEME = gtk.icon_theme_get_for_screen(gtk.gdk.Screen())
ALERT_ICON_PIXBUF = ICON_THEME.load_icon('gtk-dialog-warning', 16,
                                         gtk.ICON_LOOKUP_USE_BUILTIN)
# Transparent 16x16 placeholder used where a row has no icon to display.
FAKE_ICON_PIXBUF = gtk.gdk.pixbuf_new_from_file(
                            os.path.join(defs.APP_DATA_PATH, '16x16_trans.png'))
NOTE_ICON_PIXBUF = ICON_THEME.load_icon('text-x-generic', 16,
                                        gtk.ICON_LOOKUP_USE_BUILTIN)
URL_ICON_PIXBUF = ICON_THEME.load_icon('emblem-web', 16,
                                       gtk.ICON_LOOKUP_USE_BUILTIN)
# Mappings between combo-box labels and stored numeric values.  Note the
# scales run in opposite directions: for energy, "High" is the largest
# number (2); for priority, "High" is the smallest (1).
ENERGY_LABELS_TO_VALUES = {"High": 2, "Normal": 1, "Low": 0}
ENERGY_VALUES_TO_LABELS = utils.invert_dict(ENERGY_LABELS_TO_VALUES)
PRIORITY_LABELS_TO_VALUES = {"High": 1, "Normal": 2, "Low": 3}
PRIORITY_VALUES_TO_LABELS = utils.invert_dict(PRIORITY_LABELS_TO_VALUES)
# HACK: SMART OR SMRT?
def translate_priority(priority):
    """Give a string, get an int; give an int, get a string.

    Args:
        priority: a priority label ("High"/"Normal"/"Low") or its numeric
            value (1/2/3).

    Returns:
        The corresponding value from PRIORITY_LABELS_TO_VALUES or
        PRIORITY_VALUES_TO_LABELS.

    Raises:
        TypeError: if priority is neither a string nor an int.  (Previously
            this surfaced as a confusing UnboundLocalError on `mapping`.)
        KeyError: if priority is not a recognized label/value.
    """
    if isinstance(priority, basestring):
        mapping = PRIORITY_LABELS_TO_VALUES
    elif isinstance(priority, int):
        mapping = PRIORITY_VALUES_TO_LABELS
    else:
        raise TypeError("priority must be a string or an int, got %r"
                        % (priority,))
    return mapping[priority]
def type_ahead_combo(combo, gdk_keyval):
    """Cheap type-ahead for a Kiwi combo: select the next entry whose label
    starts with the character just typed.

    Searches forward from the currently-selected entry first, then wraps
    around to the top of the list.  Labels are compared with any '@'
    stripped (so "@Home" matches 'h') and lowercased.

    Args:
        combo: a Kiwi combo (needs get_selected_label(),
            get_model_strings() and select_item_by_label()).
        gdk_keyval: the gtk.gdk key-press event; only its .keyval is used.

    Returns:
        True when a match was selected; None otherwise.
    """
    keyval = gtk.gdk.keyval_name(gdk_keyval.keyval)
    # we don't want to match on keys like "Alt_R", etc. -- only single-char
    # key names count.  This is a cheesy way of doing it, but it has
    # worked so far.
    if len(keyval) == 1:
        selected = combo.get_selected_label()
        combo_strings = combo.get_model_strings()
        # start looking just past the current selection
        selected_index = combo_strings.index(selected) + 1
        selection_range = range(len(combo_strings))[selected_index:]
        for i in selection_range:
            temp_string = combo_strings[i].replace('@', '').lower()
            # NOTE(review): temp_string is already lowercased, so the first
            # startswith() only matches when keyval is lowercase; the
            # .lower() variant is the one doing the work for uppercase input.
            if (temp_string.startswith(keyval) or
                temp_string.startswith(keyval.lower())):
                combo.select_item_by_label(combo_strings[i])
                return True
        # no joy, it wasn't in the remainder of the list; start from the
        # beginning then
        for s in combo_strings:
            temp_i = s.replace('@', '').lower()
            if temp_i.startswith(keyval) or temp_i.startswith(keyval.lower()):
                combo.select_item_by_label(s)
                return True
class DisplayABC(object):
    """Abstract base for display wrappers around data-source objects.

    Attribute reads that the wrapper itself doesn't have are delegated to
    the wrapped ``_data_src``; attribute writes go to ``_data_src`` when it
    already has that attribute, and to the wrapper otherwise.
    """
    __metaclass__ = abc.ABCMeta

    def __init__(self, data_src):
        self._data_src = data_src

    def __getattr__(self, attr):
        # Only called when normal lookup on the wrapper fails; fall through
        # to the wrapped object.  Raise AttributeError for genuinely missing
        # attributes -- the old code implicitly returned None, which masked
        # typos and made hasattr() answer True for everything.
        try:
            return getattr(self._data_src, attr)
        except AttributeError:
            raise AttributeError("%r object has no attribute %r"
                                 % (type(self).__name__, attr))

    def __setattr__(self, attr, value):
        # FIXME: I'm sure someone will explain to me at some point why this
        # is Evil or Stupid, but until then, it'll have to do.
        # _data_src itself always lives on the wrapper; anything the wrapped
        # object already has gets set there; everything else becomes a
        # wrapper-only attribute.
        if attr == "_data_src":
            obj = self
        elif hasattr(self._data_src, attr):
            obj = self._data_src
        else:
            obj = self
        object.__setattr__(obj, attr, value)
class DisplayNextAction(DisplayABC):
    """Display wrapper adding Kiwi/GTK presentation properties to a
    NextAction data object (attribute access is delegated; see DisplayABC)."""
    def __init__(self, data_src):
        super(DisplayNextAction, self).__init__(data_src)
    # DISPLAY/UI-RELATED BITS
    @property
    def formatted_summary(self):
        """Pango markup for the summary: bold for priority 1, light for
        priority 3, struck through when complete."""
        # FIXME: still pretty ghetto; Kiwi's column format setting may be
        # the proper home for this markup.
        markup = saxutils.escape(self.summary)
        if self.priority == 1:
            markup = '<b>{0}</b>'.format(markup)
        elif self.priority == 3:
            markup = '<span weight="light">{0}</span>'.format(markup)
        if self.complete:
            markup = '<span strikethrough="true">{0}</span>'.format(markup)
        return markup
    @property
    def energy_est_word(self):
        """Human-readable label for the numeric energy estimate."""
        return ENERGY_VALUES_TO_LABELS[self.energy_est]
    @property
    def notes_icon(self):
        """A note icon when notes exist, else the transparent filler."""
        return NOTE_ICON_PIXBUF if self.notes else FAKE_ICON_PIXBUF
    # FIXME: this should really be sort_due_date or something, shouldn't it?
    @property
    def sort_date(self):
        """Due date used for sorting; dateless items sort far in the future."""
        # FIXME: the far-future placeholder is pretty lame...
        return self.due_date or datetime.date.fromordinal(1000000)
    @property
    def url_icon(self):
        """A web icon when a URL is set, else the transparent filler."""
        return URL_ICON_PIXBUF if self.url else FAKE_ICON_PIXBUF
class DisplayProject(DisplayABC):
    """Display wrapper adding Kiwi/GTK presentation properties to a Project."""
    def __init__(self, data_src):
        super(DisplayProject, self).__init__(data_src)
    # DISPLAY/UI-ONLY -- i.e.: these should be in a different class...
    @property
    def alert(self):
        """Warning pixbuf when an *active* project has no incomplete next
        action; the transparent filler pixbuf in every other case."""
        if self._status != "active":
            return FAKE_ICON_PIXBUF
        for action in self.next_actions:
            if not action.complete:
                # at least one NA is still open -- nothing to warn about
                return FAKE_ICON_PIXBUF
        return ALERT_ICON_PIXBUF
    @property
    def formatted_summary(self):
        """Priority-styled Pango markup for the project summary."""
        markup = {1: '<b>{0}</b>', 2: '{0}', 3: '<span weight="light">{0}</span>'}
        return markup[self.priority].format(saxutils.escape(self.summary))
class ProjectSupportFileRow(object):
    """One row in the project-support-files ObjectList: icon, name and
    sorting helpers for a single path."""
    def __init__(self, full_path):
        self.icon = self._get_icon_pixbuf(full_path)
        self.full_path = full_path
        self.file_name = os.path.basename(full_path)
        self.name_lowercase = self.file_name.lower()
        # we want to sort folders first, but after that we don't care
        self.isdir = os.path.isdir(full_path)
    def _get_icon_pixbuf(self, file_path):
        """Best-effort 16px icon for the path from the default GTK theme."""
        theme = gtk.icon_theme_get_default()
        # short-circuit on folders, since the content-type route fails on
        # them... strange.
        if os.path.isdir(file_path):
            return theme.load_icon('folder', 16, gtk.ICON_LOOKUP_USE_BUILTIN)
        content_type = gio.content_type_guess(file_path)
        candidate_names = gio.content_type_get_icon(content_type).get_names()
        # try every candidate icon name the theme might know about; GTK
        # offers no nicer way to ask "which of these do you have?"
        for icon_name in candidate_names:
            try:
                return theme.load_icon(icon_name, 16,
                                       gtk.ICON_LOOKUP_USE_BUILTIN)
            except glib.GError:
                continue
        # nothing matched -- fall back to something completely generic
        return theme.load_icon('text-x-generic', 16,
                               gtk.ICON_LOOKUP_USE_BUILTIN)
| Python |
#!/usr/bin/env python
#-*- coding:utf-8 -*-
#
# Copyright (C) 2009 - Jens Knutson <jens.knutson at gmail dot com>
# This software is licensed under the GNU General Public License
# version 3 or later (see the file COPYING).
"""Dialog controller classes for Fluidity."""
from __future__ import absolute_import, division, print_function
__author__ = 'Jens Knutson'
import datetime
import os
import time
import gobject
import gtk
import pango
from kiwi.ui.objectlist import Column
from fluidity import defs
from fluidity import gee_tee_dee
from fluidity import inbox_items
from fluidity import ui
from fluidity import utils
from fluidity.magic_machine import MagicMachine
from fluidity.note import ProjectNote
class GeeTeeDeeDialog(object):
    """Common base class for the New Project / New Next Action dialogs.

    Subclasses must set GTK_BUILDER_FILENAME to their .ui file name and
    define _map_fields_to_instance_names() (called during __init__) to bind
    the builder's widgets to instance attributes.
    """
    GTK_BUILDER_FILENAME = None

    def __init__(self, caller, datamgr):
        """Load the GtkBuilder UI, connect signals, and stash collaborators.

        Args:
            caller: the object that opened this dialog; used for callbacks
                back into the opener.
            datamgr: the data manager; also fed to MagicMachine.
        """
        self._builder = gtk.Builder()
        self._builder.add_from_file(os.path.join(defs.APP_DATA_PATH,
                                                 self.GTK_BUILDER_FILENAME))
        self._builder.connect_signals(self)
        #set up some instance names & objects
        self._map_fields_to_instance_names()
        self._caller = caller
        self._data_lumbergh = datamgr
        self._magical = MagicMachine(datamgr)

    def _get_priority(self):
        """Return the numeric priority for the label selected in the combo."""
        text = self._priority_w.get_selected_label()
        return ui.PRIORITY_LABELS_TO_VALUES[text]

    def _set_date_w_values(self, dt, widget):
        # FIXME: refactor - call it "set_date_w_text" or something, make it
        # apply both to this and queue_to (Goodo. hehe.)
        """Show `dt` in the date widget and remember it as widget.date.

        A dt of None means "no date": the widget text is left alone and
        widget.date is set to None.
        """
        try:
            # dt.strftime raises AttributeError when dt is None, which
            # short-circuits to the "no date" branch.  This used to be a
            # bare `except:` that also swallowed real errors (bad template,
            # broken widget, even KeyboardInterrupt) -- catch only the
            # expected case now.
            date_text = dt.strftime(defs.GTK_DATE_TEXT_TEMPLATE)
            widget.set_text(date_text)
            widget.date = dt
        except AttributeError:
            widget.date = None

    def _set_valid_date_w(self, widget):
        """Parse the widget's text with MagicMachine and set widget.date.

        Empty text means "no date"; unparseable text flags the widget with
        defs.UNRECOGNIZED_DATE_TEXT.
        """
        if widget.get_text() == "":
            widget.date = None
        else:
            # get_magic_date() returns None on failure, so we're safe either
            # way here
            widget.date = self._magical.get_magic_date(widget.get_text())
            if widget.date is None:
                # get_magic_date() didn't understand the mystery meat fed to it.
                widget.set_text(defs.UNRECOGNIZED_DATE_TEXT)
            else:
                #FIXME: hmm... that's kinda... goofy. review later
                self._set_date_w_values(widget.date, widget)
class NewProjectDialog(GeeTeeDeeDialog):
    """Dialog for creating a new Project: name, area of focus, dates,
    support files, and initial next actions."""
    GTK_BUILDER_FILENAME = 'new_prj_dialog.ui'
    def __init__(self, caller, datamgr):
        super(NewProjectDialog, self).__init__(caller, datamgr)
        # the ProjectNote window; created lazily when notes are requested
        self._note = None
    def add_files_to_files_list(self, file_path=None):
        """Add support files to the files list.

        With file_path=None a multi-select file chooser is shown; otherwise
        just the given path is added.  The list is then re-sorted: folders
        first, then case-insensitively by name.
        """
        if file_path is None:
            chooser = gtk.FileChooserDialog(
                action=gtk.FILE_CHOOSER_ACTION_OPEN,
                buttons=(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL,
                         gtk.STOCK_ADD, gtk.RESPONSE_OK))
            chooser.set_property("select-multiple", True)
            response = chooser.run()
            if response == gtk.RESPONSE_OK:
                for f in chooser.get_filenames():
                    row = ui.ProjectSupportFileRow(f)
                    self._files_list_w.append(row)
            chooser.destroy()
            chooser = None
        else:
            row = ui.ProjectSupportFileRow(file_path)
            self._files_list_w.append(row)
        self._files_list_w.sort_by_attribute("isdir", order=gtk.SORT_ASCENDING)
        self._files_list_w.sort_by_attribute("name_lowercase", order=gtk.SORT_ASCENDING)
    def fill_na_list_w(self, fuck_you):
        """Refill the NA list from self._prj and select the last row.

        (The argument is unused -- callers pass the project here, but the
        list is always rebuilt from self._prj.)
        """
        self._new_prj_na_list_w.clear()
        for n in self._prj.next_actions:
            self._new_prj_na_list_w.append(n)
        last_item = len(self._new_prj_na_list_w) - 1
        self._new_prj_na_list_w.select_paths([last_item])
    def open_new_prj_dialog(self, prj, status, aofs, prj_notes=None, on_exit=None):
        """Open the dialog for a new Project.

        Args:
            prj: the new Project instance to build up.
            status: project status string (e.g. "active") assigned to prj.
            aofs: area-of-focus label to preselect ("" for none).
            prj_notes: if given, a ProjectNote window is opened with them.
            on_exit: optional dict of options acted on when the dialog
                closes (see _create_prj: 'queued', 'waiting_for').
        """
        self._prj = prj
        self._prj.status = status
        self.on_exit_hooks = on_exit
        #FIXME: this is incongruent with how I start up the NA dialog
        self._init_ui(prj.summary, aofs)
        self._do_magic(prj.summary)
        self._dialog.show()
        if prj_notes:
            self._note = ProjectNote(prj=self._prj, notes_for_new_prj=prj_notes)
            self._note.show()
    def _build_aof_list(self):
        """Return the selected AOF as a one-element key list, or []."""
        if self._aof_w.get_selected() == defs.NO_AOF_ASSIGNED:
            return []
        else:
            return [utils.format_for_dict_key(self._aof_w.get_selected())]
    def _build_due_date(self):
        """Parse the due-date entry with MagicMachine (None on failure)."""
        return self._magical.get_magic_date(self._due_date_w.get_text())
    def _build_file_list(self):
        """Return the full paths of every row in the files list."""
        file_list = []
        # ZOMG I am so in love with Kiwi right now
        for f in self._files_list_w:
            file_list.append(f.full_path)
        return file_list
    def _build_prj(self, prj):
        """Copy the dialog fields onto `prj` and return it."""
        for a in self._build_aof_list():               # set aofs
            prj.aofs.append(a)
        prj.summary = self._name_w.get_text()
        due = self._build_due_date()                   # set due date
        if due:
            prj.due_date = due
        if len(self._prj.next_actions) == 0:           # set next_actions
            self._set_na_list()
        prj.priority = self._get_priority()            # set priority
        qd = self._build_queue_date()                  # set queue_date
        if qd:
            prj.queue_date = qd
        if prj.queue_date:                  # set status - **must be set
            prj.status = "queued"           # *after* queue_date, or this
        return prj                          # could be inaccurate!
    def _build_queue_date(self):
        """Always None for now -- queue-date entry is not wired up yet."""
        #FIXME: gotta make this actually do something, too...
        #self._queue_date_w.get_text()
        return None
    def _cancel_prj(self):
        """Hide the dialog and discard any notes created for the project."""
        self._dialog.hide()
        if self._note:
            self._note.delete()
    def _create_prj(self, prj):
        """Validate, build, and persist the project, then run exit hooks."""
        if self._validate_me_please_i_needs_it():
            prj = self._build_prj(prj)
            prj.file_list = self._build_file_list()
            self._dialog.hide()
            #FIXME: gawd, this is awful. must fiiiix.
            self._data_lumbergh.prjs[prj.key_name] = prj
            for a in prj.aofs:
                if a != "":
                    #FIXME: surely this can't be what I intended with DataManager ;P
                    self._data_lumbergh.aofs[a]['projects'].append(prj.key_name)
            for f in prj.file_list:
                self._data_lumbergh.copy_to_project_folder(f, prj)
            # if f.full_path.startswith(defs.INBOX_FOLDER):
            #     gio.File(f).trash()
            # handle on_exit hooks
            if self.on_exit_hooks:
                if 'queued' in self.on_exit_hooks:
                    self._caller.queue_project(prj)
                elif 'waiting_for' in self.on_exit_hooks:
                    self._caller.mark_project_as_waiting_for(prj)
            # prj.file_list is intended to be "disposable", soooo...
            del(prj.file_list)
            self._data_lumbergh.save_data()
            self._caller.fill_prj_list_w()
            # FIXME: re-enable this at some point... *sigh*
            # selected = self._caller.prj_list_w.index(prj)
            # self._caller.prj_list_w.select_paths([selected])
            # i.e.: are we on the Review tab?
            # NOTE(review): gtk.Notebook.get_current_page() returns an int,
            # so comparing it to the string "1" looks like it can never be
            # True -- confirm, and whether the focus grab below ever runs.
            if self._caller.workflow_nb.get_current_page() == "1":
                gobject.idle_add(self._caller.prj_list_w.grab_focus)
    def _do_magic(self, prj_name):
        """Run MagicMachine over the project name and prefill the fields."""
        #get magic task (i.e.: 'mt')
        mt = self._magical.get_magic_task(prj_name)
        self._name_w.set_text(mt['summary'])
        #FIXME: this is hackish, might break eventually if I don't clean it up
        if 'priority' in mt:
            plabel = ui.PRIORITY_VALUES_TO_LABELS[mt['priority']]
            self._priority_w.select_item_by_label(plabel)
        if 'due_date' in mt:
            dtext = mt['due_date'].strftime(defs.GTK_DATE_TEXT_TEMPLATE)
            self._due_date_w.set_text(dtext)
    def _init_files_list_w(self, obj_list):
        """Configure the columns of the support-files ObjectList."""
        #I have no idea why 23 worked best.
        obj_list.set_columns(
            [Column('icon', width=23, data_type=gtk.gdk.Pixbuf),
             Column('file_name', data_type=str, searchable=True, expand=True),
             Column('full_path', data_type=str, visible=False),
             Column('name_lowercase', data_type=str, visible=False),
             Column('isdir', data_type=bool, visible=False)])
        obj_list.set_headers_visible(False)
    def _init_ui(self, prj_name, aof):
        """Prefill the widgets: project name, AOF combo, lists, defaults."""
        #set the prj name and the areas of focus
        self._name_w.set_text(prj_name)
        #HACK: oh god... so, so sad. One day I'll learn proper OO design. *sigh*
        self._caller.fill_aofs_w(self._aof_w, self._data_lumbergh.aof_names, False)
        if aof != "":
            self._aof_w.select_item_by_label(aof)
        self._init_files_list_w(self._files_list_w)
        self._init_new_prj_na_list_w(self._new_prj_na_list_w)
        self._new_prj_na_summary_w.grab_focus()
        self._priority_w.select_item_by_label("Normal")
    def _init_new_prj_na_list_w(self, obj_list):
        """Configure the columns of the next-actions ObjectList."""
        obj_list.set_columns([Column('uuid', data_type=str, visible=False),
                              Column('context', data_type=str),
                              Column('formatted_summary', data_type=str,
                                     use_markup=True, searchable=True),
                              Column('due_date', data_type=str)])
        obj_list.set_headers_visible(False)
    def _map_fields_to_instance_names(self):
        """Bind the GtkBuilder widgets to short instance attribute names."""
        self._dialog = self._builder.get_object("new_prj_dialog")
        self._name_w = self._builder.get_object("new_prj_name_w")
        self._new_prj_na_summary_w = self._builder.get_object("new_prj_na_summary_w")
        self._priority_w = self._builder.get_object("new_prj_priority_w")
        self._due_date_w = self._builder.get_object("new_prj_due_date_w")
        self._files_list_w = self._builder.get_object("files_list_w")
        self._aof_w = self._builder.get_object("new_prj_aof_w")
        self._new_prj_na_list_w = self._builder.get_object("new_prj_na_list_w")
    def _set_na_list(self):
        """If the NA summary entry has text, turn it into a NextAction on
        self._prj (used when the project has no NAs yet)."""
        if self._new_prj_na_summary_w.props.text:
            # i.e.: we haven't appended our NA yet
            n = gee_tee_dee.NextAction(self._new_prj_na_summary_w.props.text)
            mt = self._magical.get_magic_task(n.summary)
            for name in mt.keys():
                # NOTE(review): the bare except silently drops any magic
                # attribute NextAction refuses -- it also hides real
                # errors; worth narrowing.
                try:
                    n.__setattr__(name, mt[name])
                except:
                    pass
            # and finally...
            self._prj.next_actions.append(n)
    def _type_ahead_combo(self, combo, gdk_keyval):
        """Delegate combo type-ahead to the shared ui helper."""
        ui.type_ahead_combo(combo, gdk_keyval)
    def _validate_me_please_i_needs_it(self):
        """Return True when the dialog's fields are valid enough to save."""
        # check that name line isn't blank
        if self._name_w.get_text() == "":
            return False
        # then verify that the due date isn't unparseable
        if self._due_date_w.get_text() == defs.UNRECOGNIZED_DATE_TEXT:
            self._due_date_w.grab_focus()
            return False
        #FIXME: add immediate prj queueing here
        # if self._queue_to_w.get_text() == defs.UNRECOGNIZED_DATE_TEXT:
        #     self._queue_to_w.grab_focus()
        #     return False
        return True
    #CALLBACKS
    def files_list_w_key_press_event_cb(self, widget, data=None):
        # Delete key removes the selected support file row
        if gtk.gdk.keyval_name(data.keyval) == "Delete":
            self._files_list_w.remove(self._files_list_w.get_selected())
    def files_list_w_row_activated_cb(self, widget, data=None):
        # double-click opens the file with the default handler
        uri_header = "file://"
        selected = widget.get_selected_rows()
        # don't do anything if multiple files are selected
        if len(selected) == 1:
            path = selected[0].full_path
            gtk.show_uri(gtk.gdk.Screen(), uri_header + path, int(time.time()))
    def new_prj_add_w_clicked_cb(self, widget, data=None):
        # grabbing focus first forces pending focus-out validation to run
        widget.grab_focus()
        self._create_prj(self._prj)
    def new_prj_aof_w_content_changed_cb(self, widget, data=None):
        pass
    def new_prj_aof_w_key_press_event_cb(self, widget, data=None):
        self._type_ahead_combo(widget, data)
    def new_prj_cancel_w_clicked_cb(self, widget, data=None):
        self._cancel_prj()
    def new_prj_due_date_w_focus_out_event_cb(self, widget, data=None):
        self._set_valid_date_w(widget)
    def new_prj_files_add_w_clicked_cb(self, widget, data=None):
        self.add_files_to_files_list()
    def new_prj_files_remove_w_clicked_cb(self, widget, data=None):
        for f in self._files_list_w.get_selected_rows():
            self._files_list_w.remove(f)
    def new_prj_na_edit_w_clicked_cb(self, widget, data=None):
        na = self._new_prj_na_list_w.get_selected()
        if na:
            nad = NewNextActionDialog(self, self._data_lumbergh)
            nad.edit_extant_na(na)
    def new_prj_na_remove_w_clicked_cb(self, widget, data=None):
        self._new_prj_na_list_w.remove(self._new_prj_na_list_w.get_selected())
    def new_prj_na_summary_w_activate_cb(self, widget, data=None):
        # Enter in the NA summary entry spawns the full NA dialog
        if self._new_prj_na_summary_w.get_text() != "":
            nad = NewNextActionDialog(self, self._data_lumbergh)
            nad.start_new_na(self._new_prj_na_summary_w.get_text(), self._prj)
            self._new_prj_na_summary_w.set_text("")
    def new_prj_notes_w_clicked_cb(self, widget, data=None):
        # FIXME: what happens when a user changes the name of the project
        # after creating their notes? I should probably write a "rename prj"
        # method somewhere...
        if not self._note:
            self._note = ProjectNote(prj=self._prj)
        self._note.show()
    def new_prj_priority_w_key_press_event_cb(self, widget, data=None):
        self._type_ahead_combo(widget, data)
class NewNextActionDialog(GeeTeeDeeDialog):
    """Dialog for creating a new NextAction or editing an existing one.

    One instance serves both flows; the flag set in __init__ records which
    flow is active when the OK button is clicked.
    """
    # FIXME: this whole class is a mess now. fix it. ...grumble grumble...
    # ..stupid asynchronous operations... grumble...grumble... bah!
    GTK_BUILDER_FILENAME = 'new_na_dialog.ui'
    def __init__(self, caller, datamgr):
        super(NewNextActionDialog, self).__init__(caller, datamgr)
        self._init_ui()
        # FIXME: see notes in main app module about the idea that this might be
        # a really stupid thing to do.
        # FIXME: surely there's a better way to do this. Someone smarter than me
        # can figure it out, I'm sure. ;-P
        # True while editing an existing NA, False while creating a new one.
        self._evil_global_variable_indicating_that_this_is_an_extant_na = False
    def edit_extant_na(self, na):
        """Edit a given NextAction in-place via the dialog."""
        #just set up the dialog...
        #ZOMG this is so ghettotastic. Must fix soon.
        label_text = """<span size="x-large"><b>Edit Next Action</b></span>"""
        self._title_label.set_text(label_text)
        self._title_label.set_use_markup(True)
        self._na = na
        self._evil_global_variable_indicating_that_this_is_an_extant_na = True
        #fill in the dialog fields from the given na
        self._populate_fields_from_na(self._na)
        if na.notes:
            self._builder.get_object("notes_expander").set_expanded(True)
        self._focus_first_editing_widget()
        self._dialog.show()
    def start_new_na(self, summary, prj, na_notes=None, status=None,
                     incubate=False, stuff=None):
        """Begin creating a new NA under `prj` from a raw summary string.

        If MagicMachine can fully parse `summary`, the NA is built and
        appended immediately and the dialog is never shown; otherwise the
        dialog opens for manual completion.

        Args:
            summary: raw summary text (possibly containing magic markup).
            prj: the Project the NA belongs to.
            na_notes: optional initial notes text.
            status: optional status; "queued" reveals the queue-to field.
            incubate: if True, the NA is destined for incubation.
            stuff: optional inbox item; an InboxFile prefills the URL field.
        """
        self._prj = prj
        self._na = gee_tee_dee.NextAction(summary)
        self.status = status
        self.incubate_flag = incubate
        self.stuff = stuff
        if na_notes:
            self._na.notes = na_notes
            self._notes_w.get_buffer().props.text = na_notes
            self._builder.get_object("notes_expander").set_expanded(True)
        if self._do_magic(self._na, prj):
            # if this passes, we have nothing more to do, so we're safe to
            # just quit/do nothing
            prj.next_actions.append(self._na)
            self._caller.fill_na_list_w(prj)
        else:
            if not incubate and \
               (self._prj.summary == "singletons" or self.status == "queued"):
                self._queue_to_w.show()
                self._builder.get_object("queue_to_label").show()
            self._summary_w.set_text(summary)
            if isinstance(self.stuff, inbox_items.InboxFile):
                url = "file://" + os.path.join(defs.SINGLETON_FILES,
                                               self.stuff.summary)
                self._url_w.set_text(url)
            self._dialog.show()
        # And now, we sit in waiting for user input. We're so lonely!
    def _autocomplete_context(self, widget):
        """Expand the typed context via MagicMachine's context lookup."""
        magic_context = self._magical.get_magic_context(widget.get_text())
        widget.set_text(magic_context)
    def _creates_na_for_realz(self):
        """OK-button path for a *new* NA: validate, build, and attach it."""
        # grabbing focus first forces pending focus-out validation to run
        self._finish_editing_na_w.grab_focus()
        if self._validate_me_im_so_insecure():
            self._dialog.hide()
            self._set_na_properties_from_fields(self._na)
            if self._prj.summary == "singletons" and self._na.queue_date:
                # NOTE(review): debug print left in -- probably removable
                print("queue date: ", self._na.queue_date,
                      type(self._na.queue_date))
                self._data_lumbergh.add_queued_singleton(self._na)
            elif self.incubate_flag:
                self._prj.incubating_next_actions.append(self._na)
            else:
                self._prj.next_actions.append(self._na)
    def _do_magic(self, na, prj):
        """Try to fill `na` entirely from MagicMachine parsing of its summary.

        Returns True (and mutates na) when the summary carried at least one
        recognized magic attribute; False when the dialog should be shown
        instead.  (`prj` is currently unused.)
        """
        mt = self._magical.get_magic_task(na.summary)
        has_magic = False
        # if our dict doesn't have these keys, our magic failed, and we
        # should show the dialog instead
        magic_keys = ['context', 'time_est', 'energy_est', 'priority', 'due_date']
        for key in mt.keys():
            if key in magic_keys:
                has_magic = True
        # 'url' was left out of magic_keys before, since it doesn't really
        # qualify as an integral part of a NextAction, but now that we've
        # tested, add it back in
        magic_keys.append('url')
        if has_magic:
            for key in mt.keys():
                if key in magic_keys:
                    na.__setattr__(key, mt[key])
            na.summary = mt['summary']
            # and finally...
            return True
        else:
            return False
    def _extant_na_finalize_changes(self):
        """OK-button path when editing an existing NA: validate and save."""
        # grabbing focus first forces pending focus-out validation to run
        self._finish_editing_na_w.grab_focus()
        #called from the "click the OK button" callback
        if self._validate_me_im_so_insecure():
            self._dialog.hide()
            # FIXME: perform magic here - turn the below into an 'if' between
            # using magic and using the fields which have been filled out
            self._set_na_properties_from_fields(self._na)
            #FIXME: reaching back into the caller like this is wrong; some
            # kind of signal, a passed-in callback, or letting the data
            # manager broadcast "your data changed" would all be cleaner.
            # NOTE(review): fill_na_list_w() is called with no arguments,
            # but NewProjectDialog.fill_na_list_w requires one -- confirm
            # which callers actually reach this path.
            self._caller.fill_na_list_w()
    def _focus_first_editing_widget(self):
        """Focus the context entry when a summary already exists and the
        context is still blank; otherwise focus the summary entry."""
        if self._summary_w.get_text() != "" and self._context_w.get_text() == "":
            self._dialog.set_focus(self._context_w)
        else:
            self._dialog.set_focus(self._summary_w)
    def _get_energy_est(self):
        """Numeric energy estimate for the selected label (None if unset)."""
        text = self._energy_est_w.get_selected_label()
        return ui.ENERGY_LABELS_TO_VALUES[text] if text else None
    def _init_ui(self):
        """Apply default widget values for a fresh dialog."""
        #set up a few defaults...
        self._energy_est_w.select_item_by_label("Normal")
        self._priority_w.select_item_by_label("Normal")
        self._time_est_w.set_value(10)
        # each of these names will refer to the actual datetime.date object for
        # the widget it's attached to. If the user enters no data, or if that
        # data is unrecognizable by MagicMachine, they'll stay 'None'
        self._queue_to_w.date = None
        self._due_date_w.date = None
    def _map_fields_to_instance_names(self):
        """Bind the GtkBuilder widgets to short instance attribute names."""
        self._dialog = self._builder.get_object("new_na_dialog")
        self._summary_w = self._builder.get_object("summary_w")
        self._context_w = self._builder.get_object("context_w")
        self._time_est_w = self._builder.get_object("time_est_w")
        self._energy_est_w = self._builder.get_object("energy_w")
        self._priority_w = self._builder.get_object("priority_w")
        self._due_date_w = self._builder.get_object("due_date_w")
        self._url_w = self._builder.get_object("url_w")
        self._notes_w = self._builder.get_object("notes_w")
        self._queue_to_w = self._builder.get_object("queue_to_w")
        self._title_label = self._builder.get_object("dialog_title_label")
        self._finish_editing_na_w = self._builder.get_object("finish_editing_na_w")
    def _populate_fields_from_na(self, na):
        """Copy an existing NA's attributes into the dialog widgets."""
        self._summary_w.set_text(na.summary)               # set summary
        if na.context:                                     # set context
            self._context_w.set_text(na.context)
        self._time_est_w.set_value(na.time_est)            # set time_est
        self._set_energy_est_w(na.energy_est)              # set energy_est
        self._set_priority_w(na.priority)                  # set priority
        self._set_date_w_values(na.due_date, self._due_date_w)  # set due_date
        if na.url:
            self._url_w.set_text(na.url)                   # set url
        if na.notes:                                       # set notes
            self._notes_w.get_buffer().props.text = na.notes
    def _set_date_w_values(self, dt, widget):
        # NOTE(review): duplicates GeeTeeDeeDialog._set_date_w_values and
        # shadows it; the bare except below also swallows real errors (its
        # only intended job is catching dt=None).  Candidates for cleanup.
        # FIXME: refactor - call it "set_date_w_text" or something, make it apply
        # both to this and queue_to (Goodo. hehe.)
        """Show `dt` in the date widget and remember it as widget.date
        (None when dt is None)."""
        try:
            #this will fail and thus short-circuit if the date is 'None'
            date_text = dt.strftime(defs.GTK_DATE_TEXT_TEMPLATE)
            widget.set_text(date_text)
            widget.date = dt
        except:
            widget.date = None
    def _set_energy_est_w(self, energy):
        """Select the combo label matching a numeric energy estimate."""
        etext = ui.ENERGY_VALUES_TO_LABELS[energy]
        self._energy_est_w.select_item_by_label(etext)
    def _set_na_properties_from_fields(self, na):
        """Copy the dialog widgets' values back onto `na`."""
        na.summary = self._summary_w.get_text()            # set summary
        na.context = self._context_w.get_text()            # set context
        na.time_est = self._time_est_w.get_value()         # set time_est
        #FIXME: need to actually send the appropriate int, not the selected string
        na.energy_est = self._get_energy_est()             # set energy_est
        na.priority = self._get_priority()                 # set priority
        # set due_date / queue_date from the .date stashed by validation
        na.due_date = self._due_date_w.date if self._due_date_w.date else None
        na.queue_date = self._queue_to_w.date if self._queue_to_w.date else None
        notes = self._notes_w.get_buffer().props.text      # set notes
        na.notes = notes if notes else None
        na.url = self._url_w.props.text
    def _set_priority_w(self, priority):
        """Select the combo label matching a numeric priority."""
        ptext = ui.PRIORITY_VALUES_TO_LABELS[priority]
        self._priority_w.select_item_by_label(ptext)
    def _type_ahead_combo(self, combo, gdk_keyval):
        """Delegate combo type-ahead to the shared ui helper."""
        ui.type_ahead_combo(combo, gdk_keyval)
    def _validate_me_im_so_insecure(self):
        """Final validation pass; returns True when the fields are usable.

        The focus-out handlers do the real validation -- this just checks
        the leftovers: non-empty summary, a well-formed @Context, and no
        unparsed date markers.
        """
        if self._summary_w.get_text() == "":
            self._summary_w.grab_focus()
            return False
        context = self._context_w.get_text()
        if context != "":
            # i.e.: if we *do* have an @ at the beginning, don't make that the "capitalize" char
            context = context[0] + context[1:].capitalize()
            self._context_w.set_text(context)
            if " " in context or not context.startswith('@'):
                self._context_w.grab_focus()
                return False
        if self._due_date_w.get_text() == defs.UNRECOGNIZED_DATE_TEXT:
            self._due_date_w.grab_focus()
            return False
        if self._queue_to_w.get_text() == defs.UNRECOGNIZED_DATE_TEXT:
            self._queue_to_w.grab_focus()
            return False
        #everything was fine, go ahead.
        return True
    # CALLBACKS
    def cancel_w_clicked_cb(self, widget, data=None):
        self._dialog.hide()
    def context_w_focus_out_event_cb(self, widget, data=None):
        self._autocomplete_context(widget)
    def date_w_focus_out_event_cb(self, widget, data=None):
        self._set_valid_date_w(widget)
    def finish_editing_na_w_clicked_cb(self, widget, data=None):
        # dispatch on edit-vs-create mode (see __init__)
        if self._evil_global_variable_indicating_that_this_is_an_extant_na:
            self._extant_na_finalize_changes()
        else:
            self._creates_na_for_realz()
    # hml, i.e.: "high, medium, low"
    def hml_combo_key_press_event_cb(self, widget, data=None):
        self._type_ahead_combo(widget, data)
    def notes_expander_activate_cb(self, widget, data=None):
        # the expander's state here is pre-toggle, hence the inverted look
        if self._builder.get_object("notes_expander").get_expanded():
            #FIXME: this is bad policy.
            self._summary_w.grab_focus()
        else:
            self._notes_w.grab_focus()
    def url_w_focus_out_event_cb(self, widget, data=None):
        # default to http:// when no scheme was typed
        url = widget.get_text()
        if "://" not in url and url != "":
            widget.set_text("http://" + url)
    def url_w_icon_press_cb(self, widget, icon=None, event=None):
        # clicking the entry icon normalizes then opens the URL
        self.url_w_focus_out_event_cb(widget)
        print("URI: " + widget.get_text())
        gtk.show_uri(gtk.gdk.Screen(), widget.get_text(), int(time.time()))
class ReassignProjectCategoryDialog(object):
    """Base class for the date-picking dialogs that move a project between
    categories (queued / waiting-for).

    Subclasses must set GTK_BUILDER_FILENAME and define
    _map_fields_to_instance_names() (called during __init__), which must
    bind at least self.calendar_w.  self.valid tracks whether the current
    date entry parsed successfully; self.date_result holds the parsed date.
    """
    GTK_BUILDER_FILENAME = None
    def __init__(self, set_entry_invalid=False):
        self._builder = gtk.Builder()
        self._builder.add_from_file(os.path.join(defs.APP_DATA_PATH,
                                                 self.GTK_BUILDER_FILENAME))
        self._builder.connect_signals(self)
        self._map_fields_to_instance_names()
        # NOTE: unlike GeeTeeDeeDialog, MagicMachine is built with no
        # data-manager argument here.
        self._magical = MagicMachine()
        self._set_calendar_widget_date(self.calendar_w, datetime.date.today())
        self.valid = False
    def _set_calendar_widget_date(self, cal_widget, date_obj):
        """Point the gtk.Calendar at date_obj."""
        for pair in (("year", date_obj.year),
                     # gtk.Calendar months are 0-based -- stupid fscking widget
                     ("month", date_obj.month - 1),
                     ("day", date_obj.day)):
            cal_widget.set_property(pair[0], pair[1])
    def _validate_date_entry_w(self, entry):
        """Parse the entry text; update self.valid/self.date_result and
        reflect the result in both the entry and the calendar."""
        self.date_result = self._magical.get_magic_date(entry.get_text())
        if self.date_result:
            self.valid = True
            entry.set_text(self.date_result.strftime(defs.GTK_DATE_TEXT_TEMPLATE))
            self._set_calendar_widget_date(self.calendar_w, self.date_result)
        else:
            self.valid = False
            entry.set_text(defs.UNRECOGNIZED_DATE_TEXT)
class QueueProjectDialog(ReassignProjectCategoryDialog):
    """Modal dialog asking for the date a project should be queued until."""
    GTK_BUILDER_FILENAME = 'queue_prj_dialog.ui'
    def __init__(self, set_entry_invalid=False):
        super(QueueProjectDialog, self).__init__(set_entry_invalid)
        if set_entry_invalid:
            self.queue_date_entry_w.set_text(defs.UNRECOGNIZED_DATE_TEXT)
        else:
            self.queue_date_entry_w.set_text("")
    def get_datetime(self):
        """Run the dialog; return the chosen date, or None when canceled
        (the cancel branch falls off the end and returns None implicitly)."""
        self.date_result = None
        result = self._dialog.run()
        if result == gtk.RESPONSE_OK:
            self._dialog.hide()
            # causes the text field to validate, and if valid, set the date. woo.
            # possibly an evil way to do it, but like I really care. I do! I do
            # care! Look how much. ... Look. Look how much I care.
            self._queue_w.grab_focus()
            return self.date_result
        else:
            self._dialog.hide()
            # we hit cancel and thus don't care if the result was valid.
            self.valid = True
    def _map_fields_to_instance_names(self):
        """Bind the GtkBuilder widgets to instance attribute names."""
        self._dialog = self._builder.get_object("queue_prj_dialog")
        self._queue_w = self._builder.get_object("queue_w")
        self.calendar_w = self._builder.get_object("queue_prj_calendar_w")
        self.queue_date_entry_w = self._builder.get_object("queue_prj_date_entry_w")
    # CALLBACKS
    def queue_prj_calendar_w_day_selected_cb(self, widget, data=None):
        # picking a day in the calendar wins over whatever was typed
        self.date_result = _get_date_from_stupid_calendar_widget(widget)
        self.valid = True
        self.queue_date_entry_w.set_text(self.date_result.strftime(
                                                defs.GTK_DATE_TEXT_TEMPLATE))
    def queue_prj_calendar_w_day_selected_double_click_cb(self, widget, data=None):
        self.date_result = _get_date_from_stupid_calendar_widget(widget)
        self.valid = True
        # and close the dialog
        self._queue_w.activate()
    def queue_prj_date_entry_w_focus_out_event_cb(self, widget, data=None):
        if widget.get_text() != "":
            self._validate_date_entry_w(widget)
class WaitingForDialog(ReassignProjectCategoryDialog):
    """Modal dialog asking what a project is waiting for, and until when."""
    GTK_BUILDER_FILENAME = 'waiting_for_dialog.ui'
    def __init__(self, set_entry_invalid=False):
        super(WaitingForDialog, self).__init__(set_entry_invalid)
        if set_entry_invalid:
            self.waiting_for_date_entry_w.set_text(defs.UNRECOGNIZED_DATE_TEXT)
        else:
            self.waiting_for_date_entry_w.set_text("")
    def get_waiting_for_info(self):
        """Run the dialog; return (date, waiting-for text), or None when
        canceled (the cancel branch returns None implicitly)."""
        self.date_result = None
        result = self._dialog.run()
        if result == gtk.RESPONSE_OK:
            self._dialog.hide()
            # causes the text field to validate, and if valid, set the date. woo.
            # possibly an evil way to do it, but like I really care. I do! I do
            # care! Look how much. ... Look. Look how much I care.
            self.mark_as_waiting_for_w.grab_focus()
            return (self.date_result, self.waiting_for_text_w.get_text())
        else:
            self._dialog.hide()
            # we hit cancel and thus don't care if the result was valid.
            self.valid = True
    def _map_fields_to_instance_names(self):
        """Bind the GtkBuilder widgets to instance attribute names."""
        self._dialog = self._builder.get_object("waiting_for_dialog")
        self.mark_as_waiting_for_w = self._builder.get_object("mark_as_waiting_for_w")
        self.calendar_w = self._builder.get_object("waiting_for_calendar_w")
        self.waiting_for_date_entry_w = self._builder.get_object("waiting_for_date_entry_w")
        self.waiting_for_text_w = self._builder.get_object("waiting_for_text_w")
    # CALLBACKS
    def waiting_for_calendar_w_day_selected_cb(self, widget, data=None):
        # picking a day in the calendar wins over whatever was typed
        self.date_result = _get_date_from_stupid_calendar_widget(widget)
        self.valid = True
        self.waiting_for_date_entry_w.set_text(self.date_result.strftime(
                                                defs.GTK_DATE_TEXT_TEMPLATE))
    def waiting_for_calendar_w_day_selected_double_click_cb(self, widget, data=None):
        self.date_result = _get_date_from_stupid_calendar_widget(widget)
        # only auto-close when a "waiting for" description has been entered
        if self.waiting_for_text_w.get_text() != "":
            self.valid = True
            # and close the dialog
            self.mark_as_waiting_for_w.activate()
    def waiting_for_date_entry_w_focus_out_event_cb(self, widget, data=None):
        if widget.get_text() != "":
            self._validate_date_entry_w(widget)
class SearchBase(object):
    """Abstract class for search dialogs.

    Subclasses must implement _search() (returning an iterable of result
    rows) and open_result_w_clicked_cb() (what to do with the selection).
    """
    # FIXME: MAKE THIS AN ACTUAL ABC

    def __init__(self, data_manager, caller):
        self._dm = data_manager
        self._builder = gtk.Builder()
        self._caller = caller

    def search(self, query):
        """Build a fresh window and kick off a search for `query`."""
        self._build_new_window()
        self._window.show_all()
        # this triggers the changed event for the gtk.Entry, thus kicking
        # off the search
        self._query_box_w.set_text(query)

    def _arkless_flood(self, widgets):
        """Destroy (and None out) every widget in `widgets`.

        Fixed: the `widgets` parameter used to be ignored (pylint W0611
        suppressed) in favour of self._ux_widgets; every caller passes
        self._ux_widgets, so honouring the parameter changes nothing.
        """
        for i in reversed(range(len(widgets))):
            try:
                widgets[i].destroy()
                widgets[i] = None
            except AttributeError:
                # slot was already cleared -- None has no destroy()
                pass

    def _build_new_window(self):
        """Load the search dialog UI and configure the result columns."""
        self._builder.add_from_file(os.path.join(defs.APP_DATA_PATH,
                                                 'search_dialog.ui'))
        self._builder.connect_signals(self)
        # set up some instance names & objects
        self._map_fields_to_instance_names()
        self._results_w.set_columns([Column('summary_formatted', data_type=str,
                                            ellipsize=pango.ELLIPSIZE_END,
                                            expand=True, searchable=True,
                                            use_markup=True),
                                     Column('result_type_formatted', data_type=str,
                                            use_markup=True),
                                     Column('prj_key', data_type=str, visible=False),
                                     Column('result_type', data_type=str, visible=False)])
        self._results_w.set_headers_visible(False)
        # widgets torn down together by _arkless_flood() on destroy
        self._ux_widgets = [self._window, self._top_vbox, self._query_box_w,
                            self._include_completed_w, self._results_w,
                            self._include_nas_w]
        self._include_nas_w.props.active = True

    def _fill_results_list(self):
        """Re-run the search and repopulate the result list.

        Queries of three or more characters only; shorter ones are ignored.
        """
        if len(self._query_box_w.get_text()) > 2:
            self._results_w.clear()
            for row in self._search():
                self._results_w.append(row)
            if len(self._results_w) > 0:
                self._results_w.select_paths([0])

    def _map_fields_to_instance_names(self):
        """Bind the GtkBuilder widgets to short instance attributes."""
        self._window = self._builder.get_object('search_window_w')
        self._query_box_w = self._builder.get_object('query_box_w')
        self._include_completed_w = self._builder.get_object('include_completed_w')
        self._include_nas_w = self._builder.get_object('include_nas_w')
        self._open_result_w = self._builder.get_object('open_result_w')
        self._results_w = self._builder.get_object('results_w')
        self._top_vbox = self._builder.get_object('top_vbox')

    def _search(self):
        """Return an iterable of result rows; subclass responsibility."""
        raise NotImplementedError

    # CALLBACKS
    def include_completed_w_toggled_cb(self, widget, data=None):
        self._fill_results_list()

    def include_nas_w_toggled_cb(self, widget, data=None):
        self._fill_results_list()

    def open_result_w_clicked_cb(self, widget, data=None):
        raise NotImplementedError

    def query_box_w_changed_cb(self, widget, data=None):
        self._fill_results_list()

    def results_w_mnemonic_activate_cb(self, widget, data=None):
        gobject.idle_add(widget.grab_focus)

    def results_w_row_activated_cb(self, widget, data=None):
        self.open_result_w_clicked_cb(widget, data)

    def search_window_w_activate_default_cb(self, widget, data=None):
        # FIXME: ... I'm not exactly sure why this is here in the first place.
        pass

    def search_window_w_destroy_cb(self, widget, data=None):
        self._arkless_flood(self._ux_widgets)
class JumptoSearchDialog(SearchBase):
    """Search dialog that jumps to the selected result."""

    def _jump_to_result(self):
        """Hide the window and ask the caller to focus the selection."""
        self._window.hide_all()
        #FIXME: this is stoopid but I'm too brain dead to do it The Right Way
        #right now, and I want it done and over with so I can release 0.1
        row = self._results_w.get_selected()
        self._caller.jump_to_search_result(row.prj_key, row.na_uuid)
        gobject.idle_add(self._arkless_flood, self._ux_widgets)

    def _search(self):
        """Query the data manager, honouring both filter toggles."""
        query = self._query_box_w.get_text()
        completed = self._include_completed_w.get_active()
        nas = self._include_nas_w.get_active()
        return self._dm.search(query, completed, nas)

    #CALLBACKS
    def open_result_w_clicked_cb(self, widget, data=None):
        self._jump_to_result()
class ReferenceAttacherSearchDialog(SearchBase):
    """Search dialog used to attach a piece of "stuff" to a project."""

    def __init__(self, data_manager, caller, stuff):
        self._stuff = stuff
        super(ReferenceAttacherSearchDialog, self).__init__(data_manager, caller)

    def search(self, query):
        super(ReferenceAttacherSearchDialog, self).search(query)
        # Next actions are never attachment targets; hide that toggle.
        self._include_nas_w.hide()
        self._include_nas_w.props.visible = False

    def _attach_to_matching_project(self, match, stuff):
        """Hide the window and hand (project key, stuff) to the caller."""
        self._window.hide_all()
        #FIXME: this is stoopid but I'm too brain dead to do it The Right Way
        #right now, and I want it done and over with so I can release 0.1
        self._caller.attach_stuff_to_prj(match.prj_key, stuff)
        gobject.idle_add(self._arkless_flood, self._ux_widgets)

    def _build_new_window(self):
        super(ReferenceAttacherSearchDialog, self)._build_new_window()
        self._open_result_w.set_label("_Attach to Project")

    def _search(self):
        # Next actions are always excluded from the result list here.
        query = self._query_box_w.get_text()
        return self._dm.search(query,
                               self._include_completed_w.get_active(),
                               False)

    #CALLBACKS
    def open_result_w_clicked_cb(self, widget, data=None):
        selected = self._results_w.get_selected()
        self._attach_to_matching_project(selected, self._stuff)
def _get_date_from_stupid_calendar_widget(cal_widget):
    """Return a datetime.date for the day selected in a gtk.Calendar.

    gtk.Calendar.get_date() reports a 0-based month, so it is shifted by
    one.  Constructing datetime.date directly replaces the old
    time.mktime()/date.fromtimestamp() round-trip (the FIXME'd hack),
    which could misbehave in timezones where DST shifts at midnight.
    """
    year, month, day = cal_widget.get_date()
    return datetime.date(year, month + 1, day)
| Python |
#!/usr/bin/env python
# vim: noexpandtab:ts=4:sts=4
"""Menu Generator for PekWM
Generates a dynamic menu for PekWM using the freedesktop.org standards
Usage:
Options:
-l ..., --lang=... create the menu using a language. Default = $LANG
-h, --help show this help
"""
__author__ = "Michael Rice , Rudolf Kastl , Antonio Gomes"
__version__ = "$Revision: 1.0 $"
__date__ = "$Date: 2006/10/12 18:20:10 $"
__license__ = "GPL"
import xdg.Menu,xdg.DesktopEntry
import getopt,os,sys
def usage():
    """Print the module docstring (the command-line help) to stdout.

    Fixed: uses the parenthesized print form, valid in Python 2 and 3.
    """
    print(__doc__)
def checkWm(entry, wm="pekwm"):
    """Set entry.Show from the desktop entry's OnlyShowIn/NotShowIn lists.

    Fixed: `wm` was an undefined global here, raising NameError whenever
    an entry carried a NotShowIn list.  It is now a keyword parameter
    (default "pekwm"), mirroring the fluxbox variant of this generator.
    """
    if entry.DesktopEntry.getOnlyShowIn() != []:
        entry.Show = False
    if entry.DesktopEntry.getNotShowIn() != []:
        if isinstance(entry, xdg.Menu.MenuEntry):
            if wm in entry.DesktopEntry.getNotShowIn():
                entry.Show = False
    else:
        # NOTE(review): an empty NotShowIn unconditionally re-enables the
        # entry, overriding the OnlyShowIn check above -- preserved as-is.
        entry.Show = True
def parseMenu(menu, depth=1):
    """Recursively print `menu` as a PekWM "Dynamic" Submenu block."""
    print("%s Submenu = \"%s\" {" % ((depth * "\t"), menu.getName().encode('utf8'),))
    depth += 1
    for entry in menu.getEntries():
        if isinstance(entry, xdg.Menu.Menu):
            parseMenu(entry, depth)
        elif isinstance(entry, xdg.Menu.MenuEntry):
            checkWm(entry)
            if entry.Show == False:
                continue
            print("%sEntry = \"%s\" { Actions = \"Exec %s &\" } " % (
                (depth * "\t"),
                entry.DesktopEntry.getName().encode("utf8"),
                entry.DesktopEntry.getExec().split()[0]))
        elif isinstance(entry, xdg.Menu.Separator):
            print("%sSeparator {}" % (depth * "\t"))
        # Fixed: was `isinstance(entry.xdg.Menu.Header)` -- an attribute
        # access plus a one-argument isinstance(), which always raised.
        elif isinstance(entry, xdg.Menu.Header):
            print("%s%s" % ((depth * "\t"), entry.Name))
    depth -= 1
    print("%s}" % (depth * "\t"))
def main(argv):
    """Parse command-line options and print the whole dynamic menu."""
    lang = os.getenv("LANG", "C")
    try:
        # NOTE(review): "f:" and the duplicated "d" in the short-option
        # string are never handled below -- kept for CLI compatibility.
        opts, args = getopt.getopt(argv, "hf:dl:d", ["help", "lang="])
    except getopt.GetoptError:
        usage()
        raise SystemExit
    for opt, arg in opts:
        if opt in ("-h", "--help"):
            usage()
            raise SystemExit
        elif opt in ("-l", "--lang"):
            # NOTE(review): `lang` is collected but never used; xdg.Menu
            # performs locale selection itself.
            lang = arg
    menu = xdg.Menu.parse()
    print("Dynamic {")
    parseMenu(menu)
    print("}")

if __name__ == "__main__":
    main(sys.argv[1:])
| Python |
#!/usr/bin/env python
# vim: noexpandtab:ts=4:sts=4
"""Menu Generator for Fluxbox
Generates a menu for Fluxbox using the freedesktop.org standards
Usage: fluxbox-fdo-menugen.py [options]
Options:
-l ..., --lang=... create the menu using a language. Default = $LANG
-h, --help show this help
-f ..., --file=... output the menu into a file. Default = ~/.fluxbox/menu
-t ..., --theme=... what icon theme you want to use
--with-icons put icons for applications in the menu
--stdout output the menu to standard output
--submenu output to be used as an include/submenu with fluxbox
--with-backgrounds creates a background menu. Default background_paths =
~/.fluxbox/backgrounds, /usr/share/wallpapers,
/usr/share/backgrounds
--backgrounds-only do not regenerate menu, only do the bg menu.
--bg-path= path to location to look for images
example: --bg-path=~/pics
may be used with --backgrounds-only but --bg-path=
must be first: --bg-path=~/some/path --backgrounds-only
A nice example string to use: fluxbox-fdo-menugen.py --with-icons --with-backgrounds --bg-path=~/some/path
To update only the backgrounds: fluxbox-fdo-menugen.py --bg-path=~/some/path --backgrounds-only
"""
__author__ = "Rudolf Kastl , Antonio Gomes, Michael Rice"
__version__ = "$Revision: 1.2 $"
__date__ = "$Date: 2006/10/09 23:20:10 $"
__license__ = "GPL"
import os,re,sys,glob,getopt
import xdg.Menu,xdg.DesktopEntry,xdg.IconTheme
from os.path import isfile
def usage():
    """Print the module docstring (the command-line help) to stdout.

    Fixed: uses the parenthesized print form, valid in Python 2 and 3.
    """
    print(__doc__)
def header(wm="fluxbox"):
    """Return the fixed banner written at the top of a full menu.

    `wm` is accepted for symmetry with footer()/checkWm() but unused.
    """
    banner = """
[begin] (Fluxbox)
[exec] (Web Browser) {htmlview}
[exec] (Email) {evolution}
[exec] (Terminal) {$TERM}
[exec] (Irc) {xchat}
[separator]\n"""
    return banner
def footer(wm="fluxbox"):
    """Return the fixed trailer (styles, tools, exit...) of a full menu.

    `wm` is accepted for symmetry with header()/checkWm() but unused.
    """
    trailer = """
[submenu] (Fluxbox Menu)
[config] (Configure)
[submenu] (System Styles) {Choose a style...}
[stylesdir] (/usr/share/fluxbox/styles)
[stylesdir] (/usr/share/commonbox/styles/)
[end]
[submenu] (User Styles) {Choose a style...}
[stylesdir] (~/.fluxbox/styles)
[end]
[workspaces] (Workspace List)
[submenu] (Tools)
[exec] (Window name) {xprop WM_CLASS|cut -d \" -f 2|xmessage -file - -center}
[exec] (Screenshot - JPG) {import screenshot.jpg && display -resize 50% screenshot.jpg}
[exec] (Screenshot - PNG) {import screenshot.png && display -resize 50% screenshot.png}
[exec] (Run) {fbrun }
[exec] (Regen Menu) {fluxbox-generate_menu --with-icons}
[end]
[submenu] (Window)
[restart] (kde) {startkde}
[restart] (openbox) {openbox}
[restart] (gnome) {gnome-session}
[end]
[exec] (Lock screen) {xscreensaver-command -lock}
[commanddialog] (Fluxbox Command)
[reconfig] (Reload config)
[restart] (Restart)
[separator]
[exit] (Exit)
[end]
[end]\n"""
    return trailer
def checkWm(entry, wm="fluxbox"):
    """Set entry.Show from the entry's OnlyShowIn/NotShowIn lists."""
    desktop = entry.DesktopEntry
    if desktop.getOnlyShowIn() != []:
        entry.Show = False
    not_show_in = desktop.getNotShowIn()
    if not_show_in == []:
        # No NotShowIn list at all: the entry is shown.
        entry.Show = True
    elif isinstance(entry, xdg.Menu.MenuEntry) and wm in not_show_in:
        entry.Show = False
def findIcon(icon, theme):
    """Finds the path and filename for the given icon name

    e.g. gaim --> /usr/share/pixmaps/gaim.png
    e.g. fart.png --> /usr/share/pixmaps/fart.png

    Returns an empty (encoded) string when the icon cannot be resolved.
    """
    path = xdg.IconTheme.getIconPath(icon, 48, theme)
    if path is None:
        # Fixed: the old code compared str(None) against the literal
        # "None", which would also have swallowed a real path named "None".
        path = ""
    return str(path).encode('utf8')
def parseMenu(menu, wm, use_icons, theme, depth=1):
    """Recursively print `menu` in fluxbox menu syntax.

    wm        -- window-manager name passed through to checkWm()
    use_icons -- when True, append <icon path> markers to each line
    theme     -- icon theme name handed to findIcon()
    depth     -- current indentation level (tabs)
    """
    tabs = depth * "\t"
    if use_icons:
        print("%s[submenu] (%s) <%s> " % (tabs, menu.getName().encode('utf8'), findIcon(menu.getIcon(), theme)))
    else:
        print("%s[submenu] (%s) " % (tabs, menu.getName().encode('utf8'),))
    depth += 1
    tabs = depth * "\t"
    for entry in menu.getEntries():
        if isinstance(entry, xdg.Menu.Menu):
            parseMenu(entry, wm, use_icons, theme, depth)
        elif isinstance(entry, xdg.Menu.MenuEntry):
            checkWm(entry, wm)
            if entry.Show == False:
                continue
            if use_icons:
                print("%s[exec] (%s) {%s} <%s> " % (tabs, entry.DesktopEntry.getName().encode("utf8"), entry.DesktopEntry.getExec().split()[0], findIcon(entry.DesktopEntry.getIcon(), theme)))
            else:
                print("%s[exec] (%s) {%s} " % (tabs, entry.DesktopEntry.getName().encode("utf8"), entry.DesktopEntry.getExec().split()[0]))
        elif isinstance(entry, xdg.Menu.Separator):
            print("%s[separator]" % tabs)
        # Fixed: was `isinstance(entry.xdg.Menu.Header)` -- an attribute
        # access plus a one-argument isinstance(), which always raised.
        elif isinstance(entry, xdg.Menu.Header):
            print("%s%s" % (tabs, entry.Name))
    depth -= 1
    print("%s[end]" % (depth * "\t"))
def get_bgimgs_and_parse(xPath):
    """(Re)generate ~/.fluxbox/bgmenu from the images found in the default
    background directories plus `xPath` (when not None).

    Entries are paginated into submenus of 25 images each.
    """
    bgmenu_path = os.path.expanduser("~/.fluxbox/bgmenu")
    try:
        if isfile(bgmenu_path):
            os.unlink(bgmenu_path)
    except OSError:
        pass
    # filename -> directory it was found in (later directories win)
    h = {}
    bg_paths = ["~/.fluxbox/backgrounds", "/usr/share/wallpapers",
                "/usr/share/backgrounds", "/usr/share/backgrounds/images"]
    if xPath is not None:
        bg_paths.append(xPath)
    # Fixed: the old code nested two loops over the same list, scanning
    # every directory len(bg_paths) times for identical results.
    for imgpth in bg_paths:
        try:
            for i in os.listdir(os.path.expanduser(imgpth)):
                h[i] = imgpth
        except OSError:
            # unreadable or missing directory -- just skip it
            pass
    bgMenu = open(bgmenu_path, 'w+')
    bgPagCk = 1   # entries written on the current submenu "page"
    bgPgNum = 1   # current page number
    bgMenu.write("[submenu] (Backgrounds)\n")
    bgMenu.write("[submenu] (Backgrounds) {Set Your Background}\n")
    bgMenu.write("\t[exec] (Random Image) {fbsetbg -r ~/.fluxbox/backgrounds}\n")
    types = ["png", "jpg", "jpeg", "gif"]
    for i in h.keys():
        if i.split(".")[-1].lower() not in types:
            continue
        # Fixed: removed a stray debug 'print "Hello"' per image, and an
        # impossible except KeyError handler around this body.
        bgMenu.write("\t[exec]\t(" + i + ") {fbsetbg -f " + h[i] + "/" + i + "}\n")
        bgPagCk = bgPagCk + 1
        if bgPagCk == 26:
            # start a new 25-entry page
            bgPgNum = bgPgNum + 1
            bgMenu.write("[end]\n[submenu] (Backgrounds " + str(bgPgNum) + ") \
{Set Your Background}\n")
            bgPagCk = 1
    # Fixed: the closing [end]s and the close() used to run only when every
    # listed file happened to be an image; now they always run, so the
    # generated menu file is always well-formed.
    bgMenu.write("[end]\n[end]\n")
    bgMenu.close()
def main(argv):
    """Parse options, then write the generated fluxbox menu to `file`
    (default ~/.fluxbox/menu), or to stdout with --stdout."""
    # Setting the default values
    wm = "fluxbox"
    use_icons = False
    use_bg = False
    bg_Xpath = False
    theme = "gnome"
    lang = os.getenv("LANG", "C")   # NOTE(review): collected but unused
    file = os.path.expanduser("~/.fluxbox/menu")
    do_submenu = False
    use_stdout = False
    try:
        opts, args = getopt.getopt(argv, "hf:dl:d", ["help", "lang=", "file=", "with-icons", "stdout",
                                                     "theme=", "submenu", "with-backgrounds", "backgrounds-only", "bg-path="])
    except getopt.GetoptError:
        usage()
        sys.exit(2)
    for opt, arg in opts:
        if opt in ("-h", "--help"):
            usage()
            sys.exit()
        elif opt in ("-l", "--lang"):
            lang = arg
        elif opt in ("-f", "--file"):
            file = os.path.expanduser(arg)
        elif opt == '--with-icons':
            use_icons = True
        elif opt in ("-t", "--theme"):
            theme = arg
        elif opt == '--stdout':
            # Fixed: a second, unreachable `elif opt == '--stdout'` branch
            # (which set file = sys.stdout) has been removed.
            use_stdout = True
        elif opt == '--bg-path':
            bg_Xpath = True
            xPath = os.path.expanduser(arg)
        elif opt == '--with-backgrounds':
            use_bg = True
        elif opt == '--backgrounds-only':
            # Only regenerate the background submenu, then stop.
            if bg_Xpath:
                get_bgimgs_and_parse(xPath)
            else:
                get_bgimgs_and_parse(None)
            raise SystemExit
        elif opt == '--submenu':
            do_submenu = True
    if not use_stdout:
        # Redirect all of the print() output below into the menu file.
        fsock = open(file, 'w')
        saveout = sys.stdout
        sys.stdout = fsock
    menu = xdg.Menu.parse()
    # locale selection is done automatically by xdg.Menu these days
    if not do_submenu:
        print(header())
    parseMenu(menu, wm, use_icons, theme)
    if not do_submenu and use_bg and bg_Xpath:
        get_bgimgs_and_parse(xPath)
        print("[include] (~/.fluxbox/bgmenu)")
    if not do_submenu and use_bg and not bg_Xpath:
        print("[include] (~/.fluxbox/bgmenu)")
        get_bgimgs_and_parse(None)
    if not do_submenu:
        print(footer())
    if not use_stdout:
        sys.stdout = saveout

if __name__ == "__main__":
    main(sys.argv[1:])
| Python |
#!/usr/bin/env python
# vim: noexpandtab:ts=4:sts=4
"""Menu Generator for PekWM
Generates a dynamic menu for PekWM using the freedesktop.org standards
Usage:
Options:
-l ..., --lang=... create the menu using a language. Default = $LANG
-h, --help show this help
"""
__author__ = "Michael Rice , Rudolf Kastl , Antonio Gomes"
__version__ = "$Revision: 1.0 $"
__date__ = "$Date: 2006/10/12 18:20:10 $"
__license__ = "GPL"
import xdg.Menu,xdg.DesktopEntry
import getopt,os,sys
def usage():
    """Print the module docstring (the command-line help) to stdout.

    Fixed: uses the parenthesized print form, valid in Python 2 and 3.
    """
    print(__doc__)
def checkWm(entry, wm="pekwm"):
    """Set entry.Show from the desktop entry's OnlyShowIn/NotShowIn lists.

    Fixed: `wm` was an undefined global here, raising NameError whenever
    an entry carried a NotShowIn list.  It is now a keyword parameter
    (default "pekwm"), mirroring the fluxbox variant of this generator.
    """
    if entry.DesktopEntry.getOnlyShowIn() != []:
        entry.Show = False
    if entry.DesktopEntry.getNotShowIn() != []:
        if isinstance(entry, xdg.Menu.MenuEntry):
            if wm in entry.DesktopEntry.getNotShowIn():
                entry.Show = False
    else:
        # NOTE(review): an empty NotShowIn unconditionally re-enables the
        # entry, overriding the OnlyShowIn check above -- preserved as-is.
        entry.Show = True
def parseMenu(menu, depth=1):
    """Recursively print `menu` as a PekWM "Dynamic" Submenu block."""
    print("%s Submenu = \"%s\" {" % ((depth * "\t"), menu.getName().encode('utf8'),))
    depth += 1
    for entry in menu.getEntries():
        if isinstance(entry, xdg.Menu.Menu):
            parseMenu(entry, depth)
        elif isinstance(entry, xdg.Menu.MenuEntry):
            checkWm(entry)
            if entry.Show == False:
                continue
            print("%sEntry = \"%s\" { Actions = \"Exec %s &\" } " % (
                (depth * "\t"),
                entry.DesktopEntry.getName().encode("utf8"),
                entry.DesktopEntry.getExec().split()[0]))
        elif isinstance(entry, xdg.Menu.Separator):
            print("%sSeparator {}" % (depth * "\t"))
        # Fixed: was `isinstance(entry.xdg.Menu.Header)` -- an attribute
        # access plus a one-argument isinstance(), which always raised.
        elif isinstance(entry, xdg.Menu.Header):
            print("%s%s" % ((depth * "\t"), entry.Name))
    depth -= 1
    print("%s}" % (depth * "\t"))
def main(argv):
    """Parse command-line options and print the whole dynamic menu."""
    lang = os.getenv("LANG", "C")
    try:
        # NOTE(review): "f:" and the duplicated "d" in the short-option
        # string are never handled below -- kept for CLI compatibility.
        opts, args = getopt.getopt(argv, "hf:dl:d", ["help", "lang="])
    except getopt.GetoptError:
        usage()
        raise SystemExit
    for opt, arg in opts:
        if opt in ("-h", "--help"):
            usage()
            raise SystemExit
        elif opt in ("-l", "--lang"):
            # NOTE(review): `lang` is collected but never used; xdg.Menu
            # performs locale selection itself.
            lang = arg
    menu = xdg.Menu.parse()
    print("Dynamic {")
    parseMenu(menu)
    print("}")

if __name__ == "__main__":
    main(sys.argv[1:])
| Python |
#!/usr/bin/env python
# vim: noexpandtab:ts=4:sts=4
"""Menu Generator for Fluxbox
Generates a menu for Fluxbox using the freedesktop.org standards
Usage: fluxbox-fdo-menugen.py [options]
Options:
-l ..., --lang=... create the menu using a language. Default = $LANG
-h, --help show this help
-f ..., --file=... output the menu into a file. Default = ~/.fluxbox/menu
-t ..., --theme=... what icon theme you want to use
--with-icons put icons for applications in the menu
--stdout output the menu to standard output
--submenu output to be used as an include/submenu with fluxbox
--with-backgrounds creates a background menu. Default background_paths =
~/.fluxbox/backgrounds, /usr/share/wallpapers,
/usr/share/backgrounds
--backgrounds-only do not regenerate menu, only do the bg menu.
--bg-path= path to location to look for images
example: --bg-path=~/pics
may be used with --backgrounds-only but --bg-path=
must be first: --bg-path=~/some/path --backgrounds-only
A nice example string to use: fluxbox-fdo-menugen.py --with-icons --with-backgrounds --bg-path=~/some/path
To update only the backgrounds: fluxbox-fdo-menugen.py --bg-path=~/some/path --backgrounds-only
"""
__author__ = "Rudolf Kastl , Antonio Gomes, Michael Rice"
__version__ = "$Revision: 1.2 $"
__date__ = "$Date: 2006/10/09 23:20:10 $"
__license__ = "GPL"
import os,re,sys,glob,getopt
import xdg.Menu,xdg.DesktopEntry,xdg.IconTheme
from os.path import isfile
def usage():
    """Print the module docstring (the command-line help) to stdout.

    Fixed: uses the parenthesized print form, valid in Python 2 and 3.
    """
    print(__doc__)
def header(wm="fluxbox"):
    """Return the fixed banner written at the top of a full menu.

    `wm` is accepted for symmetry with footer()/checkWm() but unused.
    """
    banner = """
[begin] (Fluxbox)
[exec] (Web Browser) {htmlview}
[exec] (Email) {evolution}
[exec] (Terminal) {$TERM}
[exec] (Irc) {xchat}
[separator]\n"""
    return banner
def footer(wm="fluxbox"):
    """Return the fixed trailer (styles, tools, exit...) of a full menu.

    `wm` is accepted for symmetry with header()/checkWm() but unused.
    """
    trailer = """
[submenu] (Fluxbox Menu)
[config] (Configure)
[submenu] (System Styles) {Choose a style...}
[stylesdir] (/usr/share/fluxbox/styles)
[stylesdir] (/usr/share/commonbox/styles/)
[end]
[submenu] (User Styles) {Choose a style...}
[stylesdir] (~/.fluxbox/styles)
[end]
[workspaces] (Workspace List)
[submenu] (Tools)
[exec] (Window name) {xprop WM_CLASS|cut -d \" -f 2|xmessage -file - -center}
[exec] (Screenshot - JPG) {import screenshot.jpg && display -resize 50% screenshot.jpg}
[exec] (Screenshot - PNG) {import screenshot.png && display -resize 50% screenshot.png}
[exec] (Run) {fbrun }
[exec] (Regen Menu) {fluxbox-generate_menu --with-icons}
[end]
[submenu] (Window)
[restart] (kde) {startkde}
[restart] (openbox) {openbox}
[restart] (gnome) {gnome-session}
[end]
[exec] (Lock screen) {xscreensaver-command -lock}
[commanddialog] (Fluxbox Command)
[reconfig] (Reload config)
[restart] (Restart)
[separator]
[exit] (Exit)
[end]
[end]\n"""
    return trailer
def checkWm(entry, wm="fluxbox"):
    """Set entry.Show from the entry's OnlyShowIn/NotShowIn lists."""
    desktop = entry.DesktopEntry
    if desktop.getOnlyShowIn() != []:
        entry.Show = False
    not_show_in = desktop.getNotShowIn()
    if not_show_in == []:
        # No NotShowIn list at all: the entry is shown.
        entry.Show = True
    elif isinstance(entry, xdg.Menu.MenuEntry) and wm in not_show_in:
        entry.Show = False
def findIcon(icon, theme):
    """Finds the path and filename for the given icon name

    e.g. gaim --> /usr/share/pixmaps/gaim.png
    e.g. fart.png --> /usr/share/pixmaps/fart.png

    Returns an empty (encoded) string when the icon cannot be resolved.
    """
    path = xdg.IconTheme.getIconPath(icon, 48, theme)
    if path is None:
        # Fixed: the old code compared str(None) against the literal
        # "None", which would also have swallowed a real path named "None".
        path = ""
    return str(path).encode('utf8')
def parseMenu(menu, wm, use_icons, theme, depth=1):
    """Recursively print `menu` in fluxbox menu syntax.

    wm        -- window-manager name passed through to checkWm()
    use_icons -- when True, append <icon path> markers to each line
    theme     -- icon theme name handed to findIcon()
    depth     -- current indentation level (tabs)
    """
    tabs = depth * "\t"
    if use_icons:
        print("%s[submenu] (%s) <%s> " % (tabs, menu.getName().encode('utf8'), findIcon(menu.getIcon(), theme)))
    else:
        print("%s[submenu] (%s) " % (tabs, menu.getName().encode('utf8'),))
    depth += 1
    tabs = depth * "\t"
    for entry in menu.getEntries():
        if isinstance(entry, xdg.Menu.Menu):
            parseMenu(entry, wm, use_icons, theme, depth)
        elif isinstance(entry, xdg.Menu.MenuEntry):
            checkWm(entry, wm)
            if entry.Show == False:
                continue
            if use_icons:
                print("%s[exec] (%s) {%s} <%s> " % (tabs, entry.DesktopEntry.getName().encode("utf8"), entry.DesktopEntry.getExec().split()[0], findIcon(entry.DesktopEntry.getIcon(), theme)))
            else:
                print("%s[exec] (%s) {%s} " % (tabs, entry.DesktopEntry.getName().encode("utf8"), entry.DesktopEntry.getExec().split()[0]))
        elif isinstance(entry, xdg.Menu.Separator):
            print("%s[separator]" % tabs)
        # Fixed: was `isinstance(entry.xdg.Menu.Header)` -- an attribute
        # access plus a one-argument isinstance(), which always raised.
        elif isinstance(entry, xdg.Menu.Header):
            print("%s%s" % (tabs, entry.Name))
    depth -= 1
    print("%s[end]" % (depth * "\t"))
def get_bgimgs_and_parse(xPath):
    """(Re)generate ~/.fluxbox/bgmenu from the images found in the default
    background directories plus `xPath` (when not None).

    Entries are paginated into submenus of 25 images each.
    """
    bgmenu_path = os.path.expanduser("~/.fluxbox/bgmenu")
    try:
        if isfile(bgmenu_path):
            os.unlink(bgmenu_path)
    except OSError:
        pass
    # filename -> directory it was found in (later directories win)
    h = {}
    bg_paths = ["~/.fluxbox/backgrounds", "/usr/share/wallpapers",
                "/usr/share/backgrounds", "/usr/share/backgrounds/images"]
    if xPath is not None:
        bg_paths.append(xPath)
    # Fixed: the old code nested two loops over the same list, scanning
    # every directory len(bg_paths) times for identical results.
    for imgpth in bg_paths:
        try:
            for i in os.listdir(os.path.expanduser(imgpth)):
                h[i] = imgpth
        except OSError:
            # unreadable or missing directory -- just skip it
            pass
    bgMenu = open(bgmenu_path, 'w+')
    bgPagCk = 1   # entries written on the current submenu "page"
    bgPgNum = 1   # current page number
    bgMenu.write("[submenu] (Backgrounds)\n")
    bgMenu.write("[submenu] (Backgrounds) {Set Your Background}\n")
    bgMenu.write("\t[exec] (Random Image) {fbsetbg -r ~/.fluxbox/backgrounds}\n")
    types = ["png", "jpg", "jpeg", "gif"]
    for i in h.keys():
        if i.split(".")[-1].lower() not in types:
            continue
        # Fixed: removed a stray debug 'print "Hello"' per image, and an
        # impossible except KeyError handler around this body.
        bgMenu.write("\t[exec]\t(" + i + ") {fbsetbg -f " + h[i] + "/" + i + "}\n")
        bgPagCk = bgPagCk + 1
        if bgPagCk == 26:
            # start a new 25-entry page
            bgPgNum = bgPgNum + 1
            bgMenu.write("[end]\n[submenu] (Backgrounds " + str(bgPgNum) + ") \
{Set Your Background}\n")
            bgPagCk = 1
    # Fixed: the closing [end]s and the close() used to run only when every
    # listed file happened to be an image; now they always run, so the
    # generated menu file is always well-formed.
    bgMenu.write("[end]\n[end]\n")
    bgMenu.close()
def main(argv):
    """Parse options, then write the generated fluxbox menu to `file`
    (default ~/.fluxbox/menu), or to stdout with --stdout."""
    # Setting the default values
    wm = "fluxbox"
    use_icons = False
    use_bg = False
    bg_Xpath = False
    theme = "gnome"
    lang = os.getenv("LANG", "C")   # NOTE(review): collected but unused
    file = os.path.expanduser("~/.fluxbox/menu")
    do_submenu = False
    use_stdout = False
    try:
        opts, args = getopt.getopt(argv, "hf:dl:d", ["help", "lang=", "file=", "with-icons", "stdout",
                                                     "theme=", "submenu", "with-backgrounds", "backgrounds-only", "bg-path="])
    except getopt.GetoptError:
        usage()
        sys.exit(2)
    for opt, arg in opts:
        if opt in ("-h", "--help"):
            usage()
            sys.exit()
        elif opt in ("-l", "--lang"):
            lang = arg
        elif opt in ("-f", "--file"):
            file = os.path.expanduser(arg)
        elif opt == '--with-icons':
            use_icons = True
        elif opt in ("-t", "--theme"):
            theme = arg
        elif opt == '--stdout':
            # Fixed: a second, unreachable `elif opt == '--stdout'` branch
            # (which set file = sys.stdout) has been removed.
            use_stdout = True
        elif opt == '--bg-path':
            bg_Xpath = True
            xPath = os.path.expanduser(arg)
        elif opt == '--with-backgrounds':
            use_bg = True
        elif opt == '--backgrounds-only':
            # Only regenerate the background submenu, then stop.
            if bg_Xpath:
                get_bgimgs_and_parse(xPath)
            else:
                get_bgimgs_and_parse(None)
            raise SystemExit
        elif opt == '--submenu':
            do_submenu = True
    if not use_stdout:
        # Redirect all of the print() output below into the menu file.
        fsock = open(file, 'w')
        saveout = sys.stdout
        sys.stdout = fsock
    menu = xdg.Menu.parse()
    # locale selection is done automatically by xdg.Menu these days
    if not do_submenu:
        print(header())
    parseMenu(menu, wm, use_icons, theme)
    if not do_submenu and use_bg and bg_Xpath:
        get_bgimgs_and_parse(xPath)
        print("[include] (~/.fluxbox/bgmenu)")
    if not do_submenu and use_bg and not bg_Xpath:
        print("[include] (~/.fluxbox/bgmenu)")
        get_bgimgs_and_parse(None)
    if not do_submenu:
        print(footer())
    if not use_stdout:
        sys.stdout = saveout

if __name__ == "__main__":
    main(sys.argv[1:])
#!/usr/bin/env python
# vim: noexpandtab:ts=4:sts=4
"""Menu Generator for PekWM
Generates a dynamic menu for PekWM using the freedesktop.org standards
Usage:
Options:
-l ..., --lang=... create the menu using a language. Default = $LANG
-h, --help show this help
"""
__author__ = "Michael Rice , Rudolf Kastl , Antonio Gomes"
__version__ = "$Revision: 1.0 $"
__date__ = "$Date: 2006/10/12 18:20:10 $"
__license__ = "GPL"
import xdg.Menu,xdg.DesktopEntry
import getopt,os,sys
def usage():
    """Print the module docstring (the command-line help) to stdout.

    Fixed: uses the parenthesized print form, valid in Python 2 and 3.
    """
    print(__doc__)
def checkWm(entry, wm="pekwm"):
    """Set entry.Show from the desktop entry's OnlyShowIn/NotShowIn lists.

    Fixed: `wm` was an undefined global here, raising NameError whenever
    an entry carried a NotShowIn list.  It is now a keyword parameter
    (default "pekwm"), mirroring the fluxbox variant of this generator.
    """
    if entry.DesktopEntry.getOnlyShowIn() != []:
        entry.Show = False
    if entry.DesktopEntry.getNotShowIn() != []:
        if isinstance(entry, xdg.Menu.MenuEntry):
            if wm in entry.DesktopEntry.getNotShowIn():
                entry.Show = False
    else:
        # NOTE(review): an empty NotShowIn unconditionally re-enables the
        # entry, overriding the OnlyShowIn check above -- preserved as-is.
        entry.Show = True
def parseMenu(menu, depth=1):
    """Recursively print `menu` as a PekWM "Dynamic" Submenu block."""
    print("%s Submenu = \"%s\" {" % ((depth * "\t"), menu.getName().encode('utf8'),))
    depth += 1
    for entry in menu.getEntries():
        if isinstance(entry, xdg.Menu.Menu):
            parseMenu(entry, depth)
        elif isinstance(entry, xdg.Menu.MenuEntry):
            checkWm(entry)
            if entry.Show == False:
                continue
            print("%sEntry = \"%s\" { Actions = \"Exec %s &\" } " % (
                (depth * "\t"),
                entry.DesktopEntry.getName().encode("utf8"),
                entry.DesktopEntry.getExec().split()[0]))
        elif isinstance(entry, xdg.Menu.Separator):
            print("%sSeparator {}" % (depth * "\t"))
        # Fixed: was `isinstance(entry.xdg.Menu.Header)` -- an attribute
        # access plus a one-argument isinstance(), which always raised.
        elif isinstance(entry, xdg.Menu.Header):
            print("%s%s" % ((depth * "\t"), entry.Name))
    depth -= 1
    print("%s}" % (depth * "\t"))
def main(argv):
    """Parse command-line options and print the whole dynamic menu."""
    lang = os.getenv("LANG", "C")
    try:
        # NOTE(review): "f:" and the duplicated "d" in the short-option
        # string are never handled below -- kept for CLI compatibility.
        opts, args = getopt.getopt(argv, "hf:dl:d", ["help", "lang="])
    except getopt.GetoptError:
        usage()
        raise SystemExit
    for opt, arg in opts:
        if opt in ("-h", "--help"):
            usage()
            raise SystemExit
        elif opt in ("-l", "--lang"):
            # NOTE(review): `lang` is collected but never used; xdg.Menu
            # performs locale selection itself.
            lang = arg
    menu = xdg.Menu.parse()
    print("Dynamic {")
    parseMenu(menu)
    print("}")

if __name__ == "__main__":
    main(sys.argv[1:])
| Python |
#!/usr/bin/env python
# vim: noexpandtab:ts=4:sts=4
"""Menu Generator for Fluxbox
Generates a menu for Fluxbox using the freedesktop.org standards
Usage: fluxbox-fdo-menugen.py [options]
Options:
-l ..., --lang=... create the menu using a language. Default = $LANG
-h, --help show this help
-f ..., --file=... output the menu into a file. Default = ~/.fluxbox/menu
-t ..., --theme=... what icon theme you want to use
--with-icons put icons for applications in the menu
--stdout output the menu to standard output
--submenu output to be used as an include/submenu with fluxbox
--with-backgrounds creates a background menu. Default background_paths =
~/.fluxbox/backgrounds, /usr/share/wallpapers,
/usr/share/backgrounds
--backgrounds-only do not regenerate menu, only do the bg menu.
--bg-path= path to location to look for images
example: --bg-path=~/pics
may be used with --backgrounds-only but --bg-path=
must be first: --bg-path=~/some/path --backgrounds-only
A nice example string to use: fluxbox-fdo-menugen.py --with-icons --with-backgrounds --bg-path=~/some/path
To update only the backgrounds: fluxbox-fdo-menugen.py --bg-path=~/some/path --backgrounds-only
"""
__author__ = "Rudolf Kastl , Antonio Gomes, Michael Rice"
__version__ = "$Revision: 13 $"
__date__ = "$Date: 2010/01/20 21:50:10 $"
__license__ = "GPL"
import os,re,sys,glob,getopt
import xdg.Menu,xdg.DesktopEntry,xdg.IconTheme
from os.path import isfile
def usage():
    """Print the module docstring (the command-line help) to stdout.

    Fixed: uses the parenthesized print form, valid in Python 2 and 3.
    """
    print(__doc__)
def header(wm="fluxbox"):
    """Return the fixed banner written at the top of a full menu.

    `wm` is accepted for symmetry with footer()/checkWm() but unused.
    """
    banner = """
[begin] (Fluxbox)
[exec] (Web Browser) {htmlview}
[exec] (Email) {evolution}
[exec] (Terminal) {$TERM}
[exec] (Irc) {xchat}
[separator]\n"""
    return banner
def footer(wm="fluxbox"):
    """Return the fixed trailer (styles, tools, exit...) of a full menu.

    `wm` is accepted for symmetry with header()/checkWm() but unused.
    """
    trailer = """
[submenu] (Fluxbox Menu)
[config] (Configure)
[submenu] (System Styles) {Choose a style...}
[stylesdir] (/usr/share/fluxbox/styles)
[stylesdir] (/usr/share/commonbox/styles/)
[end]
[submenu] (User Styles) {Choose a style...}
[stylesdir] (~/.fluxbox/styles)
[end]
[workspaces] (Workspace List)
[submenu] (Tools)
[exec] (Window name) {xprop WM_CLASS|cut -d \" -f 2|xmessage -file - -center}
[exec] (Screenshot - JPG) {import screenshot.jpg && display -resize 50% screenshot.jpg}
[exec] (Screenshot - PNG) {import screenshot.png && display -resize 50% screenshot.png}
[exec] (Run) {fbrun }
[exec] (Regen Menu) {fluxbox-generate_menu --with-icons}
[end]
[submenu] (Window)
[restart] (kde) {startkde}
[restart] (openbox) {openbox}
[restart] (gnome) {gnome-session}
[end]
[exec] (Lock screen) {xscreensaver-command -lock}
[commanddialog] (Fluxbox Command)
[reconfig] (Reload config)
[restart] (Restart)
[separator]
[exit] (Exit)
[end]
[end]\n"""
    return trailer
def checkWm(entry, wm="fluxbox"):
    """Set entry.Show from the entry's OnlyShowIn/NotShowIn lists."""
    desktop = entry.DesktopEntry
    if desktop.getOnlyShowIn() != []:
        entry.Show = False
    not_show_in = desktop.getNotShowIn()
    if not_show_in == []:
        # No NotShowIn list at all: the entry is shown.
        entry.Show = True
    elif isinstance(entry, xdg.Menu.MenuEntry) and wm in not_show_in:
        entry.Show = False
def findIcon(icon, theme):
    """Finds the path and filename for the given icon name

    e.g. gaim --> /usr/share/pixmaps/gaim.png
    e.g. fart.png --> /usr/share/pixmaps/fart.png

    Returns an empty (encoded) string when the icon cannot be resolved.
    """
    path = xdg.IconTheme.getIconPath(icon, 48, theme, ["png", "xpm"])
    if path is None:
        # Fixed: the old code compared str(None) against the literal
        # "None", which would also have swallowed a real path named "None".
        path = ""
    return str(path).encode('utf8')
def parseMenu(menu, wm, use_icons, theme, depth=1):
    """Recursively print `menu` in fluxbox menu syntax.

    wm        -- window-manager name passed through to checkWm()
    use_icons -- when True, append <icon path> markers to each line
    theme     -- icon theme name handed to findIcon()
    depth     -- current indentation level (tabs)
    """
    tabs = depth * "\t"
    if use_icons:
        print("%s[submenu] (%s) <%s> " % (tabs, menu.getName().encode('utf8'), findIcon(menu.getIcon(), theme)))
    else:
        print("%s[submenu] (%s) " % (tabs, menu.getName().encode('utf8'),))
    depth += 1
    tabs = depth * "\t"
    for entry in menu.getEntries():
        if isinstance(entry, xdg.Menu.Menu):
            parseMenu(entry, wm, use_icons, theme, depth)
        elif isinstance(entry, xdg.Menu.MenuEntry):
            checkWm(entry, wm)
            if entry.Show == False:
                continue
            if use_icons:
                print("%s[exec] (%s) {%s} <%s> " % (tabs, entry.DesktopEntry.getName().encode("utf8"), entry.DesktopEntry.getExec().split()[0], findIcon(entry.DesktopEntry.getIcon(), theme)))
            else:
                print("%s[exec] (%s) {%s} " % (tabs, entry.DesktopEntry.getName().encode("utf8"), entry.DesktopEntry.getExec().split()[0]))
        elif isinstance(entry, xdg.Menu.Separator):
            print("%s[separator]" % tabs)
        # Fixed: was `isinstance(entry.xdg.Menu.Header)` -- an attribute
        # access plus a one-argument isinstance(), which always raised.
        elif isinstance(entry, xdg.Menu.Header):
            print("%s%s" % (tabs, entry.Name))
    depth -= 1
    print("%s[end]" % (depth * "\t"))
def get_bgimgs_and_parse(xPath):
    """(Re)generate ~/.fluxbox/bgmenu from the images found in the default
    background directories plus `xPath` (when not None).

    Entries are paginated into submenus of 25 images each.
    """
    bgmenu_path = os.path.expanduser("~/.fluxbox/bgmenu")
    try:
        if isfile(bgmenu_path):
            os.unlink(bgmenu_path)
    except OSError:
        pass
    # filename -> directory it was found in (later directories win)
    h = {}
    bg_paths = ["~/.fluxbox/backgrounds", "/usr/share/wallpapers",
                "/usr/share/backgrounds", "/usr/share/backgrounds/images"]
    if xPath is not None:
        bg_paths.append(xPath)
    # Fixed: the old code nested two loops over the same list, scanning
    # every directory len(bg_paths) times for identical results.
    for imgpth in bg_paths:
        try:
            for i in os.listdir(os.path.expanduser(imgpth)):
                h[i] = imgpth
        except OSError:
            # unreadable or missing directory -- just skip it
            pass
    bgMenu = open(bgmenu_path, 'w+')
    bgPagCk = 1   # entries written on the current submenu "page"
    bgPgNum = 1   # current page number
    bgMenu.write("[submenu] (Backgrounds)\n")
    bgMenu.write("[submenu] (Backgrounds) {Set Your Background}\n")
    bgMenu.write("\t[exec] (Random Image) {fbsetbg -r ~/.fluxbox/backgrounds}\n")
    types = ["png", "jpg", "jpeg", "gif"]
    for i in h.keys():
        if i.split(".")[-1].lower() not in types:
            continue
        # Fixed: removed a stray debug 'print "Hello"' per image, and an
        # impossible except KeyError handler around this body.
        bgMenu.write("\t[exec]\t(" + i + ") {fbsetbg -f " + h[i] + "/" + i + "}\n")
        bgPagCk = bgPagCk + 1
        if bgPagCk == 26:
            # start a new 25-entry page
            bgPgNum = bgPgNum + 1
            bgMenu.write("[end]\n[submenu] (Backgrounds " + str(bgPgNum) + ") \
{Set Your Background}\n")
            bgPagCk = 1
    # Fixed: the closing [end]s and the close() used to run only when every
    # listed file happened to be an image; now they always run, so the
    # generated menu file is always well-formed.
    bgMenu.write("[end]\n[end]\n")
    bgMenu.close()
def main(argv):
    """Parse options, then write the generated fluxbox menu to `file`
    (default ~/.fluxbox/menu), or to stdout with --stdout."""
    # Setting the default values
    wm = "fluxbox"
    use_icons = False
    use_bg = False
    bg_Xpath = False
    theme = "gnome"
    lang = os.getenv("LANG", "C")   # NOTE(review): collected but unused
    file = os.path.expanduser("~/.fluxbox/menu")
    do_submenu = False
    use_stdout = False
    try:
        opts, args = getopt.getopt(argv, "hf:dl:d", ["help", "lang=", "file=", "with-icons", "stdout",
                                                     "theme=", "submenu", "with-backgrounds", "backgrounds-only", "bg-path="])
    except getopt.GetoptError:
        usage()
        sys.exit(2)
    for opt, arg in opts:
        if opt in ("-h", "--help"):
            usage()
            sys.exit()
        elif opt in ("-l", "--lang"):
            lang = arg
        elif opt in ("-f", "--file"):
            file = os.path.expanduser(arg)
        elif opt == '--with-icons':
            use_icons = True
        elif opt in ("-t", "--theme"):
            theme = arg
        elif opt == '--stdout':
            # Fixed: a second, unreachable `elif opt == '--stdout'` branch
            # (which set file = sys.stdout) has been removed.
            use_stdout = True
        elif opt == '--bg-path':
            bg_Xpath = True
            xPath = os.path.expanduser(arg)
        elif opt == '--with-backgrounds':
            use_bg = True
        elif opt == '--backgrounds-only':
            # Only regenerate the background submenu, then stop.
            if bg_Xpath:
                get_bgimgs_and_parse(xPath)
            else:
                get_bgimgs_and_parse(None)
            raise SystemExit
        elif opt == '--submenu':
            do_submenu = True
    if not use_stdout:
        # Redirect all of the print() output below into the menu file.
        fsock = open(file, 'w')
        saveout = sys.stdout
        sys.stdout = fsock
    menu = xdg.Menu.parse()
    # locale selection is done automatically by xdg.Menu these days
    if not do_submenu:
        print(header())
    parseMenu(menu, wm, use_icons, theme)
    if not do_submenu and use_bg and bg_Xpath:
        get_bgimgs_and_parse(xPath)
        print("[include] (~/.fluxbox/bgmenu)")
    if not do_submenu and use_bg and not bg_Xpath:
        print("[include] (~/.fluxbox/bgmenu)")
        get_bgimgs_and_parse(None)
    if not do_submenu:
        print(footer())
    if not use_stdout:
        sys.stdout = saveout

if __name__ == "__main__":
    main(sys.argv[1:])
| Python |
#!/usr/bin/env python
# vim: noexpandtab:ts=4:sts=4
"""Menu Generator for PekWM
Generates a dynamic menu for PekWM using the freedesktop.org standards
Usage:
Options:
-l ..., --lang=... create the menu using a language. Default = $LANG
-h, --help show this help
"""
__author__ = "Michael Rice , Rudolf Kastl , Antonio Gomes"
__version__ = "$Revision: 1.0 $"
__date__ = "$Date: 2006/10/12 18:20:10 $"
__license__ = "GPL"
import xdg.Menu,xdg.DesktopEntry
import getopt,os,sys
def usage():
    """Print the module docstring (the command-line help) to stdout.

    Fixed: uses the parenthesized print form, valid in Python 2 and 3.
    """
    print(__doc__)
def checkWm(entry, wm="pekwm"):
    """Set entry.Show from the desktop entry's OnlyShowIn/NotShowIn lists.

    Fixed: `wm` was an undefined global here, raising NameError whenever
    an entry carried a NotShowIn list.  It is now a keyword parameter
    (default "pekwm"), mirroring the fluxbox variant of this generator.
    """
    if entry.DesktopEntry.getOnlyShowIn() != []:
        entry.Show = False
    if entry.DesktopEntry.getNotShowIn() != []:
        if isinstance(entry, xdg.Menu.MenuEntry):
            if wm in entry.DesktopEntry.getNotShowIn():
                entry.Show = False
    else:
        # NOTE(review): an empty NotShowIn unconditionally re-enables the
        # entry, overriding the OnlyShowIn check above -- preserved as-is.
        entry.Show = True
def parseMenu(menu,depth=1):
print "%s Submenu = \"%s\" {" % ( (depth*"\t"), menu.getName().encode('utf8'), )
depth += 1
for entry in menu.getEntries():
if isinstance(entry, xdg.Menu.Menu):
parseMenu(entry,depth)
elif isinstance(entry, xdg.Menu.MenuEntry):
checkWm(entry)
if entry.Show == False: continue
print "%sEntry = \"%s\" { Actions = \"Exec %s &\" } " % ( (depth*"\t"), \
entry.DesktopEntry.getName().encode("utf8"), \
entry.DesktopEntry.getExec().split()[0])
elif isinstance(entry,xdg.Menu.Separator):
print "%sSeparator {}" % (depth*"\t")
elif isinstance(entry.xdg.Menu.Header):
print "%s%s" % ( (depth*"\t"), entry.Name )
depth -= 1
print "%s}" % (depth*"\t")
def main(argv):
    """Parse CLI options and print the PekWM dynamic menu to stdout."""
    # Default menu language comes from the environment, 'C' as a fallback.
    # NOTE(review): `lang` is collected but never applied -- pyxdg picks the
    # locale up automatically (see the "is done automatically now" comment in
    # the fluxbox sibling).
    lang = os.getenv("LANG", "C")
    try:
        # NOTE(review): the short-option string also accepts -f <arg> and a
        # duplicated -d that are never handled below; looks like a
        # copy/paste leftover from the fluxbox generator.
        opts, args = getopt.getopt(argv, "hf:dl:d", ["help", "lang="])
    except getopt.GetoptError:
        usage()
        raise SystemExit
    for opt, arg in opts:
        if opt in ("-h", "--help"):
            usage()
            raise SystemExit
        elif opt in ("-l", "--lang"):
            lang = arg
    # Parse the freedesktop.org applications menu and emit it as a PekWM
    # "Dynamic" block.
    menu = xdg.Menu.parse()
    print "Dynamic {"
    parseMenu(menu)
    print "}"

if __name__ == "__main__":
    main(sys.argv[1:])
| Python |
#!/usr/bin/env python
# vim: noexpandtab:ts=4:sts=4
"""Menu Generator for Fluxbox
Generates a menu for Fluxbox using the freedesktop.org standards
Usage: fluxbox-fdo-menugen.py [options]
Options:
-l ..., --lang=... create the menu using a language. Default = $LANG
-h, --help show this help
-f ..., --file=... output the menu into a file. Default = ~/.fluxbox/menu
-t ..., --theme=... what icon theme you want to use
--with-icons put icons for applications in the menu
--stdout output the menu to standard output
--submenu output to be used as an include/submenu with fluxbox
--with-backgrounds creates a background menu. Default background_paths =
~/.fluxbox/backgrounds, /usr/share/wallpapers,
/usr/share/backgrounds
--backgrounds-only do not regenerate menu, only do the bg menu.
--bg-path= path to location to look for images
example: --bg-path=~/pics
may be used with --backgrounds-only but --bg-path=
must be first: --bg-path=~/some/path --backgrounds-only
A nice example string to use: fluxbox-fdo-menugen.py --with-icons --with-backgrounds --bg-path=~/some/path
To update only the backgrounds: fluxbox-fdo-menugen.py --bg-path=~/some/path --backgrounds-only
"""
__author__ = "Rudolf Kastl , Antonio Gomes, Michael Rice"
__version__ = "$Revision: 13 $"
__date__ = "$Date: 2010/01/20 21:50:10 $"
__license__ = "GPL"
import os,re,sys,glob,getopt
import xdg.Menu,xdg.DesktopEntry,xdg.IconTheme
from os.path import isfile
def usage():
    # The module docstring doubles as the CLI help text.
    print __doc__
def header(wm="fluxbox"):
    """Return the static top section of the fluxbox menu.

    *wm* is accepted for symmetry with footer()/checkWm() but is unused.
    """
    return """
[begin] (Fluxbox)
[exec] (Web Browser) {htmlview}
[exec] (Email) {evolution}
[exec] (Terminal) {$TERM}
[exec] (Irc) {xchat}
[separator]\n"""
def footer(wm="fluxbox"):
    """Return the static bottom section (styles, tools, restart/exit) of the menu.

    *wm* is accepted for symmetry with header()/checkWm() but is unused.
    """
    return """
[submenu] (Fluxbox Menu)
[config] (Configure)
[submenu] (System Styles) {Choose a style...}
[stylesdir] (/usr/share/fluxbox/styles)
[stylesdir] (/usr/share/commonbox/styles/)
[end]
[submenu] (User Styles) {Choose a style...}
[stylesdir] (~/.fluxbox/styles)
[end]
[workspaces] (Workspace List)
[submenu] (Tools)
[exec] (Window name) {xprop WM_CLASS|cut -d \" -f 2|xmessage -file - -center}
[exec] (Screenshot - JPG) {import screenshot.jpg && display -resize 50% screenshot.jpg}
[exec] (Screenshot - PNG) {import screenshot.png && display -resize 50% screenshot.png}
[exec] (Run) {fbrun }
[exec] (Regen Menu) {fluxbox-generate_menu --with-icons}
[end]
[submenu] (Window)
[restart] (kde) {startkde}
[restart] (openbox) {openbox}
[restart] (gnome) {gnome-session}
[end]
[exec] (Lock screen) {xscreensaver-command -lock}
[commanddialog] (Fluxbox Command)
[reconfig] (Reload config)
[restart] (Restart)
[separator]
[exit] (Exit)
[end]
[end]\n"""
def checkWm(entry, wm="fluxbox"):
    # Decide whether *entry* should appear under this window manager.
    # Any non-empty OnlyShowIn list hides it (fluxbox is never listed there).
    if entry.DesktopEntry.getOnlyShowIn() != []:
        entry.Show = False
    if entry.DesktopEntry.getNotShowIn() != []:
        if isinstance(entry, xdg.Menu.MenuEntry):
            if wm in entry.DesktopEntry.getNotShowIn():
                entry.Show = False
            else:
                # NOTE(review): indentation in the original is ambiguous;
                # this binds the else to the `wm in ...` test (upstream form).
                entry.Show = True
def findIcon(icon, theme):
    """Finds the path and filename for the given icon name
    e.g. gaim --> /usr/share/pixmaps/gaim.png
    e.g. fart.png --> /usr/share/pixmaps/fart.png
    """
    found = str(xdg.IconTheme.getIconPath(icon, 48, theme, ["png", "xpm"]))
    if found == "None":
        # getIconPath() returned None: emit an empty icon reference.
        found = ""
    return found.encode('utf8')
def parseMenu(menu,wm,use_icons,theme,depth=1):
if use_icons:
print "%s[submenu] (%s) <%s> " % ( (depth*"\t"), menu.getName().encode('utf8'), findIcon(menu.getIcon(), theme) )
else:
print "%s[submenu] (%s) " % ( (depth*"\t"), menu.getName().encode('utf8'), )
depth += 1
for entry in menu.getEntries():
if isinstance(entry, xdg.Menu.Menu):
parseMenu(entry,wm,use_icons,theme,depth)
elif isinstance(entry, xdg.Menu.MenuEntry):
checkWm(entry,wm)
if entry.Show == False: continue
if use_icons:
print "%s[exec] (%s) {%s} <%s> " % ( (depth*"\t"), entry.DesktopEntry.getName().encode("utf8"), entry.DesktopEntry.getExec().split()[0], findIcon(entry.DesktopEntry.getIcon(), theme) )
else:
print "%s[exec] (%s) {%s} " % ( (depth*"\t"), entry.DesktopEntry.getName().encode("utf8"), entry.DesktopEntry.getExec().split()[0] )
elif isinstance(entry,xdg.Menu.Separator):
print "%s[separator]" % (depth*"\t")
elif isinstance(entry.xdg.Menu.Header):
print "%s%s" % ( (depth*"\t"), entry.Name )
depth -= 1
print "%s[end]" % (depth*"\t")
def get_bgimgs_and_parse(xPath):
    """(Re)build ~/.fluxbox/bgmenu with one fbsetbg entry per wallpaper.

    Scans the standard background directories (plus *xPath*, when given),
    paginates 25 entries per submenu page, and always terminates and
    closes the menu file.

    Fixes vs. the original:
    - removed the redundant outer ``for dir in bg_paths`` loop, which
      rescanned every directory len(bg_paths) times (and shadowed ``dir``);
    - removed a stray ``print "Hello"`` debug statement;
    - the closing ``[end]`` lines and ``close()`` only ran when the image
      countdown hit exactly zero, which never happened when a directory
      contained non-image files -- they now run unconditionally after the
      loop (the dead KeyError handler is gone for the same reason).
    """
    bgmenu_path = os.path.expanduser("~/.fluxbox/bgmenu")
    try:
        if isfile(bgmenu_path):
            os.unlink(bgmenu_path)
    except OSError:
        pass
    # Map image filename -> containing directory (later paths win on clash).
    h = {}
    bg_paths = ["~/.fluxbox/backgrounds", "/usr/share/wallpapers",
                "/usr/share/backgrounds", "/usr/share/backgrounds/images"]
    if xPath is not None:
        bg_paths.append(xPath)
    for imgpth in bg_paths:
        try:
            for i in os.listdir(os.path.expanduser(imgpth)):
                h[i] = imgpth
        except OSError:
            # Directory missing or unreadable: just skip it.
            pass
    bgMenu = open(bgmenu_path, 'w+')
    bgMenu.write("[submenu] (Backgrounds)\n")
    bgMenu.write("[submenu] (Backgrounds) {Set Your Background}\n")
    bgMenu.write("\t[exec] (Random Image) {fbsetbg -r ~/.fluxbox/backgrounds}\n")
    types = ["png", "jpg", "jpeg", "gif"]
    entries_on_page = 0
    page_num = 1
    for i in h.keys():
        if i.split(".")[-1].lower() not in types:
            continue
        bgMenu.write("\t[exec]\t(" + i + ") {fbsetbg -f " + h[i] + "/" + i + "}\n")
        entries_on_page += 1
        if entries_on_page == 25:
            # Start a new submenu page every 25 wallpapers.
            page_num += 1
            bgMenu.write("[end]\n[submenu] (Backgrounds " + str(page_num) +
                         ") {Set Your Background}\n")
            entries_on_page = 0
    bgMenu.write("[end]\n[end]\n")
    bgMenu.close()
def main(argv):
# Setting the default values
wm = "fluxbox"
file = "~/.fluxbox/menu"
use_icons = False
use_bg = False
bg_Xpath = False
theme = "gnome"
lang = os.getenv("LANG","C")
file = os.path.expanduser("~/.fluxbox/menu")
do_submenu = False
use_stdout = False
try:
opts, args = getopt.getopt(argv, "hf:dl:d", ["help","lang=","file=","with-icons","stdout",\
"theme=","submenu","with-backgrounds","backgrounds-only","bg-path="])
except getopt.GetoptError:
usage()
sys.exit(2)
for opt, arg in opts:
if opt in ("-h", "--help"):
usage()
sys.exit()
elif opt in ("-l", "--lang"):
lang = arg
elif opt in ("-f", "--file"):
file = os.path.expanduser(arg)
elif opt == '--with-icons':
use_icons = True
elif opt in ("-t", "--theme"):
theme = arg
elif opt == '--stdout':
use_stdout = True
elif opt == '--stdout':
file = sys.stdout
elif opt == '--bg-path':
bg_Xpath = True
xPath = os.path.expanduser(arg)
elif opt == '--with-backgrounds':
use_bg = True
elif opt == '--backgrounds-only':
if bg_Xpath:
get_bgimgs_and_parse(xPath)
else:
get_bgimgs_and_parse(None)
raise SystemExit
elif opt == '--submenu':
do_submenu = True
if not use_stdout:
fsock = open(file,'w')
saveout = sys.stdout
sys.stdout = fsock
menu=xdg.Menu.parse()
# is done automatically now
# menu.setLocale(lang)
if not do_submenu:
print header()
parseMenu(menu,wm,use_icons,theme)
if not do_submenu and use_bg and bg_Xpath:
get_bgimgs_and_parse(xPath)
print "[include] (~/.fluxbox/bgmenu)"
if not do_submenu and use_bg and not bg_Xpath:
print "[include] (~/.fluxbox/bgmenu)"
get_bgimgs_and_parse(None)
if not do_submenu:
print footer()
if not use_stdout:
sys.stdout = saveout
# print menu
# Entry point: forward the CLI arguments (sans program name) to main().
if __name__ == "__main__":
    main(sys.argv[1:])
| Python |
#
# main.py
# Fluke
#
# Created by Dmitry Kichenko on 24/08/09.
# Copyright University of Toronto 2009. All rights reserved.
#
# Application entry point. Importing the delegate/controller modules below
# registers their Objective-C classes with the runtime before the nib loads.
#import modules required by application
import objc
import AppKit
import Foundation
from PyObjCTools import AppHelper
# import modules containing classes required to start application and load MainMenu.nib
import FlukeAppDelegate, FlukeController
#f = Fluke.Fluke.alloc().init()
#f.openFileDialog()
# pass control to AppKit
AppHelper.runEventLoop()
| Python |
#!/opt/local/bin/python
#
# FlukeAppDelegate.py
# Fluke
#
# Created by Dmitry Kichenko on 24/08/09.
# Copyright University of Toronto 2009. All rights reserved.
#
from Foundation import *
from AppKit import *
import sys
from FlukeController import *
class FlukeAppDelegate(NSObject):
    # Application delegate: logs startup and makes the app quit as soon as
    # its only window is closed.
    def applicationDidFinishLaunching_(self, sender):
        NSLog("Application finished launching.")

    def applicationShouldTerminateAfterLastWindowClosed_(self, sender):
        # Single-window utility app: closing the window means quitting.
        return True
| Python |
#!/opt/local/bin/python
#
# FlukeAppDelegate.py
# Fluke
#
# Created by Dmitry Kichenko on 24/08/09.
# Copyright University of Toronto 2009. All rights reserved.
#
from Foundation import *
from AppKit import *
import sys
from FlukeController import *
class FlukeAppDelegate(NSObject):
    # Duplicate of the delegate above (repeated source file in this dump).
    def applicationDidFinishLaunching_(self, sender):
        NSLog("Application finished launching.")

    def applicationShouldTerminateAfterLastWindowClosed_(self, sender):
        # Single-window utility app: closing the window means quitting.
        return True
| Python |
"""
Fluke lets you play FLACs in iTunes. This is the command-line version of it.
USAGE
python flukeapp.py file1[ file2 file3...] [--convert]
file1,file2.. can be directories
--convert automatically convert imported FLACs to Apple Lossless
"""
import sys,os
# py2app fix - make sure we're going to find those third part libs:
# prepend the bundle's lib-dynload directory to the module search path.
resPath = sys.argv[0].split("/")
resPath.pop()
sys.path.insert(0, os.path.join('/'.join([a for a in resPath]), 'lib', 'python2.6', 'lib-dynload'))
import fluke
def cleanUpSysArgs(args):
# Remove the script itself as an argument
del(args[0])
# Clean up the -psn argument Finder adds
for i in range(len(args)):
if args[i].startswith('-psn'): del(f)
args[i] = args[i].decode('utf-8')
return args
if __name__ == "__main__":
    # Strip the program name / Finder cruft, then import everything given.
    sys.argv = cleanUpSysArgs(sys.argv)
    if len(sys.argv) == 0:
        # NOTE(review): prints usage but still falls through to the import
        # below with an empty file list -- confirm that is intentional.
        print(__doc__)
    files = fluke.FLAC(sys.argv)
    files.itunesAdd()
    if "--convert" in sys.argv:
        files.itunesConvert()
| Python |
# -*- coding: utf-8 -*-
"""
Module for dealing with iTunes stuff.
"""
import sys, os
# Fluke specific modules
from appscript import *
from mutagen.flac import FLAC, FLACNoHeaderError
from fluke.exceptions import *

# Single shared appscript handle to the running iTunes application.
itunesApp = app(u'iTunes')
def add(files):
    """Import *files* (POSIX paths) into the main iTunes library.

    Returns the appscript track reference(s) iTunes reports back.
    Raises ItunesNotInitialized when iTunes refuses the Apple Event.
    """
    filesASPaths = [getASPath(f) for f in files]
    try:
        # Fixed: the target playlist must be looked up on the running
        # application instance (itunesApp), not on the appscript `app`
        # class itself.
        tracks = itunesApp.add(filesASPaths, to=itunesApp.library_playlists[1])
    except RuntimeError:
        raise ItunesNotInitialized
    return tracks
def fixMetadata(files):
    """Fill in track numbers for freshly imported tracks.

    *files* is a sequence of (path, track) pairs: the on-disk FLAC path and
    the corresponding iTunes track reference. Tracks with no playable
    duration are deleted again and reported as format errors.
    """
    print('FIXING METADATA: ' + str(files))
    for path, track in files:
        try:
            metadata = FLAC(path)  # metadata pulled from .flac
        except (FLACNoHeaderError):
            # Lack of proper FLAC header is not necessarily a bad FLAC
            metadata = {}
        if isSong(track):
            # has_key: Python 2-era mutagen mapping API.
            if metadata.has_key('tracknumber'):
                setTrackNumber(track, metadata['tracknumber'][0])
        else:
            deleteTrack(track)
            raise ItunesFormatError(path + " is not a FLAC file.")
def setTrackNumber(track, tracknumber):
    """Write the track number (and the total count when given as 'n/total')."""
    if '/' in tracknumber:
        # Both number and count supplied, e.g. "3/12".
        number, count = tracknumber.split('/')
        track.track_count.set(count)
        track.track_number.set(number)
    else:
        track.track_number.set(tracknumber)
def convert(f, deleteFlac=False):
    """Convert an imported track with the current encoder.

    When *deleteFlac* is true, the original track is removed afterwards.
    Raises ItunesReferenceError (via validateTrack) for non-track arguments.
    """
    validateTrack(f)
    f.convert()
    if deleteFlac:
        deleteTrack(f)
def isSong(f):
    """Return True when the track reference has a playable duration."""
    validateTrack(f)
    duration = f.time.get()
    # appscript hands back a missing-value object (which has a .name
    # attribute) when the track has no duration.
    return not hasattr(duration, 'name')
def validateTrack(f):
    """Checks if argument is a valid appscript reference & if the track is in the iTunes library"""
    from appscript.reference import Reference
    if not (isinstance(f, Reference) and hasattr(f, 'time')):
        raise ItunesReferenceError("Supplied argument is not a valid iTunes track.")
    return True
def deleteTrack(f):
    """Delete a track from HDD and iTunes library"""
    # Grab the on-disk location before the library entry disappears.
    path = f.location().path
    f.delete()
    os.remove(path)
def getEncoder(type=None):
    """Return the active encoder reference.

    With type == 'lossless', return the Apple Lossless encoder instead.
    """
    if type == 'lossless':
        return itunesApp.encoders[u'Lossless Encoder'].get()
    return itunesApp.current_encoder.get()
def setEncoder(enc):
    """Set an encoder. Takes reference to encoder, e.g. app('iTunes').encoders.get()[0]"""
    itunesApp.current_encoder.set(enc)
def getASPath(path):
    """Return the AppleScript (colon-delimited HFS) form of a POSIX *path*."""
    path = path.replace('\ ', ' ')  # AppleScript doesn't escape spaces
    parts = path.split('/')
    if 'Volumes' in parts:
        # Not on the startup disk: the volume name leads the HFS path.
        return ':'.join(parts[2:])
    # Otherwise, prepend startup disk name to path
    return ASStartupDisk() + path.replace("/", ":")
def ASStartupDisk():
    """Return current startup disk. No relation to asses."""
    # Asks the Finder, so this requires a scriptable Finder session.
    return app(u'Finder').startup_disk.name.get()
| Python |
"""
All exceptions pooled together for easy importing later
"""
class FlukeException(Exception): pass
class ItunesReferenceError(FlukeException): pass
class ItunesFormatError(FlukeException): pass
class ItunesNotInitialized(FlukeException): pass
class GUIFormatError(FlukeException): pass
class GUIItunesRestart(FlukeException): pass
| Python |
"""
Main Fluke class. Accepts a string or a list of files upon initiation.
"""
__all__ = ['itunes', 'exceptions']
import sys,os,types
import itunes
from fluke.exceptions import *
from appscript.reference import CommandError
class FLAC(object):
    """Wraps a batch of FLAC paths and drives their import into iTunes.

    ``self.files`` is a list of per-file lists: ``[path]`` before import
    and ``[path, itunes_track_reference]`` after itunesAdd().
    """

    def __init__(self, files=None):
        self.files = []  # list of lists with paths and references
        if files:
            self.files = [[f] for f in self.processFiles(self.argsToList(files))]

    def itunesAdd(self):
        """Add processed tracks to iTunes and record their track references."""
        self.setFileTypeToOggs(self.filesPaths())  # iTunes doesn't eat them files otherwise
        try:
            tracks = itunes.add(self.filesPaths())
        except (ItunesFormatError, CommandError) as e:
            raise GUIFormatError(e.args[0])
        # A single file comes back as a bare reference; normalize to a list.
        tracks = (type(tracks) != types.ListType) and [tracks] or tracks
        if len(tracks):
            for i in range(len(self.files)):
                self.files[i] = self.files[i][:1]  # kill the previous reference if there is one
                self.files[i].append(tracks[i])
        else:
            raise GUIItunesRestart("Couldn't add the file(s). Please restart iTunes.")
        try:
            itunes.fixMetadata(self.filesList())
        except ItunesFormatError as e:
            raise GUIFormatError(e.args[0])

    def itunesConvert(self):
        """Convert added files to Apple Lossless via iTunes."""
        originalEnc = itunes.getEncoder()  # save user's encoder and switch to lossless
        itunes.setEncoder(itunes.getEncoder('lossless'))
        for path, track in self.files:
            # Fixed: itunes.convert() takes the track reference and a
            # `deleteFlac` keyword; the old call passed the whole
            # [path, track] pair with a nonexistent `delete` keyword
            # (guaranteed TypeError).
            itunes.convert(track, deleteFlac=True)
        itunes.setEncoder(originalEnc)  # set the encoder back to w/e user had it set to

    def filesList(self, files=None):
        """Get and set list of files to process"""
        if files:
            self.__init__(files)
        return self.files

    def filesFilenames(self):
        """Return a list of filenames of all files being processed"""
        return [os.path.splitext(os.path.split(f)[1])[0] for f in self.filesPaths()]

    def filesPaths(self):
        """Return list of paths to files"""
        return [f[0] for f in self.files]

    def filesItunes(self):
        """Return list of references to added songs in iTunes library"""
        return [f[1] for f in self.files]

    def processFiles(self, files):
        """Recurse through dirs if any are given and filter out non-flacs."""
        results = []
        for f in files:
            try:
                f = unicode(f, errors='replace')
            except (TypeError):
                pass  # already unicode
            if os.path.isdir(f):
                flacs = [os.path.abspath(os.path.join(f, s))
                         for s in os.listdir(f) if os.path.splitext(s)[1] == '.flac']
                results.extend(flacs)
            else:
                if os.path.isfile(f) and os.path.splitext(f)[1] == '.flac':
                    results.append(os.path.abspath(f))
        return results

    def argsToList(self, args):
        """Check if argument was a list or a string. Always return a list"""
        if type(args) == types.StringType:
            return [args]
        else:
            return list(args)

    def setFileTypeToOggs(self, fn):
        """Set filetype to OggS to allow playback in iTunes"""
        from Carbon import File, Files
        for f in fn:
            fl, is_dir = File.FSPathMakeRef(f.encode('utf-8'))
            if is_dir:
                return False
            ci, _fn, fsspc, pfl = fl.FSGetCatalogInfo(Files.kFSCatInfoFinderInfo)
            finfo = fsspc.FSpGetFInfo()
            finfo.Type = 'OggS'
            fsspc.FSpSetFInfo(finfo)
        return True

    def __str__(self):
        return str(self.files)

    def __repr__(self):
        return str(self.files)

    def __len__(self):
        return len(self.files)
| Python |
"""
Fluke controller
"""
# PyObjC modules
from objc import YES, NO, IBAction, IBOutlet
from Foundation import *
from AppKit import *
from PyObjCTools import AppHelper
import sys,os
import fluke, flukeapp
from fluke.exceptions import *
class FlukeController(NSObject):
    """Window controller for the Fluke GUI.

    Outlets are connected from MainMenu.nib; ``files`` holds the
    fluke.FLAC wrapper and ``filenames`` the dictionaries backing the
    file table view.
    """

    # Interface Builder outlets.
    mainWindow = IBOutlet()
    myAlertView = IBOutlet()
    fileCount = IBOutlet()
    expandedFiles = IBOutlet()
    buttonGo = IBOutlet()
    checkboxDelete = IBOutlet()
    progressIndicator = IBOutlet()
    arrayController = IBOutlet()
    files = []
    filenames = []

    def awakeFromNib(self):
        """Wake up, Neo"""
        self.convertToLossless = False
        self.deleteAfterConversion = False
        sys.argv = flukeapp.cleanUpSysArgs(sys.argv)
        self.windowHeight_(self, 130)
        self.buttonGo.setKeyEquivalent_(u'\r')  # assign GO to return key
        # Open fileOpen dialog if no files were fed in
        if not sys.argv:
            self.open_(self)
        else:
            self.fillList_(self, sys.argv)

    @IBAction
    def open_(self, sender):
        """Open dialogue for when Fluke was opened on its own"""
        panel = NSOpenPanel.openPanel()
        panel.setCanChooseDirectories_(True)
        panel.setAllowsMultipleSelection_(True)
        result = panel.runModalForTypes_(('flac',))
        if result == NSOKButton:
            self.fillList_(self, panel.filenames())
        else:
            # Nothing selected and nothing on argv: quit.
            NSApp.terminate_(self)

    @IBAction
    def addFiles_(self, sender):
        """Import the queued files into iTunes, spinning the progress wheel."""
        self.toggleProgressBar_(self)
        try:
            self.files.itunesAdd()
        except GUIFormatError as e:
            self.createError_message_(self, e.args[0])
        self.toggleProgressBar_(self)

    def fillList_(self, sender, files):
        """Fill GUI out with filenames. Takes list of files."""
        self.mainWindow.makeKeyAndOrderFront_(self)
        self.files = fluke.FLAC(files)
        self.filenames = [NSDictionary.dictionaryWithDictionary_(
            {
                'filename': os.path.splitext(os.path.split(f)[1])[0],
                'filepath': f
            }) for f in self.files.filesPaths()]
        self.arrayController.rearrangeObjects()  # refresh the table with new values
        self.setTextFileCount_(self, len(self.files))

    # GUI methods
    @IBAction
    def createError_message_(self, sender, message):
        """Alert method. Serves as a reference as I'll forget it otherwise"""
        # Fixed: the decorator read `@objc.IBAction`, but only the IBAction
        # name is imported from objc above (the bare `objc` module is not in
        # scope) -- that raised NameError as soon as the class was created.
        alert = NSAlert.alloc().init()
        alert.addButtonWithTitle_("OK")
        alert.setMessageText_('Uh oh')
        alert.setInformativeText_(message)
        alert.setAlertStyle_(NSWarningAlertStyle)
        buttonPressed = alert.beginSheetModalForWindow_modalDelegate_didEndSelector_contextInfo_(
            self.mainWindow, self, False, 0)

    @IBAction
    def toggleFileList_(self, sender):
        """Set the height of the file list NSTableView"""
        fileListHeight = 220
        if sender.state() == 1:
            self.expandedFiles.setHidden_(False)
            self.expandedFilesHeight_(self, fileListHeight)
            self.windowHeight_(self, 350)
        else:
            self.expandedFilesHeight_(self, fileListHeight)
            self.windowHeight_(self, 130)
            self.expandedFiles.setHidden_(True)

    @IBAction
    def toggleProgressBar_(self, sender):
        """Turn the spinning wheel on and off"""
        if self.progressIndicator.isHidden():
            self.progressIndicator.setHidden_(NO)
            self.progressIndicator.startAnimation_(self)
        else:
            self.progressIndicator.setHidden_(YES)
            self.progressIndicator.stopAnimation_(self)

    @IBAction
    def toggleButtonGo_(self, sender):
        """Disable the GO button when adding"""
        pass

    @IBAction
    def toggleDeleteAfterConvert(self, sender):
        """Set whether we're going to delete files upon conversion"""
        # Fixed: `sender.state` is a method; `(sender.state == 1)` compared
        # the bound method to 1 and therefore was always False, so the
        # checkbox never had any effect.
        # NOTE(review): action methods usually end in '_' to map onto a
        # ':'-selector -- verify this one's nib connection.
        self.deleteAfterConversion = sender.state() == 1

    @IBAction
    def toggleConvertToLossless_(self, sender):
        """Toggle the Delete option when selecting Conver to lossless"""
        if sender.state() == 1:
            self.checkboxDelete.setEnabled_(True)
        else:
            self.checkboxDelete.setState_(0)
            self.checkboxDelete.setEnabled_(False)

    def setTextFileCount_(self, sender, count):
        """Set how many files are being converted"""
        self.fileCount.setStringValue_('Adding ' + str(count) + ' files')

    def expandedFilesHeight_(self, sender, height):
        """Adjust the height of the file list"""
        origin = self.expandedFiles.frame().origin
        size = self.expandedFiles.frame().size
        NSLog(str(self.mainWindow.frame()))
        #self.expandedFiles.setFrame_(NSMakeRect(origin.x,origin.y-(height-size.height),508,height))

    def windowHeight_(self, sender, height):
        """Animate the window height as we toggle with expandedFilesHeight()"""
        origin = self.mainWindow.frame().origin
        size = self.mainWindow.frame().size
        self.mainWindow.setFrame_display_animate_(NSMakeRect(origin.x,
            origin.y - ((height - size.height) / 2), size.width, height), YES, YES)

    def filenamesToDictionary_(sender, filename, filepath):
        # NOTE(review): the first parameter plays the role of `self`; the
        # method appears unused in this file.
        result = {}
        result['filename'] = filename
        result['filepath'] = filepath
        return result
# Smoke log when run directly; the real entry point is main.py.
if __name__ == "__main__":
    NSLog("hi!!!")
| Python |
#
# main.py
# Fluke
#
# Created by Dmitry Kichenko on 24/08/09.
# Copyright University of Toronto 2009. All rights reserved.
#
# Duplicate of the entry-point script above (repeated source file in this
# dump): importing the delegate/controller modules registers their
# Objective-C classes before the nib is loaded.
#import modules required by application
import objc
import AppKit
import Foundation
from PyObjCTools import AppHelper
# import modules containing classes required to start application and load MainMenu.nib
import FlukeAppDelegate, FlukeController
#f = Fluke.Fluke.alloc().init()
#f.openFileDialog()
# pass control to AppKit
AppHelper.runEventLoop()
| Python |
#
# FlukeAppDelegate.py
# Fluke
#
# Created by Dmitry Kichenko on 24/08/09.
# Copyright University of Toronto 2009. All rights reserved.
#
from Foundation import *
from AppKit import *
import sys
from FlukeController import *
class FlukeAppDelegate(NSObject):
    # Minimal delegate variant: only logs that startup finished.
    def applicationDidFinishLaunching_(self, sender):
        NSLog("Application finished launching.")
| Python |
"""
Fluke lets you play FLACs in iTunes. This is the command-line version of it.
USAGE
python flukeapp.py file1[ file2 file3...] [--convert]
file1,file2.. can be directories
--convert automatically convert imported FLACs to Apple Lossless
"""
import sys,os
# py2app fix - make sure we're going to find those third part libs:
# prepend the bundle's lib-dynload directory to the module search path.
resPath = sys.argv[0].split("/")
resPath.pop()
sys.path.insert(0, os.path.join('/'.join([a for a in resPath]), 'lib', 'python2.5', 'lib-dynload'))
import fluke
def cleanUpSysArgs(args):
    """Normalize an argv-style list in place and return it.

    Drops the program name (args[0]), removes the ``-psn_*`` argument the
    Finder appends when launching from the GUI, and decodes byte arguments
    as UTF-8.

    Fixed: the original removed -psn arguments with ``del(f)`` -- ``f`` was
    never defined, so hitting a -psn argument raised NameError.
    """
    del args[0]
    cleaned = []
    for arg in args:
        if arg.startswith('-psn'):
            continue  # Finder's process-serial-number argument
        if isinstance(arg, bytes):
            arg = arg.decode('utf-8')
        cleaned.append(arg)
    args[:] = cleaned  # mutate in place, preserving the original contract
    return args
if __name__ == "__main__":
    # Strip the program name / Finder cruft, then import everything given.
    sys.argv = cleanUpSysArgs(sys.argv)
    if len(sys.argv) == 0:
        # NOTE(review): prints usage but still falls through to the import
        # below with an empty file list -- confirm that is intentional.
        print(__doc__)
    files = fluke.FLAC(sys.argv)
    files.itunesAdd()
    if "--convert" in sys.argv:
        files.itunesConvert()
| Python |
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Module for dealing with iTunes stuff.
"""
import sys, os
# Fluke specific modules
from appscript import *
from mutagen.flac import FLAC, FLACNoHeaderError

# Single shared appscript handle to the running iTunes application.
itunesApp = app(u'iTunes')

# A non-track appscript reference was handed to a track helper.
class ItunesReferenceError(Exception): pass
# An imported file turned out not to be a playable FLAC.
class ItunesFormatError(Exception): pass
def add(files):
    """Import *files* (POSIX paths) into the main iTunes library and
    return the appscript track reference(s)."""
    filesASPaths = [getASPath(f) for f in files]
    # Fixed: the target playlist must be looked up on the running
    # application instance (itunesApp), not on the appscript `app` class.
    tracks = itunesApp.add(filesASPaths, to=itunesApp.library_playlists[1])
    return tracks
def fixMetadata(files):
    """Fill in track numbers for freshly imported tracks.

    *files* is a sequence of (path, track) pairs: the on-disk FLAC path
    and the corresponding iTunes track reference. Non-songs are deleted
    again and reported as format errors.
    """
    for path, track in files:
        try:
            metadata = FLAC(path)  # metadata pulled from .flac
        except (FLACNoHeaderError):
            # Lack of proper FLAC header is not necessarily a bad FLAC
            metadata = {}
        if isSong(track):
            if metadata.has_key('tracknumber'):
                setTrackNumber(track, metadata['tracknumber'][0])
        else:
            deleteTrack(track)
            # Fixed: the original raised with `filename`, a name that does
            # not exist in this scope (NameError); the loop variable is
            # `path`.
            raise ItunesFormatError(path + " is not a FLAC file.")
def setTrackNumber(track, tracknumber):
    """Write the track number (and the total count when given as 'n/total')."""
    if '/' in tracknumber:
        # Both number and count supplied, e.g. "3/12".
        number, count = tracknumber.split('/')
        track.track_count.set(count)
        track.track_number.set(number)
    else:
        track.track_number.set(tracknumber)
def convert(f, deleteFlac=False):
    """Convert imported tracks into Apple Lossless"""
    validateTrack(f)  # raises unless f is a live iTunes track reference
    f.convert()
    # Optionally drop the source FLAC once converted.
    if deleteFlac: deleteTrack(f)
def isSong(f):
    """Checks if track has a length i.e. whether it's an actual song that can be played"""
    validateTrack(f)
    # appscript hands back a missing-value object (which has a .name
    # attribute) when the track has no duration.
    return not hasattr(f.time.get(), 'name') and True or False
def validateTrack(f):
    """Checks if argument is a valid appscript reference & if the track is in the iTunes library"""
    # Imported here so the module loads even without appscript internals.
    from appscript.reference import Reference
    if isinstance(f, Reference) and hasattr(f, 'time'):
        return True
    else:
        raise ItunesReferenceError("Supplied argument is not a valid iTunes track.")
def deleteTrack(f):
    """Delete a track from HDD and iTunes library"""
    # Grab the on-disk location before the library entry disappears.
    path = f.location().path
    f.delete()
    os.remove(path)
def getEncoder(type=None):
    """Get currently set file encoder"""
    # type == 'lossless' selects the Apple Lossless encoder explicitly.
    if type == 'lossless':
        return itunesApp.encoders[u'Lossless Encoder'].get()
    else:
        return itunesApp.current_encoder.get()
def setEncoder(enc):
    """Set an encoder. Takes reference to encoder, e.g. app('iTunes').encoders.get()[0]"""
    itunesApp.current_encoder.set(enc)
def getASPath(path):
    """Return the AppleScript (colon-delimited HFS) form of a POSIX *path*."""
    path = path.replace('\ ', ' ')  # AppleScript doesn't escape spaces
    parts = path.split('/')
    if 'Volumes' in parts:
        # Not on the startup disk: the volume name leads the HFS path.
        return ':'.join(parts[2:])
    # Otherwise, prepend startup disk name to path
    return ASStartupDisk() + path.replace("/", ":")
def ASStartupDisk():
    """Return current startup disk. No relation to asses."""
    # Asks the Finder, so this requires a scriptable Finder session.
    return app(u'Finder').startup_disk.name.get()
| Python |
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Module for dealing with iTunes stuff.
"""
# Third duplicate of the itunes helper module in this dump.
import sys, os
# Fluke specific modules
from appscript import *
from mutagen.flac import FLAC, FLACNoHeaderError

# Single shared appscript handle to the running iTunes application.
itunesApp = app(u'iTunes')

# A non-track appscript reference was handed to a track helper.
class ItunesReferenceError(Exception): pass
# An imported file turned out not to be a playable FLAC.
class ItunesFormatError(Exception): pass
def add(files):
    """Import *files* (POSIX paths) into the main iTunes library and
    return the appscript track reference(s)."""
    filesASPaths = [getASPath(f) for f in files]
    # Fixed: the target playlist must be looked up on the running
    # application instance (itunesApp), not on the appscript `app` class.
    tracks = itunesApp.add(filesASPaths, to=itunesApp.library_playlists[1])
    return tracks
def fixMetadata(files):
    """Fill in track numbers for freshly imported tracks.

    *files* is a sequence of (path, track) pairs: the on-disk FLAC path
    and the corresponding iTunes track reference. Non-songs are deleted
    again and reported as format errors.
    """
    for path, track in files:
        try:
            metadata = FLAC(path)  # metadata pulled from .flac
        except (FLACNoHeaderError):
            # Lack of proper FLAC header is not necessarily a bad FLAC
            metadata = {}
        if isSong(track):
            if metadata.has_key('tracknumber'):
                setTrackNumber(track, metadata['tracknumber'][0])
        else:
            deleteTrack(track)
            # Fixed: the original raised with `filename`, a name that does
            # not exist in this scope (NameError); the loop variable is
            # `path`.
            raise ItunesFormatError(path + " is not a FLAC file.")
def setTrackNumber(track, tracknumber):
    # *tracknumber* may be "n" or "n/total"; write both fields when given.
    if '/' in tracknumber:  # if both the track # and count are given, split them
        tracknumber, trackcount = tracknumber.split('/')
        track.track_count.set(trackcount)
    track.track_number.set(tracknumber)
def convert(f, deleteFlac=False):
    """Convert imported tracks into Apple Lossless"""
    validateTrack(f)  # raises unless f is a live iTunes track reference
    f.convert()
    # Optionally drop the source FLAC once converted.
    if deleteFlac: deleteTrack(f)
def isSong(f):
    """Checks if track has a length i.e. whether it's an actual song that can be played"""
    validateTrack(f)
    # appscript hands back a missing-value object (which has a .name
    # attribute) when the track has no duration.
    return not hasattr(f.time.get(), 'name') and True or False
def validateTrack(f):
    """Checks if argument is a valid appscript reference & if the track is in the iTunes library"""
    # Imported here so the module loads even without appscript internals.
    from appscript.reference import Reference
    if isinstance(f, Reference) and hasattr(f, 'time'):
        return True
    else:
        raise ItunesReferenceError("Supplied argument is not a valid iTunes track.")
def deleteTrack(f):
    """Delete a track from HDD and iTunes library"""
    # Grab the on-disk location before the library entry disappears.
    path = f.location().path
    f.delete()
    os.remove(path)
def getEncoder(type=None):
    """Get currently set file encoder"""
    # type == 'lossless' selects the Apple Lossless encoder explicitly.
    if type == 'lossless':
        return itunesApp.encoders[u'Lossless Encoder'].get()
    else:
        return itunesApp.current_encoder.get()
def setEncoder(enc):
    """Set an encoder. Takes reference to encoder, e.g. app('iTunes').encoders.get()[0]"""
    itunesApp.current_encoder.set(enc)
def getASPath(path):
    """Return the AppleScript (colon-delimited HFS) form of a POSIX *path*."""
    path = path.replace('\ ', ' ')  # AppleScript doesn't escape spaces
    parts = path.split('/')
    if 'Volumes' in parts:
        # Not on the startup disk: the volume name leads the HFS path.
        return ':'.join(parts[2:])
    # Otherwise, prepend startup disk name to path
    return ASStartupDisk() + path.replace("/", ":")
def ASStartupDisk():
    """Return current startup disk. No relation to asses."""
    # Asks the Finder, so this requires a scriptable Finder session.
    return app(u'Finder').startup_disk.name.get()
| Python |
"""
Main Fluke class. Accepts a string or a list of files upon initiation.
"""
__all__ = ['itunes']
import sys,os
import itunes
class FLAC(object):
    """Older Fluke core class: imports a batch of FLAC paths into iTunes.

    ``self.files`` is a list of per-file lists: ``[path]`` before import
    and ``[path, itunes_track_reference]`` after itunesAdd().
    """

    def __init__(self, files=None):
        self.files = []  # list of lists with paths and references
        if files:
            self.files = [[f] for f in self.processFiles(self.argsToList(files))]

    def itunesAdd(self):
        """Add processed tracks to iTunes"""
        self.setFileTypeToOggs(self.filesPath())
        tracks = itunes.add(self.filesPath())
        if tracks:
            for i in range(len(self.files)):
                self.files[i].append(tracks[i])
        else:
            print("Couldn't add the file(s). Please restart iTunes or reinstall Fluke.")
            return False
        itunes.fixMetadata(self.filesList())

    def itunesConvert(self):
        """Convert added files to Apple Lossless via iTunes."""
        originalEnc = itunes.getEncoder()  # save user's encoder and switch to lossless
        itunes.setEncoder(itunes.getEncoder('lossless'))
        for path, track in self.files:
            # Fixed: itunes.convert() takes the track reference and a
            # `deleteFlac` keyword; the old call passed the whole
            # [path, track] pair with a nonexistent `delete` keyword
            # (guaranteed TypeError).
            itunes.convert(track, deleteFlac=True)
        itunes.setEncoder(originalEnc)  # set the encoder back to w/e user had it set to

    def filesList(self, files=None):
        """Get and set list of files to process"""
        if files:
            self.__init__(files)
        return self.files

    def filesPath(self):
        """Return list of paths to files"""
        return [f[0] for f in self.files]

    def filesItunes(self):
        """Return list of references to added songs in iTunes library"""
        return [f[1] for f in self.files]

    def processFiles(self, files):
        """Recurse through dirs if any are given and filter out non-flacs."""
        import types
        results = []
        for f in files:
            # `<>` modernized to `!=` (same semantics, still valid Python 2).
            if type(f) != types.UnicodeType: f = unicode(f, errors='replace')
            if os.path.isdir(f):
                flacs = [os.path.abspath(os.path.join(f, s))
                         for s in os.listdir(f) if os.path.splitext(s)[1] == '.flac']
                results.extend(flacs)
            else:
                if os.path.isfile(f) and os.path.splitext(f)[1] == '.flac':
                    results.append(os.path.abspath(f))
        return results

    def argsToList(self, args):
        """Check if argument was a list or a string. Always return a list"""
        import types
        if type(args) == types.StringType:
            return [args]
        else:
            return list(args)

    def setFileTypeToOggs(self, fn):
        """Set filetype to OggS to allow playback in iTunes"""
        from Carbon import File, Files
        for f in fn:
            fl, is_dir = File.FSPathMakeRef(f.encode('utf-8'))
            if is_dir:
                return False
            ci, _fn, fsspc, pfl = fl.FSGetCatalogInfo(Files.kFSCatInfoFinderInfo)
            finfo = fsspc.FSpGetFInfo()
            finfo.Type = 'OggS'
            fsspc.FSpSetFInfo(finfo)
        return True

    def __str__(self):
        return str(self.files)

    def __repr__(self):
        return str(self.files)
| Python |
"""
Fluke controller for Xcode and py2app
"""
# PyObjC modules
import objc
from Foundation import *
from AppKit import *
from PyObjCTools import AppHelper
import sys,os
# py2app fix - make sure we're going to find those third part libs
#resPath = sys.argv[0].split("/")
#resPath.pop()
#sys.path.insert( 0, os.path.join('/'.join([a for a in resPath]), 'lib', 'python2.5', 'lib-dynload') )
import fluke
class FlukeController(NSObject):
    """Nib-wired application controller: owns the main window and the
    conversion checkboxes, and drives the Open panel."""
    # Interface Builder outlets, connected when the nib loads.
    mainWindow = objc.IBOutlet()
    checkboxDelete = objc.IBOutlet()
    def awakeFromNib(self):
        # Runs once the nib is loaded and outlets are connected.
        import flukeapp
        NSLog(str(self.mainWindow.canBecomeMainWindow()))
        # presumably strips py2app/Xcode launcher arguments -- confirm in flukeapp
        sys.argv = flukeapp.cleanUpSysArgs(sys.argv)
        NSLog('System arguments: ' + str(sys.argv))
        # Open fileOpen dialog if no files were fed in
        #self.processFiles(sys.argv)
    @objc.IBAction
    def open_(self, sender):
        # Open action: show a sheet restricted to .flac files, allowing
        # directories and multiple selection.
        panel = NSOpenPanel.openPanel()
        panel.setCanChooseDirectories_(True)
        panel.setAllowsMultipleSelection_(True)
        panel.beginSheetForDirectory_file_types_modalForWindow_modalDelegate_didEndSelector_contextInfo_(
            '~/', None, ('flac',), self.mainWindow,
            self, 'openPanelDidEnd:panel:returnCode:contextInfo:', 0)
    @objc.IBAction
    def toggleConversion_(self,sender):
        # Keep the "delete" checkbox usable only while conversion is on;
        # clear it when conversion is switched off.
        NSLog(str(sender.state()))
        if sender.state() == 1:
            self.checkboxDelete.setEnabled_(True)
        else:
            self.checkboxDelete.setState_(0)
            self.checkboxDelete.setEnabled_(False)
    @AppHelper.endSheetMethod
    def openPanelDidEnd_panel_returnCode_contextInfo_(self, panel, returnCode, contextInfo):
        # Sheet completion callback; returnCode is nonzero when the user
        # confirmed a selection.
        if returnCode:
            print "Open: %s" % panel.filenames()
        else:
            print "Cancel"
if __name__ == "__main__":
    # NOTE(review): this only logs; there is no AppHelper.runEventLoop() call
    # here -- presumably the py2app/Xcode-generated main module starts the
    # Cocoa event loop. Confirm the app actually launches without it.
    NSLog("hi!!!")
| Python |
import os
import sys
CURRENT_DIR = os.getcwd()
SRC_DIR = os.path.join(CURRENT_DIR, 'src')
sys.path.append(SRC_DIR)
from util import device
print '### Setup script for FluidVoice V4'
un = raw_input('Username:')
id = raw_input('ID (int, should be unique):')
id = int(id)
print '### Creating profile file'
f = open('src/profile', mode='wa')
f.write(un + ',' + str(id))
f.close()
print '### Creating bookmark'''
bookmarks_template_path = os.path.join(CURRENT_DIR, 'deploy/MyBookmarks.template')
bookmarks_file_path = os.path.join(CURRENT_DIR, 'deploy/MyBookmarks.xml')
FLASH_UI_PATH = os.path.join(CURRENT_DIR, 'flash/src/FluidFlashApp.swf')
f = open(bookmarks_template_path, mode='ra')
template = f.read()
spliced = template.replace('%%%FV_UI_PATH%%%', FLASH_UI_PATH)
f.close()
f = open(bookmarks_file_path, mode='wa')
f.write(spliced)
f.close()
bookmarks_link_dst = '/home/user/.bookmarks/MyBookmarks.xml'
if os.path.exists(bookmarks_link_dst):
os.remove(bookmarks_link_dst)
os.link(bookmarks_file_path, bookmarks_link_dst)
print '### Creating FlashTrust settings'
FLASH_TRUST_PATH = '/home/user/.macromedia/Flash_Player/#Security/FlashPlayerTrust'
if not os.path.exists(FLASH_TRUST_PATH):
os.makedirs(FLASH_TRUST_PATH)
trust_link_dst = os.path.join(FLASH_TRUST_PATH, 'trust.cfg')
if os.path.exists(trust_link_dst):
os.remove(trust_link_dst)
trust_link_src = os.path.join(CURRENT_DIR, 'deploy/trust.cfg')
f = open(trust_link_src, mode='w')
print 'Writing',FLASH_UI_PATH
f.write(FLASH_UI_PATH + '\n')
f.close()
print 'Linking to ',trust_link_src,'from',trust_link_dst
os.link(trust_link_src, trust_link_dst)
print '### Setup complete.''' | Python |
# Copyright 2001-2005 by Vinay Sajip. All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
# provided that the above copyright notice appear in all copies and that
# both that copyright notice and this permission notice appear in
# supporting documentation, and that the name of Vinay Sajip
# not be used in advertising or publicity pertaining to distribution
# of the software without specific, written prior permission.
# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""
Additional handlers for the logging package for Python. The core package is
based on PEP 282 and comments thereto in comp.lang.python, and influenced by
Apache's log4j system.
Should work under Python versions >= 1.5.2, except that source line
information is not available unless 'sys._getframe()' is.
Copyright (C) 2001-2004 Vinay Sajip. All Rights Reserved.
To use, simply 'import logging' and log away!
"""
# Modified for Series 60
import glob
import logging
import os
import pickle as cPickle
import socket
import string
import struct
import sys
import time
import types
try:
import codecs
except ImportError:
codecs = None
#
# Default ports for the socket-based logging receivers, plus the
# conventional syslog UDP port.
#
DEFAULT_TCP_LOGGING_PORT = 9020
DEFAULT_UDP_LOGGING_PORT = 9021
DEFAULT_HTTP_LOGGING_PORT = 9022
DEFAULT_SOAP_LOGGING_PORT = 9023
SYSLOG_UDP_PORT = 514
_MIDNIGHT = 24 * 60 * 60 # number of seconds in a day
class SocketHandler(logging.Handler):
    """
    A handler class which writes logging records, in pickle format, to
    a streaming socket. The socket is kept open across logging calls.
    If the peer resets it, an attempt is made to reconnect on the next call
    (with exponential backoff).
    The pickle which is sent is that of the LogRecord's attribute dictionary
    (__dict__), so that the receiver does not need to have the logging module
    installed in order to process the logging event.
    To unpickle the record at the receiving end into a LogRecord, use the
    makeLogRecord function.
    """
    def __init__(self, host, port):
        """
        Initializes the handler with a specific host address and port.
        The attribute 'closeOnError' starts at 0, so socket errors during
        emit are routed to logging.Handler.handleError rather than silently
        closing the socket.
        """
        logging.Handler.__init__(self)
        self.host = host
        self.port = port
        self.sock = None
        self.closeOnError = 0
        self.retryTime = None      # earliest time a reconnect may be attempted
        #
        # Exponential backoff parameters.
        #
        self.retryStart = 1.0      # first retry delay, in seconds
        self.retryMax = 30.0       # upper bound on the retry delay
        self.retryFactor = 2.0     # delay multiplier after each failure
    def makeSocket(self):
        """
        A factory method which allows subclasses to define the precise
        type of socket they want. Returns a connected TCP socket.
        """
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.connect((self.host, self.port))
        return s
    def createSocket(self):
        """
        Try to create a socket, using an exponential backoff with
        a max retry time. Thanks to Robert Olson for the original patch
        (SF #815911) which has been slightly refactored.
        """
        now = time.time()
        # Either retryTime is None, in which case this
        # is the first time back after a disconnect, or
        # we've waited long enough.
        if self.retryTime is None:
            attempt = 1
        else:
            attempt = (now >= self.retryTime)
        if attempt:
            try:
                self.sock = self.makeSocket()
                self.retryTime = None # next time, no delay before trying
            except socket.error:
                # Fix: was a bare 'except:', which also swallowed
                # KeyboardInterrupt/SystemExit. Creation failed, so set
                # the retry time and return.
                if self.retryTime is None:
                    self.retryPeriod = self.retryStart
                else:
                    self.retryPeriod = self.retryPeriod * self.retryFactor
                    if self.retryPeriod > self.retryMax:
                        self.retryPeriod = self.retryMax
                self.retryTime = now + self.retryPeriod
    def send(self, s):
        """
        Send a pickled string to the socket.
        This function allows for partial sends which can happen when the
        network is busy.
        """
        if self.sock is None:
            self.createSocket()
        #self.sock can be None either because we haven't reached the retry
        #time yet, or because we have reached the retry time and retried,
        #but are still unable to connect.
        if self.sock:
            try:
                if hasattr(self.sock, "sendall"):
                    self.sock.sendall(s)
                else:
                    # manual partial-send loop for sockets without sendall
                    sentsofar = 0
                    left = len(s)
                    while left > 0:
                        sent = self.sock.send(s[sentsofar:])
                        sentsofar = sentsofar + sent
                        left = left - sent
            except socket.error:
                self.sock.close()
                self.sock = None # so we can call createSocket next time
    def makePickle(self, record):
        """
        Pickles the record in binary format with a 4-byte big-endian length
        prefix, and returns it ready for transmission across the socket.
        """
        ei = record.exc_info
        if ei:
            dummy = self.format(record) # just to get traceback text into record.exc_text
            record.exc_info = None # to avoid Unpickleable error
        s = cPickle.dumps(record.__dict__, 1)
        if ei:
            record.exc_info = ei # for next handler
        slen = struct.pack(">L", len(s))
        return slen + s
    def handleError(self, record):
        """
        Handle an error during logging.
        An error has occurred during logging. Most likely cause -
        connection lost. Close the socket so that we can retry on the
        next event.
        """
        if self.closeOnError and self.sock:
            self.sock.close()
            self.sock = None #try to reconnect next time
        else:
            logging.Handler.handleError(self, record)
    def emit(self, record):
        """
        Emit a record.
        Pickles the record and writes it to the socket in binary format.
        If there is an error with the socket, silently drop the packet.
        If there was a problem with the socket, re-establishes the
        socket.
        """
        try:
            s = self.makePickle(record)
            self.send(s)
        except (KeyboardInterrupt, SystemExit):
            raise
        except:
            self.handleError(record)
    def close(self):
        """
        Closes the socket and chains to the base-class close().
        """
        if self.sock:
            self.sock.close()
            self.sock = None
        logging.Handler.close(self)
class DatagramHandler(SocketHandler):
    """
    Sends pickled LogRecord attribute dictionaries (__dict__) over UDP
    instead of TCP, so the receiver can rebuild each record with
    makeLogRecord without having the logging module installed.
    """
    def __init__(self, host, port):
        """Record the destination address; no connection is established."""
        SocketHandler.__init__(self, host, port)
        self.closeOnError = 0
    def makeSocket(self):
        """Create the UDP (SOCK_DGRAM) socket used for sending."""
        return socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    def send(self, s):
        """
        Send the pickled byte string as one datagram. Unlike the TCP
        version there is no partial-send loop: UDP neither guarantees
        delivery nor ordering, and datagrams go out whole.
        """
        if self.sock is None:
            self.createSocket()
        self.sock.sendto(s, (self.host, self.port))
class BufferingHandler(logging.Handler):
    """
    Collects logging records in an in-memory list. After every append the
    handler consults shouldFlush(); when it answers true, flush() is
    expected to do whatever "flushing" means for the subclass.
    """
    def __init__(self, capacity):
        """Create the handler; `capacity` is the flush threshold."""
        logging.Handler.__init__(self)
        self.capacity = capacity
        self.buffer = []
    def shouldFlush(self, record):
        """
        Report whether the buffer should be flushed (it has reached
        capacity). Subclasses may override for custom strategies.
        """
        return not (len(self.buffer) < self.capacity)
    def emit(self, record):
        """Buffer the record, flushing if shouldFlush() approves."""
        self.buffer.append(record)
        if self.shouldFlush(record):
            self.flush()
    def flush(self):
        """Default flush behaviour: discard the buffered records."""
        self.buffer = []
    def close(self):
        """Flush any remainder, then run the base-class close()."""
        self.flush()
        logging.Handler.close(self)
class MemoryHandler(BufferingHandler):
    """
    Buffers records and periodically forwards them to a target handler.
    A flush happens when the buffer fills up or when a record at or above
    flushLevel arrives.
    """
    def __init__(self, capacity, flushLevel=logging.ERROR, target=None):
        """
        Buffer up to `capacity` records, flushing on records at
        `flushLevel` or above, forwarding to `target`. The target may also
        be supplied later via setTarget() -- without one, flushed records
        are simply dropped.
        """
        BufferingHandler.__init__(self, capacity)
        self.flushLevel = flushLevel
        self.target = target
    def shouldFlush(self, record):
        """Flush when full, or when `record` is at/above flushLevel."""
        if len(self.buffer) >= self.capacity:
            return True
        return record.levelno >= self.flushLevel
    def setTarget(self, target):
        """Point this handler at `target` for future flushes."""
        self.target = target
    def flush(self):
        """
        Hand every buffered record to the target, then empty the buffer.
        With no target set, the buffer is left untouched. Override for
        different behaviour.
        """
        if self.target:
            for pending in self.buffer:
                self.target.handle(pending)
            self.buffer = []
    def close(self):
        """Flush, drop the target reference, and chain up."""
        self.flush()
        self.target = None
        BufferingHandler.close(self)
| Python |
# Copyright 2001-2005 by Vinay Sajip. All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
# provided that the above copyright notice appear in all copies and that
# both that copyright notice and this permission notice appear in
# supporting documentation, and that the name of Vinay Sajip
# not be used in advertising or publicity pertaining to distribution
# of the software without specific, written prior permission.
# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""
Logging package for Python. Based on PEP 282 and comments thereto in
comp.lang.python, and influenced by Apache's log4j system.
Should work under Python versions >= 1.5.2, except that source line
information is not available unless 'sys._getframe()' is.
Copyright (C) 2001-2004 Vinay Sajip. All Rights Reserved.
To use, simply 'import logging' and log away!
"""
import sys, os, types, time, string, cStringIO, traceback
try:
import codecs
except ImportError:
codecs = None
try:
import thread
import threading
except ImportError:
thread = None
# Module metadata for this copy of the logging package.
__author__  = "Vinay Sajip <vinay_sajip@red-dove.com>"
__status__  = "beta"
__version__ = "0.4.9.7"
__date__    = "07 October 2005"
#---------------------------------------------------------------------------
# Miscellaneous module data
#---------------------------------------------------------------------------
#
# _srcfile is used when walking the stack to check when we've got the first
# caller stack frame.
#
if hasattr(sys, 'frozen'): #support for py2exe
    _srcfile = "logging%s__init__%s" % (os.sep, __file__[-4:])
elif string.lower(__file__[-4:]) in ['.pyc', '.pyo']:
    # running from a compiled module: point at the matching .py source
    _srcfile = __file__[:-4] + '.py'
else:
    _srcfile = __file__
# normcase so comparisons against frame filenames also work on
# case-insensitive filesystems
_srcfile = os.path.normcase(_srcfile)
# next bit filched from 1.5.2's inspect.py
def currentframe():
    """Return the frame object for the caller's stack frame.

    Portable fallback for interpreters without sys._getframe(): raise and
    catch an exception, then walk from the traceback back to the caller.
    """
    try:
        raise Exception
    except Exception:
        # sys.exc_info()[2] replaces the legacy (and non-thread-safe)
        # sys.exc_traceback attribute, which was removed in Python 3.
        return sys.exc_info()[2].tb_frame.f_back
if hasattr(sys, '_getframe'): currentframe = sys._getframe
# done filching
# _srcfile is only used in conjunction with sys._getframe().
# To provide compatibility with older versions of Python, set _srcfile
# to None if _getframe() is not available; this value will prevent
# findCaller() from being called.
#if not hasattr(sys, "_getframe"):
# _srcfile = None
#
# _startTime is used as the base when calculating the relative time of
# events (LogRecord.relativeCreated); captured once at module load.
#
_startTime = time.time()
#
# raiseExceptions is used to see if exceptions during handling should be
# propagated (reported to stderr by Handler.handleError)
#
raiseExceptions = 1
#---------------------------------------------------------------------------
# Level related stuff
#---------------------------------------------------------------------------
#
# Default levels and level names, these can be replaced with any positive set
# of values having corresponding names. There is a pseudo-level, NOTSET, which
# is only really there as a lower limit for user-defined levels. Handlers and
# loggers are initialized with NOTSET so that they will log all messages, even
# at user-defined levels.
#
CRITICAL = 50
FATAL = CRITICAL    # alias kept for backward compatibility
ERROR = 40
WARNING = 30
WARN = WARNING      # alias kept for backward compatibility
INFO = 20
DEBUG = 10
NOTSET = 0          # pseudo-level: loggers/handlers at NOTSET log everything
# Two-way mapping: numeric level -> name AND name -> numeric level.
# Note that 'WARN' has a string entry here but 'FATAL' does not.
_levelNames = {
    CRITICAL : 'CRITICAL',
    ERROR : 'ERROR',
    WARNING : 'WARNING',
    INFO : 'INFO',
    DEBUG : 'DEBUG',
    NOTSET : 'NOTSET',
    'CRITICAL' : CRITICAL,
    'ERROR' : ERROR,
    'WARN' : WARNING,
    'WARNING' : WARNING,
    'INFO' : INFO,
    'DEBUG' : DEBUG,
    'NOTSET' : NOTSET,
}
def getLevelName(level):
    """
    Return the textual representation of logging level 'level'.
    Predefined levels (CRITICAL, ERROR, WARNING, INFO, DEBUG) and any
    registered via addLevelName map to their names; since _levelNames is
    bidirectional, passing a known name returns the numeric level.
    Anything unknown comes back as "Level %s" % level.
    """
    try:
        return _levelNames[level]
    except KeyError:
        return "Level %s" % level
def addLevelName(level, levelName):
    """
    Register `levelName` as the text for `level` (used when converting
    levels to text during message formatting); the mapping is installed in
    both directions under the module lock.
    """
    _acquireLock()
    try:
        _levelNames[levelName] = level
        _levelNames[level] = levelName
    finally:
        _releaseLock()
#---------------------------------------------------------------------------
# Thread-related stuff
#---------------------------------------------------------------------------
#
#_lock is used to serialize access to shared data structures in this module.
#This needs to be an RLock because fileConfig() creates Handlers and so
#might arbitrary user threads. Since Handler.__init__() updates the shared
#dictionary _handlers, it needs to acquire the lock. But if configuring,
#the lock would already have been acquired - so we need an RLock.
#The same argument applies to Loggers and Manager.loggerDict.
#
_lock = None
def _acquireLock():
    """
    Acquire the module-level lock for serializing access to shared data;
    the RLock itself is created lazily on first use, and only when thread
    support is present. Pair every call with _releaseLock().
    """
    global _lock
    if _lock is None and thread:
        _lock = threading.RLock()
    if _lock is not None:
        _lock.acquire()
def _releaseLock():
    """
    Release the module-level lock acquired by calling _acquireLock().
    A no-op when the lock was never created.
    """
    if _lock is not None:
        _lock.release()
#---------------------------------------------------------------------------
# The logging record
#---------------------------------------------------------------------------
class LogRecord:
    """
    A LogRecord instance represents an event being logged.
    LogRecord instances are created every time something is logged. They
    contain all the information pertinent to the event being logged. The
    main information passed in is in msg and args, which are combined
    using str(msg) % args to create the message field of the record. The
    record also includes information such as when the record was created,
    the source line where the logging call was made, and any exception
    information to be logged.
    """
    def __init__(self, name, level, pathname, lineno, msg, args, exc_info):
        """
        Initialize a logging record: `name` is the logger name, `level`
        the numeric level, `pathname`/`lineno` the source location, `msg`
        the (possibly %-style) format string, `args` its arguments and
        `exc_info` an optional sys.exc_info() triple.
        """
        ct = time.time()
        self.name = name
        self.msg = msg
        #
        # The following statement allows passing of a dictionary as a sole
        # argument, so that you can do something like
        #  logging.debug("a %(a)d b %(b)s", {'a':1, 'b':2})
        # Suggested by Stefan Behnel.
        # Note that without the test for args[0], we get a problem because
        # during formatting, we test to see if the arg is present using
        # 'if self.args:'. If the event being logged is e.g. 'Value is %d'
        # and if the passed arg fails 'if self.args:' then no formatting
        # is done. For example, logger.warn('Value is %d', 0) would log
        # 'Value is %d' instead of 'Value is 0'.
        # For the use case of passing a dictionary, this should not be a
        # problem.
        if args and (len(args) == 1) and args[0] and (type(args[0]) == types.DictType):
            args = args[0]
        self.args = args
        self.levelname = getLevelName(level)
        self.levelno = level
        self.pathname = pathname
        try:
            self.filename = os.path.basename(pathname)
            self.module = os.path.splitext(self.filename)[0]
        except:
            # pathname may not be a usable path; fall back gracefully
            self.filename = pathname
            self.module = "Unknown module"
        self.exc_info = exc_info
        self.exc_text = None # used to cache the traceback text
        self.lineno = lineno
        self.created = ct
        self.msecs = (ct - long(ct)) * 1000
        self.relativeCreated = (self.created - _startTime) * 1000
        if thread:
            self.thread = thread.get_ident()
            self.threadName = threading.currentThread().getName()
        else:
            self.thread = None
            self.threadName = None
        if hasattr(os, 'getpid'):
            self.process = os.getpid()
        else:
            self.process = None
    def __str__(self):
        return '<LogRecord: %s, %s, %s, %s, "%s">'%(self.name, self.levelno,
            self.pathname, self.lineno, self.msg)
    def getMessage(self):
        """
        Return the message for this LogRecord.
        Return the message for this LogRecord after merging any user-supplied
        arguments with the message.
        """
        if not hasattr(types, "UnicodeType"): #if no unicode support...
            msg = str(self.msg)
        else:
            msg = self.msg
            if type(msg) not in (types.UnicodeType, types.StringType):
                try:
                    msg = str(self.msg)
                except UnicodeError:
                    msg = self.msg #Defer encoding till later
        if self.args:
            msg = msg % self.args
        return msg
def makeLogRecord(dict):
    """
    Build and return a LogRecord whose attributes come from `dict`.
    Useful for turning a logging event received over a socket connection
    (sent as an attribute dictionary) back into a LogRecord instance.
    """
    record = LogRecord(None, None, "", 0, "", (), None)
    record.__dict__.update(dict)
    return record
#---------------------------------------------------------------------------
# Formatter classes and functions
#---------------------------------------------------------------------------
class Formatter:
    """
    Formatter instances are used to convert a LogRecord to text.
    Formatters need to know how a LogRecord is constructed. They are
    responsible for converting a LogRecord to (usually) a string which can
    be interpreted by either a human or an external system. The base Formatter
    allows a formatting string to be specified. If none is supplied, the
    default value of "%(message)s" is used.
    The Formatter can be initialized with a format string which makes use of
    knowledge of the LogRecord attributes - e.g. the default value mentioned
    above makes use of the fact that the user's message and arguments are pre-
    formatted into a LogRecord's message attribute. Currently, the useful
    attributes in a LogRecord are described by:
    %(name)s            Name of the logger (logging channel)
    %(levelno)s         Numeric logging level for the message (DEBUG, INFO,
                        WARNING, ERROR, CRITICAL)
    %(levelname)s       Text logging level for the message ("DEBUG", "INFO",
                        "WARNING", "ERROR", "CRITICAL")
    %(pathname)s        Full pathname of the source file where the logging
                        call was issued (if available)
    %(filename)s        Filename portion of pathname
    %(module)s          Module (name portion of filename)
    %(lineno)d          Source line number where the logging call was issued
                        (if available)
    %(created)f         Time when the LogRecord was created (time.time()
                        return value)
    %(asctime)s         Textual time when the LogRecord was created
    %(msecs)d           Millisecond portion of the creation time
    %(relativeCreated)d Time in milliseconds when the LogRecord was created,
                        relative to the time the logging module was loaded
                        (typically at application startup time)
    %(thread)d          Thread ID (if available)
    %(threadName)s      Thread name (if available)
    %(process)d         Process ID (if available)
    %(message)s         The result of record.getMessage(), computed just as
                        the record is emitted
    """
    # user-configurable creation-time converter; override on an instance or
    # on the class (e.g. time.gmtime for GMT timestamps)
    converter = time.localtime
    def __init__(self, fmt=None, datefmt=None):
        """
        Initialize the formatter with specified format strings.
        Initialize the formatter either with the specified format string, or a
        default as described above. Allow for specialized date formatting with
        the optional datefmt argument (if omitted, you get the ISO8601 format).
        """
        if fmt:
            self._fmt = fmt
        else:
            self._fmt = "%(message)s"
        self.datefmt = datefmt
    def formatTime(self, record, datefmt=None):
        """
        Return the creation time of the specified LogRecord as formatted text.
        This method should be called from format() by a formatter which
        wants to make use of a formatted time. This method can be overridden
        in formatters to provide for any specific requirement, but the
        basic behaviour is as follows: if datefmt (a string) is specified,
        it is used with time.strftime() to format the creation time of the
        record. Otherwise, the ISO8601 format is used. The resulting
        string is returned. This function uses a user-configurable function
        to convert the creation time to a tuple. By default, time.localtime()
        is used; to change this for a particular formatter instance, set the
        'converter' attribute to a function with the same signature as
        time.localtime() or time.gmtime(). To change it for all formatters,
        for example if you want all logging times to be shown in GMT,
        set the 'converter' attribute in the Formatter class.
        """
        ct = self.converter(record.created)
        if datefmt:
            s = time.strftime(datefmt, ct)
        else:
            # ISO8601-style timestamp with the milliseconds appended
            t = time.strftime("%Y-%m-%d %H:%M:%S", ct)
            s = "%s,%03d" % (t, record.msecs)
        return s
    def formatException(self, ei):
        """
        Format and return the specified exception information as a string.
        This default implementation just uses
        traceback.print_exception()
        """
        sio = cStringIO.StringIO()
        traceback.print_exception(ei[0], ei[1], ei[2], None, sio)
        s = sio.getvalue()
        sio.close()
        # strip the trailing newline that print_exception always appends
        if s[-1] == "\n":
            s = s[:-1]
        return s
    def format(self, record):
        """
        Format the specified record as text.
        The record's attribute dictionary is used as the operand to a
        string formatting operation which yields the returned string.
        Before formatting the dictionary, a couple of preparatory steps
        are carried out. The message attribute of the record is computed
        using LogRecord.getMessage(). If the formatting string contains
        "%(asctime)", formatTime() is called to format the event time.
        If there is exception information, it is formatted using
        formatException() and appended to the message.
        """
        record.message = record.getMessage()
        if string.find(self._fmt,"%(asctime)") >= 0:
            record.asctime = self.formatTime(record, self.datefmt)
        s = self._fmt % record.__dict__
        if record.exc_info:
            # Cache the traceback text to avoid converting it multiple times
            # (it's constant anyway)
            if not record.exc_text:
                record.exc_text = self.formatException(record.exc_info)
            if record.exc_text:
                if s[-1] != "\n":
                    s = s + "\n"
                s = s + record.exc_text
        return s
#
# The default formatter to use when no other is specified
# (plain "%(message)s" with no date or level information).
#
_defaultFormatter = Formatter()
class BufferingFormatter:
    """
    Formats a batch of records as header + one line per record + footer.
    """
    def __init__(self, linefmt=None):
        """
        Remember the per-record formatter; the module default formatter is
        used when none is given.
        """
        self.linefmt = linefmt or _defaultFormatter
    def formatHeader(self, records):
        """Text emitted before the records; empty by default."""
        return ""
    def formatFooter(self, records):
        """Text emitted after the records; empty by default."""
        return ""
    def format(self, records):
        """
        Format the records and return the concatenation of header, each
        formatted record, and footer (empty string for no records).
        """
        if not records:
            return ""
        parts = [self.formatHeader(records)]
        for record in records:
            parts.append(self.linefmt.format(record))
        parts.append(self.formatFooter(records))
        return "".join(parts)
#---------------------------------------------------------------------------
# Filter classes and functions
#---------------------------------------------------------------------------
class Filter:
    """
    Filter instances are used to perform arbitrary filtering of LogRecords.
    Loggers and Handlers can optionally use Filter instances to filter
    records as desired. The base filter class only allows events which are
    below a certain point in the logger hierarchy. For example, a filter
    initialized with "A.B" will allow events logged by loggers "A.B",
    "A.B.C", "A.B.C.D", "A.B.D" etc. but not "A.BB", "B.A.B" etc. If
    initialized with the empty string, all events are passed.
    """
    def __init__(self, name=''):
        """
        Initialize with the name of the logger which, together with its
        children, will have its events allowed through the filter. If no
        name is specified, allow every event.
        """
        self.name = name
        self.nlen = len(name)
    def filter(self, record):
        """
        Determine if the specified record is to be logged.
        Returns zero for no, nonzero for yes. If deemed appropriate, the
        record may be modified in-place.
        """
        if self.nlen == 0:
            return 1
        elif self.name == record.name:
            return 1
        elif not record.name.startswith(self.name):
            # replaces the deprecated string.find(record.name, self.name,
            # 0, self.nlen) != 0 check -- identical semantics, and works on
            # Python 3 where string.find no longer exists
            return 0
        # record.name extends self.name: only a child ("A.B.x") qualifies
        return (record.name[self.nlen] == ".")
class Filterer:
    """
    Base class sharing the filter-management code used by both loggers
    and handlers.
    """
    def __init__(self):
        """Start with no filters installed."""
        self.filters = []
    def addFilter(self, filter):
        """Attach `filter` unless it is already attached."""
        if filter not in self.filters:
            self.filters.append(filter)
    def removeFilter(self, filter):
        """Detach `filter` if it is currently attached."""
        if filter in self.filters:
            self.filters.remove(filter)
    def filter(self, record):
        """
        Consult every installed filter about `record`; any veto (zero
        return) drops it. Returns zero if the record is to be dropped,
        else non-zero.
        """
        for f in self.filters:
            if not f.filter(record):
                return 0
        return 1
#---------------------------------------------------------------------------
# Handler classes and functions
#---------------------------------------------------------------------------
_handlers = {} #repository of handlers (for flushing when shutdown called)
_handlerList = [] # mirrors _handlers, newest first, so handlers can be closed in reverse of initialization order
class Handler(Filterer):
    """
    Handler instances dispatch logging events to specific destinations.
    The base handler class. Acts as a placeholder which defines the Handler
    interface. Handlers can optionally use Formatter instances to format
    records as desired. By default, no formatter is specified; in this case,
    the 'raw' message as determined by record.message is logged.
    """
    def __init__(self, level=NOTSET):
        """
        Initializes the instance - basically setting the formatter to None
        and the filter list to empty, and registering the handler in the
        module's shared registries.
        """
        Filterer.__init__(self)
        self.level = level
        self.formatter = None
        #get the module data lock, as we're updating a shared structure.
        _acquireLock()
        try: #unlikely to raise an exception, but you never know...
            _handlers[self] = 1
            _handlerList.insert(0, self)
        finally:
            _releaseLock()
        self.createLock()
    def createLock(self):
        """
        Acquire a thread lock for serializing access to the underlying I/O.
        """
        if thread:
            self.lock = threading.RLock()
        else:
            # no thread support: acquire/release degenerate to no-ops
            self.lock = None
    def acquire(self):
        """
        Acquire the I/O thread lock.
        """
        if self.lock:
            self.lock.acquire()
    def release(self):
        """
        Release the I/O thread lock.
        """
        if self.lock:
            self.lock.release()
    def setLevel(self, level):
        """
        Set the logging level of this handler.
        """
        self.level = level
    def format(self, record):
        """
        Format the specified record.
        If a formatter is set, use it. Otherwise, use the default formatter
        for the module.
        """
        if self.formatter:
            fmt = self.formatter
        else:
            fmt = _defaultFormatter
        return fmt.format(record)
    def emit(self, record):
        """
        Do whatever it takes to actually log the specified logging record.
        This version is intended to be implemented by subclasses and so
        raises a NotImplementedError.
        """
        raise NotImplementedError, 'emit must be implemented '\
                                    'by Handler subclasses'
    def handle(self, record):
        """
        Conditionally emit the specified logging record.
        Emission depends on filters which may have been added to the handler.
        Wrap the actual emission of the record with acquisition/release of
        the I/O thread lock. Returns whether the filter passed the record for
        emission.
        """
        rv = self.filter(record)
        if rv:
            self.acquire()
            try:
                self.emit(record)
            finally:
                self.release()
        return rv
    def setFormatter(self, fmt):
        """
        Set the formatter for this handler.
        """
        self.formatter = fmt
    def flush(self):
        """
        Ensure all logging output has been flushed.
        This version does nothing and is intended to be implemented by
        subclasses.
        """
        pass
    def close(self):
        """
        Tidy up any resources used by the handler.
        This version removes the handler from an internal list
        of handlers which is closed when shutdown() is called. Subclasses
        should ensure that this gets called from overridden close()
        methods.
        """
        #get the module data lock, as we're updating a shared structure.
        _acquireLock()
        try: #unlikely to raise an exception, but you never know...
            del _handlers[self]
            _handlerList.remove(self)
        finally:
            _releaseLock()
    def handleError(self, record):
        """
        Handle errors which occur during an emit() call.
        This method should be called from handlers when an exception is
        encountered during an emit() call. If raiseExceptions is false,
        exceptions get silently ignored. This is what is mostly wanted
        for a logging system - most users will not care about errors in
        the logging system, they are more interested in application errors.
        You could, however, replace this with a custom handler if you wish.
        The record which was being processed is passed in to this method.
        """
        if raiseExceptions:
            # report the currently-handled exception's traceback to stderr
            ei = sys.exc_info()
            traceback.print_exception(ei[0], ei[1], ei[2], None, sys.stderr)
            del ei
class StreamHandler(Handler):
    """
    A handler class which writes logging records, appropriately formatted,
    to a stream. Note that this class does not close the stream, as
    sys.stdout or sys.stderr may be used.
    """
    def __init__(self, strm=None):
        """
        Initialize the handler.

        strm: stream to write to; if not specified, sys.stderr is used.
        """
        Handler.__init__(self)
        if strm is None:
            strm = sys.stderr
        self.stream = strm
        self.formatter = None
    def flush(self):
        """
        Flushes the stream.
        """
        self.stream.flush()
    def emit(self, record):
        """
        Emit a record.

        If a formatter is specified, it is used to format the record.
        The record is then written to the stream with a trailing newline
        [N.B. this may be removed depending on feedback]. If exception
        information is present, it is formatted using
        traceback.print_exception and appended to the stream.
        """
        try:
            msg = self.format(record)
            fs = "%s\n"
            # types.UnicodeType is absent on ancient interpreters built
            # without unicode support.
            if not hasattr(types, "UnicodeType"): #if no unicode support...
                self.stream.write(fs % msg)
            else:
                try:
                    self.stream.write(fs % msg)
                except UnicodeError:
                    # Stream rejected the unicode message; retry with a
                    # UTF-8 encoded byte string.
                    self.stream.write(fs % msg.encode("UTF-8"))
            self.flush()
        except (KeyboardInterrupt, SystemExit):
            # Never swallow requests to stop the interpreter.
            raise
        except:
            # All other errors are routed through handleError(), which
            # honours the module-level raiseExceptions flag.
            self.handleError(record)
class FileHandler(StreamHandler):
    """
    A handler class which writes formatted logging records to disk files.
    """
    def __init__(self, filename, mode='a', encoding=None):
        """
        Open the specified file and use it as the stream for logging.

        filename: path of the log file.
        mode: open mode, default 'a' (append).
        encoding: optional text encoding; silently ignored when the
        codecs module is unavailable on this interpreter.
        """
        if codecs is None:
            # No codecs module (very old Python): fall back to plain open().
            encoding = None
        if encoding is None:
            stream = open(filename, mode)
        else:
            stream = codecs.open(filename, mode, encoding)
        StreamHandler.__init__(self, stream)
        #keep the absolute path, otherwise derived classes which use this
        #may come a cropper when the current directory changes
        self.baseFilename = os.path.abspath(filename)
        self.mode = mode
    def close(self):
        """
        Closes the stream (flushing pending output first), then runs the
        base-class close() to deregister the handler.
        """
        self.flush()
        self.stream.close()
        StreamHandler.close(self)
#---------------------------------------------------------------------------
# Manager classes and functions
#---------------------------------------------------------------------------
class PlaceHolder:
    """
    PlaceHolder instances are used in the Manager logger hierarchy to take
    the place of nodes for which no loggers have been defined. This class is
    intended for internal use only and not as part of the public API.
    """
    def __init__(self, alogger):
        """
        Initialize with the specified logger being a child of this placeholder.

        alogger: the first child logger recorded under this placeholder.
        """
        # A dict used as a set of child loggers (values are ignored).
        self.loggerMap = { alogger : None }
    def append(self, alogger):
        """
        Add the specified logger as a child of this placeholder.
        """
        # 'in' replaces dict.has_key(), which was removed in Python 3;
        # membership semantics are identical (supported since Python 2.2).
        if alogger not in self.loggerMap:
            self.loggerMap[alogger] = None
#
# Determine which class to use when instantiating loggers.
#
_loggerClass = None     # replaced with Logger at module scope; overridable via setLoggerClass()
def setLoggerClass(klass):
    """
    Set the class to be used when instantiating a logger. The class should
    define __init__() such that only a name argument is required, and the
    __init__() should call Logger.__init__()

    Raises TypeError if klass is neither Logger nor a subclass of it.
    """
    if klass != Logger:
        if not issubclass(klass, Logger):
            # Instantiate the exception instead of the old two-argument
            # 'raise TypeError, msg' form; this spelling works in both
            # Python 2 and Python 3.
            raise TypeError("logger not derived from logging.Logger: " +
                            klass.__name__)
    global _loggerClass
    _loggerClass = klass
def getLoggerClass():
    """
    Return the class to be used when instantiating a logger.

    This is Logger unless changed via setLoggerClass().
    """
    return _loggerClass
class Manager:
    """
    There is [under normal circumstances] just one Manager instance, which
    holds the hierarchy of loggers.
    """
    def __init__(self, rootnode):
        """
        Initialize the manager with the root node of the logger hierarchy.
        """
        self.root = rootnode
        self.disable = 0    # threshold set by the module-level disable()
        self.emittedNoHandlerWarning = 0
        self.loggerDict = {}    # name -> Logger or PlaceHolder
    def getLogger(self, name):
        """
        Get a logger with the specified name (channel name), creating it
        if it doesn't yet exist. This name is a dot-separated hierarchical
        name, such as "a", "a.b", "a.b.c" or similar.

        If a PlaceHolder existed for the specified name [i.e. the logger
        didn't exist but a child of it did], replace it with the created
        logger and fix up the parent/child references which pointed to the
        placeholder to now point to the logger.
        """
        rv = None
        # The whole lookup/create sequence runs under the module lock so
        # concurrent getLogger() calls cannot create duplicate nodes.
        _acquireLock()
        try:
            if self.loggerDict.has_key(name):   # dict.has_key: Python 2 idiom
                rv = self.loggerDict[name]
                if isinstance(rv, PlaceHolder):
                    # A descendant was created first; promote the
                    # placeholder to a real logger and rewire its children.
                    ph = rv
                    rv = _loggerClass(name)
                    rv.manager = self
                    self.loggerDict[name] = rv
                    self._fixupChildren(ph, rv)
                    self._fixupParents(rv)
            else:
                rv = _loggerClass(name)
                rv.manager = self
                self.loggerDict[name] = rv
                self._fixupParents(rv)
        finally:
            _releaseLock()
        return rv
    def _fixupParents(self, alogger):
        """
        Ensure that there are either loggers or placeholders all the way
        from the specified logger to the root of the logger hierarchy.
        """
        name = alogger.name
        i = string.rfind(name, ".")
        rv = None
        # Walk the dotted-name prefixes from longest to shortest until an
        # existing Logger is found (it becomes the parent) or the name is
        # exhausted (the root becomes the parent).
        while (i > 0) and not rv:
            substr = name[:i]
            if not self.loggerDict.has_key(substr):
                self.loggerDict[substr] = PlaceHolder(alogger)
            else:
                obj = self.loggerDict[substr]
                if isinstance(obj, Logger):
                    rv = obj
                else:
                    assert isinstance(obj, PlaceHolder)
                    obj.append(alogger)
            i = string.rfind(name, ".", 0, i - 1)
        if not rv:
            rv = self.root
        alogger.parent = rv
    def _fixupChildren(self, ph, alogger):
        """
        Ensure that children of the placeholder ph are connected to the
        specified logger.
        """
        #for c in ph.loggers:
        for c in ph.loggerMap.keys():
            # '<>' is the legacy Python 2 spelling of '!='.  Only re-parent
            # children whose current parent name does not already start
            # with the new logger's name (i.e. is not at or below it).
            if string.find(c.parent.name, alogger.name) <> 0:
                alogger.parent = c.parent
                c.parent = alogger
#---------------------------------------------------------------------------
# Logger classes and functions
#---------------------------------------------------------------------------
class Logger(Filterer):
    """
    Instances of the Logger class represent a single logging channel. A
    "logging channel" indicates an area of an application. Exactly how an
    "area" is defined is up to the application developer. Since an
    application can have any number of areas, logging channels are identified
    by a unique string. Application areas can be nested (e.g. an area
    of "input processing" might include sub-areas "read CSV files", "read
    XLS files" and "read Gnumeric files"). To cater for this natural nesting,
    channel names are organized into a namespace hierarchy where levels are
    separated by periods, much like the Java or Python package namespace. So
    in the instance given above, channel names might be "input" for the upper
    level, and "input.csv", "input.xls" and "input.gnu" for the sub-levels.
    There is no arbitrary limit to the depth of nesting.
    """
    # NOTE: the class attributes 'root' and 'manager' are attached at
    # module scope, after RootLogger and Manager are instantiated below.
    def __init__(self, name, level=NOTSET):
        """
        Initialize the logger with a name and an optional level.
        """
        Filterer.__init__(self)
        self.name = name
        self.level = level
        self.parent = None      # fixed up by Manager._fixupParents()
        self.propagate = 1      # pass records to ancestors' handlers too
        self.handlers = []
        self.disabled = 0
    def setLevel(self, level):
        """
        Set the logging level of this logger.
        """
        self.level = level
    def debug(self, msg, *args, **kwargs):
        """
        Log 'msg % args' with severity 'DEBUG'.

        To pass exception information, use the keyword argument exc_info with
        a true value, e.g.

        logger.debug("Houston, we have a %s", "thorny problem", exc_info=1)
        """
        if self.manager.disable >= DEBUG:
            return
        # Direct comparison; equivalent to isEnabledFor(DEBUG) once the
        # manager.disable check above has passed.
        if DEBUG >= self.getEffectiveLevel():
            # apply() is the pre-Python-2.0 call syntax for
            # self._log(DEBUG, msg, args, **kwargs).
            apply(self._log, (DEBUG, msg, args), kwargs)
    def info(self, msg, *args, **kwargs):
        """
        Log 'msg % args' with severity 'INFO'.

        To pass exception information, use the keyword argument exc_info with
        a true value, e.g.

        logger.info("Houston, we have a %s", "interesting problem", exc_info=1)
        """
        if self.manager.disable >= INFO:
            return
        if INFO >= self.getEffectiveLevel():
            apply(self._log, (INFO, msg, args), kwargs)
    def warning(self, msg, *args, **kwargs):
        """
        Log 'msg % args' with severity 'WARNING'.

        To pass exception information, use the keyword argument exc_info with
        a true value, e.g.

        logger.warning("Houston, we have a %s", "bit of a problem", exc_info=1)
        """
        if self.manager.disable >= WARNING:
            return
        if self.isEnabledFor(WARNING):
            apply(self._log, (WARNING, msg, args), kwargs)
    warn = warning
    def error(self, msg, *args, **kwargs):
        """
        Log 'msg % args' with severity 'ERROR'.

        To pass exception information, use the keyword argument exc_info with
        a true value, e.g.

        logger.error("Houston, we have a %s", "major problem", exc_info=1)
        """
        if self.manager.disable >= ERROR:
            return
        if self.isEnabledFor(ERROR):
            apply(self._log, (ERROR, msg, args), kwargs)
    def exception(self, msg, *args):
        """
        Convenience method for logging an ERROR with exception information.

        Note: unlike error(), no **kwargs are accepted; exc_info is forced.
        """
        apply(self.error, (msg,) + args, {'exc_info': 1})
    def critical(self, msg, *args, **kwargs):
        """
        Log 'msg % args' with severity 'CRITICAL'.

        To pass exception information, use the keyword argument exc_info with
        a true value, e.g.

        logger.critical("Houston, we have a %s", "major disaster", exc_info=1)
        """
        if self.manager.disable >= CRITICAL:
            return
        if CRITICAL >= self.getEffectiveLevel():
            apply(self._log, (CRITICAL, msg, args), kwargs)
    fatal = critical
    def log(self, level, msg, *args, **kwargs):
        """
        Log 'msg % args' with the integer severity 'level'.

        To pass exception information, use the keyword argument exc_info with
        a true value, e.g.

        logger.log(level, "We have a %s", "mysterious problem", exc_info=1)
        """
        if type(level) != types.IntType:
            # Non-integer levels either raise or are silently dropped,
            # depending on the module-level raiseExceptions flag.
            if raiseExceptions:
                raise TypeError, "level must be an integer"
            else:
                return
        if self.manager.disable >= level:
            return
        if self.isEnabledFor(level):
            apply(self._log, (level, msg, args), kwargs)
    def findCaller(self):
        """
        Find the stack frame of the caller so that we can note the source
        file name, line number and function name.
        """
        # Walk outward from our own caller, skipping frames that belong
        # to this module (identified via the module global _srcfile).
        f = currentframe().f_back
        rv = "(unknown file)", 0, "(unknown function)"
        while hasattr(f, "f_code"):
            co = f.f_code
            filename = os.path.normcase(co.co_filename)
            if filename == _srcfile:
                f = f.f_back
                continue
            rv = (filename, f.f_lineno, co.co_name)
            break
        return rv
    def makeRecord(self, name, level, fn, lno, msg, args, exc_info):
        """
        A factory method which can be overridden in subclasses to create
        specialized LogRecords.
        """
        return LogRecord(name, level, fn, lno, msg, args, exc_info)
    def _log(self, level, msg, args, exc_info=None):
        """
        Low-level logging routine which creates a LogRecord and then calls
        all the handlers of this logger to handle the record.
        """
        if _srcfile:
            # NOTE: 'func' is computed but not passed on; this version of
            # makeRecord() has no function-name parameter.
            fn, lno, func = self.findCaller()
        else:
            fn, lno, func = "(unknown file)", 0, "(unknown function)"
        if exc_info:
            # Any true non-tuple value means "capture the current exception".
            if type(exc_info) != types.TupleType:
                exc_info = sys.exc_info()
        record = self.makeRecord(self.name, level, fn, lno, msg, args, exc_info)
        self.handle(record)
    def handle(self, record):
        """
        Call the handlers for the specified record.

        This method is used for unpickled records received from a socket, as
        well as those created locally. Logger-level filtering is applied.
        """
        if (not self.disabled) and self.filter(record):
            self.callHandlers(record)
    def addHandler(self, hdlr):
        """
        Add the specified handler to this logger.
        """
        if not (hdlr in self.handlers):
            self.handlers.append(hdlr)
    def removeHandler(self, hdlr):
        """
        Remove the specified handler from this logger.
        """
        if hdlr in self.handlers:
            #hdlr.close()
            # Take the handler's I/O lock so we never unhook it while
            # another thread is emitting through it.
            hdlr.acquire()
            try:
                self.handlers.remove(hdlr)
            finally:
                hdlr.release()
    def callHandlers(self, record):
        """
        Pass a record to all relevant handlers.

        Loop through all handlers for this logger and its parents in the
        logger hierarchy. If no handler was found, output a one-off error
        message to sys.stderr. Stop searching up the hierarchy whenever a
        logger with the "propagate" attribute set to zero is found - that
        will be the last logger whose handlers are called.
        """
        c = self
        found = 0
        while c:
            for hdlr in c.handlers:
                found = found + 1
                if record.levelno >= hdlr.level:
                    hdlr.handle(record)
            if not c.propagate:
                c = None    #break out
            else:
                c = c.parent
        if (found == 0) and raiseExceptions and not self.manager.emittedNoHandlerWarning:
            # Emit this warning at most once per manager.
            sys.stderr.write("No handlers could be found for logger"
                             " \"%s\"\n" % self.name)
            self.manager.emittedNoHandlerWarning = 1
    def getEffectiveLevel(self):
        """
        Get the effective level for this logger.

        Loop through this logger and its parents in the logger hierarchy,
        looking for a non-zero logging level. Return the first one found.
        """
        logger = self
        while logger:
            if logger.level:
                return logger.level
            logger = logger.parent
        return NOTSET
    def isEnabledFor(self, level):
        """
        Is this logger enabled for level 'level'?
        """
        if self.manager.disable >= level:
            return 0
        return level >= self.getEffectiveLevel()
class RootLogger(Logger):
    """
    A root logger is not that different to any other logger, except that
    it must have a logging level and there is only one instance of it in
    the hierarchy.
    """
    def __init__(self, level):
        """
        Initialize the logger with the name "root".
        """
        Logger.__init__(self, "root", level)
_loggerClass = Logger   # default factory used by Manager.getLogger()

root = RootLogger(WARNING)  # the single root of the logger hierarchy
Logger.root = root
Logger.manager = Manager(Logger.root)   # hierarchy manager shared by all loggers

#---------------------------------------------------------------------------
#   Configuration classes and functions
#---------------------------------------------------------------------------

BASIC_FORMAT = "%(levelname)s:%(name)s:%(message)s"
def basicConfig(**kwargs):
    """
    Do basic configuration for the logging system.

    This function does nothing if the root logger already has handlers
    configured. It is a convenience method intended for use by simple scripts
    to do one-shot configuration of the logging package.

    The default behaviour is to create a StreamHandler which writes to
    sys.stderr, set a formatter using the BASIC_FORMAT format string, and
    add the handler to the root logger.

    A number of optional keyword arguments may be specified, which can alter
    the default behaviour.

    filename  Specifies that a FileHandler be created, using the specified
              filename, rather than a StreamHandler.
    filemode  Specifies the mode to open the file, if filename is specified
              (if filemode is unspecified, it defaults to 'a').
    format    Use the specified format string for the handler.
    datefmt   Use the specified date/time format.
    level     Set the root logger level to the specified level.
    stream    Use the specified stream to initialize the StreamHandler. Note
              that this argument is incompatible with 'filename' - if both
              are present, 'stream' is ignored.

    Note that you could specify a stream created using open(filename, mode)
    rather than passing the filename and mode in. However, it should be
    remembered that StreamHandler does not close its stream (since it may be
    using sys.stdout or sys.stderr), whereas FileHandler closes its stream
    when the handler is closed.
    """
    if len(root.handlers) == 0:
        filename = kwargs.get("filename")
        if filename:
            mode = kwargs.get("filemode", 'a')
            hdlr = FileHandler(filename, mode)
        else:
            stream = kwargs.get("stream")
            hdlr = StreamHandler(stream)
        fs = kwargs.get("format", BASIC_FORMAT)
        dfs = kwargs.get("datefmt", None)
        fmt = Formatter(fs, dfs)
        hdlr.setFormatter(fmt)
        root.addHandler(hdlr)
        level = kwargs.get("level")
        # Test against None, not truthiness: a truthiness test silently
        # ignored an explicit level=0 (NOTSET).
        if level is not None:
            root.setLevel(level)
#---------------------------------------------------------------------------
# Utility functions at module level.
# Basically delegate everything to the root logger.
#---------------------------------------------------------------------------
def getLogger(name=None):
    """
    Return a logger with the specified name, creating it if necessary.

    If no name is specified (or it is empty), the root logger is returned.
    """
    if not name:
        return root
    return Logger.manager.getLogger(name)
#def getRootLogger():
# """
# Return the root logger.
#
# Note that getLogger('') now does the same thing, so this function is
# deprecated and may disappear in the future.
# """
# return root
def critical(msg, *args, **kwargs):
    """
    Log a message with severity 'CRITICAL' on the root logger.

    Calls basicConfig() first if the root logger has no handlers, so
    simple scripts get output without explicit setup.
    """
    if len(root.handlers) == 0:
        basicConfig()
    # apply() is the pre-Python-2.0 spelling of root.critical(msg, *args, **kwargs).
    apply(root.critical, (msg,)+args, kwargs)
fatal = critical    # alias mirroring Logger.fatal
def error(msg, *args, **kwargs):
    """
    Log a message with severity 'ERROR' on the root logger.

    Calls basicConfig() first if the root logger has no handlers.
    """
    if len(root.handlers) == 0:
        basicConfig()
    # apply() is the pre-Python-2.0 spelling of root.error(msg, *args, **kwargs).
    apply(root.error, (msg,)+args, kwargs)
def exception(msg, *args):
    """
    Log a message with severity 'ERROR' on the root logger,
    with exception information.

    Note: unlike error(), no **kwargs are accepted; exc_info is forced on.
    """
    apply(error, (msg,)+args, {'exc_info': 1})
def warning(msg, *args, **kwargs):
    """
    Log a message with severity 'WARNING' on the root logger.

    Calls basicConfig() first if the root logger has no handlers.
    """
    if len(root.handlers) == 0:
        basicConfig()
    # apply() is the pre-Python-2.0 spelling of root.warning(msg, *args, **kwargs).
    apply(root.warning, (msg,)+args, kwargs)
warn = warning  # alias mirroring Logger.warn
def info(msg, *args, **kwargs):
    """
    Log a message with severity 'INFO' on the root logger.

    Calls basicConfig() first if the root logger has no handlers.
    """
    if len(root.handlers) == 0:
        basicConfig()
    # apply() is the pre-Python-2.0 spelling of root.info(msg, *args, **kwargs).
    apply(root.info, (msg,)+args, kwargs)
def debug(msg, *args, **kwargs):
    """
    Log a message with severity 'DEBUG' on the root logger.

    Calls basicConfig() first if the root logger has no handlers.
    """
    if len(root.handlers) == 0:
        basicConfig()
    # apply() is the pre-Python-2.0 spelling of root.debug(msg, *args, **kwargs).
    apply(root.debug, (msg,)+args, kwargs)
def log(level, msg, *args, **kwargs):
    """
    Log 'msg % args' with the integer severity 'level' on the root logger.

    Calls basicConfig() first if the root logger has no handlers.
    """
    if len(root.handlers) == 0:
        basicConfig()
    # apply() is the pre-Python-2.0 spelling of root.log(level, msg, *args, **kwargs).
    apply(root.log, (level, msg)+args, kwargs)
def disable(level):
    """
    Disable all logging calls less severe than 'level'.

    Sets the manager-wide threshold that every logging method checks
    before doing any work.
    """
    root.manager.disable = level
def shutdown():
    """
    Perform any cleanup actions in the logging system (e.g. flushing
    buffers).

    Should be called at application exit; iterates a copy of the
    module-level handler list so handlers may deregister themselves
    during close().
    """
    for h in _handlerList[:]: # was _handlers.keys():
        #errors might occur, for example, if files are locked
        #we just ignore them if raiseExceptions is not set
        try:
            h.flush()
            h.close()
        except:
            if raiseExceptions:
                raise
            #else, swallow
#Let's try and shutdown automatically on application exit...
try:
    import atexit
    atexit.register(shutdown)
except ImportError: # for Python versions < 2.0
    def exithook(status, old_exit=sys.exit):
        # Fallback: wrap sys.exit so shutdown() still runs where the
        # atexit module does not exist.
        try:
            shutdown()
        finally:
            old_exit(status)
    sys.exit = exithook
# Copyright 2001-2005 by Vinay Sajip. All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
# provided that the above copyright notice appear in all copies and that
# both that copyright notice and this permission notice appear in
# supporting documentation, and that the name of Vinay Sajip
# not be used in advertising or publicity pertaining to distribution
# of the software without specific, written prior permission.
# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""
Configuration functions for the logging package for Python. The core package
is based on PEP 282 and comments thereto in comp.lang.python, and influenced
by Apache's log4j system.
Should work under Python versions >= 1.5.2, except that source line
information is not available unless 'sys._getframe()' is.
Copyright (C) 2001-2004 Vinay Sajip. All Rights Reserved.
To use, simply 'import logging' and log away!
"""
import sys, logging, logging.handlers, string, socket, struct, os, traceback, types
try:
import thread
import threading
except ImportError:
thread = None
from SocketServer import ThreadingTCPServer, StreamRequestHandler
DEFAULT_LOGGING_CONFIG_PORT = 9030  # default TCP port used by listen()
if sys.platform == "win32":
    RESET_ERROR = 10054   #WSAECONNRESET
else:
    RESET_ERROR = 104     #ECONNRESET
#
# The following code implements a socket listener for on-the-fly
# reconfiguration of logging.
#
# _listener holds the server object doing the listening
_listener = None
def fileConfig(fname, defaults=None):
    """
    Read the logging configuration from a ConfigParser-format file.

    This can be called several times from an application, allowing an end user
    the ability to select from various pre-canned configurations (if the
    developer provides a mechanism to present the choices and load the chosen
    configuration).

    In versions of ConfigParser which have the readfp method [typically
    shipped in 2.x versions of Python], you can pass in a file-like object
    rather than a filename, in which case the file-like object will be read
    using readfp.

    NOTE(security): handler class names and constructor arguments are
    eval()'d from the config file (see below) - only load trusted
    configurations.
    """
    import ConfigParser
    cp = ConfigParser.ConfigParser(defaults)
    if hasattr(cp, 'readfp') and hasattr(fname, 'readline'):
        cp.readfp(fname)
    else:
        cp.read(fname)
    #first, do the formatters...
    flist = cp.get("formatters", "keys")
    if len(flist):
        flist = string.split(flist, ",")
        formatters = {}
        for form in flist:
            sectname = "formatter_%s" % form
            opts = cp.options(sectname)
            # The trailing 1 asks ConfigParser for the raw (uninterpolated)
            # value, since format strings contain '%'.
            if "format" in opts:
                fs = cp.get(sectname, "format", 1)
            else:
                fs = None
            if "datefmt" in opts:
                dfs = cp.get(sectname, "datefmt", 1)
            else:
                dfs = None
            f = logging.Formatter(fs, dfs)
            formatters[form] = f
    #next, do the handlers...
    #critical section...
    logging._acquireLock()
    try:
        try:
            #first, lose the existing handlers...
            logging._handlers.clear()
            #now set up the new ones...
            hlist = cp.get("handlers", "keys")
            if len(hlist):
                hlist = string.split(hlist, ",")
                handlers = {}
                fixups = [] #for inter-handler references
                for hand in hlist:
                    try:
                        sectname = "handler_%s" % hand
                        klass = cp.get(sectname, "class")
                        opts = cp.options(sectname)
                        if "formatter" in opts:
                            fmt = cp.get(sectname, "formatter")
                        else:
                            fmt = ""
                        # NOTE(security): eval of config-supplied text.
                        klass = eval(klass, vars(logging))
                        args = cp.get(sectname, "args")
                        # NOTE(security): eval of config-supplied text.
                        args = eval(args, vars(logging))
                        h = apply(klass, args)  # pre-2.0 spelling of klass(*args)
                        if "level" in opts:
                            level = cp.get(sectname, "level")
                            h.setLevel(logging._levelNames[level])
                        if len(fmt):
                            h.setFormatter(formatters[fmt])
                        #temporary hack for FileHandler and MemoryHandler.
                        if klass == logging.handlers.MemoryHandler:
                            if "target" in opts:
                                target = cp.get(sectname,"target")
                            else:
                                target = ""
                            if len(target): #the target handler may not be loaded yet, so keep for later...
                                fixups.append((h, target))
                        handlers[hand] = h
                    except: #if an error occurs when instantiating a handler, too bad
                        pass #this could happen e.g. because of lack of privileges
                #now all handlers are loaded, fixup inter-handler references...
                for fixup in fixups:
                    h = fixup[0]
                    t = fixup[1]
                    h.setTarget(handlers[t])
            #at last, the loggers...first the root...
            llist = cp.get("loggers", "keys")
            llist = string.split(llist, ",")
            llist.remove("root")
            sectname = "logger_root"
            root = logging.root
            log = root
            opts = cp.options(sectname)
            if "level" in opts:
                level = cp.get(sectname, "level")
                log.setLevel(logging._levelNames[level])
            for h in root.handlers[:]:
                root.removeHandler(h)
            hlist = cp.get(sectname, "handlers")
            if len(hlist):
                hlist = string.split(hlist, ",")
                for hand in hlist:
                    log.addHandler(handlers[hand])
            #and now the others...
            #we don't want to lose the existing loggers,
            #since other threads may have pointers to them.
            #existing is set to contain all existing loggers,
            #and as we go through the new configuration we
            #remove any which are configured. At the end,
            #what's left in existing is the set of loggers
            #which were in the previous configuration but
            #which are not in the new configuration.
            existing = root.manager.loggerDict.keys()
            #now set up the new ones...
            for log in llist:
                sectname = "logger_%s" % log
                qn = cp.get(sectname, "qualname")
                opts = cp.options(sectname)
                if "propagate" in opts:
                    propagate = cp.getint(sectname, "propagate")
                else:
                    propagate = 1
                logger = logging.getLogger(qn)
                if qn in existing:
                    existing.remove(qn)
                if "level" in opts:
                    level = cp.get(sectname, "level")
                    logger.setLevel(logging._levelNames[level])
                for h in logger.handlers[:]:
                    logger.removeHandler(h)
                logger.propagate = propagate
                logger.disabled = 0
                hlist = cp.get(sectname, "handlers")
                if len(hlist):
                    hlist = string.split(hlist, ",")
                    for hand in hlist:
                        logger.addHandler(handlers[hand])
            #Disable any old loggers. There's no point deleting
            #them as other threads may continue to hold references
            #and by disabling them, you stop them doing any logging.
            for log in existing:
                root.manager.loggerDict[log].disabled = 1
        except:
            # Report configuration errors to stderr rather than raising.
            ei = sys.exc_info()
            traceback.print_exception(ei[0], ei[1], ei[2], None, sys.stderr)
            del ei
    finally:
        logging._releaseLock()
def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
    """
    Start up a socket server on the specified port, and listen for new
    configurations.

    These will be sent as a file suitable for processing by fileConfig().
    Returns a Thread object on which you can call start() to start the server,
    and which you can join() when appropriate. To stop the server, call
    stopListening().
    """
    if not thread:
        raise NotImplementedError, "listen() needs threading to work"
    class ConfigStreamHandler(StreamRequestHandler):
        """
        Handler for a logging configuration request.

        It expects a completely new logging configuration and uses fileConfig
        to install it.
        """
        def handle(self):
            """
            Handle a request.

            Each request is expected to be a 4-byte length, packed using
            struct.pack(">L", n), followed by the config file.
            Uses fileConfig() to do the grunt work.
            """
            import tempfile
            try:
                conn = self.connection
                chunk = conn.recv(4)
                if len(chunk) == 4:
                    slen = struct.unpack(">L", chunk)[0]
                    chunk = self.connection.recv(slen)
                    # recv() may return fewer bytes than requested; loop
                    # until the whole payload has arrived.
                    while len(chunk) < slen:
                        chunk = chunk + conn.recv(slen - len(chunk))
                    #Apply new configuration. We'd like to be able to
                    #create a StringIO and pass that in, but unfortunately
                    #1.5.2 ConfigParser does not support reading file
                    #objects, only actual files. So we create a temporary
                    #file and remove it later.
                    file = tempfile.mktemp(".ini")
                    f = open(file, "w")
                    f.write(chunk)
                    f.close()
                    fileConfig(file)
                    os.remove(file)
            except socket.error, e:     # Python 2 'except E, e' syntax
                if type(e.args) != types.TupleType:
                    raise
                else:
                    # Swallow connection resets from the peer; re-raise
                    # anything else.
                    errcode = e.args[0]
                    if errcode != RESET_ERROR:
                        raise
    class ConfigSocketReceiver(ThreadingTCPServer):
        """
        A simple TCP socket-based logging config receiver.
        """
        allow_reuse_address = 1
        def __init__(self, host='localhost', port=DEFAULT_LOGGING_CONFIG_PORT,
                     handler=None):
            ThreadingTCPServer.__init__(self, (host, port), handler)
            # 'abort' is read/written under the logging module lock.
            logging._acquireLock()
            self.abort = 0
            logging._releaseLock()
            self.timeout = 1
        def serve_until_stopped(self):
            import select
            abort = 0
            while not abort:
                # Poll with a timeout so the abort flag is re-checked
                # roughly once per second.
                rd, wr, ex = select.select([self.socket.fileno()],
                                           [], [],
                                           self.timeout)
                if rd:
                    self.handle_request()
                logging._acquireLock()
                abort = self.abort
                logging._releaseLock()
    def serve(rcvr, hdlr, port):
        # Thread target: create the receiver, publish it via the module
        # global so stopListening() can find it, then serve until aborted.
        server = rcvr(port=port, handler=hdlr)
        global _listener
        logging._acquireLock()
        _listener = server
        logging._releaseLock()
        server.serve_until_stopped()
    return threading.Thread(target=serve,
                            args=(ConfigSocketReceiver,
                                  ConfigStreamHandler, port))
def stopListening():
    """
    Stop the listening server which was created with a call to listen().

    Sets the receiver's abort flag (checked by its serve loop about once a
    second) and forgets the module-level reference to it.
    """
    global _listener
    if _listener:
        logging._acquireLock()
        try:
            _listener.abort = 1
            _listener = None
        finally:
            # Always release: the original released outside try/finally
            # and could leave the module lock held if an error occurred.
            logging._releaseLock()
| Python |
import threading
import sqlite3
import time
import os
import sys
import cPickle as pickle
from Queue import Queue
DB_DIR = './'            # default directory for database files
DB_FILENAME = 'data.db'  # default database file name
DB_DEBUG = True          # NOTE(review): not read anywhere in this module - confirm before removing
dbs = {}                 # cache of Database instances used by get_database()
def get_database(dir, filename):
    """
    Return the cached Database for dir/filename, creating it on first use.

    Instances are cached in the module-level 'dbs' dict so repeated calls
    with the same arguments share a single Database object.
    """
    global dbs
    # Key on the (dir, filename) pair: the original concatenated the two
    # strings, so e.g. ('a', 'bc') and ('ab', 'c') collided on key 'abc'.
    name = (dir, filename)
    if name in dbs:
        db = dbs[name]
    else:
        db = Database(dir, filename)
        dbs[name] = db
    return db
class Database:
class ConnWrapper:
def __init__(self, file):
self.__file = file
def __dict_factory(self, cursor, row):
d = {}
for idx, col in enumerate(cursor.description):
d[col[0]] = row[idx]
return d
def execute(self, query, arg=None):
conn = sqlite3.connect(self.__file)
conn.row_factory = self.__dict_factory
dres = []
try:
c = conn.cursor()
res = c.execute(query, arg or tuple())
for r in res:
dres.append(r)
conn.commit()
finally:
conn.close()
return dres
def __init__(self, d=DB_DIR, f=DB_FILENAME):
DB_PATH = d + '/' + f
if not os.path.exists(d):
os.makedirs(d)
if not os.path.exists(DB_PATH):
create = True
else:
create = False
self.__conn = Database.ConnWrapper(DB_PATH)
if create:
self.__create()
self.__tables = {}
for row in self.__conn.execute(''' SELECT pickled FROM tables '''):
pickled = row['pickled']
if pickled == 'tables':
continue
try:
table = pickle.loads(pickled.encode('ascii'))
table.set_conn(self.__conn)
self.__tables[table.__class__.__name__] = table
except Exception, e:
print "Couldn't load table:",e
print 'Finished init. Tables:',self.__tables
def __create(self):
print 'creating tables'
self.__conn.execute(''' CREATE TABLE tables (id INTEGER PRIMARY KEY, pickled STRING) ''')
self.__conn.execute(''' INSERT INTO tables VALUES (?, ?) ''', (1, 'tables'))
print 'created tables'
def clear(self):
self.__conn.close()
os.remove(DB_FILE_URL)
self.__conn = Database.ThreadedConnection(DB_FILE_URL)
self.__create()
def reset_tables(self):
for table in self.__tables.values():
table.reset()
def add_table(self, table):
#full_name = table.__module__ + table.__class__.__name__
#if full_name.startswith('__main__'):
# full_name = full_name[8:]
pickled = pickle.dumps(table)
[result] = self.__conn.execute(''' SELECT COUNT(*) AS count FROM tables WHERE pickled=? ''', (pickled,))
if result['count'] > 0:
raise RuntimeError('There is already a table called ' + table.__class__.__name__)
else:
self.__conn.execute(''' INSERT INTO tables VALUES (?,?) ''', (None,pickled))
table.set_conn(self.__conn)
table._init()
self.__tables[table.__class__.__name__] = table
def get_table(self, cls):
name = cls.__name__
if name in self.__tables:
return self.__tables[name]
else:
raise RuntimeError("That table doesn't exist.")
def has_table(self, cls):
return cls.__name__ in self.__tables
def get_or_create(self, cls):
#full_name = cls.__module__ + cls.__name__
#if full_name.startswith('__main__'):
# full_name = full_name[8:]
if self.has_table(cls):
return self.get_table(cls)
else:
instance = cls()
self.add_table(instance)
return instance
def shutdown(self):
pass
class Table:
    """
    Base class for persisted tables.

    Subclasses implement _init() (create the schema) and _drop()
    (remove it); reset() recreates the schema by dropping then
    re-initialising.
    """
    def __init__(self):
        pass
    def set_conn(self, conn):
        """Attach the connection wrapper used for all queries."""
        self._conn = conn
    def reset(self):
        """Drop and recreate this table's schema."""
        self._drop()
        self._init()
    def _drop(self):
        # Must be overridden to remove the table's schema.
        raise NotImplementedError
    def _init(self):
        # Must be overridden to create the table's schema.
        raise NotImplementedError
class MessageMeta(Table):
    """
    Persists per-message metadata: id, expiry timestamp and delivery mark.

    Rows returned by ConnWrapper.execute() are column-name -> value dicts
    (see ConnWrapper's row factory), and an empty result is an empty list.
    """
    def __init__(self):
        Table.__init__(self)
    def _init(self):
        """Create the message_meta schema."""
        self._conn.execute('''
        create table message_meta
        (mid INTEGER PRIMARY KEY NOT NULL, expires INTEGER NOT NULL, delivered INTEGER)
        ''')
    def _drop(self):
        """Drop the message_meta schema."""
        self._conn.execute('''
        drop table message_meta
        ''')
    def reset(self, conn=None):
        """
        Drop and recreate the schema.

        'conn' is accepted for backward compatibility but ignored; the
        original mandatory parameter made Database.reset_tables() (which
        calls reset() with no arguments) fail with a TypeError.
        """
        self._drop()
        self._init()
    def register(self, msg):
        """Insert a row for msg (must expose .mid, .expires, .delivered)."""
        c = self._conn
        c.execute(''' INSERT INTO message_meta VALUES (?,?,?) ''', (msg.mid, msg.expires, msg.delivered))
    def is_expired(self, mid):
        """Return True if message 'mid' has passed its expiry time."""
        c = self._conn
        r = c.execute(''' SELECT expires FROM message_meta WHERE mid=? ''', (mid,))
        # execute() returns a (possibly empty) list, never None, so test
        # for emptiness; raising a real exception replaces the original
        # string raise, which is illegal in modern Pythons.
        if not r:
            raise KeyError('Message ' + str(mid) + ' not in DB.')
        # Rows are dicts, so index by column name, not position.
        return r[0]['expires'] < time.time()
    def is_delivered(self, mid):
        """Return True if message 'mid' has a delivery mark."""
        c = self._conn
        r = c.execute(''' SELECT delivered FROM message_meta WHERE mid=? ''', (mid,))
        if not r:
            raise KeyError('Message ' + str(mid) + ' not in DB.')
        return r[0]['delivered'] is not None
    def get_all(self):
        """Return every row as a dict with mid/expires/delivered keys."""
        c = self._conn
        rows = c.execute(''' SELECT * FROM message_meta ''')
        msgs = []
        for r in rows:
            msgs.append({'mid': r['mid'], 'expires': r['expires'], 'delivered': r['delivered']})
        return msgs
if __name__ == '__main__':
    # Ad-hoc smoke test exercising the MessageMeta table against the
    # default database file.
    class Message:
        # Minimal stand-in providing the attributes register() reads.
        def __init__(self, mid, expires, delivered):
            self.mid = mid
            self.expires = expires
            self.delivered = delivered
    d = Database()
    try:
        #if d.has_table('MessageMeta'):
        #    m = d.get_table('MessageMeta')
        #else:
        #    m = MessageMeta()
        #    d.add_table(m)
        m = d.get_or_create(MessageMeta)
        #m.register(Message(12345, 1, 2))
        #m.register(Message(45678, 5, 6))
        print 'delivered?',m.is_delivered(12345)
        print 'expired?',m.is_expired(12345)
        print 'all:',m.get_all()
        #print 'should die:',m.is_delivered(123123123)
        #assert False # kept running!
    finally:
        d.shutdown()
| Python |
import os
from presence import ps
from fvutil.managers import text, audio, objectmanager
# For live audio
from audio import mixerinterface
# For interfacing with flash
from ui import flashinterface
import ui.uiconstants as uicon
import gobject
from data import db, broker, textmanager, loginmanager, interactionmanager, shoppingmanager
from poll import pollmanager
# For commandline args
import getopt
LAPTOP = False  # NOTE(review): not referenced in this chunk - confirm usage before removing
FLASH_LOCAL = True # normally True for deployment, False for testing UI from remote machine
class Engine:
    """FluidVoice v2 application engine: wires the managers, audio mixer
    and Flash UI together and owns the glib main loop.

    NOTE(review): this class is broken/truncated as committed. __init__
    references attributes and names that are never created anywhere in
    this module (self._am, self._mixer, self._fi, self._login,
    self._broker, self._poll, self._text, self._shopping, self._timeline,
    self._presence, self._gs, audiomanager, net, d), and onLoginResult is
    decorated with an undefined `threadtrace`. It looks like the
    initialization section was lost; kept verbatim for reference --
    confirm against version history before use.
    """
    def __init__(self, iface="wlan0",configfile=None):
        # Initialize audio service
        self._audio = audiomanager.AudioManager(self._am, self._mixer, d)
        # Initialize TextMsgService
        self._login.connect( 'login_result', self.onLoginResult)
        # Connect to UI events
        self._fi.connect_signal('login', self._login.onLogin)
        #self._fi.connect_signal('edit_object', self._broker.onEditObject)
        self._fi.connect_signal('edit_object', self._mixer.onChange)
        self._fi.connect_signal('edit_quality', self._broker.onEditQuality)
        # Asynch audio UI events
        self._fi.connect_signal('start_record' , self._mixer.onStartRecord)
        self._fi.connect_signal('stop_record', self._mixer.onStopRecord)
        self._fi.connect_signal('start_play', self._mixer.onStartPlay)
        self._fi.connect_signal('stop_play', self._mixer.onStopPlay)
        # Poll record/play events fan out to both the mixer and the poll
        # manager (two handlers per signal).
        self._fi.connect_signal('poll_start_record', self._mixer.onStartRecord)
        self._fi.connect_signal('poll_start_record', self._poll.onStartRecord)
        self._fi.connect_signal('poll_stop_record', self._mixer.onStopRecord)
        self._fi.connect_signal('poll_stop_record', self._poll.onStopRecord)
        self._fi.connect_signal('poll_start_play', self._mixer.onStartPlay)
        self._fi.connect_signal('poll_start_play', self._poll.onStartPlay)
        self._fi.connect_signal('poll_stop_play', self._mixer.onStopPlay)
        self._fi.connect_signal('poll_stop_play', self._poll.onStopPlay)
        self._fi.connect_signal('send_object', self._broker.onSendObject)
        self._fi.connect_signal('get_messages', self._timeline.getMessages)
        self._fi.connect_signal('get_message_count', self._timeline.getMessageCount)
        # Connect to presence events
        self._presence.connect( 'online', self._fi.updateObject )
        self._presence.connect( 'online', self._mixer.onAddInput )
        self._presence.connect( 'object_offline', self._fi.updateObject )
        self._presence.connect( 'object_offline', self._mixer.onOffline )
        # Asynch audio flash callbacks
        self._mixer.connect('my_ssrc', self._presence.onMySSRC)
        self._mixer.connect('started_record', self._fi.startedRecord)
        self._mixer.connect('stopped_record', self._fi.stoppedRecord)
        self._mixer.connect('started_play', self._fi.startedPlay)
        self._mixer.connect('stopped_play', self._fi.stoppedPlay)
        self._audio.connect('sent_audio', self._fi.sentObject)
        self._audio.connect('received_audio', self._fi.updateObject)
        self._audio.connect('received_audio', self._mixer.onAddInput)
        # Asynch text UI callbacks
        self._text.connect('created_text', self._fi.sentObject)
        self._text.connect('received_text', self._fi.updateObject)
        self._shopping.connect('created_list', self._fi.sentObject)
        self._shopping.connect('received_list', self._fi.updateObject)
        # Asynch poll UI callbacks
        self._poll.connect('created_poll', self._fi.sentObject)
        self._poll.connect('received_poll', self._fi.updateObject)
        self._poll.connect('submitted_pollvote', self._fi.sentObject)
        self._poll.connect('received_pollresults', self._fi.resultsReady)
        # audio requests from UI
        self._fi.connect_signal('push_to_talk', self._mixer.onMIC)
        self._fi.connect_signal('push_to_talk', self._presence.onChangeStatus)
        self._mixer.connect( 'ack_push_to_talk', self._fi.onAckPushToTalk)
        # UI request to open an object
        self._fi.connect_signal('open_object', self._broker.onOpenObject)
        #self._broker.connect('return_object', self._fi.returnObject)
        # group related methods
        self._fi.connect_signal('new_group', self._gs.onNewGroup)
        self._fi.connect_signal('add_user_to_group',self._gs.onAddUserToGroup)
        self._fi.connect_signal('del_user_from_group', self._gs.onDelUserFromGroup )
        self._fi.connect_signal('show_group', self._gs.onShowGroup )
        self._fi.connect_signal('del_group', self._gs.onDelGroup )
        self._fi.connect_signal('update_group_name', self._gs.onUpdateGroupName )
        # response events and new events in group service
        #self._gs.connect('return_groups', self._fi.sendGroups )
        self._gs.connect('group_appeared', self._fi.groupAppeared )
        self._gs.addLookupNameCallback( self._presence.getUserName )
        # Start network thread
        net.start()
    def run(self):
        """Enter the glib main loop; blocks until the loop quits."""
        # Start main application loop
        gobject.threads_init()
        self._mainloop = gobject.MainLoop()
        self._mainloop.run()
    # NOTE(review): `threadtrace` is not defined or imported in this module.
    @threadtrace
    def onLoginResult(self, obj, result, username):
        """
        Called on interface-connect.
        Sends the UI the list of users and objects around me, or a
        (None, None) ack when the login failed (result != 1).
        """
        import threading
        print "In thread:",threading.currentThread().getName()
        allusers = self._presence.getAllUsers()
        allobjects = self._broker.getAllObjects()
        #for i in allitems:
        # The literal below is inert sample data kept for reference only.
        """
        allitems = [{'subcommand':uicon.ADD, 'guid':123, 'name':'Kwan', 'status':uicon.AVAILABLE, 'type':uicon.FV},
        {'subcommand':uicon.ADD, 'guid':124, 'name':'An', 'status': uicon.OFFLINE, 'type':uicon.FV},
        {'subcommand':uicon.ADD, 'guid':125, 'name':'John', 'status':uicon.AVAILABLE, 'type':uicon.PHONE},
        {'subcommand':uicon.ADD, 'guid':125, 'name':'Txt->John', 'status':uicon.AVAILABLE, 'type':uicon.TXTMSG, 'priority':uicon.PRIORITY_LOW},
        {'subcommand':uicon.ADD, 'guid':126, 'name':'Voice->Charles', 'status':uicon.AVAILABLE, 'type':uicon.AUDIOMSG, 'priority':uicon.PRIORITY_MEDIUM},
        {'subcommand':uicon.ADD, 'guid':127, 'name':'Poll->John', 'status':uicon.AVAILABLE, 'type':uicon.POLL, 'priority':uicon.PRIORITY_HIGH},
        {'subcommand':uicon.ADD, 'guid':128, 'name':'Partial->John', 'status':uicon.AVAILABLE, 'type':uicon.PARTIAL, 'priority':uicon.PRIORITY_MEDIUM},
        {'subcommand':uicon.ADD, 'guid':129, 'name':'Poll->John', 'status':uicon.AVAILABLE, 'type':uicon.POLLRESULT, 'priority':uicon.PRIORITY_LOW},
        {'subcommand':uicon.ADD, 'guid':130, 'name':'Fungroup', 'status':uicon.AVAILABLE, 'type':uicon.GROUP}]
        """
        if result == 1:
            self._fi.loginAck(username, allusers+allobjects)
        else:
            self._fi.loginAck( None, None)
    def shutDown(self):
        """Stop logging and close the database."""
        self._logger.endLog()
        self._db.shutDown()
        print "Ending Log"
def usage():
    """Print command-line usage for the fvengine script to stdout."""
    print """
fvengine : fluidvoice v3 application
python fvengine.py [-c <config>] [-f <flash local/remote>]
-h print this message
-a run in adhoc network mode
-i <ifname> run with the specified interface name (default wlan0)
-c <config> use asynchronous messaging configuration given in
specified config file
-f <local/remote> "local" to use UI on localhost; "remote" to allow usage
of remote UI. default is local.
"""
if __name__ == "__main__":
o,a = getopt.getopt(sys.argv[1:], 'hc:f:ai:') # : specifies we expect a value after c and f
opts = {}
for k,v in o:
opts[k] = v
# If help has been asked, display usage and exit
if '-h' in opts:
usage()
sys.exit(0)
if '-i' in opts:
ifname = opts['-i']
else:
ifname = "wlan0"
if '-a' in opts:
common.SetRCVR(ifname)
if '-c' in opts:
config = opts['-c']
else:
print "No config file specified, using default FV config."
config = "../config/fv"
if '-f' in opts:
val = opts['-f']
if val == "remote":
FLASH_LOCAL = False
elif val == "local":
FLASH_LOCAL = True
else:
print "Unrecognized value given for -f argument:",val
usage()
sys.exit(1)
# check to see if directories are ready
checksetup.setupDirs()
app = Engine(ifname, config)
try:
app.run()
except KeyboardInterrupt:
app.shutDown()
am.AsyncManager.instance().shutDown()
print "Crtl+C pressed. Shutting down."
| Python |
from util import config
import logging.config
logging.config.fileConfig('logging.config')
from async import asyncmanager
from db import db
from net import network, queues
from presence import ps
from fvutil import broker, accounts, fvcon
from fvutil.managers import audio, text, group, poll, shopping, timeline, presencewrapper, tivo
from ui import flashinterface
from audio import mixerinterface
import os
import gobject
logger = logging.getLogger('fluidvoice')
import cPickle as pickle
class FluidVoice:
class Control:
def __init__(self, iface, port, presence):
self.__presence = presence
self.__queue = queues.Queue()
self.__socket = network.Unicast(iface, config.get_int('fv', 'control-port'), self.__queue, self.__receive)
self.__type_receivers = {}
def send(self, control, id):
"""
Sends a control message to a node with the given id
@param control: An object that represents some control information
@type control: whatever
@param id: The id of the direct neighbor to send to
@type id: int
"""
neighbor = self.__presence.get_neighbor(id)
addr = (neighbor.ip, neighbor.meta[config.get_int('fv', 'control-port-key')])
data = pickle.dumps(control, pickle.HIGHEST_PROTOCOL)
logger.debug('Sending control %s to address %s', control, addr)
self.__queue.put(data, addr)
def is_neighbor(self, id):
neighbors = self.__presence.get_neighbors()
for n in neighbors:
if n.pid == id:
return True
return False
def register_type_receiver(self, type, recv_func):
type_name = type.__name__
if type_name not in self.__type_receivers:
self.__type_receivers[type_name] = []
self.__type_receivers[type_name].append(recv_func)
def __receive(self, data, addr):
control = pickle.loads(data)
control_type = control.__class__.__name__
receivers = self.__type_receivers.get(control_type, None)
if receivers:
for receiver in receivers:
receiver(control)
else:
raise ValueError, "Control channel got a type it doesn't know how to handle: %s" % (control_type,)
# TODO: Get rid of my-id and my-name as we use proper login system
def __init__(self, iface, config_files=[], my_id=None, my_name=None, flash_local=False):
logger.critical('----------------- START RUN -----------------')
default_config = os.path.join(os.path.dirname(__file__), 'fv.config')
config.add_file(default_config)
if config_files:
config.add_files(config_files)
print 'Initializing:',my_id,my_name
# Initialize all the modules.
self.__presence = ps.PresenceService(iface, my_id, my_name, standalone=False, config_files=config_files)
# Add meta about me
self.__presence.add_meta(fvcon.STATUS, fvcon.ONLINE)
self.__presence.add_meta(fvcon.PRIORITY, fvcon.PRIORITY_LOW)
# TODO: Do we need this?
self.__presence.add_meta(fvcon.AUDIO_PORT, 'audio port')
self.__presence.add_meta(fvcon.SSRC, None)
self.__presence.add_meta(config.get_int('fv', 'control-port-key'), config.get_int('fv', 'control-port'))
self.__control = FluidVoice.Control(iface, config.get_int('fv', 'control-port'), self.__presence)
self.__am = asyncmanager.AsyncManager(iface, self.__presence, config_files=config_files)
self.__accounts = accounts.Accounts.instance()
self.__mixer = mixerinterface.MixerInterface(network.get_broadcast_addr(iface))
# TODO: Remove this:
if not self.__accounts.user_exists('Guest'):
self.__accounts.create_user('Guest', '')
self.__presence_wrapper = presencewrapper.PresenceWrapper(self.__presence)
self.__text = text.TextManager(self.__am, my_id)
self.__audio = audio.AudioManager(self.__am, my_id, self.__mixer)
self.__group = group.GroupManager(self.__am, my_id)
self.__poll = poll.PollManager(self.__am, my_id, self.__mixer)
self.__shopping = shopping.ShoppingManager(self.__am, my_id)
self.__tivo = tivo.TivoManager(self.__am, my_id, self.__mixer, self.__control)
self.__broker = broker.ObjectBroker(self.__presence_wrapper, self.__group, self.__audio,
self.__text, self.__poll, self.__shopping, self.__tivo)
self.__timeline = timeline.Timeline(my_id)
self.__flash = flashinterface.FlashInterface(iface, local=flash_local)
# Wire up callbacks, events
self.__am.connect('message-received', self.__broker.on_receive_object)
#self.__accounts.connect('login-result', self.on_login_result)
self.__flash.connect('login', self.__on_login)
self.__flash.connect('edit_object', self.__mixer.onChange)
# Async audio
self.__flash.connect('start_record' , self.__mixer.onStartRecord)
self.__flash.connect('stop_record', self.__mixer.onStopRecord)
self.__flash.connect('start_play', self.__mixer.onStartPlay)
self.__flash.connect('stop_play', self.__mixer.onStopPlay)
self.__flash.connect('poll_start_record', self.__poll.on_start_record)
self.__flash.connect('poll_stop_record', self.__poll.on_stop_record)
self.__flash.connect('poll_start_play', self.__poll.on_start_play)
self.__flash.connect('poll_stop_play', self.__poll.on_stop_play)
self.__flash.connect('send_object', self.__broker.on_send_object)
self.__flash.connect('get_messages', self.__timeline.get_messages)
self.__flash.connect('get_message_count', self.__timeline.get_message_count)
self.__presence.connect('neighbor-online', self.__neighbor_online)
self.__presence.connect('neighbor-update', self.__neighbor_update)
self.__presence.connect('neighbor-offline', self.__neighbor_offline)
self.__mixer.connect('my_ssrc', self.__on_my_SSRC)
self.__mixer.connect('started_record', self.__flash.startedRecord)
self.__mixer.connect('stopped_record', self.__flash.stoppedRecord)
self.__mixer.connect('started_play', self.__flash.startedPlay)
self.__mixer.connect('stopped_play', self.__flash.stoppedPlay)
self.__audio.connect('sent_audio', self.__flash.sentObject)
self.__audio.connect('received_audio', self.__flash.updateObject)
self.__audio.connect('received_audio', self.__mixer.onAddInput)
# Asynch text UI callbacks
self.__text.connect('created_text', self.__flash.sentObject)
self.__text.connect('received_text', self.__flash.updateObject)
self.__shopping.connect('created_list', self.__flash.sentObject)
self.__shopping.connect('received_list', self.__flash.updateObject)
# Asynch poll UI callbacks
self.__poll.connect('created_poll', self.__flash.sentObject)
self.__poll.connect('received_poll', self.__flash.updateObject)
self.__poll.connect('submitted_pollvote', self.__flash.sentObject)
self.__poll.connect('received_pollresults', self.__flash.resultsReady)
# audio requests from UI
self.__flash.connect('push_to_talk', self.__mixer.onMIC)
# TODO: What is this?
#self.__flash.connect('push_to_talk', self.__presence.onChangeStatus)
#self.__mixer.connect( 'ack_push_to_talk', self.__flash.onAckPushToTalk)
# UI request to open an object
self.__flash.connect('open_object', self.__broker.on_open_object)
#self._broker.connect('return_object', self.__flash.returnObject)
# group related methods
self.__flash.connect('new_group', self.__group.on_new_group)
self.__flash.connect('add_user_to_group',self.__group.on_add_user_to_group)
self.__flash.connect('del_user_from_group', self.__group.on_del_user_from_group )
self.__flash.connect('show_group', self.__group.on_show_group )
self.__flash.connect('del_group', self.__group.on_del_group )
self.__flash.connect('update_group_name', self.__group.on_update_group_name)
# Tivo-related methods
self.__flash.connect('start_tivo', self.__tivo.on_start_tivo)
self.__flash.connect('stop_tivo', self.__tivo.on_stop_tivo)
self.__flash.connect('exclude_me_from_tivo', self.__tivo.on_exclude_me_from_tivo)
self.__tivo.connect('sent_tivo', self.__flash.sentObject)
self.__tivo.connect('received_tivo', self.__flash.updateObject)
self.__tivo.connect('started_tivo', self.__flash.onStartedTivo)
self.__tivo.connect('warning_tivo', self.__flash.onWarningTivo)
# response events and new events in group service
#self._gs.connect('return_groups', self.__flash.sendGroups )
self.__group.connect('group_appeared', self.__flash.groupAppeared )
#self._group.addLookupNameCallback( self._presence.getUserName )
network.start()
def run(self):
# Start main application loop
gobject.threads_init()
import thread
thread.start_new_thread(self.__text_control,())
self._mainloop = gobject.MainLoop()
self._mainloop.run()
def __text_control(self):
p = self.__presence
f = self.__flash
c = self.__control
import controlmsgs
msg = controlmsgs.StartTivo(12345, [2,3])
start = pickle.dumps(msg, pickle.HIGHEST_PROTOCOL)
msg = controlmsgs.ExcludeFromTivo(12345, 2)
rec = c._Control__receive
exclude = pickle.dumps(msg, pickle.HIGHEST_PROTOCOL)
while True:
cmd = raw_input('>>')
try:
exec(cmd)
except Exception, e:
print dir(e)
print e
def stop(self):
pass
# Event handlers
def __on_my_SSRC(self, obj, data):
raise NotImplementedError
def __on_login(self, flash, username, password):
success = self.__accounts.authenticate(username, password)
print success
if success:
allusers = self.__presence.get_all_known_users()
allobjects = self.__broker.get_all_objects()
flash.loginAck(username, allusers+allobjects)
print 'fluidvoice: Login ack',username,allusers+allobjects
else:
print 'fluidvoice: Username, password pair incorrect'
# CRAP!
# CRAP!!
# CRAP!!!
def __neighbor_online(self, obj, user, new):
print 'neighbor_online',user.pid
if isinstance(user, ps.UserPresence):
presence_type = fvcon.FV
else:
presence_type = fvcon.SERVICE
if new:
subcommand = fvcon.ADD
else:
subcommand = fvcon.UPDATE
print 'new?',new
presence_dict = {'subcommand':subcommand,
'id': user.pid,
'label':user.name,
'type':presence_type,
'status':user.meta[fvcon.STATUS],
'priority':user.meta[fvcon.PRIORITY],
'address': user.ip,
'port':user.meta[fvcon.AUDIO_PORT],
'ssrc': user.meta[fvcon.SSRC]
}
print 'SSRC!',presence_dict['ssrc']
self.__flash.updateObject(obj, presence_dict)
self.__mixer.onAddInput(obj, presence_dict)
def __neighbor_update(self, obj, user):
print 'neighbor_update',user.pid
if isinstance(user, ps.UserPresence):
presence_type = fvcon.FV
else:
presence_type = fvcon.SERVICE
presence_dict = {'subcommand':fvcon.UPDATE,
'id': user.pid,
'label':user.name,
'type':presence_type,
'status':user.meta[fvcon.STATUS],
'priority':user.meta[fvcon.PRIORITY],
'address': user.ip,
'port':user.meta[fvcon.AUDIO_PORT],
'ssrc': user.meta[fvcon.SSRC]
}
self.__flash.updateObject(obj, presence_dict)
print 'ON ADD INPUT'
self.__mixer.onAddInput(obj, presence_dict)
def __neighbor_offline(self, obj, user):
print 'neighbor_offline',user.pid
if isinstance(user, ps.UserPresence):
presence_type = fvcon.FV
else:
presence_type = fvcon.SERVICE
presence_dict = {'subcommand':fvcon.UPDATE,
'id': user.pid,
'label':user.name,
'type':presence_type,
'status':fvcon.OFFLINE,
'priority':user.meta[fvcon.PRIORITY],
'address': user.ip,
'port':user.meta[fvcon.AUDIO_PORT],
'ssrc': user.meta[fvcon.SSRC]
}
self.__flash.updateObject(obj, presence_dict)
self.__mixer.onOffline(obj, presence_dict)
def __on_my_SSRC(self, obj, ssrc):
print 'Setting my SSRC',ssrc
self.__presence.add_meta(fvcon.SSRC, ssrc)
if __name__ == "__main__":
import getopt
import sys
from util import device
o,a = getopt.getopt(sys.argv[1:], 'hc:f:ai:p:u:d:') # : specifies we expect a value after c and f
opts = {}
for k,v in o:
opts[k] = v
# If help has been asked, display usage and exit
if '-h' in opts:
usage()
sys.exit(0)
if '-i' in opts:
ifname = opts['-i']
else:
ifname = "eth1"
if '-a' in opts:
common.SetRCVR(ifname)
if '-c' in opts:
config_file = opts['-c']
else:
config_file = None
if '-f' in opts:
val = opts['-f']
if val == "remote":
flash_local = False
elif val == "local":
flash_local = True
else:
print "Unrecognized value given for -f argument:",val
usage()
sys.exit(1)
if '-p' in opts:
file = opts['-p']
f = open(file)
contents = f.read()
f.close()
username, user_id = contents.split(',')
else:
if '-u' not in opts:
print 'You must specify a username with the -u flag'
sys.exit(1)
else:
username = opts['-u']
if '-d' not in opts:
print 'You must specify a user id with the -d flag'
sys.exit(1)
else:
user_id = opts['-d']
# check to see if directories are ready
#checksetup.setupDirs()
user_id = int(user_id)
if config_file:
config_files = [config_file]
else:
config_files = []
if device.current() == device.WINDOWS:
config_files.append('windows-overrides.config')
elif device.current() == device.NOKIA_TABLET:
config_files.append('nokia-overrides.config')
app = FluidVoice(ifname, config_files, my_id=user_id, my_name=username, flash_local=flash_local)
try:
app.run()
except KeyboardInterrupt:
app.stop()
print "Crtl+C pressed. Shutting down." | Python |
import cPickle as pickle
import gobject
import time
import socket
import asyncore
import thread
import copy
import re
import sys
import os
from net import network, queues
from util import config, device
from db import db
# TODO:
# - Resilience to network failure
# - Stubbing framework?
# - Move UserPresence (etc) to Presence.User and fix refs?
# Status documentation (fv vs. presence status)
OFFLINE = None
ONLINE = None
import logging
logger = logging.getLogger('presence')
class Presence(gobject.GObject):
    """Base record for anything that can appear on the network: a user, a
    service, or a shared object. Identity is (pid, concrete class).

    Emits 'offline' (pid) -- used by subclasses that track liveness.
    """
    __gsignals__ = {
        'offline' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
                     (gobject.TYPE_STRING,))
    }
    def __init__(self, pid, name):
        gobject.GObject.__init__(self)
        self.pid = pid
        self.name = name
        # Timestamp of the last heartbeat that mentioned this presence.
        self.updated = time.time()
        self.type = self.__class__.__name__
    def __eq__(self, other):
        return (self.pid == other.pid and self.__class__ == other.__class__)
    def __ne__(self, other):
        # Fix: Python 2 does not derive __ne__ from __eq__, so `!=` would
        # otherwise fall back to identity comparison.
        return not self.__eq__(other)
    def __hash__(self):
        return hash(self.pid)
    def __repr__(self):
        return '<Presence: %s,%s>' % (self.pid, self.name)
class UserPresence(Presence):
    """Presence record for a remote user: IP, metadata, advertised objects,
    ping-measured distance, and offline detection via glib timers."""
    def __init__(self, uid, name, ip, meta=None):
        Presence.__init__(self, uid, name)
        self.ip = ip
        # Fix: the old default `meta={}` was a single dict shared by every
        # instance constructed without meta; mutations (e.g. via
        # PresenceService.add_meta) leaked across instances.
        self.meta = meta if meta is not None else {}
        self.objects = []
        self.distance = config.get_int('presence', 'max-distance')
        self.status = ONLINE
    def watch(self, offline_func):
        """Start the offline-check and distance-update timers and register
        *offline_func* as the 'offline' signal handler."""
        gobject.GObject.__init__(self)
        self.__check_online_timer = gobject.timeout_add(config.get_int('presence', 'offline-check-timeout-ms'), self.__check_online)
        self.__update_distance_timer = gobject.timeout_add(config.get_int('presence', 'distance-check-timeout-ms'), self.__update_distance)
        self.connect('offline', offline_func)
        self.updated = time.time()
        # Do the initial distance update.
        def initial_update():
            self.__update_distance()
            return False
        gobject.idle_add(initial_update)
    def has_at_all(self, pid):
        """True if this user advertises *pid* in any form (even partial)."""
        for obj in self.objects:
            if obj.pid == pid: return True
        return False
    def has_complete(self, pid):
        """True if this user advertises a non-partial copy of *pid*."""
        for obj in self.objects:
            if obj.pid == pid and not isinstance(obj, PartialPresence): return True
        return False
    def add_object(self, obj):
        assert obj not in self.objects
        self.objects.append(obj)
        assert obj in self.objects
    def remove_object(self, obj):
        assert obj in self.objects
        self.objects.remove(obj)
        assert obj not in self.objects
    def replace_with_new(self, new):
        """Swap the advertised object with *new*'s pid for *new* itself."""
        old = None
        for o in self.objects:
            if o.pid == new.pid: old = o
        assert old is not None
        self.objects.remove(old)
        self.objects.append(new)
    def alive(self):
        """Record a heartbeat; resets the offline countdown."""
        self.updated = time.time()
    def __update_distance(self):
        # Estimate "distance" as the average ping RTT; fall back to the
        # configured max-distance when ping produces no summary line.
        logger.info('Updating distance for user %i', self.pid)
        if device.current() == device.NOKIA_TABLET:
            command = 'sudo ping -q -c3 ' + self.ip
            regex = re.compile("round-trip min/avg/max = (?P<min>[0-9]+\.[0-9]+)/(?P<avg>[0-9]+\.[0-9]+)/(?P<max>[0-9]+\.[0-9]+)")
        else:
            command = 'ping -q -c3 ' + self.ip
            regex = re.compile("rtt min/avg/max/mdev = (?P<min>[0-9]+\.[0-9]+)/(?P<avg>[0-9]+\.[0-9]+)/(?P<max>[0-9]+\.[0-9]+)/(?P<mdev>[0-9]+\.[0-9]+)")
        ping = os.popen(command, 'r')
        sys.stdout.flush()
        updated = False
        avg = None
        while True:
            line = ping.readline()
            if not line: break
            match = regex.match(line)
            if match:
                avg = float(match.group('avg'))
                updated = True
        if updated:
            self.distance = avg
        else:
            self.distance = config.get_int('presence', 'max-distance')
        logger.info('Updated distance for user %i. New distance: %s', self.pid, self.distance)
        # Returning True keeps the glib timer firing.
        return True
    def __check_online(self):
        offline_period = config.get_int('presence', 'offline-period-s')
        logger.info('Checking if user %i is offline. Last update: %s. Now: %s. Max offline period: %s', self.pid, self.updated, time.time(), offline_period)
        if time.time() - self.updated > offline_period:
            # No heartbeat for too long: stop both timers and announce it.
            self.status = OFFLINE
            gobject.source_remove(self.__check_online_timer)
            gobject.source_remove(self.__update_distance_timer)
            self.emit('offline', self.pid)
            return False
        else:
            return True
class ServicePresence(UserPresence):
    """Presence record for a non-user network service; behaves exactly like
    a UserPresence but is distinguished by its class."""
    def __init__(self, sid, name, ip, meta=None):
        # Fix: the old default `meta={}` was a shared mutable default;
        # normalize to a fresh dict here so this class is safe regardless
        # of the base-class default.
        if meta is None:
            meta = {}
        UserPresence.__init__(self, sid, name, ip, meta)
class ObjectPresence(Presence):
    """Presence record for an object advertised in a user's presence beacon
    (see UserPresence.objects)."""
    def __init__(self, oid, name):
        Presence.__init__(self, oid, name)
class PartialPresence(ObjectPresence):
    """Placeholder for an object the holder has only partially; excluded by
    UserPresence.has_complete()."""
    def __init__(self, oid):
        ObjectPresence.__init__(self, oid, '<partial>')
class ExpiredPresence(ObjectPresence):
    """Marker presence for an object that has expired (name '<expired>')."""
    def __init__(self, pid):
        ObjectPresence.__init__(self, pid, '<expired>')
class DeliveredPresence(ObjectPresence):
    """Marker presence for an object that has been delivered (name '<delivery>')."""
    def __init__(self, pid):
        ObjectPresence.__init__(self, pid, '<delivery>')
class PresenceService(gobject.GObject):
    """Broadcast-heartbeat presence service.

    Periodically broadcasts a pickled copy of `self.me` and tracks the
    heartbeats of everyone else on the subnet, emitting
    neighbor-online/update/offline and object-appeared/disappeared signals
    and persisting every user ever seen to a PresenceTable.
    """
    __gsignals__ = {
        'neighbor-online' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
                             (gobject.TYPE_PYOBJECT,gobject.TYPE_BOOLEAN)),
        'neighbor-update' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
                             (gobject.TYPE_PYOBJECT,)),
        'neighbor-offline' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
                              (gobject.TYPE_PYOBJECT,)),
        'object-appeared' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
                             (gobject.TYPE_PYOBJECT,)),
        'object-disappeared' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
                                (gobject.TYPE_PYOBJECT,)),
    }
    def __init__(self, iface, my_id, my_name, standalone=True, config_files=[]):
        # NOTE(review): config_files=[] is a mutable default argument; it is
        # only read here, but worth normalizing to None at some point.
        gobject.GObject.__init__(self)
        config.add_file(os.path.join(os.path.dirname(__file__), 'presence.config'))
        if config_files:
            config.add_files(config_files)
        # Resolve the module-level status constants from config on first use.
        global OFFLINE, ONLINE
        OFFLINE = config.get_int('presence.status', 'offline')
        ONLINE = config.get_int('presence.status', 'online')
        self.me = UserPresence(my_id, my_name, network.get_ip_addr(iface))
        # I'd much rather use a set() for present and offline, but because you
        # can't retrieve elements from a set, I'm forced to use dictionaries.
        self.__present = {}
        self.__offline = {}
        self.__queue = queues.Queue()
        self.__socket = network.Broadcast(iface, config.get_int('presence', 'port'), self.__queue, self.__receive)
        self.__PRESENCE_INTERVAL = config.get_int('presence', 'presence-interval-ms')
        self.__beat_timer = gobject.timeout_add(self.__PRESENCE_INTERVAL, self.__beat)
        self.__table = db.get_database(config.get('presence', 'db-dir'), config.get('presence', 'db-file')).get_or_create(PresenceTable)
        if standalone:
            network.start()
    def add_meta(self, key, value):
        # Replace any existing value for key in my own advertised metadata.
        if key in self.me.meta:
            del self.me.meta[key]
        self.me.meta[key] = value
    def add_object(self, obj):
        self.me.add_object(obj)
    def remove_object(self, obj):
        self.me.remove_object(obj)
    def replace_with_new(self, obj):
        self.me.replace_with_new(obj)
    def get_known_user(self, uid):
        # NOTE(review): relies on the table exposing get_historical(uid);
        # confirm the PresenceTable API matches.
        user = self.__table.get_historical(uid)
        self.__add_fields_to_user(user)
        return user
    def get_all_known_users(self):
        users = self.__table.get_all_historical()
        for user in users:
            self.__add_fields_to_user(user)
        return users
    # This is a total BS method for adding some metadata to users that
    # can be accessed outside of the class.
    def __add_fields_to_user(self, user):
        # Decorate a DB row (dict-like) with live status/meta/type fields.
        present = self.__present.get(user['uid'], None)
        if not present:
            user['status'] = OFFLINE
        else:
            user['status'] = ONLINE
            user['meta'] = present.meta
            user['type'] = present.type
    def get_neighbor(self, nid):
        return self.__present[nid]
    def get_neighbors(self):
        # Shallow copy so callers can iterate safely while we mutate.
        return copy.copy(self.__present.values())
    def who_has_complete(self, pid):
        """All present neighbors advertising a complete copy of *pid*."""
        neighbors = []
        for neighbor in self.__present.values():
            if neighbor.has_complete(pid): neighbors.append(neighbor)
        return neighbors
    def is_unique(self, obj):
        """True if nobody (including me) holds a complete copy of obj.pid."""
        return len(self.who_has_complete(obj.pid)) == 0 and not self.me.has_complete(obj.pid)
    def __calc_diffs(self, old, new):
        # Symmetric difference of two object lists: (added, removed).
        added = []
        removed = []
        for item in new:
            if item not in old:
                added.append(item)
        for item in old:
            if item not in new:
                removed.append(item)
        return added, removed
    def __receive(self, data, addr):
        # Handle one incoming heartbeat: a pickled UserPresence.
        # NOTE(review): pickle.loads of broadcast data trusts the subnet.
        neighbor = pickle.loads(data)
        logger.debug('Received presence. Neighbor %i from addr %s', neighbor.pid, addr)
        # Quick sanity check. :)
        neighbor.is_me = False
        # Save the user and let us know if the neighbor is new.
        new = self.__table.save_user(neighbor)
        if neighbor.pid in self.__present:
            old = self.__present[neighbor.pid]
            old.alive()
            if neighbor.meta != old.meta or neighbor.name != old.name:
                old.meta = neighbor.meta
                old.name = neighbor.name
                logger.info('Neighbor %i was updated with new meta %s', old.pid, old.meta)
                self.emit('neighbor-update', old)
            if old.objects != neighbor.objects:
                added, removed = self.__calc_diffs(old.objects, neighbor.objects)
                # This is a hack to make is_unique work.
                old.objects = []
                for obj in added:
                    if self.is_unique(obj):
                        logger.info('New object %i appeared at addr %s', obj.pid, addr)
                        self.emit('object-appeared', obj)
                for obj in removed:
                    if self.is_unique(obj):
                        logger.info('Last object %i disappeared from addr %s', obj.pid, addr)
                        self.emit('object-disappeared', obj)
                old.objects = neighbor.objects
        else:
            # First heartbeat from this pid (or it had gone offline).
            if neighbor.pid in self.__offline:
                del self.__offline[neighbor.pid]
            neighbor.watch(self.__on_offline)
            for obj in neighbor.objects:
                if self.is_unique(obj):
                    logger.info('New object %i appeared at addr %s', obj.pid, addr)
                    self.emit('object-appeared', obj)
            self.__present[neighbor.pid] = neighbor
            logger.info('New neighbor %i appeared at addr %s', neighbor.pid, addr)
            self.emit('neighbor-online', neighbor, new)
        self.__recalc_presence_interval()
    def __recalc_presence_interval(self):
        # Back off the heartbeat rate as more neighbors appear
        # (<=5: 5s, <=10: 10s, >10: 20s).
        changed = False
        count = len(self.__present)
        if count > 10:
            if self.__PRESENCE_INTERVAL != 20000:
                self.__PRESENCE_INTERVAL = 20000
                changed = True
        elif count > 5:
            if self.__PRESENCE_INTERVAL != 10000:
                self.__PRESENCE_INTERVAL = 10000
                changed = True
        else:
            if self.__PRESENCE_INTERVAL != 5000:
                self.__PRESENCE_INTERVAL = 5000
                changed = True
        if changed:
            logger.info('Changing presence interval to %i, because %i users are online', self.__PRESENCE_INTERVAL, count)
            gobject.source_remove(self.__beat_timer)
            self.__beat_timer = gobject.timeout_add(self.__PRESENCE_INTERVAL, self.__beat)
    def __beat(self):
        logger.debug('Sending heartbeat')
        pickled = pickle.dumps(self.me, pickle.HIGHEST_PROTOCOL)
        self.__queue.put(pickled)
        # True keeps the glib timer firing.
        return True
    def __on_offline(self, neighbor, pid):
        # 'offline' signal handler installed via UserPresence.watch().
        logger.info('Neighbor %i went offline',neighbor.pid)
        self.__offline[neighbor.pid] = neighbor
        del self.__present[neighbor.pid]
        for obj in neighbor.objects:
            if self.is_unique(obj): self.emit('object-disappeared', obj)
        self.emit('neighbor-offline', neighbor)
    # Testing hook
    def run(self):
        self.loop = gobject.MainLoop()
        self.loop.run()
# Register the GObject-derived classes so their custom signals are usable.
gobject.type_register(Presence)
gobject.type_register(PresenceService)
class PresenceTable(db.Table):
    """Persistent record (uid, name, timestamp) of every user ever seen."""
    def __init__(self):
        db.Table.__init__(self)
    def _init(self):
        # Called by the db layer when the table does not exist yet.
        self._conn.execute('''
        CREATE TABLE presence
        (uid INTEGER PRIMARY KEY NOT NULL, name STRING NOT NULL, timestamp INTEGER NOT NULL)
        ''')
    def _drop(self):
        self._conn.execute('''
        DROP TABLE presence
        ''')
    def save_user(self, user):
        """Insert or refresh *user*'s row; return True when the uid was new."""
        new = False
        c = self._conn
        [result] = c.execute('''
        SELECT COUNT(*) as count FROM presence WHERE uid=?
        ''', (user.pid,))
        count = result['count']
        if count > 0:
            c.execute('''
            UPDATE presence
            SET name=?, timestamp=?
            WHERE uid=?''', (user.name, time.time(), user.pid))
        else:
            new = True
            c.execute('''
            INSERT INTO presence VALUES (?,?,?)''', (user.pid, user.name, time.time()))
        return new
    def get_historical(self, uid):
        """Return the stored row for *uid*.

        Fix: PresenceService.get_known_user() calls get_historical(), but
        only get_historical_info() existed; provide the expected name while
        keeping the old one for compatibility.
        """
        return self.get_historical_info(uid)
    def get_historical_info(self, uid):
        """Return the stored row for *uid* (legacy name)."""
        c = self._conn
        [result] = c.execute('''
        SELECT * FROM presence WHERE uid=?
        ''', (uid,))
        return result
    def get_all_historical(self):
        """Return every stored presence row."""
        c = self._conn
        return c.execute(''' SELECT * FROM presence ''')
# The following is all test code
if __name__ == "__main__":
    import sys
    import fcntl
    import struct
    import random
    def GetWirelessInterfaceName():
        hostname = socket.gethostname()
        # NOTE(review): ("N800" or "n800" or "N810" or "n810") evaluates to
        # just "N800", so only that one substring is actually tested.
        if ("N800" or "n800" or "N810" or "n810") in hostname:
            return "wlan0"
        elif "viraLBaby" in hostname:
            return "eth1"
        return "wlan0"
        # return "eth0"
    def GetIPAddress(ifname):
        # SIOCGIFADDR ioctl: IPv4 address bound to the interface.
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        return socket.inet_ntoa(fcntl.ioctl(
            s.fileno(),
            0x8915, # SIOCGIFADDR
            struct.pack('256s', ifname[:15])
        )[20:24])
    def GetBcastAddress(ifname):
        # SIOCGIFBRDADDR ioctl: broadcast address of the interface.
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        return socket.inet_ntoa(fcntl.ioctl(
            s.fileno(),
            0x8919, # SIOCGIFBRDADDR
            struct.pack('256s', ifname[:15])
        )[20:24])
    def Online(obj, presence):
        print time.time(),presence.name,"came online with objects",presence.objects
    def Offline(obj, presence):
        print time.time(),presence.name,"went offline."
    def Update(obj, presence):
        print time.time(),presence.name,"was updated."
    def ObjectAppeared(obj, presence):
        print time.time(),"Object '",presence.name,"' appeared."
    def ObjectDisappeared(obj, presence):
        print time.time(),"Object '",presence.name,"' disappeared."
    if len(sys.argv) < 5:
        print "usage: python presence.py <send_port> <recv_port> <my_id> <my_name>"
        exit(0)
    iface = GetWirelessInterfaceName()
    print iface
    my_ip = GetIPAddress(iface)
    broadcast_ip = GetBcastAddress(iface)
    print my_ip, broadcast_ip
    send_port = int(sys.argv[1])
    recv_port = int(sys.argv[2])
    my_id = sys.argv[3]
    my_name = sys.argv[4]
    #ip, port, my_name
    # NOTE(review): stale call -- PresenceService.__init__ now takes
    # (iface, my_id, my_name, ...); this six-argument form predates it,
    # and AudioMsgPresence is not defined anywhere in this module.
    pres = PresenceService(my_ip, broadcast_ip, send_port, recv_port, my_id, my_name)
    pres.add_object(AudioMsgPresence(random.randint(1,100) , "name" + str(random.randint(1,100)), my_ip))
    # connect to events of presence
    pres.connect('neighbor-online', Online)
    pres.connect('neighbor-offline', Offline)
    pres.connect('neighbor-update', Update)
    pres.connect('object-appeared', ObjectAppeared)
    pres.connect('object-disappeared', ObjectDisappeared)
    gobject.threads_init()
    loop = gobject.MainLoop()
    loop.run()
# Inert leftover snippet kept for reference (never executed).
"""
ping_string = {
    'ubuntu' : ,
    'maemo' : ',
}
"""
| Python |
import os
import re
import device
def scan(ifname):
    """Parse `iwlist <ifname> scanning` output into a list of AP dicts.

    Each dict may contain 'mac', 'essid', 'level' (dBm, as a string) and
    'encryption' (bool), depending on which lines were present for the cell.
    """
    # Debugging switch: set live=False to parse a canned capture instead.
    live = True
    if live:
        pipe = os.popen("iwlist " + ifname + " scanning")
        lines = pipe.read().strip().split('\n')
        pipe.close()
    else:
        pipe = open('n800-aps.txt')
        lines = pipe.read().strip().split('\n')
        pipe.close()
    # Compile each pattern once instead of once per output line.
    mac_re = re.compile("[ ]+Cell [0-9]+ - Address: (?P<mac>[\w:]+)")
    essid_re = re.compile("[ ]+ESSID:\"(?P<essid>[\w\s\.]*)\"")
    enc_re = re.compile("[ ]+Encryption key:(?P<encryption>[\w]*)")
    # The tablet's wireless tools print "level:" where desktop ones print "level=".
    if device.current() == device.NOKIA_TABLET:
        signal_re = re.compile(".*Signal level:(?P<level>-\d+) dBm")
    else:
        signal_re = re.compile(".*Signal level=(?P<level>-\d+) dBm")
    aps = []
    ap = None
    for line in lines:
        macmatch = mac_re.match(line)
        if macmatch:
            # A new cell starts; following attribute lines belong to it.
            # Fix: use .group('mac'); .groups('mac')[0] abused the default
            # argument of groups() (same value here, wrong API).
            ap = {'mac': macmatch.group('mac')}
            aps.append(ap)
            continue
        if ap is None:
            # Attribute line before any Cell header; previously this was a
            # NameError on the unbound `ap`.
            continue
        essidmatch = essid_re.match(line)
        if essidmatch:
            ap['essid'] = essidmatch.group('essid')
            continue
        signalmatch = signal_re.match(line)
        if signalmatch:
            ap['level'] = signalmatch.group('level')
            continue
        encmatch = enc_re.match(line)
        if encmatch:
            # "off" means an open network; anything else counts as encrypted.
            ap['encryption'] = encmatch.group('encryption').strip() != 'off'
            continue
    return aps
def current(ifname):
    """Parse `iwconfig <ifname>` into a dict with 'essid', 'mode', 'level'.

    Keys are present only if the corresponding line appeared in the output.
    """
    pipe = os.popen("iwconfig " + ifname)
    lines = pipe.read().strip().split('\n')
    pipe.close()
    # Compile each pattern once instead of once per output line.
    essid_re = re.compile(".*ESSID:\"(?P<essid>.+)\"")
    mode_re = re.compile(".*Mode:(?P<mode>[-\w]+)")
    # The tablet's wireless tools print "level:" where desktop ones print "level=".
    if device.current() == device.NOKIA_TABLET:
        signal_re = re.compile(".*Signal level:(?P<level>-\d+) dBm")
    else:
        signal_re = re.compile(".*Signal level=(?P<level>-\d+) dBm")
    info = {}
    for line in lines:
        essidmatch = essid_re.match(line)
        if essidmatch:
            info['essid'] = essidmatch.group('essid')
            continue
        modematch = mode_re.match(line)
        if modematch:
            info['mode'] = modematch.group('mode')
            continue
        signalmatch = signal_re.match(line)
        if signalmatch:
            info['level'] = signalmatch.group('level')
            continue
    return info
if __name__ == '__main__':
# Sort by level
aps = scan('eth1')
from operator import itemgetter
aps = sorted(aps, key= itemgetter('level'))
# Filter ML only
ml_re = re.compile("media lab 802\.11")
def ml_filter(x):
if 'essid' in x:
return ml_re.match(x['essid']) != None
else:
return False
print filter(ml_filter, aps)
| Python |
def synchronized(lock):
    """Decorator factory: run the wrapped callable while holding *lock*.

    Usage:
        lock = threading.Lock()
        @synchronized(lock)
        def critical(...): ...

    The lock is released even if the callable raises.
    """
    import functools
    def wrap(f):
        @functools.wraps(f)  # preserve the wrapped function's name/docstring
        def locked(*args, **kw):
            with lock:
                return f(*args, **kw)
        return locked
    return wrap
| Python |
import cPickle as pickle
def persist(obj, path):
    """Serialize *obj* to the file at *path* using the highest pickle protocol.

    Bug fix: the original called bare dump(), which is a NameError -- the
    module imports cPickle *as* pickle, so the call must be pickle.dump().
    The with-statement guarantees the file is closed even on error.
    """
    with open(path, mode='wb') as f:
        pickle.dump(obj, f, pickle.HIGHEST_PROTOCOL)
def depersist(path):
    """Load and return the object pickled at *path* (inverse of persist).

    Bug fix: the original called bare load() -- a NameError; it must be
    pickle.load(). NOTE: pickle is not safe on untrusted files; only use
    this on paths the application itself wrote.
    """
    with open(path, mode='rb') as f:
        return pickle.load(f)
import gobject
import socket
import apscan
import device
# For Nokia stuff
import dbus
import logging
logger = logging.getLogger('util.connectivity')
class Connectivity(gobject.GObject):
__gsignals__ = {
'disconnected' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_PYOBJECT,gobject.TYPE_BOOLEAN)),
'connected' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_PYOBJECT,)),
'connecting' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_PYOBJECT,)),
}
EXTERNAL = 3
DISCONNECTED = 4
MODE_INF = 1
MODE_AD_HOC = 2
def __init__(self, interfaces):
gobject.GObject.__init__(self)
gobject.timeout_add(5000, self.__update)
self.__interfaces = interfaces
self.__current_state = Connectivity.DISCONNECTED
self.__update()
self.__last_aps = {}
def __update(self):
def check_external():
remote = ('www.google.com', 80)
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.settimeout(1)
try:
s.connect(remote)
return True
except socket.gaierror, e:
return False
def determine_mode():
current = apscan.current('eth1')
mode = current['mode']
if mode == 'Managed':
return Connectivity.MODE_INF
elif mode == 'Ad-Hoc':
return Connectivity.MODE_AD_HOC
else:
raise ValueError('Got an unknown mode: %s' + (mode,))
def scan():
# TODO: Replace this with self.interfaces
aps = apscan.scan('eth1')
return aps
external = check_external()
mode = determine_mode()
aps = scan()
print external
print mode
print aps
if not external and aps is not self.__last_aps:
# Choose an AP that might give connectivity
from operator import itemgetter
def unenc(x):
if 'encrypted' in x:
return x['encrypted'] != True
else:
return False
aps = filter(unenc, aps)
aps = sorted(aps, key=itemgetter('level'))
self.__try_connect(aps[0])
self.__last_aps = aps
return True
def __try_connect(self, ap):
if device.current() == device.LINUX:
pass
elif device.current() == device.NOKIA_TABLET:
bus = dbus.SystemBus()
icd = bus.get_object('com.nokia.icd',
'/com/nokia/icd')
icd.com.nokia.icd.connect(ap['essid'], 0)
# dbus-send --type=method_call --system --dest=com.nokia.icd /com/nokia/icd com.nokia.icd.connect string:${IAP} uint32:0
if __name__ == '__main__':
    # Smoke test: construct the monitor with no explicit interfaces.
    conn = Connectivity([])
| Python |
# Device-specific module
import os
import platform
NOKIA_TABLET = 'NOKIA_TABLET'
LINUX = 'LINUX'
WINDOWS = 'WINDOWS'
UMPC = 'UMPC'
devices = {
'armv6l' : NOKIA_TABLET,
'i686' : LINUX,
'i386' : LINUX,
'x86_64' : LINUX,
}
_current = None
def override(current):
    """Force the detected device type, bypassing auto-detection in current().

    Bug fix: without the `global` declaration the assignment created a
    function-local variable, so override() was silently a no-op.
    """
    global _current
    _current = current
def current():
    """Return the cached device identifier, detecting it on first call.

    Detection: platform.system() for Windows, otherwise `uname -m` mapped
    through the `devices` table.
    """
    global _current
    if _current is not None:
        return _current
    if 'Windows' in platform.system():
        _current = WINDOWS
    else:
        pipe = os.popen("uname -m")
        arch = pipe.read().strip()
        pipe.close()
        _current = devices[arch]
    return _current
if __name__ == '__main__':
device = current()
print device
| Python |
import ConfigParser
import device
parser = None
def __instance():
    """Return the module-wide ConfigParser, creating it on first use."""
    global parser
    if parser is not None:
        return parser
    parser = ConfigParser.ConfigParser()
    return parser
def as_dict(section):
    """Return every option in *section* as a plain {name: value} dict.

    dict() consumes the (name, value) pairs directly -- no manual loop.
    """
    return dict(__instance().items(section))
def items(section):
    """Return the list of (name, value) pairs in *section*."""
    return __instance().items(section)
def get(section, option):
    """Return the raw string value of *option* in *section*."""
    return __instance().get(section, str(option))
def get_int(section, option):
    """Return *option* from *section* parsed as a base-10 integer."""
    raw = get(section, str(option))
    return int(raw)
def get_base16_int(section, option):
    """Return *option* parsed as a hexadecimal (base-16) integer."""
    raw = get(section, str(option))
    return int(raw, 16)
def get_float(section, option):
    """Return *option* parsed as a float."""
    raw = get(section, str(option))
    return float(raw)
def get_bool(section, option):
    """Parse *option* as a boolean; only 'true'/'false' (any case) are valid.

    Raises ValueError for anything else.
    """
    text = get(section, str(option)).strip().lower()
    if text == 'true':
        return True
    if text == 'false':
        return False
    raise ValueError('Got value %s for a boolean.' % (text,))
def add_file(file_path):
    """Read and merge the config file at *file_path* into the shared parser."""
    cfg = __instance()
    cfg.read(file_path)
def add_files(file_paths):
    """Read and merge every config file in *file_paths*, in order."""
    for path in file_paths:
        add_file(path)
import gobject
import threading
import time
from util.decorators import *
import chunk, message
from util import config
import logging
logger = logging.getLogger('async.receiver')
class Receiver(gobject.GObject):
    """Reassembles chunked messages arriving over the network.

    Signals:
      message-complete(msg)          -- every chunk of a message arrived
      need-retransmission(hdr, seqs) -- seqs still missing after a timeout
      busy(bool)                     -- receive activity started/stopped
    """
    __gsignals__ = {
        'message-complete' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
            (gobject.TYPE_PYOBJECT,)),
        'need-retransmission' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
            (gobject.TYPE_PYOBJECT, gobject.TYPE_PYOBJECT,)),
        'busy' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
            (gobject.TYPE_BOOLEAN,)),
    }
    # Class-level lock shared by the @synchronized entry points below.
    lock = threading.Lock()
    def __init__(self):
        gobject.GObject.__init__(self)
        self.__incoming = {}  # mid -> partially received message
        self.__timers = {}    # mid -> completion-check threading.Timer
        self.__busy = False
        # NOTE(review): grows without bound -- completed mids are appended
        # in __check_complete() and never pruned.
        self.__recent_completed = []
    def shutdown(self):
        # Hand back the partially received messages so callers can persist
        # and later resume them via resume_incomplete().
        return self.__incoming
    @synchronized(lock)
    def receive(self, data, addr):
        """Handle one raw datagram: decode the chunk, store it, and (re)arm
        the completion timer for its message."""
        if not self.__busy:
            self.__busy = True
            self.emit('busy', True)
        c = chunk.Chunk.from_raw(data)
        logger.debug('Received mid %i, seqno %i from %s', c.mid, c.seq, addr)
        self.__stop_timer(c.mid)
        if c.mid in self.__recent_completed:
            logger.info('Chunk was for a recently completed message, id %i. Ignoring.', c.mid)
            return # The message was recently completed; packets must have come in out of order.
        # NOTE(review): raises KeyError if this mid was never registered via
        # register_incoming()/resume_incomplete() -- confirm callers always
        # register before chunks can arrive.
        msg = self.__incoming[c.mid]
        if isinstance(c, chunk.EndChunk):
            # End-of-message marker: check completeness right away.
            self.__check_complete(c.mid)
        else:
            msg.put_chunk(c)
            self.__start_timer(c.mid)
    @synchronized(lock)
    def resume_incomplete(self, msg):
        """Re-adopt a partially received message (e.g. after shutdown())."""
        assert msg.header.mid not in self.__incoming
        self.__incoming[msg.header.mid] = msg
        self.__start_timer(msg.header.mid)
    @synchronized(lock)
    def register_incoming(self, header):
        """Announce a new incoming message so its chunks will be accepted."""
        assert header.mid not in self.__incoming
        self.__incoming[header.mid] = message.create_from_header(header)
    def __stop_timer(self, mid):
        # Cancel and forget the completion timer for mid, if one is armed.
        if mid in self.__timers:
            self.__timers[mid].cancel()
            del self.__timers[mid]
    def __start_timer(self, mid):
        # (Re)arm the timer that triggers a completeness check when no chunk
        # for mid arrives within the configured window.
        if mid in self.__timers:
            self.__stop_timer(mid)
        t = threading.Timer(config.get_int('async', 'completion-check-timeout-s'), self.__synced_check_complete, [mid])
        self.__timers[mid] = t
        t.start()
    @synchronized(lock)
    def __synced_check_complete(self, mid):
        # Timer callback: acquire the class lock, then run the real check.
        self.__check_complete(mid)
    def __check_complete(self, mid):
        """If message mid is complete, emit and forget it; otherwise request
        the missing chunks and re-arm its timer. Caller must hold `lock`."""
        self.__busy = False
        self.emit('busy', False)
        msg = self.__incoming[mid]
        incomplete = msg.get_incomplete_seqs()
        if incomplete:
            logger.debug('Message %i incomplete; seqs: %s', msg.header.mid, incomplete)
            self.emit('need-retransmission', msg.header, incomplete)
            self.__start_timer(mid)
        else:
            del self.__incoming[mid]
            self.emit('message-complete', msg)
            self.__recent_completed.append(mid)
# Register the GObject type so the signal machinery works on this class.
gobject.type_register(Receiver)
| Python |
# cache.py
# Caches for incoming and outgoing data.
import threading
import os
import shutil
import time
import chunk
from util import config
incoming = None
outgoing = None
import logging
logger = logging.getLogger('async')
def get_incoming():
    """Return the singleton IncomingCache, creating it on first use."""
    global incoming
    if incoming is not None:
        return incoming
    incoming = IncomingCache()
    return incoming
def get_outgoing():
    """Return the singleton OutgoingCache, creating it on first use."""
    global outgoing
    if outgoing is not None:
        return outgoing
    outgoing = OutgoingCache()
    return outgoing
def reset():
    """Drop both cache singletons so the next accessor rebuilds them."""
    global incoming, outgoing
    incoming = None
    outgoing = None
def get_cache_filename(header):
    """Path of the staging file for a partially received message."""
    root = config.get('async.folders', 'cache-root')
    return root + '/' + str(header.mid)
class BaseCache:
    """Shared plumbing for the chunk caches: a size-bounded dict of chunks
    with FIFO-style eviction via the _lru list."""
    def __init__(self):
        # key (see _make_key) -> cached chunk; _lru orders keys oldest-first.
        self._chunks = {}
        self._lru = []
        self._max_size = 100
        self._lock = threading.Lock()
        # Make sure the on-disk staging folder exists.
        if not os.path.exists(config.get('async.folders', 'cache-root')):
            os.makedirs(config.get('async.folders', 'cache-root'))
    def _evict(self):
        """Drop the oldest cached entry."""
        oldest = self._lru.pop(0)
        del self._chunks[oldest]
    def _is_full(self):
        """True once the cache has grown past its size bound."""
        return len(self._chunks) > self._max_size
    def _make_key(self, header, seq):
        """Cache key: the message's file path plus the sequence number."""
        return '%s::%s' % (header.get_file_path(), seq)
class OutgoingCache(BaseCache):
    """Read-side cache: serves chunks of outgoing messages, batch-loading
    them from the message file on a miss."""
    def __init__(self):
        BaseCache.__init__(self)
    def get_chunk(self, header, seq):
        """
        Gets the chunk with the sequence number seq from the message with the given header.
        Loads a batch of chunks from disk on a cache miss.
        """
        key = self._make_key(header, seq)
        if key not in self._chunks:
            self.__load_chunks(header, seq)
        return self._chunks[key]
    def __load_chunks(self, header, seq, n=20):
        """
        Loads up to n chunks of the message starting at sequence number seq.
        """
        # Read the raw bytes for n chunks in one go.
        offset = header.csize * seq
        f = open(header.get_file_path(), mode='rb')
        f.seek(offset)
        data = f.read(header.csize * n)
        f.close()
        # Slice the buffer into per-chunk cache entries.
        # Bug fix: seq must advance with each slice; previously it never
        # changed, so every slice after the first reused the same key and
        # the whole prefetch batch beyond chunk `seq` was discarded.
        while data:
            key = self._make_key(header, seq)
            if self._is_full():
                self._evict()
            if key not in self._chunks:
                self._chunks[key] = chunk.DataChunk(header.mid, seq, data[:header.csize])
                self._lru.append(key)
            data = data[header.csize:]
            seq += 1
class IncomingCache(BaseCache):
    """Write-side cache: buffers received chunks in memory and flushes them
    to a per-message staging file on disk."""
    def __init__(self):
        BaseCache.__init__(self)
        # id -> file descriptor: open staging-file handle per message id
        self.__files = {}
    def put_chunk(self, header, chunk):
        """Buffer one received chunk; flush everything first if the cache is
        full.

        NOTE(review): unlike flush(), this does not take self._lock --
        confirm that callers are serialized externally.
        """
        logger.debug('Putting chunk #%s of mid %s', chunk.seq, chunk.mid)
        key = self._make_key(header, chunk.seq)
        if not key in self._chunks:
            if self._is_full():
                self.flush()
            self._chunks[key] = (chunk, header)
    def flush(self):
        """Write every buffered chunk to its message's staging file at the
        byte offset implied by its sequence number, then empty the cache."""
        self._lock.acquire()
        logger.debug('Flushing the cache')
        for (chunk,header) in self._chunks.values():
            if chunk.mid in self.__files:
                # Reuse the already-open handle for this message.
                f = self.__files[chunk.mid]
            else:
                file_path = get_cache_filename(header)
                if os.path.exists(file_path):
                    # The file exists, so we have to read old file, write it into new one.
                    # This is nonsense, but is necessary; you can't just start writing arbitrary data
                    # at byte positions without truncating the file.
                    existing = open(file_path, mode='rb')
                    data = existing.read()
                    existing.close()
                    os.remove(file_path)
                    # Dump the contents back; keep the file handle.
                    f = open(file_path, mode='wb+')
                    self.__files[chunk.mid] = f
                    f.write(data)
                    f.flush()
                else:
                    logger.debug('File didnt exist; opening and adding to self.__files')
                    f = open(file_path, mode='wb+')
                    self.__files[chunk.mid] = f
            logger.debug('Files: %s', self.__files)
            if f.closed:
                # Message was already finalized by msg_complete(); drop it.
                continue
            # Seek to this chunk's byte offset and write it in place.
            offset = header.csize * chunk.seq
            f.seek(offset)
            f.write(chunk.data)
            f.flush()
        self._chunks = {}
        self._lock.release()
        return
    def msg_complete(self, header):
        """Finalize a fully received message: flush, close its staging file
        and move it to the message's destination path."""
        logger.debug('Message complete. Header mid %s', header.mid)
        # flush() takes and releases the lock itself, so it must run before
        # we acquire the lock below.
        self.flush()
        self._lock.acquire()
        self.__files[header.mid].close()
        del self.__files[header.mid]
        cache_file = get_cache_filename(header)
        dest_file = header.get_file_path()
        dest_dir = os.path.dirname(dest_file)
        if not os.path.exists(dest_dir):
            os.makedirs(dest_dir)
        shutil.move(cache_file, dest_file)
        self._lock.release()
Subsets and Splits
SQL Console for ajibawa-2023/Python-Code-Large
Provides a useful breakdown of language distribution in the training data, showing which languages have the most samples and helping identify potential imbalances across different language groups.