code stringlengths 1 1.72M | language stringclasses 1
value |
|---|---|
'''Copyright 2011 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.'''
import json
import logging
import os
# Load the static weapons catalog once at module import time.
# json.load reads directly from the file object (clearer than loads(f.read())).
with open('shared/weapons/Weapons.json', 'r') as f:
    _WEAPONS = json.load(f)
def getVirtualItem(itemID):
    """Return the weapon catalog entry whose 'itemID' matches itemID.

    Raises:
      Exception: if no weapon with the given id exists in the catalog.
    """
    assert itemID
    match = next(
        (weapon for weapon in _WEAPONS['weapons'] if weapon['itemID'] == itemID),
        None)
    if match is None:
        raise Exception('itemID "%s" does not exist' % itemID)
    return match
| Python |
'''Copyright 2011 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.''' | Python |
'''Copyright 2011 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.'''
from google.appengine.api import backends
from google.appengine.ext import ndb
import json
import logging
import os
from client import shared
# True when running under the local dev_appserver, False in production;
# App Engine sets SERVER_SOFTWARE to 'Development/...' locally.
_IS_DEVELOPMENT = os.environ['SERVER_SOFTWARE'].startswith('Development/')
class Config(ndb.Model):
    """OAuth/API configuration, keyed by origin (see getConfig/setConfig)."""
    # Client ID for web applications
    client_id = ndb.StringProperty(indexed=False)
    # Client secret paired with client_id
    client_secret = ndb.StringProperty(indexed=False)
    # Simple API Access
    api_key = ndb.StringProperty(indexed=False)
class User(ndb.Model):
    """A player record, keyed by the user id (see getUser/newUser)."""
    # Human-readable name shown to other players
    displayName = ndb.StringProperty(indexed=False)
    # When the account was created — NOTE(review): never assigned by newUser
    createDate = ndb.DateTimeProperty(indexed=False)
    # Virtual-currency balance
    credits = ndb.IntegerProperty(indexed=False)
    # Win/loss tallies
    numWins = ndb.IntegerProperty(indexed=False)
    numLoss = ndb.IntegerProperty(indexed=False)
    # itemIDs the user has unlocked; duplicates are allowed
    virtualItems = ndb.StringProperty(repeated=True, indexed=False)
def getConfig(origin):
    """Return the stored Config entity for origin, or None if absent.

    In local development the config is (re)loaded from
    build/keys-localhost.json and written to the datastore on every call
    before the lookup.
    """
    if _IS_DEVELOPMENT:
        # Use a context manager so the key file is closed promptly
        # (the original leaked the file handle).
        with open('build/keys-localhost.json') as f:
            c = json.load(f)
        setConfig(origin, c['client_id'], c['client_secret'], c['api_key'])
    return Config.get_by_id(str(origin))
def setConfig(origin, client_id, client_secret, api_key):
    """Create or overwrite the Config entity keyed by origin."""
    Config(
        id=str(origin),
        client_id=client_id,
        client_secret=client_secret,
        api_key=api_key,
    ).put()
def getUser(userID):
    """Fetch the User entity for userID, or None if it does not exist."""
    return User.get_by_id(str(userID))
def newUser(userID, displayName):
    """Create, persist, and return a new User with starting stats.

    Args:
      userID: unique id for the user; used as the datastore key.
      displayName: human-readable name shown to other players.
    """
    usr = User(id=str(userID))
    usr.displayName = displayName
    usr.credits = 1000
    usr.numWins = 3
    # BUG FIX: the model declares 'numLoss', not 'numLosses'; assigning an
    # undeclared attribute on an ndb.Model raises AttributeError.
    usr.numLoss = 5
    usr.put()
    return usr
#NOTE what are we doing here, really?
#the goal is to have virtual currency, but also allow for purchasing item combos
#called when client asks to unlock an item with credits
def unlockItemForUser(userID, itemID):
    """Deduct an item's credit price from the user and grant the item.

    Returns True on success, or None when the user or item is unknown.
    NOTE(review): there is no check here that the user can afford the item;
    callers (e.g. userAttemptToBuy) are expected to validate credits first.
    """
    usr = getUser(userID)
    if not usr:
        return None
    vi = shared.getVirtualItem(itemID)
    if not vi:
        return None
    # Do this the hacky way and just push the item to the end of the list.
    usr.virtualItems.append(itemID)
    # BUG FIX: virtual items are plain dicts, so index by key rather than
    # attribute access, and persist with put() (ndb entities have no push()).
    usr.credits -= vi['priceInCredits']
    usr.put()
    return True
#called during a postback call from the IAP server
def purchaseItemForUser(userID, itemID):
usr = getUser(userID)
if not usr:
return None
vi = shared.getVirtualItem(itemID)
if not vi:
return None
if vi.itemType == "credits":
usr.credits += vi.itemData0
return True
return None
def userAttemptToBuy(userID, itemID):
    """Try to buy itemID with the user's credit balance.

    Returns:
      A dict with 'result' (bool) plus either an error 'message', or the
      purchased 'itemID' and the user's remaining 'userCredits'.
    """
    assert userID
    assert itemID
    usr = getUser(userID)
    if not usr:
        return {'result': False, 'message': 'User not found'}
    vi = shared.getVirtualItem(itemID)
    if not vi:
        return {'result': False, 'message': 'Item not found; please check with the admin'}
    # If the user has enough credits for the item, unlock it and charge them.
    # (Removed unused local 'result' from the original.)
    if usr.credits >= vi['priceInCredits']:
        usr.virtualItems.append(itemID)
        usr.credits -= vi['priceInCredits']
        usr.put()
        return {'result': True, 'itemID': itemID, 'userCredits': usr.credits}
    return {'result': False, 'itemID': itemID, 'userCredits': usr.credits}
| Python |
import logging

# dev_appserver defaults to INFO while production logs at DEBUG, so lower
# the root logger to DEBUG locally to match production behavior.
root_logger = logging.getLogger()
root_logger.setLevel(logging.DEBUG)
# Keep App Engine's own APIs at their default INFO verbosity.
logging.getLogger('google.appengine').setLevel(logging.INFO)
| Python |
#!/usr/bin/env python2.7
import json
import logging
import os
import pprint as pp
import sys
import unittest
import urllib2
# webtest is required to drive the WSGI app under test; fail fast with a
# pointer to the install docs when it is missing.
try:
    from webtest import TestApp
except ImportError:
    # BUG FIX: catch only ImportError — the original bare 'except:' would
    # also swallow KeyboardInterrupt/SystemExit and unrelated errors raised
    # while importing webtest.
    print("""Please install webtest from http://webtest.pythonpaste.org/""")
    sys.exit(1)
# Attempt to locate the App Engine SDK based on the system PATH.
for d in sorted(os.environ['PATH'].split(os.path.pathsep)):
    path = os.path.join(d, 'dev_appserver.py')
    if not os.path.isfile(path):
        continue
    print('Found the App Engine SDK directory: %s' % d)
    sys.path.insert(0, d)
    # BUG FIX: stop at the first match. Continuing would insert every later
    # match at position 0, putting later PATH entries *ahead* of earlier ones.
    break
# The App Engine SDK root is now expected to be in sys.path (possibly provided via PYTHONPATH)
try:
    import dev_appserver
except ImportError:
    error_msg = ('The path to the App Engine Python SDK must be in the '
                 'PYTHONPATH environment variable to run unittests.')
    # The app engine SDK isn't in sys.path. If we're on Windows, we can try to
    # guess where it is.
    import platform
    if platform.system() == 'Windows':
        # Default SDK install location on Windows.
        sys.path = sys.path + ['C:\\Program Files\\Google\\google_appengine']
        try:
            import dev_appserver  # pylint: disable-msg=C6204
        except ImportError:
            print error_msg
            raise
    else:
        print error_msg
        raise
# add App Engine libraries
sys.path += dev_appserver.EXTRA_PATHS
from google.appengine.ext import testbed
from google.appengine.api import backends
def make_state(serverid, extras=None):
    """Build a fake game-server state dict for tests.

    uptime is derived from hash(serverid), making it pseudo-predictable and
    stable within a single process.
    """
    state = {'serverid': serverid, 'uptime': hash(serverid) / 1e16}
    if extras:
        state.update(extras)
    return state
def make_game(serverid, name, extras=None):
    """Build a fake game-instance dict for tests."""
    default_state = {'players': {}, 'min_players': 2, 'max_players': 4}
    game = dict(serverid=serverid, name=name, game_state=default_state)
    game.update(extras or {})
    return game
class MatchMakerTest(unittest.TestCase):
    """Tests for matcher.MatchMaker: server/game bookkeeping and matchmaking."""

    def setUp(self):
        # see testbed docs https://developers.google.com/appengine/docs/python/tools/localunittesting
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_memcache_stub()
        self.testbed.init_datastore_v3_stub()
        self.testbed.init_urlfetch_stub()
        # import matcher now that testbed has been activated
        from client import matcher
        # instantiate a new matcher for each test
        self.mm = matcher.MatchMaker()
        # no maximum length for diffs
        self.maxDiff = None

    def tearDown(self):
        self.testbed.deactivate()

    def assertGameServers(self, expected_game_servers):
        """Assert the global game servers state."""
        # Custom failure message: the default assertEquals diff was too terse.
        if expected_game_servers != self.mm._game_servers:
            self.fail('Unexpected game servers state:\n----------\nEXPECTED:\n%s\n\nACTUAL:\n%s\n----------' % (expected_game_servers, self.mm._game_servers))
        #self.assertEquals(expected_game_servers, self.mm._game_servers)

    def assertPlayerGameState(self, expected_game, player_game):
        """Assert a specific game state."""
        self.assertIn('game', player_game)
        self.assertIn('player_game_key', player_game)
        self.assertEquals(expected_game, player_game['game'])

    def assertGameServerWait(self, players_needed_for_next_game, pg):
        """Assert that the player is being asked to wait."""
        self.assertEquals({'result': 'wait', 'players_needed_for_next_game': players_needed_for_next_game}, pg)

    #######################################################################
    # tests to verify server and game updates
    #######################################################################

    def test_initialized_state(self):
        """A fresh MatchMaker tracks no servers."""
        self.assertGameServers({})

    def test_update_server_info(self):
        """Registering a server records its info with an empty game table."""
        self.assertGameServers({})
        state_foo = make_state('foo')
        expected = {'foo':
                        {'server_info': state_foo,
                         'games': {},
                        }
                   }
        self.mm.update_server_info(state_foo)
        self.assertGameServers(expected)

    def test_update_server_info_twice(self):
        """Two distinct servers are tracked independently."""
        state_foo = make_state('foo')
        state_bar = make_state('bar')
        expected = {'foo':
                        {'server_info': state_foo,
                         'games': {},
                        },
                    'bar':
                        {'server_info': state_bar,
                         'games': {},
                        },
                   }
        self.assertGameServers({})
        self.mm.update_server_info(state_foo)
        self.mm.update_server_info(state_bar)
        self.assertGameServers(expected)

    def test_update_server_info_thrice(self):
        """Re-registering a server replaces its previous info."""
        state_foo1 = make_state('foo', {'update': 17})
        state_foo2 = make_state('foo', {'update': 42})
        state_bar = make_state('bar')
        expected = {'foo':
                        {'server_info': state_foo2,
                         'games': {},
                        },
                    'bar':
                        {'server_info': state_bar,
                         'games': {},
                        },
                   }
        self.assertGameServers({})
        self.mm.update_server_info(state_foo1)
        self.mm.update_server_info(state_bar)
        self.mm.update_server_info(state_foo2)
        self.assertGameServers(expected)

    def test_del_server_info(self):
        """Deleting a server removes it and leaves the others intact."""
        state_foo = make_state('foo')
        state_bar = make_state('bar')
        expected = {'bar':
                        {'server_info': state_bar,
                         'games': {},
                        },
                   }
        self.assertGameServers({})
        self.mm.update_server_info(state_foo)
        self.mm.update_server_info(state_bar)
        self.mm.del_game_server('foo')
        self.assertGameServers(expected)

    def test_update_game(self):
        """A game update is filed under its owning server."""
        state_foo = make_state('foo')
        game_abcd = make_game('foo', 'abcd')
        expected = {'foo':
                        {'server_info': state_foo,
                         'games': {'abcd': game_abcd,
                                  }
                        }
                   }
        self.assertGameServers({})
        self.mm.update_server_info(state_foo)
        self.mm.update_game(game_abcd)
        self.assertGameServers(expected)

    #######################################################################
    # tests to verify match making
    #######################################################################

    def test_do_not_find_full_game(self):
        state_foo = make_state('foo')
        game_abcd = make_game('foo', 'abcd', {'game_state': {'players': {}, 'min_players': 0, 'max_players': 0}})
        self.mm.update_server_info(state_foo)
        # we have a game, but no players are needed
        self.mm.update_game(game_abcd)
        # do not find full game
        pg = self.mm.find_player_game('fred')
        self.assertGameServerWait(-1, pg)

    def test_find_single_player_game(self):
        state_foo = make_state('foo')
        game_abcd = make_game('foo', 'abcd', {'game_state': {'players': {}, 'min_players': 1, 'max_players': 1}})
        self.mm.update_server_info(state_foo)
        # no available game yet
        pg = self.mm.find_player_game('fred')
        self.assertGameServerWait(-1, pg)
        # game_abcd becomes available
        self.mm.update_game(game_abcd)
        # we should find it
        pg = self.mm.find_player_game('fred')
        self.assertPlayerGameState(game_abcd, pg)
        # we should find it again
        pg = self.mm.lookup_player_game('fred')
        self.assertPlayerGameState(game_abcd, pg)

    def test_wait_for_enough_players(self):
        state_foo = make_state('foo')
        # only 3 game slots remaining
        game_abcd = make_game('foo', 'abcd', {'game_state': {'players': {}, 'min_players': 1, 'max_players': 1}})
        game_efgh = make_game('foo', 'efgh', {'game_state': {'players': {}, 'min_players': 2, 'max_players': 2}})
        self.mm.update_server_info(state_foo)
        self.mm.update_game(game_abcd)
        self.mm.update_game(game_efgh)
        # player1 enters game_abcd
        pg = self.mm.find_player_game('player1')
        self.assertPlayerGameState(game_abcd, pg)
        # player2 waits for a second player
        pg = self.mm.find_player_game('player2')
        self.assertGameServerWait(2, pg)
        # player3 enters game_efgh
        pg = self.mm.find_player_game('player3')
        self.assertPlayerGameState(game_efgh, pg)
        # player4 does not get in
        pg = self.mm.find_player_game('player4')
        self.assertGameServerWait(-1, pg)
        # player2 finally enters game_efgh
        pg = self.mm.lookup_player_game('player2')
        self.assertPlayerGameState(game_efgh, pg)

    def test_honor_max_players(self):
        state_foo = make_state('foo')
        # only 3 game slots remaining
        game_abcd = make_game('foo', 'abcd', {'game_state': {'players': {}, 'min_players': 2, 'max_players': 4}})
        self.mm.update_server_info(state_foo)
        # player1 is told that a new game needs to be spun up
        pg = self.mm.find_player_game('player1')
        self.assertGameServerWait(-1, pg)
        # a new game is made available
        self.mm.update_game(game_abcd)
        # player1 is told 2 players are required
        pg = self.mm.find_player_game('player1')
        self.assertGameServerWait(2, pg)
        # player2 allows the game to start
        pg = self.mm.find_player_game('player2')
        self.assertPlayerGameState(game_abcd, pg)
        # player3 drops in
        pg = self.mm.find_player_game('player3')
        self.assertPlayerGameState(game_abcd, pg)
        # player4 drops in
        pg = self.mm.find_player_game('player4')
        self.assertPlayerGameState(game_abcd, pg)
        # player5 does not get in
        pg = self.mm.find_player_game('player5')
        self.assertGameServerWait(-1, pg)
# Run the full test suite when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
| Python |
#!/usr/bin/env python
# Copyright 2012 Google Inc. All Rights Reserved.
"""Bigquery Client library for Python."""
import abc
import collections
import datetime
import hashlib
import itertools
import json
import logging
import os
import pkgutil
import random
import re
import string
import sys
import textwrap
import time
import apiclient
from apiclient import discovery
from apiclient import http as http_request
from apiclient import model
import httplib2
# To configure apiclient logging.
import gflags as flags
# A unique non-None default, for use in kwargs that need to
# distinguish "not supplied" from an explicit None.
_DEFAULT = object()

# The max number of rows requested in a single page if no explicit
# value is specified.
_MAX_ROWS_PER_REQUEST = 1000000
def _Typecheck(obj, types, message=None, method=None):
if not isinstance(obj, types):
if not message:
if method:
message = 'Invalid reference for %s: %r' % (method, obj)
else:
message = 'Type of %r is not one of %s' % (obj, types)
raise TypeError(message)
def _ToLowerCamel(name):
"""Convert a name with underscores to camelcase."""
return re.sub('_[a-z]', lambda match: match.group(0)[1].upper(), name)
def _ToFilename(url):
"""Converts a url to a filename."""
return ''.join([c for c in url if c in string.ascii_lowercase])
def _ApplyParameters(config, **kwds):
    """Adds all kwds to config dict, adjusting keys to camelcase.

    Entries whose value is None are skipped. Note this does not remove
    existing entries that are set to None, however.

    Args:
      config: A configuration dict, updated in place.
      **kwds: Keys and values to set in the config.
    """
    # items() works on both Python 2 and 3 (the original used the
    # Python-2-only iteritems()); behavior is identical.
    config.update((_ToLowerCamel(k), v) for k, v in kwds.items()
                  if v is not None)
def ConfigurePythonLogger(apilog=None):
    """Sets up Python logger, which BigqueryClient logs with.

    Applications can configure logging however they want, but this
    captures one pattern of logging which seems useful when dealing with
    a single command line option for determining logging.

    Args:
      apilog: To log to sys.stdout, specify '', '-', '1', 'true', or
        'stdout'. To log to sys.stderr, specify 'stderr'. To log to a
        file, specify the file path. Specify None to disable logging.
    """
    if apilog is None:
        # Effectively turn off logging.
        logging.disable(logging.CRITICAL)
    else:
        if apilog in ('', '-', '1', 'true', 'stdout'):
            logging.basicConfig(stream=sys.stdout, level=logging.INFO)
        elif apilog == 'stderr':
            logging.basicConfig(stream=sys.stderr, level=logging.INFO)
        elif apilog:
            # Any other non-empty value is treated as a file path.
            logging.basicConfig(filename=apilog, level=logging.INFO)
        else:
            # Fallback for other falsy values (e.g. 0) — TODO confirm intended.
            logging.basicConfig(level=logging.INFO)
        # Turn on apiclient logging of http requests and responses. (Here
        # we handle both the flags interface from apiclient < 1.2 and the
        # module global in apiclient >= 1.2.)
        if hasattr(flags.FLAGS, 'dump_request_response'):
            flags.FLAGS.dump_request_response = True
        else:
            model.dump_request_response = True
# A single streaming-insert row: insert_id lets the server deduplicate
# retries (may be None), record is the row content as a dict.
InsertEntry = collections.namedtuple('InsertEntry',
                                     ['insert_id', 'record'])
def JsonToInsertEntry(insert_id, json_string):
    """Parses a JSON encoded record and returns an InsertEntry.

    Arguments:
      insert_id: Id for the insert, can be None.
      json_string: The JSON encoded data to be converted.

    Returns:
      InsertEntry object for adding to a table.

    Raises:
      BigqueryClientError: if json_string is not valid JSON, or does not
        encode a JSON object.
    """
    try:
        row = json.loads(json_string)
    except ValueError as e:
        raise BigqueryClientError('Could not parse object: %s' % (str(e),))
    # A BigqueryClientError is not a ValueError, so raising it outside the
    # try block above is equivalent to the original nested form.
    if not isinstance(row, dict):
        raise BigqueryClientError('Value is not a JSON object')
    return InsertEntry(insert_id, row)
class BigqueryError(Exception):
    """Root of the BigQuery client exception hierarchy."""

    @staticmethod
    def Create(error, server_error, error_ls, job_ref=None):
        """Returns a BigqueryError for json error embedded in server_error.

        If error_ls contains any errors other than the given one, those
        are also included in the returned message.

        Args:
          error: The primary error to convert.
          server_error: The error returned by the server. (This is only used
            in the case that error is malformed.)
          error_ls: Additional errors to include in the error message.
          job_ref: JobReference, if this is an error associated with a job.

        Returns:
          BigqueryError representing error.
        """
        reason = error.get('reason')
        message = error.get('message')
        if job_ref:
            message = 'Error processing %r: %s' % (job_ref, message)
        # We don't want to repeat the "main" error message.
        secondary = [err for err in error_ls if err != error]
        if secondary:
            details = '\n'.join(
                textwrap.fill(
                    ': '.join(filter(None, [
                        err.get('location', None), err.get('message', '')])),
                    initial_indent=' - ',
                    subsequent_indent=' ')
                for err in secondary)
            message += '\nFailure details:\n' + details
        if not reason or not message:
            return BigqueryInterfaceError(
                'Error reported by server with missing error fields. '
                'Server returned: %s' % (str(server_error),))
        # Map well-known reasons onto specific exception types; anything
        # less interesting becomes a plain BigqueryServiceError.
        reason_to_class = {
            'notFound': BigqueryNotFoundError,
            'duplicate': BigqueryDuplicateError,
            'accessDenied': BigqueryAccessDeniedError,
            'invalidQuery': BigqueryInvalidQueryError,
            'termsOfServiceNotAccepted': BigqueryTermsOfServiceError,
            'backendError': BigqueryBackendError,
        }
        error_class = reason_to_class.get(reason, BigqueryServiceError)
        return error_class(message, error, error_ls, job_ref=job_ref)
class BigqueryCommunicationError(BigqueryError):
    """Raised when communicating with the server fails."""
class BigqueryInterfaceError(BigqueryError):
    """Raised when a server response is missing required fields."""
class BigqueryServiceError(BigqueryError):
    """Base class of Bigquery-specific error responses.

    The BigQuery server received request and returned an error.
    """

    def __init__(self, message, error, error_list, job_ref=None,
                 *args, **kwds):
        """Initializes a BigqueryServiceError.

        Args:
          message: A user-facing error message.
          error: The error dictionary, code may inspect the 'reason' key.
          error_list: A list of additional entries, for example a load job
            may contain multiple errors here for each error encountered
            during processing.
          job_ref: Optional JobReference, if this error was encountered
            while processing a job.
        """
        super(BigqueryServiceError, self).__init__(message, *args, **kwds)
        self.error = error
        self.error_list = error_list
        self.job_ref = job_ref

    def __repr__(self):
        parts = (self.__class__.__name__, self.error, self.error_list,
                 self.job_ref)
        return '%s: error=%s, error_list=%s, job_ref=%s' % parts
class BigqueryNotFoundError(BigqueryServiceError):
    """Raised when the requested resource or identifier was not found."""
class BigqueryDuplicateError(BigqueryServiceError):
    """Raised when the requested resource or identifier already exists."""
class BigqueryAccessDeniedError(BigqueryServiceError):
    """Raised when the user lacks access to the requested resource."""
class BigqueryInvalidQueryError(BigqueryServiceError):
    """Raised when the SQL statement is invalid."""
class BigqueryTermsOfServiceError(BigqueryAccessDeniedError):
    """Raised when the user has not acknowledged the Terms of Service."""
class BigqueryBackendError(BigqueryServiceError):
    """A backend error typically corresponding to retriable HTTP 503 failures."""
class BigqueryClientError(BigqueryError):
    """Raised on invalid use of BigqueryClient."""
class BigqueryClientConfigurationError(BigqueryClientError):
    """Raised when BigqueryClient is configured invalidly."""
class BigquerySchemaError(BigqueryClientError):
    """Raised when a schema cannot be located or parsed."""
class BigqueryModel(model.JsonModel):
    """JsonModel that injects optional global parameters into every request."""

    def __init__(self, trace=None, **kwds):
        super(BigqueryModel, self).__init__(**kwds)
        self.trace = trace

    # pylint: disable=g-bad-name
    def request(self, headers, path_params, query_params, body_value):
        """Updates outgoing request, adding the trace parameter if configured."""
        if self.trace and 'trace' not in query_params:
            query_params['trace'] = self.trace
        return super(BigqueryModel, self).request(
            headers, path_params, query_params, body_value)
    # pylint: enable=g-bad-name
class BigqueryHttp(http_request.HttpRequest):
    """HttpRequest subclass that converts HTTP errors into Bigquery errors."""

    def __init__(self, bigquery_model, *args, **kwds):
        super(BigqueryHttp, self).__init__(*args, **kwds)
        self._model = bigquery_model

    @staticmethod
    def Factory(bigquery_model):
        """Returns a function that creates a BigqueryHttp with the given model."""
        def _Construct(*args, **kwds):
            # bigquery_model is captured by the closure.
            return BigqueryHttp(bigquery_model, *args, **kwds)
        return _Construct

    def execute(self, **kwds):  # pylint: disable=g-bad-name
        try:
            return super(BigqueryHttp, self).execute(**kwds)
        except apiclient.errors.HttpError as e:
            # TODO(user): Remove this when apiclient supports logging
            # of error responses.
            self._model._log_response(e.resp, e.content)  # pylint: disable=protected-access
            content_type = e.resp.get('content-type', '')
            if content_type.startswith('application/json'):
                BigqueryClient.RaiseError(json.loads(e.content))
            else:
                raise BigqueryCommunicationError(
                    ('Could not connect with BigQuery server.\n'
                     'Http response status: %s\n'
                     'Http response content:\n%s') % (
                         e.resp.get('status', '(unexpected)'), e.content))
class JobIdGenerator(object):
    """Base class for job id generators."""
    # Python 2 style metaclass declaration; makes Generate() abstract.
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def Generate(self, job_configuration):
        """Generates a job_id to use for job_configuration."""
class JobIdGeneratorNone(JobIdGenerator):
    """Generator that always yields None so the server picks the job id."""

    def Generate(self, unused_config):
        return None
class JobIdGeneratorRandom(JobIdGenerator):
    """Generates random job ids."""

    def Generate(self, unused_config):
        # Combines a random hex value with the current time in milliseconds.
        # NOTE: sys.maxint exists only on Python 2.
        return 'bqjob_r%08x_%016x' % (random.randint(0, sys.maxint),
                                      int(time.time() * 1000))
class JobIdGeneratorFingerprint(JobIdGenerator):
    """Generates job ids that uniquely match the job config."""

    def _Hash(self, config, sha1):
        """Folds the contents of a dict into sha1 in a stable key order."""
        # Python dict enumeration ordering is random; visit keys sorted so
        # the fingerprint is stable for equal configs.
        for key in sorted(config.keys()):
            sha1.update('%s' % (key,))
            value = config[key]
            if isinstance(value, dict):
                logging.info('Hashing: %s...', key)
                self._Hash(value, sha1)
            elif isinstance(value, list):
                logging.info('Hashing: %s ...', key)
                for element in value:
                    self._Hash(element, sha1)
            else:
                logging.info('Hashing: %s:%s', key, value)
                sha1.update('%s' % (value,))

    def Generate(self, config):
        digest = hashlib.sha1()
        self._Hash(config, digest)
        job_id = 'bqjob_c%s' % (digest.hexdigest(),)
        logging.info('Fingerprinting: %s:\n%s', config, job_id)
        return job_id
class JobIdGeneratorIncrementing(JobIdGenerator):
    """Generates job ids that increment each time we're asked."""

    def __init__(self, inner):
        # Wraps another generator; each call appends an increasing counter.
        self._inner = inner
        self._retry = 0

    def Generate(self, config):
        # Note: the inner id is regenerated on every call, so consecutive ids
        # share the counter suffix but the inner portion may differ.
        self._retry += 1
        return '%s_%d' % (self._inner.Generate(config), self._retry)
class BigqueryClient(object):
"""Class encapsulating interaction with the BigQuery service."""
def __init__(self, **kwds):
    """Initializes BigqueryClient.

    Required keywords:
      api: the api to connect to, for example "bigquery".
      api_version: the version of the api to connect to, for example "v2".

    Optional keywords:
      project_id: a default project id to use. While not required for
        initialization, a project_id is required when calling any
        method that creates a job on the server. Methods that have
        this requirement pass through **kwds, and will raise
        BigqueryClientConfigurationError if no project_id can be
        found.
      dataset_id: a default dataset id to use.
      discovery_document: the discovery document to use. If None, one
        will be retrieved from the discovery api. If not specified,
        the built-in discovery document will be used.
      job_property: a list of "key=value" strings defining properties
        to apply to all job operations.
      trace: a tracing header to include in all bigquery api requests.
      sync: boolean, when inserting jobs, whether to wait for them to
        complete before returning from the insert request.
      wait_printer_factory: a function that returns a WaitPrinter.
        This will be called for each job that we wait on. See WaitJob().

    Raises:
      ValueError: if keywords are missing or incorrectly specified.
    """
    super(BigqueryClient, self).__init__()
    # Every supplied keyword becomes an attribute on the client.
    for key, value in kwds.iteritems():
        setattr(self, key, value)
    # Built lazily by the apiclient property.
    self._apiclient = None
    for required_flag in ('api', 'api_version'):
        if required_flag not in kwds:
            raise ValueError('Missing required flag: %s' % (required_flag,))
    default_flag_values = {
        'project_id': '',
        'dataset_id': '',
        'discovery_document': _DEFAULT,
        'job_property': '',
        'trace': None,
        'sync': True,
        'wait_printer_factory': BigqueryClient.TransitionWaitPrinter,
        'job_id_generator': JobIdGeneratorIncrementing(JobIdGeneratorRandom()),
        'max_rows_per_request': _MAX_ROWS_PER_REQUEST,
    }
    # Fill in defaults only for attributes the caller did not supply.
    for flagname, default in default_flag_values.iteritems():
        if not hasattr(self, flagname):
            setattr(self, flagname, default)
    if self.dataset_id and not self.project_id:
        raise ValueError('Cannot set dataset_id without project_id')
def GetHttp(self):
    """Returns the httplib2 Http to use."""
    return httplib2.Http()
def GetDiscoveryUrl(self):
    """Returns the url to the discovery document for bigquery."""
    suffix = '/discovery/v1/apis/{api}/{apiVersion}/rest'
    return self.api + suffix
@property
def apiclient(self):
    """Return the apiclient attached to self, building it on first access."""
    if self._apiclient is None:
        # Authorize a fresh http object with the client's credentials.
        http = self.credentials.authorize(self.GetHttp())
        bigquery_model = BigqueryModel(
            trace=self.trace)
        bigquery_http = BigqueryHttp.Factory(
            bigquery_model)
        discovery_document = self.discovery_document
        if discovery_document == _DEFAULT:
            # Use the api description packed with this client, if one exists.
            try:
                discovery_document = pkgutil.get_data(
                    'bigquery_client', 'discovery/%s.bigquery.%s.rest.json'
                    % (_ToFilename(self.api), self.api_version))
            except IOError:
                discovery_document = None
        if discovery_document is None:
            # No local document: fetch one from the discovery service.
            try:
                self._apiclient = discovery.build(
                    'bigquery', self.api_version, http=http,
                    discoveryServiceUrl=self.GetDiscoveryUrl(),
                    model=bigquery_model,
                    requestBuilder=bigquery_http)
            except (httplib2.ServerNotFoundError, apiclient.errors.HttpError), e:
                # We can't find the specified server.
                raise BigqueryCommunicationError(
                    'Cannot contact server. Please try again.\nError: %s'
                    '\nContent: %s' % (str(e), e.content))
            except apiclient.errors.UnknownApiNameOrVersion, e:
                # We can't resolve the discovery url for the given server.
                raise BigqueryCommunicationError(
                    'Invalid API name or version: %s' % (str(e),))
        else:
            self._apiclient = discovery.build_from_document(
                discovery_document, http=http,
                model=bigquery_model,
                requestBuilder=bigquery_http)
    return self._apiclient
#################################
## Utility methods
#################################
@staticmethod
def FormatTime(secs):
    """Render an epoch-seconds value as local time 'DD Mon HH:MM:SS'."""
    local_time = time.localtime(secs)
    return time.strftime('%d %b %H:%M:%S', local_time)
@staticmethod
def FormatAcl(acl):
    """Format a server-returned ACL for printing."""
    # Bucket ACL entries by role so output groups owners/writers/readers.
    acl_entries = {
        'OWNER': [],
        'WRITER': [],
        'READER': [],
    }
    for entry in acl:
        entry = entry.copy()
        role = entry.pop('role', '')
        # After removing 'role', a valid entry has exactly one remaining
        # key describing the grantee.
        if not role or len(entry.values()) != 1:
            raise BigqueryServiceError(
                'Invalid ACL returned by server: %s' % (acl,))
        for key, value in entry.iteritems():
            # TODO(user): Remove this if once we've switched
            # to v2.
            if key == 'allAuthenticatedUsers':
                acl_entries[role].append(key)
            else:
                acl_entries[role].append(value)
    result_lines = []
    if acl_entries['OWNER']:
        result_lines.extend([
            'Owners:', ',\n'.join(' %s' % (o,) for o in acl_entries['OWNER'])])
    if acl_entries['WRITER']:
        result_lines.extend([
            'Writers:', ',\n'.join(' %s' % (o,) for o in acl_entries['WRITER'])])
    if acl_entries['READER']:
        result_lines.extend([
            'Readers:', ',\n'.join(' %s' % (o,) for o in acl_entries['READER'])])
    return '\n'.join(result_lines)
@staticmethod
def FormatSchema(schema):
    """Format a schema for printing."""

    def _Render(fields, depth=0):
        """Render all fields at the given nesting depth, recursing into records."""
        rendered = []
        for field in fields:
            field_type = field.get('type', 'STRING')
            # RECORD fields get a '+' connector; leaf fields get '|'.
            connector = '+' if field_type == 'RECORD' else '|'
            line = '%s- %s: %s' % (connector, field['name'], field_type.lower())
            if field.get('mode', 'NULLABLE') != 'NULLABLE':
                line += ' (%s)' % (field['mode'].lower(),)
            rendered.append('| ' * depth + line)
            if 'fields' in field:
                rendered.extend(_Render(field['fields'], depth + 1))
        return rendered

    return '\n'.join(_Render(schema.get('fields', [])))
@staticmethod
def NormalizeWait(wait):
    """Coerce wait into an int, raising ValueError with a friendly message."""
    try:
        seconds = int(wait)
    except ValueError:
        raise ValueError('Invalid value for wait: %s' % (wait,))
    return seconds
@staticmethod
def ValidatePrintFormat(print_format):
    """Raise ValueError unless print_format is a supported format name."""
    if print_format != 'show' and print_format != 'list':
        raise ValueError('Unknown format: %s' % (print_format,))
@staticmethod
def _ParseIdentifier(identifier):
    """Parses identifier into a tuple of (possibly empty) identifiers.

    This will parse the identifier into a tuple of the form
    (project_id, dataset_id, table_id) without doing any validation on
    the resulting names; missing names are returned as ''. The
    interpretation of these identifiers depends on the context of the
    caller. For example, if you know the identifier must be a job_id,
    then you can assume dataset_id is the job_id.

    Args:
      identifier: string, identifier to parse

    Returns:
      project_id, dataset_id, table_id: (string, string, string)
    """
    # We need to handle the case of a lone project identifier of the
    # form domain.com:proj separately.
    # FIX: raw string for the regex — '\w'/'\d' are invalid string escapes
    # (a DeprecationWarning on newer Pythons); the pattern is unchanged.
    if re.search(r'^\w[\w.]*\.[\w.]+:\w[\w\d_-]*:?$', identifier):
        return identifier, '', ''
    project_id, _, dataset_and_table_id = identifier.rpartition(':')
    if project_id:
        # 'proj:ds.tbl' — split the remainder on the first dot.
        dataset_id, _, table_id = dataset_and_table_id.partition('.')
    else:
        # 'ds.tbl' (no project) — split on the last dot.
        dataset_id, _, table_id = dataset_and_table_id.rpartition('.')
    return project_id, dataset_id, table_id
def GetProjectReference(self, identifier=''):
    """Determine a project reference from an identifier and self.

    Raises:
      BigqueryClientError: if no project can be determined.
    """
    project_id, dataset_id, table_id = BigqueryClient._ParseIdentifier(
        identifier)
    try:
        # ParseIdentifier('foo') is just a table_id, but we want to read
        # it as a project_id.
        project_id = project_id or table_id or self.project_id
        if not dataset_id and project_id:
            return ApiClientHelper.ProjectReference.Create(projectId=project_id)
    except ValueError:
        # Fall through to the error below when Create rejects the id.
        pass
    raise BigqueryClientError('Cannot determine project described by %s' % (
        identifier,))
def GetDatasetReference(self, identifier=''):
    """Determine a DatasetReference from an identifier and self.

    Raises:
      BigqueryError: if no dataset can be determined.
    """
    project_id, dataset_id, table_id = BigqueryClient._ParseIdentifier(
        identifier)
    if table_id and not project_id and not dataset_id:
        # identifier is 'foo': a lone name is read as the dataset id.
        project_id = self.project_id
        dataset_id = table_id
    elif project_id and dataset_id and not table_id:
        # identifier is 'foo:bar'
        pass
    elif not identifier:
        # identifier is '': fall back entirely to the client defaults.
        project_id = self.project_id
        dataset_id = self.dataset_id
    else:
        raise BigqueryError('Cannot determine dataset described by %s' % (
            identifier,))
    try:
        return ApiClientHelper.DatasetReference.Create(
            projectId=project_id, datasetId=dataset_id)
    except ValueError:
        raise BigqueryError('Cannot determine dataset described by %s' % (
            identifier,))
def GetTableReference(self, identifier=''):
"""Determine a TableReference from an identifier and self."""
project_id, dataset_id, table_id = BigqueryClient._ParseIdentifier(
identifier)
try:
return ApiClientHelper.TableReference.Create(
projectId=project_id or self.project_id,
datasetId=dataset_id or self.dataset_id,
tableId=table_id,
)
except ValueError:
raise BigqueryError('Cannot determine table described by %s' % (
identifier,))
def GetReference(self, identifier=''):
"""Try to deduce a project/dataset/table reference from a string.
If the identifier is not compound, treat it as the most specific
identifier we don't have as a flag, or as the table_id. If it is
compound, fill in any unspecified part.
Args:
identifier: string, Identifier to create a reference for.
Returns:
A valid ProjectReference, DatasetReference, or TableReference.
Raises:
BigqueryError: if no valid reference can be determined.
"""
try:
return self.GetTableReference(identifier)
except BigqueryError:
pass
try:
return self.GetDatasetReference(identifier)
except BigqueryError:
pass
try:
return self.GetProjectReference(identifier)
except BigqueryError:
pass
raise BigqueryError('Cannot determine reference for "%s"' % (identifier,))
# TODO(user): consider introducing job-specific and possibly
# dataset- and project-specific parsers for the case of knowing what
# type we are looking for. Reinterpreting "dataset_id" as "job_id"
# is rather confusing.
def GetJobReference(self, identifier=''):
"""Determine a JobReference from an identifier and self."""
project_id, dataset_id, table_id = BigqueryClient._ParseIdentifier(
identifier)
if table_id and not project_id and not dataset_id:
# identifier is 'foo'
project_id = self.project_id
job_id = table_id
elif project_id and dataset_id and not table_id:
# identifier is 'foo:bar'
job_id = dataset_id
else:
job_id = None
if job_id:
try:
return ApiClientHelper.JobReference.Create(
projectId=project_id, jobId=job_id)
except ValueError:
pass
raise BigqueryError('Cannot determine job described by %s' % (
identifier,))
  def GetObjectInfo(self, reference):
    """Get all data returned by the server about a specific object.

    Args:
      reference: an ApiClientHelper Project/Job/Dataset/TableReference.

    Returns:
      The server's resource dict for the referenced object.

    Raises:
      BigqueryNotFoundError: if a ProjectReference matches no project.
      TypeError: if reference is not a supported reference type.
    """
    # Projects are handled separately, because we only have
    # bigquery.projects.list.
    if isinstance(reference, ApiClientHelper.ProjectReference):
      projects = self.ListProjects()
      for project in projects:
        if BigqueryClient.ConstructObjectReference(project) == reference:
          # Tag the list entry with a kind so it resembles the
          # resources returned by the get() calls below.
          project['kind'] = 'bigquery#project'
          return project
      raise BigqueryNotFoundError('Unknown %r' % (reference,))
    if isinstance(reference, ApiClientHelper.JobReference):
      return self.apiclient.jobs().get(**dict(reference)).execute()
    elif isinstance(reference, ApiClientHelper.DatasetReference):
      return self.apiclient.datasets().get(**dict(reference)).execute()
    elif isinstance(reference, ApiClientHelper.TableReference):
      return self.apiclient.tables().get(**dict(reference)).execute()
    else:
      raise TypeError('Type of reference must be one of: ProjectReference, '
                      'JobReference, DatasetReference, or TableReference')
def GetTableSchema(self, table_dict):
table_info = self.apiclient.tables().get(**table_dict).execute()
return table_info.get('schema', {})
def ReadTableRows(self, table_dict, max_rows=_MAX_ROWS_PER_REQUEST):
"""Read at most max_rows rows from a table."""
table_ref = ApiClientHelper.TableReference.Create(**table_dict)
return _TableTableReader(
self.apiclient,
max_rows,
table_ref).ReadRows()
def ReadJobRows(self, job_dict, max_rows=_MAX_ROWS_PER_REQUEST):
"""Read at most max_rows rows from a query result."""
job_ref = ApiClientHelper.JobReference.Create(**job_dict)
return _JobTableReader(self.apiclient, max_rows, job_ref).ReadRows()
def InsertTableRows(self, table_dict, inserts):
"""Insert rows into a table.
Arguments:
table_dict: table reference into which rows are to be inserted.
inserts: array of InsertEntry tuples where insert_id can be None.
Returns:
result of the operation.
"""
def _EncodeInsert(insert):
encoded = dict(json=insert.record)
if insert.insert_id:
encoded['insertId'] = insert.insert_id
return encoded
op = self.apiclient.tabledata().insertAll(
body=dict(rows=map(_EncodeInsert, inserts)),
**table_dict)
return op.execute()
def ReadSchemaAndRows(self, table_dict, start_row=0,
max_rows=_MAX_ROWS_PER_REQUEST):
"""Convenience method to get the schema and rows from a table.
Arguments:
table_dict: table reference dictionary.
start_row: first row to read.
max_rows: number of rows to read.
Returns:
A tuple where the first item is the list of fields and the
second item a list of rows.
"""
table_ref = ApiClientHelper.TableReference.Create(**table_dict)
return _TableTableReader(self.apiclient, self.max_rows_per_request,
table_ref).ReadSchemaAndRows(start_row, max_rows)
def ReadSchemaAndJobRows(self, job_dict, start_row=0,
max_rows=_MAX_ROWS_PER_REQUEST):
"""Convenience method to get the schema and rows from job query result.
Arguments:
job_dict: job reference dictionary.
start_row: first row to read.
max_rows: number of rows to read.
Returns:
A tuple where the first item is the list of fields and the
second item a list of rows.
"""
job_ref = ApiClientHelper.JobReference.Create(**job_dict)
reader = _JobTableReader(self.apiclient, self.max_rows_per_request,
job_ref)
return reader.ReadSchemaAndRows(start_row, max_rows)
  @staticmethod
  def ConfigureFormatter(formatter, reference_type, print_format='list'):
    """Configure a formatter for a given reference type.

    If print_format is 'show', configures the formatter with several
    additional fields (useful for printing a single record).

    Arguments:
      formatter: TableFormatter object to configure.
      reference_type: Type of object this formatter will be used with.
      print_format: Either 'show' or 'list' to control what fields are
        included.

    Raises:
      ValueError: If reference_type or format is unknown.
    """
    BigqueryClient.ValidatePrintFormat(print_format)
    if reference_type == ApiClientHelper.JobReference:
      if print_format == 'list':
        formatter.AddColumns(('jobId',))
      # These columns appear for both 'list' and 'show'.
      formatter.AddColumns(
          ('Job Type', 'State', 'Start Time', 'Duration',))
      if print_format == 'show':
        formatter.AddColumns(('Bytes Processed',))
    elif reference_type == ApiClientHelper.ProjectReference:
      if print_format == 'list':
        formatter.AddColumns(('projectId',))
      # friendlyName is shown for both formats.
      formatter.AddColumns(('friendlyName',))
    elif reference_type == ApiClientHelper.DatasetReference:
      if print_format == 'list':
        formatter.AddColumns(('datasetId',))
      if print_format == 'show':
        formatter.AddColumns(('Last modified', 'ACLs',))
    elif reference_type == ApiClientHelper.TableReference:
      if print_format == 'list':
        formatter.AddColumns(('tableId',))
      if print_format == 'show':
        formatter.AddColumns(('Last modified', 'Schema',
                              'Total Rows', 'Total Bytes',
                              'Expiration'))
    else:
      raise ValueError('Unknown reference type: %s' % (
          reference_type.__name__,))
@staticmethod
def RaiseError(result):
"""Raises an appropriate BigQuery error given the json error result."""
error = result.get('error', {}).get('errors', [{}])[0]
raise BigqueryError.Create(error, result, [])
@staticmethod
def IsFailedJob(job):
"""Predicate to determine whether or not a job failed."""
return 'errorResult' in job.get('status', {})
  @staticmethod
  def RaiseIfJobError(job):
    """Raises a BigQueryError if the job is in an error state.

    Args:
      job: a Job resource.

    Returns:
      job, if it is not in an error state.

    Raises:
      BigqueryError: A BigqueryError instance based on the job's error
      description.
    """
    if BigqueryClient.IsFailedJob(job):
      error = job['status']['errorResult']
      error_ls = job['status'].get('errors', [])
      # NOTE(review): `error` is passed as both the error and the
      # server-response argument of Create; RaiseError passes the whole
      # result for the latter -- confirm this asymmetry is intentional.
      raise BigqueryError.Create(
          error, error, error_ls,
          job_ref=BigqueryClient.ConstructObjectReference(job))
    return job
@staticmethod
def GetJobTypeName(job_info):
"""Helper for job printing code."""
job_names = set(('extract', 'load', 'query', 'copy'))
try:
return set(job_info.get('configuration', {}).keys()).intersection(
job_names).pop()
except KeyError:
return None
@staticmethod
def ProcessSources(source_string):
"""Take a source string and return a list of URIs.
The list will consist of either a single local filename, which
we check exists and is a file, or a list of gs:// uris.
Args:
source_string: A comma-separated list of URIs.
Returns:
List of one or more valid URIs, as strings.
Raises:
BigqueryClientError: if no valid list of sources can be determined.
"""
sources = [source.strip() for source in source_string.split(',')]
gs_uris = [source for source in sources if source.startswith('gs://')]
if not sources:
raise BigqueryClientError('No sources specified')
if gs_uris:
if len(gs_uris) != len(sources):
raise BigqueryClientError('All URIs must begin with "gs://" if any do.')
return sources
else:
source = sources[0]
if len(sources) > 1:
raise BigqueryClientError(
'Local upload currently supports only one file, found %d' % (
len(sources),))
if not os.path.exists(source):
raise BigqueryClientError('Source file not found: %s' % (source,))
if not os.path.isfile(source):
raise BigqueryClientError('Source path is not a file: %s' % (source,))
return sources
  @staticmethod
  def ReadSchema(schema):
    """Create a schema from a string or a filename.

    If schema does not contain ':' and is the name of an existing
    file, read it as a JSON schema. If not, it must be a
    comma-separated list of fields in the form name:type.

    Args:
      schema: A filename or schema.

    Returns:
      The new schema (as a dict).

    Raises:
      BigquerySchemaError:
        If the schema is invalid or the filename does not exist.
    """
    def NewField(entry):
      # 'name:type' -> one schema field; a bare 'name' defaults to STRING.
      name, _, field_type = entry.partition(':')
      if entry.count(':') > 1 or not name.strip():
        raise BigquerySchemaError('Invalid schema entry: %s' % (entry,))
      return {
          'name': name.strip(),
          'type': field_type.strip().upper() or 'STRING',
          }
    if not schema:
      raise BigquerySchemaError('Schema cannot be empty')
    elif os.path.exists(schema):
      # An existing file: parse the whole file as a JSON schema document.
      with open(schema) as f:
        try:
          return json.load(f)
        except ValueError, e:
          raise BigquerySchemaError(
              ('Error decoding JSON schema from file %s: %s\n'
               'To specify a one-column schema, use "name:string".') % (
                   schema, e))
    elif re.match(r'[./\\]', schema) is not None:
      # We have something that looks like a filename, but we didn't
      # find it. Tell the user about the problem now, rather than wait
      # for a round-trip to the server.
      raise BigquerySchemaError(
          ('Error reading schema: "%s" looks like a filename, '
           'but was not found.') % (schema,))
    else:
      # Inline schema: comma-separated name:type entries.
      return [NewField(entry) for entry in schema.split(',')]
@staticmethod
def _KindToName(kind):
"""Convert a kind to just a type name."""
return kind.partition('#')[2]
@staticmethod
def FormatInfoByKind(object_info):
"""Format a single object_info (based on its 'kind' attribute)."""
kind = BigqueryClient._KindToName(object_info.get('kind'))
if kind == 'job':
return BigqueryClient.FormatJobInfo(object_info)
elif kind == 'project':
return BigqueryClient.FormatProjectInfo(object_info)
elif kind == 'dataset':
return BigqueryClient.FormatDatasetInfo(object_info)
elif kind == 'table':
return BigqueryClient.FormatTableInfo(object_info)
else:
raise ValueError('Unknown object type: %s' % (kind,))
  @staticmethod
  def FormatJobInfo(job_info):
    """Prepare a job_info for printing.

    Arguments:
      job_info: Job dict to format.

    Returns:
      The new job_info.
    """
    result = job_info.copy()
    reference = BigqueryClient.ConstructObjectReference(result)
    result.update(dict(reference))
    if 'startTime' in result.get('statistics', {}):
      # Server timestamps are in milliseconds since the epoch.
      start = int(result['statistics']['startTime']) / 1000
      if 'endTime' in result['statistics']:
        duration_seconds = int(result['statistics']['endTime']) / 1000 - start
        result['Duration'] = str(datetime.timedelta(seconds=duration_seconds))
      result['Start Time'] = BigqueryClient.FormatTime(start)
    result['Job Type'] = BigqueryClient.GetJobTypeName(result)
    result['State'] = result['status']['state']
    if result['State'] == 'DONE':
      # Split the API's single terminal state into SUCCESS/FAILURE.
      try:
        BigqueryClient.RaiseIfJobError(result)
        result['State'] = 'SUCCESS'
      except BigqueryError:
        result['State'] = 'FAILURE'
    if 'totalBytesProcessed' in result.get('statistics', {}):
      result['Bytes Processed'] = result['statistics']['totalBytesProcessed']
    return result
@staticmethod
def FormatProjectInfo(project_info):
"""Prepare a project_info for printing.
Arguments:
project_info: Project dict to format.
Returns:
The new project_info.
"""
result = project_info.copy()
reference = BigqueryClient.ConstructObjectReference(result)
result.update(dict(reference))
return result
@staticmethod
def FormatDatasetInfo(dataset_info):
"""Prepare a dataset_info for printing.
Arguments:
dataset_info: Dataset dict to format.
Returns:
The new dataset_info.
"""
result = dataset_info.copy()
reference = BigqueryClient.ConstructObjectReference(result)
result.update(dict(reference))
if 'lastModifiedTime' in result:
result['Last modified'] = BigqueryClient.FormatTime(
int(result['lastModifiedTime']) / 1000)
if 'access' in result:
result['ACLs'] = BigqueryClient.FormatAcl(result['access'])
return result
@staticmethod
def FormatTableInfo(table_info):
"""Prepare a table_info for printing.
Arguments:
table_info: Table dict to format.
Returns:
The new table_info.
"""
result = table_info.copy()
reference = BigqueryClient.ConstructObjectReference(result)
result.update(dict(reference))
if 'lastModifiedTime' in result:
result['Last modified'] = BigqueryClient.FormatTime(
int(result['lastModifiedTime']) / 1000)
if 'schema' in result:
result['Schema'] = BigqueryClient.FormatSchema(result['schema'])
if 'numBytes' in result:
result['Total Bytes'] = result['numBytes']
if 'numRows' in result:
result['Total Rows'] = result['numRows']
if 'expirationTime' in result:
result['Expiration'] = BigqueryClient.FormatTime(
int(result['expirationTime']) / 1000)
return result
@staticmethod
def ConstructObjectReference(object_info):
"""Construct a Reference from a server response."""
if 'kind' in object_info:
typename = BigqueryClient._KindToName(object_info['kind'])
lower_camel = typename + 'Reference'
if lower_camel not in object_info:
raise ValueError('Cannot find %s in object of type %s: %s' % (
lower_camel, typename, object_info))
else:
keys = [k for k in object_info if k.endswith('Reference')]
if len(keys) != 1:
raise ValueError('Expected one Reference, found %s: %s' % (
len(keys), keys))
lower_camel = keys[0]
upper_camel = lower_camel[0].upper() + lower_camel[1:]
reference_type = getattr(ApiClientHelper, upper_camel, None)
if reference_type is None:
raise ValueError('Unknown reference type: %s' % (typename,))
return reference_type.Create(**object_info[lower_camel])
@staticmethod
def ConstructObjectInfo(reference):
"""Construct an Object from an ObjectReference."""
typename = reference.__class__.__name__
lower_camel = typename[0].lower() + typename[1:]
return {lower_camel: dict(reference)}
def _PrepareListRequest(self, reference, max_results=None, page_token=None):
request = dict(reference)
if max_results is not None:
request['maxResults'] = max_results
if page_token is not None:
request['pageToken'] = page_token
return request
def _NormalizeProjectReference(self, reference):
if reference is None:
try:
return self.GetProjectReference()
except BigqueryClientError:
raise BigqueryClientError(
'Project reference or a default project is required')
return reference
def ListJobRefs(self, **kwds):
return map( # pylint: disable=g-long-lambda
BigqueryClient.ConstructObjectReference, self.ListJobs(**kwds))
  def ListJobs(self, reference=None,
               max_results=None, state_filter=None,
               all_users=None):
    """Return a list of jobs.

    Args:
      reference: The ProjectReference to list jobs for.
      max_results: The maximum number of jobs to return.
      state_filter: A single state filter or a list of filters to
        apply. If not specified, no filtering is applied.
      all_users: Whether to list jobs for all users of the project. Requesting
        user must be an owner of the project to list all jobs.

    Returns:
      A list of jobs.
    """
    reference = self._NormalizeProjectReference(reference)
    _Typecheck(reference, ApiClientHelper.ProjectReference, method='ListJobs')
    request = self._PrepareListRequest(reference, max_results, None)
    if state_filter is not None:
      # The apiclient wants enum values as lowercase strings.
      if isinstance(state_filter, basestring):
        state_filter = state_filter.lower()
      else:
        state_filter = [s.lower() for s in state_filter]
    # projection='full' requests complete (not minimal) job entries.
    _ApplyParameters(request, projection='full',
                     state_filter=state_filter, all_users=all_users)
    jobs = self.apiclient.jobs().list(**request).execute()
    return jobs.get('jobs', [])
def ListProjectRefs(self, **kwds):
"""List the project references this user has access to."""
return map( # pylint: disable=g-long-lambda
BigqueryClient.ConstructObjectReference, self.ListProjects(**kwds))
def ListProjects(self, max_results=None, page_token=None):
"""List the projects this user has access to."""
request = self._PrepareListRequest({}, max_results, page_token)
result = self.apiclient.projects().list(**request).execute()
return result.get('projects', [])
def ListDatasetRefs(self, **kwds):
return map( # pylint: disable=g-long-lambda
BigqueryClient.ConstructObjectReference, self.ListDatasets(**kwds))
def ListDatasets(self, reference=None, max_results=None, page_token=None,
list_all=None):
"""List the datasets associated with this reference."""
reference = self._NormalizeProjectReference(reference)
_Typecheck(reference, ApiClientHelper.ProjectReference,
method='ListDatasets')
request = self._PrepareListRequest(reference, max_results, page_token)
if list_all is not None:
request['all'] = list_all
result = self.apiclient.datasets().list(**request).execute()
return result.get('datasets', [])
def ListTableRefs(self, **kwds):
return map( # pylint: disable=g-long-lambda
BigqueryClient.ConstructObjectReference, self.ListTables(**kwds))
def ListTables(self, reference, max_results=None, page_token=None):
"""List the tables associated with this reference."""
_Typecheck(reference, ApiClientHelper.DatasetReference, method='ListTables')
request = self._PrepareListRequest(reference, max_results, page_token)
result = self.apiclient.tables().list(**request).execute()
return result.get('tables', [])
#################################
## Table and dataset management
#################################
def CopyTable(self, source_reference, dest_reference,
create_disposition=None, write_disposition=None,
ignore_already_exists=False, **kwds):
"""Copies a table.
Args:
source_reference: TableReference of source table.
dest_reference: TableReference of destination table.
create_disposition: Optional. Specifies the create_disposition for
the dest_reference.
write_disposition: Optional. Specifies the write_disposition for
the dest_reference.
ignore_already_exists: Whether to ignore "already exists" errors.
**kwds: Passed on to ExecuteJob.
Returns:
The job description, or None for ignored errors.
Raises:
BigqueryDuplicateError: when write_disposition 'WRITE_EMPTY' is
specified and the dest_reference table already exists.
"""
_Typecheck(source_reference, ApiClientHelper.TableReference,
method='CopyTable')
_Typecheck(dest_reference, ApiClientHelper.TableReference,
method='CopyTable')
copy_config = {
'destinationTable': dict(dest_reference),
'sourceTable': dict(source_reference),
}
_ApplyParameters(copy_config, create_disposition=create_disposition,
write_disposition=write_disposition)
try:
return self.ExecuteJob({'copy': copy_config}, **kwds)
except BigqueryDuplicateError, e:
if ignore_already_exists:
return None
raise e
def DatasetExists(self, reference):
_Typecheck(reference, ApiClientHelper.DatasetReference,
method='DatasetExists')
try:
self.apiclient.datasets().get(**dict(reference)).execute()
return True
except BigqueryNotFoundError:
return False
def TableExists(self, reference):
_Typecheck(reference, ApiClientHelper.TableReference, method='TableExists')
try:
self.apiclient.tables().get(**dict(reference)).execute()
return True
except BigqueryNotFoundError:
return False
def CreateDataset(self, reference, ignore_existing=False, description=None,
friendly_name=None, acl=None):
"""Create a dataset corresponding to DatasetReference.
Args:
reference: the DatasetReference to create.
ignore_existing: (boolean, default False) If False, raise
an exception if the dataset already exists.
description: an optional dataset description.
friendly_name: an optional friendly name for the dataset.
acl: an optional ACL for the dataset, as a list of dicts.
Raises:
TypeError: if reference is not a DatasetReference.
BigqueryDuplicateError: if reference exists and ignore_existing
is False.
"""
_Typecheck(reference, ApiClientHelper.DatasetReference,
method='CreateDataset')
body = BigqueryClient.ConstructObjectInfo(reference)
if friendly_name is not None:
body['friendlyName'] = friendly_name
if description is not None:
body['description'] = description
if acl is not None:
body['access'] = acl
try:
self.apiclient.datasets().insert(
body=body,
**dict(reference.GetProjectReference())).execute()
except BigqueryDuplicateError:
if not ignore_existing:
raise
  def CreateTable(self, reference, ignore_existing=False, schema=None,
                  description=None, friendly_name=None, expiration=None):
    """Create a table corresponding to TableReference.

    Args:
      reference: the TableReference to create.
      ignore_existing: (boolean, default False) If False, raise
        an exception if the table already exists.
      schema: an optional schema.
      description: an optional table description.
      friendly_name: an optional friendly name for the table.
      expiration: optional expiration time in milliseconds since the epoch.

    Raises:
      TypeError: if reference is not a TableReference.
      BigqueryDuplicateError: if reference exists and ignore_existing
        is False.
    """
    _Typecheck(reference, ApiClientHelper.TableReference, method='CreateTable')
    try:
      body = BigqueryClient.ConstructObjectInfo(reference)
      if schema:
        body['schema'] = {'fields': schema}
      if friendly_name is not None:
        body['friendlyName'] = friendly_name
      if description is not None:
        body['description'] = description
      if expiration is not None:
        body['expirationTime'] = expiration
      self.apiclient.tables().insert(
          body=body,
          **dict(reference.GetDatasetReference())).execute()
    except BigqueryDuplicateError:
      if not ignore_existing:
        raise
def UpdateTable(self, reference, schema=None,
description=None, friendly_name=None, expiration=None):
"""Updates a table.
Args:
reference: the TableReference to update.
schema: an optional schema.
description: an optional table description.
friendly_name: an optional friendly name for the table.
expiration: optional expiration time in milliseconds since the epoch.
Raises:
TypeError: if reference is not a TableReference.
"""
_Typecheck(reference, ApiClientHelper.TableReference, method='UpdateTable')
body = BigqueryClient.ConstructObjectInfo(reference)
if schema:
body['schema'] = {'fields': schema}
if friendly_name is not None:
body['friendlyName'] = friendly_name
if description is not None:
body['description'] = description
if expiration is not None:
body['expirationTime'] = expiration
self.apiclient.tables().patch(body=body, **dict(reference)).execute()
def UpdateDataset(self, reference,
description=None, friendly_name=None, acl=None):
"""Updates a dataset.
Args:
reference: the DatasetReference to update.
description: an optional dataset description.
friendly_name: an optional friendly name for the dataset.
acl: an optional ACL for the dataset, as a list of dicts.
Raises:
TypeError: if reference is not a DatasetReference.
"""
_Typecheck(reference, ApiClientHelper.DatasetReference,
method='UpdateDataset')
body = BigqueryClient.ConstructObjectInfo(reference)
if friendly_name is not None:
body['friendlyName'] = friendly_name
if description is not None:
body['description'] = description
if acl is not None:
body['access'] = acl
self.apiclient.datasets().patch(body=body, **dict(reference)).execute()
def DeleteDataset(self, reference, ignore_not_found=False,
delete_contents=None):
"""Deletes DatasetReference reference.
Args:
reference: the DatasetReference to delete.
ignore_not_found: Whether to ignore "not found" errors.
delete_contents: [Boolean] Whether to delete the contents of
non-empty datasets. If not specified and the dataset has
tables in it, the delete will fail. If not specified, the
server default applies.
Raises:
TypeError: if reference is not a DatasetReference.
BigqueryNotFoundError: if reference does not exist and
ignore_not_found is False.
"""
_Typecheck(reference, ApiClientHelper.DatasetReference,
method='DeleteDataset')
args = dict(reference)
if delete_contents is not None:
args['deleteContents'] = delete_contents
try:
self.apiclient.datasets().delete(**args).execute()
except BigqueryNotFoundError:
if not ignore_not_found:
raise
def DeleteTable(self, reference, ignore_not_found=False):
"""Deletes TableReference reference.
Args:
reference: the TableReference to delete.
ignore_not_found: Whether to ignore "not found" errors.
Raises:
TypeError: if reference is not a TableReference.
BigqueryNotFoundError: if reference does not exist and
ignore_not_found is False.
"""
_Typecheck(reference, ApiClientHelper.TableReference, method='DeleteTable')
try:
self.apiclient.tables().delete(**dict(reference)).execute()
except BigqueryNotFoundError:
if not ignore_not_found:
raise
#################################
## Job control
#################################
  def StartJob(self, configuration,
               project_id=None, upload_file=None, job_id=None):
    """Start a job with the given configuration.

    Args:
      configuration: The configuration for a job.
      project_id: The project_id to run the job under. If None,
        self.project_id is used.
      upload_file: A file to include as a media upload to this request.
        Only valid on job requests that expect a media upload file.
      job_id: A unique job_id to use for this job. If a
        JobIdGenerator, a job id will be generated from the job configuration.
        If None, a unique job_id will be created for this request.

    Returns:
      The job resource returned from the insert job request. If there is an
      error, the jobReference field will still be filled out with the job
      reference used in the request.

    Raises:
      BigqueryClientConfigurationError: if project_id and
        self.project_id are None.
    """
    project_id = project_id or self.project_id
    if not project_id:
      raise BigqueryClientConfigurationError(
          'Cannot start a job without a project id.')
    configuration = configuration.copy()
    if self.job_property:
      # Each property is a 'key=value' string; partition()[0::2]
      # extracts the (key, value) pair around the first '='.
      configuration['properties'] = dict(
          prop.partition('=')[0::2] for prop in self.job_property)
    job_request = {'configuration': configuration}
    # Use the default job id generator if no job id was supplied.
    job_id = job_id or self.job_id_generator
    if isinstance(job_id, JobIdGenerator):
      job_id = job_id.Generate(configuration)
    if job_id is not None:
      job_reference = {'jobId': job_id, 'projectId': project_id}
      job_request['jobReference'] = job_reference
    media_upload = ''
    if upload_file:
      # Resumable uploads can be continued after transient failures
      # without re-sending the whole file.
      resumable = True
      media_upload = http_request.MediaFileUpload(
          filename=upload_file, mimetype='application/octet-stream',
          resumable=resumable)
    result = self.apiclient.jobs().insert(
        body=job_request, media_body=media_upload,
        projectId=project_id).execute()
    return result
  def _StartQueryRpc(self,
                     query,
                     dry_run=None,
                     use_cache=None,
                     preserve_nulls=None,
                     max_results=None,
                     timeout_ms=None,
                     min_completion_ratio=None,
                     project_id=None,
                     **kwds):
    """Executes the given query using the rpc-style query api.

    Args:
      query: Query to execute.
      dry_run: Optional. Indicates whether the query will only be validated and
        return processing statistics instead of actually running.
      use_cache: Optional. Whether to use the query cache.
        Caching is best-effort only and you should not make
        assumptions about whether or how long a query result will be cached.
      preserve_nulls: Optional. Indicates whether to preserve nulls in input
        data. Temporary flag; will be removed in a future version.
      max_results: Maximum number of results to return.
      timeout_ms: Timeout, in milliseconds, for the call to query().
      min_completion_ratio: Optional. Specifies the minimum fraction of
        data that must be scanned before a query returns. This value should be
        between 0.0 and 1.0 inclusive.
      project_id: Project id to use.
      **kwds: Extra keyword arguments passed directly to jobs.Query().

    Returns:
      The query response.

    Raises:
      BigqueryClientConfigurationError: if project_id and
        self.project_id are None.
    """
    project_id = project_id or self.project_id
    if not project_id:
      raise BigqueryClientConfigurationError(
          'Cannot run a query without a project id.')
    request = {'query': query}
    if self.dataset_id:
      # Unqualified table names in the query resolve against this dataset.
      request['defaultDataset'] = dict(self.GetDatasetReference())
    _ApplyParameters(
        request,
        preserve_nulls=preserve_nulls,
        use_query_cache=use_cache,
        timeout_ms=timeout_ms,
        max_results=max_results,
        min_completion_ratio=min_completion_ratio)
    _ApplyParameters(request, dry_run=dry_run)
    return self.apiclient.jobs().query(
        body=request, projectId=project_id, **kwds).execute()
def GetQueryResults(self, job_id=None, project_id=None,
max_results=None, timeout_ms=None):
"""Waits for a query to complete, once.
Args:
job_id: The job id of the query job that we are waiting to complete.
project_id: The project id of the query job.
max_results: The maximum number of results.
timeout_ms: The number of milliseconds to wait for the query to complete.
Returns:
The getQueryResults() result.
Raises:
BigqueryClientConfigurationError: if project_id and
self.project_id are None.
"""
project_id = project_id or self.project_id
if not project_id:
raise BigqueryClientConfigurationError(
'Cannot get query results without a project id.')
kwds = {}
_ApplyParameters(kwds,
job_id=job_id,
project_id=project_id,
timeout_ms=timeout_ms,
max_results=max_results)
return self.apiclient.jobs().getQueryResults(**kwds).execute()
def RunJobSynchronously(self, configuration, project_id=None,
upload_file=None, job_id=None):
result = self.StartJob(configuration, project_id=project_id,
upload_file=upload_file, job_id=job_id)
if result['status']['state'] != 'DONE':
job_reference = BigqueryClient.ConstructObjectReference(result)
result = self.WaitJob(job_reference)
return self.RaiseIfJobError(result)
def ExecuteJob(self, configuration, sync=None,
project_id=None, upload_file=None, job_id=None):
"""Execute a job, possibly waiting for results."""
if sync is None:
sync = self.sync
if sync:
job = self.RunJobSynchronously(
configuration, project_id=project_id, upload_file=upload_file,
job_id=job_id)
else:
job = self.StartJob(
configuration, project_id=project_id, upload_file=upload_file,
job_id=job_id)
self.RaiseIfJobError(job)
return job
  class WaitPrinter(object):
    """Base class that defines the WaitPrinter interface.

    Subclasses decide how (or whether) to display job-wait progress.
    """
    def Print(self, job_id, wait_time, status):
      """Prints status for the current job we are waiting on.

      Args:
        job_id: the identifier for this job.
        wait_time: the number of seconds we have been waiting so far.
        status: the status of the job we are waiting for.

      Raises:
        NotImplementedError: always; subclasses must override.
      """
      raise NotImplementedError('Subclass must implement Print')
    def Done(self):
      """Waiting is done and no more Print calls will be made.

      This function should handle the case of Print not being called.

      Raises:
        NotImplementedError: always; subclasses must override.
      """
      raise NotImplementedError('Subclass must implement Done')
  class WaitPrinterHelper(WaitPrinter):
    """A Done implementation that prints based off a property."""
    # Set by subclasses once they have produced transient output.
    print_on_done = False
    def Done(self):
      if self.print_on_done:
        # Emit the newline that transient '\r' output suppressed.
        print
class QuietWaitPrinter(WaitPrinterHelper):
"""A WaitPrinter that prints nothing."""
def Print(self, unused_job_id, unused_wait_time, unused_status):
pass
  class VerboseWaitPrinter(WaitPrinterHelper):
    """A WaitPrinter that prints every update."""
    def Print(self, job_id, wait_time, status):
      # Record that output happened so Done() emits a final newline.
      self.print_on_done = True
      # '\r' rewrites the same console line; the trailing comma
      # suppresses the newline (Python 2 print statement).
      print '\rWaiting on %s ... (%ds) Current status: %-7s' % (
          job_id, wait_time, status),
      # NOTE(review): the print above writes to stdout but stderr is
      # flushed here -- possibly should be sys.stdout.flush(); confirm.
      sys.stderr.flush()
  class TransitionWaitPrinter(VerboseWaitPrinter):
    """A WaitPrinter that only prints status change updates."""
    # Last status passed to Print; None until the first call, so the
    # first update always prints.
    _previous_status = None
    def Print(self, job_id, wait_time, status):
      if status != self._previous_status:
        self._previous_status = status
        # The nested class must be referenced via BigqueryClient for
        # the old-style super() call to resolve.
        super(BigqueryClient.TransitionWaitPrinter, self).Print(
            job_id, wait_time, status)
def WaitJob(self, job_reference, status='DONE',
            wait=sys.maxint, wait_printer_factory=None):
  """Poll for a job to run until it reaches the requested status.

  Arguments:
    job_reference: JobReference to poll.
    status: (optional, default 'DONE') Desired job status.
    wait: (optional, default maxint) Max wait time in seconds.
    wait_printer_factory: (optional, defaults to
      self.wait_printer_factory) Returns a subclass of WaitPrinter
      that will be called after each job poll.

  Returns:
    The job object returned by the final status call.

  Raises:
    StopIteration: If polling does not reach the desired state before
      timing out.
    ValueError: If given an invalid wait value.
  """
  _Typecheck(job_reference, ApiClientHelper.JobReference, method='WaitJob')
  start_time = time.time()
  job = None
  if wait_printer_factory:
    printer = wait_printer_factory()
  else:
    printer = self.wait_printer_factory()
  # This is a first pass at wait logic: we ping at 1s intervals a few
  # times, then increase to max(3, max_wait), and then keep waiting
  # that long until we've run out of time.
  waits = itertools.chain(
      itertools.repeat(1, 8),
      xrange(2, 30, 3),
      itertools.repeat(30))
  current_wait = 0
  current_status = 'UNKNOWN'
  while current_wait <= wait:
    try:
      done, job = self.PollJob(job_reference, status=status, wait=wait)
      current_status = job['status']['state']
      if done:
        printer.Print(job_reference.jobId, current_wait, current_status)
        break
    except BigqueryCommunicationError, e:
      # Communication errors while waiting on a job are okay.
      logging.warning('Transient error during job status check: %s', e)
    except BigqueryBackendError, e:
      # Temporary server errors while waiting on a job are okay.
      logging.warning('Transient error during job status check: %s', e)
    # Sleep one second at a time (waits.next() tells us how many) so the
    # printer can refresh the elapsed time between server polls.
    for _ in xrange(waits.next()):
      current_wait = time.time() - start_time
      printer.Print(job_reference.jobId, current_wait, current_status)
      time.sleep(1)
  else:
    # The while loop exhausted its time budget without hitting `break`,
    # i.e. the desired status was never observed.
    raise StopIteration(
        'Wait timed out. Operation not finished, in state %s' % (
            current_status,))
  printer.Done()
  return job
def PollJob(self, job_reference, status='DONE', wait=0):
  """Poll a job once for a specific status.

  Arguments:
    job_reference: JobReference to poll.
    status: (optional, default 'DONE') Desired job status.
    wait: (optional, default 0) Max server-side wait time for one poll call.

  Returns:
    Tuple (in_state, job) where in_state is True if job is
    in the desired state.

  Raises:
    ValueError: If given an invalid wait value.
  """
  _Typecheck(job_reference, ApiClientHelper.JobReference, method='PollJob')
  # Normalize up front so an invalid wait raises before any RPC is made.
  wait = BigqueryClient.NormalizeWait(wait)
  poll_request = dict(job_reference)
  job = self.apiclient.jobs().get(**poll_request).execute()
  current_state = job['status']['state']
  return (current_state == status, job)
#################################
## Wrappers for job types
#################################
def RunQuery(self, **kwds):
  """Run a query job synchronously, and return the result.

  Args:
    **kwds: Passed on to self.Query.

  Returns:
    The rows in the query result as a list.
  """
  # Force synchronous execution regardless of what the caller passed.
  query_kwds = dict(kwds, sync=True)
  job = self.Query(**query_kwds)
  return self.ReadJobRows(job['jobReference'])
def RunQueryRpc(self,
                query,
                dry_run=None,
                use_cache=None,
                preserve_nulls=None,
                max_results=None,
                wait=sys.maxint,
                min_completion_ratio=None,
                wait_printer_factory=None,
                max_single_wait=None,
                **kwds):
  """Executes the given query using the rpc-style query api.

  Args:
    query: Query to execute.
    dry_run: Optional. Indicates whether the query will only be validated and
      return processing statistics instead of actually running.
    use_cache: Optional. Whether to use the query cache.
      Caching is best-effort only and you should not make
      assumptions about whether or how long a query result will be cached.
    preserve_nulls: Optional. Indicates whether to preserve nulls in input
      data. Temporary flag; will be removed in a future version.
    max_results: Optional. Maximum number of results to return.
    wait: (optional, default maxint) Max wait time in seconds.
    min_completion_ratio: Optional. Specifies the minimum fraction of
      data that must be scanned before a query returns. This value should be
      between 0.0 and 1.0 inclusive.
    wait_printer_factory: (optional, defaults to
      self.wait_printer_factory) Returns a subclass of WaitPrinter
      that will be called after each job poll.
    max_single_wait: Optional. Maximum number of seconds to wait for each call
      to query() / getQueryResults().
    **kwds: Passed directly to self._StartQueryRpc.

  Raises:
    BigqueryClientError: if no query is provided.
    StopIteration: if the query does not complete within wait seconds.

  Returns:
    A tuple containing the schema fields and list of results of the query.
  """
  if not self.sync:
    raise BigqueryClientError('Running RPC-style query asynchronously is '
                              'not supported')
  if not query:
    raise BigqueryClientError('No query string provided')
  if wait_printer_factory:
    printer = wait_printer_factory()
  else:
    printer = self.wait_printer_factory()
  start_time = time.time()
  elapsed_time = 0
  job_reference = None
  current_wait_ms = None
  while True:
    try:
      # The clock only starts once the initial query RPC has succeeded
      # and we have a job reference to poll.
      elapsed_time = 0 if job_reference is None else time.time() - start_time
      remaining_time = wait - elapsed_time
      if max_single_wait is not None:
        # Compute the current wait, being careful about overflow, since
        # remaining_time may be counting down from sys.maxint.
        current_wait_ms = int(min(remaining_time, max_single_wait) * 1000)
        if current_wait_ms < 0:
          current_wait_ms = sys.maxint
      if remaining_time < 0:
        raise StopIteration('Wait timed out. Query not finished.')
      if job_reference is None:
        # We haven't yet run a successful Query(), so we don't
        # have a job id to check on.
        result = self._StartQueryRpc(
            query=query,
            preserve_nulls=preserve_nulls,
            use_cache=use_cache,
            dry_run=dry_run,
            min_completion_ratio=min_completion_ratio,
            timeout_ms=current_wait_ms,
            max_results=0,
            **kwds)
        job_reference = ApiClientHelper.JobReference.Create(
            **result['jobReference'])
      else:
        # The query/getQueryResults methods do not return the job state,
        # so we just print 'RUNNING' while we are actively waiting.
        printer.Print(job_reference.jobId, elapsed_time, 'RUNNING')
        result = self.GetQueryResults(
            job_reference.jobId,
            max_results=0,
            timeout_ms=current_wait_ms)
      if result['jobComplete']:
        # Rows are fetched (and paginated) separately rather than taken
        # from the completion response above (max_results=0 there).
        return self.ReadSchemaAndJobRows(dict(job_reference),
                                         max_rows=max_results)
    except BigqueryCommunicationError, e:
      # Communication errors while waiting on a job are okay.
      logging.warning('Transient error during query: %s', e)
    except BigqueryBackendError, e:
      # Temporary server errors while waiting on a job are okay.
      logging.warning('Transient error during query: %s', e)
def Query(self, query,
          destination_table=None,
          create_disposition=None,
          write_disposition=None,
          priority=None,
          preserve_nulls=None,
          allow_large_results=None,
          dry_run=None,
          use_cache=None,
          min_completion_ratio=None,
          **kwds):
  # pylint: disable=g-doc-args
  """Execute the given query, returning the created job.

  The job will execute synchronously if sync=True is provided as an
  argument or if self.sync is true.

  Args:
    query: Query to execute.
    destination_table: (default None) If provided, send the results to the
      given table.
    create_disposition: Optional. Specifies the create_disposition for
      the destination_table.
    write_disposition: Optional. Specifies the write_disposition for
      the destination_table.
    priority: Optional. Priority to run the query with. Either
      'INTERACTIVE' (default) or 'BATCH'.
    preserve_nulls: Optional. Indicates whether to preserve nulls in input
      data. Temporary flag; will be removed in a future version.
    allow_large_results: Enables larger destination table sizes.
    dry_run: Optional. Indicates whether the query will only be validated and
      return processing statistics instead of actually running.
    use_cache: Optional. Whether to use the query cache. If create_disposition
      is CREATE_NEVER, will only run the query if the result is already
      cached. Caching is best-effort only and you should not make
      assumptions about whether or how long a query result will be cached.
    min_completion_ratio: Optional. Specifies the minimum fraction of
      data that must be scanned before a query returns. This value should be
      between 0.0 and 1.0 inclusive.
    **kwds: Passed on to self.ExecuteJob.

  Raises:
    BigqueryClientError: if no query is provided.

  Returns:
    The resulting job info.
  """
  if not query:
    raise BigqueryClientError('No query string provided')
  query_config = {'query': query}
  # Let unqualified table names in the query resolve against the
  # client's default dataset, when one is configured.
  if self.dataset_id:
    query_config['defaultDataset'] = dict(self.GetDatasetReference())
  if destination_table:
    try:
      reference = self.GetTableReference(destination_table)
    except BigqueryError, e:
      raise BigqueryError('Invalid value %s for destination_table: %s' % (
          destination_table, e))
    query_config['destinationTable'] = dict(reference)
  # NOTE(review): _ApplyParameters presumably skips None-valued options
  # so unset flags are omitted from the request — confirm in its def.
  _ApplyParameters(
      query_config,
      allow_large_results=allow_large_results,
      create_disposition=create_disposition,
      preserve_nulls=preserve_nulls,
      priority=priority,
      write_disposition=write_disposition,
      use_query_cache=use_cache,
      min_completion_ratio=min_completion_ratio)
  # dry_run is a job-level setting, not part of the query configuration.
  request = {'query': query_config}
  _ApplyParameters(request, dry_run=dry_run)
  return self.ExecuteJob(request, **kwds)
def Load(self, destination_table_reference, source,
         schema=None, create_disposition=None, write_disposition=None,
         field_delimiter=None, skip_leading_rows=None, encoding=None,
         quote=None, max_bad_records=None, allow_quoted_newlines=None,
         source_format=None, allow_jagged_rows=None,
         **kwds):
  """Load the given data into BigQuery.

  The job will execute synchronously if sync=True is provided as an
  argument or if self.sync is true.

  Args:
    destination_table_reference: TableReference to load data into.
    source: String specifying source data to load.
    schema: (default None) Schema of the created table. (Can be left blank
      for append operations.)
    create_disposition: Optional. Specifies the create_disposition for
      the destination_table_reference.
    write_disposition: Optional. Specifies the write_disposition for
      the destination_table_reference.
    field_delimiter: Optional. Specifies the single byte field delimiter.
    skip_leading_rows: Optional. Number of rows of initial data to skip.
    encoding: Optional. Specifies character encoding of the input data.
      May be "UTF-8" or "ISO-8859-1". Defaults to UTF-8 if not specified.
    quote: Optional. Quote character to use. Default is '"'. Note that
      quoting is done on the raw binary data before encoding is applied.
    max_bad_records: Optional. Maximum number of bad records that should
      be ignored before the entire job is aborted.
    allow_quoted_newlines: Optional. Whether to allow quoted newlines in csv
      import data.
    source_format: Optional. Format of source data. May be "CSV",
      "DATASTORE_BACKUP", or "NEWLINE_DELIMITED_JSON".
    allow_jagged_rows: Optional. Whether to allow missing trailing optional
      columns in csv import data.
    **kwds: Passed on to self.ExecuteJob.

  Returns:
    The resulting job info.
  """
  _Typecheck(destination_table_reference, ApiClientHelper.TableReference)
  load_config = {'destinationTable': dict(destination_table_reference)}
  sources = BigqueryClient.ProcessSources(source)
  # Cloud Storage URIs are passed through in the job configuration;
  # anything else is treated as a single local file to upload.
  if sources[0].startswith('gs://'):
    load_config['sourceUris'] = sources
    upload_file = None
  else:
    upload_file = sources[0]
  if schema is not None:
    load_config['schema'] = {'fields': BigqueryClient.ReadSchema(schema)}
  _ApplyParameters(
      load_config, create_disposition=create_disposition,
      write_disposition=write_disposition, field_delimiter=field_delimiter,
      skip_leading_rows=skip_leading_rows, encoding=encoding,
      quote=quote, max_bad_records=max_bad_records,
      source_format=source_format,
      allow_quoted_newlines=allow_quoted_newlines,
      allow_jagged_rows=allow_jagged_rows)
  return self.ExecuteJob(configuration={'load': load_config},
                         upload_file=upload_file, **kwds)
def Extract(self, source_table, destination_uri,
            print_header=None, field_delimiter=None,
            destination_format=None,
            **kwds):
  """Run an extract job that exports a table to Cloud Storage.

  The job will execute synchronously if sync=True is provided as an
  argument or if self.sync is true.

  Args:
    source_table: TableReference to read data from.
    destination_uri: String specifying destination location.
    print_header: Optional. Whether to print out a header row in the results.
    field_delimiter: Optional. Specifies the single byte field delimiter.
    destination_format: Optional. Format to extract table to. May be "CSV"
      or "NEWLINE_DELIMITED_JSON".
    **kwds: Passed on to self.ExecuteJob.

  Returns:
    The resulting job info.

  Raises:
    BigqueryClientError: if required parameters are invalid.
  """
  _Typecheck(source_table, ApiClientHelper.TableReference)
  # Only Cloud Storage destinations are supported.
  if not destination_uri.startswith('gs://'):
    raise BigqueryClientError('Extract only supports "gs://" uris.')
  config = {'sourceTable': dict(source_table)}
  _ApplyParameters(
      config,
      destination_uri=destination_uri,
      destination_format=destination_format,
      print_header=print_header,
      field_delimiter=field_delimiter)
  return self.ExecuteJob(configuration={'extract': config}, **kwds)
class _TableReader(object):
  """Base class that defines the TableReader interface.

  _TableReaders provide a way to read paginated rows and schemas from a table.
  """

  def ReadRows(self, start_row=0, max_rows=None):
    """Read at most max_rows rows from a table.

    Args:
      start_row: first row to return.
      max_rows: maximum number of rows to return.

    Raises:
      BigqueryInterfaceError: when bigquery returns something unexpected.

    Returns:
      list of rows, each of which is a list of field values.
    """
    (_, rows) = self.ReadSchemaAndRows(start_row=start_row, max_rows=max_rows)
    return rows

  def ReadSchemaAndRows(self, start_row=0, max_rows=None):
    """Read at most max_rows rows from a table and the schema.

    Args:
      start_row: first row to read.
      max_rows: maximum number of rows to return.

    Raises:
      BigqueryInterfaceError: when bigquery returns something unexpected.

    Returns:
      A tuple where the first item is the list of fields and the
      second item a list of rows.
    """
    page_token = None
    rows = []
    schema = {}
    # Note: max_rows=0 also falls back to the default page size here.
    max_rows = max_rows or _MAX_ROWS_PER_REQUEST
    while len(rows) < max_rows:
      rows_to_read = max_rows - len(rows)
      rows_to_read = min(self.max_rows_per_request, rows_to_read)
      # Once the server hands us a page token, positioning comes from the
      # token alone; start_row/max_rows must not be re-sent.
      (more_rows, page_token, current_schema) = self._ReadOnePage(
          None if page_token else start_row,
          max_rows=None if page_token else rows_to_read,
          page_token=page_token)
      # Capture the schema from the first page that carries one.
      if not schema and current_schema:
        schema = current_schema.get('fields', {})
      for row in more_rows:
        # Flatten the API's {'f': [{'v': ...}, ...]} row encoding into a
        # plain list of values.
        rows.append([entry.get('v', '') for entry in row.get('f', [])])
      if not page_token:
        # Index-based paging: advance our cursor by what we received.
        start_row += len(more_rows)
        if not more_rows:
          break
      else:
        # API server returned a page token but no rows.
        if not more_rows:
          raise BigqueryInterfaceError(
              'Not enough rows returned by server for %r' % (self,))
    return (schema, rows)

  def __str__(self):
    return self._GetPrintContext()

  def __repr__(self):
    return self._GetPrintContext()

  def _GetPrintContext(self):
    """Returns context for what is being read."""
    raise NotImplementedError('Subclass must implement GetPrintContext')

  def _ReadOnePage(self, start_row, max_rows, page_token=None):
    """Read one page of data, up to max_rows rows.

    Assumes that the table is ready for reading. Will signal an error otherwise.

    Args:
      start_row: first row to read.
      max_rows: maximum number of rows to return.
      page_token: Optional. current page token.

    Returns:
      tuple of:
      rows: the actual rows of the table, in f,v format.
      page_token: the page token of the next page of results.
      schema: the schema of the table.
    """
    raise NotImplementedError('Subclass must implement _ReadOnePage')
class _TableTableReader(_TableReader):
  """A TableReader that reads from a table."""

  def __init__(self, local_apiclient, max_rows_per_request, table_ref):
    self.table_ref = table_ref
    self.max_rows_per_request = max_rows_per_request
    self._apiclient = local_apiclient

  def _GetPrintContext(self):
    return '%r' % (self.table_ref,)

  def _ReadOnePage(self, start_row, max_rows, page_token=None):
    """Fetch one page of table rows plus the table's schema."""
    request = dict(self.table_ref)
    request['maxResults'] = max_rows
    if page_token:
      request['pageToken'] = page_token
    else:
      request['startIndex'] = start_row
    response = self._apiclient.tabledata().list(**request).execute()
    next_token = response.get('pageToken', None)
    rows = response.get('rows', [])
    # tabledata().list() does not return the schema, so a separate
    # tables().get() call is made for it.
    table_info = self._apiclient.tables().get(**dict(self.table_ref)).execute()
    schema = table_info.get('schema', {})
    return (rows, next_token, schema)
class _JobTableReader(_TableReader):
  """A TableReader that reads from a completed job."""

  def __init__(self, local_apiclient, max_rows_per_request, job_ref):
    self.job_ref = job_ref
    self.max_rows_per_request = max_rows_per_request
    self._apiclient = local_apiclient

  def _GetPrintContext(self):
    return '%r' % (self.job_ref,)

  def _ReadOnePage(self, start_row, max_rows, page_token=None):
    """Fetch one page of query results from a job assumed to be complete."""
    request = dict(self.job_ref)
    request['maxResults'] = max_rows
    # Sets the timeout to 0 because we assume the table is already ready.
    request['timeoutMs'] = 0
    if page_token:
      request['pageToken'] = page_token
    else:
      request['startIndex'] = start_row
    response = self._apiclient.jobs().getQueryResults(**request).execute()
    if not response['jobComplete']:
      raise BigqueryError('Job %s is not done' % (self,))
    next_token = response.get('pageToken', None)
    schema = response.get('schema', None)
    rows = response.get('rows', [])
    return (rows, next_token, schema)
class ApiClientHelper(object):
  """Static helper methods and classes not provided by the discovery client."""

  def __init__(self, *unused_args, **unused_kwds):
    raise NotImplementedError('Cannot instantiate static class ApiClientHelper')

  class Reference(object):
    """Base class for Reference objects returned by apiclient.

    Subclasses define _required_fields (the attribute names making up the
    reference), _format_str (how to render it) and typename (used in repr).
    """
    _required_fields = set()
    _format_str = ''

    def __init__(self, **kwds):
      if type(self) == ApiClientHelper.Reference:
        raise NotImplementedError(
            'Cannot instantiate abstract class ApiClientHelper.Reference')
      for name in self._required_fields:
        if not kwds.get(name, ''):
          raise ValueError('Missing required argument %s to %s' % (
              name, self.__class__.__name__))
        setattr(self, name, kwds[name])

    @classmethod
    def Create(cls, **kwds):
      """Factory method for this class: extra keyword arguments are ignored."""
      # items() instead of iteritems(): identical behavior on Python 2 and
      # keeps this module importable under Python 3.
      args = dict((k, v) for k, v in kwds.items()
                  if k in cls._required_fields)
      return cls(**args)

    def __iter__(self):
      # Iterating a reference yields (field, value) pairs, so dict(ref)
      # produces the wire-format dictionary.
      return ((name, getattr(self, name)) for name in self._required_fields)

    def __str__(self):
      return self._format_str % dict(self)

    def __repr__(self):
      return "%s '%s'" % (self.typename, self)

    def __eq__(self, other):
      d = dict(other)
      return all(getattr(self, name) == d.get(name, '')
                 for name in self._required_fields)

    def __ne__(self, other):
      # Python 2 does not derive != from __eq__; without this, two equal
      # references would still compare unequal with !=.
      return not self == other

  class JobReference(Reference):
    _required_fields = set(('projectId', 'jobId'))
    _format_str = '%(projectId)s:%(jobId)s'
    typename = 'job'

  class ProjectReference(Reference):
    _required_fields = set(('projectId',))
    _format_str = '%(projectId)s'
    typename = 'project'

  class DatasetReference(Reference):
    _required_fields = set(('projectId', 'datasetId'))
    _format_str = '%(projectId)s:%(datasetId)s'
    typename = 'dataset'

    def GetProjectReference(self):
      """Return the ProjectReference that contains this dataset."""
      return ApiClientHelper.ProjectReference.Create(
          projectId=self.projectId)

  class TableReference(Reference):
    _required_fields = set(('projectId', 'datasetId', 'tableId'))
    _format_str = '%(projectId)s:%(datasetId)s.%(tableId)s'
    typename = 'table'

    def GetDatasetReference(self):
      """Return the DatasetReference that contains this table."""
      return ApiClientHelper.DatasetReference.Create(
          projectId=self.projectId, datasetId=self.datasetId)

    def GetProjectReference(self):
      """Return the ProjectReference that contains this table."""
      return ApiClientHelper.ProjectReference.Create(
          projectId=self.projectId)
| Python |
#!/usr/bin/env python
# Copyright 2011 Google Inc. All Rights Reserved.
"""Tests for bigquery_client.py."""
import itertools
import json
import tempfile
from google.apputils import googletest
import bigquery_client
class BigqueryClientTest(googletest.TestCase):
  """Tests for identifier parsing, schema reading and error handling."""

  def setUp(self):
    # Client with a dummy transport; the tests below exercise only pure
    # parsing/validation logic, so no real API calls are made.
    self.client = bigquery_client.BigqueryClient(api='http', api_version='')
    # Fully qualified identifiers mapped to their expected
    # (projectId, datasetId, tableId) components.
    self.reference_tests = {
        'prj:': ('prj', '', ''),
        'example.com:prj': ('example.com:prj', '', ''),
        'example.com:prj-2': ('example.com:prj-2', '', ''),
        'www.example.com:prj': ('www.example.com:prj', '', ''),
        'prj:ds': ('prj', 'ds', ''),
        'example.com:prj:ds': ('example.com:prj', 'ds', ''),
        'prj:ds.tbl': ('prj', 'ds', 'tbl'),
        'example.com:prj:ds.tbl': ('example.com:prj', 'ds', 'tbl'),
        }
    # Plain parsing additionally accepts project-less identifiers.
    self.parse_tests = self.reference_tests.copy()
    self.parse_tests.update({
        'ds.': ('', 'ds', ''),
        'ds.tbl': ('', 'ds', 'tbl'),
        'tbl': ('', '', 'tbl'),
        })
    self.field_names = ('projectId', 'datasetId', 'tableId')

  @staticmethod
  def _LengthToType(parts):
    # Pick the Reference subclass matching how many components are present.
    if len(parts) == 1:
      return bigquery_client.ApiClientHelper.ProjectReference
    if len(parts) == 2:
      return bigquery_client.ApiClientHelper.DatasetReference
    if len(parts) == 3:
      return bigquery_client.ApiClientHelper.TableReference
    return None

  def _GetReference(self, parts):
    """Build the expected Reference object from a parse-result tuple."""
    parts = filter(bool, parts)
    reference_type = BigqueryClientTest._LengthToType(parts)
    args = dict(itertools.izip(self.field_names, parts))
    return reference_type(**args)

  def testToCamel(self):
    self.assertEqual('lowerCamel', bigquery_client._ToLowerCamel('lower_camel'))

  def testReadSchemaFromFile(self):
    # Test the filename case.
    with tempfile.NamedTemporaryFile() as f:
      # Write out the results.
      print >>f, '['
      print >>f, ' { "name": "Number", "type": "integer", "mode": "REQUIRED" },'
      print >>f, ' { "name": "Name", "type": "string", "mode": "REQUIRED" },'
      print >>f, ' { "name": "Other", "type": "string", "mode": "OPTIONAL" }'
      print >>f, ']'
      f.flush()
      # Read them as JSON.
      f.seek(0)
      result = json.load(f)
      # Compare the results.
      self.assertEqual(result, self.client.ReadSchema(f.name))

  def testReadSchemaFromString(self):
    # Check some cases that should pass.
    self.assertEqual(
        [{'name': 'foo', 'type': 'INTEGER'}],
        bigquery_client.BigqueryClient.ReadSchema('foo:integer'))
    self.assertEqual(
        [{'name': 'foo', 'type': 'INTEGER'},
         {'name': 'bar', 'type': 'STRING'}],
        bigquery_client.BigqueryClient.ReadSchema('foo:integer, bar:string'))
    # An omitted type defaults to STRING.
    self.assertEqual(
        [{'name': 'foo', 'type': 'STRING'}],
        bigquery_client.BigqueryClient.ReadSchema('foo'))
    self.assertEqual(
        [{'name': 'foo', 'type': 'STRING'},
         {'name': 'bar', 'type': 'STRING'}],
        bigquery_client.BigqueryClient.ReadSchema('foo,bar'))
    self.assertEqual(
        [{'name': 'foo', 'type': 'INTEGER'},
         {'name': 'bar', 'type': 'STRING'}],
        bigquery_client.BigqueryClient.ReadSchema('foo:integer, bar'))
    # Check some cases that should fail.
    self.assertRaises(bigquery_client.BigquerySchemaError,
                      bigquery_client.BigqueryClient.ReadSchema,
                      '')
    self.assertRaises(bigquery_client.BigquerySchemaError,
                      bigquery_client.BigqueryClient.ReadSchema,
                      'foo,bar:int:baz')
    self.assertRaises(bigquery_client.BigquerySchemaError,
                      bigquery_client.BigqueryClient.ReadSchema,
                      'foo:int,,bar:string')
    # A non-schema string that is also not a readable file.
    self.assertRaises(bigquery_client.BigquerySchemaError,
                      bigquery_client.BigqueryClient.ReadSchema,
                      '../foo/bar/fake_filename')

  def testParseIdentifier(self):
    for identifier, parse in self.parse_tests.iteritems():
      self.assertEquals(parse, bigquery_client.BigqueryClient._ParseIdentifier(
          identifier))

  def testGetReference(self):
    for identifier, parse in self.reference_tests.iteritems():
      reference = self._GetReference(parse)
      self.assertEquals(reference, self.client.GetReference(identifier))

  def testParseDatasetReference(self):
    # Only the identifiers with exactly two components are valid datasets.
    dataset_parses = dict((k, v) for k, v in self.reference_tests.iteritems()
                          if len(filter(bool, v)) == 2)
    for identifier, parse in dataset_parses.iteritems():
      reference = self._GetReference(parse)
      self.assertEquals(reference, self.client.GetDatasetReference(identifier))
    for invalid in ['ds.tbl', 'prj:ds.tbl']:
      self.assertRaises(bigquery_client.BigqueryError,
                        self.client.GetDatasetReference, invalid)

  def testParseProjectReference(self):
    # Only the identifiers with exactly one component are valid projects.
    project_parses = dict((k, v) for k, v in self.reference_tests.iteritems()
                          if len(filter(bool, v)) == 1)
    for identifier, parse in project_parses.iteritems():
      reference = self._GetReference(parse)
      self.assertEquals(reference, self.client.GetProjectReference(identifier))
    invalid_projects = [
        'prj:ds', 'example.com:prj:ds', 'ds.', 'ds.tbl', 'prj:ds.tbl']
    for invalid in invalid_projects:
      self.assertRaises(bigquery_client.BigqueryError,
                        self.client.GetProjectReference, invalid)

  def testParseJobReference(self):
    self.assertTrue(self.client.GetJobReference('proj:job_id'))
    # Without a default project, a bare job id cannot be resolved.
    self.client.project_id = None
    self.assertRaises(bigquery_client.BigqueryError,
                      self.client.GetJobReference, 'job_id')
    self.client.project_id = 'proj'
    self.assertTrue(self.client.GetJobReference('job_id'))
    invalid_job_ids = [
        'prj:', 'example.com:prj:ds.tbl', 'ds.tbl', 'prj:ds.tbl']
    for invalid in invalid_job_ids:
      self.assertRaises(bigquery_client.BigqueryError,
                        self.client.GetJobReference, invalid)

  def testRaiseError(self):
    # Confirm we handle arbitrary errors gracefully.
    try:
      bigquery_client.BigqueryClient.RaiseError({})
    except bigquery_client.BigqueryError as _:
      pass

  def testJsonToInsertEntry(self):
    result = [
        bigquery_client.JsonToInsertEntry(None, '{"a":1}'),
        bigquery_client.JsonToInsertEntry('key', '{"b":2}'),
        ]
    # Each entry is a (insert_id, row_dict) pair.
    self.assertEquals([None, 'key'], [x[0] for x in result])
    self.assertEquals(1, result[0][1]['a'])
    self.assertEquals(2, result[1][1]['b'])
    self.assertRaisesRegexp(
        bigquery_client.BigqueryClientError,
        r'Could not parse',
        bigquery_client.JsonToInsertEntry, None, '_junk_')
    self.assertRaisesRegexp(
        bigquery_client.BigqueryClientError,
        r'not a JSON object',
        bigquery_client.JsonToInsertEntry, None, '[1, 2]')
# Run the tests when this module is invoked as a script.
if __name__ == '__main__':
  googletest.main()
| Python |
#!/usr/bin/env python
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import platform
import remove_pyreadline
import setuptools.command.easy_install as easy_install
import setuptools.package_index
import shutil
import sys
# Name of the setuptools path-registry file that lists installed eggs.
EASY_INSTALL_PTH_FILENAME = 'easy-install.pth'
# Suffix for the backup copy taken before the .pth file is modified.
BACKUP_SUFFIX = '.old'
def locate_package(name):
  """Return the installed distribution called `name`, or None if absent."""
  import pkg_resources
  try:
    return setuptools.package_index.get_distribution(name)
  except pkg_resources.DistributionNotFound:
    return None
def find_package_consumers(name, deps_to_ignore=None):
  """Return names of installed packages that declare `name` as a requirement.

  Args:
    name: distribution name to look for in other packages' requirements.
    deps_to_ignore: optional list of package names to exclude from the scan.

  Returns:
    List of package names that depend on `name`.
  """
  if deps_to_ignore is None:
    deps_to_ignore = []
  consumers = []
  installed_packages = list(setuptools.package_index.AvailableDistributions())
  for package_name in installed_packages:
    # Skip the package itself and anything the caller asked to ignore.
    if package_name == name or package_name in deps_to_ignore:
      continue
    package_info = setuptools.package_index.get_distribution(package_name)
    if any(req.project_name == name for req in package_info.requires()):
      consumers.append(package_name)
  return consumers
def remove_package(pkg):
  """Remove an installed egg and its easy-install.pth entry.

  A backup of the .pth file is saved next to it (with BACKUP_SUFFIX)
  before any modification is made.

  Args:
    pkg: pkg_resources distribution object for the package to remove.
  """
  # The unused egg-name half of os.path.split() was dropped; only the
  # containing site-packages directory is needed.
  site_packages_dir = os.path.dirname(pkg.location)
  easy_install_pth_filename = os.path.join(site_packages_dir,
                                           EASY_INSTALL_PTH_FILENAME)
  # Keep a backup copy of the .pth file in case something goes wrong.
  backup_filename = easy_install_pth_filename + BACKUP_SUFFIX
  shutil.copy2(easy_install_pth_filename, backup_filename)
  # Deregister the package from the easy-install path file...
  pth_file = easy_install.PthDistributions(easy_install_pth_filename)
  pth_file.remove(pkg)
  pth_file.save()
  # ...then delete the egg itself (it may be a directory or a zip file).
  if os.path.isdir(pkg.location):
    shutil.rmtree(pkg.location)
  else:
    os.unlink(pkg.location)
def y_or_n_p(prompt):
  """Prompt the user until they answer 'y' or 'n'; return the answer."""
  answer = raw_input('%s (y/n) ' % (prompt,)).strip().lower()
  while answer not in ('y', 'n'):
    answer = raw_input(' Please answer y or n: ').strip().lower()
  return answer
def delete_pyreadline():
  """Interactively uninstall pyreadline, warning about dependent packages."""
  pkg = locate_package('pyreadline')
  if pkg is None:
    print "pyreadline not found, exiting."
    return
  # Tell the user which installed packages would lose a dependency.
  consumers = find_package_consumers('pyreadline')
  if consumers:
    print 'pyreadline is a dependency of all the following packages:'
    for p in consumers:
      print ' %s' % (p,)
    print
  else:
    print 'pyreadline is not a dependency of any installed packages.'
    print
  # Require explicit confirmation before touching the installation.
  response = y_or_n_p('Continue and uninstall pyreadline?')
  if response == 'n':
    print 'Aborting uninstall of pyreadline.'
    return
  remove_package(pkg)
  print 'pyreadline successfully uninstalled!'
def run_main():
  """Entry point: confirm (extra-carefully on Windows) and uninstall."""
  print 'This script will attempt to remove pyreadline from your system.'
  print
  if platform.system() == 'Windows':
    # On Windows pyreadline provides readline support, so removing it is
    # discouraged; demand an extra confirmation.
    print
    print '*** WARNING ***'
    print 'This is a Windows system, and removal of pyreadline on a Windows'
    print 'system is NOT recommended.'
    response = y_or_n_p('Are you SURE you want to proceed?')
    if response == 'n':
      print 'Exiting.'
      exit(0)
  delete_pyreadline()
# Run the uninstaller when invoked as a script.
if __name__ == '__main__':
  run_main()
| Python |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for table_formatter.py."""
from google.apputils import googletest
import table_formatter
class TableFormatterTest(googletest.TestCase):
  """Shared base for formatter tests; subclasses set self.format_class."""

  def setUp(self):
    super(TableFormatterTest, self).setUp()
    # The base class itself has no formatter; only subclasses (which set
    # format_class) build one and run the shared checks.
    if type(self) != TableFormatterTest:
      self.failUnless(hasattr(self, 'format_class'),
                      'Subclasses must provide self.format_class')
      self.formatter = self.format_class()
      # NOTE(review): kwdss appears to be one kwds dict per column,
      # parallel to the names tuple — confirm against table_formatter.
      self.formatter.AddColumns(('foo', 'longer header'),
                                kwdss=[{}, {'align': 'r'}])
      self.formatter.AddRow(['a', 3])
      self.formatter.AddRow(['abc', 123])

  def testStr(self):
    # Subclasses must shadow this with a real string-rendering check.
    self.failIf(hasattr(self, 'format_class'),
                'Subclasses must override testStr')

  def testUnicodeRow(self):
    row = [11, 'chinese', u'你不能教老狗新把戏']
    if type(self) != TableFormatterTest:
      formatter = self.format_class()
      formatter.AddColumns(('count', 'language', 'message'))
      formatter.AddRow(row)
      # Note that we don't need any asserts here: the act of calling
      # Print will throw if unicode isn't being handled correctly.
      formatter.Print()
      formatter = self.format_class()
      formatter.AddColumns(('message',))
      formatter.AddRow(row[2:])
      formatter.Print()
      # str() must stay ASCII-safe; unicode() must keep the wide chars.
      self.assertTrue(all(ord(c) <= 127 for c in str(formatter)))
      self.assertTrue(any(ord(c) > 127 for c in unicode(formatter)))
class PrettyFormatterTest(TableFormatterTest):
def setUp(self):
# Static method names are too long without abbreviations.
self.PF = table_formatter.PrettyFormatter # pylint: disable=g-bad-name
self.format_class = self.PF
super(PrettyFormatterTest, self).setUp()
def testStr(self):
table_repr = '\n'.join((
'+-----+---------------+',
'| foo | longer header |',
'+-----+---------------+',
'| a | 3 |',
'| abc | 123 |',
'+-----+---------------+'))
self.assertEquals(table_repr, str(self.formatter))
def testCenteredPadding(self):
self.assertEquals((1, 1), self.PF.CenteredPadding(8, 6))
self.assertEquals((2, 1), self.PF.CenteredPadding(8, 5, left_justify=False))
self.assertEquals((1, 2), self.PF.CenteredPadding(8, 5))
self.assertRaises(table_formatter.FormatterException,
self.PF.CenteredPadding, 1, 5)
def testAbbreviate(self):
self.assertEquals('', self.PF.Abbreviate('abc', 0))
self.assertEquals('.', self.PF.Abbreviate('abc', 1))
self.assertEquals('ab...', self.PF.Abbreviate('abcdef', 5))
self.assertEquals('abcdef', self.PF.Abbreviate('abcdef', 6))
self.assertEquals('abcdef', self.PF.Abbreviate('abcdef', 7))
def testFormatCell(self):
entry = 'abc'
self.assertEquals(
[' abc '], list(self.PF.FormatCell(entry, 3)))
self.assertEquals(
[' abc '], list(self.PF.FormatCell(entry, 5, align='l')))
self.assertEquals(
[' abc '], list(self.PF.FormatCell(entry, 5)))
self.assertEquals(
[' abc '], list(self.PF.FormatCell(entry, 5, align='r')))
self.assertEquals(
[' abc '], list(self.PF.FormatCell(entry, 6)))
lines = [
' abc ',
' ',
' ',
]
self.assertEquals(lines, list(self.PF.FormatCell(entry, 6, cell_height=3)))
lines.append(lines[-1])
self.assertEquals(lines, list(self.PF.FormatCell(entry, 6, cell_height=4)))
lines = [
' ',
' abc... ',
' ab ',
' ',
]
self.assertEquals(lines, list(self.PF.FormatCell(
'abcdefghi\nab', 6, cell_height=4, align='l', valign='c')))
lines = [
' abc... ',
' ab ',
' ',
' ',
]
self.assertEquals(lines, list(self.PF.FormatCell(
'abcdefghi\nab', 6, cell_height=4, align='l')))
lines = [
' ',
' ',
' abc... ',
' ab ',
]
self.assertEquals(lines, list(self.PF.FormatCell(
'abcdefghi\nab', 6, cell_height=4, align='l', valign='b')))
self.assertRaises(table_formatter.FormatterException,
self.PF.FormatCell, 'ab\na', 5)
def testFormatRow(self):
  # FormatRow renders one logical row as row_height printed lines, with
  # cells joined by the vertical border character.
  formatter = table_formatter.PrettyFormatter()
  formatter.AddColumns(('one', 'two'))
  formatter.AddRow(['a', 'b'])
  self.assertEquals(
      ['| a | b |'],
      list(formatter.FormatRow(formatter.rows[0], 1)))
  # Multi-line entries spill onto continuation lines.
  formatter.AddRow(['a', 'b\nc'])
  self.assertEquals(
      ['| a | b |',
       '| | c |',
      ],
      list(formatter.FormatRow(formatter.rows[1], 2)))
  # Asking for fewer lines than the row needs is an error.
  self.assertRaises(table_formatter.FormatterException,
                    formatter.FormatRow, formatter.rows[1], 1)
  formatter.AddRow(['a', '\nbbbbbb\nc'])
  self.assertEquals(
      ['| a | |',
       '| | bbbbbb |',
       '| | c |',
      ],
      list(formatter.FormatRow(formatter.rows[2], 3)))
  # Explicit column_widths override the accumulated widths and force
  # abbreviation of entries that no longer fit.
  self.assertEquals(
      ['| a | |',
       '| | b... |',
       '| | c |',
      ],
      list(formatter.FormatRow(formatter.rows[2], 3, column_widths=[3, 4])))
def testHeaderLines(self):
  # HeaderLines yields only the column-name row(s) -- no horizontal rules --
  # sized to the widths accumulated from the rows.
  formatter = table_formatter.PrettyFormatter()
  formatter.AddColumns(('a', 'b'))
  formatter.AddRow(['really long string', ''])
  self.assertEquals(
      ['| a | b |'],
      list(formatter.HeaderLines()))
def testFormatHeader(self):
  # FormatHeader wraps the (possibly multi-line) header between two hrules.
  formatter = table_formatter.PrettyFormatter()
  formatter.AddColumns(('a', 'bcd\nefgh'))
  formatter.AddRow(['really long string', ''])
  self.assertEquals(
      ['+--------------------+------+',
       '| a | bcd |',
       '| | efgh |',
       '+--------------------+------+'],
      list(formatter.FormatHeader()))
def testAddRow(self):
  """AddRow grows column widths and records per-row heights."""
  formatter = table_formatter.PrettyFormatter()
  formatter.AddColumns(('a', 'b'))
  formatter.AddRow(['foo', 'x'])
  self.assertEqual(1, len(formatter))
  self.assertEqual([3, 1], formatter.column_widths)
  self.assertEqual([1], formatter.row_heights)
  # A multi-line entry bumps the row height; widths only ever grow.
  formatter.AddRow(['foo\nbar', 'xxxxxxx'])
  self.assertEqual(2, len(formatter))
  self.assertEqual([3, 7], formatter.column_widths)
  self.assertEqual([1, 2], formatter.row_heights)
  # Check that we can add non-string entries.
  formatter.AddRow([3, {'a': 5}])
def testAddColumn(self):
  """AddColumn sizes itself from the name and validates its arguments."""
  formatter = table_formatter.PrettyFormatter()
  # A multi-line name sets header height; width is the longest name line.
  formatter.AddColumn('abc\ndef', align='r')
  self.assertEqual([3], formatter.column_widths)
  self.assertEqual(2, formatter.header_height)
  # Invalid alignments are rejected.
  self.assertRaises(table_formatter.FormatterException,
                    formatter.AddColumn, 'bad', align='d')
  # Columns cannot be added once the table has any rows.
  formatter.AddRow([3])
  self.assertRaises(table_formatter.FormatterException,
                    formatter.AddColumn, 'anything')
def testPrintEmptyTable(self):
  # With skip_header_when_empty=False (also the constructor default) an
  # empty table still prints its header; with True it prints nothing.
  formatter = table_formatter.PrettyFormatter(skip_header_when_empty=False)
  formatter.AddColumns(('a', 'b'))
  table_repr = '\n'.join((
      '+---+---+',
      '| a | b |',
      '+---+---+',
      '+---+---+'))
  self.assertEquals(table_repr, str(formatter))
  formatter = table_formatter.PrettyFormatter()
  formatter.AddColumns(('a', 'b'))
  self.assertEquals(table_repr, str(formatter))
  formatter = table_formatter.PrettyFormatter(skip_header_when_empty=True)
  formatter.AddColumns(('a', 'b'))
  self.assertEquals('', str(formatter))
class SparsePrettyFormatterTest(TableFormatterTest):
  """Tests for SparsePrettyFormatter: a borderless PrettyFormatter."""

  def setUp(self):
    # Inherited tests run against this format class via the shared setUp.
    self.format_class = table_formatter.SparsePrettyFormatter
    super(SparsePrettyFormatterTest, self).setUp()

  def testStr(self):
    # Same fixture as the pretty test, but junctions/verticals are spaces
    # and there is no top or bottom border.
    table_repr = '\n'.join((
        ' foo longer header ',
        ' ----- --------------- ',
        ' a 3 ',
        ' abc 123 '))
    self.assertEquals(table_repr, str(self.formatter))

  def testFormatHeader(self):
    # Header is the (possibly multi-line) names followed by one rule line.
    formatter = table_formatter.SparsePrettyFormatter()
    formatter.AddColumns(('a', 'bcd\nefgh'))
    formatter.AddRow(['really long string', ''])
    self.assertEquals(
        [' a bcd ',
         ' efgh ',
         ' -------------------- ------ '],
        list(formatter.FormatHeader()))

  def testPrintEmptyTable(self):
    # skip_header_when_empty suppresses even the header and rule lines.
    formatter = table_formatter.SparsePrettyFormatter(
        skip_header_when_empty=False)
    formatter.AddColumns(('a', 'b'))
    table_repr = '\n'.join((
        ' a b ',
        ' --- --- '))
    self.assertEquals(table_repr, str(formatter))
    formatter = table_formatter.SparsePrettyFormatter()
    formatter.AddColumns(('a', 'b'))
    self.assertEquals(table_repr, str(formatter))
    formatter = table_formatter.SparsePrettyFormatter(
        skip_header_when_empty=True)
    formatter.AddColumns(('a', 'b'))
    self.assertEquals('', str(formatter))
class PrettyJsonFormatterTest(TableFormatterTest):
  """Tests for PrettyJsonFormatter: indented, key-sorted JSON output."""

  def setUp(self):
    self.format_class = table_formatter.PrettyJsonFormatter
    super(PrettyJsonFormatterTest, self).setUp()

  def testStr(self):
    # Two-space indented JSON with keys sorted alphabetically; note
    # json.dumps(indent=...) on Python 2 emits trailing spaces after commas.
    table_repr = '\n'.join((
        '[',
        ' {',
        ' "foo": "a", ',
        ' "longer header": 3',
        ' }, ',
        ' {',
        ' "foo": "abc", ',
        ' "longer header": 123',
        ' }',
        ']'))
    self.assertEquals(table_repr, str(self.formatter))
class JsonFormatterTest(TableFormatterTest):
  """Tests for JsonFormatter: maximally compact JSON output."""

  def setUp(self):
    # Inherited tests run against this format class via the shared setUp.
    self.format_class = table_formatter.JsonFormatter
    super(JsonFormatterTest, self).setUp()

  def testStr(self):
    # Compact separators, one object per row, no inter-token whitespace.
    table_repr = ('[{"longer header":3,"foo":"a"},'
                  '{"longer header":123,"foo":"abc"}]')
    self.assertEqual(table_repr, str(self.formatter))
class CsvFormatterTest(TableFormatterTest):
  """Tests for CsvFormatter: header line plus minimally quoted rows."""

  def setUp(self):
    self.format_class = table_formatter.CsvFormatter
    super(CsvFormatterTest, self).setUp()

  def testStr(self):
    # Unix newlines, no quoting needed for these simple values.
    table_repr = '\n'.join((
        'foo,longer header',
        'a,3',
        'abc,123'))
    self.assertEqual(table_repr, str(self.formatter))
class NullFormatterTest(TableFormatterTest):
  """Tests for NullFormatter: swallows rows and renders nothing."""

  def setUp(self):
    self.format_class = table_formatter.NullFormatter
    super(NullFormatterTest, self).setUp()

  def testStr(self):
    self.assertEqual('', str(self.formatter))

  def testUnicodeRow(self):
    # Python 2 unicode() path should also render empty.
    self.assertEqual('', unicode(self.formatter))
if __name__ == '__main__':
  # Standard test-runner entry point; googletest is presumably imported at
  # the top of this test module (not visible in this chunk).
  googletest.main()
| Python |
#!/usr/bin/env python
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Table formatting library.
We define a TableFormatter interface, and create subclasses for
several different print formats, including formats intended for both
human and machine consumption:
Human Consumption
-----------------
PrettyFormatter: This prints ASCII-art bordered tables. Inspired
by the prettytable python library. Example:
+-----+---------------+
| foo | longer header |
+-----+---------------+
| a | 3 |
| ... |
| abc | 123 |
+-----+---------------+
SparsePrettyFormatter: This is a PrettyFormatter which simply
doesn't print most of the border. Example:
foo longer header
----- ---------------
a 3
...
abc 123
PrettyJsonFormatter: Prints JSON output in a format easily
read by a human. Example:
[
{
"foo": "a",
"longer header": 3
},
...
{
"foo": "abc",
"longer header": 123
}
]
Machine Consumption
-------------------
CsvFormatter: Prints output in CSV form, with minimal
quoting, '\n' separation between lines, and including
a header line. Example:
foo,longer header
a,3
...
abc,123
JsonFormatter: Prints JSON output in the most compact
form possible. Example:
[{"foo":"a","longer header":3},...,{"foo":"abc","longer header":123}]
Additional formatters can be added by subclassing TableFormatter and
overriding the following methods:
__len__, __unicode__, AddRow, column_names, AddColumn
"""
import cStringIO
import csv
import itertools
import json
import sys
class FormatterException(Exception):
  """Raised by TableFormatters on invalid usage or inconsistent state."""
  pass
class TableFormatter(object):
  """Interface for table formatters.

  Subclasses must implement __len__, __unicode__, AddRow, column_names,
  and AddColumn; everything else here is shared plumbing.
  """

  def __init__(self, **kwds):
    """Initializes the base class.

    Keyword arguments:
      skip_header_when_empty: If true, does not print the table's header
        if there are zero rows. This argument has no effect on
        PrettyJsonFormatter.
    """
    # Guard against direct instantiation of the abstract base.
    if self.__class__ == TableFormatter:
      raise NotImplementedError(
          'Cannot instantiate abstract class TableFormatter')
    self.skip_header_when_empty = kwds.get('skip_header_when_empty', False)

  def __nonzero__(self):
    # Python 2 truth protocol: a formatter is truthy iff it has rows.
    return bool(len(self))

  def __len__(self):
    raise NotImplementedError('__len__ must be implemented by subclass')

  def __str__(self):
    # Byte-string rendering: encode the unicode form; characters outside
    # the default encoding degrade to backslash escapes instead of raising.
    return unicode(self).encode(sys.getdefaultencoding(), 'backslashreplace')

  def __unicode__(self):
    raise NotImplementedError('__unicode__ must be implemented by subclass')

  def Print(self):
    # Empty formatters print nothing at all (not even a header).
    if self:
      # TODO(user): Make encoding a customizable attribute on
      # the TableFormatter.
      encoding = sys.stdout.encoding or 'utf8'
      print unicode(self).encode(encoding, 'backslashreplace')

  def AddRow(self, row):
    """Add a new row (an iterable) to this formatter."""
    raise NotImplementedError('AddRow must be implemented by subclass')

  def AddRows(self, rows):
    """Add all rows to this table."""
    for row in rows:
      self.AddRow(row)

  def AddField(self, field):
    """Add a field (a dict with 'name' and 'type') as a new column."""
    # TODO(user): Excise this bigquery-specific method.
    # Left-align STRING fields; right-align everything else.
    align = 'l' if field.get('type', []) == 'STRING' else 'r'
    self.AddColumn(field['name'], align=align)

  def AddFields(self, fields):
    """Convenience method to add a list of fields."""
    for field in fields:
      self.AddField(field)

  def AddDict(self, d):
    """Add a dict as a row by using column names as keys."""
    # Missing keys render as empty strings rather than raising.
    self.AddRow([d.get(name, '') for name in self.column_names])

  @property
  def column_names(self):
    """Return the ordered list of column names in self."""
    raise NotImplementedError('column_names must be implemented by subclass')

  def AddColumn(self, column_name, align='r', **kwds):
    """Add a new column to this formatter."""
    raise NotImplementedError('AddColumn must be implemented by subclass')

  def AddColumns(self, column_names, kwdss=None):
    """Add a series of columns to this formatter.

    kwdss, when given, is a parallel list of per-column keyword dicts.
    """
    kwdss = kwdss or [{}] * len(column_names)
    for column_name, kwds in zip(column_names, kwdss):
      self.AddColumn(column_name, **kwds)
class PrettyFormatter(TableFormatter):
  """Formats output as an ASCII-art table with borders."""

  def __init__(self, **kwds):
    """Initialize a new PrettyFormatter.

    Keyword arguments:
      junction_char: (default: +) Character to use for table junctions.
      horizontal_char: (default: -) Character to use for horizontal lines.
      vertical_char: (default: |) Character to use for vertical lines.
    """
    super(PrettyFormatter, self).__init__(**kwds)
    self.junction_char = kwds.get('junction_char', '+')
    self.horizontal_char = kwds.get('horizontal_char', '-')
    self.vertical_char = kwds.get('vertical_char', '|')
    # Parallel bookkeeping, updated incrementally: one row_heights entry per
    # row; one entry per column in each of the column_* lists.
    self.rows = []
    self.row_heights = []
    self._column_names = []
    self.column_widths = []
    self.column_alignments = []
    self.header_height = 1

  def __len__(self):
    return len(self.rows)

  def __unicode__(self):
    # Header, all rows, then a closing hrule; or nothing at all when empty
    # and skip_header_when_empty was requested.
    if self or not self.skip_header_when_empty:
      lines = itertools.chain(
          self.FormatHeader(), self.FormatRows(), self.FormatHrule())
    else:
      lines = []
    return '\n'.join(lines)

  @staticmethod
  def CenteredPadding(interval, size, left_justify=True):
    """Compute information for centering a string in a fixed space.

    Given two integers interval and size, with size <= interval, this
    function computes two integers left_padding and right_padding with
      left_padding + right_padding + size = interval
    and
      |left_padding - right_padding| <= 1.

    In the case that interval and size have different parity,
    right_padding will be larger iff left_justify is True. (That is,
    iff the string should be left justified in the "center" space.)

    Args:
      interval: Size of the fixed space.
      size: Size of the string to center in that space.
      left_justify: (optional, default: True) Whether the string
        should be left-justified in the center space.

    Returns:
      left_padding, right_padding: The size of the left and right
        margins for centering the string.

    Raises:
      FormatterException: If size > interval.
    """
    if size > interval:
      raise FormatterException('Illegal state in table formatting')
    same_parity = (interval % 2) == (size % 2)
    # Floor division of nonnegative ints (Python 2 '/' semantics).
    padding = (interval - size) / 2
    if same_parity:
      return padding, padding
    elif left_justify:
      return padding, padding + 1
    else:
      return padding + 1, padding

  @staticmethod
  def Abbreviate(s, width):
    """Abbreviate a string to at most width characters."""
    # The ellipsis itself shrinks when width < 3 so the result still fits.
    suffix = '.' * min(width, 3)
    return s if len(s) <= width else s[:width - len(suffix)] + suffix

  @staticmethod
  def FormatCell(entry, cell_width, cell_height=1, align='c', valign='t'):
    """Format an entry into a list of strings for a fixed cell size.

    Given a (possibly multi-line) entry and a cell height and width,
    we split the entry into a list of lines and format each one into
    the given width and alignment. We then pad the list with
    additional blank lines of the appropriate width so that the
    resulting list has exactly cell_height entries. Each entry
    is also padded with one space on either side.

    We abbreviate strings for width, but we require that the
    number of lines in entry is at most cell_height.

    Args:
      entry: String to format, which may have newlines.
      cell_width: Maximum width for lines in the cell.
      cell_height: Number of lines in the cell.
      align: Alignment to use for lines of text.
      valign: Vertical alignment in the cell. One of 't',
        'c', or 'b' (top, center, and bottom, respectively).

    Returns:
      An iterator yielding exactly cell_height lines, each of
      exact width cell_width + 2, corresponding to this cell.

    Raises:
      FormatterException: If there are too many lines in entry.
      ValueError: If the valign is invalid.
    """
    entry_lines = [PrettyFormatter.Abbreviate(line, cell_width)
                   for line in entry.split('\n')]
    if len(entry_lines) > cell_height:
      raise FormatterException('Too many lines (%s) for a cell of size %s' % (
          len(entry_lines), cell_height))
    # Decide how many full-width blank lines go above and below the content.
    if valign == 't':
      top_lines = []
      bottom_lines = itertools.repeat(' ' * (cell_width + 2),
                                      cell_height - len(entry_lines))
    elif valign == 'c':
      top_padding, bottom_padding = PrettyFormatter.CenteredPadding(
          cell_height, len(entry_lines))
      top_lines = itertools.repeat(' ' * (cell_width + 2), top_padding)
      bottom_lines = itertools.repeat(' ' * (cell_width + 2), bottom_padding)
    elif valign == 'b':
      bottom_lines = []
      top_lines = itertools.repeat(' ' * (cell_width + 2),
                                   cell_height - len(entry_lines))
    else:
      raise ValueError('Unknown value for valign: %s' % (valign,))
    content_lines = []
    for line in entry_lines:
      if align == 'c':
        left_padding, right_padding = PrettyFormatter.CenteredPadding(
            cell_width, len(line))
        content_lines.append(' %s%s%s ' % (
            ' ' * left_padding, line, ' ' * right_padding))
      elif align in ('l', 'r'):
        # '%*s' right-justifies to cell_width; '%-*s' left-justifies.
        fmt = ' %*s ' if align == 'r' else ' %-*s '
        content_lines.append(fmt % (cell_width, line))
      else:
        raise FormatterException('Unknown alignment: %s' % (align,))
    return itertools.chain(top_lines, content_lines, bottom_lines)

  def FormatRow(self, entries, row_height,
                column_alignments=None, column_widths=None):
    """Format a row into a list of strings.

    Given a list of entries, which must be the same length as the
    number of columns in this table, and a desired row height, we
    generate a list of strings corresponding to the printed
    representation of that row.

    Args:
      entries: List of entries to format.
      row_height: Number of printed lines corresponding to this row.
      column_alignments: (optional, default self.column_alignments)
        The alignment to use for each column.
      column_widths: (optional, default self.column_widths) The widths
        of each column.

    Returns:
      An iterator over the strings in the printed representation
      of this row.
    """
    column_alignments = column_alignments or self.column_alignments
    column_widths = column_widths or self.column_widths
    # pylint: disable=g-long-lambda
    curried_format = lambda entry, width, align: self.__class__.FormatCell(
        unicode(entry), width, cell_height=row_height, align=align)
    # izip(*...) transposes per-cell line lists into per-line cell tuples.
    printed_rows = itertools.izip(*itertools.imap(
        curried_format, entries, column_widths, column_alignments))
    # Join each line of cells with vertical borders on both outer edges.
    return (self.vertical_char.join(itertools.chain([''], cells, ['']))
            for cells in printed_rows)

  def HeaderLines(self):
    """Return an iterator over the row(s) for the column names."""
    # Header names are always centered, regardless of column alignment.
    aligns = itertools.repeat('c')
    return self.FormatRow(self.column_names, self.header_height,
                          column_alignments=aligns)

  def FormatHrule(self):
    """Return a list containing an hrule for this table."""
    # NOTE(review): uses a literal '-', so the horizontal_char setting from
    # __init__ is ignored here -- looks like an oversight; confirm intent.
    entries = (''.join(itertools.repeat('-', width + 2))
               for width in self.column_widths)
    return [self.junction_char.join(itertools.chain([''], entries, ['']))]

  def FormatHeader(self):
    """Return an iterator over the lines for the header of this table."""
    return itertools.chain(
        self.FormatHrule(), self.HeaderLines(), self.FormatHrule())

  def FormatRows(self):
    """Return an iterator over all the rows in this table."""
    return itertools.chain(*itertools.imap(
        self.FormatRow, self.rows, self.row_heights))

  def AddRow(self, row):
    """Add a row to this table.

    Args:
      row: A list of length equal to the number of columns in this table.

    Raises:
      FormatterException: If the row length is invalid.
    """
    if len(row) != len(self.column_names):
      raise FormatterException('Invalid row length: %s' % (len(row),))
    split_rows = [unicode(entry).split('\n') for entry in row]
    # Row height is the line count of the tallest multi-line entry.
    self.row_heights.append(max(len(lines) for lines in split_rows))
    # Column widths grow monotonically to fit the widest entry seen so far.
    column_widths = (max(len(line) for line in entry) for entry in split_rows)
    self.column_widths = [max(width, current) for width, current
                          in itertools.izip(column_widths, self.column_widths)]
    self.rows.append(row)

  def AddColumn(self, column_name, align='l', **kwds):
    """Add a column to this table.

    Args:
      column_name: Name for the new column.
      align: (optional, default: 'l') Alignment for the new column entries.

    Raises:
      FormatterException: If the table already has any rows, or if the
        provided alignment is invalid.
    """
    if self:
      raise FormatterException(
          'Cannot add a new column to an initialized table')
    if align not in ('l', 'c', 'r'):
      raise FormatterException('Invalid column alignment: %s' % (align,))
    lines = column_name.split('\n')
    self.column_widths.append(max(len(line) for line in lines))
    self.column_alignments.append(align)
    self.column_names.append(column_name)
    # A multi-line column name may raise the header height.
    self.header_height = max(len(lines), self.header_height)

  @property
  def column_names(self):
    # Returns the live list; AddColumn appends to it via this property.
    return self._column_names
class SparsePrettyFormatter(PrettyFormatter):
  """Formats output as a table with a header and separator line."""

  def __init__(self, **kwds):
    """Initialize a new SparsePrettyFormatter."""
    # Identical machinery to PrettyFormatter, but junctions and vertical
    # borders are drawn with spaces unless the caller overrides them.
    kwds.setdefault('junction_char', ' ')
    kwds.setdefault('vertical_char', ' ')
    super(SparsePrettyFormatter, self).__init__(**kwds)

  def __unicode__(self):
    # An empty table renders as nothing when the header is being skipped.
    if not self and self.skip_header_when_empty:
      return ''
    return '\n'.join(itertools.chain(self.FormatHeader(), self.FormatRows()))

  def FormatHeader(self):
    """Return an iterator over the header lines for this table."""
    # Just the column names followed by a single rule; no closing border.
    return itertools.chain(self.HeaderLines(), self.FormatHrule())
class CsvFormatter(TableFormatter):
  """Formats output as CSV with header lines.

  The resulting CSV file includes a header line, uses Unix-style
  newlines, and only quotes those entries which require it, namely
  those that contain quotes, newlines, or commas.
  """

  def __init__(self, **kwds):
    super(CsvFormatter, self).__init__(**kwds)
    # Rows are serialized eagerly into this byte buffer; the header is kept
    # separate so it can be skipped for empty tables.
    self._buffer = cStringIO.StringIO()
    self._header = []
    self._table = csv.writer(
        self._buffer, quoting=csv.QUOTE_MINIMAL, lineterminator='\n')

  def __nonzero__(self):
    # Truthy iff at least one row has been written to the buffer.
    return bool(self._buffer.tell())

  def __len__(self):
    # NOTE(review): counts printed lines, which includes the header line
    # when present -- so len() may be rows + 1, unlike other formatters.
    return len(unicode(self).splitlines())

  def __unicode__(self):
    if self or not self.skip_header_when_empty:
      lines = [','.join(self._header), self._buffer.getvalue()]
    else:
      lines = []
    # Note that we need to explicitly decode here to work around
    # the fact that the CSV module does not work with unicode.
    return '\n'.join(line.decode('utf8') for line in lines).rstrip()

  @property
  def column_names(self):
    # Return a copy so callers cannot mutate the header in place.
    return self._header[:]

  def AddColumn(self, column_name, **kwds):
    if self:
      raise FormatterException(
          'Cannot add a new column to an initialized table')
    self._header.append(column_name)

  def AddRow(self, row):
    # The Python 2 csv module is byte-oriented: encode entries as UTF-8 on
    # the way in; __unicode__ decodes again on the way out.
    self._table.writerow([unicode(entry).encode('utf8', 'backslashreplace')
                          for entry in row])
class JsonFormatter(TableFormatter):
  """Formats output in maximally compact JSON."""

  def __init__(self, **kwds):
    super(JsonFormatter, self).__init__(**kwds)
    # Rows are stored as dicts keyed by field name until rendering time.
    self._field_names = []
    self._table = []

  def __len__(self):
    return len(self._table)

  def __unicode__(self):
    # Compact separators: no whitespace after ',' or ':'.
    return json.dumps(self._table, separators=(',', ':'), ensure_ascii=False)

  @property
  def column_names(self):
    # Hand back a copy so callers cannot mutate our field list.
    return list(self._field_names)

  def AddColumn(self, column_name, **kwds):
    if self:
      raise FormatterException(
          'Cannot add a new column to an initialized table')
    self._field_names.append(column_name)

  def AddRow(self, row):
    if len(row) != len(self._field_names):
      raise FormatterException('Invalid row: %s' % (row,))
    entry = dict(zip(self._field_names, row))
    self._table.append(entry)
class PrettyJsonFormatter(JsonFormatter):
  """Formats output in human-legible JSON."""

  def __unicode__(self):
    # Storage is inherited from JsonFormatter; only the rendering differs:
    # keys sorted alphabetically, two-space indentation.
    return json.dumps(self._table, sort_keys=True, indent=2, ensure_ascii=False)
class NullFormatter(TableFormatter):
  """Formatter that prints no output at all."""

  def __init__(self, **kwds):
    super(NullFormatter, self).__init__(**kwds)
    # Rows and columns are accepted and remembered, but never rendered.
    self._column_names = []
    self._rows = []

  def __nonzero__(self):
    return len(self._rows) > 0

  def __len__(self):
    return len(self._rows)

  def __unicode__(self):
    # Always renders as the empty string, by design.
    return ''

  def AddRow(self, row):
    self._rows.append(row)

  def AddRows(self, rows):
    for each in rows:
      self.AddRow(each)

  @property
  def column_names(self):
    # Hand back a copy so callers cannot mutate our column list.
    return list(self._column_names)

  def AddColumn(self, column_name, **kwds):
    self._column_names.append(column_name)
def GetFormatter(table_format):
  """Map a format name to a TableFormatter object."""
  # Dispatch table keeps the name -> class mapping in one place.
  formatter_classes = {
      'csv': CsvFormatter,
      'pretty': PrettyFormatter,
      'json': JsonFormatter,
      'prettyjson': PrettyJsonFormatter,
      'sparse': SparsePrettyFormatter,
      'none': NullFormatter,
  }
  try:
    formatter_class = formatter_classes[table_format]
  except KeyError:
    raise FormatterException('Unknown format: %s' % table_format)
  return formatter_class()
| Python |
#!/usr/bin/env python
#
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Setup configuration."""
import platform
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup # pylint: disable=g-import-not-at-top
# Configure the required packages and scripts to install, depending on
# Python version and OS.
# Runtime dependencies; versions pinned where the client is known to be
# sensitive to API changes.
REQUIRED_PACKAGES = [
    'google-apputils',
    'python-gflags',
    'google-api-python-client==1.2',
    'oauth2client==1.2',
    'httplib2',
]
CONSOLE_SCRIPTS = [
    'bq = bq:run_main',
]
# pyreadline stands in for GNU readline on Windows.
if platform.system() == 'Windows':
  REQUIRED_PACKAGES.append('pyreadline')
py_version = platform.python_version()
# NOTE(review): this is a lexicographic string comparison, not a semantic
# version comparison. It happens to work for released CPython 2.x versions
# (none exceed 2.7.x), but would misorder e.g. '2.10.0' -- confirm before
# reusing this pattern.
if py_version < '2.6.5' or py_version >= '3':
  raise ValueError('BigQuery requires Python >= 2.6.5.')
_BQ_VERSION = '2.0.17'
setup(name='bigquery',
      version=_BQ_VERSION,
      description='BigQuery command-line tool',
      url='http://code.google.com/p/google-bigquery-tools/',
      author='Google Inc.',
      author_email='bigquery-team@google.com',
      # Contained modules and scripts.
      py_modules=[
          'bq',
          'bigquery_client',
          'table_formatter',
      ],
      entry_points={
          'console_scripts': CONSOLE_SCRIPTS,
      },
      install_requires=REQUIRED_PACKAGES,
      provides=[
          'bigquery (%s)' % (_BQ_VERSION,),
      ],
      # Information for packaging of the discovery document.
      include_package_data=True,
      packages=['discovery'],
      package_data={
          'discovery': ['*'],
      },
      # PyPI package information.
      classifiers=[
          'Development Status :: 5 - Production/Stable',
          'Environment :: Console',
          'Intended Audience :: Developers',
          'Intended Audience :: End Users/Desktop',
          'License :: OSI Approved :: Apache Software License',
          'Operating System :: MacOS :: MacOS X',
          'Operating System :: Microsoft :: Windows',
          'Operating System :: POSIX :: Linux',
          'Programming Language :: Python :: 2.7',
          'Topic :: Database :: Front-Ends',
          'Topic :: Software Development :: Libraries',
          'Topic :: Software Development :: Libraries :: Python Modules',
      ],
      license='Apache 2.0',
      keywords='google bigquery library',
      )
| Python |
#!/usr/bin/env python
"""Bootstrap setuptools installation
If you want to use setuptools in your package's setup.py, just include this
file in the same directory with it, and add this to the top of your setup.py::
from ez_setup import use_setuptools
use_setuptools()
If you want to require a specific version of setuptools, set a download
mirror, or use an alternate download directory, you can do so by supplying
the appropriate options to ``use_setuptools()``.
This file can also be run as a script to install or upgrade setuptools.
"""
import sys
DEFAULT_VERSION = "0.6c11"
# Eggs live under a per-Python-version directory on PyPI.
DEFAULT_URL = "http://pypi.python.org/packages/%s/s/setuptools/" % sys.version[:3]

# Known-good MD5 digests of downloadable setuptools eggs, keyed by egg
# filename; used by _validate_md5 to detect corrupted/tampered downloads.
# Regenerated via the --md5update command-line mode (see update_md5).
md5_data = {
    'setuptools-0.6c10-py2.3.egg': 'ce1e2ab5d3a0256456d9fc13800a7090',
    'setuptools-0.6c10-py2.4.egg': '57d6d9d6e9b80772c59a53a8433a5dd4',
    'setuptools-0.6c10-py2.5.egg': 'de46ac8b1c97c895572e5e8596aeb8c7',
    'setuptools-0.6c10-py2.6.egg': '58ea40aef06da02ce641495523a0b7f5',
    'setuptools-0.6c11-py2.3.egg': '2baeac6e13d414a9d28e7ba5b5a596de',
    'setuptools-0.6c11-py2.4.egg': 'bd639f9b0eac4c42497034dec2ec0c2b',
    'setuptools-0.6c11-py2.5.egg': '64c94f3bf7a72a13ec83e0b24f2749b2',
    'setuptools-0.6c11-py2.6.egg': 'bfa92100bd772d5a213eedd356d64086',
    'setuptools-0.6c8-py2.3.egg': '50759d29b349db8cfd807ba8303f1902',
    'setuptools-0.6c8-py2.4.egg': 'cba38d74f7d483c06e9daa6070cce6de',
    'setuptools-0.6c8-py2.5.egg': '1721747ee329dc150590a58b3e1ac95b',
    'setuptools-0.6c9-py2.3.egg': 'a83c4020414807b496e4cfbe08507c03',
    'setuptools-0.6c9-py2.4.egg': '260a2be2e5388d66bdaee06abec6342a',
    'setuptools-0.6c9-py2.5.egg': 'fe67c3e5a17b12c0e7c541b7ea43a8e6',
    'setuptools-0.6c9-py2.6.egg': 'ca37b1ff16fa2ede6e19383e7b59245a',
}
import sys, os
try: from hashlib import md5
except ImportError: from md5 import md5
def _validate_md5(egg_name, data):
    """Exit(2) if data's MD5 digest mismatches the known digest for egg_name.

    Eggs not listed in md5_data are accepted unchecked. Returns data so the
    call can be chained around a read().
    """
    if egg_name in md5_data:
        digest = md5(data).hexdigest()
        if digest != md5_data[egg_name]:
            print >>sys.stderr, (
                "md5 validation of %s failed! (Possible download problem?)"
                % egg_name
            )
            sys.exit(2)
    return data
def use_setuptools(
    version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir,
    download_delay=15
):
    """Automatically find/download setuptools and make it available on sys.path

    `version` should be a valid setuptools version number that is available
    as an egg for download under the `download_base` URL (which should end with
    a '/'). `to_dir` is the directory where setuptools will be downloaded, if
    it is not already available. If `download_delay` is specified, it should
    be the number of seconds that will be paused before initiating a download,
    should one be required. If an older version of setuptools is installed,
    this routine will print a message to ``sys.stderr`` and raise SystemExit in
    an attempt to abort the calling script.
    """
    # Remember whether setuptools was imported before we started: if so, we
    # cannot safely replace it within this process.
    was_imported = 'pkg_resources' in sys.modules or 'setuptools' in sys.modules
    def do_download():
        # Fetch the egg, put it first on sys.path, and tell setuptools to
        # install itself from that egg later.
        egg = download_setuptools(version, download_base, to_dir, download_delay)
        sys.path.insert(0, egg)
        import setuptools; setuptools.bootstrap_install_from = egg
    try:
        import pkg_resources
    except ImportError:
        return do_download()
    try:
        pkg_resources.require("setuptools>="+version); return
    except pkg_resources.VersionConflict, e:
        if was_imported:
            print >>sys.stderr, (
            "The required version of setuptools (>=%s) is not available, and\n"
            "can't be installed while this script is running. Please install\n"
            " a more recent version first, using 'easy_install -U setuptools'."
            "\n\n(Currently using %r)"
            ) % (version, e.args[0])
            sys.exit(2)
    except pkg_resources.DistributionNotFound:
        pass
    # Drop the stale pkg_resources so the freshly downloaded egg's copy is
    # imported instead.
    del pkg_resources, sys.modules['pkg_resources'] # reload ok
    return do_download()
def download_setuptools(
    version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir,
    delay = 15
):
    """Download setuptools from a specified location and return its filename

    `version` should be a valid setuptools version number that is available
    as an egg for download under the `download_base` URL (which should end
    with a '/'). `to_dir` is the directory where the egg will be downloaded.
    `delay` is the number of seconds to pause before an actual download attempt.
    """
    import urllib2, shutil
    egg_name = "setuptools-%s-py%s.egg" % (version,sys.version[:3])
    url = download_base + egg_name
    saveto = os.path.join(to_dir, egg_name)
    src = dst = None
    if not os.path.exists(saveto): # Avoid repeated downloads
        try:
            from distutils import log
            if delay:
                log.warn("""
---------------------------------------------------------------------------
This script requires setuptools version %s to run (even to display
help). I will attempt to download it for you (from
%s), but
you may need to enable firewall access for this script first.
I will start the download in %d seconds.
(Note: if this machine does not have network access, please obtain the file
%s
and place it in this directory before rerunning this script.)
---------------------------------------------------------------------------""",
                    version, download_base, delay, url
                ); from time import sleep; sleep(delay)
            log.warn("Downloading %s", url)
            src = urllib2.urlopen(url)
            # Read/write all in one block, so we don't create a corrupt file
            # if the download is interrupted.
            data = _validate_md5(egg_name, src.read())
            dst = open(saveto,"wb"); dst.write(data)
        finally:
            # Close both handles whether or not the download succeeded.
            if src: src.close()
            if dst: dst.close()
    return os.path.realpath(saveto)
def main(argv, version=DEFAULT_VERSION):
    """Install or upgrade setuptools and EasyInstall"""
    try:
        import setuptools
    except ImportError:
        # No setuptools at all: bootstrap from a freshly downloaded egg,
        # then delete the egg once easy_install has consumed it.
        egg = None
        try:
            egg = download_setuptools(version, delay=0)
            sys.path.insert(0,egg)
            from setuptools.command.easy_install import main
            return main(list(argv)+[egg]) # we're done here
        finally:
            if egg and os.path.exists(egg):
                os.unlink(egg)
    else:
        # '0.0.1' is the version reported by a broken/placeholder install.
        if setuptools.__version__ == '0.0.1':
            print >>sys.stderr, (
            "You have an obsolete version of setuptools installed. Please\n"
            "remove it from your system entirely before rerunning this script."
            )
            sys.exit(2)
    req = "setuptools>="+version
    import pkg_resources
    try:
        pkg_resources.require(req)
    except pkg_resources.VersionConflict:
        # Installed but too old: upgrade in place via easy_install.
        try:
            from setuptools.command.easy_install import main
        except ImportError:
            from easy_install import main
        main(list(argv)+[download_setuptools(delay=0)])
        sys.exit(0) # try to force an exit
    else:
        if argv:
            from setuptools.command.easy_install import main
            main(argv)
        else:
            print "Setuptools version",version,"or greater has been installed."
            print '(Run "ez_setup.py -U setuptools" to reinstall or upgrade.)'
def update_md5(filenames):
    """Update our built-in md5 registry"""
    import re
    # Digest each named egg file and record it under its basename.
    for name in filenames:
        base = os.path.basename(name)
        f = open(name,'rb')
        md5_data[base] = md5(f.read()).hexdigest()
        f.close()
    data = [" %r: %r,\n" % it for it in md5_data.items()]
    data.sort()
    repl = "".join(data)
    # Rewrite this script's own md5_data literal in place.
    import inspect
    srcfile = inspect.getsourcefile(sys.modules[__name__])
    f = open(srcfile, 'rb'); src = f.read(); f.close()
    match = re.search("\nmd5_data = {\n([^}]+)}", src)
    if not match:
        print >>sys.stderr, "Internal error!"
        sys.exit(2)
    src = src[:match.start(1)] + repl + src[match.end(1):]
    f = open(srcfile,'w')
    f.write(src)
    f.close()
if __name__=='__main__':
    # "--md5update egg..." refreshes the embedded digest table; any other
    # arguments are forwarded to the installer entry point.
    if len(sys.argv)>2 and sys.argv[1]=='--md5update':
        update_md5(sys.argv[2:])
    else:
        main(sys.argv[1:])
| Python |
#!/usr/bin/env python
#
# Copyright 2012 Google Inc. All Rights Reserved.
"""Python script for interacting with BigQuery."""
import cmd
import codecs
import datetime
import httplib
import json
import os
import pdb
import pipes
import platform
import shlex
import sys
import time
import traceback
import types
import apiclient
import httplib2
import oauth2client
import oauth2client.client
import oauth2client.file
import oauth2client.gce
import oauth2client.tools
from google.apputils import app
from google.apputils import appcommands
import gflags as flags
import table_formatter
import bigquery_client
# Global flags shared by every bq command. Values may also come from the
# ~/.bigqueryrc file (see _ProcessBigqueryrc); command-line values win.
flags.DEFINE_string(
    'apilog', None,
    'Turn on logging of all server requests and responses. If no string is '
    'provided, log to stdout; if a string is provided, instead log to that '
    'file.')
flags.DEFINE_string(
    'api',
    'https://www.googleapis.com',
    'API endpoint to talk to.')
flags.DEFINE_string(
    'api_version', 'v2',
    'API version to use.')
flags.DEFINE_boolean(
    'debug_mode', False,
    'Show tracebacks on Python exceptions.')
flags.DEFINE_string(
    'trace', None,
    'A tracing token of the form "trace:<traceid>" '
    'to include in api requests.')
# NOTE: fixed help-text typo "overrridden" -> "overridden".
flags.DEFINE_string(
    'bigqueryrc', os.path.join(os.path.expanduser('~'), '.bigqueryrc'),
    'Path to configuration file. The configuration file specifies '
    'new defaults for any flags, and can be overridden by specifying the '
    'flag on the command line. If the --bigqueryrc flag is not specified, the '
    'BIGQUERYRC environment variable is used. If that is not specified, the '
    'path "~/.bigqueryrc" is used.')
flags.DEFINE_string(
    'credential_file', os.path.join(os.path.expanduser('~'),
                                    '.bigquery.v2.token'),
    'Filename used for storing the BigQuery OAuth token.')
flags.DEFINE_string(
    'discovery_file', '',
    'Filename for JSON document to read for discovery.')
flags.DEFINE_boolean(
    'synchronous_mode', True,
    'If True, wait for command completion before returning, and use the '
    'job completion status for error codes. If False, simply create the '
    'job, and use the success of job creation as the error code.',
    short_name='sync')
flags.DEFINE_string(
    'project_id', '',
    'Default project to use for requests.')
flags.DEFINE_string(
    'dataset_id', '',
    'Default dataset to use for requests. (Ignored when not applicable.)')
# This flag is "hidden" at the global scope to avoid polluting help
# text on individual commands for rarely used functionality.
flags.DEFINE_string(
    'job_id', None,
    'A unique job_id to use for the request. If not specified, this client '
    'will generate a job_id. Applies only to commands that launch jobs, '
    'such as cp, extract, link, load, and query.')
flags.DEFINE_boolean(
    'fingerprint_job_id', False,
    'Whether to use a job id that is derived from a fingerprint of the job '
    'configuration. This will prevent the same job from running multiple times '
    'accidentally.')
flags.DEFINE_boolean(
    'quiet', False,
    'If True, ignore status updates while jobs are running.',
    short_name='q')
flags.DEFINE_boolean(
    'headless',
    False,
    'Whether this bq session is running without user interaction. This '
    'affects behavior that expects user interaction, like whether '
    'debug_mode will break into the debugger and lowers the frequency '
    'of informational printing.')
flags.DEFINE_enum(
    'format', None,
    ['none', 'json', 'prettyjson', 'csv', 'sparse', 'pretty'],
    'Format for command output. Options include:'
    '\n pretty: formatted table output'
    '\n sparse: simpler table output'
    '\n prettyjson: easy-to-read JSON format'
    '\n json: maximally compact JSON'
    '\n csv: csv format with header'
    '\nThe first three are intended to be human-readable, and the latter '
    'three are for passing to another program. If no format is selected, '
    'one will be chosen based on the command run.')
flags.DEFINE_multistring(
    'job_property', None,
    'Additional key-value pairs to include in the properties field of '
    'the job configuration')  # No period: Multistring adds flagspec suffix.
flags.DEFINE_boolean(
    'use_gce_service_account', False,
    'Use this when running on a Google Compute Engine instance to use service '
    'account credentials instead of stored credentials. For more information, '
    'see: https://developers.google.com/compute/docs/authentication')
flags.DEFINE_string(
    'service_account', '',
    'Use this service account email address for authorization. '
    'For example, 1234567890@developer.gserviceaccount.com.'
    )
flags.DEFINE_string(
    'service_account_private_key_file', '',
    'Filename that contains the service account private key. '
    'Required if --service_account is specified.')
flags.DEFINE_string(
    'service_account_private_key_password', 'notasecret',
    'Password for private key. This password must match the password '
    'you set on the key when you created it in the Google APIs Console. '
    'Defaults to the default Google APIs Console private key password.')
flags.DEFINE_string(
    'service_account_credential_file', None,
    'File to be used as a credential store for service accounts. '
    'Must be set if using a service account.')
flags.DEFINE_integer(
    'max_rows_per_request', None,
    'Specifies the max number of rows to return per read.')
FLAGS = flags.FLAGS
# These are long names.
# pylint: disable=g-bad-name
# Module-level shorthands for the bigquery_client types used throughout
# this file.
JobReference = bigquery_client.ApiClientHelper.JobReference
ProjectReference = bigquery_client.ApiClientHelper.ProjectReference
DatasetReference = bigquery_client.ApiClientHelper.DatasetReference
TableReference = bigquery_client.ApiClientHelper.TableReference
BigqueryClient = bigquery_client.BigqueryClient
JobIdGeneratorIncrementing = bigquery_client.JobIdGeneratorIncrementing
JobIdGeneratorRandom = bigquery_client.JobIdGeneratorRandom
JobIdGeneratorFingerprint = bigquery_client.JobIdGeneratorFingerprint
# pylint: enable=g-bad-name
def _VersionNumberReleased():
"""Return the released version of bq."""
try:
import pkg_resources # pylint: disable=g-import-not-at-top
version = pkg_resources.get_distribution('bigquery').version
return 'v%s' % (version,)
except ImportError:
return '<unknown>'
def _VersionNumber():
  """Return the version of bq."""
  # Delegates to the released-version lookup.
  return _VersionNumberReleased()
# User agent sent with every API request.
_CLIENT_USER_AGENT = 'bq/' + _VersionNumber()
# OAuth2 scope(s) requested during authorization.
_CLIENT_SCOPE = [
    'https://www.googleapis.com/auth/bigquery',
]
_CLIENT_ID = '977385342095.apps.googleusercontent.com'
# Keyword arguments for oauth2client.client.OAuth2WebServerFlow.
_CLIENT_INFO = {
    'client_id': _CLIENT_ID,
    'client_secret': 'wbER7576mc_1YOII0dGk7jEE',
    'scope': _CLIENT_SCOPE,
    'user_agent': _CLIENT_USER_AGENT,
}
# Shown when the server reports a Terms-of-Service error.
_BIGQUERY_TOS_MESSAGE = (
    'In order to get started, please visit the Google APIs Console to '
    'create a project and agree to our Terms of Service:\n'
    '\thttp://code.google.com/apis/console\n\n'
    'For detailed sign-up instructions, please see our Getting Started '
    'Guide:\n'
    '\thttps://developers.google.com/bigquery/docs/getting-started\n\n'
    'Once you have completed the sign-up process, please try your command '
    'again.')
# Friendly spellings accepted for the tab field delimiter
# (see _NormalizeFieldDelimiter).
_DELIMITER_MAP = {
    'tab': '\t',
    '\\t': '\t',
}
# These aren't relevant for user-facing docstrings:
# pylint: disable=g-doc-return-or-yield
# pylint: disable=g-doc-args
# TODO(user): Write some explanation of the structure of this file.
####################
# flags processing
####################
def _ValidateGlobalFlags():
  """Validate combinations of global flag values."""
  # --service_account and --use_gce_service_account select mutually
  # exclusive credential mechanisms.
  if FLAGS.service_account and FLAGS.use_gce_service_account:
    raise app.UsageError(
        'Cannot specify both --service_account and --use_gce_service_account.')
def _GetBigqueryRcFilename():
  """Return the name of the bigqueryrc file to use.

  In order, we look for a flag the user specified, an environment
  variable, and finally the default value for the flag.

  Returns:
    bigqueryrc filename as a string.
  """
  # A flag explicitly given on the command line (with a non-empty value)
  # takes precedence.
  if FLAGS['bigqueryrc'].present and FLAGS.bigqueryrc:
    return FLAGS.bigqueryrc
  # Next comes the BIGQUERYRC environment variable, if set and non-empty.
  env_filename = os.environ.get('BIGQUERYRC')
  if env_filename:
    return env_filename
  # Finally fall back to the flag's default value.
  return FLAGS.bigqueryrc
def _ProcessBigqueryrc():
  """Updates FLAGS with values found in the bigqueryrc file.

  Each non-comment line of the rcfile has the form "flag = value"
  (or just "flag", treated as a boolean true). Unknown flag names
  raise a usage error.
  """
  bigqueryrc = _GetBigqueryRcFilename()
  if not os.path.exists(bigqueryrc):
    return
  with open(bigqueryrc) as rcfile:
    for line in rcfile:
      # Skip comments and blank lines.
      if line.lstrip().startswith('#') or not line.strip():
        continue
      elif line.lstrip().startswith('['):
        # TODO(user): Support command-specific flag sections.
        continue
      flag, equalsign, value = line.partition('=')
      # if no value given, assume stringified boolean true
      if not equalsign:
        value = 'true'
      flag = flag.strip()
      value = value.strip()
      # Tolerate leading dashes, e.g. "--project_id = foo".
      while flag.startswith('-'):
        flag = flag[1:]
      # We want flags specified at the command line to override
      # those in the flagfile.
      if flag not in FLAGS:
        raise app.UsageError(
            'Unknown flag %s found in bigqueryrc file' % (flag,))
      if not FLAGS[flag].present:
        FLAGS[flag].Parse(value)
      elif FLAGS[flag].Type().startswith('multi'):
        # Multi-valued flags accumulate: command-line values first,
        # then the rcfile values appended after them.
        old_value = getattr(FLAGS, flag)
        FLAGS[flag].Parse(value)
        setattr(FLAGS, flag, old_value + getattr(FLAGS, flag))
def _ResolveApiInfoFromFlags():
  """Determine an api and api_version."""
  # Both values come straight from the global flags.
  return {'api': FLAGS.api, 'api_version': FLAGS.api_version}
def _UseServiceAccount():
  """Return True when either service-account flag selects that auth mode."""
  if FLAGS.use_gce_service_account:
    return True
  return bool(FLAGS.service_account)
def _GetServiceAccountCredentialsFromFlags(storage):  # pylint: disable=unused-argument
  """Build service-account credentials from the global auth flags.

  Args:
    storage: Unused; present for signature compatibility with
        _GetCredentialsFromOAuthFlow.

  Returns:
    An oauth2client credentials object: GCE metadata-based credentials
    when --use_gce_service_account is set, otherwise a signed-JWT
    assertion built from the private key file.

  Raises:
    app.UsageError: If OpenSSL support is missing, or the private key
        file is not specified or cannot be read.
  """
  if FLAGS.use_gce_service_account:
    return oauth2client.gce.AppAssertionCredentials(_CLIENT_SCOPE)
  if not oauth2client.client.HAS_OPENSSL:
    raise app.UsageError(
        'BigQuery requires OpenSSL to be installed in order to use '
        'service account credentials. Please install OpenSSL '
        'and the Python OpenSSL package.')
  if FLAGS.service_account_private_key_file:
    try:
      # Use the builtin open() rather than the deprecated Python 2-only
      # file() constructor.
      with open(FLAGS.service_account_private_key_file, 'rb') as f:
        key = f.read()
    except IOError as e:
      raise app.UsageError(
          'Service account specified, but private key in file "%s" '
          'cannot be read:\n%s' % (FLAGS.service_account_private_key_file, e))
  else:
    raise app.UsageError(
        'Service account authorization requires the '
        'service_account_private_key_file flag to be set.')
  return oauth2client.client.SignedJwtAssertionCredentials(
      FLAGS.service_account, key, _CLIENT_SCOPE,
      private_key_password=FLAGS.service_account_private_key_password,
      user_agent=_CLIENT_USER_AGENT)
def _GetCredentialsFromOAuthFlow(storage):
  """Run the interactive OAuth2 web-server flow and return credentials.

  Args:
    storage: oauth2client storage in which to persist the new credentials.

  Returns:
    The credentials obtained from the flow. Exits the process when
    running --headless or on a network error.
  """
  print
  print '******************************************************************'
  print '** No OAuth2 credentials found, beginning authorization process **'
  print '******************************************************************'
  print
  if FLAGS.headless:
    print 'Running in headless mode, exiting.'
    sys.exit(1)
  while True:
    # If authorization fails, we want to retry, rather than let this
    # cascade up and get caught elsewhere. If users want out of the
    # retry loop, they can ^C.
    try:
      flow = oauth2client.client.OAuth2WebServerFlow(**_CLIENT_INFO)
      credentials = oauth2client.tools.run(flow, storage)
      break
    except (oauth2client.client.FlowExchangeError, SystemExit), e:
      # Here SystemExit is "no credential at all", and the
      # FlowExchangeError is "invalid" -- usually because you reused
      # a token.
      print 'Invalid authorization: %s' % (e,)
      print
    except httplib2.HttpLib2Error as e:
      print 'Error communicating with server. Please check your internet '
      print 'connection and try again.'
      print
      print 'Error is: %s' % (e,)
      sys.exit(1)
  print
  print '************************************************'
  print '** Continuing execution of BigQuery operation **'
  print '************************************************'
  print
  return credentials
def _GetCredentialsFromFlags():
  """Return credentials selected by the global auth flags.

  Returns:
    An oauth2client credentials object, either loaded from the
    credential store or freshly obtained (service account or the
    interactive OAuth flow), with the store attached for refreshes.
  """
  # In the case of a GCE service account, we can skip the entire
  # process of loading from storage.
  if FLAGS.use_gce_service_account:
    return _GetServiceAccountCredentialsFromFlags(None)
  if FLAGS.service_account:
    credentials_getter = _GetServiceAccountCredentialsFromFlags
    credential_file = FLAGS.service_account_credential_file
    if not credential_file:
      raise app.UsageError(
          'The flag --service_account_credential_file must be specified '
          'if --service_account is used.')
  else:
    credentials_getter = _GetCredentialsFromOAuthFlow
    credential_file = FLAGS.credential_file
  try:
    # Note that oauth2client.file ensures the file is created with
    # the correct permissions.
    storage = oauth2client.file.Storage(credential_file)
  except OSError as e:
    # Report the file we actually tried to use: for service accounts this
    # is --service_account_credential_file, not --credential_file.
    raise bigquery_client.BigqueryError(
        'Cannot create credential file %s: %s' % (credential_file, e))
  try:
    credentials = storage.get()
  except BaseException as e:
    BigqueryCmd.ProcessError(
        e, name='GetCredentialsFromFlags',
        message_prefix=(
            'Credentials appear corrupt. Please delete the credential file '
            'and try your command again. You can delete your credential '
            'file using "bq init --delete_credentials".\n\nIf that does '
            'not work, you may have encountered a bug in the BigQuery CLI.'))
    sys.exit(1)
  if credentials is None or credentials.invalid:
    credentials = credentials_getter(storage)
    credentials.set_store(storage)
  return credentials
def _GetFormatterFromFlags(secondary_format='sparse'):
  """Return a formatter for --format, falling back to secondary_format."""
  chosen = FLAGS.format if FLAGS['format'].present else secondary_format
  return table_formatter.GetFormatter(chosen)
def _ExpandForPrinting(fields, rows, formatter):
  """Expand entries that require special bq-specific formatting."""

  def _FormatTimestamp(value):
    # Render an epoch-seconds value as a UTC "YYYY-MM-DD HH:MM:SS" string;
    # strftime stays inside the try since it can also raise ValueError
    # for out-of-range dates.
    try:
      stamp = datetime.datetime.utcfromtimestamp(float(value))
      return stamp.strftime('%Y-%m-%d %H:%M:%S')
    except ValueError:
      return '<date out of range for display>'

  # Map column index -> normalizer for columns that need one.
  normalizers = {
      index: _FormatTimestamp
      for index, field in enumerate(fields)
      if field['type'].upper() == 'TIMESTAMP'
  }

  # The rendering of a NULL entry depends on the output format.
  if isinstance(formatter, table_formatter.JsonFormatter):
    null_entry = None
  elif isinstance(formatter, table_formatter.CsvFormatter):
    null_entry = ''
  else:
    null_entry = 'NULL'

  def _Normalize(index, entry):
    if entry is None:
      return null_entry
    normalizer = normalizers.get(index)
    return normalizer(entry) if normalizer else entry

  return ([_Normalize(i, e) for i, e in enumerate(row)] for row in rows)
def _PrintDryRunInfo(job):
  """Print the bytes-processed estimate from a dry-run query job."""
  num_bytes = job['statistics']['query']['totalBytesProcessed']
  if FLAGS.format in ['prettyjson', 'json']:
    # JSON formats get the whole job document.
    _PrintFormattedJsonObject(job)
  elif FLAGS.format == 'csv':
    # csv output is just the raw byte count.
    print num_bytes
  else:
    print (
        'Query successfully validated. Assuming the tables are not modified, '
        'running this query will process %s bytes of data.' % (num_bytes,))
def _PrintFormattedJsonObject(obj):
  """Print obj as JSON: indented for prettyjson, compact otherwise."""
  if FLAGS.format == 'prettyjson':
    print json.dumps(obj, sort_keys=True, indent=2)
  else:
    print json.dumps(obj, separators=(',', ':'))
def _GetJobIdFromFlags():
  """Returns the job id or job generator from the flags."""
  if FLAGS.fingerprint_job_id and FLAGS.job_id:
    raise app.UsageError(
        'The fingerprint_job_id flag cannot be specified with the job_id '
        'flag.')
  if FLAGS.fingerprint_job_id:
    return JobIdGeneratorFingerprint()
  if FLAGS.job_id is None:
    # No job id requested at all: generate one per job.
    return JobIdGeneratorIncrementing(JobIdGeneratorRandom())
  # An explicitly empty job id means "let the server pick one";
  # any other value is used verbatim.
  return FLAGS.job_id or None
def _GetWaitPrinterFactoryFromFlags():
  """Returns the default wait_printer_factory to use while waiting for jobs."""
  # Precedence: --quiet silences updates entirely; --headless reduces them
  # to state transitions; otherwise print verbose progress.
  if FLAGS.quiet:
    return BigqueryClient.QuietWaitPrinter
  return (BigqueryClient.TransitionWaitPrinter if FLAGS.headless
          else BigqueryClient.VerboseWaitPrinter)
def _PromptWithDefault(message):
  """Prompts user with message, return key pressed or '' on enter."""
  # Headless sessions cannot prompt; announce and accept the default.
  if FLAGS.headless:
    print 'Running --headless, accepting default for prompt: %s' % (message,)
    return ''
  return raw_input(message).lower()
def _PromptYN(message):
  """Prompts user with message, returning the key 'y', 'n', or '' on enter."""
  # Re-prompt until one of the accepted responses is given.
  response = _PromptWithDefault(message)
  while response not in ('y', 'n', ''):
    response = _PromptWithDefault(message)
  return response
def _NormalizeFieldDelimiter(field_delimiter):
  """Validates and returns the correct field_delimiter.

  Maps the friendly spellings 'tab' and '\\t' to an actual tab
  character; other values pass through unchanged once validated.
  """
  # The only non-string delimiter we allow is None, which represents
  # no field delimiter specified by the user.
  if field_delimiter is None:
    return field_delimiter
  try:
    # We check the field delimiter flag specifically, since a
    # mis-entered Thorn character generates a difficult to
    # understand error during request serialization time.
    _ = field_delimiter.decode(sys.stdin.encoding or 'utf8')
  except UnicodeDecodeError:
    raise app.UsageError(
        'The field delimiter flag is not valid. Flags must be '
        'specified in your default locale. For example, '
        'the Latin 1 representation of Thorn is byte code FE, '
        'which in the UTF-8 locale would be expressed as C3 BE.')
  # Allow TAB and \\t substitution.
  key = field_delimiter.lower()
  return _DELIMITER_MAP.get(key, field_delimiter)
class TablePrinter(object):
  """Base class for printing a table, with a default implementation."""
  def __init__(self, **kwds):
    super(TablePrinter, self).__init__()
    # Stash every keyword argument as an attribute; most extended
    # classes will require state.
    for key, value in kwds.items():
      setattr(self, key, value)
  def PrintTable(self, fields, rows):
    """Format fields/rows with the flag-selected formatter and print."""
    formatter = _GetFormatterFromFlags(secondary_format='pretty')
    formatter.AddFields(fields)
    expanded_rows = _ExpandForPrinting(fields, rows, formatter)
    formatter.AddRows(expanded_rows)
    formatter.Print()
class Factory(object):
  """Class encapsulating factory creation of BigqueryClient."""
  # Lazily-initialized class to construct (see GetBigqueryClientFactory).
  _BIGQUERY_CLIENT_FACTORY = None
  class ClientTablePrinter(object):
    # Lazily-initialized singleton TablePrinter.
    _TABLE_PRINTER = None
    @classmethod
    def GetTablePrinter(cls):
      """Return the shared TablePrinter, creating it on first use."""
      if cls._TABLE_PRINTER is None:
        cls._TABLE_PRINTER = TablePrinter()
      return cls._TABLE_PRINTER
    @classmethod
    def SetTablePrinter(cls, printer):
      """Install a custom TablePrinter instance."""
      if not isinstance(printer, TablePrinter):
        raise TypeError('Printer must be an instance of TablePrinter.')
      cls._TABLE_PRINTER = printer
  @classmethod
  def GetBigqueryClientFactory(cls):
    """Return the client class to construct, defaulting to BigqueryClient."""
    if cls._BIGQUERY_CLIENT_FACTORY is None:
      cls._BIGQUERY_CLIENT_FACTORY = bigquery_client.BigqueryClient
    return cls._BIGQUERY_CLIENT_FACTORY
  @classmethod
  def SetBigqueryClientFactory(cls, factory):
    """Install a custom BigqueryClient subclass to construct."""
    if not issubclass(factory, bigquery_client.BigqueryClient):
      raise TypeError('Factory must be subclass of BigqueryClient.')
    cls._BIGQUERY_CLIENT_FACTORY = factory
class Client(object):
  """Class wrapping a singleton bigquery_client.BigqueryClient."""
  # The cached singleton; see Get()/Delete().
  client = None
  @staticmethod
  def Create(**kwds):
    """Build a new BigqueryClient configured from kwds and FLAGS."""
    def KwdsOrFlags(name):
      # Explicit keyword arguments win over global flag values.
      return kwds[name] if name in kwds else getattr(FLAGS, name)
    # Note that we need to handle possible initialization tasks
    # for the case of being loaded as a library.
    _ProcessBigqueryrc()
    bigquery_client.ConfigurePythonLogger(FLAGS.apilog)
    credentials = _GetCredentialsFromFlags()
    assert credentials is not None
    client_args = {}
    global_args = ('credential_file', 'job_property',
                   'project_id', 'dataset_id', 'trace', 'sync',
                   'api', 'api_version')
    for name in global_args:
      client_args[name] = KwdsOrFlags(name)
    client_args['wait_printer_factory'] = _GetWaitPrinterFactoryFromFlags()
    if FLAGS.discovery_file:
      with open(FLAGS.discovery_file) as f:
        client_args['discovery_document'] = f.read()
    bigquery_client_factory = Factory.GetBigqueryClientFactory()
    return bigquery_client_factory(credentials=credentials, **client_args)
  @classmethod
  def Get(cls):
    """Return a BigqueryClient initialized from flags."""
    if cls.client is None:
      try:
        cls.client = Client.Create()
      except ValueError, e:
        # Convert constructor parameter errors into flag usage errors.
        raise app.UsageError(e)
    return cls.client
  @classmethod
  def Delete(cls):
    """Delete the existing client.
    This is needed when flags have changed, and we need to force
    client recreation to reflect new flag values.
    """
    cls.client = None
def _Typecheck(obj, types, message=None): # pylint: disable=redefined-outer-name
"""Raises a user error if obj is not an instance of types."""
if not isinstance(obj, types):
message = message or 'Type of %s is not one of %s' % (obj, types)
raise app.UsageError(message)
# TODO(user): This code uses more than the average amount of
# Python magic. Explain what the heck is going on throughout.
class NewCmd(appcommands.Cmd):
  """Featureful extension of appcommands.Cmd."""
  def __init__(self, name, flag_values):
    super(NewCmd, self).__init__(name, flag_values)
    run_with_args = getattr(self, 'RunWithArgs', None)
    # "New-style" commands define RunWithArgs; their positional arguments
    # are derived from that method's signature via introspection.
    self._new_style = isinstance(run_with_args, types.MethodType)
    if self._new_style:
      func = run_with_args.im_func
      code = func.func_code  # pylint: disable=redefined-outer-name
      self._full_arg_list = list(code.co_varnames[:code.co_argcount])
      # TODO(user): There might be some corner case where this
      # is *not* the right way to determine bound vs. unbound method.
      if isinstance(run_with_args.im_self, run_with_args.im_class):
        self._full_arg_list.pop(0)
      self._max_args = len(self._full_arg_list)
      self._min_args = self._max_args - len(func.func_defaults or [])
      # co_flags bits: 0x04 = CO_VARARGS (*args), 0x08 = CO_VARKEYWORDS
      # (**kwds).
      self._star_args = bool(code.co_flags & 0x04)
      self._star_kwds = bool(code.co_flags & 0x08)
      if self._star_args:
        self._max_args = sys.maxint
      self._debug_mode = FLAGS.debug_mode
      self.surface_in_shell = True
      self.__doc__ = self.RunWithArgs.__doc__
    elif self.Run.im_func is NewCmd.Run.im_func:
      raise appcommands.AppCommandsError(
          'Subclasses of NewCmd must override Run or RunWithArgs')
  def __getattr__(self, name):
    """Resolve unknown attributes to per-command flag values."""
    if name in self._command_flags:
      return self._command_flags[name].value
    return super(NewCmd, self).__getattribute__(name)
  def _GetFlag(self, flagname):
    """Return the per-command flag object for flagname, or None."""
    if flagname in self._command_flags:
      return self._command_flags[flagname]
    else:
      return None
  def Run(self, argv):
    """Run this command.
    If self is a new-style command, we set up arguments and call
    self.RunWithArgs, gracefully handling exceptions. If not, we
    simply call self.Run(argv).
    Args:
      argv: List of arguments as strings.
    Returns:
      0 on success, nonzero on failure.
    """
    if not self._new_style:
      return super(NewCmd, self).Run(argv)
    # Snapshot flag values so any values changed for this invocation can
    # be restored in the finally clause below.
    original_values = self._command_flags.FlagValuesDict()
    try:
      args = self._command_flags(argv)[1:]
      for flag, value in self._command_flags.FlagValuesDict().iteritems():
        setattr(self, flag, value)
        if value == original_values[flag]:
          original_values.pop(flag)
      # Fill required positional arguments from flags first, then from
      # the remaining command-line args.
      new_args = []
      for argname in self._full_arg_list[:self._min_args]:
        flag = self._GetFlag(argname)
        if flag is not None and flag.present:
          new_args.append(flag.value)
        elif args:
          new_args.append(args.pop(0))
        else:
          print 'Not enough positional args, still looking for %s' % (argname,)
          if self.usage:
            print 'Usage: %s' % (self.usage,)
          return 1
      # Optional (defaulted) arguments become keywords when provided.
      new_kwds = {}
      for argname in self._full_arg_list[self._min_args:]:
        flag = self._GetFlag(argname)
        if flag is not None and flag.present:
          new_kwds[argname] = flag.value
        elif args:
          new_kwds[argname] = args.pop(0)
      if args and not self._star_args:
        print 'Too many positional args, still have %s' % (args,)
        return 1
      new_args.extend(args)
      if self._debug_mode:
        return self.RunDebug(new_args, new_kwds)
      else:
        return self.RunSafely(new_args, new_kwds)
    finally:
      # Restore any flag values this invocation changed.
      for flag, value in original_values.iteritems():
        setattr(self, flag, value)
        self._command_flags[flag].Parse(value)
  def RunCmdLoop(self, argv):
    """Hook for use in cmd.Cmd-based command shells."""
    try:
      args = shlex.split(argv)
    except ValueError, e:
      raise SyntaxError(BigqueryCmd.EncodeForPrinting(e))
    return self.Run([self._command_name] + args)
  def _HandleError(self, e):
    """Print an error for exception e and return a nonzero exit code."""
    message = str(e)
    if isinstance(e, bigquery_client.BigqueryClientConfigurationError):
      message += ' Try running "bq init".'
    print 'Exception raised in %s operation: %s' % (self._command_name, message)
    return 1
  def RunDebug(self, args, kwds):
    """Run this command in debug mode."""
    try:
      return_value = self.RunWithArgs(*args, **kwds)
    except BaseException, e:
      # Don't break into the debugger for expected exceptions.
      if isinstance(e, app.UsageError) or (
          isinstance(e, bigquery_client.BigqueryError) and
          not isinstance(e, bigquery_client.BigqueryInterfaceError)):
        return self._HandleError(e)
      print
      print '****************************************************'
      print '**  Unexpected Exception raised in bq execution!  **'
      if FLAGS.headless:
        print '**  --headless mode enabled, exiting.             **'
        print '**  See STDERR for traceback.                     **'
      else:
        print '**  --debug_mode enabled, starting pdb.           **'
      print '****************************************************'
      print
      traceback.print_exc()
      print
      if not FLAGS.headless:
        pdb.post_mortem()
      return 1
    return return_value
  def RunSafely(self, args, kwds):
    """Run this command, turning exceptions into print statements."""
    try:
      return_value = self.RunWithArgs(*args, **kwds)
    except BaseException, e:
      return self._HandleError(e)
    return return_value
class BigqueryCmd(NewCmd):
  """Bigquery-specific NewCmd wrapper."""
  def RunSafely(self, args, kwds):
    """Run this command, printing information about any exceptions raised."""
    try:
      return_value = self.RunWithArgs(*args, **kwds)
    except BaseException, e:
      return BigqueryCmd.ProcessError(e, name=self._command_name)
    return return_value
  @staticmethod
  def EncodeForPrinting(s):
    """Safely encode a string as the encoding for sys.stdout."""
    encoding = sys.stdout.encoding or 'ascii'
    return unicode(s).encode(encoding, 'backslashreplace')
  @staticmethod
  def ProcessError(
      e, name='unknown',
      message_prefix='You have encountered a bug in the BigQuery CLI.'):
    """Translate an error message into some printing and a return code."""
    response = []
    retcode = 1
    contact_us_msg = (
        'Google engineers monitor and answer questions on Stack Overflow, with '
        'the tag google-bigquery:\n'
        '  http://stackoverflow.com/questions/ask?tags=google-bigquery\n'
        'Please include a brief description of the steps that led to this '
        'issue, as well as the following information: \n\n')
    # Diagnostic block appended to unexpected-error reports.
    error_details = (
        '========================================\n'
        '== Platform ==\n'
        '  %s\n'
        '== bq version ==\n'
        '  %s\n'
        '== Command line ==\n'
        '  %s\n'
        '== UTC timestamp ==\n'
        '  %s\n'
        '== Error trace ==\n'
        '%s'
        '========================================\n') % (
            ':'.join([
                platform.python_implementation(),
                platform.python_version(),
                platform.platform()]),
            _VersionNumber(),
            sys.argv,
            time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime()),
            ''.join(traceback.format_tb(sys.exc_info()[2]))
        )
    # Avoid encoding crashes while printing the error report.
    codecs.register_error('strict', codecs.replace_errors)
    message = BigqueryCmd.EncodeForPrinting(e)
    # Expected BigQuery/usage errors get a short message; anything else
    # falls through to the full bug-report block below.
    if isinstance(e, (bigquery_client.BigqueryNotFoundError,
                      bigquery_client.BigqueryDuplicateError)):
      response.append('BigQuery error in %s operation: %s' % (name, message))
      retcode = 2
    elif isinstance(e, bigquery_client.BigqueryTermsOfServiceError):
      response.append(str(e) + '\n')
      response.append(_BIGQUERY_TOS_MESSAGE)
    elif isinstance(e, bigquery_client.BigqueryInvalidQueryError):
      response.append('Error in query string: %s' % (message,))
    elif (isinstance(e, bigquery_client.BigqueryError)
          and not isinstance(e, bigquery_client.BigqueryInterfaceError)):
      response.append('BigQuery error in %s operation: %s' % (name, message))
    elif isinstance(e, (app.UsageError, TypeError)):
      response.append(message)
    elif (isinstance(e, SyntaxError) or
          isinstance(e, bigquery_client.BigquerySchemaError)):
      response.append('Invalid input: %s' % (message,))
    elif isinstance(e, flags.FlagsError):
      response.append('Error parsing command: %s' % (message,))
    elif isinstance(e, KeyboardInterrupt):
      response.append('')
    else:  # pylint: disable=broad-except
      # Errors with traceback information are printed here.
      # The traceback module has nicely formatted the error trace
      # for us, so we don't want to undo that via TextWrap.
      if isinstance(e, bigquery_client.BigqueryInterfaceError):
        message_prefix = (
            'Bigquery service returned an invalid reply in %s operation: %s.'
            '\n\n'
            'Please make sure you are using the latest version '
            'of the bq tool and try again. '
            'If this problem persists, you may have encountered a bug in the '
            'bigquery client.' % (name, message))
      elif isinstance(e, oauth2client.client.Error):
        message_prefix = (
            'Authorization error. This may be a network connection problem, '
            'so please try again. If this problem persists, the credentials '
            'may be corrupt. Try deleting and re-creating your credentials. '
            'You can delete your credentials using '
            '"bq init --delete_credentials".'
            '\n\n'
            'If this problem still occurs, you may have encountered a bug '
            'in the bigquery client.')
      elif (isinstance(e, httplib.HTTPException)
            or isinstance(e, apiclient.errors.Error)
            or isinstance(e, httplib2.HttpLib2Error)):
        message_prefix = (
            'Network connection problem encountered, please try again.'
            '\n\n'
            'If this problem persists, you may have encountered a bug in the '
            'bigquery client.')
      print flags.TextWrap(message_prefix + ' ' + contact_us_msg)
      print error_details
      response.append('Unexpected exception in %s operation: %s' % (
          name, message))
    print flags.TextWrap('\n'.join(response))
    return retcode
  def PrintJobStartInfo(self, job):
    """Print a simple status line."""
    reference = BigqueryClient.ConstructObjectReference(job)
    print 'Successfully started %s %s' % (self._command_name, reference)
# The 'bq load' command: import local files or GCS URIs into a table.
class _Load(BigqueryCmd):
  usage = """load <destination_table> <source> <schema>"""
  def __init__(self, name, fv):
    super(_Load, self).__init__(name, fv)
    # Per-command flags, registered on this command's own FlagValues (fv).
    flags.DEFINE_string(
        'field_delimiter', None,
        'The character that indicates the boundary between columns in the '
        'input file. "\\t" and "tab" are accepted names for tab.',
        short_name='F', flag_values=fv)
    flags.DEFINE_enum(
        'encoding', None,
        ['UTF-8', 'ISO-8859-1'],
        'The character encoding used by the input file. Options include:'
        '\n ISO-8859-1 (also known as Latin-1)'
        '\n UTF-8',
        short_name='E', flag_values=fv)
    flags.DEFINE_integer(
        'skip_leading_rows', None,
        'The number of rows at the beginning of the source file to skip.',
        flag_values=fv)
    flags.DEFINE_string(
        'schema', None,
        'Either a filename or a comma-separated list of fields in the form '
        'name[:type].',
        flag_values=fv)
    flags.DEFINE_boolean(
        'replace', False,
        'If true erase existing contents before loading new data.',
        flag_values=fv)
    flags.DEFINE_string(
        'quote', None,
        'Quote character to use to enclose records. Default is ". '
        'To indicate no quote character at all, use an empty string.',
        flag_values=fv)
    flags.DEFINE_integer(
        'max_bad_records', 0,
        'Maximum number of bad records allowed before the entire job fails.',
        flag_values=fv)
    flags.DEFINE_boolean(
        'allow_quoted_newlines', None,
        'Whether to allow quoted newlines in CSV import data.',
        flag_values=fv)
    flags.DEFINE_boolean(
        'allow_jagged_rows', None,
        'Whether to allow missing trailing optional columns '
        'in CSV import data.',
        flag_values=fv)
    flags.DEFINE_enum(
        'source_format', None,
        ['CSV',
         'NEWLINE_DELIMITED_JSON',
         'DATASTORE_BACKUP'],
        'Format of source data. Options include:'
        '\n CSV'
        '\n NEWLINE_DELIMITED_JSON'
        '\n DATASTORE_BACKUP',
        flag_values=fv)
  def RunWithArgs(self, destination_table, source, schema=None):
    """Perform a load operation of source into destination_table.
    Usage:
      load <destination_table> <source> [<schema>]
    The <destination_table> is the fully-qualified table name of table to
    create, or append to if the table already exists.
    The <source> argument can be a path to a single local file, or a
    comma-separated list of URIs.
    The <schema> argument should be either the name of a JSON file or a text
    schema. This schema should be omitted if the table already has one.
    In the case that the schema is provided in text form, it should be a
    comma-separated list of entries of the form name[:type], where type will
    default to string if not specified.
    In the case that <schema> is a filename, it should contain a
    single array object, each entry of which should be an object with
    properties 'name', 'type', and (optionally) 'mode'. See the online
    documentation for more detail:
      https://code.google.com/apis/bigquery/docs/uploading.html#createtable
    Note: the case of a single-entry schema with no type specified is
    ambiguous; one can use name:string to force interpretation as a
    text schema.
    Examples:
      bq load ds.new_tbl ./info.csv ./info_schema.json
      bq load ds.new_tbl gs://mybucket/info.csv ./info_schema.json
      bq load ds.small gs://mybucket/small.csv name:integer,value:string
      bq load ds.small gs://mybucket/small.csv field1,field2,field3
    Arguments:
      destination_table: Destination table name.
      source: Name of local file to import, or a comma-separated list of
        URI paths to data to import.
      schema: Either a text schema or JSON file, as above.
    """
    client = Client.Get()
    table_reference = client.GetTableReference(destination_table)
    # Options passed straight through to the load job configuration.
    opts = {
        'encoding': self.encoding,
        'skip_leading_rows': self.skip_leading_rows,
        'max_bad_records': self.max_bad_records,
        'allow_quoted_newlines': self.allow_quoted_newlines,
        'job_id': _GetJobIdFromFlags(),
        'source_format': self.source_format,
    }
    if self.replace:
      opts['write_disposition'] = 'WRITE_TRUNCATE'
    if self.field_delimiter:
      opts['field_delimiter'] = _NormalizeFieldDelimiter(self.field_delimiter)
    if self.quote is not None:
      opts['quote'] = _NormalizeFieldDelimiter(self.quote)
    if self.allow_jagged_rows is not None:
      opts['allow_jagged_rows'] = self.allow_jagged_rows
    job = client.Load(table_reference, source, schema=schema, **opts)
    # In async mode, report the job id instead of waiting for completion.
    if not FLAGS.sync:
      self.PrintJobStartInfo(job)
class _Query(BigqueryCmd):
  """Implements the 'bq query' command: run a SQL query as a BigQuery job."""

  usage = """query <sql>"""

  def __init__(self, name, fv):
    # Flags are registered on this command's own FlagValues (fv), so they can
    # be supplied after the command name on the bq command line.
    super(_Query, self).__init__(name, fv)
    flags.DEFINE_string(
        'destination_table', '',
        'Name of destination table for query results.',
        flag_values=fv)
    flags.DEFINE_integer(
        'start_row', 0,
        'First row to return in the result.',
        short_name='s', flag_values=fv)
    flags.DEFINE_integer(
        'max_rows', 100,
        'How many rows to return in the result.',
        short_name='n', flag_values=fv)
    flags.DEFINE_boolean(
        'batch', False,
        'Whether to run the query in batch mode.',
        flag_values=fv)
    flags.DEFINE_boolean(
        'append_table', False,
        'When a destination table is specified, whether or not to append.',
        flag_values=fv)
    flags.DEFINE_boolean(
        'rpc', False,
        'If true, use rpc-style query API instead of jobs.insert().',
        flag_values=fv)
    flags.DEFINE_boolean(
        'replace', False,
        'If true, erase existing contents before loading new data.',
        flag_values=fv)
    flags.DEFINE_boolean(
        'allow_large_results', None,
        'Enables larger destination table sizes.',
        flag_values=fv)
    flags.DEFINE_boolean(
        'dry_run', None,
        'Whether the query should be validated without executing.',
        flag_values=fv)
    flags.DEFINE_boolean(
        'require_cache', None,
        'Whether to only run the query if it is already cached.',
        flag_values=fv)
    flags.DEFINE_boolean(
        'use_cache', None,
        'Whether to use the query cache to avoid rerunning cached queries.',
        flag_values=fv)
    flags.DEFINE_float(
        'min_completion_ratio', None,
        '[Experimental] The minimum fraction of data that must be scanned '
        'before a query returns. If not set, the default server value (1.0) '
        'will be used.',
        lower_bound=0, upper_bound=1.0,
        flag_values=fv)

  def RunWithArgs(self, *args):
    """Execute a query.
    Examples:
      bq query 'select count(*) from publicdata:samples.shakespeare'
    Usage:
      query <sql_query>
    """
    # Set up the params that are the same for rpc-style and jobs.insert()-style
    # queries.
    kwds = {
        'dry_run': self.dry_run,
        'use_cache': self.use_cache,
        'min_completion_ratio': self.min_completion_ratio,
        }
    # All positional arguments are joined into one SQL string.
    query = ' '.join(args)
    client = Client.Get()
    if self.rpc:
      # The rpc-style API cannot carry job-level configuration, so reject
      # every flag that only makes sense for jobs.insert().
      if self.allow_large_results:
        raise app.UsageError(
            'allow_large_results cannot be specified in rpc mode.')
      if self.destination_table:
        raise app.UsageError(
            'destination_table cannot be specified in rpc mode.')
      if FLAGS.job_id or FLAGS.fingerprint_job_id:
        raise app.UsageError(
            'job_id and fingerprint_job_id cannot be specified in rpc mode.')
      if self.batch:
        raise app.UsageError(
            'batch cannot be specified in rpc mode.')
      kwds['max_results'] = self.max_rows
      fields, rows = client.RunQueryRpc(query, **kwds)
      Factory.ClientTablePrinter.GetTablePrinter().PrintTable(fields, rows)
    else:
      # jobs.insert()-style query: translate flags into job configuration.
      # When both --append_table and --replace are set with a destination
      # table, --replace wins (the second assignment overwrites the first).
      if self.destination_table and self.append_table:
        kwds['write_disposition'] = 'WRITE_APPEND'
      if self.destination_table and self.replace:
        kwds['write_disposition'] = 'WRITE_TRUNCATE'
      if self.require_cache:
        kwds['create_disposition'] = 'CREATE_NEVER'
      if self.batch:
        kwds['priority'] = 'BATCH'
      kwds['destination_table'] = self.destination_table
      kwds['allow_large_results'] = self.allow_large_results
      kwds['job_id'] = _GetJobIdFromFlags()
      job = client.Query(query, **kwds)
      if self.dry_run:
        _PrintDryRunInfo(job)
      elif not FLAGS.sync:
        # Async mode: just report the started job and return.
        self.PrintJobStartInfo(job)
      else:
        # Sync mode: wait for the job and print the requested row window.
        fields, rows = client.ReadSchemaAndJobRows(job['jobReference'],
                                                   start_row=self.start_row,
                                                   max_rows=self.max_rows)
        Factory.ClientTablePrinter.GetTablePrinter().PrintTable(fields, rows)
class _Extract(BigqueryCmd):
  """Implements the 'bq extract' command."""

  usage = """extract <source_table> <destination_uri>"""

  def __init__(self, name, fv):
    super(_Extract, self).__init__(name, fv)
    flags.DEFINE_string(
        'field_delimiter', None,
        'The character that indicates the boundary between columns in the '
        'output file. "\\t" and "tab" are accepted names for tab.',
        short_name='F', flag_values=fv)
    flags.DEFINE_enum(
        'destination_format', None,
        ['CSV', 'NEWLINE_DELIMITED_JSON'],
        'The format with which to write the extracted data. Tables with '
        'nested or repeated fields cannot be extracted to CSV.',
        flag_values=fv)

  def RunWithArgs(self, source_table, destination_uri):
    """Perform an extract operation of source_table into destination_uri.
    Usage:
      extract <source_table> <destination_uri>
    Examples:
      bq extract ds.summary gs://mybucket/summary.csv
    Arguments:
      source_table: Source table to extract.
      destination_uri: Google Storage uri.
    """
    client = Client.Get()
    source_reference = client.GetTableReference(source_table)
    # Normalize escape names like "tab" into the literal delimiter character.
    delimiter = _NormalizeFieldDelimiter(self.field_delimiter)
    job = client.Extract(
        source_reference, destination_uri,
        field_delimiter=delimiter,
        destination_format=self.destination_format,
        job_id=_GetJobIdFromFlags())
    if FLAGS.sync:
      return
    self.PrintJobStartInfo(job)
class _List(BigqueryCmd):
  """Implements the 'bq ls' command."""

  usage = """ls [(-j|-p|-d)] [-a] [-n <number>] [<identifier>]"""

  def __init__(self, name, fv):
    super(_List, self).__init__(name, fv)
    flags.DEFINE_boolean(
        'all', None,
        'Show all results. For jobs, will show jobs from all users. For '
        'datasets, will list hidden datasets.',
        short_name='a', flag_values=fv)
    flags.DEFINE_boolean(
        'all_jobs', None,
        'DEPRECATED. Use --all instead',
        flag_values=fv)
    flags.DEFINE_boolean(
        'jobs', False,
        'Show jobs described by this identifier.',
        short_name='j', flag_values=fv)
    flags.DEFINE_integer(
        'max_results', None,
        'Maximum number to list.',
        short_name='n', flag_values=fv)
    flags.DEFINE_boolean(
        'projects', False,
        'Show all projects.',
        short_name='p', flag_values=fv)
    flags.DEFINE_boolean(
        'datasets', False,
        'Show datasets described by this identifier.',
        short_name='d', flag_values=fv)

  def RunWithArgs(self, identifier=''):
    """List the objects contained in the named collection.
    List the objects in the named project or dataset. A trailing : or
    . can be used to signify a project or dataset.
     * With -j, show the jobs in the named project.
     * With -p, show all projects.
    Examples:
      bq ls
      bq ls -j proj
      bq ls -p -n 1000
      bq ls mydataset
      bq ls -a
    """
    # pylint: disable=g-doc-exception
    if self.j and self.p:
      raise app.UsageError(
          'Cannot specify more than one of -j and -p.')
    if self.p and identifier:
      raise app.UsageError('Cannot specify an identifier with -p')
    # Copy deprecated flag specifying 'all' to current one.
    if self.all_jobs is not None:
      self.a = self.all_jobs
    client = Client.Get()
    formatter = _GetFormatterFromFlags()
    if identifier:
      reference = client.GetReference(identifier)
    else:
      try:
        reference = client.GetReference(identifier)
      except bigquery_client.BigqueryError:
        # We want to let through the case of no identifier, which
        # will fall through to the second case below.
        reference = None
    # If we got a TableReference, we might be able to make sense
    # of it as a DatasetReference, as in 'ls foo' with dataset_id
    # set.
    if isinstance(reference, TableReference):
      try:
        reference = client.GetDatasetReference(identifier)
      except bigquery_client.BigqueryError:
        pass
    _Typecheck(reference, (types.NoneType, ProjectReference, DatasetReference),
               ('Invalid identifier "%s" for ls, cannot call list on object '
                'of type %s') % (identifier, type(reference).__name__))
    # With -d, list the datasets of the dataset's parent project rather than
    # the dataset's tables.
    if self.d and isinstance(reference, DatasetReference):
      reference = reference.GetProjectReference()
    if self.j:
      reference = client.GetProjectReference(identifier)
      _Typecheck(reference, ProjectReference,
                 'Cannot determine job(s) associated with "%s"' % (identifier,))
      project_reference = client.GetProjectReference(identifier)
      BigqueryClient.ConfigureFormatter(formatter, JobReference)
      results = map(  # pylint: disable=g-long-lambda
          client.FormatJobInfo,
          client.ListJobs(reference=project_reference,
                          max_results=self.max_results,
                          all_users=self.a))
    elif self.p or reference is None:
      BigqueryClient.ConfigureFormatter(formatter, ProjectReference)
      results = map(  # pylint: disable=g-long-lambda
          client.FormatProjectInfo,
          client.ListProjects(max_results=self.max_results))
    elif isinstance(reference, ProjectReference):
      BigqueryClient.ConfigureFormatter(formatter, DatasetReference)
      results = map(  # pylint: disable=g-long-lambda
          client.FormatDatasetInfo,
          client.ListDatasets(reference, max_results=self.max_results,
                              list_all=self.a))
    else:  # isinstance(reference, DatasetReference):
      BigqueryClient.ConfigureFormatter(formatter, TableReference)
      results = map(  # pylint: disable=g-long-lambda
          client.FormatTableInfo,
          client.ListTables(reference, max_results=self.max_results))
    for result in results:
      formatter.AddDict(result)
    formatter.Print()
class _Delete(BigqueryCmd):
  """Implements the 'bq rm' command."""

  usage = """rm [-f] [-r] [(-d|-t)] <identifier>"""

  def __init__(self, name, fv):
    super(_Delete, self).__init__(name, fv)
    flags.DEFINE_boolean(
        'dataset', False,
        'Remove dataset described by this identifier.',
        short_name='d', flag_values=fv)
    flags.DEFINE_boolean(
        'table', False,
        'Remove table described by this identifier.',
        short_name='t', flag_values=fv)
    flags.DEFINE_boolean(
        'force', False,
        "Ignore existing tables and datasets, don't prompt.",
        short_name='f', flag_values=fv)
    flags.DEFINE_boolean(
        'recursive', False,
        'Remove dataset and any tables it may contain.',
        short_name='r', flag_values=fv)

  def RunWithArgs(self, identifier):
    """Delete the dataset or table described by identifier.
    Always requires an identifier, unlike the show and ls commands.
    By default, also requires confirmation before deleting. Supports
    the -d and -t flags to signify that the identifier is a dataset
    or table.
     * With -f, don't ask for confirmation before deleting.
     * With -r, remove all tables in the named dataset.
    Examples:
      bq rm ds.table
      bq rm -r -f old_dataset
    """
    client = Client.Get()
    # pylint: disable=g-doc-exception
    if self.d and self.t:
      raise app.UsageError('Cannot specify more than one of -d and -t.')
    if not identifier:
      raise app.UsageError('Must provide an identifier for rm.')
    # Resolve the identifier: -t/-d force an interpretation; otherwise let
    # the client infer it from the identifier's shape.
    if self.t:
      reference = client.GetTableReference(identifier)
    elif self.d:
      reference = client.GetDatasetReference(identifier)
    else:
      reference = client.GetReference(identifier)
    _Typecheck(reference, (DatasetReference, TableReference),
               'Invalid identifier "%s" for rm.' % (identifier,))
    # -r (recursive) only makes sense when deleting a dataset.
    if isinstance(reference, TableReference) and self.r:
      raise app.UsageError(
          'Cannot specify -r with %r' % (reference,))
    if not self.force:
      # Only prompt when the object actually exists; deleting a missing
      # object without -f will surface the server's error instead.
      if ((isinstance(reference, DatasetReference) and
           client.DatasetExists(reference)) or
          (isinstance(reference, TableReference)
           and client.TableExists(reference))):
        if 'y' != _PromptYN('rm: remove %r? (y/N) ' % (reference,)):
          print 'NOT deleting %r, exiting.' % (reference,)
          return 0
    if isinstance(reference, DatasetReference):
      # --force doubles as ignore_not_found so 'rm -f' is idempotent.
      client.DeleteDataset(reference,
                           ignore_not_found=self.force,
                           delete_contents=self.recursive)
    elif isinstance(reference, TableReference):
      client.DeleteTable(reference,
                         ignore_not_found=self.force)
class _Copy(BigqueryCmd):
usage = """cp [-n] <source_table> <dest_table>"""
def __init__(self, name, fv):
super(_Copy, self).__init__(name, fv)
flags.DEFINE_boolean(
'no_clobber', False,
'Do not overwrite an existing table.',
short_name='n', flag_values=fv)
flags.DEFINE_boolean(
'force', False,
"Ignore existing destination tables, don't prompt.",
short_name='f', flag_values=fv)
flags.DEFINE_boolean(
'append_table', False,
'Append to an existing table.',
short_name='a', flag_values=fv)
def RunWithArgs(self, source_table, dest_table):
"""Copies one table to another.
Examples:
bq cp dataset.old_table dataset2.new_table
"""
client = Client.Get()
source_reference = client.GetTableReference(source_table)
dest_reference = client.GetTableReference(dest_table)
if self.append_table:
write_disposition = 'WRITE_APPEND'
ignore_already_exists = True
elif self.no_clobber:
write_disposition = 'WRITE_EMPTY'
ignore_already_exists = True
else:
write_disposition = 'WRITE_TRUNCATE'
ignore_already_exists = False
if not self.force:
if client.TableExists(dest_reference):
if 'y' != _PromptYN('cp: replace %r? (y/N) ' % (dest_reference,)):
print 'NOT copying %r, exiting.' % (source_reference,)
return 0
kwds = {
'write_disposition': write_disposition,
'ignore_already_exists': ignore_already_exists,
'job_id': _GetJobIdFromFlags(),
}
job = client.CopyTable(source_reference, dest_reference, **kwds)
if job is None:
print "Table '%s' already exists, skipping" % (dest_reference,)
elif not FLAGS.sync:
self.PrintJobStartInfo(job)
else:
print "Table '%s' successfully copied to '%s'" % (
source_reference, dest_reference)
class _Make(BigqueryCmd):
  """Implements the 'bq mk' command."""

  usage = """mk [-d] <identifier>  OR  mk [-t] <identifier> [<schema>]"""

  def __init__(self, name, fv):
    super(_Make, self).__init__(name, fv)
    flags.DEFINE_boolean(
        'force', False,
        'Ignore errors reporting that the object already exists.',
        short_name='f', flag_values=fv)
    flags.DEFINE_boolean(
        'dataset', False,
        'Create dataset with this name.',
        short_name='d', flag_values=fv)
    flags.DEFINE_boolean(
        'table', False,
        'Create table with this name.',
        short_name='t', flag_values=fv)
    flags.DEFINE_string(
        'schema', '',
        'Either a filename or a comma-separated list of fields in the form '
        'name[:type].',
        flag_values=fv)
    flags.DEFINE_string(
        'description', None,
        'Description of the dataset or table.',
        flag_values=fv)
    flags.DEFINE_integer(
        'expiration', None,
        'Expiration time, in seconds from now, of a table.',
        flag_values=fv)

  def RunWithArgs(self, identifier='', schema=''):
    # pylint: disable=g-doc-exception
    """Create a dataset or table with this name.
    See 'bq help load' for more information on specifying the schema.
    Examples:
      bq mk new_dataset
      bq mk new_dataset.new_table
      bq --dataset_id=new_dataset mk table
      bq mk -t new_dataset.newtable name:integer,value:string
    """
    client = Client.Get()
    if self.d and self.t:
      raise app.UsageError('Cannot specify both -d and -t.')
    # Resolve the identifier: -t forces a table; -d (or no identifier, which
    # falls back to the default dataset) forces a dataset; otherwise infer
    # from the identifier's shape.
    if self.t:
      reference = client.GetTableReference(identifier)
    elif self.d or not identifier:
      reference = client.GetDatasetReference(identifier)
    else:
      reference = client.GetReference(identifier)
    _Typecheck(reference, (DatasetReference, TableReference),
               "Invalid identifier '%s' for mk." % (identifier,))
    if isinstance(reference, DatasetReference):
      # Schema and expiration are table-only options.
      if self.schema:
        raise app.UsageError('Cannot specify schema with a dataset.')
      if self.expiration:
        raise app.UsageError('Cannot specify an expiration for a dataset.')
      if client.DatasetExists(reference):
        message = "Dataset '%s' already exists." % (reference,)
        if not self.f:
          raise bigquery_client.BigqueryError(message)
        else:
          print message
          return
      client.CreateDataset(reference, ignore_existing=True,
                           description=self.description)
      print "Dataset '%s' successfully created." % (reference,)
    elif isinstance(reference, TableReference):
      if client.TableExists(reference):
        message = "Table '%s' already exists." % (reference,)
        if not self.f:
          raise bigquery_client.BigqueryError(message)
        else:
          print message
          return
      if schema:
        schema = bigquery_client.BigqueryClient.ReadSchema(schema)
      else:
        schema = None
      expiration = None
      if self.expiration:
        # --expiration is relative (seconds from now); the API takes an
        # absolute timestamp in milliseconds since the epoch.
        expiration = int(self.expiration + time.time()) * 1000
      client.CreateTable(reference, ignore_existing=True, schema=schema,
                         description=self.description,
                         expiration=expiration)
      print "Table '%s' successfully created." % (reference,)
class _Update(BigqueryCmd):
usage = """update [-d] [-t] <identifier> [<schema>]"""
def __init__(self, name, fv):
super(_Update, self).__init__(name, fv)
flags.DEFINE_boolean(
'dataset', False,
'Updates a dataset with this name.',
short_name='d', flag_values=fv)
flags.DEFINE_boolean(
'table', False,
'Updates a table with this name.',
short_name='t', flag_values=fv)
flags.DEFINE_string(
'schema', '',
'Either a filename or a comma-separated list of fields in the form '
'name[:type].',
flag_values=fv)
flags.DEFINE_string(
'description', None,
'Description of the dataset or table.',
flag_values=fv)
flags.DEFINE_integer(
'expiration', None,
'Expiration time, in seconds from now, of a table.',
flag_values=fv)
def RunWithArgs(self, identifier='', schema=''):
# pylint: disable=g-doc-exception
"""Updates a dataset or table with this name.
See 'bq help load' for more information on specifying the schema.
Examples:
bq update --description "Dataset description" existing_dataset
bq update --description "My table" dataset.table
bq update -t new_dataset.newtable name:integer,value:string
"""
client = Client.Get()
if self.d and self.t:
raise app.UsageError('Cannot specify both -d and -t.')
if self.t:
reference = client.GetTableReference(identifier)
elif self.d or not identifier:
reference = client.GetDatasetReference(identifier)
else:
reference = client.GetReference(identifier)
_Typecheck(reference, (DatasetReference, TableReference),
"Invalid identifier '%s' for mk." % (identifier,))
if isinstance(reference, DatasetReference):
if self.schema:
raise app.UsageError('Cannot specify schema with a dataset.')
if self.expiration:
raise app.UsageError('Cannot specify an expiration for a dataset.')
client.UpdateDataset(reference, description=self.description)
print "Dataset '%s' successfully updated." % (reference,)
elif isinstance(reference, TableReference):
if schema:
schema = bigquery_client.BigqueryClient.ReadSchema(schema)
else:
schema = None
expiration = None
if self.expiration:
expiration = int(self.expiration + time.time()) * 1000
client.UpdateTable(reference, schema=schema,
description=self.description,
expiration=expiration)
print "Table '%s' successfully updated." % (reference,)
class _Show(BigqueryCmd):
  """Implements the 'bq show' command."""

  usage = """show [<identifier>]"""

  def __init__(self, name, fv):
    super(_Show, self).__init__(name, fv)
    flags.DEFINE_boolean(
        'job', False,
        'If true, interpret this identifier as a job id.',
        short_name='j', flag_values=fv)
    flags.DEFINE_boolean(
        'dataset', False,
        'Show dataset with this name.',
        short_name='d', flag_values=fv)

  def RunWithArgs(self, identifier=''):
    """Show all information about an object.
    Examples:
      bq show -j <job_id>
      bq show dataset
      bq show dataset.table
    """
    # pylint: disable=g-doc-exception
    client = Client.Get()
    if self.j:
      reference = client.GetJobReference(identifier)
    elif self.d:
      reference = client.GetDatasetReference(identifier)
    else:
      reference = client.GetReference(identifier)
    if reference is None:
      raise app.UsageError('Must provide an identifier for show.')
    object_info = client.GetObjectInfo(reference)
    # The JSON formats are handled separately so that they don't print
    # the record as a list of one record.
    if FLAGS.format in ['prettyjson', 'json']:
      _PrintFormattedJsonObject(object_info)
    elif FLAGS.format in [None, 'sparse', 'pretty']:
      formatter = _GetFormatterFromFlags()
      BigqueryClient.ConfigureFormatter(
          formatter, type(reference), print_format='show')
      object_info = BigqueryClient.FormatInfoByKind(object_info)
      formatter.AddDict(object_info)
      print '%s %s\n' % (reference.typename.capitalize(), reference)
      formatter.Print()
      print
      # For failed jobs, also surface the error details after the table.
      if (isinstance(reference, JobReference) and
          object_info['State'] == 'FAILURE'):
        error_result = object_info['status']['errorResult']
        error_ls = object_info['status'].get('errors', [])
        # NOTE(review): error_result is passed twice here; verify against
        # BigqueryError.Create's signature that this is intentional.
        error = bigquery_client.BigqueryError.Create(
            error_result, error_result, error_ls)
        print 'Errors encountered during job execution. %s\n' % (error,)
    else:
      # Any other format: dump every key/value of the raw object info.
      formatter = _GetFormatterFromFlags()
      formatter.AddColumns(object_info.keys())
      formatter.AddDict(object_info)
      formatter.Print()
class _Head(BigqueryCmd):
  """Implements the 'bq head' command."""

  usage = """head [-n <max rows>] [-j] [-t] <identifier>"""

  def __init__(self, name, fv):
    super(_Head, self).__init__(name, fv)
    flags.DEFINE_boolean(
        'job', False,
        'Reads the results of a query job.',
        short_name='j', flag_values=fv)
    flags.DEFINE_boolean(
        'table', False,
        'Reads rows from a table.',
        short_name='t', flag_values=fv)
    flags.DEFINE_integer(
        'start_row', 0,
        'The number of rows to skip before showing table data.',
        short_name='s', flag_values=fv)
    flags.DEFINE_integer(
        'max_rows', 100,
        'The number of rows to print when showing table data.',
        short_name='n', flag_values=fv)

  def RunWithArgs(self, identifier=''):
    """Displays rows in a table.
    Examples:
      bq head dataset.table
      bq head -j job
      bq head -n 10 dataset.table
      bq head -s 5 -n 10 dataset.table
    """
    client = Client.Get()
    if self.j and self.t:
      raise app.UsageError('Cannot specify both -j and -t.')
    if self.j:
      reference = client.GetJobReference(identifier)
    else:
      reference = client.GetTableReference(identifier)
    # Pick the reader matching the reference type; both share a signature.
    if isinstance(reference, JobReference):
      read_rows = client.ReadSchemaAndJobRows
    elif isinstance(reference, TableReference):
      read_rows = client.ReadSchemaAndRows
    else:
      raise app.UsageError("Invalid identifier '%s' for head." % (identifier,))
    fields, rows = read_rows(dict(reference),
                             start_row=self.s,
                             max_rows=self.n)
    Factory.ClientTablePrinter.GetTablePrinter().PrintTable(fields, rows)
class _Insert(BigqueryCmd):
  """Implements the 'bq insert' command (streaming inserts)."""

  usage = """insert <table identifier> [file]"""

  def __init__(self, name, fv):
    super(_Insert, self).__init__(name, fv)

  def RunWithArgs(self, identifier='', filename=None):
    """Inserts rows in a table.
    Inserts the records formatted as newline delimited JSON from file
    into the specified table. If file is not specified, reads from stdin.
    If there were any insert errors it prints the errors to stdout.
    Examples:
      bq insert dataset.table /tmp/mydata.json
      echo '{"a":1, "b":2}' | bq insert dataset.table
    """
    if filename:
      with open(filename, 'r') as json_file:
        return self._DoInsert(identifier, json_file)
    else:
      return self._DoInsert(identifier, sys.stdin)

  def _DoInsert(self, identifier, json_file):
    """Insert the contents of the file into a table."""
    client = Client.Get()
    reference = client.GetReference(identifier)
    _Typecheck(reference, (TableReference,),
               'Must provide a table identifier for insert.')
    reference = dict(reference)
    batch = []
    def Flush():
      # Send the accumulated batch, then reset it in place (the closure
      # shares the 'batch' list, so it must be cleared, not rebound).
      result = client.InsertTableRows(reference, batch)
      del batch[:]
      return result, result.get('insertErrors', None)
    result = {}
    errors = None
    lineno = 1
    for line in json_file:
      try:
        batch.append(bigquery_client.JsonToInsertEntry(None, line))
        lineno += 1
      except bigquery_client.BigqueryClientError, e:
        # Parse failures abort immediately with the 1-based input line number.
        raise app.UsageError('Line %d: %s' % (lineno, str(e)))
      # Flush whenever the batch reaches the configured request-size cap.
      if (FLAGS.max_rows_per_request and
          len(batch) == FLAGS.max_rows_per_request):
        result, errors = Flush()
      if errors: break
    # Final partial batch, unless an earlier flush already reported errors.
    if batch and errors is None:
      result, errors = Flush()
    if FLAGS.format in ['prettyjson', 'json']:
      _PrintFormattedJsonObject(result)
    elif FLAGS.format in [None, 'sparse', 'pretty']:
      if errors:
        for entry in result['insertErrors']:
          entry_errors = entry['errors']
          sys.stdout.write('record %d errors: ' % (entry['index'],))
          for error in entry_errors:
            print '\t%s: %s' % (error['reason'], error['message'])
    # Exit status 1 signals that at least one row failed to insert.
    return 1 if errors else 0
class _Wait(BigqueryCmd):
  """Implements the 'bq wait' command."""

  usage = """wait [<job_id>] [<secs>]"""

  def RunWithArgs(self, job_id='', secs=sys.maxint):
    # pylint: disable=g-doc-exception
    """Wait some number of seconds for a job to finish.
    Poll job_id until either (1) the job is DONE or (2) the
    specified number of seconds have elapsed. Waits forever
    if unspecified. If no job_id is specified, and there is
    only one running job, we poll that job.
    Examples:
      bq wait # Waits forever for the currently running job.
      bq wait job_id  # Waits forever
      bq wait job_id 100  # Waits 100 seconds
      bq wait job_id 0  # See if a job is done.
    Arguments:
      job_id: Job ID to wait on.
      secs: Number of seconds to wait (must be >= 0).
    """
    try:
      secs = BigqueryClient.NormalizeWait(secs)
    except ValueError:
      raise app.UsageError('Invalid wait time: %s' % (secs,))
    client = Client.Get()
    if job_id:
      job_reference = client.GetJobReference(job_id)
    else:
      # No job id given: wait on the single active job, if there is
      # exactly one pending or running.
      active = client.ListJobRefs(state_filter=['PENDING', 'RUNNING'])
      if len(active) != 1:
        raise bigquery_client.BigqueryError(
            'No job_id provided, found %d running jobs' % (len(active),))
      job_reference = active.pop()
    client.WaitJob(job_reference=job_reference, wait=secs)
# pylint: disable=g-bad-name
class CommandLoop(cmd.Cmd):
  """Instance of cmd.Cmd built to work with NewCmd."""

  class TerminateSignal(Exception):
    """Exception type used for signaling loop completion."""
    pass

  def __init__(self, commands, prompt=None):
    cmd.Cmd.__init__(self)
    # 'help' is always available; other commands are added only if they are
    # NewCmd instances that opt in via surface_in_shell.
    self._commands = {'help': commands['help']}
    self._special_command_names = ['help', 'repl', 'EOF']
    for name, command in commands.iteritems():
      if (name not in self._special_command_names and
          isinstance(command, NewCmd) and
          command.surface_in_shell):
        self._commands[name] = command
        # cmd.Cmd dispatches 'foo' to self.do_foo, so wire each command's
        # RunCmdLoop in as that attribute.
        setattr(self, 'do_%s' % (name,), command.RunCmdLoop)
    self._default_prompt = prompt or 'BigQuery> '
    self._set_prompt()
    self._last_return_code = 0

  @property
  def last_return_code(self):
    # Return code of the most recently executed shell command.
    return self._last_return_code

  def _set_prompt(self):
    # Show the current project/dataset path as the prompt when a project
    # is configured; otherwise fall back to the default prompt.
    client = Client().Get()
    if client.project_id:
      path = str(client.GetReference())
      self.prompt = '%s> ' % (path,)
    else:
      self.prompt = self._default_prompt

  def do_EOF(self, *unused_args):
    """Terminate the running command loop.
    This function raises an exception to avoid the need to do
    potentially-error-prone string parsing inside onecmd.
    Returns:
      Never returns.
    Raises:
      CommandLoop.TerminateSignal: always.
    """
    raise CommandLoop.TerminateSignal()

  def postloop(self):
    print 'Goodbye.'

  def completedefault(self, unused_text, line, unused_begidx, unused_endidx):
    # Tab completion fallback: instead of completing, print the usage line
    # for the command being typed, then re-echo the prompt and input.
    if not line:
      return []
    else:
      command_name = line.partition(' ')[0].lower()
      usage = ''
      if command_name in self._commands:
        usage = self._commands[command_name].usage
      elif command_name == 'set':
        usage = 'set (project_id|dataset_id) <name>'
      elif command_name == 'unset':
        usage = 'unset (project_id|dataset_id)'
      if usage:
        print
        print usage
        # Trailing comma: reprint prompt + partial line without a newline.
        print '%s%s' % (self.prompt, line),
      return []

  def emptyline(self):
    # On an empty input line, list the available commands.
    print 'Available commands:',
    print ' '.join(list(self._commands))

  def precmd(self, line):
    """Preprocess the shell input."""
    if line == 'EOF':
      return line
    if line.startswith('exit') or line.startswith('quit'):
      return 'EOF'
    words = line.strip().split()
    # A bare SELECT statement is rewritten into a quoted 'query' command.
    if len(words) > 1 and words[0].lower() == 'select':
      return 'query %s' % (pipes.quote(line),)
    # A lone command name (other than these) shows that command's help.
    if len(words) == 1 and words[0] not in ['help', 'ls', 'version']:
      return 'help %s' % (line.strip(),)
    return line

  def onecmd(self, line):
    """Process a single command.
    Runs a single command, and stores the return code in
    self._last_return_code. Always returns False unless the command
    was EOF.
    Args:
      line: (str) Command line to process.
    Returns:
      A bool signaling whether or not the command loop should terminate.
    """
    try:
      self._last_return_code = cmd.Cmd.onecmd(self, line)
    except CommandLoop.TerminateSignal:
      return True
    except BaseException, e:
      # Any other failure is reported but keeps the shell alive.
      name = line.split(' ')[0]
      BigqueryCmd.ProcessError(e, name=name)
      self._last_return_code = 1
    return False

  def get_names(self):
    # Used by cmd.Cmd for completion/help: advertise the dynamically added
    # do_* handlers, expose do_select, and hide do_EOF.
    names = dir(self)
    commands = (name for name in self._commands
                if name not in self._special_command_names)
    names.extend('do_%s' % (name,) for name in commands)
    names.append('do_select')
    names.remove('do_EOF')
    return names

  def do_set(self, line):
    """Set the value of the project_id or dataset_id flag."""
    client = Client().Get()
    name, value = (line.split(' ') + ['', ''])[:2]
    if (name not in ('project_id', 'dataset_id') or
        not 1 <= len(line.split(' ')) <= 2):
      print 'set (project_id|dataset_id) <name>'
    elif name == 'dataset_id' and not client.project_id:
      print 'Cannot set dataset_id with project_id unset'
    else:
      setattr(client, name, value)
      self._set_prompt()
    return 0

  def do_unset(self, line):
    """Unset the value of the project_id or dataset_id flag."""
    name = line.strip()
    client = Client.Get()
    if name not in ('project_id', 'dataset_id'):
      print 'unset (project_id|dataset_id)'
    else:
      setattr(client, name, '')
      # Unsetting the project also invalidates the dataset.
      if name == 'project_id':
        client.dataset_id = ''
      self._set_prompt()
    return 0

  def do_help(self, command_name):
    """Print the help for command_name (if present) or general help."""

    # TODO(user): Add command-specific flags.
    def FormatOneCmd(name, command, command_names):
      # Renders one command's help; multi-command listings get the name as
      # a left-hand column, single-command help gets a plain indented body.
      indent_size = appcommands.GetMaxCommandLength() + 3
      if len(command_names) > 1:
        indent = ' ' * indent_size
        command_help = flags.TextWrap(
            command.CommandGetHelp('', cmd_names=command_names),
            indent=indent,
            firstline_indent='')
        first_help_line, _, rest = command_help.partition('\n')
        first_line = '%-*s%s' % (indent_size, name + ':', first_help_line)
        return '\n'.join((first_line, rest))
      else:
        default_indent = '  '
        return '\n' + flags.TextWrap(
            command.CommandGetHelp('', cmd_names=command_names),
            indent=default_indent,
            firstline_indent=default_indent) + '\n'

    if not command_name:
      print '\nHelp for Bigquery commands:\n'
      command_names = list(self._commands)
      print '\n\n'.join(
          FormatOneCmd(name, command, command_names)
          for name, command in self._commands.iteritems()
          if name not in self._special_command_names)
      print
    elif command_name in self._commands:
      print FormatOneCmd(command_name, self._commands[command_name],
                         command_names=[command_name])
    return 0

  def postcmd(self, stop, line):
    return bool(stop) or line == 'EOF'
# pylint: enable=g-bad-name
class _Repl(BigqueryCmd):
"""Start an interactive bq session."""
def __init__(self, name, fv):
super(_Repl, self).__init__(name, fv)
self.surface_in_shell = False
flags.DEFINE_string(
'prompt', '',
'Prompt to use for BigQuery shell.',
flag_values=fv)
def RunWithArgs(self):
"""Start an interactive bq session."""
repl = CommandLoop(appcommands.GetCommandList(), prompt=self.prompt)
print 'Welcome to BigQuery! (Type help for more information.)'
while True:
try:
repl.cmdloop()
break
except KeyboardInterrupt:
print
return repl.last_return_code
class _Init(BigqueryCmd):
  """Create a .bigqueryrc file and set up OAuth credentials."""

  def __init__(self, name, fv):
    super(_Init, self).__init__(name, fv)
    self.surface_in_shell = False
    flags.DEFINE_boolean(
        'delete_credentials', None,
        'If specified, the credentials file associated with this .bigqueryrc '
        'file is deleted.',
        flag_values=fv)

  def DeleteCredentials(self):
    """Deletes this user's credential file."""
    _ProcessBigqueryrc()
    # Service-account credentials take precedence over the normal OAuth
    # credential file, mirroring the lookup order used elsewhere.
    filename = FLAGS.service_account_credential_file or FLAGS.credential_file
    if not os.path.exists(filename):
      print 'Credential file %s does not exist.' % (filename,)
      return 0
    try:
      if 'y' != _PromptYN('Delete credential file %s? (y/N) ' % (filename,)):
        print 'NOT deleting %s, exiting.' % (filename,)
        return 0
      os.remove(filename)
    except OSError, e:
      print 'Error removing %s: %s' % (filename, e)
      return 1

  def RunWithArgs(self):
    """Authenticate and create a default .bigqueryrc file."""
    _ProcessBigqueryrc()
    bigquery_client.ConfigurePythonLogger(FLAGS.apilog)
    if self.delete_credentials:
      return self.DeleteCredentials()
    bigqueryrc = _GetBigqueryRcFilename()
    # Delete the old one, if it exists.
    print
    print 'Welcome to BigQuery! This script will walk you through the '
    print 'process of initializing your .bigqueryrc configuration file.'
    print
    if os.path.exists(bigqueryrc):
      print ' **** NOTE! ****'
      print 'An existing .bigqueryrc file was found at %s.' % (bigqueryrc,)
      print 'Are you sure you want to continue and overwrite your existing '
      print 'configuration?'
      print
      if 'y' != _PromptYN('Overwrite %s? (y/N) ' % (bigqueryrc,)):
        print 'NOT overwriting %s, exiting.' % (bigqueryrc,)
        return 0
      print
      try:
        os.remove(bigqueryrc)
      except OSError, e:
        print 'Error removing %s: %s' % (bigqueryrc, e)
        return 1
    print 'First, we need to set up your credentials if they do not '
    print 'already exist.'
    print
    # Getting the client triggers the OAuth flow if credentials are missing.
    client = Client.Get()
    entries = {'credential_file': FLAGS.credential_file}
    projects = client.ListProjects()
    print 'Credential creation complete. Now we will select a default project.'
    print
    if not projects:
      print 'No projects found for this user. Please go to '
      print '  https://code.google.com/apis/console'
      print 'and create a project.'
      print
    else:
      print 'List of projects:'
      formatter = _GetFormatterFromFlags()
      formatter.AddColumn('#')
      BigqueryClient.ConfigureFormatter(formatter, ProjectReference)
      for index, project in enumerate(projects):
        result = BigqueryClient.FormatProjectInfo(project)
        result.update({'#': index + 1})
        formatter.AddDict(result)
      formatter.Print()
      if len(projects) == 1:
        project_reference = BigqueryClient.ConstructObjectReference(
            projects[0])
        print 'Found only one project, setting %s as the default.' % (
            project_reference,)
        print
        entries['project_id'] = project_reference.projectId
      else:
        print 'Found multiple projects. Please enter a selection for '
        print 'which should be the default, or leave blank to not '
        print 'set a default.'
        print
        # Re-prompt until the response parses to an int in range; an empty
        # response becomes 0, meaning "no default project".
        response = None
        while not isinstance(response, int):
          response = _PromptWithDefault(
              'Enter a selection (1 - %s): ' % (len(projects),))
          try:
            if not response or 1 <= int(response) <= len(projects):
              response = int(response or 0)
          except ValueError:
            pass
        print
        if response:
          project_reference = BigqueryClient.ConstructObjectReference(
              projects[response - 1])
          entries['project_id'] = project_reference.projectId
    try:
      # Persist the chosen defaults as 'flag = value' lines.
      with open(bigqueryrc, 'w') as rcfile:
        for flag, value in entries.iteritems():
          print >>rcfile, '%s = %s' % (flag, value)
    except IOError, e:
      print 'Error writing %s: %s' % (bigqueryrc, e)
      return 1
    print 'BigQuery configuration complete! Type "bq" to get started.'
    print
    _ProcessBigqueryrc()
    # Destroy the client we created, so that any new client will
    # pick up new flag values.
    Client.Delete()
    return 0
class _Version(BigqueryCmd):
    usage = """version"""

    def RunWithArgs(self):
        """Return the version of bq."""
        print('This is BigQuery CLI %s' % (_VersionNumber(),))
def main(argv):
    """bq CLI entry point: register subcommands and bootstrap credentials.

    Registers every bq command with appcommands, then — when a real command
    is about to run but neither a .bigqueryrc nor a cached credential file
    exists — runs the interactive 'init' command first.
    """
    try:
        # Never pop a local browser for OAuth from the CLI.
        FLAGS.auth_local_webserver = False
        _ValidateGlobalFlags()
        bq_commands = {
            # Keep the commands alphabetical.
            'cp': _Copy,
            'extract': _Extract,
            'head': _Head,
            'init': _Init,
            'insert': _Insert,
            'load': _Load,
            'ls': _List,
            'mk': _Make,
            'query': _Query,
            'rm': _Delete,
            'shell': _Repl,
            'show': _Show,
            'update': _Update,
            'version': _Version,
            'wait': _Wait,
        }
        for command, function in bq_commands.iteritems():
            if command not in appcommands.GetCommandList():
                appcommands.AddCmd(command, function)
        # Trigger first-run setup only for recognized commands other than
        # init/help/version (those must work without configuration).
        if (not argv or
            (len(argv) > 1 and
             argv[1] not in ['init', 'help', 'version'] and
             argv[1] in appcommands.GetCommandList())):
            # Service Accounts don't use cached oauth credentials and
            # all bigqueryrc defaults are technically optional.
            if not _UseServiceAccount():
                if not (os.path.exists(_GetBigqueryRcFilename()) or
                        os.path.exists(FLAGS.credential_file)):
                    appcommands.GetCommandByName('init').Run([])
    except KeyboardInterrupt, e:
        print 'Control-C pressed, exiting.'
        sys.exit(1)
    except BaseException, e:  # pylint: disable=broad-except
        print 'Error initializing bq client: %s' % (e,)
        # Show the traceback and drop into pdb unless running headless.
        if FLAGS.debug_mode or FLAGS.headless:
            traceback.print_exc()
        if not FLAGS.headless:
            pdb.post_mortem()
        sys.exit(1)
# pylint: disable=g-bad-name
def run_main():
    """Function to be used as setuptools script entry point.

    Appcommands assumes that it always runs as __main__, but launching
    via a setuptools-generated entry_point breaks this rule. We do some
    trickery here to make sure that appcommands and flags find their
    state where they expect to by faking ourselves as __main__.
    """
    # Put the flags for this module somewhere the flags module will look
    # for them.
    # pylint: disable=protected-access
    new_name = flags._GetMainModule()
    sys.modules[new_name] = sys.modules['__main__']
    for flag in FLAGS.FlagsByModuleDict().get(__name__, []):
        FLAGS._RegisterFlagByModule(new_name, flag)
    for key_flag in FLAGS.KeyFlagsByModuleDict().get(__name__, []):
        FLAGS._RegisterKeyFlagForModule(new_name, key_flag)
    # pylint: enable=protected-access
    # Now set __main__ appropriately so that appcommands will be
    # happy.
    sys.modules['__main__'] = sys.modules[__name__]
    appcommands.Run()
    # Restore the real __main__ module once appcommands has finished.
    sys.modules['__main__'] = sys.modules.pop(new_name)


if __name__ == '__main__':
    appcommands.Run()
| Python |
#!/usr/bin/env python
#
# Copyright 2012 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Sample app demonstrates extraction of GAE Datastore data to Google BigQuery
Uses the App Engine MapReduce mapper pipeline to read entities
out of the App Engine Datastore, write processed entities into
Cloud Storage in CSV format, then starts another pipeline that
creates a BigQuery ingestion job. Uses code from the log2bq
project: http://code.google.com/p/log2bq/
"""
__author__ = 'manoochehri@google.com (Michael Manoochehri)'
import time
import calendar
import datetime
import httplib2
from google.appengine.api import taskqueue
from google.appengine.api import users
from google.appengine.ext import blobstore
from google.appengine.ext import db
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
from google.appengine.ext.webapp import blobstore_handlers
from google.appengine.ext.webapp import util
from google.appengine.ext.webapp import template
from mapreduce.lib import files
from mapreduce import base_handler
from mapreduce import mapreduce_pipeline
from apiclient.discovery import build
from oauth2client.appengine import AppAssertionCredentials
# OAuth scope required to submit BigQuery load jobs.
SCOPE = 'https://www.googleapis.com/auth/bigquery'
PROJECT_ID = 'XXXXXXXXXXXX' # Your Project ID here
# Target BigQuery dataset for the ingested tables.
BQ_DATASET_ID = 'datastore_data'
# Cloud Storage bucket that receives the mapper's CSV output shards.
GS_BUCKET = 'datastore_csvoutput'
# Fully qualified entity kind handed to the DatastoreInputReader.
ENTITY_KIND = 'main.ProductSalesData'
class ProductSalesData(db.Model):
    """Datastore model for one product sale record.

    Exported to BigQuery by datastore_map as (product_id, date, store).
    """
    # Numeric product identifier.
    product_id = db.IntegerProperty(required=True)
    # Entity timestamp; auto_now/auto_now_add set it at every write (UTC).
    date = db.DateTimeProperty(verbose_name=None,
                               auto_now=True,
                               auto_now_add=True)
    # Store name, e.g. 'Store 3'.
    store = db.StringProperty(required=True)
class DatastoreMapperPipeline(base_handler.PipelineBase):
    """Pipeline: dump Datastore entities to CSV in Cloud Storage, then chain
    a CloudStorageToBigQuery stage that loads the CSV into BigQuery.
    """

    def run(self, entity_type):
        # MapperPipeline reads entities of entity_type via the
        # DatastoreInputReader, runs main.datastore_map over each one, and
        # writes the emitted CSV lines to GS_BUCKET ("none" sharding: the
        # writer still produces one file per shard, unsorted).
        output = yield mapreduce_pipeline.MapperPipeline(
            "Datastore Mapper %s" % entity_type,
            "main.datastore_map",
            "mapreduce.input_readers.DatastoreInputReader",
            output_writer_spec="mapreduce.output_writers.FileOutputWriter",
            params={
                "input_reader": {
                    "entity_kind": entity_type,
                },
                "output_writer": {
                    "filesystem": "gs",
                    "gs_bucket_name": GS_BUCKET,
                    "output_sharding": "none",
                }
            },
            shards=12)
        # Chain the BigQuery ingestion stage on the mapper's output files.
        yield CloudStorageToBigQuery(output)
class CloudStorageToBigQuery(base_handler.PipelineBase):
    """Pipeline stage: load the mapper's Cloud Storage CSV output into a
    new date-stamped BigQuery table.
    """

    def run(self, csv_output):
        # Authorize as the App Engine service account for the BigQuery scope.
        credentials = AppAssertionCredentials(scope=SCOPE)
        http = credentials.authorize(httplib2.Http())
        bigquery_service = build("bigquery", "v2", http=http)
        jobs = bigquery_service.jobs()
        # Date-stamped table name so repeated runs do not collide.
        table_name = 'datastore_data_%s' % datetime.datetime.utcnow().strftime(
            '%m%d%Y_%H%M%S')
        # Rewrite the mapper's /gs/<bucket>/... file paths into gs:// URIs.
        # (Renamed from `files` to stop shadowing the imported
        # mapreduce.lib.files module.)
        gcs_uris = [str(f.replace('/gs/', 'gs://')) for f in csv_output]
        result = jobs.insert(projectId=PROJECT_ID,
                             body=build_job_data(table_name, gcs_uris))
        result.execute()
def build_job_data(table_name, files):
    """Assemble the request body for a BigQuery CSV load job.

    Args:
      table_name: destination table id inside BQ_DATASET_ID.
      files: list of gs:// source URIs.
    Returns:
      Dict suitable for jobs().insert(body=...).
    """
    schema_fields = [
        {"name": "product_id", "type": "INTEGER", },
        {"name": "date", "type": "INTEGER", },
        {"name": "store", "type": "STRING", },
    ]
    load_config = {
        "sourceUris": files,
        "schema": {"fields": schema_fields},
        "destinationTable": {
            "projectId": PROJECT_ID,
            "datasetId": BQ_DATASET_ID,
            "tableId": table_name,
        },
        # Fail the job on any malformed row.
        "maxBadRecords": 0,
    }
    return {"projectId": PROJECT_ID,
            "configuration": {"load": load_config}}
def datastore_map(entity_type):
    """Map one Datastore entity to a quoted CSV line for the output writer."""
    entity = db.to_dict(entity_type)
    fields = (entity.get('product_id'),
              timestamp_to_posix(entity.get('date')),
              entity.get('store'))
    line = ','.join('"%s"' % value for value in fields)
    yield ("%s\n" % line)
def timestamp_to_posix(timestamp):
    """Convert a datetime to integer POSIX seconds.

    Datastore DateTimeProperty values are naive UTC datetimes, so use
    calendar.timegm (which treats the tuple as UTC) instead of time.mktime,
    which would interpret it in the server's local timezone and skew the
    exported value.  (The module already imports calendar for this.)
    """
    return int(calendar.timegm(timestamp.timetuple()))
class DatastoretoBigQueryStart(webapp.RequestHandler):
    """Kick off the Datastore-to-BigQuery mapper pipeline."""

    def get(self):
        """Start the pipeline and redirect to its status page."""
        mapper = DatastoreMapperPipeline(ENTITY_KIND)
        mapper.start()
        status_path = mapper.base_path + "/status?root=" + mapper.pipeline_id
        self.redirect(status_path)
class AddDataHandler(webapp.RequestHandler):
    """Seed the Datastore with sample ProductSalesData entities."""

    def get(self):
        """Write nine sample entities (#0..#8) and report progress."""
        for index in range(0, 9):
            entity = ProductSalesData(product_id=index,
                                      store='Store %s' % str(index))
            self.response.out.write(
                'Added sample Datastore entity #%s<br />' % str(index))
            entity.put()
        self.response.out.write('<a href="/start">Click here</a> to start the Datastore to BigQuery pipeline.')
# URL routes: /start launches the pipeline, /add_data seeds sample entities.
application = webapp.WSGIApplication(
    [('/start', DatastoretoBigQueryStart),
     ('/add_data', AddDataHandler)],
    debug=True)
def main():
    """CGI entry point: serve the WSGI application."""
    run_wsgi_app(application)


if __name__ == "__main__":
    main()
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
"""Library to make BigQuery v2 client requests."""
__author__ = 'kbrisbin@google.com (Kathryn Hurley)'
import cgi
import errors
import logging
from apiclient.discovery import build
from apiclient.errors import HttpError
# Default number of milliseconds getQueryResults waits for job completion.
TIMEOUT_MS = 1000
# BigQuery API version built when the caller supplies none.
BIGQUERY_API_VERSION = 'v2'
class BigQueryClient(object):
    """BigQuery version 2 client."""

    def __init__(self, project_id, api_version=BIGQUERY_API_VERSION):
        """Creates the BigQuery client connection.

        Args:
          project_id: either the numeric ID or your registered ID.
            This defines the project to receive the bill for query usage.
          api_version: version of BigQuery API to construct.
        """
        # Built without an http argument: every request is executed later
        # with the caller's per-user authorized Http instance.
        self.service = build('bigquery', api_version)
        self.project_id = project_id

    def query(self, authorized_http, query):
        """Issues an synchronous query to bigquery v2.

        Args:
          authorized_http: the authorized Http instance.
          query: string SQL query to run.
        Returns:
          The string job reference.
        Raises:
          QueryError if the query fails.
        """
        logging.info(query)
        job_collection = self.service.jobs()
        job_data = {
            'projectId': self.project_id,
            'configuration': {
                'query': {
                    'query': query
                }
            }
        }
        request = job_collection.insert(
            projectId=self.project_id,
            body=job_data)
        try:
            # The authorized http is passed positionally to execute().
            response = request.execute(authorized_http)
        except HttpError:
            # NOTE(review): the HttpError detail is discarded here; the poll
            # path below logs it — consider doing the same for consistency.
            raise errors.QueryError
        return response['jobReference']['jobId']

    def poll(self, authorized_http, job_id, timeout_ms=TIMEOUT_MS):
        """Polls the job to get results.

        Args:
          authorized_http: the authorized Http instance.
          job_id: the running job.
          timeout_ms: the number of milliseconds to wait for results.
        Returns:
          The job results.
        Raises:
          PollError when the poll fails.
        """
        job_collection = self.service.jobs()
        request = job_collection.getQueryResults(
            projectId=self.project_id,
            jobId=job_id,
            timeoutMs=timeout_ms)
        try:
            response = request.execute(authorized_http)
        except HttpError, err:
            logging.error(cgi.escape(err._get_reason()))
            raise errors.PollError
        # When the job finished, attach type-converted result tuples under
        # 'formattedRows'; otherwise return the incomplete response as-is.
        if 'jobComplete' in response:
            complete = response['jobComplete']
            if complete:
                # NOTE(review): assumes 'rows' is present; a completed query
                # with zero result rows may omit it — confirm upstream.
                rows = response['rows']
                schema = response['schema']
                converter = self.Converter(schema)
                formatted_rows = []
                for row in rows:
                    formatted_rows.append(converter.convert_row(row))
                response['formattedRows'] = formatted_rows
        return response

    class Converter(object):
        """Does schema-based type conversion of result data."""

        def __init__(self, schema_row):
            """Sets up the schema converter.

            Args:
              schema_row: a dict containing BigQuery schema definitions.
            """
            # Field types, in column order, drive per-cell conversion below.
            self.schema = []
            for field in schema_row['fields']:
                self.schema.append(field['type'])

        def convert_row(self, row):
            """Converts a row of data into a tuple with type conversion applied.

            Args:
              row: a row of BigQuery data.
            Returns:
              A tuple with the converted data values for the row.
            """
            i = 0
            data = []
            # BigQuery rows arrive as {'f': [{'v': value}, ...]}.
            for entry in row['f']:
                data.append(self.convert(entry['v'], self.schema[i]))
                i += 1
            return tuple(data)

        def convert(self, entry, schema_type):
            """Converts an entry based on the schema type given.

            Args:
              entry: the data entry to convert.
              schema_type: appropriate type for the entry.
            Returns:
              The data entry, either as passed in, or converted to the given type.
            """
            # NULL cells stay None regardless of the declared type.
            if schema_type == u'FLOAT' and entry is not None:
                return float(entry)
            elif schema_type == u'INTEGER' and entry is not None:
                return int(entry)
            else:
                return entry
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
"""BigQuery App Engine demo.
Demos how to start a BigQuery job running, then poll the job
to get the results when it's complete.
"""
__author__ = 'kbrisbin@google.com (Kathryn Hurley)'
import bigqueryv2
import errors
import httplib2
import os
import simplejson
from oauth2client.appengine import oauth2decorator_from_clientsecrets
from google.appengine.api import memcache
from google.appengine.ext import webapp
from google.appengine.ext.webapp import template
# Project ID for your BigQuery Project in the API Console
PROJECT_ID = '[YOUR PROJECT ID]'
# CLIENT_SECRETS, name of a file containing the OAuth 2.0 information for this
# application, including client_id and client_secret, which are found
# on the API Access tab on the Google APIs
# Console <http://code.google.com/apis/console>
CLIENT_SECRETS = os.path.join(os.path.dirname(__file__), 'client_secrets.json')
# OAuth 2.0 decorator for the BigQuery scope; renders templates/error.html
# when client_secrets.json is missing or malformed.
decorator = oauth2decorator_from_clientsecrets(
    filename=CLIENT_SECRETS,
    scope='https://www.googleapis.com/auth/bigquery',
    message=template.render(
        os.path.join(os.path.dirname(__file__), 'templates/error.html'),
        {'clientSecrets': CLIENT_SECRETS}))
# Module-level client shared by all request handlers.
BQ = bigqueryv2.BigQueryClient(PROJECT_ID)
class MainHandler(webapp.RequestHandler):
    """Display the index page."""

    @decorator.oauth_aware
    def get(self):
        """Main handler.

        Displays index page if logged in.
        Otherwise, starts OAuth 2.0 dance.
        """
        path = os.path.join(os.path.dirname(__file__), 'templates/index.html')
        if decorator.has_credentials():
            self.redirect('/about')
            # Stop here: without this return the index page body was also
            # written after the redirect headers had been set.
            return
        variables = {'url': decorator.authorize_url()}
        self.response.out.write(template.render(path, variables))
class QueryPage(webapp.RequestHandler):
    """Display the query page."""

    @decorator.oauth_required
    def get(self):
        """Render the query HTML template."""
        template_path = os.path.join(
            os.path.dirname(__file__), 'templates/query.html')
        self.response.out.write(template.render(template_path, {}))
class QueryHandler(webapp.RequestHandler):
    """Handle queries to BigQuery."""

    @decorator.oauth_required
    def get(self):
        """Poll the job to see if it's complete."""
        authorized_http = decorator.http()
        job_id = self.request.get('jobId')
        try:
            poll_result = BQ.poll(authorized_http, job_id)
        except errors.PollError:
            self.response.set_status(500, 'Error during Poll')
        else:
            # Success: return the poll result as JSON.
            self.response.headers['Content-Type'] = 'application/json'
            self.response.out.write(simplejson.dumps(poll_result))

    @decorator.oauth_required
    def post(self):
        """Post a new query job to BigQuery."""
        authorized_http = decorator.http()
        query = self.request.get('query')
        try:
            job_id = BQ.query(authorized_http, query)
        except errors.QueryError:
            self.response.set_status(500, 'Error during Query')
        else:
            # Success: hand the job id back so the client can poll.
            self.response.headers['Content-Type'] = 'application/json'
            self.response.out.write(simplejson.dumps({'jobId': job_id}))
# URL routes: / (OAuth-aware index), /about (query page), /query (job API).
app = webapp.WSGIApplication(
    [
        ('/', MainHandler),
        ('/about', QueryPage),
        ('/query', QueryHandler),
    ],
    debug=True
)
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
"""Any errors occurring during execution.
- Error during query.
- Error during poll.
"""
class Error(Exception):
    """Base exception for this package; callers may catch it to handle
    both query and poll failures uniformly."""
    pass


class QueryError(Error):
    """Exception raised for errors during query."""
    pass


class PollError(Error):
    """Exception raised for errors during job poll."""
    pass
| Python |
#!/usr/bin/env python
#
# Copyright 2012 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import bqclient
import httplib2
import logging
import os
from django.utils import simplejson as json
from google.appengine.api import memcache
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
from google.appengine.ext.webapp.template import render
from oauth2client.appengine import oauth2decorator_from_clientsecrets
# CLIENT_SECRETS, name of a file containing the OAuth 2.0 information for this
# application, including client_id and client_secret, which are found
# on the API Access tab on the Google APIs
# Console <http://code.google.com/apis/console>
CLIENT_SECRETS = os.path.join(os.path.dirname(__file__), 'client_secrets.json')
# BILLING_PROJECT_ID for a project where you and your users
# are viewing members. This is where the bill will be sent.
# During the limited availability preview, there is no bill.
# Replace the BILLING_PROJECT_ID value with the Client ID value
# from your project, the same numeric value you used in client_secrets.json
BILLING_PROJECT_ID = "99999999999"
# Public dataset queried below (not billed to DATA_PROJECT_ID).
DATA_PROJECT_ID = "publicdata"
DATASET = "samples"
TABLE = "natality"
# Average gestation weeks per US state, 1991-2004.
QUERY = """
select state, SUM(gestation_weeks) / COUNT(gestation_weeks) as weeks
from publicdata:samples.natality
where year > 1990 and year < 2005 and IS_EXPLICITLY_DEFINED(gestation_weeks)
group by state order by weeks
"""
decorator = oauth2decorator_from_clientsecrets(CLIENT_SECRETS,
    'https://www.googleapis.com/auth/bigquery')
# httplib2 response cache backed by App Engine memcache.
http = httplib2.Http(memcache)
bq = bqclient.BigQueryClient(http, decorator)
class MainHandler(webapp.RequestHandler):

    def _bq2geo(self, bqdata):
        """geodata output for region maps must be in the format region, value.

        Column names come from the BigQuery result schema; each row becomes a
        Google Visualization cell pair ('US-<state>', value).
        """
        logging.info(bqdata)
        geo_col = bqdata['schema']['fields'][0]['name']
        val_col = bqdata['schema']['fields'][1]['name']
        logging.info("Column Names=%s, %s" % (geo_col, val_col))
        geodata = {'cols': ({'id': geo_col, 'label': geo_col, 'type': 'string'},
                            {'id': val_col, 'label': val_col, 'type': 'number'})}
        geodata['rows'] = []
        logging.info(geodata)
        for record in bqdata['rows']:
            cells = {'c': [{'v': 'US-' + record['f'][0]['v']},
                           {'v': record['f'][1]['v']}]}
            geodata['rows'].append(cells)
        logging.info('FINAL GEODATA---')
        logging.info(geodata)
        return json.dumps(geodata)

    @decorator.oauth_required
    def get(self):
        logging.info('Last mod time: %s' % bq.getLastModTime(
            DATA_PROJECT_ID, DATASET, TABLE))
        data = {'data': self._bq2geo(bq.Query(QUERY, BILLING_PROJECT_ID)),
                'query': QUERY}
        template = os.path.join(os.path.dirname(__file__), 'index.html')
        self.response.out.write(render(template, data))
# Single route: the geo-chart page.
application = webapp.WSGIApplication([
    ('/', MainHandler),
], debug=True)


def main():
    """CGI entry point: serve the WSGI application."""
    run_wsgi_app(application)


if __name__ == '__main__':
    main()
| Python |
import httplib2
from apiclient.discovery import build
from oauth2client.appengine import oauth2decorator_from_clientsecrets
class BigQueryClient(object):
    """Thin BigQuery v2 wrapper that defers OAuth to an App Engine decorator."""

    def __init__(self, http, decorator):
        """Creates the BigQuery client connection"""
        self.service = build('bigquery', 'v2', http=http)
        self.decorator = decorator

    def getTableData(self, project, dataset, table):
        # The credentials must already exist before you call decorator.http()
        # So you cannot pre-generate 'decorated' in the BigQueryClient constructor,
        # only from within a method protected by .oauth_required
        authed_http = self.decorator.http()
        request = self.service.tables().get(
            projectId=project, datasetId=dataset, tableId=table)
        return request.execute(authed_http)

    def getLastModTime(self, project, dataset, table):
        """Return the table's lastModifiedTime, or None when unavailable."""
        info = self.getTableData(project, dataset, table)
        if info is None:
            return None
        return info.get('lastModifiedTime')

    def Query(self, query, project, timeout_ms=10000):
        """Run a synchronous query and return the raw response dict."""
        body = {
            'query': query,
            'timeoutMs': timeout_ms
        }
        authed_http = self.decorator.http()
        return (self.service.jobs()
                .query(projectId=project, body=body)
                .execute(authed_http))
| Python |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2010 Dimitrios Georgiou <dim.geo at gmail.com>
#
# This program can be distributed under the terms of the GPLv3.
#
import os, sys, shelve, StringIO, bsdiff, pickle, zlib
from errno import *
from stat import *
import fuse
from fuse import Fuse
# Fail fast when the installed fuse-py predates the __version__ attribute.
if not hasattr(fuse, '__version__'):
    raise RuntimeError, \
        "your fuse-py doesn't know of fuse.__version__, probably it's too old."
# FUSE python API generation this filesystem implements.
fuse.fuse_python_api = (0, 2)
fuse.feature_assert('stateful_files', 'has_init')
# In-memory buffers for files currently open, keyed by "./<path>".
dfiles = dict()
# Holds the shelve database path under key 'a' (set in FDiff.fsinit).
datastore = dict()
def objecttozip(data):
    """Pickle *data* and return the zlib-compressed byte string."""
    pickled = pickle.dumps(data)
    return zlib.compress(pickled)
def ziptoobject(zdata):
    """Inverse of objecttozip: decompress *zdata* and unpickle the object."""
    decompressed = zlib.decompress(zdata)
    return pickle.loads(decompressed)
def getoriginalname(name):
    """Return the original backing path recorded for *name*.

    Looks *name* up directly in the persistence shelf; falls back to the
    prefix-based mapping of getoriginalpath().  The original version's
    dper.close() sat after both return statements and was unreachable,
    leaking the shelf handle — the try/finally fixes that.
    """
    dper = shelve.open(datastore['a'], flag='r')
    try:
        if name in dper:
            return dper[name][2]
        return getoriginalpath(name)
    finally:
        dper.close()
def getoriginalpath(path):
    """Map a virtual *path* to its original backing path.

    Finds the shelf key that is a prefix of *path* and has the most path
    components (most '/' characters), then substitutes its stored original
    prefix.  Returns *path* unchanged when no key matches.

    The slash counting originally used the Python-2-only idiom
    len(key) - len(key.translate(None, '/')); key.count('/') is equivalent
    and portable.
    NOTE(review): str.replace substitutes every occurrence of the prefix,
    not just the leading one — confirm paths can never repeat a prefix.
    """
    dper = shelve.open(datastore['a'], flag='r')
    try:
        best_key = ''
        best_depth = 0  # '/' count of the deepest matching prefix
        for key in dper.keys():
            if path.startswith(key):
                depth = key.count('/')
                if depth > best_depth:
                    best_key = key
                    best_depth = depth
        if best_key != '':
            realpath = path.replace(best_key, dper[best_key][2])
        else:
            realpath = path
        return realpath
    finally:
        dper.close()
def getoriginalfile(path):
    """Load the backing file for *path* into a fresh in-memory buffer,
    positioned at the start."""
    realpath = getoriginalpath(path)
    buf = StringIO.StringIO()
    with open(realpath, 'rb') as source:
        buf.write(source.read())
    buf.seek(0)
    return buf
def getmodifiedfile(path):
    """Return a StringIO holding the current (patched) contents for *path*.

    When the shelf has an entry for *path*, the stored bsdiff delta is
    applied to the original backing file; an empty delta string means the
    entry only records a rename and the content is unchanged.  Paths with
    no entry fall through to the raw backing file.
    """
    modifiedfile = StringIO.StringIO()
    dper = shelve.open(datastore['a'], flag='r')
    if dper.has_key(path):
        zdiff, new_len, oldpath = dper[path]
        original_file = getoriginalfile(oldpath)
        original_data = original_file.read()
        original_file.close()
        if zdiff == '':
            # Rename-only entry: content equals the original.
            modifiedfile.write(original_data)
        else:
            newdiff = ziptoobject(zdiff)
            # bsdiff.Patch(original, new_length, ctrl, diff_block, extra_block)
            new_data = bsdiff.Patch(original_data, new_len, newdiff[0], newdiff[1], newdiff[2])
            modifiedfile.write(new_data)
    else:
        original_file = getoriginalfile(path)
        original_data = original_file.read()
        original_file.close()
        modifiedfile.write(original_data)
    modifiedfile.seek(0)
    dper.close()
    return modifiedfile
def setmodifiedfile(path, newfile):
    """Persist *newfile*'s contents for *path* as a compressed bsdiff delta.

    Compares against the currently stored contents and, only when they
    differ, diffs the new data against the ORIGINAL backing file (never the
    previous patched state, so deltas do not chain) and stores
    (zipped_diff, new_length, original_path) under *path*.  Restores
    *newfile*'s seek position before returning.
    """
    modfile = getmodifiedfile(path)
    moddata = modfile.read()
    modfile.close()
    pos = newfile.tell()
    newfile.seek(0)
    newdata = newfile.read()
    newfile.seek(pos)
    if moddata != newdata:
        savedoldpath = path
        olddata = ''
        dper = shelve.open(datastore['a'], flag='r')
        if dper.has_key(path):
            # Reuse the original path recorded by an earlier write/rename.
            zdiff, new_len, oldpath = dper[path]
            oldfile = getoriginalfile(oldpath)
            olddata = oldfile.read()
            oldfile.close()
            savedoldpath = oldpath
        else:
            savedoldpath = getoriginalpath(path)
            oldfile = getoriginalfile(path)
            olddata = oldfile.read()
            oldfile.close()
        dper.close()
        # Diff new content against the pristine original.
        diff = bsdiff.Diff(olddata, newdata)
        full_diff = objecttozip(diff), len(newdata), savedoldpath
        dpersistence = shelve.open(datastore['a'], flag='w', writeback=True)
        dpersistence[path] = full_diff
        dpersistence.close()
class MyStat(fuse.Stat):
    """Mutable stat() result with every field initialized to zero."""

    def __init__(self):
        for field in ('st_mode', 'st_ino', 'st_dev', 'st_nlink', 'st_uid',
                      'st_gid', 'st_size', 'st_atime', 'st_mtime', 'st_ctime'):
            setattr(self, field, 0)
class FDiff(Fuse):
    """FUSE filesystem that mirrors a directory tree and stores every file
    modification as a compressed bsdiff delta in a shelve database.

    Shelf entries map a virtual path "./<path>" to a tuple
    (zipped_diff, new_length, original_path); an empty diff string means the
    entry only records a rename and the content equals the original file.
    """

    def __init__(self, *args, **kw):
        Fuse.__init__(self, *args, **kw)
        # Defaults; overridable at mount time via -o root=... / -o datastorage=...
        self.root = '/home'
        self.datastorage = '/home/datapersistence'

    def getattr(self, path):
        """stat() handler: real-file metadata with st_size corrected to the
        patched length, or to the live buffer's length if the file is open."""
        st = MyStat()
        name = "." + path
        realname = getoriginalname(name)
        mdata = os.lstat(realname)
        st.st_mode = mdata.st_mode
        st.st_ino = mdata.st_ino
        st.st_dev = mdata.st_dev
        st.st_nlink = mdata.st_nlink
        st.st_uid = mdata.st_uid
        st.st_gid = mdata.st_gid
        st.st_size = mdata.st_size
        st.st_atime = mdata.st_atime
        st.st_mtime = mdata.st_mtime
        st.st_ctime = mdata.st_ctime
        dper = shelve.open(self.datastorage, flag='r')
        if dper.has_key(name):
            # Patched files report the stored post-patch length.
            st.st_size = dper[name][1]
        dper.close()
        if dfiles.has_key(name) and not dfiles[name].closed:
            # File currently open: trust the live in-memory buffer's length,
            # restoring its seek position afterwards.
            pos = dfiles[name].tell()
            dfiles[name].seek(0)
            st.st_size = len(dfiles[name].read())
            dfiles[name].seek(pos)
        return st

    def readdir(self, path, offset):
        """Directory listing: real entries with shelf renames applied."""
        lspath = "." + path
        reallspath = getoriginalpath(lspath)
        oldlist = os.listdir(reallspath)
        dper = shelve.open(self.datastorage, flag='r')
        for k, v in dper.iteritems():
            # Compute the entry's new and old names relative to this
            # directory; direct children (no remaining '/') swap old for new.
            if path != '/':
                newname = k.replace(lspath + '/', '')
                oldname = v[2].replace(reallspath + '/', '')
            else:
                newname = k.replace(lspath, '')
                oldname = v[2].replace(reallspath, '')
            if oldname.find('/') == -1:
                oldlist.remove(oldname)
            if newname.find('/') == -1:
                oldlist.append(newname)
        dper.close()
        print '\n'
        for e in oldlist:
            yield fuse.Direntry(e)

    def rename(self, path, path1):
        """Rename by rewriting shelf keys; the on-disk tree is never touched."""
        oldname = "." + path
        newname = "." + path1
        if oldname == newname:
            return
        dper = shelve.open(self.datastorage, flag='w', writeback=True)
        for key in dper.keys():
            if key.startswith(oldname):
                newkey = key.replace(oldname, newname)
                # Move exact matches or children (next char is '/') only,
                # not prefix collisions like './foo' vs './foobar'.
                if len(newkey) == len(newname) or newkey[len(newname)] == '/':
                    dper[newkey] = dper[key]
                    del dper[key]
                    print key
        dper.close()
        realoldname = getoriginalpath(oldname)
        dper = shelve.open(self.datastorage, flag='w', writeback=True)
        if not dper.has_key(newname):
            # First rename of an untouched file: record an empty diff that
            # simply aliases the new name to the original backing path.
            mdata = os.lstat(realoldname)
            dper[newname] = '', mdata.st_size, realoldname
        dper.close()

    def truncate(self, path, len):
        """Truncate either the live open buffer or a freshly patched copy,
        then persist the result as a delta."""
        if dfiles.has_key("." + path) and not dfiles["." + path].closed:
            dfiles["." + path].truncate(len)
            setmodifiedfile("." + path, dfiles["." + path])
        else:
            myfile = getmodifiedfile("." + path)
            myfile.truncate(len)
            setmodifiedfile("." + path, myfile)
            myfile.close()

    def unlink(self, path):
        """Delete only the shelf entry; original files are left untouched."""
        dper = shelve.open(self.datastorage, flag='w', writeback=True)
        if dper.has_key("." + path):
            del dper["." + path]
        dper.close()

    def statfs(self):
        """Report statistics of the mirrored (current working) filesystem."""
        return os.statvfs(".")

    def fsinit(self):
        # Ensure the shelf exists, publish its location to the module-level
        # helpers, then chdir so "./" paths resolve under the mirrored root.
        dper = shelve.open(self.datastorage, flag='c', writeback=True)
        dper.close()
        datastore['a'] = self.datastorage
        os.chdir(self.root)

    class FDiffFile(object):
        """Per-open-file object: reads/writes go through an in-memory copy;
        flush/fsync persist it back as a bsdiff delta."""

        def __init__(self, path, flags, *mode):
            self.nam = "." + path
            # Register the buffer so getattr() can report live sizes.
            dfiles["." + path] = self.file = getmodifiedfile("." + path)

        def read(self, length, offset):
            self.file.seek(offset)
            return self.file.read(length)

        def write(self, buf, offset):
            self.file.seek(offset)
            self.file.write(buf)
            return len(buf)

        def release(self, flags):
            self.file.close()

        def _fflush(self):
            # Persist the current buffer contents as a delta.
            setmodifiedfile(self.nam, self.file)

        def fsync(self, isfsyncfile):
            self._fflush()

        def flush(self):
            self._fflush()

        def fgetattr(self):
            """stat() for an open handle; st_size comes from the live buffer."""
            st = MyStat()
            realname = getoriginalname(self.nam)
            mdata = os.stat(realname)
            st.st_mode = mdata.st_mode
            st.st_ino = mdata.st_ino
            st.st_dev = mdata.st_dev
            st.st_nlink = mdata.st_nlink
            st.st_uid = mdata.st_uid
            st.st_gid = mdata.st_gid
            st.st_size = mdata.st_size
            st.st_atime = mdata.st_atime
            st.st_mtime = mdata.st_mtime
            st.st_ctime = mdata.st_ctime
            dper = shelve.open(datastore['a'], flag='r')
            if dper.has_key(self.nam):
                st.st_size = dper[self.nam][1]
            dper.close()
            # Live buffer length wins over the stored value.
            pos = self.file.tell()
            self.file.seek(0)
            st.st_size = len(self.file.read())
            self.file.seek(pos)
            return st

        def ftruncate(self, len):
            self.file.truncate(len)

    def main(self, *a, **kw):
        # Use our stateful file class for all per-file operations.
        self.file_class = self.FDiffFile
        return Fuse.main(self, *a, **kw)
def main():
    """Parse mount options, chdir to the mirrored root and run the server."""
    usage = """
mirror the filesystem tree from some point on. Store differences in a file.
""" + Fuse.fusage
    server = FDiff(version="%prog " + fuse.__version__,
                   usage=usage,
                   dash_s_do='setsingle')
    # Both options are settable as -o mount options (values=server below
    # stores them directly on the FDiff instance).
    server.parser.add_option(mountopt="root", metavar="PATH", default='/',
                             help="mirror filesystem from under PATH [default: %default]")
    server.parser.add_option(mountopt="datastorage", metavar="PATH", default='/home/datapersistence',
                             help="file which stores data [default: %default]")
    server.parse(values=server, errex=1)
    try:
        if server.fuse_args.mount_expected():
            os.chdir(server.root)
    except OSError:
        print >> sys.stderr, "can't enter root of underlying filesystem"
        sys.exit(1)
    server.main()


if __name__ == '__main__':
    main()
| Python |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2010 Dimitrios Georgiou <dim.geo at gmail.com>
#
# This program can be distributed under the terms of the GPLv3.
#
import os, sys, shelve, StringIO, bsdiff, pickle, zlib
from errno import *
from stat import *
import fuse
from fuse import Fuse
# Fail fast when the installed fuse-py predates the __version__ attribute.
if not hasattr(fuse, '__version__'):
    raise RuntimeError, \
        "your fuse-py doesn't know of fuse.__version__, probably it's too old."
# FUSE python API generation this filesystem implements.
fuse.fuse_python_api = (0, 2)
fuse.feature_assert('stateful_files', 'has_init')
# In-memory buffers for files currently open, keyed by "./<path>".
dfiles = dict()
# Holds the shelve database path under key 'a' (set in FDiff.fsinit).
datastore = dict()
def objecttozip(data):
    """Pickle *data* and return the zlib-compressed byte string."""
    pickled = pickle.dumps(data)
    return zlib.compress(pickled)
def ziptoobject(zdata):
    """Inverse of objecttozip: decompress *zdata* and unpickle the object."""
    decompressed = zlib.decompress(zdata)
    return pickle.loads(decompressed)
def getoriginalname(name):
    """Return the original backing path recorded for *name*.

    Looks *name* up directly in the persistence shelf; falls back to the
    prefix-based mapping of getoriginalpath().  The original version's
    dper.close() sat after both return statements and was unreachable,
    leaking the shelf handle — the try/finally fixes that.
    """
    dper = shelve.open(datastore['a'], flag='r')
    try:
        if name in dper:
            return dper[name][2]
        return getoriginalpath(name)
    finally:
        dper.close()
def getoriginalpath(path):
    """Map a virtual *path* to its original backing path.

    Finds the shelf key that is a prefix of *path* and has the most path
    components (most '/' characters), then substitutes its stored original
    prefix.  Returns *path* unchanged when no key matches.

    The slash counting originally used the Python-2-only idiom
    len(key) - len(key.translate(None, '/')); key.count('/') is equivalent
    and portable.
    NOTE(review): str.replace substitutes every occurrence of the prefix,
    not just the leading one — confirm paths can never repeat a prefix.
    """
    dper = shelve.open(datastore['a'], flag='r')
    try:
        best_key = ''
        best_depth = 0  # '/' count of the deepest matching prefix
        for key in dper.keys():
            if path.startswith(key):
                depth = key.count('/')
                if depth > best_depth:
                    best_key = key
                    best_depth = depth
        if best_key != '':
            realpath = path.replace(best_key, dper[best_key][2])
        else:
            realpath = path
        return realpath
    finally:
        dper.close()
def getoriginalfile(path):
    """Load the backing file for *path* into a fresh in-memory buffer,
    positioned at the start."""
    realpath = getoriginalpath(path)
    buf = StringIO.StringIO()
    with open(realpath, 'rb') as source:
        buf.write(source.read())
    buf.seek(0)
    return buf
def getmodifiedfile(path):
    """Return a StringIO holding the current (patched) contents for *path*.

    When the shelf has an entry for *path*, the stored bsdiff delta is
    applied to the original backing file; an empty delta string means the
    entry only records a rename and the content is unchanged.  Paths with
    no entry fall through to the raw backing file.
    """
    modifiedfile = StringIO.StringIO()
    dper = shelve.open(datastore['a'], flag='r')
    if dper.has_key(path):
        zdiff, new_len, oldpath = dper[path]
        original_file = getoriginalfile(oldpath)
        original_data = original_file.read()
        original_file.close()
        if zdiff == '':
            # Rename-only entry: content equals the original.
            modifiedfile.write(original_data)
        else:
            newdiff = ziptoobject(zdiff)
            # bsdiff.Patch(original, new_length, ctrl, diff_block, extra_block)
            new_data = bsdiff.Patch(original_data, new_len, newdiff[0], newdiff[1], newdiff[2])
            modifiedfile.write(new_data)
    else:
        original_file = getoriginalfile(path)
        original_data = original_file.read()
        original_file.close()
        modifiedfile.write(original_data)
    modifiedfile.seek(0)
    dper.close()
    return modifiedfile
def setmodifiedfile(path, newfile):
    """Persist *newfile*'s contents for *path* as a compressed bsdiff delta.

    Compares against the currently stored contents and, only when they
    differ, diffs the new data against the ORIGINAL backing file (never the
    previous patched state, so deltas do not chain) and stores
    (zipped_diff, new_length, original_path) under *path*.  Restores
    *newfile*'s seek position before returning.
    """
    modfile = getmodifiedfile(path)
    moddata = modfile.read()
    modfile.close()
    pos = newfile.tell()
    newfile.seek(0)
    newdata = newfile.read()
    newfile.seek(pos)
    if moddata != newdata:
        savedoldpath = path
        olddata = ''
        dper = shelve.open(datastore['a'], flag='r')
        if dper.has_key(path):
            # Reuse the original path recorded by an earlier write/rename.
            zdiff, new_len, oldpath = dper[path]
            oldfile = getoriginalfile(oldpath)
            olddata = oldfile.read()
            oldfile.close()
            savedoldpath = oldpath
        else:
            savedoldpath = getoriginalpath(path)
            oldfile = getoriginalfile(path)
            olddata = oldfile.read()
            oldfile.close()
        dper.close()
        # Diff new content against the pristine original.
        diff = bsdiff.Diff(olddata, newdata)
        full_diff = objecttozip(diff), len(newdata), savedoldpath
        dpersistence = shelve.open(datastore['a'], flag='w', writeback=True)
        dpersistence[path] = full_diff
        dpersistence.close()
class MyStat(fuse.Stat):
    """Mutable stat() result with every field initialized to zero."""

    def __init__(self):
        for field in ('st_mode', 'st_ino', 'st_dev', 'st_nlink', 'st_uid',
                      'st_gid', 'st_size', 'st_atime', 'st_mtime', 'st_ctime'):
            setattr(self, field, 0)
class FDiff(Fuse):
def __init__(self, *args, **kw):
Fuse.__init__(self, *args, **kw)
self.root = '/home'
self.datastorage='/home/datapersistence'
def getattr(self, path):
st=MyStat()
name="." + path
realname=getoriginalname(name)
mdata = os.lstat(realname)
st.st_mode = mdata.st_mode
st.st_ino = mdata.st_ino
st.st_dev = mdata.st_dev
st.st_nlink = mdata.st_nlink
st.st_uid = mdata.st_uid
st.st_gid = mdata.st_gid
st.st_size = mdata.st_size
st.st_atime = mdata.st_atime
st.st_mtime = mdata.st_mtime
st.st_ctime = mdata.st_ctime
#print name
dper=shelve.open(self.datastorage,flag = 'r')
#for key in dper.keys():
# print key, dper[key][2]
#print '\n'
if dper.has_key(name):
#print dper[name][2]
st.st_size = dper[name][1]
dper.close()
if dfiles.has_key(name) and not dfiles[name].closed:
pos=dfiles[name].tell()
dfiles[name].seek(0)
st.st_size=len(dfiles[name].read())
dfiles[name].seek(pos)
#print st.st_size
return st
def readdir(self, path, offset):
#print "." + path
lspath="." + path
reallspath=getoriginalpath(lspath)
oldlist=os.listdir(reallspath)
#print oldlist
dper=shelve.open(self.datastorage,flag = 'r')
for k,v in dper.iteritems():
if path!='/':
newname=k.replace(lspath+'/','')
oldname=v[2].replace(reallspath+'/','')
else:
newname=k.replace(lspath,'')
oldname=v[2].replace(reallspath,'')
#print newname,oldname
if oldname.find('/') == -1:
oldlist.remove(oldname)
if newname.find('/') == -1:
oldlist.append(newname)
dper.close()
#print oldlist
print '\n'
for e in oldlist:
yield fuse.Direntry(e)
def rename(self, path, path1):
oldname="." + path
newname="." + path1
#print oldname, newname
if oldname == newname:
return
#if not os.path.exists(oldname):
# return
#if os.path.exists(newname):
# return
dper=shelve.open(self.datastorage,flag = 'w', writeback=True)
for key in dper.keys():
if key.startswith(oldname):
newkey=key.replace(oldname,newname)
if len(newkey)==len(newname) or newkey[len(newname)]=='/':
dper[newkey]=dper[key]
del dper[key]
print key
dper.close()
realoldname=getoriginalpath(oldname)
dper=shelve.open(self.datastorage,flag = 'w', writeback=True)
if not dper.has_key(newname):
mdata=os.lstat(realoldname)
dper[newname]='',mdata.st_size,realoldname
#if dper.has_key(oldname):
# dper[newname]=dper[oldname]
# del dper[oldname]
#else:
# mdata=os.lstat(oldname)
# dper[newname]='',mdata.st_size,oldname
dper.close()
def truncate(self, path, len):
    """Truncate the virtual file at *path* to *len* bytes.

    Uses the live open handle when one exists, otherwise materialises
    the modified copy, truncates it and stores it back.
    NOTE: the parameter name 'len' shadows the builtin; kept for
    compatibility with the FUSE binding's call signature.
    """
    if dfiles.has_key("." + path) and not dfiles["." + path].closed:
        dfiles["." + path].truncate(len)
        setmodifiedfile("." + path,dfiles["." + path])
    else:
        myfile=getmodifiedfile("." + path)
        myfile.truncate(len)
        setmodifiedfile("." + path,myfile)
        myfile.close()
def unlink(self, path):
    """Delete *path* from the overlay database only.

    The underlying real file is deliberately left untouched (see the
    commented-out os.unlink below).
    """
    dper=shelve.open(self.datastorage,flag = 'w', writeback=True)
    if dper.has_key("." + path):
        del dper["." + path]
    #else:
    #    os.unlink("." + path)
    dper.close()
def statfs(self):
    """Report filesystem statistics for the current directory.

    fsinit() chdir()s into the mirrored root, so "." here refers to the
    underlying filesystem being mirrored.
    """
    stats = os.statvfs(".")
    return stats
def fsinit(self):
    """Filesystem start-up hook.

    Creates the overlay shelve database if missing (flag='c'), publishes
    its path in the module-level 'datastore' dict (FDiffFile.fgetattr
    reads it from there), and chdir()s into the mirrored root.
    """
    #print self.root
    #print self.datastorage
    dper=shelve.open(self.datastorage,flag = 'c', writeback=True)
    dper.close()
    datastore['a']=self.datastorage
    #print self.fuse_args.mountpoint
    os.chdir(self.root)
class FDiffFile(object):
    """Per-open-file object handed to FUSE.

    Wraps the 'modified copy' of a file (from getmodifiedfile) and
    registers the open handle in the module-level 'dfiles' dict, keyed
    by the virtual path.
    """
    def __init__(self, path, flags, *mode):
        # 'nam' is the virtual-path key used throughout the overlay.
        self.nam="."+path
        dfiles["."+path]=self.file=getmodifiedfile("."+path)
    def read(self, length, offset):
        """Read *length* bytes starting at *offset*."""
        self.file.seek(offset)
        return self.file.read(length)
    def write(self, buf, offset):
        """Write *buf* at *offset*; return the number of bytes written."""
        self.file.seek(offset)
        self.file.write(buf)
        return len(buf)
    def release(self, flags):
        # Called by FUSE on the final close of this handle.
        self.file.close()
    def _fflush(self):
        # Persist the modified copy back into the overlay database.
        setmodifiedfile(self.nam,self.file)
    def fsync(self, isfsyncfile):
        self._fflush()
    def flush(self):
        self._fflush()
    def fgetattr(self):
        """Build a stat result: metadata from the real file, size from
        the overlay record and finally from the live handle (which
        reflects unsaved modifications)."""
        st=MyStat()
        realname=getoriginalname(self.nam)
        mdata = os.stat(realname)
        st.st_mode = mdata.st_mode
        st.st_ino = mdata.st_ino
        st.st_dev = mdata.st_dev
        st.st_nlink = mdata.st_nlink
        st.st_uid = mdata.st_uid
        st.st_gid = mdata.st_gid
        st.st_size = mdata.st_size
        st.st_atime = mdata.st_atime
        st.st_mtime = mdata.st_mtime
        st.st_ctime = mdata.st_ctime
        dper=shelve.open(datastore['a'],flag = 'r')
        if dper.has_key(self.nam):
            st.st_size = dper[self.nam][1]
        dper.close()
        # Measure the live handle without disturbing the file position.
        pos=self.file.tell()
        self.file.seek(0)
        st.st_size=len(self.file.read())
        self.file.seek(pos)
        return st
    def ftruncate(self, len):
        # 'len' shadows the builtin; kept for FUSE API compatibility.
        self.file.truncate(len)
def main(self, *a, **kw):
    """Run the FUSE main loop, using FDiffFile for per-file operations."""
    self.file_class = self.FDiffFile
    return Fuse.main(self, *a, **kw)
def main():
    """Parse command-line/mount options, build the FDiff server, run it.

    Mount options:
      root        -- directory tree to mirror (default '/')
      datastorage -- path of the shelve overlay database
    """
    usage = """
mirror the filesystem tree from some point on. Store differences in a file.
""" + Fuse.fusage
    server = FDiff(version="%prog " + fuse.__version__,
                   usage=usage,
                   dash_s_do='setsingle')
    server.parser.add_option(mountopt="root", metavar="PATH", default='/',
                             help="mirror filesystem from under PATH [default: %default]")
    server.parser.add_option(mountopt="datastorage",metavar="PATH",default='/home/datapersistence',
                             help="file which stores data [default: %default]")
    # With values=server the parsed options become attributes of the server.
    server.parse(values=server, errex=1)
    try:
        if server.fuse_args.mount_expected():
            os.chdir(server.root)
    except OSError:
        print >> sys.stderr, "can't enter root of underlying filesystem"
        sys.exit(1)
    server.main()
# Script entry point.
if __name__ == '__main__':
    main()
| Python |
# SConscript: register 'feca' as a header-only library (headers under
# feca/) depending on the 'mili' library.
Import('env')
name = 'feca'
inc = env.Dir('feca')
deps = ['mili']
env.CreateHeaderOnlyLibrary(name, inc, deps)
| Python |
# SConscript: build the 'feca' test binary from every .cpp in this
# directory, linked against googlemock/googletest.
Import('env')
name = 'feca'
inc = env.Dir('.')
src = env.Glob('*.cpp')
deps = ['gmock','gtest_main', 'gtest']
env.CreateTest(name, inc, src, deps)
| Python |
#!/usr/bin/env python
# Standard Django (pre-1.4) management script: import the settings module
# that lives next to this file and hand control to execute_manager.
from django.core.management import execute_manager
try:
    import settings # Assumed to be in the same directory.
except ImportError:
    import sys
    sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
    sys.exit(1)
if __name__ == "__main__":
    execute_manager(settings)
| Python |
from django.db import models
class Order( models.Model ):
    """A coursepack production order, tracked from intake to delivery."""
    # Order information
    orderID = models.CharField( max_length=6, unique=True )
    cpID = models.CharField( max_length=6 )
    professor = models.CharField( max_length=64 )
    # Order production
    status = models.CharField( max_length=16, blank=True )
    producingCenter = models.ForeignKey( 'Center', blank=True )
    dueDateTime = models.DateTimeField( blank=True )
    # Order delivery
    deliveryAddress = models.ForeignKey( 'Address', blank=True )
    def __unicode__( self ):
        # e.g. "CP0001-000042 Smith"
        return '%s-%s %s' % ( self.cpID, self.orderID, self.professor )
    def __repr__( self ):
        return '<Order: %s>' % ( self.__unicode__() )
class Center( models.Model ):
    """A production center that can fulfil orders."""
    number = models.CharField( max_length=4, unique=True )
    name = models.CharField( max_length=64 )
    email = models.EmailField()
    address = models.ForeignKey( 'Address', blank=True )
    def __unicode__( self ):
        return '%s %s' % ( self.number, self.name )
    def __repr__( self ):
        return '<Center: %s>' % ( self.__unicode__() )
class Address( models.Model ):
    """A US postal address with contact information."""
    name = models.CharField( max_length=64 )
    contact = models.CharField( max_length=32, blank=True )
    phone = models.CharField( max_length=12 )
    street = models.CharField( max_length=256 )
    street2 = models.CharField( max_length=16, blank=True )
    city = models.CharField( max_length=32 )
    state = models.CharField( max_length=2 )   # two-letter state code
    zip = models.CharField( max_length=5 )     # 5-digit ZIP
    def __unicode__( self ):
        return self.name
    def __repr__( self ):
        return '<Address: %s>' % ( self.__unicode__() )
| Python |
"""
This file demonstrates two different styles of tests (one doctest and one
unittest). These will both pass when you run "manage.py test".
Replace these with more appropriate tests for your application.
"""
from django.test import TestCase
class SimpleTest(TestCase):
    """Scaffold unit test generated by startapp."""
    def test_basic_addition(self):
        """
        Tests that 1 + 1 always equals 2.
        """
        # assertEqual replaces failUnlessEqual, a deprecated unittest
        # alias slated for removal; behaviour is identical.
        self.assertEqual(1 + 1, 2)
__test__ = {"doctest": """
Another way to test that 1 + 1 is equal to 2.
>>> 1 + 1 == 2
True
"""}
| Python |
from django.conf.urls.defaults import *
from fedexweb.xanedu.views import *
# xanedu URL routes: app index and per-order detail (six-digit orderID).
urlpatterns = patterns( '',
    ( r'^$', index ),
    ( r'^(?P<orderID>\d{6})/$', detail ),
)
| Python |
from django.contrib import admin
from fedexweb.xanedu.models import *
# Admin changelist configuration for the xanedu models.
class OrderAdmin( admin.ModelAdmin ):
    list_display = ( 'status', 'orderID', 'cpID', 'professor', 'producingCenter' )
class CenterAdmin( admin.ModelAdmin ):
    list_display = ( 'number', 'name', 'email' )
class AddressAdmin( admin.ModelAdmin ):
    list_display = ( 'name', 'street', 'street2', 'city', 'state', 'zip' )
# Expose the models in the Django admin site.
admin.site.register( Order, OrderAdmin )
admin.site.register( Center, CenterAdmin )
admin.site.register( Address, AddressAdmin )
| Python |
from django.http import HttpResponse
def index( request ):
    """Placeholder index view for the xanedu app."""
    return HttpResponse( 'Welcome to xanedu index' )
def detail( request, orderID ):
    """Placeholder detail view; echoes the requested orderID."""
    return HttpResponse( 'Welcome to xanedu %s' % orderID )
| Python |
from django.conf.urls.defaults import *
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
# Project-level URL routing: admin plus the two apps (pud, xanedu).
urlpatterns = patterns('',
    # Example:
    # (r'^fedexweb/', include('fedexweb.foo.urls')),
    # Uncomment the admin/doc line below and add 'django.contrib.admindocs'
    # to INSTALLED_APPS to enable admin documentation:
    (r'^admin/doc/', include('django.contrib.admindocs.urls')),
    # Uncomment the next line to enable the admin:
    ( r'^admin/', include(admin.site.urls) ),
    ( r'^pud/', include( 'fedexweb.pud.urls' ) ),
    ( r'^xanedu/', include( 'fedexweb.xanedu.urls' ) ),
)
| Python |
# Django settings for fedexweb project.
import os
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', 'your_email@domain.com'),
)
MANAGERS = ADMINS
DATABASE_ENGINE = 'sqlite3' # 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
DATABASE_NAME = 'fedexweb.db' # Or path to database file if using sqlite3.
DATABASE_USER = '' # Not used with sqlite3.
DATABASE_PASSWORD = '' # Not used with sqlite3.
DATABASE_HOST = '' # Set to empty string for localhost. Not used with sqlite3.
DATABASE_PORT = '' # Set to empty string for default. Not used with sqlite3.
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = ''
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/media/'
# Make this unique, and don't share it with anybody.
SECRET_KEY = '-^bcw13_qw=4x62_km3h9%sndkm4y#l*b=s5ka32a7)!y5p$hg'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.load_template_source',
'django.template.loaders.app_directories.load_template_source',
# 'django.template.loaders.eggs.load_template_source',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
)
# Resolve the project location from this settings file itself rather than
# the process working directory: the original os.curdir-based values broke
# whenever the server was started from any other directory.
PROJECT_PATH = os.path.dirname( os.path.abspath( __file__ ) )
PROJECT_DIR = os.path.basename( PROJECT_PATH )
ROOT_URLCONF = 'fedexweb.urls'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join( PROJECT_PATH, 'pud/templates' ),
os.path.join( PROJECT_PATH, 'xanedu/templates' ),
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.admin',
'%s.pud' % PROJECT_DIR,
'%s.xanedu' % PROJECT_DIR,
)
| Python |
from django.db import models
from django import forms
class Delivery( models.Model ):
    """A pickup-and-delivery job: packages moved between two addresses
    within a [ready, due] time window."""
    origin = models.ForeignKey( 'Address', related_name='origin', blank=True )
    destination = models.ForeignKey( 'Address', related_name='destination', blank=True )
    ready = models.DateTimeField()   # earliest pickup time
    due = models.DateTimeField()     # delivery deadline
    packages = models.ManyToManyField( 'Package' )
    def __unicode__( self ):
        return '%s -> %s' % ( self.origin, self.destination )
    def __repr__( self ):
        return '<Delivery: %s>' % ( self.__unicode__() )
class Package( models.Model ):
    """A single shippable package."""
    type = models.CharField( max_length=16 )
    size = models.CharField( max_length=16 )
    weight = models.DecimalField( max_digits=5, decimal_places=2 )
    def __unicode__( self ):
        return self.type
    def __repr__( self ):
        return '<Package: %s>' % ( self.__unicode__() )
class Center( models.Model ):
    """A shipping center (duplicated from the xanedu app's model)."""
    number = models.CharField( max_length=4, unique=True )
    name = models.CharField( max_length=64 )
    email = models.EmailField()
    address = models.ForeignKey( 'Address', blank=True )
    def __unicode__( self ):
        return '%s %s' % ( self.number, self.name )
    def __repr__( self ):
        return '<Center: %s>' % ( self.__unicode__() )
class Address( models.Model ):
    """A US postal address with contact information."""
    name = models.CharField( max_length=64 )
    contact = models.CharField( max_length=32, blank=True )
    phone = models.CharField( max_length=12 )
    street = models.CharField( max_length=256 )
    street2 = models.CharField( max_length=16, blank=True )
    city = models.CharField( max_length=32 )
    state = models.CharField( max_length=2 )   # two-letter state code
    zip = models.CharField( max_length=5 )     # 5-digit ZIP
    def __unicode__( self ):
        return self.name
    def __repr__( self ):
        return '<Address: %s>' % ( self.__unicode__() )
class DeliveryForm( forms.ModelForm ):
    """Auto-generated edit form for Delivery (all model fields)."""
    class Meta:
        model = Delivery
| Python |
"""
This file demonstrates two different styles of tests (one doctest and one
unittest). These will both pass when you run "manage.py test".
Replace these with more appropriate tests for your application.
"""
from django.test import TestCase
class SimpleTest(TestCase):
    """Scaffold unit test generated by startapp."""
    def test_basic_addition(self):
        """
        Tests that 1 + 1 always equals 2.
        """
        # assertEqual replaces failUnlessEqual, a deprecated unittest
        # alias slated for removal; behaviour is identical.
        self.assertEqual(1 + 1, 2)
__test__ = {"doctest": """
Another way to test that 1 + 1 is equal to 2.
>>> 1 + 1 == 2
True
"""}
| Python |
from django.conf.urls.defaults import *
from fedexweb.pud.views import *
# pud URL routes: listing, per-delivery edit form, and shipping label.
urlpatterns = patterns( '',
    ( r'^$', index ),
    ( r'^(?P<id>\d+)/$', detail ),
    ( r'^(?P<id>\d+)/label/$', label ),
)
| Python |
from django.contrib import admin
from fedexweb.pud.models import *
# Admin changelist configuration for the pud models.
class CenterAdmin( admin.ModelAdmin ):
    list_display = ( 'number', 'name', 'email' )
class AddressAdmin( admin.ModelAdmin ):
    list_display = ( 'name', 'street', 'street2', 'city', 'state', 'zip' )
class DeliveryAdmin( admin.ModelAdmin ):
    list_display = ( 'origin', 'destination', 'ready', 'due' )
class PackageAdmin( admin.ModelAdmin ):
    list_display = ( 'type', 'size', 'weight' )
# Expose the models in the Django admin site.
admin.site.register( Center, CenterAdmin )
admin.site.register( Address, AddressAdmin )
admin.site.register( Delivery, DeliveryAdmin )
admin.site.register( Package, PackageAdmin )
| Python |
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response, get_object_or_404
from fedexweb.pud.models import *
def index( request ):
    """List every Delivery using the index.html template."""
    puds = Delivery.objects.all()
    return render_to_response( 'index.html', {'puds': puds} )
def detail( request, id ):
    """Display and process the edit form for the Delivery *id*.

    GET renders the form pre-filled from the existing Delivery; a valid
    POST saves the changes and redirects to the label page; an invalid
    POST re-renders the form with its errors.
    """
    pud = get_object_or_404( Delivery, id=id )
    if request.method == 'POST':
        # Bind the POST data to the existing Delivery: without
        # instance=pud a valid submit created a duplicate row instead of
        # updating the one being edited.
        form = DeliveryForm( request.POST, instance=pud )
        if form.is_valid():
            form.save()
            # Redirect if the form is valid. 'id' arrives from the URL
            # as a string, so %s is required here ('%d' % id raised
            # TypeError on every successful save).
            return HttpResponseRedirect( '/pud/%s/label' % id )
    else:
        form = DeliveryForm( instance=pud )
    # Return either an empty form or a form with errors
    return render_to_response( 'detail.html', {'form': form} )
def label( request, id ):
    """Render the shipping label page for the Delivery *id*."""
    pud = get_object_or_404( Delivery, id=id )
    return render_to_response( 'label.html', {'pud': pud} )
| Python |
#!/usr/bin/env python
# -*- encoding:UTF-8 -*-
from scipy import *
import bisect
import time, os, subprocess
from pylab import figure, show, rand
from matplotlib.patches import Ellipse
from matplotlib.pyplot import *
#~ from guppy import hpy # for memory profiling
#Some constants
epsilon = 1E-10
RUNNING_ID = (int(time.time()) % 10000) # A 4-digit run id
DESTDIR = "../videos/%04d"%RUNNING_ID   # output directory for rendered frames
VIDEO_PREFIX = "t%08.2f.png"            # time-stamped frame filename pattern
NUMBER_OF_ITERATIONS = 50
# For each iteration
CONF_PRINT_TEF = 1 # Future events table
CONF_PRINT_STATE_VECTOR = 1 # Sytem state vector (position, velocity, change_times)
CONF_PLOT_DISCRETE_T = 0 # Plots the system at collision times
# Between iterations
CONF_PLOT_CONTINUOUS_T = 1 # Plots the system at constant time rate (better for animation)
CONF_PLOT_TIMESTEP = 0.5
CONF_MAKE_VIDEO = 1
CONF_PLAY_VIDEO = 0
CONF_OPEN_FOLDER = 1
mycolorfunc = matplotlib.pyplot.cm.gist_rainbow  # one distinct colour per atom
def load_vectors(filename):
    """Load the atom state table from *filename*.

    Each non-blank line must have at least 8 whitespace-separated
    columns: x y z vx vy vz radius mass.

    Returns (N, pos, vel, radii, masses):
      N:      number of atoms
      pos:    (N, 3) position array
      vel:    (N, 3) velocity array
      radii:  (N,) radius array
      masses: (N,) mass array
    """
    # Context manager guarantees the file is closed even on a read error
    # (the original left the handle open if readlines() raised).
    with open(filename, "r") as fh:
        lines = fh.readlines()
    # Filtering blank lines; a list comprehension keeps len() valid on
    # Python 3 too, where filter() returns an iterator.
    lines = [ln for ln in lines if not ln.isspace()]
    print(lines)  # debug: raw input rows
    # Single-argument print() calls behave identically under Python 2.
    N = len(lines)
    pos = zeros((N, 3), dtype=double)
    vel = zeros((N, 3), dtype=double)
    radii = zeros((N,), dtype=double)
    masses = zeros((N,), dtype=double)
    for r in range(N):
        row = lines[r].split()
        if len(row) < 8:
            print("Error: line# %d has %d columns" % (r, len(row)))
            exit(1)
        pos[r] = row[0:3]
        vel[r] = row[3:6]
        radii[r] = row[6]
        masses[r] = row[7]
    return (N, pos, vel, radii, masses)
#Initialisation of the atom state vectors (runs at import time)
(number_of_atoms, atoms_pos, atoms_vel, atoms_radii, atoms_masses) = load_vectors("atoms.txt")
atoms_time = zeros(number_of_atoms)  # per-atom time of last trajectory change
container_radius = 11
all_atoms_radius = 1
future = []  # time-ordered table of pending events (kept sorted via bisect)
class event():
    """A scheduled simulation event: at time ``t`` call ``action(t, params)``."""
    def __init__(self, t=0, action=None, params=None):
        self.t = t
        self.action = action
        self.params = params
    def __str__(self):
        # Loggable representation including the object's identity.
        return 'Event id=%08x t=%g: %s(%s)'%(id(self), self.t, self.action.__name__, self.params)
    def __cmp__(self,other):
        # Chronological ordering (Python 2 rich-comparison fallback),
        # used by bisect.insort on the future-events table.
        return sign(self.t - other.t)
    def execute(self):
        """Invoke the stored action with the scheduled time and parameters."""
        self.action(self.t, self.params)
def estimate_collision_time_with_container(idAtom):
    """Return the absolute time at which atom *idAtom* will reach the
    spherical container wall, or -1 if no future collision is predicted."""
    p = atoms_pos[idAtom] # Position vector
    v = atoms_vel[idAtom] # Velocity vector
    t = atoms_time[idAtom] # Change time vector
    radius = atoms_radii[idAtom]
    # Coefficients of the quadratic equation
    # (p.x+v.x*t)^2 +(p.y+v.y*t)^2 +(p.z+v.z*t)^2 = (container_radius - atom_radius)^2
    # t^2 (v.x^2+vy^2+vz^2) + t 2(vx.px + vy.py + vz.pz) + (px^2+py^2+pz^2)-(container_radius<-all_atoms_radius)^2 = 0
    # t^2 (v.v) + t (2.p.v) + (p.p-(container_radius<-all_atoms_radius)^2) = 0
    # (dot = dot / inner / Euclidean product)
    a = dot(v,v)
    b = 2*dot(p,v)
    c = dot(p, p)-(container_radius-radius)**2
    det = b**2-4*a*c
    if det > epsilon:
        x1 = (-b + sqrt(det))/(2*a)
        x2 = (-b - sqrt(det))/(2*a)
        # One root should be positive and the other negative or zero
        #~ assert x1*x2 < epsilon # tolerance-based comparison
        delta_t = max(x1,x2)
        return t + delta_t
    else:
        # If the velocity is 0 or the atom is not inside the container,
        # it may never collide (case A3)
        return -1
def collide_with_container(t, params):
    """Event action: bounce atom params[0] off the container wall at
    time *t*, then purge its stale events and reschedule new ones."""
    print "<<< Colliding with container " , params
    idAtom = params[0]
    p = atoms_pos[idAtom] # Position vector
    v = atoms_vel[idAtom] # Velocity vector
    last_t = atoms_time[idAtom] # Time of last trajectory change
    # New position and velocity: advance to the impact point, then
    # reflect the velocity about the (radial) surface normal.
    p = p + v*(t-last_t)
    v = v - (2*dot(p,v)/dot(p,p))*p
    atoms_pos[idAtom] = p
    atoms_vel[idAtom] = v
    atoms_time[idAtom] = t
    #~ print '\n\nAt %f, atom #%d \npos=%s\nvel=%s'%(t,idAtom,str(p),str(v))
    #~ !print '\t%8.3f %4d %6.2f %6.2f %6.2f %6.2f %6.2f %6.2f'%(t,idAtom,p[0],p[1],p[2],v[0],v[1],v[2])
    # Purge events
    # Drop now-stale future events that involve this atom
    for e in future:
        if (idAtom in e.params and e.t > t):
            print ">>> Removing ", e
    # future2.remove(e)
    future[:] = [x for x in future if not( idAtom in x.params and x.t > t)]
    # Estimate collision times on the new trajectory and schedule them
    schedule_collision_events(idAtom)
def estimate_collision_time_with_atoms(idAtom, r=None):
    """Predict atom-atom collision times for atom *idAtom*.

    Implements Rapaport (2004), p. 392, eq. 14.2.2.

    r: iterable of candidate partner indices; defaults to all atoms.
       (The original default, range(number_of_atoms), was evaluated once
       at import time; None defers the lookup to call time.)
    Returns a list of (absolute_time, partner_id) tuples for every
    approaching pair with a real, non-negative contact time.
    """
    if r is None:
        r = range(number_of_atoms)
    pi_ = atoms_pos[idAtom] # Position vector
    vi = atoms_vel[idAtom] # Velocity vector
    ti = atoms_time[idAtom]
    possible_collisions = []
    for idAtom_partner in r:
        if idAtom_partner == idAtom:
            continue
        diam = atoms_radii[idAtom] + atoms_radii[idAtom_partner]
        pj_ = atoms_pos[idAtom_partner] # Position vector
        vj = atoms_vel[idAtom_partner] # Velocity vector
        tj = atoms_time[idAtom_partner]
        # Advance both atoms to a common reference time before predicting.
        tref = max(ti, tj)
        pi = pi_ + vi*(tref-ti)
        pj = pj_ + vj*(tref-tj)
        # rij/vij renamed from the original 'r'/'v': 'r' shadowed the
        # parameter being iterated, a latent maintenance hazard.
        rij = pi - pj
        vij = vi - vj
        b = dot(rij, vij)
        det = b*b - dot(vij, vij)*(dot(rij, rij) - diam**2)
        if (b < 0 and det >= 0): # approaching, and a real root exists
            delta_t = (-b - sqrt(det))/dot(vij, vij)
            if (delta_t >= 0):
                possible_collisions.append( (tref + delta_t, idAtom_partner) )
    return possible_collisions
def collide_between_atoms(dest_t, atoms_pair):
    """Event action: elastic hard-sphere collision between the two atoms
    in *atoms_pair* at time *dest_t*.

    NOTE: pp and vv hold references to rows of atoms_pos / atoms_vel, so
    the '+=' / '-=' updates below mutate the global arrays in place --
    which is why the commented-out write-backs are unnecessary.
    """
    print "<<< Colliding atoms...", atoms_pair
    id0, id1 = atoms_pair
    pp = ([atoms_pos[id0],atoms_pos[id1]]) # Position vector
    vv = ([atoms_vel[id0],atoms_vel[id1]]) # Velocity vector
    tt = (atoms_time[id0],atoms_time[id1]) # Time of last trajectory change
    diam = atoms_radii[id0]+atoms_radii[id1]
    mm = (atoms_masses[id0], atoms_masses[id1])
    # New position: advance each atom from its own last-change time
    pp[0] += vv[0]*(dest_t - tt[0])
    pp[1] += vv[1]*(dest_t - tt[1])
    #~ print "@@", pp[0], pp[1]
    # New velocity: impulse along the line of centres
    r = pp[0]-pp[1]
    v = vv[0]-vv[1]
    b = dot(r,v)
    #~ det=dot(b,b)-dot(v,v)*(dot(r,r)-diam**2)
    #~ print "@@", vv[0], vv[1], v
    det=b**2-dot(v,v)*(dot(r,r)-diam**2)
    delta_v = (-b/diam**2)*r
    vv[0] += delta_v * 2*mm[1]/(mm[0]+mm[1])
    vv[1] -= delta_v * 2*mm[0]/(mm[0]+mm[1])
    #~ atoms_pos[id0] = pp[0]
    #~ atoms_pos[id1] = pp[1]
    #~ atoms_vel[id0],atoms_vel[id1] = vv
    atoms_time[id0],atoms_time[id1] = dest_t, dest_t
    # Drop now-stale future events involving either atom
    # TODO: Consider an epsilon in the time comparison
    for e in future:
        if ((id0 in e.params or id1 in e.params) and e.t > dest_t):
            print ">>> Removing ", e
    # future.remove(e)
    future[:] = [x for x in future if not ((id0 in x.params or id1 in x.params) and x.t > dest_t)]
    #~ print '\n\nAt %f, atom #%d \npos=%s\nvel=%s'%(t,idAtom,str(p),str(v))
    for i in (0,1):
        #~ !print '\t%8.3f %4d %6.2f %6.2f %6.2f %6.2f %6.2f %6.2f'%(tt[i],atoms_pair[i],pp[i][0],pp[i][1],pp[i][2],vv[i][0],vv[i][1],vv[i][2])
        # Estimate collision times on the new trajectory and schedule them
        schedule_collision_events(atoms_pair[i])
def schedule_collision_events(idAtom):
    """Predict and enqueue all future collision events for *idAtom*:
    one wall collision (if any) plus every predicted atom-atom collision.
    Events are kept time-ordered in 'future' via bisect.insort."""
    t = estimate_collision_time_with_container(idAtom)
    if t > 0:
        e = event(t, collide_with_container, (idAtom,))
        print "%%% Scheduling ", e
        bisect.insort(future, e)
    collisions = estimate_collision_time_with_atoms(idAtom)
    for (t,idPartner) in collisions:
        e = event(t, collide_between_atoms,(idAtom, idPartner) )
        print "%%% Scheduling ", e
        bisect.insort(future, e)
def print_header():
    """Print the column header for the state table (without energy).

    Python-2-only 'print' statements replaced by single-argument
    print() calls, which produce identical output under Python 2 while
    remaining valid Python 3.
    """
    print('#Iter\t Time Atom Position Velocity ')
    print('=====\t======== ==== ==================== ====================')
def print_header2():
    """Print the column header for the state table including energy.

    Python-2-only 'print' statements replaced by single-argument
    print() calls, which produce identical output under Python 2 while
    remaining valid Python 3.
    """
    print('#Iter\t Time Atom Position Velocity Energy')
    print('=====\t======== ==== ==================== ==================== ===========')
def plot_state(instant_t):
    """Render a snapshot of all atoms (positions extrapolated to time
    *instant_t*) inside the container and save it as a PNG in DESTDIR."""
    fig = figure()
    ax = fig.add_subplot(111, aspect='equal')
    ax.set_xlim(-12, 12)
    ax.set_ylim(-12, 12)
    # Faint circle for the container outline.
    container = Ellipse(xy=(0,0), width=22, height=22)
    container.set_alpha(0.1)
    ax.add_artist(container)
    for idAtom in range(number_of_atoms):
        pp = atoms_pos[idAtom] # Position vector
        v = atoms_vel[idAtom] # Velocity vector
        t = atoms_time[idAtom] # Change time vector
        # Extrapolate the straight-line trajectory from the last event.
        p = pp +v*(instant_t-t)
        d = 2*atoms_radii[idAtom] # Diameter
        ax.add_artist(Ellipse(xy=p[0:2], width=d, height=d, color=mycolorfunc(1.0*idAtom/number_of_atoms)))
    fig.savefig(DESTDIR+"/"+VIDEO_PREFIX%instant_t)
    close(fig)  # release the figure to bound memory across many frames
if __name__ == "__main__":
#~ DESTDIR = "video/%04d"%RUNNING_ID
os.makedirs(DESTDIR)
# Se cargan todos los átomos con sus estimaciones iniciales
for idAtom in range(number_of_atoms):
t = estimate_collision_time_with_container(idAtom)
if t >= 0:
e = event(t, collide_with_container, (idAtom,))
bisect.insort(future, e)
#~ future_events.append(e)
for idAtom in range(number_of_atoms):
collisions = estimate_collision_time_with_atoms(idAtom,range(idAtom+1,number_of_atoms))
for (t,idPartner) in collisions:
bisect.insort(future, event(t, collide_between_atoms,(idAtom, idPartner) ))
# Se ejecuta el scheduler
t = 0
plot_state(t)
for i in range(NUMBER_OF_ITERATIONS):
if CONF_PRINT_STATE_VECTOR:
print "========================================"
print_header2()
tot_energy = 0.0
for ii in range(number_of_atoms):
pp = atoms_pos[ii] # Position vector
vv = atoms_vel[ii] # Velocity vector
tt = atoms_time[ii] # Time of last trajectory change
mm = atoms_masses[ii]
energy = mm*dot(vv,vv)
tot_energy += energy
print '=\t%8.3f %4d %6.2f %6.2f %6.2f %6.2f %6.2f %6.2f %6.2f'%(tt,ii,pp[0],pp[1],pp[2],vv[0],vv[1],vv[2],energy)
print "======================================== Total Energy = ", tot_energy
if CONF_PRINT_TEF:
print "==== Future events table ========"
for e in future:
print "tef>", e
e = future.pop(0)
print '<*> Event OrderNo #%d, id=%08x, at time=%f'%(i, id(e), e.t)
if CONF_PLOT_CONTINUOUS_T:
while t < e.t:
plot_state(t)
t += CONF_PLOT_TIMESTEP
if CONF_PLOT_DISCRETE_T:
plot_state(e.t)
e.execute()
if CONF_MAKE_VIDEO:
print
os.chdir(DESTDIR)
subprocess.Popen(["mencoder", "-o", "%04d.avi"%RUNNING_ID, "mf://*.png", "-ovc", "lavc"])
if CONF_PLAY_VIDEO:
subprocess.Popen(["mplayer", "-loop", "0", "%04d.avi"%RUNNING_ID])
if CONF_OPEN_FOLDER:
subprocess.Popen(["nautilus", "."])
#~ print "===Memory usage==="
#~ print hpy().heap() # Show current reachable heap
| Python |
#!/usr/bin/env python
# -*- encoding:UTF-8 -*-
from scipy import *
import bisect
import time, os, subprocess
from pylab import figure, show, rand
from matplotlib.patches import Ellipse
from matplotlib.pyplot import *
#~ from guppy import hpy # for memory profiling
#Some constants
epsilon = 1E-10
RUNNING_ID = (int(time.time()) % 10000) # A 4-digit run id
DESTDIR = "../videos/%04d"%RUNNING_ID   # output directory for rendered frames
VIDEO_PREFIX = "t%08.2f.png"            # time-stamped frame filename pattern
NUMBER_OF_ITERATIONS = 50
# For each iteration
CONF_PRINT_TEF = 1 # Future events table
CONF_PRINT_STATE_VECTOR = 1 # Sytem state vector (position, velocity, change_times)
CONF_PLOT_DISCRETE_T = 0 # Plots the system at collision times
# Between iterations
CONF_PLOT_CONTINUOUS_T = 1 # Plots the system at constant time rate (better for animation)
CONF_PLOT_TIMESTEP = 0.5
CONF_MAKE_VIDEO = 1
CONF_PLAY_VIDEO = 0
CONF_OPEN_FOLDER = 1
mycolorfunc = matplotlib.pyplot.cm.gist_rainbow  # one distinct colour per atom
def load_vectors(filename):
    """Load the atom state table from *filename*.

    Each non-blank line must have at least 8 whitespace-separated
    columns: x y z vx vy vz radius mass.

    Returns (N, pos, vel, radii, masses):
      N:      number of atoms
      pos:    (N, 3) position array
      vel:    (N, 3) velocity array
      radii:  (N,) radius array
      masses: (N,) mass array
    """
    # Context manager guarantees the file is closed even on a read error
    # (the original left the handle open if readlines() raised).
    with open(filename, "r") as fh:
        lines = fh.readlines()
    # Filtering blank lines; a list comprehension keeps len() valid on
    # Python 3 too, where filter() returns an iterator.
    lines = [ln for ln in lines if not ln.isspace()]
    print(lines)  # debug: raw input rows
    # Single-argument print() calls behave identically under Python 2.
    N = len(lines)
    pos = zeros((N, 3), dtype=double)
    vel = zeros((N, 3), dtype=double)
    radii = zeros((N,), dtype=double)
    masses = zeros((N,), dtype=double)
    for r in range(N):
        row = lines[r].split()
        if len(row) < 8:
            print("Error: line# %d has %d columns" % (r, len(row)))
            exit(1)
        pos[r] = row[0:3]
        vel[r] = row[3:6]
        radii[r] = row[6]
        masses[r] = row[7]
    return (N, pos, vel, radii, masses)
#Initialisation of the atom state vectors (runs at import time)
(number_of_atoms, atoms_pos, atoms_vel, atoms_radii, atoms_masses) = load_vectors("atoms.txt")
atoms_time = zeros(number_of_atoms)  # per-atom time of last trajectory change
container_radius = 11
all_atoms_radius = 1
future = []  # time-ordered table of pending events (kept sorted via bisect)
class event():
    """A scheduled simulation event: at time ``t`` call ``action(t, params)``."""
    def __init__(self, t=0, action=None, params=None):
        self.t = t
        self.action = action
        self.params = params
    def __str__(self):
        # Loggable representation including the object's identity.
        return 'Event id=%08x t=%g: %s(%s)'%(id(self), self.t, self.action.__name__, self.params)
    def __cmp__(self,other):
        # Chronological ordering (Python 2 rich-comparison fallback),
        # used by bisect.insort on the future-events table.
        return sign(self.t - other.t)
    def execute(self):
        """Invoke the stored action with the scheduled time and parameters."""
        self.action(self.t, self.params)
def estimate_collision_time_with_container(idAtom):
    """Return the absolute time at which atom *idAtom* will reach the
    spherical container wall, or -1 if no future collision is predicted."""
    p = atoms_pos[idAtom] # Position vector
    v = atoms_vel[idAtom] # Velocity vector
    t = atoms_time[idAtom] # Change time vector
    radius = atoms_radii[idAtom]
    # Coefficients of the quadratic equation
    # (p.x+v.x*t)^2 +(p.y+v.y*t)^2 +(p.z+v.z*t)^2 = (container_radius - atom_radius)^2
    # t^2 (v.x^2+vy^2+vz^2) + t 2(vx.px + vy.py + vz.pz) + (px^2+py^2+pz^2)-(container_radius<-all_atoms_radius)^2 = 0
    # t^2 (v.v) + t (2.p.v) + (p.p-(container_radius<-all_atoms_radius)^2) = 0
    # (dot = dot / inner / Euclidean product)
    a = dot(v,v)
    b = 2*dot(p,v)
    c = dot(p, p)-(container_radius-radius)**2
    det = b**2-4*a*c
    if det > epsilon:
        x1 = (-b + sqrt(det))/(2*a)
        x2 = (-b - sqrt(det))/(2*a)
        # One root should be positive and the other negative or zero
        #~ assert x1*x2 < epsilon # tolerance-based comparison
        delta_t = max(x1,x2)
        return t + delta_t
    else:
        # If the velocity is 0 or the atom is not inside the container,
        # it may never collide (case A3)
        return -1
def collide_with_container(t, params):
    """Event action: bounce atom params[0] off the container wall at
    time *t*, then purge its stale events and reschedule new ones."""
    print "<<< Colliding with container " , params
    idAtom = params[0]
    p = atoms_pos[idAtom] # Position vector
    v = atoms_vel[idAtom] # Velocity vector
    last_t = atoms_time[idAtom] # Time of last trajectory change
    # New position and velocity: advance to the impact point, then
    # reflect the velocity about the (radial) surface normal.
    p = p + v*(t-last_t)
    v = v - (2*dot(p,v)/dot(p,p))*p
    atoms_pos[idAtom] = p
    atoms_vel[idAtom] = v
    atoms_time[idAtom] = t
    #~ print '\n\nAt %f, atom #%d \npos=%s\nvel=%s'%(t,idAtom,str(p),str(v))
    #~ !print '\t%8.3f %4d %6.2f %6.2f %6.2f %6.2f %6.2f %6.2f'%(t,idAtom,p[0],p[1],p[2],v[0],v[1],v[2])
    # Purge events
    # Drop now-stale future events that involve this atom
    for e in future:
        if (idAtom in e.params and e.t > t):
            print ">>> Removing ", e
    # future2.remove(e)
    future[:] = [x for x in future if not( idAtom in x.params and x.t > t)]
    # Estimate collision times on the new trajectory and schedule them
    schedule_collision_events(idAtom)
def estimate_collision_time_with_atoms(idAtom, r=None):
    """Predict atom-atom collision times for atom *idAtom*.

    Implements Rapaport (2004), p. 392, eq. 14.2.2.

    r: iterable of candidate partner indices; defaults to all atoms.
       (The original default, range(number_of_atoms), was evaluated once
       at import time; None defers the lookup to call time.)
    Returns a list of (absolute_time, partner_id) tuples for every
    approaching pair with a real, non-negative contact time.
    """
    if r is None:
        r = range(number_of_atoms)
    pi_ = atoms_pos[idAtom] # Position vector
    vi = atoms_vel[idAtom] # Velocity vector
    ti = atoms_time[idAtom]
    possible_collisions = []
    for idAtom_partner in r:
        if idAtom_partner == idAtom:
            continue
        diam = atoms_radii[idAtom] + atoms_radii[idAtom_partner]
        pj_ = atoms_pos[idAtom_partner] # Position vector
        vj = atoms_vel[idAtom_partner] # Velocity vector
        tj = atoms_time[idAtom_partner]
        # Advance both atoms to a common reference time before predicting.
        tref = max(ti, tj)
        pi = pi_ + vi*(tref-ti)
        pj = pj_ + vj*(tref-tj)
        # rij/vij renamed from the original 'r'/'v': 'r' shadowed the
        # parameter being iterated, a latent maintenance hazard.
        rij = pi - pj
        vij = vi - vj
        b = dot(rij, vij)
        det = b*b - dot(vij, vij)*(dot(rij, rij) - diam**2)
        if (b < 0 and det >= 0): # approaching, and a real root exists
            delta_t = (-b - sqrt(det))/dot(vij, vij)
            if (delta_t >= 0):
                possible_collisions.append( (tref + delta_t, idAtom_partner) )
    return possible_collisions
def collide_between_atoms(dest_t, atoms_pair):
    """Event action: elastic hard-sphere collision between the two atoms
    in *atoms_pair* at time *dest_t*.

    NOTE: pp and vv hold references to rows of atoms_pos / atoms_vel, so
    the '+=' / '-=' updates below mutate the global arrays in place --
    which is why the commented-out write-backs are unnecessary.
    """
    print "<<< Colliding atoms...", atoms_pair
    id0, id1 = atoms_pair
    pp = ([atoms_pos[id0],atoms_pos[id1]]) # Position vector
    vv = ([atoms_vel[id0],atoms_vel[id1]]) # Velocity vector
    tt = (atoms_time[id0],atoms_time[id1]) # Time of last trajectory change
    diam = atoms_radii[id0]+atoms_radii[id1]
    mm = (atoms_masses[id0], atoms_masses[id1])
    # New position: advance each atom from its own last-change time
    pp[0] += vv[0]*(dest_t - tt[0])
    pp[1] += vv[1]*(dest_t - tt[1])
    #~ print "@@", pp[0], pp[1]
    # New velocity: impulse along the line of centres
    r = pp[0]-pp[1]
    v = vv[0]-vv[1]
    b = dot(r,v)
    #~ det=dot(b,b)-dot(v,v)*(dot(r,r)-diam**2)
    #~ print "@@", vv[0], vv[1], v
    det=b**2-dot(v,v)*(dot(r,r)-diam**2)
    delta_v = (-b/diam**2)*r
    vv[0] += delta_v * 2*mm[1]/(mm[0]+mm[1])
    vv[1] -= delta_v * 2*mm[0]/(mm[0]+mm[1])
    #~ atoms_pos[id0] = pp[0]
    #~ atoms_pos[id1] = pp[1]
    #~ atoms_vel[id0],atoms_vel[id1] = vv
    atoms_time[id0],atoms_time[id1] = dest_t, dest_t
    # Drop now-stale future events involving either atom
    # TODO: Consider an epsilon in the time comparison
    for e in future:
        if ((id0 in e.params or id1 in e.params) and e.t > dest_t):
            print ">>> Removing ", e
    # future.remove(e)
    future[:] = [x for x in future if not ((id0 in x.params or id1 in x.params) and x.t > dest_t)]
    #~ print '\n\nAt %f, atom #%d \npos=%s\nvel=%s'%(t,idAtom,str(p),str(v))
    for i in (0,1):
        #~ !print '\t%8.3f %4d %6.2f %6.2f %6.2f %6.2f %6.2f %6.2f'%(tt[i],atoms_pair[i],pp[i][0],pp[i][1],pp[i][2],vv[i][0],vv[i][1],vv[i][2])
        # Estimate collision times on the new trajectory and schedule them
        schedule_collision_events(atoms_pair[i])
def schedule_collision_events(idAtom):
    """Predict and enqueue all future collision events for *idAtom*:
    one wall collision (if any) plus every predicted atom-atom collision.
    Events are kept time-ordered in 'future' via bisect.insort."""
    t = estimate_collision_time_with_container(idAtom)
    if t > 0:
        e = event(t, collide_with_container, (idAtom,))
        print "%%% Scheduling ", e
        bisect.insort(future, e)
    collisions = estimate_collision_time_with_atoms(idAtom)
    for (t,idPartner) in collisions:
        e = event(t, collide_between_atoms,(idAtom, idPartner) )
        print "%%% Scheduling ", e
        bisect.insort(future, e)
def print_header():
    """Print the column header for the state table (without energy).

    Python-2-only 'print' statements replaced by single-argument
    print() calls, which produce identical output under Python 2 while
    remaining valid Python 3.
    """
    print('#Iter\t Time Atom Position Velocity ')
    print('=====\t======== ==== ==================== ====================')
def print_header2():
    """Print the column header for the state table including energy.

    Python-2-only 'print' statements replaced by single-argument
    print() calls, which produce identical output under Python 2 while
    remaining valid Python 3.
    """
    print('#Iter\t Time Atom Position Velocity Energy')
    print('=====\t======== ==== ==================== ==================== ===========')
def plot_state(instant_t):
    """Render every atom at absolute time *instant_t* and save the frame
    as a PNG under DESTDIR (file name built from VIDEO_PREFIX)."""
    fig = figure()
    ax = fig.add_subplot(111, aspect='equal')
    ax.set_xlim(-12, 12)
    ax.set_ylim(-12, 12)
    # Translucent circle representing the container.
    container = Ellipse(xy=(0,0), width=22, height=22)
    container.set_alpha(0.1)
    ax.add_artist(container)
    for idAtom in range(number_of_atoms):
        # Extrapolate the position from the last trajectory change.
        atom_t = atoms_time[idAtom]
        atom_p = atoms_pos[idAtom] + atoms_vel[idAtom]*(instant_t - atom_t)
        d = 2*atoms_radii[idAtom]  # circle diameter
        color = mycolorfunc(1.0*idAtom/number_of_atoms)
        ax.add_artist(Ellipse(xy=atom_p[0:2], width=d, height=d, color=color))
    fig.savefig(DESTDIR+"/"+VIDEO_PREFIX%instant_t)
    close(fig)
if __name__ == "__main__":
    #~ DESTDIR = "video/%04d"%RUNNING_ID
    os.makedirs(DESTDIR)
    # Load all atoms with their initial collision estimates
    for idAtom in range(number_of_atoms):
        t = estimate_collision_time_with_container(idAtom)
        if t >= 0:
            e = event(t, collide_with_container, (idAtom,))
            bisect.insort(future, e)
            #~ future_events.append(e)
    for idAtom in range(number_of_atoms):
        # Only partners with a larger id, so each pair is scheduled once.
        collisions = estimate_collision_time_with_atoms(idAtom,range(idAtom+1,number_of_atoms))
        for (t,idPartner) in collisions:
            bisect.insort(future, event(t, collide_between_atoms,(idAtom, idPartner) ))
    # Run the scheduler
    t = 0
    plot_state(t)
    for i in range(NUMBER_OF_ITERATIONS):
        if CONF_PRINT_STATE_VECTOR:
            print "========================================"
            print_header2()
            tot_energy = 0.0
            for ii in range(number_of_atoms):
                pp = atoms_pos[ii] # Position vector
                vv = atoms_vel[ii] # Velocity vector
                tt = atoms_time[ii] # Time of last trajectory change
                mm = atoms_masses[ii]
                # NOTE(review): kinetic energy would be 0.5*m*v^2; the 1/2
                # factor is omitted, but consistently, so conservation is
                # still visible in the totals.
                energy = mm*dot(vv,vv)
                tot_energy += energy
                print '=\t%8.3f %4d %6.2f %6.2f %6.2f %6.2f %6.2f %6.2f %6.2f'%(tt,ii,pp[0],pp[1],pp[2],vv[0],vv[1],vv[2],energy)
            print "======================================== Total Energy = ", tot_energy
        if CONF_PRINT_TEF:
            print "==== Future events table ========"
            for e in future:
                print "tef>", e
        # Pop the earliest event and process it.
        e = future.pop(0)
        print '<*> Event OrderNo #%d, id=%08x, at time=%f'%(i, id(e), e.t)
        if CONF_PLOT_CONTINUOUS_T:
            # Render frames at a fixed timestep up to the event time.
            while t < e.t:
                plot_state(t)
                t += CONF_PLOT_TIMESTEP
        if CONF_PLOT_DISCRETE_T:
            plot_state(e.t)
        e.execute()
    if CONF_MAKE_VIDEO:
        print
        os.chdir(DESTDIR)
        # Assemble the saved PNG frames into an AVI with mencoder.
        subprocess.Popen(["mencoder", "-o", "%04d.avi"%RUNNING_ID, "mf://*.png", "-ovc", "lavc"])
    if CONF_PLAY_VIDEO:
        subprocess.Popen(["mplayer", "-loop", "0", "%04d.avi"%RUNNING_ID])
    if CONF_OPEN_FOLDER:
        subprocess.Popen(["nautilus", "."])
    #~ print "===Memory usage==="
    #~ print hpy().heap() # Show current reachable heap
| Python |
#!/usr/bin/env python
# -*- encoding:UTF-8 -*-
# Event-driven hard-sphere simulation: atoms bounce elastically inside a
# spherical container; frames are rendered with matplotlib and optionally
# stitched into a video with mencoder.
from scipy import *
import bisect
import time, os, subprocess
from pylab import figure, show, rand
from matplotlib.patches import Ellipse
from matplotlib.pyplot import *
#~ from guppy import hpy # for memory profiling
# Some constants
epsilon = 1E-10  # tolerance for floating-point comparisons
RUNNING_ID = (int(time.time()) % 10000) # A 4-digit run id
DESTDIR = "../videos/%04d"%RUNNING_ID  # output folder for frames and video
VIDEO_PREFIX = "t%08.2f.png"  # frame file-name pattern, keyed by time
NUMBER_OF_ITERATIONS = 50  # number of events processed by the scheduler
# For each iteration
CONF_PRINT_TEF = 1 # Future events table
CONF_PRINT_STATE_VECTOR = 1 # System state vector (position, velocity, change_times)
CONF_PLOT_DISCRETE_T = 0 # Plots the system at collision times
# Between iterations
CONF_PLOT_CONTINUOUS_T = 1 # Plots the system at constant time rate (better for animation)
CONF_PLOT_TIMESTEP = 0.5
CONF_MAKE_VIDEO = 1
CONF_PLAY_VIDEO = 0
CONF_OPEN_FOLDER = 1
mycolorfunc = matplotlib.pyplot.cm.gist_rainbow  # per-atom color map
def load_vectors(filename):
    """Load the atom state table from *filename*.

    Each non-blank line must have at least 8 whitespace-separated columns:
    x y z  vx vy vz  radius  mass.

    Returns (N, pos, vel, radii, masses) where
        N:      number of atoms
        pos:    (N, 3) array of positions
        vel:    (N, 3) array of velocities
        radii:  (N,)   array of radii
        masses: (N,)   array of masses
    Exits the process (status 1) on a malformed line.
    """
    # Read all lines; the context manager closes the file even on error.
    with open(filename, "r") as fh:
        lines = fh.readlines()
    # Drop blank lines.  A list comprehension (rather than filter()) keeps
    # this working on Python 3 too, where filter() returns an iterator
    # that supports neither len() nor indexing.
    lines = [ln for ln in lines if not ln.isspace()]
    print(lines)
    # Allocate the state vectors.
    N = len(lines)
    nrows = N
    pos = zeros((nrows,3),dtype=double)
    vel = zeros((nrows,3),dtype=double)
    radii = zeros((nrows,),dtype=double)
    masses = zeros((nrows,), dtype=double)
    for r in range(nrows):
        row = lines[r].split()
        if len(row) < 8:
            print("Error: line#%d has %d columns" % (r, len(row)))
            exit(1)
        pos[r] = row[0:3]
        vel[r] = row[3:6]
        radii[r] = row[6]
        masses[r] = row[7]
    return (N, pos, vel, radii, masses)
# Initialise the atom state vectors from the input file
(number_of_atoms, atoms_pos, atoms_vel, atoms_radii, atoms_masses) = load_vectors("atoms.txt")
atoms_time = zeros(number_of_atoms)  # time of each atom's last trajectory change
container_radius = 11  # radius of the spherical container
all_atoms_radius = 1  # NOTE(review): appears unused here; per-atom radii come from the file
future = []  # time-sorted list of pending events
class event():
    """A scheduled simulation event: at time ``t`` call ``action(t, params)``.

    Events order by time so a list of them can be kept sorted with
    ``bisect.insort``.
    """
    #-------------------------------------------------------------------
    # Special methods
    #-------------------------------------------------------------------
    # in C++: constructor
    def __init__(self, t=0, action=None, params=None):
        # t: event time; action: callable(t, params); params: tuple of atom ids
        self.t, self.action, self.params = t, action, params
    # String representation; in C++: operator <<
    def __str__(self):
        return 'Event id=%08x t=%g: %s(%s)'%(id(self), self.t, self.action.__name__, self.params)
    # Time ordering.  __cmp__ covers Python 2; the rich comparisons below
    # are required on Python 3 (where __cmp__ is ignored) for bisect.insort
    # to work.  Returning a plain int instead of numpy's sign() (a float)
    # also satisfies the Python 2 cmp protocol, which expects an integer.
    def __cmp__(self, other):
        return (self.t > other.t) - (self.t < other.t)
    def __lt__(self, other):
        return self.t < other.t
    def __le__(self, other):
        return self.t <= other.t
    def __gt__(self, other):
        return self.t > other.t
    def __ge__(self, other):
        return self.t >= other.t
    #-------------------------------------------------------------------
    # Own methods
    #-------------------------------------------------------------------
    def execute(self):
        """Run the event's action, passing the event time and parameters."""
        self.action(self.t, self.params)
def estimate_collision_time_with_container(idAtom):
    """Return the absolute time at which atom *idAtom* reaches the container
    wall on its current trajectory, or -1 if no collision is possible
    (zero velocity, or the atom is not inside the container)."""
    p = atoms_pos[idAtom]       # position at last trajectory change
    v = atoms_vel[idAtom]       # current velocity
    t = atoms_time[idAtom]      # time of last trajectory change
    radius = atoms_radii[idAtom]
    # Solve |p + v*dt| = container_radius - radius for dt, i.e. the
    # quadratic (v.v) dt^2 + 2(p.v) dt + (p.p - (container_radius-radius)^2) = 0
    # (dot = Euclidean inner product)
    a = dot(v, v)
    b = 2 * dot(p, v)
    c = dot(p, p) - (container_radius - radius) ** 2
    det = b ** 2 - 4 * a * c
    if det <= epsilon:
        # No real root: the velocity is zero, or the atom is outside the
        # container (case A3) -- it may never hit the wall.
        return -1
    root = sqrt(det)
    # One root should be positive and the other non-positive; the exit
    # time is the larger one.
    delta_t = max((-b + root) / (2 * a), (-b - root) / (2 * a))
    return t + delta_t
def collide_with_container(t, params):
    # Event handler: atom params[0] bounces elastically off the spherical
    # container wall at absolute time t; updates the global state arrays
    # and reschedules the atom's future collisions.
    print "<<< Colliding with container " , params
    idAtom = params[0]
    p = atoms_pos[idAtom] # Position vector
    v = atoms_vel[idAtom] # Velocity vector
    last_t = atoms_time[idAtom] # Time of last trajectory change
    # New position and velocity: advance to the wall, then reflect the
    # velocity about the radial direction (the wall normal).
    p = p + v*(t-last_t)
    v = v - (2*dot(p,v)/dot(p,p))*p
    atoms_pos[idAtom] = p
    atoms_vel[idAtom] = v
    atoms_time[idAtom] = t
    #~ print '\n\nAt %f, atom #%d \npos=%s\nvel=%s'%(t,idAtom,str(p),str(v))
    #~ !print '\t%8.3f %4d %6.2f %6.2f %6.2f %6.2f %6.2f %6.2f'%(t,idAtom,p[0],p[1],p[2],v[0],v[1],v[2])
    # Purge pending events involving this atom.
    # (The loop below only reports them; the list rebuild that follows
    # does the actual removal.)
    for e in future:
        if (idAtom in e.params and e.t > t):
            print ">>> Removing ", e
    #		future2.remove(e)
    future[:] = [x for x in future if not( idAtom in x.params and x.t > t)]
    # Re-estimate collision times for the new trajectory and schedule them
    schedule_collision_events(idAtom)
def estimate_collision_time_with_atoms(idAtom, r=None):
    """Return a list of (time, partner_id) candidate collisions between
    atom *idAtom* and each atom in *r* (default: all atoms).

    Uses formula 14.2.2 from Rapaport (2004), p. 392: with relative
    position and velocity taken at a common reference time, a collision
    happens when the separation equals the sum of the two radii.
    """
    if r is None:
        # Late-bound default (the old ``r=range(number_of_atoms)`` was
        # evaluated once at import time): consider every atom.
        r = range(number_of_atoms)
    pi_ = atoms_pos[idAtom]  # position at last trajectory change
    vi = atoms_vel[idAtom]   # velocity
    ti = atoms_time[idAtom]  # time of last trajectory change
    possible_collisions = []
    for idAtom_partner in r:
        if idAtom_partner == idAtom:
            continue
        diam = atoms_radii[idAtom] + atoms_radii[idAtom_partner]
        pj_ = atoms_pos[idAtom_partner]  # partner position
        vj = atoms_vel[idAtom_partner]   # partner velocity
        tj = atoms_time[idAtom_partner]
        # Advance both atoms to a common reference time before solving.
        tref = max(ti, tj)
        pi = pi_ + vi*(tref-ti)
        pj = pj_ + vj*(tref-tj)
        # Relative position/velocity.  (Previously named ``r``/``v``,
        # which shadowed the parameter ``r`` inside its own loop.)
        rel = pi - pj
        vel = vi - vj
        b = dot(rel, vel)
        det = b*b - dot(vel, vel)*(dot(rel, rel) - diam**2)
        # b < 0 means the atoms are approaching (necessary condition per
        # the reference); it also guarantees vel is nonzero below.
        if (b < 0 and det >= 0):
            delta_t = (-b - sqrt(det))/dot(vel, vel)
            if (delta_t >= 0):
                possible_collisions.append( (tref+delta_t, idAtom_partner) )
    return possible_collisions
def collide_between_atoms(dest_t, atoms_pair):
    # Event handler: elastic collision between the two atoms in
    # *atoms_pair* at absolute time *dest_t*.
    print "<<< Colliding atoms...", atoms_pair
    id0, id1 = atoms_pair
    # NOTE: pp and vv hold numpy *views* into atoms_pos/atoms_vel, so the
    # in-place += / -= below write straight through to the global state
    # arrays -- that is why the explicit write-backs are commented out.
    pp = ([atoms_pos[id0],atoms_pos[id1]]) # Position vectors
    vv = ([atoms_vel[id0],atoms_vel[id1]]) # Velocity vectors
    tt = (atoms_time[id0],atoms_time[id1]) # Time of last trajectory change
    diam = atoms_radii[id0]+atoms_radii[id1]
    mm = (atoms_masses[id0], atoms_masses[id1])
    # New positions: advance each atom to the collision time.
    pp[0] += vv[0]*(dest_t - tt[0])
    pp[1] += vv[1]*(dest_t - tt[1])
    #~ print "@@", pp[0], pp[1]
    # New velocities: exchange momentum along the line of centers.
    r = pp[0]-pp[1]
    v = vv[0]-vv[1]
    b = dot(r,v)
    #~ det=dot(b,b)-dot(v,v)*(dot(r,r)-diam**2)
    #~ print "@@", vv[0], vv[1], v
    det=b**2-dot(v,v)*(dot(r,r)-diam**2)  # NOTE(review): computed but unused
    delta_v = (-b/diam**2)*r
    vv[0] += delta_v * 2*mm[1]/(mm[0]+mm[1])
    vv[1] -= delta_v * 2*mm[0]/(mm[0]+mm[1])
    #~ atoms_pos[id0] = pp[0]
    #~ atoms_pos[id1] = pp[1]
    #~ atoms_vel[id0],atoms_vel[id1] = vv
    atoms_time[id0],atoms_time[id1] = dest_t, dest_t
    # Purge pending events involving either atom
    # TODO: use an epsilon tolerance when comparing times
    for e in future:
        if ((id0 in e.params or id1 in e.params) and e.t > dest_t):
            print ">>> Removing ", e
    #	future.remove(e)
    future[:] = [x for x in future if not ((id0 in x.params or id1 in x.params) and x.t > dest_t)]
    #~ print '\n\nAt %f, atom #%d \npos=%s\nvel=%s'%(t,idAtom,str(p),str(v))
    for i in (0,1):
        #~ !print '\t%8.3f %4d %6.2f %6.2f %6.2f %6.2f %6.2f %6.2f'%(tt[i],atoms_pair[i],pp[i][0],pp[i][1],pp[i][2],vv[i][0],vv[i][1],vv[i][2])
        # Re-estimate collision times for the new trajectories and schedule
        schedule_collision_events(atoms_pair[i])
def schedule_collision_events(idAtom):
t = estimate_collision_time_with_container(idAtom)
if t > 0:
e = event(t, collide_with_container, (idAtom,))
print "%%% Scheduling ", e
bisect.insort(future, e)
collisions = estimate_collision_time_with_atoms(idAtom)
for (t,idPartner) in collisions:
e = event(t, collide_between_atoms,(idAtom, idPartner) )
print "%%% Scheduling ", e
bisect.insort(future, e)
def print_header():
    # Two-line column header for the per-atom state listing (no energy column).
    print '#Iter\t Time Atom Position Velocity '
    print '=====\t======== ==== ==================== ===================='
def print_header2():
    # Two-line column header for the per-atom state listing, with energy.
    print '#Iter\t Time Atom Position Velocity Energy'
    print '=====\t======== ==== ==================== ==================== ==========='
def plot_state(instant_t):
    """Render every atom at absolute time *instant_t* and save the frame
    as a PNG under DESTDIR (file name built from VIDEO_PREFIX)."""
    fig = figure()
    ax = fig.add_subplot(111, aspect='equal')
    ax.set_xlim(-12, 12)
    ax.set_ylim(-12, 12)
    # Translucent circle representing the container.
    container = Ellipse(xy=(0,0), width=22, height=22)
    container.set_alpha(0.1)
    ax.add_artist(container)
    for idAtom in range(number_of_atoms):
        # Extrapolate the position from the last trajectory change.
        atom_t = atoms_time[idAtom]
        atom_p = atoms_pos[idAtom] + atoms_vel[idAtom]*(instant_t - atom_t)
        d = 2*atoms_radii[idAtom]  # circle diameter
        color = mycolorfunc(1.0*idAtom/number_of_atoms)
        ax.add_artist(Ellipse(xy=atom_p[0:2], width=d, height=d, color=color))
    fig.savefig(DESTDIR+"/"+VIDEO_PREFIX%instant_t)
    close(fig)
if __name__ == "__main__":
    #~ DESTDIR = "video/%04d"%RUNNING_ID
    os.makedirs(DESTDIR)
    # Load all atoms with their initial collision estimates
    for idAtom in range(number_of_atoms):
        t = estimate_collision_time_with_container(idAtom)
        if t >= 0:
            e = event(t, collide_with_container, (idAtom,))
            bisect.insort(future, e)
            #~ future_events.append(e)
    for idAtom in range(number_of_atoms):
        # Only partners with a larger id, so each pair is scheduled once.
        collisions = estimate_collision_time_with_atoms(idAtom,range(idAtom+1,number_of_atoms))
        for (t,idPartner) in collisions:
            bisect.insort(future, event(t, collide_between_atoms,(idAtom, idPartner) ))
    # Run the scheduler
    t = 0
    plot_state(t)
    for i in range(NUMBER_OF_ITERATIONS):
        if CONF_PRINT_STATE_VECTOR:
            print "========================================"
            print_header2()
            tot_energy = 0.0
            for ii in range(number_of_atoms):
                pp = atoms_pos[ii] # Position vector
                vv = atoms_vel[ii] # Velocity vector
                tt = atoms_time[ii] # Time of last trajectory change
                mm = atoms_masses[ii]
                # NOTE(review): kinetic energy would be 0.5*m*v^2; the 1/2
                # factor is omitted, but consistently, so conservation is
                # still visible in the totals.
                energy = mm*dot(vv,vv)
                tot_energy += energy
                print '=\t%8.3f %4d %6.2f %6.2f %6.2f %6.2f %6.2f %6.2f %6.2f'%(tt,ii,pp[0],pp[1],pp[2],vv[0],vv[1],vv[2],energy)
            print "======================================== Total Energy = ", tot_energy
        if CONF_PRINT_TEF:
            print "==== Future events table ========"
            for e in future:
                print "tef>", e
        # Pop the earliest event and process it.
        e = future.pop(0)
        print '<*> Event OrderNo #%d, id=%08x, at time=%f'%(i, id(e), e.t)
        if CONF_PLOT_CONTINUOUS_T:
            # Render frames at a fixed timestep up to the event time.
            while t < e.t:
                plot_state(t)
                t += CONF_PLOT_TIMESTEP
        if CONF_PLOT_DISCRETE_T:
            plot_state(e.t)
        e.execute()
    if CONF_MAKE_VIDEO:
        print
        os.chdir(DESTDIR)
        # Assemble the saved PNG frames into an AVI with mencoder.
        subprocess.Popen(["mencoder", "-o", "%04d.avi"%RUNNING_ID, "mf://*.png", "-ovc", "lavc"])
    if CONF_PLAY_VIDEO:
        subprocess.Popen(["mplayer", "-loop", "0", "%04d.avi"%RUNNING_ID])
    if CONF_OPEN_FOLDER:
        subprocess.Popen(["nautilus", "."])
    #~ print "===Memory usage==="
    #~ print hpy().heap() # Show current reachable heap
| Python |
#!/usr/bin/env python
# -*- encoding:UTF-8 -*-
# Event-driven hard-sphere simulation: atoms bounce elastically inside a
# spherical container; frames are rendered with matplotlib and optionally
# stitched into a video with mencoder.
from scipy import *
import bisect
import time, os, subprocess
from pylab import figure, show, rand
from matplotlib.patches import Ellipse
from matplotlib.pyplot import *
#~ from guppy import hpy # for memory profiling
# Some constants
epsilon = 1E-10  # tolerance for floating-point comparisons
RUNNING_ID = (int(time.time()) % 10000) # A 4-digit run id
DESTDIR = "../videos/%04d"%RUNNING_ID  # output folder for frames and video
VIDEO_PREFIX = "t%08.2f.png"  # frame file-name pattern, keyed by time
NUMBER_OF_ITERATIONS = 50  # number of events processed by the scheduler
# For each iteration
CONF_PRINT_TEF = 1 # Future events table
CONF_PRINT_STATE_VECTOR = 1 # System state vector (position, velocity, change_times)
CONF_PLOT_DISCRETE_T = 0 # Plots the system at collision times
# Between iterations
CONF_PLOT_CONTINUOUS_T = 1 # Plots the system at constant time rate (better for animation)
CONF_PLOT_TIMESTEP = 0.5
CONF_MAKE_VIDEO = 1
CONF_PLAY_VIDEO = 0
CONF_OPEN_FOLDER = 1
mycolorfunc = matplotlib.pyplot.cm.gist_rainbow  # per-atom color map
def load_vectors(filename):
    """Load the atom state table from *filename*.

    Each non-blank line must have at least 8 whitespace-separated columns:
    x y z  vx vy vz  radius  mass.

    Returns (N, pos, vel, radii, masses) where
        N:      number of atoms
        pos:    (N, 3) array of positions
        vel:    (N, 3) array of velocities
        radii:  (N,)   array of radii
        masses: (N,)   array of masses
    Exits the process (status 1) on a malformed line.
    """
    # Read all lines; the context manager closes the file even on error.
    with open(filename, "r") as fh:
        lines = fh.readlines()
    # Drop blank lines.  A list comprehension (rather than filter()) keeps
    # this working on Python 3 too, where filter() returns an iterator
    # that supports neither len() nor indexing.
    lines = [ln for ln in lines if not ln.isspace()]
    print(lines)
    # Allocate the state vectors.
    N = len(lines)
    nrows = N
    pos = zeros((nrows,3),dtype=double)
    vel = zeros((nrows,3),dtype=double)
    radii = zeros((nrows,),dtype=double)
    masses = zeros((nrows,), dtype=double)
    for r in range(nrows):
        row = lines[r].split()
        if len(row) < 8:
            print("Error: line#%d has %d columns" % (r, len(row)))
            exit(1)
        pos[r] = row[0:3]
        vel[r] = row[3:6]
        radii[r] = row[6]
        masses[r] = row[7]
    return (N, pos, vel, radii, masses)
# Initialise the atom state vectors from the input file
(number_of_atoms, atoms_pos, atoms_vel, atoms_radii, atoms_masses) = load_vectors("atoms.txt")
atoms_time = zeros(number_of_atoms)  # time of each atom's last trajectory change
container_radius = 11  # radius of the spherical container
all_atoms_radius = 1  # NOTE(review): appears unused here; per-atom radii come from the file
future = []  # time-sorted list of pending events
class event():
    """A scheduled simulation event: at time ``t`` call ``action(t, params)``.

    Events order by time so a list of them can be kept sorted with
    ``bisect.insort``.
    """
    #-------------------------------------------------------------------
    # Special methods
    #-------------------------------------------------------------------
    # in C++: constructor
    def __init__(self, t=0, action=None, params=None):
        # t: event time; action: callable(t, params); params: tuple of atom ids
        self.t, self.action, self.params = t, action, params
    # String representation; in C++: operator <<
    def __str__(self):
        return 'Event id=%08x t=%g: %s(%s)'%(id(self), self.t, self.action.__name__, self.params)
    # Time ordering.  __cmp__ covers Python 2; the rich comparisons below
    # are required on Python 3 (where __cmp__ is ignored) for bisect.insort
    # to work.  Returning a plain int instead of numpy's sign() (a float)
    # also satisfies the Python 2 cmp protocol, which expects an integer.
    def __cmp__(self, other):
        return (self.t > other.t) - (self.t < other.t)
    def __lt__(self, other):
        return self.t < other.t
    def __le__(self, other):
        return self.t <= other.t
    def __gt__(self, other):
        return self.t > other.t
    def __ge__(self, other):
        return self.t >= other.t
    #-------------------------------------------------------------------
    # Own methods
    #-------------------------------------------------------------------
    def execute(self):
        """Run the event's action, passing the event time and parameters."""
        self.action(self.t, self.params)
def estimate_collision_time_with_container(idAtom):
    """Return the absolute time at which atom *idAtom* reaches the container
    wall on its current trajectory, or -1 if no collision is possible
    (zero velocity, or the atom is not inside the container)."""
    p = atoms_pos[idAtom]       # position at last trajectory change
    v = atoms_vel[idAtom]       # current velocity
    t = atoms_time[idAtom]      # time of last trajectory change
    radius = atoms_radii[idAtom]
    # Solve |p + v*dt| = container_radius - radius for dt, i.e. the
    # quadratic (v.v) dt^2 + 2(p.v) dt + (p.p - (container_radius-radius)^2) = 0
    # (dot = Euclidean inner product)
    a = dot(v, v)
    b = 2 * dot(p, v)
    c = dot(p, p) - (container_radius - radius) ** 2
    det = b ** 2 - 4 * a * c
    if det <= epsilon:
        # No real root: the velocity is zero, or the atom is outside the
        # container (case A3) -- it may never hit the wall.
        return -1
    root = sqrt(det)
    # One root should be positive and the other non-positive; the exit
    # time is the larger one.
    delta_t = max((-b + root) / (2 * a), (-b - root) / (2 * a))
    return t + delta_t
def collide_with_container(t, params):
    # Event handler: atom params[0] bounces elastically off the spherical
    # container wall at absolute time t; updates the global state arrays
    # and reschedules the atom's future collisions.
    print "<<< Colliding with container " , params
    idAtom = params[0]
    p = atoms_pos[idAtom] # Position vector
    v = atoms_vel[idAtom] # Velocity vector
    last_t = atoms_time[idAtom] # Time of last trajectory change
    # New position and velocity: advance to the wall, then reflect the
    # velocity about the radial direction (the wall normal).
    p = p + v*(t-last_t)
    v = v - (2*dot(p,v)/dot(p,p))*p
    atoms_pos[idAtom] = p
    atoms_vel[idAtom] = v
    atoms_time[idAtom] = t
    #~ print '\n\nAt %f, atom #%d \npos=%s\nvel=%s'%(t,idAtom,str(p),str(v))
    #~ !print '\t%8.3f %4d %6.2f %6.2f %6.2f %6.2f %6.2f %6.2f'%(t,idAtom,p[0],p[1],p[2],v[0],v[1],v[2])
    # Purge pending events involving this atom.
    # (The loop below only reports them; the list rebuild that follows
    # does the actual removal.)
    for e in future:
        if (idAtom in e.params and e.t > t):
            print ">>> Removing ", e
    #		future2.remove(e)
    future[:] = [x for x in future if not( idAtom in x.params and x.t > t)]
    # Re-estimate collision times for the new trajectory and schedule them
    schedule_collision_events(idAtom)
def estimate_collision_time_with_atoms(idAtom, r=None):
    """Return a list of (time, partner_id) candidate collisions between
    atom *idAtom* and each atom in *r* (default: all atoms).

    Uses formula 14.2.2 from Rapaport (2004), p. 392: with relative
    position and velocity taken at a common reference time, a collision
    happens when the separation equals the sum of the two radii.
    """
    if r is None:
        # Late-bound default (the old ``r=range(number_of_atoms)`` was
        # evaluated once at import time): consider every atom.
        r = range(number_of_atoms)
    pi_ = atoms_pos[idAtom]  # position at last trajectory change
    vi = atoms_vel[idAtom]   # velocity
    ti = atoms_time[idAtom]  # time of last trajectory change
    possible_collisions = []
    for idAtom_partner in r:
        if idAtom_partner == idAtom:
            continue
        diam = atoms_radii[idAtom] + atoms_radii[idAtom_partner]
        pj_ = atoms_pos[idAtom_partner]  # partner position
        vj = atoms_vel[idAtom_partner]   # partner velocity
        tj = atoms_time[idAtom_partner]
        # Advance both atoms to a common reference time before solving.
        tref = max(ti, tj)
        pi = pi_ + vi*(tref-ti)
        pj = pj_ + vj*(tref-tj)
        # Relative position/velocity.  (Previously named ``r``/``v``,
        # which shadowed the parameter ``r`` inside its own loop.)
        rel = pi - pj
        vel = vi - vj
        b = dot(rel, vel)
        det = b*b - dot(vel, vel)*(dot(rel, rel) - diam**2)
        # b < 0 means the atoms are approaching (necessary condition per
        # the reference); it also guarantees vel is nonzero below.
        if (b < 0 and det >= 0):
            delta_t = (-b - sqrt(det))/dot(vel, vel)
            if (delta_t >= 0):
                possible_collisions.append( (tref+delta_t, idAtom_partner) )
    return possible_collisions
def collide_between_atoms(dest_t, atoms_pair):
    # Event handler: elastic collision between the two atoms in
    # *atoms_pair* at absolute time *dest_t*.
    print "<<< Colliding atoms...", atoms_pair
    id0, id1 = atoms_pair
    # NOTE: pp and vv hold numpy *views* into atoms_pos/atoms_vel, so the
    # in-place += / -= below write straight through to the global state
    # arrays -- that is why the explicit write-backs are commented out.
    pp = ([atoms_pos[id0],atoms_pos[id1]]) # Position vectors
    vv = ([atoms_vel[id0],atoms_vel[id1]]) # Velocity vectors
    tt = (atoms_time[id0],atoms_time[id1]) # Time of last trajectory change
    diam = atoms_radii[id0]+atoms_radii[id1]
    mm = (atoms_masses[id0], atoms_masses[id1])
    # New positions: advance each atom to the collision time.
    pp[0] += vv[0]*(dest_t - tt[0])
    pp[1] += vv[1]*(dest_t - tt[1])
    #~ print "@@", pp[0], pp[1]
    # New velocities: exchange momentum along the line of centers.
    r = pp[0]-pp[1]
    v = vv[0]-vv[1]
    b = dot(r,v)
    #~ det=dot(b,b)-dot(v,v)*(dot(r,r)-diam**2)
    #~ print "@@", vv[0], vv[1], v
    det=b**2-dot(v,v)*(dot(r,r)-diam**2)  # NOTE(review): computed but unused
    delta_v = (-b/diam**2)*r
    vv[0] += delta_v * 2*mm[1]/(mm[0]+mm[1])
    vv[1] -= delta_v * 2*mm[0]/(mm[0]+mm[1])
    #~ atoms_pos[id0] = pp[0]
    #~ atoms_pos[id1] = pp[1]
    #~ atoms_vel[id0],atoms_vel[id1] = vv
    atoms_time[id0],atoms_time[id1] = dest_t, dest_t
    # Purge pending events involving either atom
    # TODO: use an epsilon tolerance when comparing times
    for e in future:
        if ((id0 in e.params or id1 in e.params) and e.t > dest_t):
            print ">>> Removing ", e
    #	future.remove(e)
    future[:] = [x for x in future if not ((id0 in x.params or id1 in x.params) and x.t > dest_t)]
    #~ print '\n\nAt %f, atom #%d \npos=%s\nvel=%s'%(t,idAtom,str(p),str(v))
    for i in (0,1):
        #~ !print '\t%8.3f %4d %6.2f %6.2f %6.2f %6.2f %6.2f %6.2f'%(tt[i],atoms_pair[i],pp[i][0],pp[i][1],pp[i][2],vv[i][0],vv[i][1],vv[i][2])
        # Re-estimate collision times for the new trajectories and schedule
        schedule_collision_events(atoms_pair[i])
def schedule_collision_events(idAtom):
t = estimate_collision_time_with_container(idAtom)
if t > 0:
e = event(t, collide_with_container, (idAtom,))
print "%%% Scheduling ", e
bisect.insort(future, e)
collisions = estimate_collision_time_with_atoms(idAtom)
for (t,idPartner) in collisions:
e = event(t, collide_between_atoms,(idAtom, idPartner) )
print "%%% Scheduling ", e
bisect.insort(future, e)
def print_header():
    # Two-line column header for the per-atom state listing (no energy column).
    print '#Iter\t Time Atom Position Velocity '
    print '=====\t======== ==== ==================== ===================='
def print_header2():
    # Two-line column header for the per-atom state listing, with energy.
    print '#Iter\t Time Atom Position Velocity Energy'
    print '=====\t======== ==== ==================== ==================== ==========='
def plot_state(instant_t):
    """Render every atom at absolute time *instant_t* and save the frame
    as a PNG under DESTDIR (file name built from VIDEO_PREFIX)."""
    fig = figure()
    ax = fig.add_subplot(111, aspect='equal')
    ax.set_xlim(-12, 12)
    ax.set_ylim(-12, 12)
    # Translucent circle representing the container.
    container = Ellipse(xy=(0,0), width=22, height=22)
    container.set_alpha(0.1)
    ax.add_artist(container)
    for idAtom in range(number_of_atoms):
        # Extrapolate the position from the last trajectory change.
        atom_t = atoms_time[idAtom]
        atom_p = atoms_pos[idAtom] + atoms_vel[idAtom]*(instant_t - atom_t)
        d = 2*atoms_radii[idAtom]  # circle diameter
        color = mycolorfunc(1.0*idAtom/number_of_atoms)
        ax.add_artist(Ellipse(xy=atom_p[0:2], width=d, height=d, color=color))
    fig.savefig(DESTDIR+"/"+VIDEO_PREFIX%instant_t)
    close(fig)
if __name__ == "__main__":
    #~ DESTDIR = "video/%04d"%RUNNING_ID
    os.makedirs(DESTDIR)
    # Load all atoms with their initial collision estimates
    for idAtom in range(number_of_atoms):
        t = estimate_collision_time_with_container(idAtom)
        if t >= 0:
            e = event(t, collide_with_container, (idAtom,))
            bisect.insort(future, e)
            #~ future_events.append(e)
    for idAtom in range(number_of_atoms):
        # Only partners with a larger id, so each pair is scheduled once.
        collisions = estimate_collision_time_with_atoms(idAtom,range(idAtom+1,number_of_atoms))
        for (t,idPartner) in collisions:
            bisect.insort(future, event(t, collide_between_atoms,(idAtom, idPartner) ))
    # Run the scheduler
    t = 0
    plot_state(t)
    for i in range(NUMBER_OF_ITERATIONS):
        if CONF_PRINT_STATE_VECTOR:
            print "========================================"
            print_header2()
            tot_energy = 0.0
            for ii in range(number_of_atoms):
                pp = atoms_pos[ii] # Position vector
                vv = atoms_vel[ii] # Velocity vector
                tt = atoms_time[ii] # Time of last trajectory change
                mm = atoms_masses[ii]
                # NOTE(review): kinetic energy would be 0.5*m*v^2; the 1/2
                # factor is omitted, but consistently, so conservation is
                # still visible in the totals.
                energy = mm*dot(vv,vv)
                tot_energy += energy
                print '=\t%8.3f %4d %6.2f %6.2f %6.2f %6.2f %6.2f %6.2f %6.2f'%(tt,ii,pp[0],pp[1],pp[2],vv[0],vv[1],vv[2],energy)
            print "======================================== Total Energy = ", tot_energy
        if CONF_PRINT_TEF:
            print "==== Future events table ========"
            for e in future:
                print "tef>", e
        # Pop the earliest event and process it.
        e = future.pop(0)
        print '<*> Event OrderNo #%d, id=%08x, at time=%f'%(i, id(e), e.t)
        if CONF_PLOT_CONTINUOUS_T:
            # Render frames at a fixed timestep up to the event time.
            while t < e.t:
                plot_state(t)
                t += CONF_PLOT_TIMESTEP
        if CONF_PLOT_DISCRETE_T:
            plot_state(e.t)
        e.execute()
    if CONF_MAKE_VIDEO:
        print
        os.chdir(DESTDIR)
        # Assemble the saved PNG frames into an AVI with mencoder.
        subprocess.Popen(["mencoder", "-o", "%04d.avi"%RUNNING_ID, "mf://*.png", "-ovc", "lavc"])
    if CONF_PLAY_VIDEO:
        subprocess.Popen(["mplayer", "-loop", "0", "%04d.avi"%RUNNING_ID])
    if CONF_OPEN_FOLDER:
        subprocess.Popen(["nautilus", "."])
    #~ print "===Memory usage==="
    #~ print hpy().heap() # Show current reachable heap
| Python |
#!/usr/bin/env python
# -*- encoding:UTF-8 -*-
# Event-driven hard-sphere simulation: atoms bounce elastically inside a
# spherical container; frames are rendered with matplotlib and optionally
# stitched into a video with mencoder.
from scipy import *
import bisect
import time, os, subprocess
from pylab import figure, show, rand
from matplotlib.patches import Ellipse
from matplotlib.pyplot import *
#~ from guppy import hpy # for memory profiling
# Some constants
epsilon = 1E-10  # tolerance for floating-point comparisons
RUNNING_ID = (int(time.time()) % 10000) # A 4-digit run id
DESTDIR = "../videos/%04d"%RUNNING_ID  # output folder for frames and video
VIDEO_PREFIX = "t%08.2f.png"  # frame file-name pattern, keyed by time
NUMBER_OF_ITERATIONS = 50  # number of events processed by the scheduler
# For each iteration
CONF_PRINT_TEF = 1 # Future events table
CONF_PRINT_STATE_VECTOR = 1 # System state vector (position, velocity, change_times)
CONF_PLOT_DISCRETE_T = 0 # Plots the system at collision times
# Between iterations
CONF_PLOT_CONTINUOUS_T = 1 # Plots the system at constant time rate (better for animation)
CONF_PLOT_TIMESTEP = 0.5
CONF_MAKE_VIDEO = 1
CONF_PLAY_VIDEO = 0
CONF_OPEN_FOLDER = 1
mycolorfunc = matplotlib.pyplot.cm.gist_rainbow  # per-atom color map
def load_vectors(filename):
    """Load the atom state table from *filename*.

    Each non-blank line must have at least 8 whitespace-separated columns:
    x y z  vx vy vz  radius  mass.

    Returns (N, pos, vel, radii, masses) where
        N:      number of atoms
        pos:    (N, 3) array of positions
        vel:    (N, 3) array of velocities
        radii:  (N,)   array of radii
        masses: (N,)   array of masses
    Exits the process (status 1) on a malformed line.
    """
    # Read all lines; the context manager closes the file even on error.
    with open(filename, "r") as fh:
        lines = fh.readlines()
    # Drop blank lines.  A list comprehension (rather than filter()) keeps
    # this working on Python 3 too, where filter() returns an iterator
    # that supports neither len() nor indexing.
    lines = [ln for ln in lines if not ln.isspace()]
    print(lines)
    # Allocate the state vectors.
    N = len(lines)
    nrows = N
    pos = zeros((nrows,3),dtype=double)
    vel = zeros((nrows,3),dtype=double)
    radii = zeros((nrows,),dtype=double)
    masses = zeros((nrows,), dtype=double)
    for r in range(nrows):
        row = lines[r].split()
        if len(row) < 8:
            print("Error: line#%d has %d columns" % (r, len(row)))
            exit(1)
        pos[r] = row[0:3]
        vel[r] = row[3:6]
        radii[r] = row[6]
        masses[r] = row[7]
    return (N, pos, vel, radii, masses)
# Initialise the atom state vectors from the input file
(number_of_atoms, atoms_pos, atoms_vel, atoms_radii, atoms_masses) = load_vectors("atoms.txt")
atoms_time = zeros(number_of_atoms)  # time of each atom's last trajectory change
container_radius = 11  # radius of the spherical container
all_atoms_radius = 1  # NOTE(review): appears unused here; per-atom radii come from the file
future = []  # time-sorted list of pending events
class event():
    """A scheduled simulation event: at time ``t`` call ``action(t, params)``.

    Events order by time so a list of them can be kept sorted with
    ``bisect.insort``.
    """
    #-------------------------------------------------------------------
    # Special methods
    #-------------------------------------------------------------------
    # in C++: constructor
    def __init__(self, t=0, action=None, params=None):
        # t: event time; action: callable(t, params); params: tuple of atom ids
        self.t, self.action, self.params = t, action, params
    # String representation; in C++: operator <<
    def __str__(self):
        return 'Event id=%08x t=%g: %s(%s)'%(id(self), self.t, self.action.__name__, self.params)
    # Time ordering.  __cmp__ covers Python 2; the rich comparisons below
    # are required on Python 3 (where __cmp__ is ignored) for bisect.insort
    # to work.  Returning a plain int instead of numpy's sign() (a float)
    # also satisfies the Python 2 cmp protocol, which expects an integer.
    def __cmp__(self, other):
        return (self.t > other.t) - (self.t < other.t)
    def __lt__(self, other):
        return self.t < other.t
    def __le__(self, other):
        return self.t <= other.t
    def __gt__(self, other):
        return self.t > other.t
    def __ge__(self, other):
        return self.t >= other.t
    #-------------------------------------------------------------------
    # Own methods
    #-------------------------------------------------------------------
    def execute(self):
        """Run the event's action, passing the event time and parameters."""
        self.action(self.t, self.params)
def estimate_collision_time_with_container(idAtom):
    """Return the absolute time at which atom *idAtom* reaches the container
    wall on its current trajectory, or -1 if no collision is possible
    (zero velocity, or the atom is not inside the container)."""
    p = atoms_pos[idAtom]       # position at last trajectory change
    v = atoms_vel[idAtom]       # current velocity
    t = atoms_time[idAtom]      # time of last trajectory change
    radius = atoms_radii[idAtom]
    # Solve |p + v*dt| = container_radius - radius for dt, i.e. the
    # quadratic (v.v) dt^2 + 2(p.v) dt + (p.p - (container_radius-radius)^2) = 0
    # (dot = Euclidean inner product)
    a = dot(v, v)
    b = 2 * dot(p, v)
    c = dot(p, p) - (container_radius - radius) ** 2
    det = b ** 2 - 4 * a * c
    if det <= epsilon:
        # No real root: the velocity is zero, or the atom is outside the
        # container (case A3) -- it may never hit the wall.
        return -1
    root = sqrt(det)
    # One root should be positive and the other non-positive; the exit
    # time is the larger one.
    delta_t = max((-b + root) / (2 * a), (-b - root) / (2 * a))
    return t + delta_t
def collide_with_container(t, params):
    # Event handler: atom params[0] bounces elastically off the spherical
    # container wall at absolute time t; updates the global state arrays
    # and reschedules the atom's future collisions.
    print "<<< Colliding with container " , params
    idAtom = params[0]
    p = atoms_pos[idAtom] # Position vector
    v = atoms_vel[idAtom] # Velocity vector
    last_t = atoms_time[idAtom] # Time of last trajectory change
    # New position and velocity: advance to the wall, then reflect the
    # velocity about the radial direction (the wall normal).
    p = p + v*(t-last_t)
    v = v - (2*dot(p,v)/dot(p,p))*p
    atoms_pos[idAtom] = p
    atoms_vel[idAtom] = v
    atoms_time[idAtom] = t
    #~ print '\n\nAt %f, atom #%d \npos=%s\nvel=%s'%(t,idAtom,str(p),str(v))
    #~ !print '\t%8.3f %4d %6.2f %6.2f %6.2f %6.2f %6.2f %6.2f'%(t,idAtom,p[0],p[1],p[2],v[0],v[1],v[2])
    # Purge pending events involving this atom.
    # (The loop below only reports them; the list rebuild that follows
    # does the actual removal.)
    for e in future:
        if (idAtom in e.params and e.t > t):
            print ">>> Removing ", e
    #		future2.remove(e)
    future[:] = [x for x in future if not( idAtom in x.params and x.t > t)]
    # Re-estimate collision times for the new trajectory and schedule them
    schedule_collision_events(idAtom)
def estimate_collision_time_with_atoms(idAtom, r=None):
    """Return a list of (time, partner_id) candidate collisions between
    atom *idAtom* and each atom in *r* (default: all atoms).

    Uses formula 14.2.2 from Rapaport (2004), p. 392: with relative
    position and velocity taken at a common reference time, a collision
    happens when the separation equals the sum of the two radii.
    """
    if r is None:
        # Late-bound default (the old ``r=range(number_of_atoms)`` was
        # evaluated once at import time): consider every atom.
        r = range(number_of_atoms)
    pi_ = atoms_pos[idAtom]  # position at last trajectory change
    vi = atoms_vel[idAtom]   # velocity
    ti = atoms_time[idAtom]  # time of last trajectory change
    possible_collisions = []
    for idAtom_partner in r:
        if idAtom_partner == idAtom:
            continue
        diam = atoms_radii[idAtom] + atoms_radii[idAtom_partner]
        pj_ = atoms_pos[idAtom_partner]  # partner position
        vj = atoms_vel[idAtom_partner]   # partner velocity
        tj = atoms_time[idAtom_partner]
        # Advance both atoms to a common reference time before solving.
        tref = max(ti, tj)
        pi = pi_ + vi*(tref-ti)
        pj = pj_ + vj*(tref-tj)
        # Relative position/velocity.  (Previously named ``r``/``v``,
        # which shadowed the parameter ``r`` inside its own loop.)
        rel = pi - pj
        vel = vi - vj
        b = dot(rel, vel)
        det = b*b - dot(vel, vel)*(dot(rel, rel) - diam**2)
        # b < 0 means the atoms are approaching (necessary condition per
        # the reference); it also guarantees vel is nonzero below.
        if (b < 0 and det >= 0):
            delta_t = (-b - sqrt(det))/dot(vel, vel)
            if (delta_t >= 0):
                possible_collisions.append( (tref+delta_t, idAtom_partner) )
    return possible_collisions
def collide_between_atoms(dest_t, atoms_pair):
    # Event handler: elastic collision between the two atoms in
    # *atoms_pair* at absolute time *dest_t*.
    print "<<< Colliding atoms...", atoms_pair
    id0, id1 = atoms_pair
    # NOTE: pp and vv hold numpy *views* into atoms_pos/atoms_vel, so the
    # in-place += / -= below write straight through to the global state
    # arrays -- that is why the explicit write-backs are commented out.
    pp = ([atoms_pos[id0],atoms_pos[id1]]) # Position vectors
    vv = ([atoms_vel[id0],atoms_vel[id1]]) # Velocity vectors
    tt = (atoms_time[id0],atoms_time[id1]) # Time of last trajectory change
    diam = atoms_radii[id0]+atoms_radii[id1]
    mm = (atoms_masses[id0], atoms_masses[id1])
    # New positions: advance each atom to the collision time.
    pp[0] += vv[0]*(dest_t - tt[0])
    pp[1] += vv[1]*(dest_t - tt[1])
    #~ print "@@", pp[0], pp[1]
    # New velocities: exchange momentum along the line of centers.
    r = pp[0]-pp[1]
    v = vv[0]-vv[1]
    b = dot(r,v)
    #~ det=dot(b,b)-dot(v,v)*(dot(r,r)-diam**2)
    #~ print "@@", vv[0], vv[1], v
    det=b**2-dot(v,v)*(dot(r,r)-diam**2)  # NOTE(review): computed but unused
    delta_v = (-b/diam**2)*r
    vv[0] += delta_v * 2*mm[1]/(mm[0]+mm[1])
    vv[1] -= delta_v * 2*mm[0]/(mm[0]+mm[1])
    #~ atoms_pos[id0] = pp[0]
    #~ atoms_pos[id1] = pp[1]
    #~ atoms_vel[id0],atoms_vel[id1] = vv
    atoms_time[id0],atoms_time[id1] = dest_t, dest_t
    # Purge pending events involving either atom
    # TODO: use an epsilon tolerance when comparing times
    for e in future:
        if ((id0 in e.params or id1 in e.params) and e.t > dest_t):
            print ">>> Removing ", e
    #	future.remove(e)
    future[:] = [x for x in future if not ((id0 in x.params or id1 in x.params) and x.t > dest_t)]
    #~ print '\n\nAt %f, atom #%d \npos=%s\nvel=%s'%(t,idAtom,str(p),str(v))
    for i in (0,1):
        #~ !print '\t%8.3f %4d %6.2f %6.2f %6.2f %6.2f %6.2f %6.2f'%(tt[i],atoms_pair[i],pp[i][0],pp[i][1],pp[i][2],vv[i][0],vv[i][1],vv[i][2])
        # Re-estimate collision times for the new trajectories and schedule
        schedule_collision_events(atoms_pair[i])
def schedule_collision_events(idAtom):
t = estimate_collision_time_with_container(idAtom)
if t > 0:
e = event(t, collide_with_container, (idAtom,))
print "%%% Scheduling ", e
bisect.insort(future, e)
collisions = estimate_collision_time_with_atoms(idAtom)
for (t,idPartner) in collisions:
e = event(t, collide_between_atoms,(idAtom, idPartner) )
print "%%% Scheduling ", e
bisect.insort(future, e)
def print_header():
    # Two-line column header for the per-atom state listing (no energy column).
    print '#Iter\t Time Atom Position Velocity '
    print '=====\t======== ==== ==================== ===================='
def print_header2():
    # Two-line column header for the per-atom state listing, with energy.
    print '#Iter\t Time Atom Position Velocity Energy'
    print '=====\t======== ==== ==================== ==================== ==========='
def plot_state(instant_t):
    '''Render every atom at simulation time ``instant_t`` and save the
    frame as a PNG under DESTDIR. Positions are extrapolated along each
    atom's current straight-line trajectory.'''
    fig = figure()
    ax = fig.add_subplot(111, aspect='equal')
    ax.set_xlim(-12, 12)
    ax.set_ylim(-12, 12)
    # The container circle, drawn faintly behind the atoms
    container = Ellipse(xy=(0,0), width=22, height=22)
    container.set_alpha(0.1)
    ax.add_artist(container)
    for idAtom in range(number_of_atoms):
        pp = atoms_pos[idAtom]  # Position vector
        v = atoms_vel[idAtom]   # Velocity vector
        t = atoms_time[idAtom]  # Time of last trajectory change
        p = pp +v*(instant_t-t)  # extrapolate to the requested instant
        d = 2*atoms_radii[idAtom]  # Diameter
        # Only x,y components are drawn (2-D projection of 3-D state)
        ax.add_artist(Ellipse(xy=p[0:2], width=d, height=d, color=mycolorfunc(1.0*idAtom/number_of_atoms)))
    fig.savefig(DESTDIR+"/"+VIDEO_PREFIX%instant_t)
    close(fig)  # release the figure: many frames are rendered per run
if __name__ == "__main__":
    #~ DESTDIR = "video/%04d"%RUNNING_ID
    # Create the per-run frame/video output directory
    os.makedirs(DESTDIR)
    # Load every atom with its initial collision estimates
    for idAtom in range(number_of_atoms):
        t = estimate_collision_time_with_container(idAtom)
        if t >= 0:
            e = event(t, collide_with_container, (idAtom,))
            bisect.insort(future, e)
            #~ future_events.append(e)
    # Pairwise estimates: only partners with a larger id are scanned,
    # so each unordered pair is considered exactly once
    for idAtom in range(number_of_atoms):
        collisions = estimate_collision_time_with_atoms(idAtom,range(idAtom+1,number_of_atoms))
        for (t,idPartner) in collisions:
            bisect.insort(future, event(t, collide_between_atoms,(idAtom, idPartner) ))
    # Run the scheduler
    t = 0
    plot_state(t)
    for i in range(NUMBER_OF_ITERATIONS):
        if CONF_PRINT_STATE_VECTOR:
            print "========================================"
            print_header2()
            tot_energy = 0.0
            for ii in range(number_of_atoms):
                pp = atoms_pos[ii]  # Position vector
                vv = atoms_vel[ii]  # Velocity vector
                tt = atoms_time[ii]  # Time of last trajectory change
                mm = atoms_masses[ii]
                # NOTE(review): printed "energy" is m*v^2 without the 1/2
                # factor — fine for monitoring conservation, not physical.
                energy = mm*dot(vv,vv)
                tot_energy += energy
                print '=\t%8.3f %4d %6.2f %6.2f %6.2f %6.2f %6.2f %6.2f %6.2f'%(tt,ii,pp[0],pp[1],pp[2],vv[0],vv[1],vv[2],energy)
            print "======================================== Total Energy = ", tot_energy
        if CONF_PRINT_TEF:
            print "==== Future events table ========"
            for e in future:
                print "tef>", e
        # Pop the earliest event and dispatch it
        e = future.pop(0)
        print '<*> Event OrderNo #%d, id=%08x, at time=%f'%(i, id(e), e.t)
        if CONF_PLOT_CONTINUOUS_T:
            # Render frames at a fixed timestep up to the event time
            while t < e.t:
                plot_state(t)
                t += CONF_PLOT_TIMESTEP
        if CONF_PLOT_DISCRETE_T:
            plot_state(e.t)
        e.execute()
    if CONF_MAKE_VIDEO:
        print
        os.chdir(DESTDIR)
        # Assemble the rendered PNG frames into an AVI
        subprocess.Popen(["mencoder", "-o", "%04d.avi"%RUNNING_ID, "mf://*.png", "-ovc", "lavc"])
    # assumes cwd is DESTDIR (set by the CONF_MAKE_VIDEO branch) — TODO
    # confirm original nesting; extraction stripped the indentation
    if CONF_PLAY_VIDEO:
        subprocess.Popen(["mplayer", "-loop", "0", "%04d.avi"%RUNNING_ID])
    if CONF_OPEN_FOLDER:
        subprocess.Popen(["nautilus", "."])
    #~ print "===Memory usage==="
    #~ print hpy().heap() # Show current reachable heap
| Python |
#!/usr/bin/env python
# -*- encoding:UTF-8 -*-
from scipy import *
import bisect
import time, os, subprocess
from pylab import figure, show, rand
from matplotlib.patches import Ellipse
from matplotlib.pyplot import *
#~ from guppy import hpy # for memory profiling
# Some constants
epsilon = 1E-10  # numerical tolerance for root/time comparisons
RUNNING_ID = (int(time.time()) % 10000) # A 4-digit id derived from the wall clock
DESTDIR = "../videos/%04d"%RUNNING_ID  # per-run output directory for frames/video
VIDEO_PREFIX = "t%08.2f.png"  # frame filename pattern, keyed by simulation time
NUMBER_OF_ITERATIONS = 50  # number of events processed by the scheduler
# For each iteration
CONF_PRINT_TEF = 1 # Future events table
CONF_PRINT_STATE_VECTOR = 1 # System state vector (position, velocity, change_times)
CONF_PLOT_DISCRETE_T = 0 # Plots the system at collision times
# Between iterations
CONF_PLOT_CONTINUOUS_T = 1 # Plots the system at constant time rate (better for animation)
CONF_PLOT_TIMESTEP = 0.5
CONF_MAKE_VIDEO = 1
CONF_PLAY_VIDEO = 0
CONF_OPEN_FOLDER = 1
mycolorfunc = matplotlib.pyplot.cm.gist_rainbow  # colormap used to tint atoms by id
def load_vectors(filename):
    """Load the initial atom state from a whitespace-separated text file.

    Each non-blank line describes one atom with at least 8 columns:
    x y z  vx vy vz  radius  mass.

    Returns (N, pos, vel, radii, masses):
      N: number of atoms
      pos: (N,3) position array
      vel: (N,3) velocity array
      radii: (N,) radius array
      masses: (N,) mass array

    Exits the process (exit(1)) on a malformed line.
    """
    # Read lines as a list
    fh = open(filename, "r")
    lines = fh.readlines()
    fh.close()
    # Filtering blank lines
    lines = filter(lambda x: not x.isspace(), lines)
    print lines
    # Initialization of the atom state arrays
    N = len(lines)
    nrows = N
    pos = zeros((nrows,3),dtype=double)
    vel = zeros((nrows,3),dtype=double)
    radii = zeros((nrows,),dtype=double)
    masses = zeros((nrows,), dtype=double)
    for r in range(nrows):
        row = lines[r].split()
        if len(row) < 8:
            print "Error: line#",r,"has", len(row), "columns"
            exit(1)
        # numpy coerces the string columns to float on assignment
        pos[r] = row[0:3]
        vel[r] = row[3:6]
        radii[r] = row[6]
        masses[r] = row[7]
    return (N, pos, vel, radii, masses)
# Module-level simulation state, shared by every function below
(number_of_atoms, atoms_pos, atoms_vel, atoms_radii, atoms_masses) = load_vectors("atoms.txt")
atoms_time = zeros(number_of_atoms)  # time of each atom's last trajectory change
container_radius = 11
all_atoms_radius = 1
future = []  # future-events list, kept ordered by time via bisect.insort
class event():
    '''A scheduled simulation event: at time ``t`` call ``action(t, params)``.

    Instances live in the module-level ``future`` list, kept ordered by
    time through ``bisect.insort``, which relies on ``__cmp__`` below.
    '''
    #-------------------------------------------------------------------
    # __special__ methods
    #-------------------------------------------------------------------
    # in C++: constructor
    def __init__(self, t=0, action=None, params=None):
        self.t, self.action, self.params = t, action, params
    # String representation
    # in C++: operator <<
    def __str__(self):
        return 'Event id=%08x t=%g: %s(%s)'%(id(self), self.t, self.action.__name__, self.params)
    # Comparison operator for ordering by time (Python 2 __cmp__ protocol)
    def __cmp__(self,other):
        return sign(self.t - other.t)
    #-------------------------------------------------------------------
    # Own methods
    #-------------------------------------------------------------------
    def execute(self):
        '''Invoke the event's action with the event time and parameters.'''
        #~ print "Calling %s(%s)..."%(self.action.__name__, self.params)
        self.action(self.t, self.params)
def estimate_collision_time_with_container(idAtom):
    '''Return the absolute time at which atom ``idAtom`` reaches the
    spherical container wall on its current trajectory, or -1 if no
    future collision exists (zero velocity or no real root).'''
    p = atoms_pos[idAtom]   # Position vector
    v = atoms_vel[idAtom]   # Velocity vector
    t = atoms_time[idAtom]  # Time of last trajectory change
    radius = atoms_radii[idAtom]
    # Coefficients of the quadratic equation
    # (p.x+v.x*t)^2 +(p.y+v.y*t)^2 +(p.z+v.z*t)^2 = (container_radius - atom_radius)^2
    # t^2 (v.x^2+vy^2+vz^2) + t 2(vx.px + vy.py + vz.pz) + (px^2+py^2+pz^2)-(container_radius-all_atoms_radius)^2 = 0
    # t^2 (v.v) + t (2.p.v) + (p.p-(container_radius-all_atoms_radius)^2) = 0
    # (dot = Euclidean inner product)
    a = dot(v,v)
    b = 2*dot(p,v)
    c = dot(p, p)-(container_radius-radius)**2
    det = b**2-4*a*c
    if det > epsilon:
        x1 = (-b + sqrt(det))/(2*a)
        x2 = (-b - sqrt(det))/(2*a)
        # One root should be positive and the other negative or zero
        #~ assert x1*x2 < epsilon # comparison with tolerance
        delta_t = max(x1,x2)
        return t + delta_t
    else:
        # If the velocity is 0 or the atom is not inside the container,
        # it may never collide (case A3)
        return -1
def collide_with_container(t, params):
    '''Event action: atom ``params[0]`` bounces off the container wall at
    time ``t``. Updates the atom's state in place, drops now-stale future
    events involving it, and reschedules its collisions.'''
    print "<<< Colliding with container " , params
    idAtom = params[0]
    p = atoms_pos[idAtom]       # Position vector
    v = atoms_vel[idAtom]       # Velocity vector
    last_t = atoms_time[idAtom] # Time of last trajectory change
    # New position and velocity: advance to impact, then reflect the
    # velocity about the radial contact normal (specular bounce)
    p = p + v*(t-last_t)
    v = v - (2*dot(p,v)/dot(p,p))*p
    atoms_pos[idAtom] = p
    atoms_vel[idAtom] = v
    atoms_time[idAtom] = t
    #~ print '\n\nAt %f, atom #%d \npos=%s\nvel=%s'%(t,idAtom,str(p),str(v))
    #~ !print '\t%8.3f %4d %6.2f %6.2f %6.2f %6.2f %6.2f %6.2f'%(t,idAtom,p[0],p[1],p[2],v[0],v[1],v[2])
    # Purge events
    # Remove events involving the current id; this loop only *reports* —
    # the actual removal is the list rebuild below
    for e in future:
        if (idAtom in e.params and e.t > t):
            print ">>> Removing ", e
            # future2.remove(e)
    future[:] = [x for x in future if not( idAtom in x.params and x.t > t)]
    # Estimate collision times on the new trajectory and schedule them
    schedule_collision_events(idAtom)
def estimate_collision_time_with_atoms(idAtom, r=None):
    '''Return a list of (time, partner_id) candidate collisions between
    atom ``idAtom`` and each atom in ``r`` (default: all atoms).

    Per Rapaport04, p392, formula 14.2.2: both atoms of a pair are
    advanced to a common reference time, then the quadratic hard-sphere
    contact condition is solved.

    Fixes: the original default ``r=range(number_of_atoms)`` was a
    mutable list evaluated once at import time; ``None`` now means "all
    atoms" and is resolved per call. The loop body also rebound ``r``
    (relative position) while iterating the parameter of the same name;
    the locals are renamed to avoid the shadowing.
    '''
    if r is None:
        r = range(number_of_atoms)
    pi_ = atoms_pos[idAtom]  # Position vector
    vi = atoms_vel[idAtom]   # Velocity vector
    ti = atoms_time[idAtom]  # Time of last trajectory change
    possible_collisions = []
    for idAtom_partner in r:
        if idAtom_partner==idAtom:
            continue  # an atom never collides with itself
        diam = atoms_radii[idAtom]+atoms_radii[idAtom_partner]
        pj_ = atoms_pos[idAtom_partner]  # Position vector
        vj = atoms_vel[idAtom_partner]   # Velocity vector
        tj = atoms_time[idAtom_partner]
        # Advance both atoms to the later of their trajectory-change times
        tref = max(ti,tj)
        pi = pi_ + vi*(tref-ti)
        pj = pj_ + vj*(tref-tj)
        rel_p = pi-pj  # relative position (was 'r', shadowing the parameter)
        rel_v = vi-vj  # relative velocity
        b = dot(rel_p,rel_v)
        det = b*b-dot(rel_v,rel_v)*(dot(rel_p,rel_p)-diam**2)
        if (b<0 and det>=0): # approaching, with a real contact root (per bibliography)
            delta_t = (-b-sqrt(det))/dot(rel_v,rel_v)
            if (delta_t>=0):
                possible_collisions.append( (tref+delta_t, idAtom_partner) )
    return possible_collisions
def collide_between_atoms(dest_t, atoms_pair):
    '''Event action: elastic hard-sphere collision between the two atoms
    in ``atoms_pair`` at time ``dest_t``.

    NOTE: ``pp`` and ``vv`` hold numpy row *views* into atoms_pos /
    atoms_vel, so the in-place ``+=`` / ``-=`` below mutate the global
    arrays directly — that is why the commented-out write-backs further
    down are unnecessary.'''
    print "<<< Colliding atoms...", atoms_pair
    id0, id1 = atoms_pair
    pp = ([atoms_pos[id0],atoms_pos[id1]]) # Position vectors (views)
    vv = ([atoms_vel[id0],atoms_vel[id1]]) # Velocity vectors (views)
    tt = (atoms_time[id0],atoms_time[id1]) # Time of last trajectory change
    diam = atoms_radii[id0]+atoms_radii[id1]
    mm = (atoms_masses[id0], atoms_masses[id1])
    # New position: advance each atom to the collision time (in place)
    pp[0] += vv[0]*(dest_t - tt[0])
    pp[1] += vv[1]*(dest_t - tt[1])
    #~ print "@@", pp[0], pp[1]
    # New velocity: momentum exchange along the contact normal
    r = pp[0]-pp[1]
    v = vv[0]-vv[1]
    b = dot(r,v)
    #~ det=dot(b,b)-dot(v,v)*(dot(r,r)-diam**2)
    #~ print "@@", vv[0], vv[1], v
    det=b**2-dot(v,v)*(dot(r,r)-diam**2)  # computed but not used below
    delta_v = (-b/diam**2)*r
    vv[0] += delta_v * 2*mm[1]/(mm[0]+mm[1])
    vv[1] -= delta_v * 2*mm[0]/(mm[0]+mm[1])
    #~ atoms_pos[id0] = pp[0]
    #~ atoms_pos[id1] = pp[1]
    #~ atoms_vel[id0],atoms_vel[id1] = vv
    atoms_time[id0],atoms_time[id1] = dest_t, dest_t
    # Remove now-stale future events involving either colliding atom;
    # the loop only reports, the rebuild below actually removes
    # TODO: Consider an epsilon in the time comparison
    for e in future:
        if ((id0 in e.params or id1 in e.params) and e.t > dest_t):
            print ">>> Removing ", e
            # future.remove(e)
    future[:] = [x for x in future if not ((id0 in x.params or id1 in x.params) and x.t > dest_t)]
    #~ print '\n\nAt %f, atom #%d \npos=%s\nvel=%s'%(t,idAtom,str(p),str(v))
    for i in (0,1):
        #~ !print '\t%8.3f %4d %6.2f %6.2f %6.2f %6.2f %6.2f %6.2f'%(tt[i],atoms_pair[i],pp[i][0],pp[i][1],pp[i][2],vv[i][0],vv[i][1],vv[i][2])
        # Estimate collision times on the new trajectories and schedule them
        schedule_collision_events(atoms_pair[i])
def schedule_collision_events(idAtom):
    '''Estimate and enqueue (into ``future``) the wall collision and all
    atom-atom collisions for atom ``idAtom`` on its current trajectory.'''
    t = estimate_collision_time_with_container(idAtom)
    # NOTE(review): ``t > 0`` here while the __main__ startup loop uses
    # ``t >= 0`` — confirm whether a t == 0 collision should be scheduled.
    if t > 0:
        e = event(t, collide_with_container, (idAtom,))
        print "%%% Scheduling ", e
        bisect.insort(future, e)
    collisions = estimate_collision_time_with_atoms(idAtom)
    for (t,idPartner) in collisions:
        e = event(t, collide_between_atoms,(idAtom, idPartner) )
        print "%%% Scheduling ", e
        bisect.insort(future, e)
def print_header():
print '#Iter\t Time Atom Position Velocity '
print '=====\t======== ==== ==================== ===================='
def print_header2():
print '#Iter\t Time Atom Position Velocity Energy'
print '=====\t======== ==== ==================== ==================== ==========='
def plot_state(instant_t):
    '''Render every atom at simulation time ``instant_t`` (positions are
    extrapolated along current trajectories) and save the frame as a PNG
    under DESTDIR.'''
    fig = figure()
    ax = fig.add_subplot(111, aspect='equal')
    ax.set_xlim(-12, 12)
    ax.set_ylim(-12, 12)
    # The container circle, drawn faintly behind the atoms
    container = Ellipse(xy=(0,0), width=22, height=22)
    container.set_alpha(0.1)
    ax.add_artist(container)
    for idAtom in range(number_of_atoms):
        pp = atoms_pos[idAtom]  # Position vector
        v = atoms_vel[idAtom]   # Velocity vector
        t = atoms_time[idAtom]  # Time of last trajectory change
        p = pp +v*(instant_t-t)  # extrapolate to the requested instant
        d = 2*atoms_radii[idAtom]  # Diameter
        # Only the x,y components are drawn (2-D projection)
        ax.add_artist(Ellipse(xy=p[0:2], width=d, height=d, color=mycolorfunc(1.0*idAtom/number_of_atoms)))
    fig.savefig(DESTDIR+"/"+VIDEO_PREFIX%instant_t)
    close(fig)  # release the figure: many frames are rendered per run
if __name__ == "__main__":
    #~ DESTDIR = "video/%04d"%RUNNING_ID
    # Create the per-run frame/video output directory
    os.makedirs(DESTDIR)
    # Load every atom with its initial collision estimates
    for idAtom in range(number_of_atoms):
        t = estimate_collision_time_with_container(idAtom)
        if t >= 0:
            e = event(t, collide_with_container, (idAtom,))
            bisect.insort(future, e)
            #~ future_events.append(e)
    # Pairwise estimates: only partners with a larger id are scanned,
    # so each unordered pair is considered exactly once
    for idAtom in range(number_of_atoms):
        collisions = estimate_collision_time_with_atoms(idAtom,range(idAtom+1,number_of_atoms))
        for (t,idPartner) in collisions:
            bisect.insort(future, event(t, collide_between_atoms,(idAtom, idPartner) ))
    # Run the scheduler
    t = 0
    plot_state(t)
    for i in range(NUMBER_OF_ITERATIONS):
        if CONF_PRINT_STATE_VECTOR:
            print "========================================"
            print_header2()
            tot_energy = 0.0
            for ii in range(number_of_atoms):
                pp = atoms_pos[ii]  # Position vector
                vv = atoms_vel[ii]  # Velocity vector
                tt = atoms_time[ii]  # Time of last trajectory change
                mm = atoms_masses[ii]
                # NOTE(review): printed "energy" is m*v^2 without the 1/2
                # factor — adequate for conservation checks, not physical.
                energy = mm*dot(vv,vv)
                tot_energy += energy
                print '=\t%8.3f %4d %6.2f %6.2f %6.2f %6.2f %6.2f %6.2f %6.2f'%(tt,ii,pp[0],pp[1],pp[2],vv[0],vv[1],vv[2],energy)
            print "======================================== Total Energy = ", tot_energy
        if CONF_PRINT_TEF:
            print "==== Future events table ========"
            for e in future:
                print "tef>", e
        # Pop the earliest event and dispatch it
        e = future.pop(0)
        print '<*> Event OrderNo #%d, id=%08x, at time=%f'%(i, id(e), e.t)
        if CONF_PLOT_CONTINUOUS_T:
            # Render frames at a fixed timestep up to the event time
            while t < e.t:
                plot_state(t)
                t += CONF_PLOT_TIMESTEP
        if CONF_PLOT_DISCRETE_T:
            plot_state(e.t)
        e.execute()
    if CONF_MAKE_VIDEO:
        print
        os.chdir(DESTDIR)
        # Assemble the rendered PNG frames into an AVI
        subprocess.Popen(["mencoder", "-o", "%04d.avi"%RUNNING_ID, "mf://*.png", "-ovc", "lavc"])
    # assumes cwd is DESTDIR (set by the CONF_MAKE_VIDEO branch) — TODO
    # confirm original nesting; extraction stripped the indentation
    if CONF_PLAY_VIDEO:
        subprocess.Popen(["mplayer", "-loop", "0", "%04d.avi"%RUNNING_ID])
    if CONF_OPEN_FOLDER:
        subprocess.Popen(["nautilus", "."])
    #~ print "===Memory usage==="
    #~ print hpy().heap() # Show current reachable heap
| Python |
#!/usr/bin/env python
# encoding: utf-8
#
# Copyright 2012 Greg Neagle.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
createOSXinstallPkg
Created by Greg Neagle on 2012-07-16.
"""
import sys
import os
import optparse
import plistlib
import shutil
import subprocess
import tempfile
from xml.dom import minidom
from xml.parsers.expat import ExpatError
DEBUG = False  # when True, cleanupPackage leaves failed output in place for inspection
DEFAULT_INSTALLKBYTES = 8 * 1024 * 1024  # advertised installed size: 8 GB, in KBytes
def cleanUp():
    '''Remove our TMPDIR working directory (best-effort).'''
    # TMPDIR is a module global; presumably assigned in main() outside this
    # chunk (e.g. via tempfile.mkdtemp) — confirm before relying on it.
    if TMPDIR:
        shutil.rmtree(TMPDIR, ignore_errors=True)
def fail(errmsg=''):
    '''Print any error message to stderr, clean up install data, and exit
    the process with status 1. Never returns.'''
    if errmsg:
        print >> sys.stderr, errmsg
    cleanUp()
    # exit
    exit(1)
def cleanupPackage(output_pkg_path):
    '''Best-effort removal of a partially-built output package after a
    failure; skipped when DEBUG is set so the wreckage can be inspected.'''
    if DEBUG:
        return
    try:
        shutil.rmtree(output_pkg_path)
    except EnvironmentError:
        pass
# dmg helpers
def mountdmg(dmgpath, use_shadow=False):
    """
    Attempts to mount the dmg at dmgpath and returns a tuple
    (mountpoints, shadowpath): the list of mount points (empty on
    failure) and the shadow file path (None when use_shadow is False).
    If use_shadow is true, mount image with shadow file so the
    read-only image can accept writes.
    """
    mountpoints = []
    dmgname = os.path.basename(dmgpath)
    # -mountRandom under TMPDIR keeps the mount private; -plist makes
    # hdiutil emit machine-readable output we can parse below
    cmd = ['/usr/bin/hdiutil', 'attach', dmgpath,
           '-mountRandom', TMPDIR, '-nobrowse', '-plist',
           '-owners', 'on']
    if use_shadow:
        shadowname = dmgname + '.shadow'
        shadowpath = os.path.join(TMPDIR, shadowname)
        cmd.extend(['-shadow', shadowpath])
    else:
        shadowpath = None
    proc = subprocess.Popen(cmd, bufsize=-1,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    (pliststr, err) = proc.communicate()
    if proc.returncode:
        print >> sys.stderr, 'Error: "%s" while mounting %s.' % (err, dmgname)
    if pliststr:
        # Parse hdiutil's plist output for the mount point(s)
        plist = plistlib.readPlistFromString(pliststr)
        for entity in plist['system-entities']:
            if 'mount-point' in entity:
                mountpoints.append(entity['mount-point'])
    return mountpoints, shadowpath
def unmountdmg(mountpoint):
    """
    Unmounts the dmg at mountpoint. Tries a polite detach first, then
    falls back to a forced detach; failures are reported to stderr only.
    """
    proc = subprocess.Popen(['/usr/bin/hdiutil', 'detach', mountpoint],
                            bufsize=-1, stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    (unused_output, err) = proc.communicate()
    if proc.returncode:
        print >> sys.stderr, 'Polite unmount failed: %s' % err
        print >> sys.stderr, 'Attempting to force unmount %s' % mountpoint
        # try forcing the unmount
        retcode = subprocess.call(['/usr/bin/hdiutil', 'detach', mountpoint,
                                   '-force'])
        if retcode:
            print >> sys.stderr, 'Failed to unmount %s' % mountpoint
def makePkgDirs(pkgpath):
    '''Makes the bundle-package directory skeleton for a package with
    full pathname "pkgpath". Aborts the run (via fail) if the path
    already exists or a directory cannot be created.'''
    if os.path.exists(pkgpath):
        fail('Package exists at %s' % pkgpath)
    try:
        # makedirs also creates the intermediate Contents/Resources dirs
        os.makedirs(os.path.join(pkgpath, 'Contents/Resources/en.lproj'))
        os.makedirs(os.path.join(pkgpath,
                                 'Contents/Resources/Mac OS X Install Data'))
    except OSError, err:
        fail('Error creating package directories at %s: %s'
             % (pkgpath, err))
def makeDescriptionPlist(output_pkg_path, os_version='10.7', build_number=None):
    '''Writes a Resources/en.lproj/Description.plist for the OS to be
    installed, choosing a marketing title per OS version family. Aborts
    the run (cleanup + fail) when the plist cannot be written.'''
    pkg_description = {}
    # Generic fallback for unrecognized versions
    title = 'Mac OS X'
    description = 'Unattended custom install of Mac OS X'
    if os_version.startswith('10.7'):
        title = 'Mac OS X Lion'
        description = 'Unattended custom install of Mac OS X Lion'
    elif os_version.startswith('10.8'):
        title = 'OS X Mountain Lion'
        description = 'Unattended custom install of OS X Mountain Lion.'
    description += ' version %s' % os_version
    if build_number:
        description += ' build %s' % build_number
    pkg_description['IFPkgDescriptionDescription'] = description
    pkg_description['IFPkgDescriptionTitle'] = title
    output_file = os.path.join(output_pkg_path,
                               'Contents/Resources/en.lproj/Description.plist')
    try:
        plistlib.writePlist(pkg_description, output_file)
    except (OSError, ExpatError), err:
        cleanupPackage(output_pkg_path)
        fail('Error creating file at %s: %s'
             % (output_file, err))
def makeInfoPlist(output_pkg_path, os_version='10.7', build_number=None,
                  pkg_id=None, installKBytes=DEFAULT_INSTALLKBYTES):
    '''Creates Contents/Info.plist for the package.

    When no pkg_id is supplied one is derived from the OS version.
    BUG FIX: the original chain was "if 10.7 / if 10.8 / else", so for a
    10.7 install the else branch overwrote the Lion id with the generic
    one; it is now a proper if/elif/else chain.

    Aborts the run (cleanup + fail) if the plist cannot be written.
    '''
    if not pkg_id:
        if os_version.startswith('10.7'):
            pkg_id = 'com.googlecode.munki.installlion.pkg'
        elif os_version.startswith('10.8'):
            pkg_id = 'com.googlecode.munki.installmountainlion.pkg'
        else:
            pkg_id = 'com.googlecode.munki.installosx.pkg'
    info = {
        'CFBundleIdentifier': pkg_id,
        'CFBundleShortVersionString': str(os_version),
        'IFMajorVersion': 1,
        'IFMinorVersion': 0,
        'IFPkgFlagDefaultLocation': '/tmp',
        'IFPkgFlagFollowLinks': True,
        'IFPkgFlagAuthorizationAction': 'RootAuthorization',
        'IFPkgFlagInstallFat': False,
        'IFPkgFlagInstalledSize': int(installKBytes),
        'IFPkgFlagIsRequired': False,
        'IFPkgFlagRestartAction': 'RequiredRestart',
        'IFPkgFlagRootVolumeOnly': False,
        'IFPkgFormatVersion': 0.10000000149011612
    }
    if build_number:
        info['CFBundleGetInfoString'] = (
            '%s Build %s' % (os_version, build_number))
    output_file = os.path.join(output_pkg_path, 'Contents/Info.plist')
    try:
        plistlib.writePlist(info, output_file)
    except (OSError, ExpatError) as err:
        cleanupPackage(output_pkg_path)
        fail('Error creating file at %s: %s'
             % (output_file, err))
def writefile(stringdata, path):
    '''Write *stringdata* to *path*, followed by a trailing newline
    (the same bytes the original ``print >> fileobject`` emitted).'''
    fileobject = open(path, mode='w', buffering=1)
    fileobject.write(str(stringdata) + '\n')
    fileobject.close()
def writePkgInfo(output_pkg_path):
    '''Creates the Contents/PkgInfo file with the bundle-package magic
    string. Aborts the run (cleanup + fail) on write failure.'''
    output_file = os.path.join(output_pkg_path, 'Contents/PkgInfo')
    try:
        # 'pkmkrpkg1' marks this as an Installer bundle package
        writefile('pkmkrpkg1', output_file)
    except (OSError, IOError), err:
        cleanupPackage(output_pkg_path)
        fail('Error creating file at %s: %s'
             % (output_file, err))
def write_package_version(output_pkg_path):
    '''Creates the Contents/Resources/package_version file (format
    version marker). Aborts the run (cleanup + fail) on write failure.'''
    output_file = os.path.join(output_pkg_path,
                               'Contents/Resources/package_version')
    try:
        writefile('major: 1\nminor: 0', output_file)
    except (OSError, IOError), err:
        cleanupPackage(output_pkg_path)
        fail('Error creating file at %s: %s'
             % (output_file, err))
def makeArchiveAndBom(output_pkg_path):
    '''Creates an empty Archive.pax.gz and Archive.bom file.

    The package payload is deliberately empty — it is built by archiving
    an empty directory — presumably because the real work happens in the
    postflight script (see copy_postflight_script); confirm against the
    project docs. Aborts the run (cleanup + fail) on any tool failure.'''
    emptydir = os.path.join(TMPDIR, 'EmptyDir')
    if os.path.exists(emptydir):
        try:
            os.rmdir(emptydir)
        except OSError, err:
            print >> sys.stderr, ('Existing dir at %s' % emptydir)
            exit(1)
    try:
        os.mkdir(emptydir)
    except OSError, err:
        cleanupPackage(output_pkg_path)
        fail('Can\'t create dir at %s' % emptydir)
    # Make an Archive.pax.gz of the contents of the empty directory
    archiveName = os.path.join(output_pkg_path, 'Contents/Archive.pax')
    # record our current working dir
    cwd = os.getcwd()
    # change into our EmptyDir so we can use pax to archive the
    # (non-existent) contents
    os.chdir(emptydir)
    try:
        subprocess.check_call(
            ['/bin/pax', '-w', '-x', 'cpio', '-f', archiveName, '.'])
    except subprocess.CalledProcessError, err:
        cleanupPackage(output_pkg_path)
        fail('Can\'t create archive at %s: %s'
             % (archiveName, err))
    # change working dir back
    os.chdir(cwd)
    try:
        subprocess.check_call(['/usr/bin/gzip', archiveName])
    except subprocess.CalledProcessError, err:
        cleanupPackage(output_pkg_path)
        fail('Can\'t gzip archive at %s: %s'
             % (archiveName, err))
    # now make a BOM file
    bomName = os.path.join(output_pkg_path, 'Contents/Archive.bom')
    try:
        subprocess.check_call(['/usr/bin/mkbom', emptydir, bomName])
    except subprocess.CalledProcessError, err:
        cleanupPackage(output_pkg_path)
        fail('Can\'t make BOM file at %s: %s'
             % (bomName, err))
    # best-effort removal of the scratch directory
    try:
        os.rmdir(emptydir)
    except OSError:
        pass
def getOSversionInfoFromDist(distfile):
    '''Gets osVersion and osBuildVersion if present in the dist file for
    OSXInstall.mpkg. Returns (osVersion, osBuildVersion); either element
    is None when absent, and (None, None) on a parse error.'''
    try:
        dom = minidom.parse(distfile)
    except ExpatError, err:
        print >> sys.stderr, 'Error parsing %s: %s' % (distfile, err)
        return None, None
    osVersion = None
    osBuildVersion = None
    # The version data lives on the <options> element's attributes
    elements = dom.getElementsByTagName('options')
    if len(elements):
        options = elements[0]
        if 'osVersion' in options.attributes.keys():
            osVersion = options.attributes['osVersion'].value
        if 'osBuildVersion' in options.attributes.keys():
            osBuildVersion = options.attributes['osBuildVersion'].value
    return osVersion, osBuildVersion
def getItemsFromDist(filename):
    '''Gets the title, script, installation-check and volume-check
    elements from an OSXInstall.mpkg distribution file.

    Returns a dict with keys 'title', 'script', 'installation_check',
    'volume_check' (missing elements map to ''), or None on parse error.
    Element values other than the title are re-serialized XML fragments
    (toprettyxml) so they can be spliced into a new distribution.'''
    try:
        dom = minidom.parse(filename)
    except ExpatError:
        print >> sys.stderr, 'Error parsing %s' % filename
        return None
    item_dict = {'title': '',
                 'script': '',
                 'installation_check': '',
                 'volume_check': ''}
    title_elements = dom.getElementsByTagName('title')
    if len(title_elements):
        item_dict['title'] = title_elements[0].firstChild.wholeText
    script_elements = dom.getElementsByTagName('script')
    if len(script_elements):
        item_dict['script'] = script_elements[0].toprettyxml()
    installation_check_elements = dom.getElementsByTagName('installation-check')
    if len(installation_check_elements):
        item_dict['installation_check'] = \
            installation_check_elements[0].toprettyxml()
    volume_check_elements = dom.getElementsByTagName('volume-check')
    if len(volume_check_elements):
        item_dict['volume_check'] = volume_check_elements[0].toprettyxml()
    return item_dict
def make_distribution(output_pkg_path, source_pkg_dist,
                      installKBytes=DEFAULT_INSTALLKBYTES):
    '''Makes a distribution file for the target package based on one
    from OSInstall.mpkg in the app or InstallESD.dmg.

    Splices the source dist's script/installation-check/volume-check
    fragments into a fixed template, disabling the command-line-install
    guard so the package can run via the CLI installer.

    BUG FIX: a stray apostrophe after the self-closing <options .../> tag
    leaked into the generated XML; it has been removed.

    Aborts the run (cleanup + fail) when the file cannot be written.
    '''
    item_dict = getItemsFromDist(source_pkg_dist)
    # disable the check for command line installs
    item_dict['script'] = item_dict['script'].replace(
        'system.env.COMMAND_LINE_INSTALL',
        'system.env.COMMAND_LINE_INSTALL_DISABLED')
    dist_header = ('<?xml version="1.0" encoding="utf-8"?>\n'
                   '<installer-script minSpecVersion="1.000000">\n')
    dist_title = '    <title>%s</title>' % item_dict.get('title', 'Mac OS X')
    dist_options = '''
    <options customize="never" allow-external-scripts="yes" rootVolumeOnly="false"/>
    '''
    dist_choices_outline = '''
    <choices-outline>
        <line choice='manual'/>
    </choices-outline>
    '''
    dist_choice_id = '''
    <choice id='manual'>
        <pkg-ref id='manual' auth='Root'>.</pkg-ref>
    </choice>
    '''
    dist_pkg_ref = '''
    <pkg-ref id='manual' installKBytes='%s' onConclusion='RequireRestart' version='1.0'/>''' % installKBytes
    dist_footer = '\n</installer-script>'
    dist = dist_header + dist_title + dist_options
    dist += item_dict['script']
    dist += '\n    ' + item_dict['installation_check']
    dist += '\n    ' + item_dict['volume_check']
    dist += dist_choices_outline + dist_choice_id + dist_pkg_ref + dist_footer
    output_file = os.path.join(output_pkg_path, 'Contents/distribution.dist')
    try:
        writefile(dist, output_file)
    except (OSError, IOError) as err:
        cleanupPackage(output_pkg_path)
        fail('Error creating file at %s: %s'
             % (output_file, err))
def copyLocalizedResources(pkgpath, source_pkg_resources):
    '''Copies Resources/English.lproj/*.strings to
    Contents/Resources/en.lproj of the target package so InstallCheck
    and VolumeCheck scripts can display meaningful error messages.
    Copy failures are reported to stderr but are not fatal.'''
    source_dir = os.path.join(source_pkg_resources, 'English.lproj')
    dest_dir = os.path.join(pkgpath, 'Contents/Resources/en.lproj')
    if os.path.isdir(source_dir) and os.path.isdir(dest_dir):
        for item in os.listdir(source_dir):
            if item.endswith('.strings'):
                itempath = os.path.join(source_dir, item)
                try:
                    shutil.copy(itempath, dest_dir)
                except IOError:
                    # not fatal, but warn anyway.
                    print >> sys.stderr, (
                        'Could not copy %s to %s'% (itempath, dest_dir))
def copy_postflight_script(output_pkg_path):
    '''Copies the postflight script into the output package and makes it
    executable. The script is looked for next to this file, then under a
    Resources/ sibling directory; if neither exists the run is aborted
    (cleanup + fail). Returns True on success.'''
    destination = os.path.join(output_pkg_path, 'Contents/Resources/postflight')
    mydir = os.path.dirname(os.path.abspath(__file__))
    postflight_script_name = 'installosxpkg_postflight'
    locations = [os.path.join(mydir, postflight_script_name),
                 os.path.join(mydir, 'Resources', postflight_script_name)]
    for location in locations:
        if os.path.exists(location):
            try:
                shutil.copy(location, destination)
                # make sure it's executable
                subprocess.check_call(
                    ['/bin/chmod', 'a+x', destination])
                return True
            except (OSError, subprocess.CalledProcessError), err:
                cleanupPackage(output_pkg_path)
                fail('Error with postflight script: %s' % err)
    # if we get here, we couldn't find the postflight script.
    cleanupPackage(output_pkg_path)
    fail('Could not find postflight script.')
def makePackage(output_pkg_path, expanded_osinstall_mpkg,
                os_version, build_number, pkg_id=None,
                installKBytes=DEFAULT_INSTALLKBYTES):
    '''Makes our output package by assembling all of its parts.

    Consistency fix/generalization: the advertised installed size was
    hard-coded as 8*1024*1024 in the make_distribution call while
    makeInfoPlist silently fell back to its own default; both now
    receive the same caller-controllable installKBytes (default
    unchanged: DEFAULT_INSTALLKBYTES == 8*1024*1024).
    '''
    makePkgDirs(output_pkg_path)
    makeArchiveAndBom(output_pkg_path)
    makeInfoPlist(output_pkg_path, os_version, build_number,
                  pkg_id=pkg_id, installKBytes=installKBytes)
    writePkgInfo(output_pkg_path)
    makeDescriptionPlist(output_pkg_path, os_version=os_version,
                         build_number=build_number)
    write_package_version(output_pkg_path)
    copy_postflight_script(output_pkg_path)
    # copy some items from OSInstall.mpkg
    dist_file = os.path.join(expanded_osinstall_mpkg, 'Distribution')
    make_distribution(output_pkg_path, dist_file, installKBytes=installKBytes)
    source_pkg_resources = os.path.join(expanded_osinstall_mpkg, 'Resources')
    copyLocalizedResources(output_pkg_path, source_pkg_resources)
def get_dir_size(some_dir):
    '''Return the total size in bytes of every regular file under
    *some_dir*, recursively. Very helpful in determining the size of
    bundle packages.'''
    return sum(
        os.path.getsize(os.path.join(dirpath, name))
        for dirpath, _subdirs, filenames in os.walk(some_dir)
        for name in filenames)
def get_size_of_all_packages(pkglist):
    '''Gets the total size in KBytes of all the extra packages;
    directories (bundle packages) are sized recursively.'''
    total_pkg_size = 0
    for item in pkglist:
        if os.path.isdir(item):
            total_pkg_size += get_dir_size(item)
        else:
            total_pkg_size += os.path.getsize(item)
    # NOTE: integer (floor) division under Python 2; under Python 3 this
    # '/' would produce a float.
    return total_pkg_size/1024
def get_available_free_space_in_dmg(some_dmg):
    '''Returns free disk space on some_dmg in KBytes, or -1 if the image
    could not be mounted. Mounts and unmounts the image as a side effect.'''
    (mountpoints, unused_shadowpath) = mountdmg(some_dmg)
    if mountpoints:
        stat = os.statvfs(mountpoints[0])
        # available blocks (non-root) * fundamental block size = bytes
        free = stat.f_bavail * stat.f_frsize
        unmountdmg(mountpoints[0])
        return int(free/1024)
    else:
        return -1
def expandOSInstallMpkg(osinstall_mpkg):
    '''Expands the flat OSInstall.mpkg into TMPDIR. We need the
    Distribution file and some .strings files from within. Returns the
    path to the expanded package; aborts the run (fail) if pkgutil
    cannot expand it.'''
    expanded_osinstall_mpkg = os.path.join(TMPDIR, 'OSInstall_mpkg')
    cmd = ['/usr/sbin/pkgutil', '--expand', osinstall_mpkg,
           expanded_osinstall_mpkg]
    try:
        subprocess.check_call(cmd)
    except subprocess.CalledProcessError:
        fail('Failed to expand %s' % osinstall_mpkg)
    return expanded_osinstall_mpkg
def downloadURL(URL, to_file=None):
    '''Downloads URL via curl. With to_file, saves there and returns the
    path; otherwise returns the body as a string. Returns None on any
    curl failure (error goes to stderr).'''
    cmd = ['/usr/bin/curl', '--silent', '--show-error', '--url', URL]
    if to_file:
        cmd.extend(['-o', to_file])
    proc = subprocess.Popen(cmd, shell=False, bufsize=-1,
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    (output, err) = proc.communicate()
    if proc.returncode:
        print >> sys.stderr, 'Error %s retrieving %s' % (proc.returncode, URL)
        print >> sys.stderr, err
        return None
    if to_file:
        return to_file
    else:
        return output
def findIncompatibleAppListPkgURL(catalog_url, package_name):
    '''Searches the SU catalog at catalog_url for a download URL for
    package_name. If there's more than one, returns the one with the
    most recent PostDate. Returns None when the catalog cannot be
    fetched/parsed or no match exists.

    Improvement: the original sorted the whole candidate list with a
    Python-2-only cmp function just to take element 0; a single-pass
    max() with a key is simpler and forward-compatible (cmp-style sort
    was removed in Python 3). Tie-breaking is unchanged: the first
    candidate among equal PostDates wins in both versions.
    '''
    catalog_str = downloadURL(catalog_url)
    try:
        catalog = plistlib.readPlistFromString(catalog_str)
    except ExpatError:
        print >> sys.stderr, 'Could not parse catalog!'
        return None
    product_list = []
    if 'Products' in catalog:
        for product_key in catalog['Products'].keys():
            product = catalog['Products'][product_key]
            for package in product.get('Packages', []):
                url = package.get('URL','')
                if url.endswith(package_name):
                    product_list.append({'PostDate': product['PostDate'],
                                         'URL': url})
    if product_list:
        newest = max(product_list, key=lambda item: item['PostDate'])
        return newest['URL']
    return None
def getPkgAndMakeIndexSproduct(destpath, os_vers='10.7'):
    '''Downloads the IncompatibleAppList package for the given OS
    version family into destpath and writes an index.sproduct plist
    describing it. Errors are reported to stderr and the function
    returns without raising.'''
    LION_PKGNAME = 'MacOS_10_7_IncompatibleAppList.pkg'
    LION_CATALOG_URL = ('http://swscan.apple.com/content/catalogs/others/'
                        'index-lion-snowleopard-leopard.merged-1.sucatalog')
    MTN_LION_PKGNAME = 'OSX_10_8_IncompatibleAppList.pkg'
    MTN_LION_CATALOG_URL = ('https://swscan.apple.com/content/catalogs/others/'
                            'index-mountainlion-lion-snowleopard-leopard'
                            '.merged-1.sucatalog')
    if os_vers.startswith('10.7'):
        catalog_url = LION_CATALOG_URL
        package_name = LION_PKGNAME
        os_vers = '10.7'
    elif os_vers.startswith('10.8'):
        catalog_url = MTN_LION_CATALOG_URL
        package_name = MTN_LION_PKGNAME
        os_vers = '10.8'
    else:
        print >> sys.stderr, 'Unsupported OS version!'
        return
    destpath = os.path.abspath(destpath)
    if not os.path.isdir(destpath):
        print >> sys.stderr, 'Directory %s doesn\'t exist!' % destpath
        return
    url = findIncompatibleAppListPkgURL(catalog_url, package_name)
    if url:
        package_path = os.path.join(destpath, package_name)
        print 'Downloading %s to %s...' % (url, package_path)
        package_path = downloadURL(url, to_file=package_path)
        if package_path and os.path.exists(package_path):
            # make index.sproduct
            pkg_info = {}
            pkg_info['Identifier'] = 'com.apple.pkg.CompatibilityUpdate'
            pkg_info['Size'] = int(os.path.getsize(package_path))
            pkg_info['URL'] = package_name
            #pkg_info['Version'] = os_vers
            # nope. Version is 10.7 even for ML (!)
            pkg_info['Version'] = '10.7'
            index_dict = {}
            index_dict['Packages'] = [pkg_info]
            plist_path = os.path.join(destpath, 'index.sproduct')
            print "Writing index.sproduct to %s..." % plist_path
            try:
                plistlib.writePlist(index_dict, plist_path)
            except OSError, err:
                print >> sys.stderr, 'Write error: %s' % err
        else:
            print >> sys.stderr, 'Couldn\'t download %s' % url
    else:
        print >> sys.stderr, 'Couldn\'t find IncompatibleAppList package.'
def makeEmptyInstallerChoiceChanges(output_pkg_path):
    '''Creates an empty MacOSXInstaller.choiceChanges file (an empty
    plist array) inside the package's "Mac OS X Install Data" directory.
    Write failures are reported to stderr but are not fatal.'''
    destpath = os.path.join(output_pkg_path,
                            'Contents/Resources/Mac OS X Install Data',
                            'MacOSXInstaller.choiceChanges')
    changes = []
    try:
        plistlib.writePlist(changes, destpath)
    except OSError, err:
        print >> sys.stderr, 'Error writing %s: %s' % (destpath, err)
class AddPackageError(Exception):
    '''Errors generated by addPackagesToInstallESD.'''
def addPackagesToInstallESD(installesd_dmg, packages, output_dmg_path):
'''Adds additional packages to the InstallESD.dmg and creates an
OSInstall.collection file for use by the installer. New dmg is
created at output_dmg_path'''
# generate OSInstall.collection pkg_array
# array needs OSInstall.mpkg twice at the beginning
# no idea why
pkg_array = ['/System/Installation/Packages/OSInstall.mpkg',
'/System/Installation/Packages/OSInstall.mpkg']
for pkg in packages:
pkgname = os.path.basename(pkg)
pkg_path = os.path.join('/System/Installation/Packages', pkgname)
pkg_array.append(pkg_path)
# mount InstallESD.dmg with shadow
print 'Mounting %s...' % installesd_dmg
mountpoints, shadowpath = mountdmg(installesd_dmg, use_shadow=True)
if not mountpoints:
raise AddPackageError('Nothing mounted from InstallESD.dmg')
# copy additional packages to Packages directory
mountpoint = mountpoints[0]
packages_dir = os.path.join(mountpoint, 'Packages')
print 'Copying additional packages to InstallESD/Packages/:'
try:
for pkg in packages:
if os.path.isdir(pkg):
destination = os.path.join(packages_dir, os.path.basename(pkg))
print ' Copying bundle package %s' % pkg
shutil.copytree(pkg, destination)
else:
print ' Copying flat package %s' % pkg
shutil.copy(pkg, packages_dir)
except IOError, err:
unmountdmg(mountpoint)
raise AddPackageError('Error %s copying packages to disk image' % err)
# create OSInstall.collection in Packages directory
osinstall_collection_path = os.path.join(
packages_dir, 'OSInstall.collection')
print "Creating %s" % osinstall_collection_path
try:
plistlib.writePlist(pkg_array, osinstall_collection_path)
except ExpatError:
raise AddPackageError('Error %s creating OSInstall.collection' % err)
# unmount InstallESD.dmg
print 'Unmounting %s...' % installesd_dmg
unmountdmg(mountpoint)
# convert InstallESD.dmg + shadow to UDZO image
print 'Creating disk image at %s...' % output_dmg_path
cmd = ['/usr/bin/hdiutil', 'convert', '-format', 'UDZO',
'-o', output_dmg_path, installesd_dmg, '-shadow', shadowpath]
try:
subprocess.check_call(cmd)
except subprocess.CalledProcessError, err:
raise AddPackageError(
'Failed to create %s at: %s' % (output_dmg_path, err))
# Scratch working directory for this run; created in main() via
# tempfile.mkdtemp
TMPDIR = None
def main():
'''Builds a custom package that installs OS X. You may specify additional
packages to install after the OS is installed'''
global TMPDIR
usage = ('Usage: %prog --source InstallOSX.app|InstallESD.dmg\n'
' [--pkg path/to/additional.pkg]\n'
' [--output path/to/InstallOSX.pkg]\n'
' [--identifier com.example.installosx.pkg]\n'
' [--plist path/to/config.plist]\n\n'
' %prog creates a customized Lion or Mountain Lion\n'
' installation package containing the contents of the original\n'
' InstallESD.dmg plus any additional packages provided. Additional\n'
' packages will be installed in the order you provide them at the\n'
' command-line.')
parser = optparse.OptionParser(usage=usage)
parser.add_option('--source', '-s',
help='Required unless specified via plist. Path to Install Mac '
'OS X Lion.app or Install OS X Mountain Lion.app or InstallESD.dmg')
parser.add_option('--pkg', '-p', action="append", dest='packages',
metavar='PACKAGE',
help='Optional. An addtional package to include for installation. '
'May be specified more than once.')
parser.add_option('--output', '-o', help='Optional. Path for output pkg. '
'Defaults to current working directory.')
parser.add_option('--identifier', '--id',
help='Optional. Package identifier for the package. Defaults to '
'"com.googlecode.munki.install(mountain)lion.pkg"')
parser.add_option('--plist', help='Optional. Path to an XML plist file '
'containing key/value pairs for Source, Output, Packages, and '
'Identifier.')
options, arguments = parser.parse_args()
# check to see if we're root
# need to be root to copy things into the DMG with the right
# ownership and permissions
if os.geteuid() != 0:
print >> sys.stderr, 'You must run this as root, or via sudo!'
exit(-1)
plist_options = {}
if options.plist:
try:
plist_options = plistlib.readPlist(options.plist)
except (ExpatError, IOError), err:
fail('Could not read %s: %s' % (options.plist, err))
if not options.source and not 'Source' in plist_options:
print >> sys.stderr, ('ERROR: Must have --source option!')
parser.print_usage()
exit(1)
TMPDIR = tempfile.mkdtemp(dir='/tmp')
source = options.source or plist_options.get('Source')
source = source.rstrip('/')
if source.endswith('.app'):
if not os.path.isdir(source):
fail('%s doesn\'t exist or isn\'t an app!' % source)
installesd_dmg = os.path.join(
source, 'Contents/SharedSupport/InstallESD.dmg')
osinstall_mpkg = os.path.join(
source, 'Contents/SharedSupport/OSInstall.mpkg')
if (not os.path.exists(installesd_dmg) or
not os.path.exists(osinstall_mpkg)):
fail('%s doesn\'t appear to be an OS X installer application!'
% source)
elif source.endswith('.dmg'):
installesd_dmg = source
if not os.path.exists(installesd_dmg):
fail('%s doesn\'t exist!' % installesd_dmg)
else:
fail('Unknown/unsupported source: %s' % source)
# get some needed info from the disk image
print 'Verifying source...'
mountpoints, unused_shadowpath = mountdmg(installesd_dmg)
if not mountpoints:
fail('Could not mount diskimage %s' % installesd_dmg)
mountpoint = mountpoints[0]
osinstall_mpkg = os.path.join(mountpoint, 'Packages/OSInstall.mpkg')
if not os.path.exists(osinstall_mpkg):
unmountdmg(mountpoint)
fail('Missing OSInstall.mpkg in %s'% source)
expanded_osinstall_mpkg = expandOSInstallMpkg(osinstall_mpkg)
distfile = os.path.join(expanded_osinstall_mpkg, 'Distribution')
system_version_plist = os.path.join(
mountpoint, 'System/Library/CoreServices/SystemVersion.plist')
try:
version_info = plistlib.readPlist(system_version_plist)
except (ExpatError, IOError), err:
unmountdmg(mountpoint)
fail('Could not read %s: %s' % (system_version_plist, err))
unmountdmg(mountpoint)
os_version = version_info.get('ProductUserVisibleVersion')
build_number = version_info.get('ProductBuildVersion')
if os_version is None or build_number is None:
fail('Missing OS version or build info in %s' % system_version_plist)
# Things we have now that we need:
# installesd_dmg: path to the InstallESD.dmg file
# os_version: string like '10.7.4'
# build_number: string like '11E53'
# expanded_osinstall_mpkg: path to unflattened OSInstall.mpkg
# distfile: path to Distribution file in OSInstall.mpkg
print '----------------------------------------------------------------'
print 'InstallESD.dmg: %s' % installesd_dmg
print 'OS Version: %s' % os_version
print 'OS Build: %s' % build_number
if DEBUG:
print 'expanded_osinstall_mpkg: %s' % expanded_osinstall_mpkg
print 'distfile: %s' % distfile
print '----------------------------------------------------------------'
# Figure out where we will be writing this...
custom_tag = ''
additional_packages = options.packages or plist_options.get('Packages')
if additional_packages:
custom_tag = '_custom'
# get rid of trailing slashes which often result from dragging
# and dropping from the Finder into the Terminal
additional_packages = [item.rstrip('/') for item in additional_packages]
pkgname = 'InstallOSX_%s_%s%s.pkg' % (os_version, build_number, custom_tag)
output_pkg_path = os.path.abspath(os.path.join('.', pkgname))
output = options.output or plist_options.get('Output')
if output:
if output.endswith('.pkg'):
# we've been given a full path including the package name
output_pkg_path = os.path.abspath(output)
else:
# it better be a pre-existing directory
if not os.path.isdir(output):
fail(
'Directory %s not found!' % output)
else:
output_pkg_path = os.path.abspath(
os.path.join(output, pkgname))
if os.path.exists(output_pkg_path):
fail('%s already exists!' % output_pkg_path)
# now we have an output path
print 'Output package path: %s' % output_pkg_path
if additional_packages:
# make sure they all exist and look like packages
print 'Additional packages:'
print '----------------------------------------------------------------'
for pkg in additional_packages:
if (not pkg.endswith('.pkg') and not pkg.endswith('.mpkg')):
fail('%s doesn\'t look like a package!' % pkg)
if not os.path.exists(pkg):
fail('Package %s not found!' % pkg)
print os.path.basename(pkg)
print '----------------------------------------------------------------'
total_package_size = get_size_of_all_packages(additional_packages)
print 'Total additional package size: %s Kbytes' % total_package_size
print '----------------------------------------------------------------'
# make sure we have enough space on DMG
print 'Checking available space on %s...' % installesd_dmg
available_disk_space = get_available_free_space_in_dmg(installesd_dmg)
if available_disk_space == -1:
fail('Could not mount disk image %s' % installesd_dmg)
if total_package_size + 100 > available_disk_space:
print >> sys.stderr, (
'Not enough space to copy all packages to InstallESD.dmg!')
print >> sys.stderr, (
'Available space: %s KBytes' % available_disk_space)
fail()
pkg_id = (options.identifier or plist_options.get('Identifier'))
# we have everything we need.
print 'Creating package wrapper...'
makePackage(output_pkg_path, expanded_osinstall_mpkg,
os_version, build_number, pkg_id=pkg_id)
# if we have any failures after this point, we should be sure to
# clean up the broken package as well as our TMPDIR
print 'Creating MacOSXInstaller.choiceChanges...'
makeEmptyInstallerChoiceChanges(output_pkg_path)
print '----------------------------------------------------------------'
print 'Downloading and adding IncompatibleAppList pkg...'
destpath = os.path.join(output_pkg_path,
'Contents/Resources/Mac OS X Install Data')
getPkgAndMakeIndexSproduct(destpath, os_vers=os_version)
print '----------------------------------------------------------------'
print 'Copying InstallESD into package...'
output_dmg_path = os.path.join(output_pkg_path,
'Contents/Resources/InstallESD.dmg')
if additional_packages:
try:
addPackagesToInstallESD(
installesd_dmg, additional_packages, output_dmg_path)
except AddPackageError, err:
cleanupPackage(output_pkg_path)
fail(err)
else:
try:
shutil.copy(installesd_dmg, output_dmg_path)
except OSError, err:
cleanupPackage(output_pkg_path)
fail('Copy error: %e' % err)
print '----------------------------------------------------------------'
print 'Done! Completed package at: %s' % output_pkg_path
cleanUp()
# standard script entry point guard
if __name__ == '__main__':
    main()
| Python |
#!/usr/bin/python
# encoding: utf-8
#
# Copyright 2011 Greg Neagle.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''
InstallLion.pkg postflight script
Created 01 Sep 2011 by Greg Neagle
Updated July 2012 for Mountain Lion and FileVault-protected volumes
Updated December 2012 for other CoreStorage volumes (like Fusion disks)
Sets up a Lion/Mountain Lion install.
This is intended to be run as a package postflight script.'''
import datetime
import os
import plistlib
import shutil
import subprocess
import sys
import tempfile
import urllib2
from xml.parsers.expat import ExpatError
# Name of the install-data staging directory created on the target volume
INSTALL_DATA_DIR_NAME = 'Mac OS X Install Data'
# URL-quoted form, for use inside file:// URLs (see the Kernel Flags
# written by createBootPlist)
ENCODED_INSTALL_DATA_DIR_NAME = urllib2.quote(INSTALL_DATA_DIR_NAME)
def cleanupFromFailAndExit(errmsg=''):
    '''Report errmsg (if any) to stderr, remove any partially-created
    install data from the target volume, and exit non-zero.'''
    if errmsg:
        print >> sys.stderr, errmsg
    # sys.argv[3] is the target volume path handed to package postflight
    # scripts by the Installer
    target_volume = sys.argv[3]
    install_data = os.path.join(target_volume, INSTALL_DATA_DIR_NAME)
    if os.path.exists(install_data):
        # best-effort removal of our install data
        shutil.rmtree(install_data, ignore_errors=True)
    exit(1)
# dmg helpers
def mountdmg(dmgpath, use_shadow=False):
    """
    Attempts to mount the dmg at dmgpath
    and returns a list of mountpoints
    If use_shadow is true, mount image with shadow file
    (note: the shadow path is not returned to the caller)
    """
    mountpoints = []
    dmgname = os.path.basename(dmgpath)
    # -mountRandom /tmp: mount at a random directory under /tmp
    # -nobrowse: keep the volume out of the Finder
    # -plist: emit machine-parseable output on stdout
    cmd = ['/usr/bin/hdiutil', 'attach', dmgpath,
           '-mountRandom', '/tmp', '-nobrowse', '-plist']
    if use_shadow:
        # bare -shadow: hdiutil chooses the shadow file location itself
        # (presumably next to the image -- see hdiutil docs)
        cmd.append('-shadow')
    proc = subprocess.Popen(cmd,
                            bufsize=-1, stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    (pliststr, err) = proc.communicate()
    if proc.returncode:
        print >> sys.stderr, (
            'Error: "%s" while mounting %s.' % (str(err).rstrip(), dmgname))
    if pliststr:
        # collect every mount point hdiutil reports for the attached
        # system entities
        try:
            plist = plistlib.readPlistFromString(pliststr)
            for entity in plist.get('system-entities', []):
                if 'mount-point' in entity:
                    mountpoints.append(entity['mount-point'])
        except ExpatError:
            print >> sys.stderr, (
                'Bad plist string returned when mounting diskimage %s:\n%s'
                % (dmgname, pliststr))
    return mountpoints
def unmountdmg(mountpoint):
    """Detaches the disk image mounted at mountpoint, escalating to a
    forced detach if the polite one fails."""
    detach_cmd = ['/usr/bin/hdiutil', 'detach', mountpoint]
    proc = subprocess.Popen(detach_cmd,
                            bufsize=-1, stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    (unused_output, err) = proc.communicate()
    if proc.returncode:
        print >> sys.stderr, 'Polite unmount failed: %s' % err
        print >> sys.stderr, 'Attempting to force unmount %s' % mountpoint
        # try forcing the unmount
        if subprocess.call(detach_cmd + ['-force']):
            print >> sys.stderr, 'Failed to unmount %s' % mountpoint
class Error(Exception):
    '''Base class for exceptions specific to this script.'''
class CmdError(Error):
    '''A command exited with a nonzero return code.'''
class PlistParseError(Error):
    '''Command output could not be parsed as a plist.'''
def getPlistFromCmd(cmd):
    '''Runs cmd and parses its stdout as a plist, which is returned.

    Raises CmdError if the command exits nonzero, PlistParseError if
    the output is not a valid plist.'''
    proc = subprocess.Popen(
        cmd, shell=False, stdin=subprocess.PIPE,
        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = proc.communicate()
    if proc.returncode:
        raise CmdError((proc.returncode, stderr))
    try:
        return plistlib.readPlistFromString(stdout)
    except ExpatError:
        raise PlistParseError(stdout)
def getVolumeInfo(disk_id):
    '''Returns a dict of `diskutil info` data for disk_id, or None on
    failure. disk_id may be a mount point, disk identifier, device node,
    or UUID.'''
    cmd = ['/usr/sbin/diskutil', 'info', '-plist', disk_id]
    try:
        return getPlistFromCmd(cmd)
    except Error:
        return None
def findPhysicalVolumeDeviceIdentifiers(volumepath):
    '''Given the mountpath, deviceid or UUID of a CoreStorage volume,
    return the deviceids of the physical devices.

    Returns an empty list if the volume is not CoreStorage or the
    information cannot be determined.'''
    try:
        cs_vol_info = getPlistFromCmd(
            ['/usr/sbin/diskutil', 'cs', 'info', '-plist', volumepath])
    except CmdError:
        # diskutil cs info returns error if volume is not CoreStorage
        return []
    logical_volume_group_uuid = cs_vol_info[
        'MemberOfCoreStorageLogicalVolumeGroup']
    try:
        cs_list = getPlistFromCmd(
            ['/usr/sbin/diskutil', 'cs', 'list', '-plist'])
    except CmdError:
        # diskutil cs list returns error if there are no CoreStorage
        # volumes at all
        return []
    for volume_group in cs_list.get('CoreStorageLogicalVolumeGroups', []):
        if volume_group.get('CoreStorageUUID') == logical_volume_group_uuid:
            # found our logical volume group; resolve each physical
            # volume UUID to its device identifier
            physical_volume_uuids = [item['CoreStorageUUID'] for item in
                volume_group.get('CoreStoragePhysicalVolumes', [])]
            pv_device_ids = []
            for pv in physical_volume_uuids:
                try:
                    vol_info = getPlistFromCmd(
                        ['/usr/sbin/diskutil', 'cs', 'info', '-plist', pv])
                except CmdError:
                    vol_info = {}
                pv_device_ids.append(vol_info.get('DeviceIdentifier'))
            return pv_device_ids
    # BUG FIX: previously fell off the end and returned None when no
    # matching logical volume group was found; callers iterate/test
    # membership on the result and expect a list
    return []
def findEmptyAppleBootPartitionsForCSvolume(volumepath):
    '''Given the path to a non-bootable CoreStorage volume,
    find the physical volume device ids that are most likely to
    be used for the Apple_Boot partitions.

    Returns a list of Apple_Boot DeviceIdentifiers (possibly empty).'''
    pvs = findPhysicalVolumeDeviceIdentifiers(volumepath)
    try:
        all_disk_info = getPlistFromCmd(
            ['/usr/sbin/diskutil', 'list', '-plist'])
    except Error:
        return []
    # find Apple_Boot partitions
    apple_boot_identifiers = []
    disk_partition_info = all_disk_info.get('AllDisksAndPartitions', [])
    for disk in disk_partition_info:
        for partition in disk.get('Partitions', []):
            if partition.get('Content') == 'Apple_Boot':
                if 'DeviceIdentifier' in partition:
                    apple_boot_identifiers.append(partition['DeviceIdentifier'])
    # For each physical volume of the CoreStorage volume, look at the
    # identifiers immediately after and before it in diskutil's AllDisks
    # list; if a neighbor is an unclaimed Apple_Boot partition, assume it
    # is that physical volume's helper partition.
    found_apple_boots = []
    all_disks = all_disk_info.get('AllDisks', [])
    for index in range(len(all_disks)):
        if all_disks[index] in pvs:
            nextone = index + 1
            previous = index - 1
            for partition in [nextone, previous]:
                # 'partition in range(...)' also rejects previous == -1
                if partition in range(len(all_disks)):
                    if (all_disks[partition] in apple_boot_identifiers
                            and not all_disks[partition] in found_apple_boots):
                        found_apple_boots.append(all_disks[partition])
                        break
    return found_apple_boots
def getCoreStorageStatus(volumepath):
    '''Returns one of: 'Unknown', 'Not CoreStorage', 'Not encrypted',
    'Encryption pending', 'Encrypting', 'Encrypted', 'Decrypting',
    'Decrypted' '''
    try:
        csinfo_plist = getPlistFromCmd(
            ['/usr/sbin/diskutil', 'cs', 'info', '-plist', volumepath])
    except CmdError:
        # diskutil cs info returns error if volume is not CoreStorage
        return 'Not CoreStorage'
    except PlistParseError:
        return 'Unknown'
    conversion_state = csinfo_plist.get(
        'CoreStorageLogicalVolumeConversionState')
    encryption_state = 'Unknown'
    # the encryption type lives on the logical volume family record,
    # so look that up by its UUID
    lvfUUID = csinfo_plist.get('MemberOfCoreStorageLogicalVolumeFamily')
    if lvfUUID:
        try:
            lvf_info_plist = getPlistFromCmd(
                ['/usr/sbin/diskutil', 'cs', 'info', '-plist', lvfUUID])
        except Error:
            lvf_info_plist = {}
        encryption_type = lvf_info_plist.get(
            'CoreStorageLogicalVolumeFamilyEncryptionType')
        if encryption_type == 'AES-XTS':
            # volume is, or is becoming, encrypted
            if conversion_state == 'Pending':
                encryption_state = 'Encryption pending'
            elif conversion_state == 'Converting':
                encryption_state = 'Encrypting'
            elif conversion_state == 'Complete':
                encryption_state = 'Encrypted'
        elif encryption_type == 'None':
            # volume is, or is becoming, unencrypted
            if conversion_state == 'Converting':
                encryption_state = 'Decrypting'
            elif conversion_state == 'Complete':
                encryption_state = 'Decrypted'
            else:
                encryption_state = 'Not encrypted'
    return encryption_state
def getAppleBootPartitions():
    '''Returns a list of DeviceIdentifiers (diskXsY) of partitions
    that are of type Apple_Boot.'''
    try:
        all_disk_info = getPlistFromCmd(
            ['/usr/sbin/diskutil', 'list', '-plist'])
    except Error:
        return []
    # scan every partition of every disk for Apple_Boot content type
    return [partition['DeviceIdentifier']
            for disk in all_disk_info.get('AllDisksAndPartitions', [])
            for partition in disk.get('Partitions', [])
            if partition.get('Content') == 'Apple_Boot'
            and 'DeviceIdentifier' in partition]
def getRPSdir(mountpoint):
    '''Returns the active com.apple.Boot.X directory from the helper
    partition mounted at mountpoint, or None if none exist.'''
    #
    # for boot != root, boot info is stored in the Apple_Boot partition
    # in one of three directories:
    # com.apple.boot.R, com.apple.boot.P, or com.apple.boot.S
    # These are the "Rock, Paper, Scissors" directories.
    # See "FindRPSDir" in http://opensource.apple.com/source/
    # kext_tools/kext_tools-117.4/update_boot.c
    #
    present = {}
    for letter in ('R', 'P', 'S'):
        candidate = os.path.join(mountpoint, 'com.apple.boot.%s' % letter)
        if os.path.exists(candidate):
            present[letter] = candidate
    # Which directory "wins" for each combination that exists:
    # all three -> R (Apple code picks R); R+P -> P; R+S -> R; P+S -> S;
    # a single directory wins by default.
    winner_for = {
        frozenset('RPS'): 'R',
        frozenset('RP'): 'P',
        frozenset('RS'): 'R',
        frozenset('PS'): 'S',
        frozenset('R'): 'R',
        frozenset('P'): 'P',
        frozenset('S'): 'S',
    }
    winner = winner_for.get(frozenset(present))
    if winner is None:
        return None
    return present[winner]
def mountHelperPartitionHidden(deviceIdentifier):
    '''Mounts an Apple_Boot partition so that it does not
    show up in the Finder. Returns the path to the mountpoint,
    or None on failure.'''
    volumeinfo = getVolumeInfo(deviceIdentifier)
    # BUG FIX: getVolumeInfo returns None on failure; the original then
    # raised AttributeError on .get() instead of failing gracefully
    if not volumeinfo:
        print >> sys.stderr, (
            'Could not get volume info for %s' % deviceIdentifier)
        return None
    # is it already mounted?
    if volumeinfo.get('MountPoint'):
        return volumeinfo['MountPoint']
    # not currently mounted; let's mount it hidden
    mountpoint = tempfile.mkdtemp(dir='/tmp')
    device = os.path.join('/dev', deviceIdentifier)
    try:
        # we use mount instead of diskutil to mount the disk
        # so we can hide it from any users (via the nobrowse option)
        subprocess.check_call(
            ['/sbin/mount', '-t', 'hfs', '-o', 'nobrowse', device, mountpoint])
        return mountpoint
    except subprocess.CalledProcessError, err:
        # couldn't mount it; clean up our temporary mountpoint dir
        print >> sys.stderr, 'Could not mount %s: %s' % (deviceIdentifier, err)
        os.rmdir(mountpoint)
        return None
def unmountVolume(mountpoint):
    '''Unmounts the volume at mountpoint via diskutil and removes the
    mountpoint directory. Returns True on success, False otherwise.'''
    try:
        subprocess.check_call(
            ['/usr/sbin/diskutil', 'unmount', mountpoint],
            stdout=subprocess.PIPE)
        # remove the (now empty) mountpoint dir if it still exists
        if os.path.isdir(mountpoint):
            os.rmdir(mountpoint)
        return True
    except subprocess.CalledProcessError, err:
        # diskutil could not unmount the volume
        print >> sys.stderr, 'Could not unmount %s: %s' % (mountpoint, err)
        return False
    except OSError, err:
        # rmdir (or launching diskutil) failed
        print >> sys.stderr, 'Could not remove %s: %s' % (mountpoint, err)
        return False
def getBootPlistRootUUID(deviceIdentifier):
    '''Looks for a com.apple.Boot.plist file on an Apple_Boot
    partition; returns its Root UUID, or None if the partition could
    not be mounted or the plist is missing/unreadable.'''
    result = None
    mountpoint = mountHelperPartitionHidden(deviceIdentifier)
    if mountpoint:
        # active com.apple.Boot.plist could be in one of three directories.
        # find the right one.
        RPSdir = getRPSdir(mountpoint)
        if RPSdir:
            boot_plist_file = os.path.join(
                RPSdir,
                'Library/Preferences/SystemConfiguration/'
                'com.apple.Boot.plist')
            if os.path.exists(boot_plist_file):
                try:
                    boot_plist = plistlib.readPlist(boot_plist_file)
                except ExpatError:
                    # unreadable plist -- treat as if it had no Root UUID
                    print 'Bad plist at %s' % boot_plist_file
                    boot_plist = {}
                result = boot_plist.get('Root UUID')
            else:
                print 'No plist at %s' % boot_plist_file
        else:
            # no com.apple.boot.[RPS] dir found; dump the partition's
            # contents to aid debugging
            print 'Apple_Boot partition %s debug info: %s' % (deviceIdentifier,
                os.listdir(mountpoint))
        unused_result = unmountVolume(mountpoint)
    return result
def findBootHelperPartitions(target_volume_path):
    '''Attempts to find the Apple_Boot partition that acts as the boot helper
    partition for the target_volume_path.

    Returns a list of Apple_Boot DeviceIdentifiers (empty for a plain
    volume that needs no helper), or None on error or unsupported
    configuration.'''
    helper_partitions = []
    apple_boot_partitions = getAppleBootPartitions()
    disk_info = getVolumeInfo(target_volume_path)
    if not disk_info:
        print >> sys.stderr, (
            'Could not get disk info for %s' % target_volume_path)
        return None
    # check if target volume is an Apple_RAID volume
    if 'RAIDSetUUID' in disk_info:
        # volume appears to be an AppleRAID volume.
        # its helpers are the Apple_Boot partitions living on the same
        # whole disks as the RAID set members
        raid_members = disk_info.get('RAIDSetMembers', [])
        for member_uuid in raid_members:
            # NOTE(review): member_info may be None if getVolumeInfo
            # failed for this member -- .get() would then raise
            member_info = getVolumeInfo(member_uuid)
            parent_disk = member_info.get('ParentWholeDisk', None)
            if parent_disk:
                helper_partitions.extend(
                    [partition for partition in apple_boot_partitions
                     if partition.startswith(parent_disk)])
    else:
        cs_state = getCoreStorageStatus(target_volume_path)
        if cs_state == 'Not CoreStorage':
            # do nothing with this volume
            pass
        elif cs_state in ['Encrypted', 'Not encrypted']:
            # CoreStorage volume (FileVault, Fusion?)
            # match helpers whose com.apple.Boot.plist Root UUID points
            # at this volume
            vol_UUID = disk_info.get('VolumeUUID')
            if not vol_UUID:
                print >> sys.stderr, (
                    'Could not get VolumeUUID for %s' % target_volume_path)
                return None
            for device_id in apple_boot_partitions:
                if getBootPlistRootUUID(device_id) == vol_UUID:
                    helper_partitions.append(device_id)
            if not helper_partitions:
                # fall back to positional matching for empty/new volumes
                helper_partitions = findEmptyAppleBootPartitionsForCSvolume(
                    target_volume_path)
            if not helper_partitions:
                print >> sys.stderr, 'Did not find a boot helper partition!'
                return None
        else:
            # volume is mid-encryption/decryption (or unknown state)
            print >> sys.stderr, (
                'Unsupported CoreStorage state of %s for: %s' %
                (cs_state, target_volume_path))
            return None
    return helper_partitions
def createBootPlist(install_data_path):
'''Creates the com.apple.Boot.plist file'''
# Example com.apple.Boot.plist:
#
# <?xml version="1.0" encoding="UTF-8"?>
# <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN"
# "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
# <plist version="1.0">
# <dict>
# <key>Kernel Cache</key>
# <string>/Mac OS X Install Data/kernelcache</string>
# <key>Kernel Flags</key>
# <string>
# container-dmg=file:///Mac%20OS%20X%20Install%20Data/InstallESD.dmg
# root-dmg=file:///BaseSystem.dmg</string>
# </dict>
# </plist>
boot_pl = {}
boot_pl['Kernel Cache'] = '/%s/kernelcache' % INSTALL_DATA_DIR_NAME
boot_pl['Kernel Flags'] = (
'container-dmg=file://localhost/%s/InstallESD.dmg '
'root-dmg=file://localhost/BaseSystem.dmg'
% ENCODED_INSTALL_DATA_DIR_NAME)
try:
plistlib.writePlist(
boot_pl, os.path.join(install_data_path, 'com.apple.Boot.plist'))
except (IOError, ExpatError), err:
cleanupFromFailAndExit(
'Failed when creating com.apple.Boot.plist: %s' % err)
def create_minstallconfig(resources_path, installvolumepath,
                          installvolinfo, language='en', custompackages=False):
    '''Creates and writes our automated installation file
    (minstallconfig.xml), the plist that drives an unattended install.

    resources_path: path holding our '<INSTALL_DATA_DIR_NAME>' resources
    installvolumepath: mount path of the target volume
    installvolinfo: dict with at least 'VolumeName' and 'VolumeUUID'
    language: installer UI language code
    custompackages: True when an OSInstall.collection (with extra
        packages) should be used instead of plain OSInstall.mpkg

    Exits via cleanupFromFailAndExit on write failure.'''
    install_data_path = os.path.join(installvolumepath, INSTALL_DATA_DIR_NAME)
    config = {'InstallType': 'automated',
              'Language': language}
    # do we have a choiceChanges file? if so, stage it alongside and
    # reference it from the config
    choiceChangesFile = os.path.join(
        resources_path, INSTALL_DATA_DIR_NAME,
        'MacOSXInstaller.choiceChanges')
    if os.path.exists(choiceChangesFile):
        shutil.copy(choiceChangesFile, install_data_path)
        config['ChoiceChanges'] = 'MacOSXInstaller.choiceChanges'
    if custompackages:
        pkgpath = '/System/Installation/Packages/OSInstall.collection'
    else:
        pkgpath = '/System/Installation/Packages/OSInstall.mpkg'
    config['Package'] = pkgpath
    # add install volume info
    config['Target'] = installvolumepath
    config['TargetName'] = installvolinfo['VolumeName']
    config['TargetUUID'] = installvolinfo['VolumeUUID']
    # now write it out
    minstallconfig_path = os.path.join(install_data_path, 'minstallconfig.xml')
    try:
        plistlib.writePlist(config, minstallconfig_path)
    except (IOError, ExpatError), err:
        cleanupFromFailAndExit(
            'Failed when creating minstallconfig.xml: %s' % err)
def create_index_sproduct(resources_path, install_data_path):
    '''Copies or creates index.sproduct file and any packages
    it lists.

    index.sproduct lists additional signed (and therefore flat) packages
    to install (e.g. the IncompatibleAppList package); each entry has
    Identifier, Size, URL (a filename relative to the install data dir),
    and Version keys. If our resources contain one, copy it plus every
    package it references; otherwise write an empty one.

    Exits via cleanupFromFailAndExit on failure.'''
    index_sproduct_file = os.path.join(
        resources_path, INSTALL_DATA_DIR_NAME, 'index.sproduct')
    if os.path.exists(index_sproduct_file):
        # now copy all the packages it references
        # NOTE(review): a readPlist parse failure (ExpatError) is not
        # handled here and would propagate
        index_pl = plistlib.readPlist(index_sproduct_file)
        for package in index_pl.get('Packages', []):
            try:
                pkgpath = os.path.join(
                    resources_path, INSTALL_DATA_DIR_NAME, package['URL'])
                shutil.copy(pkgpath, install_data_path)
            except (KeyError, IOError), err:
                cleanupFromFailAndExit(
                    'Failed when copying signed packages: %s' % err)
        try:
            shutil.copy(index_sproduct_file, install_data_path)
        except IOError, err:
            cleanupFromFailAndExit(
                'Failed when copying index.sproduct: %s' % err)
    else:
        # write an empty index.sproduct
        index_pl = {}
        index_pl['Packages'] = []
        try:
            index_sproduct_path = os.path.join(
                install_data_path, 'index.sproduct')
            plistlib.writePlist(index_pl, index_sproduct_path)
        except (IOError, ExpatError), err:
            cleanupFromFailAndExit(
                'Failed when creating index.sproduct: %s' % err)
def create_osinstallattr_plist(installvolinfo, install_data_path):
'''Creates the OSInstallAttr.plist file'''
# Lion installer consults OSInstallAttr.plist to make sure it hasn't been
# too long since the Lion install environment was created; it skips the
# automation file if it deems it "too old".
# This file also provides the path to the install automation file.
# Example OSInstallAttr.plist:
#
# <?xml version="1.0" encoding="UTF-8"?>
# <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN"
# "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
# <plist version="1.0">
# <dict>
# <key>IAEndDate</key>
# <date>2011-08-31T21:09:49Z</date>
# <key>IALogFile</key>
# <string>/Volumes/foo/Mac OS X Install Data/ia.log</string>
# <key>OSIAutomationFile</key>
# <string>/Volumes/foo/Mac OS X Install Data/minstallconfig.xml</string>
# </dict>
# </plist>
#
now = datetime.datetime.utcnow()
attr = {}
attr['IAEndDate'] = now
attr['IALogFile'] = ('/Volumes/%s/%s/ia.log'
% (installvolinfo['VolumeName'], INSTALL_DATA_DIR_NAME))
attr['OSIAutomationFile'] = (
'/Volumes/%s/%s/minstallconfig.xml'
% (installvolinfo['VolumeName'], INSTALL_DATA_DIR_NAME))
try:
attr_path = os.path.join(install_data_path, 'OSInstallAttr.plist')
plistlib.writePlist(attr, attr_path)
except (IOError, ExpatError), err:
cleanupFromFailAndExit(
'Failed when creating OSInstallAttr.plist: %s' % err)
def cacheBaseSystemData(install_data_path):
'''Caches data from the BaseSystem.dmg we use to set up boot helper
partitions for Apple_RAID volumes.'''
install_dmg = os.path.join(install_data_path, 'InstallESD.dmg')
print 'Mounting %s...' % install_dmg
mountpoints = mountdmg(install_dmg)
if not mountpoints:
print >> sys.stderr, 'Nothing mounted from InstallESD.dmg'
return False
installesd_mountpoint = mountpoints[0]
base_dmg = os.path.join(installesd_mountpoint, 'BaseSystem.dmg')
print 'Mounting %s...' % base_dmg
mountpoints = mountdmg(base_dmg)
if not mountpoints:
unmountdmg(installesd_mountpoint)
print >> sys.stderr, 'Nothing mounted from BaseSystem.dmg'
return False
basedmg_mountpoint = mountpoints[0]
# we need:
# System/Library/CoreServices/PlatformSupport.plist
# System/Library/CoreServices/SystemVersion.plist
# boot.efi should already be in install_data_path
#
# usr/standalone/i386/EfiLoginUI
boothelperdatapath = os.path.join(install_data_path, 'boot_helper_data')
if not os.path.exists(boothelperdatapath):
try:
os.mkdir(boothelperdatapath)
except OSError, err:
unmountdmg(basedmg_mountpoint)
unmountdmg(installesd_mountpoint)
print >> sys.stderr, (
'Could not create %s: %s' % (boothelperdatapath, err))
return False
platform_support_plist = os.path.join(
basedmg_mountpoint, 'System/Library/CoreServices/PlatformSupport.plist')
system_version_plist = os.path.join(
basedmg_mountpoint, 'System/Library/CoreServices/SystemVersion.plist')
efilogindata = os.path.join(
basedmg_mountpoint, 'usr/standalone/i386/EfiLoginUI')
for item in [platform_support_plist, system_version_plist, efilogindata]:
try:
if os.path.isdir(item):
destination = os.path.join(
boothelperdatapath, os.path.basename(item))
shutil.copytree(item, destination)
else:
shutil.copy(item, boothelperdatapath)
except OSError, err:
unmountdmg(basedmg_mountpoint)
unmountdmg(installesd_mountpoint)
print >> sys.stderr, (
'Error copying %s to %s: %s' % (item, boothelperdatapath, err))
return False
#clean up and return True for success
unmountdmg(basedmg_mountpoint)
unmountdmg(installesd_mountpoint)
return True
def setupHelperPartition(mountpoint, install_vol_path, install_data_path):
'''If we are installing OS X to a new, empty Apple_RAID volume, or a new,
empty CoreStorage volume, the boot helper partition will not be set up for
us. We have to do it ourselves'''
print 'Setting up helper partition...'
boothelperdatapath = os.path.join(install_data_path, 'boot_helper_data')
if not os.path.exists(boothelperdatapath):
success = cacheBaseSystemData(install_data_path)
if not success:
unused_result = unmountVolume(mountpoint)
cleanupFromFailAndExit(
'Could not cache data from BaseSystem.dmg')
helper_root = os.path.join(mountpoint, 'System/Library/CoreServices')
if not os.path.exists(helper_root):
try:
os.makedirs(helper_root)
except OSError, err:
unused_result = unmountVolume(mountpoint)
cleanupFromFailAndExit(
'Could not create %s on boot helper partition: %s'
% (helper_root, err))
bootefi = os.path.join(install_data_path, 'boot.efi')
platform_support_plist = os.path.join(
boothelperdatapath, 'PlatformSupport.plist')
system_version_plist = os.path.join(
boothelperdatapath, 'SystemVersion.plist')
for item in [bootefi, platform_support_plist, system_version_plist]:
try:
shutil.copy(item, helper_root)
except OSError, err:
unused_result = unmountVolume(mountpoint)
cleanupFromFailAndExit(
'Error copying %s to %s: %s' % (item, helper_root, err))
RPSdir = os.path.join(mountpoint, 'com.apple.boot.R')
usrstandalonedir = os.path.join(
RPSdir, 'usr/standalone/i386')
kernelcachedir = os.path.join(
RPSdir, 'System/Library/Caches/com.apple.kext.caches/Startup')
bootpdir = os.path.join(
RPSdir, 'Library/Preferences/SystemConfiguration')
for directory in [usrstandalonedir, kernelcachedir, bootpdir]:
if not os.path.exists(directory):
try:
os.makedirs(directory)
except OSError, err:
unused_result = unmountVolume(mountpoint)
cleanupFromFailAndExit(
'Could not create %s: %s' % (directory, err))
efilogindata = os.path.join(boothelperdatapath, 'EfiLoginUI')
try:
usrstandaloneefidir = os.path.join(usrstandalonedir, 'EfiLoginUI')
shutil.copytree(efilogindata, usrstandaloneefidir)
except OSError, err:
unused_result = unmountVolume(mountpoint)
cleanupFromFailAndExit(
'Could not copy %s: %s' % (efilogindata, err))
try:
volume_uuid = getVolumeInfo(install_vol_path)['VolumeUUID']
except AttributeError:
unused_result = unmountVolume(mountpoint)
cleanupFromFailAndExit(
'Missing VolumeUUID attribute for %s' % install_vol_path)
boot_plist = {}
boot_plist['Root UUID'] = volume_uuid
boot_plist_file = os.path.join(bootpdir, 'com.apple.Boot.plist')
try:
plistlib.writePlist(boot_plist, boot_plist_file)
except (IOError, ExpatError), err:
unused_result = unmountVolume(mountpoint)
cleanupFromFailAndExit(
'Failed when creating com.apple.Boot.plist: %s' % err)
def updateHelperPartitions(install_vol_path, install_data_path):
'''Used with a CoreStorage or Apple_RAID boot disk --
updates the Apple_Boot helper partition to use the OS X Installer files'''
print 'Looking for helper partitions...'
helper_partitions = findBootHelperPartitions(install_vol_path)
if not helper_partitions:
cleanupFromFailAndExit(
'Could not find any boot helper partitions for %s'
% install_vol_path)
for helper_partition in helper_partitions:
print 'Mounting %s to update helper partition' % helper_partition
mountpoint = mountHelperPartitionHidden(helper_partition)
# update com.apple.Boot.plist
RPSdir = getRPSdir(mountpoint)
if not RPSdir:
# perhaps this helper partition has never been set up
# so do it manually!
setupHelperPartition(
mountpoint, install_vol_path, install_data_path)
RPSdir = os.path.join(mountpoint, 'com.apple.boot.R')
boot_plist_file = os.path.join(
RPSdir,
'Library/Preferences/SystemConfiguration/'
'com.apple.Boot.plist')
if os.path.exists(boot_plist_file):
try:
boot_plist = plistlib.readPlist(boot_plist_file)
except ExpatError:
unused_result = unmountVolume(mountpoint)
cleanupFromFailAndExit(
'Bad com.apple.Boot.plist at %s' % boot_plist_file)
new_boot_plist = {}
new_boot_plist['Kernel Flags'] = (
'container-dmg=file://localhost/%s/InstallESD.dmg '
'root-dmg=file://localhost/BaseSystem.dmg'
% ENCODED_INSTALL_DATA_DIR_NAME)
try:
new_boot_plist['Root UUID'] = boot_plist['Root UUID']
except AttributeError:
# something has gone horribly wrong
cleanupFromFailAndExit(
'com.apple.Boot.plist is missing \'Root UUID\' attribute!')
try:
plistlib.writePlist(new_boot_plist, boot_plist_file)
except (IOError, ExpatError), err:
unused_result = unmountVolume(mountpoint)
cleanupFromFailAndExit(
'Failed when updating com.apple.Boot.plist: %s' % err)
# copy kernelcache to helper partition
kernelcache = os.path.join(install_data_path, 'kernelcache')
dest_path = os.path.join(RPSdir,
'System/Library/Caches/com.apple.kext.caches/Startup')
try:
print "Copying kernelcache to helper partition"
shutil.copy(kernelcache, dest_path)
except IOError, err:
unused_result = unmountVolume(mountpoint)
cleanupFromFailAndExit('Failed when copying kernelcache: %s' % err)
# unmount the helper partition
unused_result = unmountVolume(mountpoint)
# we are done updating the boot helper partitions.
# we could remove the kernelcache from the target volume's install data
# now, but we won't bother
# main
def main():
'''Our main routine'''
# get args passed to us from the Installer
try:
packagepath = sys.argv[1]
installvolumepath = sys.argv[3]
except IndexError:
cleanupFromFailAndExit('Missing runtime parameters from installer.')
# need this info a few places, so get it now
installvolinfo = getVolumeInfo(installvolumepath)
target_volume_is_corestorage_or_raid = False
# check the install volume to see if it's CoreStorage
cs_state = getCoreStorageStatus(installvolumepath)
if cs_state in ['Encrypted', 'Not encrypted']:
target_volume_is_corestorage_or_raid = True
# make sure we can find the Apple_Boot helper partition before
# we continue
helper_partitions = findBootHelperPartitions(installvolumepath)
if not helper_partitions:
cleanupFromFailAndExit(
'Cannot find a Recovery partition set as a boot helper for '
'CoreStorage volume %s. Cannot continue.' % installvolumepath)
print ('%s appears to be a CoreStorage volume.'
% installvolumepath)
elif cs_state == 'Not CoreStorage':
target_volume_is_corestorage_or_raid = False
else:
# volume is being converted to or from Core Storage
# we should not install now.
cleanupFromFailAndExit(
'Cannot install to CoreStorage volume %s in the middle of '
'conversion. Current state is: %s.\nPlease wait for conversion '
'to complete, restart, and try again.'
% (installvolumepath, cs_state))
# now check if target volume is an Apple_RAID volume
if 'RAIDSetUUID' in installvolinfo:
print '%s appears to be an AppleRAID volume.' % installvolumepath
target_volume_is_corestorage_or_raid = True
# find our resources
resources_path = os.path.join(packagepath, "Contents", "Resources")
install_dmg = os.path.join(resources_path, 'InstallESD.dmg')
if not os.path.exists(install_dmg):
# look in Resources/Mac OS X Install Data/ in case the
# admin put it there
install_dmg = os.path.join(
resources_path, INSTALL_DATA_DIR_NAME, 'InstallESD.dmg')
if not os.path.exists(install_dmg):
cleanupFromFailAndExit(
'Missing InstallESD.dmg in package resources.')
# prep volume for install. Create a directory for the install data on the
# target volume.
install_data_path = os.path.join(installvolumepath, INSTALL_DATA_DIR_NAME)
if os.path.exists(install_data_path):
print '%s already exists on %s. Reusing it.' % (INSTALL_DATA_DIR_NAME,
installvolumepath)
else:
print 'Creating %s...' % install_data_path
try:
os.mkdir(install_data_path)
except OSError, err:
msg = ('Could not create \'%s\' directory on %s:\n%s'
% (INSTALL_DATA_DIR_NAME, installvolumepath, err))
cleanupFromFailAndExit(msg)
# mount the InstallESD.dmg
print 'Mounting %s...' % install_dmg
mountpoints = mountdmg(install_dmg)
if not mountpoints:
cleanupFromFailAndExit('Nothing mounted from InstallESD.dmg')
mountpoint = mountpoints[0]
# copy kernelcache and boot.efi from root of dmg
# to install_data_path
kernelcache = os.path.join(mountpoint, 'kernelcache')
if not os.path.exists(kernelcache):
unmountdmg(mountpoint)
cleanupFromFailAndExit('kernelcache missing from InstallESD.dmg')
bootefi = os.path.join(mountpoint, 'boot.efi')
if not os.path.exists(kernelcache):
unmountdmg(mountpoint)
cleanupFromFailAndExit('boot.efi missing from InstallESD.dmg')
try:
print 'Copying kernelcache and boot.efi to %s...' % install_data_path
shutil.copy(kernelcache, install_data_path)
shutil.copy(bootefi, install_data_path)
except IOError, err:
unmountdmg(mountpoint)
cleanupFromFailAndExit('Could not copy needed resources: %s' % err)
# while we have the DMG mounted, let's check to see if the install has been
# customized with additional packages
osinstallcollection = os.path.join(mountpoint,
'Packages/OSInstall.collection')
# are we installing additional packages after the OS install?
custompackages_state = os.path.exists(osinstallcollection)
print 'Customized OS install found: %s' % custompackages_state
# unmount the InstallESD.dmg
print 'Unmounting %s...' % install_dmg
unmountdmg(mountpoint)
# either copy or link the dmg into place in install_data_path
dest_path = os.path.join(install_data_path, 'InstallESD.dmg')
try:
print 'Attempting to link %s to %s...' % (install_dmg, dest_path)
os.link(install_dmg, dest_path)
except OSError:
# couldn't link, so try to copy
try:
print ('Link not possible. Copying %s to %s...'
% (install_dmg, dest_path))
shutil.copy(install_dmg, dest_path)
except OSError, err:
cleanupFromFailAndExit(
'Could not copy InstallESD.dmg to %s: %s' %
(install_data_path, err))
if not target_volume_is_corestorage_or_raid:
# create and write com.apple.Boot.plist file in install_data_path
print 'Creating com.apple.Boot.plist at %s...' % install_data_path
createBootPlist(install_data_path)
# We have everything in place now to boot from the dmg.
# Next we need to set up items so the install kicks off automatically
# minstallconfig.xml
# this is info used by the Installer for an automated install
print 'Creating minstallconfig.xml at %s...' % install_data_path
create_minstallconfig(resources_path, installvolumepath,
installvolinfo, language='en', custompackages=custompackages_state)
# index.sproduct
# this contains a list of additional signed (and therefore _flat_)
# packages to install.
print 'Creating index.sproduct at %s...' % install_data_path
create_index_sproduct(resources_path, install_data_path)
# OSInstallAttr.plist
# Lion installer consults this to make sure it hasn't been too long since
# the Lion install environment was created; it skips the automation file if
# it deems it "too old".
print 'Creating OSInstallAttr.plist at %s...' % install_data_path
create_osinstallattr_plist(installvolinfo, install_data_path)
# All files are in place. Before we reboot we must set an nvram variable and
# bless our OS X installer files
# nvram
install_product_url = 'install-product-url=x-osproduct://'
install_product_url += installvolinfo['VolumeUUID']
install_product_url += urllib2.quote('/%s' % ENCODED_INSTALL_DATA_DIR_NAME)
print 'Setting OS X installer NVRAM install-product-url variable...'
try:
subprocess.check_call(['/usr/sbin/nvram', install_product_url])
except subprocess.CalledProcessError, err:
cleanupFromFailAndExit('Couldn\'t set nvram: %s' % err)
# bless our OS X install boot environment
folder = install_data_path
bootefi = os.path.join(install_data_path, 'boot.efi')
label = 'Mac OS X Installer'
cmd = ['/usr/sbin/bless', '--setBoot', '--folder', folder,
'--bootefi', bootefi, '--label', label]
if not target_volume_is_corestorage_or_raid:
options = ['--options',
'config="\%s\com.apple.Boot"' % INSTALL_DATA_DIR_NAME]
cmd.extend(options)
print 'Blessing OS X installer boot environment in %s...' % folder
try:
subprocess.check_call(cmd)
except subprocess.CalledProcessError, err:
cleanupFromFailAndExit(
'Failed to bless OS X installer for startup: %s' % err)
if target_volume_is_corestorage_or_raid:
# more work to do!
# we need to update the Recovery or Boot OS X partitions
updateHelperPartitions(installvolumepath, install_data_path)
# all that's left now is to restart!
print 'Setup for OS X install is complete.'
print 'Please restart immediately to complete installation of OS X.'
exit(0)
if __name__ == '__main__':
    # Run only when executed directly (main() reads its arguments from
    # sys.argv as passed by the Installer).
    main()
| Python |
# SCons build script: builds the 'feedread' binary from main.cpp, linking
# against libxml2 and libcurl, with strict warning flags.
# NOTE(review): env is created but never used; Program() runs against the
# implicit default environment.
env = Environment()
Program( 'feedread', 'main.cpp', LIBS=['xml2', 'curl'], CPPPATH='/usr/include/libxml2', CPPFLAGS='-W -Wall -pedantic -ansi' )
| Python |
#!/usr/bin/python
# Fetch each configured RSS feed, render it to HTML via XSLT, and email the
# result (e.g. to a Kindle address) with today's date in the filename.
import os
import os.path
import sys
import datetime
import sendmail
from config import *

today = datetime.date.today()
# Directory containing this script (and rss.xsl next to it).
PROGDIR = os.path.dirname(sys.argv[0])
for name in FEEDS:
    feed = FEEDS[name]
    # NOTE(review): feed URLs and RUNDIR are interpolated straight into shell
    # commands; a value containing quotes or shell metacharacters would break
    # or inject -- assumes FEEDS/RUNDIR are trusted config. TODO confirm.
    os.system("wget -q -T 5 -O %s/%s.xml \"%s\"" % (RUNDIR,name,feed))
    os.system("xsltproc %s/rss.xsl %s/%s.xml > %s/%s.html" % (PROGDIR,RUNDIR,name,RUNDIR,name))
    sendmail.send("%s-%s.html" % (name,today), "%s/%s.html" % (RUNDIR,name))
#os.system("rm -rf %s/*" % RUNDIR);
| Python |
#!/usr/bin/python
import sys
import os
import os.path
import smtplib
import mimetypes
from email.MIMEMultipart import MIMEMultipart
from email.MIMEBase import MIMEBase
from email.MIMEText import MIMEText
from email.MIMEAudio import MIMEAudio
from email.MIMEImage import MIMEImage
from email.Encoders import encode_base64
from email import encoders
from config import *
def send(subject, fname):
    '''Email the file *fname* as a base64 attachment named *subject*.

    Uses the module-level config values (user, passwd, recipient,
    mailserver, mailport) from config.py; supports a comma-separated
    recipient list. Raises smtplib/IOError exceptions on failure.
    '''
    # (the original declared 'global' here, which is a no-op for reads)
    msg = MIMEMultipart()
    msg['From'] = user
    msg['To'] = recipient
    msg['Subject'] = subject
    # FIX: renamed 'file' (shadowed the builtin) and use context managers /
    # try-finally so handles are closed even when an error occurs mid-send.
    attachment = MIMEBase('application','octet-stream')
    with open(fname,'rb') as f:
        attachment.set_payload(f.read())
    encoders.encode_base64(attachment)
    attachment.add_header('Content-Disposition', 'attachment', filename=subject)
    msg.attach(attachment)
    mailServer = smtplib.SMTP(mailserver, mailport)
    try:
        mailServer.ehlo()
        # STARTTLS requires a fresh EHLO afterwards.
        mailServer.starttls()
        mailServer.ehlo()
        mailServer.login(user, passwd)
        mailServer.sendmail(user, recipient.split(','), msg.as_string())
    finally:
        mailServer.close()
    print('Sent email to %s' % recipient)
| Python |
#!/usr/bin/python
# Configuration for feed2kindle: feed sources plus SMTP settings used by
# sendmail.py (placeholders -- fill in real account values).
# Working directory where fetched XML and rendered HTML files are written.
RUNDIR="/var/run/feed2kindle"
# Map of short feed name -> RSS URL; the name becomes the output filename
# and the mail subject prefix.
FEEDS={
    'cnbeta':'http://www.cnbeta.com/backend.php',
    'csdn':'http://articles.csdn.net/api/rss.php?tid=1008',
    'sina':'http://rss.sina.com.cn/news/marquee/ddt.xml'
}
# SMTP sender account and credentials.
user = 'yourgmailaccount@gmail.com'
passwd = 'yourgmailpassword'
# Destination address; a comma-separated list is supported by sendmail.send().
recipient = 'yourkindleaccount@free.kindle.com'
mailserver = 'smtp.googlemail.com'
mailport = 587
| Python |
#!/usr/bin/python
# Configuration for feed2kindle: feed sources plus SMTP settings used by
# sendmail.py (placeholders -- fill in real account values).
# Working directory where fetched XML and rendered HTML files are written.
RUNDIR="/var/run/feed2kindle"
# Map of short feed name -> RSS URL; the name becomes the output filename
# and the mail subject prefix.
FEEDS={
    'cnbeta':'http://www.cnbeta.com/backend.php',
    'csdn':'http://articles.csdn.net/api/rss.php?tid=1008',
    'sina':'http://rss.sina.com.cn/news/marquee/ddt.xml'
}
# SMTP sender account and credentials.
user = 'yourgmailaccount@gmail.com'
passwd = 'yourgmailpassword'
# Destination address; a comma-separated list is supported by sendmail.send().
recipient = 'yourkindleaccount@free.kindle.com'
mailserver = 'smtp.googlemail.com'
mailport = 587
| Python |
#!/usr/bin/python
# Fetch each configured RSS feed, render it to HTML via XSLT, and email the
# result (e.g. to a Kindle address) with today's date in the filename.
import os
import os.path
import sys
import datetime
import sendmail
from config import *

today = datetime.date.today()
# Directory containing this script (and rss.xsl next to it).
PROGDIR = os.path.dirname(sys.argv[0])
for name in FEEDS:
    feed = FEEDS[name]
    # NOTE(review): feed URLs and RUNDIR are interpolated straight into shell
    # commands; a value containing quotes or shell metacharacters would break
    # or inject -- assumes FEEDS/RUNDIR are trusted config. TODO confirm.
    os.system("wget -q -T 5 -O %s/%s.xml \"%s\"" % (RUNDIR,name,feed))
    os.system("xsltproc %s/rss.xsl %s/%s.xml > %s/%s.html" % (PROGDIR,RUNDIR,name,RUNDIR,name))
    sendmail.send("%s-%s.html" % (name,today), "%s/%s.html" % (RUNDIR,name))
#os.system("rm -rf %s/*" % RUNDIR);
#!/usr/bin/python
import sys
import os
import os.path
import smtplib
import mimetypes
from email.MIMEMultipart import MIMEMultipart
from email.MIMEBase import MIMEBase
from email.MIMEText import MIMEText
from email.MIMEAudio import MIMEAudio
from email.MIMEImage import MIMEImage
from email.Encoders import encode_base64
from email import encoders
from config import *
def send(subject, fname):
    '''Email the file *fname* as a base64 attachment named *subject*.

    Uses the module-level config values (user, passwd, recipient,
    mailserver, mailport) from config.py; supports a comma-separated
    recipient list. Raises smtplib/IOError exceptions on failure.
    '''
    # (the original declared 'global' here, which is a no-op for reads)
    msg = MIMEMultipart()
    msg['From'] = user
    msg['To'] = recipient
    msg['Subject'] = subject
    # FIX: renamed 'file' (shadowed the builtin) and use context managers /
    # try-finally so handles are closed even when an error occurs mid-send.
    attachment = MIMEBase('application','octet-stream')
    with open(fname,'rb') as f:
        attachment.set_payload(f.read())
    encoders.encode_base64(attachment)
    attachment.add_header('Content-Disposition', 'attachment', filename=subject)
    msg.attach(attachment)
    mailServer = smtplib.SMTP(mailserver, mailport)
    try:
        mailServer.ehlo()
        # STARTTLS requires a fresh EHLO afterwards.
        mailServer.starttls()
        mailServer.ehlo()
        mailServer.login(user, passwd)
        mailServer.sendmail(user, recipient.split(','), msg.as_string())
    finally:
        mailServer.close()
    print('Sent email to %s' % recipient)
from twisted.python import log
from twisted.internet import threads
import os
import struct
import hashlib
import json
import socket
import uuid
import datetime
# Max bytes read from the broker socket per recv().
BUFSIZ = 16384
# hpfeeds wire-protocol opcodes.
OP_ERROR = 0
OP_INFO = 1
OP_AUTH = 2
OP_PUBLISH = 3
OP_SUBSCRIBE = 4
# Absolute cap on a single message body (1 MiB).
MAXBUF = 1024**2
# Maximum accepted total message size per opcode (5-byte header + payload).
SIZES = {
    OP_ERROR: 5+MAXBUF,
    OP_INFO: 5+256+20,
    OP_AUTH: 5+256+20,
    OP_PUBLISH: 5+MAXBUF,
    OP_SUBSCRIBE: 5+256*2,
}
# Broker channels HonSSH publishes auth attempts and session metadata to.
HONSSHAUTHCHAN = 'honssh.auth'
HONSSHSESHCHAN = 'honssh.sessions'
class BadClient(Exception):
    '''Raised when the peer violates the hpfeeds wire protocol (e.g. a
    message larger than the per-opcode limit in SIZES).'''
    pass
def strpack8(x):
    '''Serialize *x* as an hpfeeds length-prefixed string: a single
    big-endian length byte followed by the payload (str is first encoded
    as latin-1).'''
    payload = x.encode('latin1') if isinstance(x, str) else x
    length_prefix = struct.pack('!B', len(payload))
    return length_prefix + payload
def strunpack8(x):
    '''Split a length-prefixed byte string; returns (payload, remainder).

    The first byte of *x* is the payload length.
    '''
    prefix = x[0]
    payload = x[1:1 + prefix]
    remainder = x[1 + prefix:]
    return payload, remainder
def msghdr(op, data):
    '''Prepend the 5-byte hpfeeds header (total length incl. header, then
    the opcode byte) to *data*.'''
    total_length = 5 + len(data)
    header = struct.pack('!iB', total_length, op)
    return header + data
def msgpublish(ident, chan, data):
    '''Build an OP_PUBLISH message carrying *data* on channel *chan* as
    identity *ident*.'''
    body = strpack8(ident) + strpack8(chan) + data
    return msghdr(OP_PUBLISH, body)
def msgsubscribe(ident, chan):
    '''Build an OP_SUBSCRIBE message for channel *chan* as identity *ident*.
    The channel is appended raw (not length-prefixed), encoded latin-1 if
    given as str.'''
    if isinstance(chan, str):
        chan = chan.encode('latin1')
    body = strpack8(ident) + chan
    return msghdr(OP_SUBSCRIBE, body)
def msgauth(rand, ident, secret):
    '''Build an OP_AUTH reply: length-prefixed *ident* followed by the
    SHA-1 digest of the server nonce *rand* concatenated with *secret*.'''
    # FIX: renamed local 'hash', which shadowed the builtin of the same name.
    digest = hashlib.sha1(bytes(rand)+secret).digest()
    return msghdr(OP_AUTH, strpack8(ident) + digest)
class FeedUnpack(object):
    '''Incremental parser for the hpfeeds wire format.

    Bytes received from the socket are appended with feed(); iterating the
    instance then yields complete (opcode, payload) messages, stopping once
    the buffer no longer holds a full message.

    NOTE: Python 2 only -- relies on the buffer() builtin and the old-style
    next() iterator method, neither of which exists in Python 3.
    '''
    def __init__(self):
        # Accumulates raw bytes until at least one whole message is present.
        self.buf = bytearray()
    def __iter__(self):
        return self
    def next(self):
        # Python 2 iterator protocol; delegates to unpack().
        return self.unpack()
    def feed(self, data):
        '''Append newly received bytes to the internal buffer.'''
        self.buf.extend(data)
    def unpack(self):
        '''Pop one message off the buffer.

        Returns (opcode, payload bytearray). Raises StopIteration when the
        buffer holds less than a complete message, and BadClient when the
        declared length exceeds the per-opcode limit in SIZES.
        '''
        if len(self.buf) < 5:
            raise StopIteration('No message.')
        # Header: 4-byte total length (includes the header itself) + opcode.
        ml, opcode = struct.unpack('!iB', buffer(self.buf,0,5))
        if ml > SIZES.get(opcode, MAXBUF):
            raise BadClient('Not respecting MAXBUF.')
        if len(self.buf) < ml:
            raise StopIteration('No message.')
        data = bytearray(buffer(self.buf, 5, ml-5))
        # Consume exactly one message from the front of the buffer.
        del self.buf[:ml]
        return opcode, data
class hpclient(object):
def __init__(self, server, port, ident, secret):
log.msg('[HPFEEDS] - hpfeeds client init broker {0}:{1}, identifier {2}'.format(server, port, ident))
self.server, self.port = server, int(port)
self.ident, self.secret = ident.encode('latin1'), secret.encode('latin1')
self.unpacker = FeedUnpack()
self.state = 'INIT'
self.connect()
self.sendfiles = []
self.filehandle = None
def connect(self):
self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.s.settimeout(3)
try: self.s.connect((self.server, self.port))
except:
log.msg('[HPFEEDS] - hpfeeds client could not connect to broker.')
self.s = None
else:
self.s.settimeout(None)
self.handle_established()
def send(self, data):
if not self.s: return
self.s.send(data)
def close(self):
self.s.close()
self.s = None
def handle_established(self):
log.msg('[HPFEEDS] - hpclient established')
while self.state != 'GOTINFO':
self.read()
#quickly try to see if there was an error message
self.s.settimeout(0.5)
self.read()
self.s.settimeout(None)
def read(self):
if not self.s: return
try: d = self.s.recv(BUFSIZ)
except socket.timeout:
return
if not d:
self.close()
return
self.unpacker.feed(d)
try:
for opcode, data in self.unpacker:
log.msg('[HPFEEDS] - hpclient msg opcode {0} data {1}'.format(opcode, data))
if opcode == OP_INFO:
name, rand = strunpack8(data)
log.msg('[HPFEEDS] - hpclient server name {0} rand {1}'.format(name, rand))
self.send(msgauth(rand, self.ident, self.secret))
self.state = 'GOTINFO'
elif opcode == OP_PUBLISH:
ident, data = strunpack8(data)
chan, data = strunpack8(data)
log.msg('[HPFEEDS] - publish to {0} by {1}: {2}'.format(chan, ident, data))
elif opcode == OP_ERROR:
log.err('[HPFEEDS] - errormessage from server: {0}'.format(data))
else:
log.err('[HPFEEDS] - unknown opcode message: {0}'.format(opcode))
except BadClient:
log.err('[HPFEEDS] - unpacker error, disconnecting.')
self.close()
def publish(self, channel, **kwargs):
try:
self.send(msgpublish(self.ident, channel, json.dumps(kwargs).encode('latin1')))
except Exception, e:
log.err('[HPFEEDS] - connection to hpfriends lost: {0}'.format(e))
log.err('[HPFEEDS] - connecting')
self.connect()
self.send(msgpublish(self.ident, channel, json.dumps(kwargs).encode('latin1')))
def sendfile(self, filepath):
# does not read complete binary into memory, read and send chunks
if not self.filehandle:
self.sendfileheader(i.file)
self.sendfiledata()
else: self.sendfiles.append(filepath)
def sendfileheader(self, filepath):
self.filehandle = open(filepath, 'rb')
fsize = os.stat(filepath).st_size
headc = strpack8(self.ident) + strpack8(UNIQUECHAN)
headh = struct.pack('!iB', 5+len(headc)+fsize, OP_PUBLISH)
self.send(headh + headc)
def sendfiledata(self):
tmp = self.filehandle.read(BUFSIZ)
if not tmp:
if self.sendfiles:
fp = self.sendfiles.pop(0)
self.sendfileheader(fp)
else:
self.filehandle = None
self.handle_io_in(b'')
else:
self.send(tmp)
class HPLogger():
    '''Collects HonSSH session and authentication events and publishes them
    as JSON to an hpfeeds broker (channels HONSSHSESHCHAN / HONSSHAUTHCHAN).'''
    def start(self, cfg):
        '''Create and return an hpclient from the [hpfeeds] config section.'''
        log.msg('[HPFEEDS] - hpfeeds DBLogger start')
        server = cfg.get('hpfeeds', 'server')
        port = cfg.get('hpfeeds', 'port')
        ident = cfg.get('hpfeeds', 'identifier')
        secret = cfg.get('hpfeeds', 'secret')
        return hpclient(server, port, ident, secret)
    def setClient(self, hpClient, cfg):
        '''Attach the hpfeeds client and remember the configured sensor name.'''
        self.sensor_name = cfg.get('honeypot','sensor_name')
        self.client = hpClient
    def createSession(self, session, peerIP, peerPort, hostIP, hostPort):
        '''Begin accumulating metadata for a new session; returns the
        session id unchanged.'''
        self.sessionMeta = { 'sensor_name': self.sensor_name, 'uuid': session, 'startTime': self.getDateTime(), 'channels': [] }
        self.sessionMeta['connection'] = {'peerIP': peerIP, 'peerPort': peerPort, 'hostIP': hostIP, 'hostPort': hostPort, 'version': None}
        return session
    def handleConnectionLost(self):
        '''Publish the accumulated session metadata when the session ends.'''
        log.msg('[HPFEEDS] - publishing metadata to hpfeeds')
        meta = self.sessionMeta
        meta['endTime'] = self.getDateTime()
        log.msg("[HPFEEDS] - sessionMeta: " + str(meta))
        # Publish off the reactor thread so a slow broker cannot block twisted.
        threads.deferToThread(self.client.publish, HONSSHSESHCHAN, **meta)
    def handleLoginFailed(self, username, password):
        '''Publish a failed authentication attempt.'''
        authMeta = {'sensor_name': self.sensor_name, 'datetime': self.getDateTime(),'username': username, 'password': password, 'success': False}
        log.msg('[HPFEEDS] - authMeta: ' + str(authMeta))
        threads.deferToThread(self.client.publish, HONSSHAUTHCHAN, **authMeta)
    def handleLoginSucceeded(self, username, password):
        '''Publish a successful authentication attempt.'''
        authMeta = {'sensor_name': self.sensor_name, 'datetime': self.getDateTime(),'username': username, 'password': password, 'success': True}
        log.msg('[HPFEEDS] - authMeta: ' + str(authMeta))
        threads.deferToThread(self.client.publish, HONSSHAUTHCHAN, **authMeta)
    def channelOpened(self, uuid, channelName):
        '''Record a newly opened channel within the current session.'''
        self.sessionMeta['channels'].append({'name': channelName, 'uuid': uuid, 'startTime': self.getDateTime(), 'commands': []})
    def channelClosed(self, uuid, ttylog=None):
        '''Stamp the channel's end time and optionally attach its tty log
        as a hex string.'''
        chan = self.findChannel(uuid)
        chan['endTime'] = self.getDateTime()
        if ttylog != None:
            fp = open(ttylog, 'rb')
            ttydata = fp.read()
            fp.close()
            # NOTE: str.encode('hex') is Python 2 only.
            chan['ttylog'] = ttydata.encode('hex')
    def handleCommand(self, uuid, command):
        '''Append a timestamped command to the channel's command list.'''
        chan = self.findChannel(uuid)
        chan['commands'].append([self.getDateTime(), command])
    def handleClientVersion(self, version):
        # Record the client's version string on the session's connection info.
        self.sessionMeta['connection']['version'] = version
    def getDateTime(self):
        '''Current local time as YYYYMMDD_HHMMSS_mmm (millisecond precision).'''
        return datetime.datetime.now().strftime("%Y%m%d_%H%M%S_%f")[:-3]
    def findChannel(self, uuid):
        '''Return the channel dict with the given uuid.

        Falls through to an implicit None when no channel matches --
        callers assume a match exists.
        '''
        for chan in self.sessionMeta['channels']:
            if chan['uuid'] == uuid:
                return chan
# Copyright (c) 2009 Upi Tamminen <desaster@gmail.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The names of the author(s) may not be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
# Should be compatible with user mode linux
import struct, sys, os
# Record opcodes written to the tty log stream.
OP_OPEN, OP_CLOSE, OP_WRITE, OP_EXEC = 1, 2, 3, 4
# Direction/type field carried by an OP_WRITE record.
TYPE_INPUT, TYPE_OUTPUT, TYPE_INTERACT = 1, 2, 3
def ttylog_write(logfile, len, direction, stamp, data = None):
    '''Append one OP_WRITE (opcode 3) record to *logfile*.

    len       -- payload length to record (parameter name kept for
                 backward compatibility even though it shadows the builtin)
    direction -- one of the TYPE_* constants
    stamp     -- float timestamp, split into seconds and microseconds
    data      -- payload bytes; NOTE(review): the default of None would
                 make f.write(data) fail -- callers always pass data.
    '''
    sec, usec = int(stamp), int(1000000 * (stamp - int(stamp)))
    # FIX: use open() in a 'with' block -- file() was removed in Python 3,
    # and the handle is now closed even if a write fails.
    with open(logfile, 'ab') as f:
        # 3 == OP_WRITE
        f.write(struct.pack('<iLiiLL', 3, 0, len, direction, sec, usec))
        f.write(data)
def ttylog_open(logfile, stamp):
    '''Append an OP_OPEN (opcode 1) record to *logfile* and make the log
    world-readable (mode 0644).

    stamp -- float timestamp, split into seconds and microseconds.
    '''
    sec, usec = int(stamp), int(1000000 * (stamp - int(stamp)))
    # FIX: open() instead of file() (removed in Python 3); handle closed
    # deterministically via 'with'.
    with open(logfile, 'ab') as f:
        # 1 == OP_OPEN
        f.write(struct.pack('<iLiiLL', 1, 0, 0, 0, sec, usec))
    # FIX: 0o644 instead of the bare 0644 literal, which is a syntax error
    # in Python 3 (0o notation is valid from Python 2.6 onward).
    os.chmod(logfile, 0o644)
def ttylog_close(logfile, stamp):
    '''Append an OP_CLOSE (opcode 2) record to *logfile*.

    stamp -- float timestamp, split into seconds and microseconds.
    '''
    sec, usec = int(stamp), int(1000000 * (stamp - int(stamp)))
    # FIX: open() instead of file() (removed in Python 3); handle closed
    # deterministically via 'with'.
    with open(logfile, 'ab') as f:
        # 2 == OP_CLOSE
        f.write(struct.pack('<iLiiLL', 2, 0, 0, 0, sec, usec))
| Python |
# Copyright (c) 2009 Upi Tamminen <desaster@gmail.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The names of the author(s) may not be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
from twisted.internet import protocol
from twisted.conch import telnet, recvline
from twisted.python import log
from kippo.core import ttylog
import time, struct
class Interact(telnet.Telnet):
    '''Telnet-based management console for HonSSH: lists live honeypot
    sessions and lets an operator watch (read-only) or hijack them.

    Python 2 / twisted code: the "bytes" parameter below is the raw str
    payload delivered by the telnet transport; the name shadows the builtin.
    '''
    def connectionMade(self):
        # Start in command mode: no session attached, empty command buffer.
        self.interacting = None
        self.cmdbuf = ''
        self.honeypotFactory = self.factory.honeypotFactory
        # someone tell me if i'm doing this wrong?
        # Negotiate character-at-a-time input with server-side echo.
        d = self.do(telnet.LINEMODE)
        self.requestNegotiation(telnet.LINEMODE, telnet.LINEMODE_EDIT + '\0')
        self.will(telnet.ECHO)
        self.transport.write('*** HonSSH session management console ***\r\n')
        self.cmd_help()
    def connectionLost(self, reason):
        # Detach from any watched session so it stops writing to us.
        if self.interacting != None:
            self.interacting.delInteractor(self)
    def enableRemote(self, option):
        # Accept only LINEMODE from the client; refuse all other options.
        return option == telnet.LINEMODE
    def disableRemote(self, option):
        pass
    def applicationDataReceived(self, bytes):
        '''Dispatch console keystrokes: command mode vs. attached-session mode.'''
        # in command mode, we want to echo characters and buffer the input
        if not self.interacting:
            self.transport.write(bytes)
            if bytes in ('\r', '\n'):
                self.transport.write('\n')
                # First word is the command name, the remainder its argument.
                pieces = self.cmdbuf.split(' ', 1)
                self.cmdbuf = ''
                cmd, args = pieces[0], ''
                if len(pieces) > 1:
                    args = pieces[1]
                # Commands dispatch by name to the matching cmd_* method.
                try:
                    func = getattr(self, 'cmd_' + cmd)
                except AttributeError:
                    self.transport.write('** Unknown command.\r\n')
                    return
                func(args)
            else:
                self.cmdbuf += bytes
        # in non-command mode we are passing input to the session we are
        # watching
        else:
            for c in bytes:
                if ord(c) == 27: # escape
                    # ESC detaches and returns to command mode.
                    self.interacting.delInteractor(self)
                    self.interacting = None
                    self.transport.write(
                        '\r\n** Interactive session closed.\r\n')
                    return
            if not self.readonly:
                if type(bytes) == type(''):
                    # Record operator keystrokes into the session's tty log.
                    ttylog.ttylog_write(
                        self.interacting.ttylog_file,len(bytes), ttylog.TYPE_INTERACT, time.time(), bytes)
                # Frame the data with a big-endian 8-byte length and forward
                # it to the session's client as packet type 94.
                bytes = struct.pack('>Q',len(bytes)) + bytes
                self.interacting.client.sendPacket(94, bytes)
    def sessionWrite(self, data):
        '''Called by the watched session: normalize bare \\n to \\r\\n for
        the telnet terminal, then write through.'''
        buf, prev = '', ''
        for c in data:
            if c == '\n' and prev != '\r':
                buf += '\r\n'
            else:
                buf += c
            prev = c
        self.transport.write(buf)
    def sessionClosed(self):
        # The watched session ended: detach and return to command mode.
        self.interacting.delInteractor(self)
        self.interacting = None
        self.transport.write('\r\n** Interactive session disconnected.\r\n')
    def cmd_hijack(self, args):
        '''Attach to a session read-write (operator keystrokes forwarded).'''
        self.cmd_view(args)
        self.readonly = False
    def cmd_view(self, args):
        '''Attach to a session read-only. *args* is the session id.'''
        self.readonly = True
        try:
            sessionno = int(args)
        except ValueError:
            self.transport.write('** Invalid session ID.\r\n')
            return
        for s in self.honeypotFactory.sessions:
            if sessionno == s:
                self.view(s)
                return
        self.transport.write('** No such session found.\r\n')
    def view(self, sessionno):
        '''Register this console as an interactor on session *sessionno*.'''
        session = self.honeypotFactory.sessions[sessionno]
        self.transport.write(
            '** Attaching to #%d, hit ESC to return\r\n' % sessionno)
        session.addInteractor(self)
        self.interacting = session
    def cmd_list(self, args):
        '''List active sessions: id, client IP, client version string.'''
        self.transport.write('ID clientIP clientVersion\r\n')
        for s in self.honeypotFactory.sessions:
            session = self.honeypotFactory.sessions[s]
            self.transport.write('%s %s %s\r\n' % \
                (str(s).ljust(4),
                session.endIP.ljust(15),
                session.otherVersionString))
    def cmd_help(self, args = ''):
        '''Print the command summary.'''
        self.transport.write('List of commands:\r\n')
        self.transport.write(' list - list all active sessions\r\n')
        self.transport.write(
            ' view - attach to a session in read-only mode\r\n')
        self.transport.write(
            ' hijack - attach to a session in interactive mode\r\n')
        self.transport.write(
            ' disconnect - disconnect a session\r\n')
        self.transport.write(' help - this help\r\n')
        self.transport.write(' exit - disconnect the console\r\n')
    def cmd_disconnect(self, args):
        '''Forcibly close the session with id *args*.'''
        try:
            sessionno = int(args)
        except ValueError:
            self.transport.write('** Invalid session ID.\r\n')
            return
        for s in self.honeypotFactory.sessions:
            if sessionno == s:
                self.transport.write(
                    '** Disconnecting session #%d\r\n' % sessionno)
                self.honeypotFactory.sessions[s].loseConnection()
                return
        self.transport.write('** No such session found.\r\n')
    def cmd_exit(self, args = ''):
        # Close the console's own connection.
        self.transport.loseConnection()
def makeInteractFactory(honeypotFactory):
    '''Build a twisted Factory that serves the Interact console, wired to
    the given honeypot factory so consoles can reach its sessions.'''
    factory = protocol.Factory()
    factory.honeypotFactory = honeypotFactory
    factory.protocol = Interact
    return factory
| Python |
# Copyright (c) 2013 Thomas Nicholson <tnnich@googlemail.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The names of the author(s) may not be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
import ConfigParser, os, re
def config():
    """Load honssh.cfg from the working directory.

    Returns a populated ConfigParser, or None when the file is absent.
    """
    if not os.path.exists('honssh.cfg'):
        return None
    parser = ConfigParser.ConfigParser()
    parser.read('honssh.cfg')
    return parser
def validateConfig(cfg):
    """Validate the honssh configuration.

    Returns True only when every required option exists and is well-formed.
    All checks are run (errors are printed by the check* helpers) so every
    problem is reported in one pass.
    """
    valid = True

    def ok(prop, extra=None):
        # Mirrors the original short-circuit: the format check only runs
        # when the option exists and is non-blank.
        if not checkExist(cfg, prop):
            return False
        if extra is not None and not extra(cfg, prop):
            return False
        return True

    # Options that must exist and be IP addresses
    for prop in [['honeypot','ssh_addr'], ['honeypot','client_addr'], ['honeypot','honey_addr']]:
        if not ok(prop, checkValidIP):
            valid = False
    # Option that must exist and be a port number
    if not ok(['honeypot','ssh_port'], checkValidPort):
        valid = False
    # Options that only need to exist
    for prop in [['honeypot','sensor_name'],['honeypot','public_key'], ['honeypot','private_key'], ['folders','log_path'], ['folders','session_path']]:
        if not ok(prop):
            valid = False
    # Options that must exist and be true/false
    for prop in [['advNet','enabled'], ['interact','enabled'], ['spoof','enabled'], ['txtlog','enabled'], ['database_mysql','enabled'], ['email','login'], ['email','attack'], ['hpfeeds','enabled'], ['download','passive'], ['download','active'], ['packets','enabled'], ['hp-restrict', 'disable_publicKey'], ['hp-restrict', 'disable_x11'], ['hp-restrict', 'disable_sftp'], ['hp-restrict', 'disable_exec'], ['hp-restrict', 'disable_port_forwarding']]:
        if not ok(prop, checkValidBool):
            valid = False
    # Interact-specific settings
    if cfg.get('interact','enabled') == 'true':
        if not ok(['interact','interface'], checkValidIP):
            valid = False
        if not ok(['interact','port'], checkValidPort):
            valid = False
    # Spoof-specific settings
    if cfg.get('spoof','enabled') == 'true':
        if not ok(['spoof','users_conf']):
            valid = False
    # MySQL-specific settings
    if cfg.get('database_mysql','enabled') == 'true':
        if not ok(['database_mysql','port'], checkValidPort):
            valid = False
        for prop in [['database_mysql','host'], ['database_mysql','database'], ['database_mysql','username'], ['database_mysql','password']]:
            if not ok(prop):
                valid = False
    # Email-specific settings (require txtlog to be enabled)
    if cfg.get('email','login') == 'true' or cfg.get('email','login') == 'attack':
        if cfg.get('txtlog','enabled') == 'true':
            if not ok(['email','port'], checkValidPort):
                valid = False
            for prop in [['email','use_tls'], ['email','use_smtpauth']]:
                if not ok(prop, checkValidBool):
                    valid = False
            if cfg.get('email','use_smtpauth') == 'true':
                for prop in [['email','username'], ['email','password']]:
                    if not ok(prop):
                        valid = False
            for prop in [['email','host'], ['email','from'], ['email','to']]:
                if not ok(prop):
                    valid = False
        else:
            print('[txtlog][enabled] must be set to true for email support to work')
            valid = False
    # hpfeeds-specific settings
    if cfg.get('hpfeeds','enabled') == 'true':
        for prop in [['hpfeeds','server'], ['hpfeeds','identifier'], ['hpfeeds','secret']]:
            if not ok(prop):
                valid = False
        if not ok(['hpfeeds','port'], checkValidPort):
            valid = False
    return valid
def checkExist(cfg, property):
    """Return True if option [property[0]][property[1]] exists and is non-blank.

    Prints a validation message and returns False otherwise.
    Uses single-argument print() calls, which behave identically under
    Python 2 and also parse under Python 3 (the original used py2-only
    print statements).
    """
    section, option = property[0], property[1]
    if not cfg.has_option(section, option):
        print('[VALIDATION] - [' + section + '][' + option + '] must exist.')
        return False
    if cfg.get(section, option) == '':
        print('[VALIDATION] - [' + section + '][' + option + '] must not be blank.')
        return False
    return True
def checkValidIP(cfg, property):
    """Return True if option [property[0]][property[1]] is a dotted-quad IPv4
    address (each octet 0-255); prints an error and returns False otherwise.
    Print statement converted to a print() call for Python 2/3 parsing."""
    # Anchored pattern so partial matches ('1.2.3' or trailing junk) fail.
    match = re.match('^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$', cfg.get(property[0], property[1]))
    if match:
        return True
    print('[VALIDATION] - [' + property[0] + '][' + property[1] + '] should be a valid IP address')
    return False
def checkValidPort(cfg, property):
    """Return True if option [property[0]][property[1]] is a number in 1..65535.

    Prints an error and returns False otherwise. The original fell off the
    end (returning None) when the value was not numeric; this now returns
    False explicitly — backward-compatible since callers only test truthiness.
    Print statement converted to a print() call for Python 2/3 parsing.
    """
    if not checkValidNumber(cfg, property):
        return False
    if 1 <= int(cfg.get(property[0], property[1])) <= 65535:
        return True
    print('[VALIDATION] - [' + property[0] + '][' + property[1] + '] should be between 1 and 65535')
    return False
def checkValidBool(cfg, property):
    """Return True if the option value is exactly 'true' or 'false'
    (case sensitive); prints an error and returns False otherwise.
    Print statement converted to a print() call for Python 2/3 parsing."""
    if cfg.get(property[0], property[1]) in ['true', 'false']:
        return True
    print('[VALIDATION] - [' + property[0] + '][' + property[1] + '] must be either true or false (case sensitive)')
    return False
def checkValidNumber(cfg, property):
    """Return True if the option value consists solely of digits (note:
    str.isdigit() rejects the empty string, signs and decimals); prints an
    error and returns False otherwise.
    Print statement converted to a print() call for Python 2/3 parsing."""
    if cfg.get(property[0], property[1]).isdigit():
        return True
    print('[VALIDATION] - [' + property[0] + '][' + property[1] + '] should be number.')
    return False
def checkValidChance(cfg, property):
    """Return True if the option value is a number >= 1.

    Prints an error and returns False otherwise. The original returned an
    implicit None when the value was not numeric; this now returns False
    explicitly — backward-compatible since callers only test truthiness.
    Print statement converted to a print() call for Python 2/3 parsing.
    """
    if not checkValidNumber(cfg, property):
        return False
    if 1 <= int(cfg.get(property[0], property[1])):
        return True
    print('[VALIDATION] - [' + property[0] + '][' + property[1] + '] should be greater than 0')
    return False
# Copyright (c) 2009 Upi Tamminen <desaster@gmail.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The names of the author(s) may not be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
from twisted.enterprise import adbapi
from twisted.internet import defer
from twisted.python import log
import MySQLdb, uuid, time
class ReconnectingConnectionPool(adbapi.ConnectionPool):
    """Reconnecting adbapi connection pool for MySQL.
    This class improves on the solution posted at
    http://www.gelens.org/2008/09/12/reinitializing-twisted-connectionpool/
    by checking exceptions by error code and only disconnecting the current
    connection instead of all of them.
    Also see:
    http://twistedmatrix.com/pipermail/twisted-python/2009-July/020007.html
    """
    def _runInteraction(self, interaction, *args, **kw):
        # Run the interaction, transparently retrying once after a dropped
        # MySQL connection: client errors 2006 ('server has gone away') and
        # 2013 ('lost connection to server during query').
        try:
            return adbapi.ConnectionPool._runInteraction(
                self, interaction, *args, **kw)
        except MySQLdb.OperationalError, e:
            # Any other operational error is a genuine failure: re-raise.
            if e[0] not in (2006, 2013):
                raise
            log.msg("RCP: got error %s, retrying operation" %(e))
            # Drop only this thread's connection; the pool reconnects lazily.
            conn = self.connections.get(self.threadID())
            self.disconnect(conn)
            # try the interaction again
            return adbapi.ConnectionPool._runInteraction(
                self, interaction, *args, **kw)
class DBLogger():
    """Persists HonSSH activity (sessions, auth attempts, channels, commands,
    ttylogs, downloads) to MySQL through a twisted adbapi pool."""
    def start(self, cfg):
        """Create and return the MySQL connection pool from the
        [database_mysql] settings; the port defaults to 3306."""
        if cfg.has_option('database_mysql', 'port'):
            port = int(cfg.get('database_mysql', 'port'))
        else:
            port = 3306
        return ReconnectingConnectionPool('MySQLdb',
            host = cfg.get('database_mysql', 'host'),
            db = cfg.get('database_mysql', 'database'),
            user = cfg.get('database_mysql', 'username'),
            passwd = cfg.get('database_mysql', 'password'),
            port = port,
            cp_min = 1,
            cp_max = 1)
    def setClient(self, dblog, cfg):
        """Attach an already-created pool and parsed config to this logger."""
        self.db = dblog
        self.cfg = cfg
    def sqlerror(self, error):
        # Errback for fire-and-forget queries: report the failure and move on.
        print 'SQL Error:', error.value
    def simpleQuery(self, sql, args):
        """ Just run a deferred sql query, only care about errors """
        d = self.db.runQuery(sql, args)
        d.addErrback(self.sqlerror)
    def createSession(self, sid, peerIP, peerPort, hostIP, hostPort):
        """Record a new session row (fire-and-forget)."""
        self.createSessionWhenever(sid, peerIP, peerPort, hostIP, hostPort)
    # This is separate since we can't return with a value
    @defer.inlineCallbacks
    def createSessionWhenever(self, sid, peerIP, peerPort, hostIP, hostPort):
        """Look up (or insert) the sensor row, then insert the session row."""
        sensorname = self.cfg.get('honeypot','sensor_name')
        r = yield self.db.runQuery('SELECT `id` FROM `sensors` WHERE `ip` = %s AND `name` = %s AND `port` = %s', (hostIP, sensorname, hostPort))
        if r:
            id = r[0][0]
        else:
            yield self.db.runQuery('INSERT INTO `sensors` (`ip`, `name`, `port`) VALUES (%s, %s, %s)', (hostIP, sensorname, hostPort))
            r = yield self.db.runQuery('SELECT LAST_INSERT_ID()')
            id = int(r[0][0])
        # now that we have a sensorID, continue creating the session
        self.simpleQuery('INSERT INTO `sessions` (`id`, `starttime`, `sensor`, `ip`, `port`) VALUES (%s, FROM_UNIXTIME(%s), %s, %s, %s)', (sid, self.nowUnix(), id, peerIP, peerPort))
    def nowUnix(self):
        """return the current UTC time as an UNIX timestamp"""
        return int(time.mktime(time.gmtime()[:-1] + (-1,)))
    def handleConnectionLost(self, sid):
        """Stamp the session's end time."""
        self.simpleQuery('UPDATE `sessions` SET `endtime` = FROM_UNIXTIME(%s) WHERE `id` = %s', (self.nowUnix(), sid))
    def handleLoginFailed(self, username, password):
        """Record a failed authentication attempt."""
        self.simpleQuery('INSERT INTO `auth` (`success`, `username`, `password`, `timestamp`) VALUES (%s, %s, %s, FROM_UNIXTIME(%s))', (0, username, password, self.nowUnix()))
    def handleLoginSucceeded(self, username, password):
        """Record a successful authentication."""
        self.simpleQuery('INSERT INTO `auth` (`success`, `username`, `password`, `timestamp`) VALUES (%s, %s, %s, FROM_UNIXTIME(%s))', ( 1, username, password, self.nowUnix()))
    def channelOpened(self, sessionID, uuid, channelName):
        """Record a newly opened channel belonging to a session."""
        self.simpleQuery('INSERT INTO `channels` (`id`, `type`, `starttime`, `sessionid`) VALUES (%s, %s, FROM_UNIXTIME(%s), %s)', (uuid, channelName, self.nowUnix(), sessionID))
    def channelClosed(self, uuid, ttylog=None):
        """Stamp the channel's end time; when a ttylog path is given, store
        the raw log file contents as a blob."""
        self.simpleQuery('UPDATE `channels` SET `endtime` = FROM_UNIXTIME(%s) WHERE `id` = %s', (self.nowUnix(), uuid))
        if ttylog != None:
            fp = open(ttylog, 'rb')
            ttydata = fp.read()
            fp.close()
            self.simpleQuery('INSERT INTO `ttylog` (`channelid`, `ttylog`) VALUES (%s, %s)', (uuid, ttydata))
    def handleCommand(self, uuid, theCommand):
        """Record a command entered on a channel."""
        self.simpleQuery('INSERT INTO `commands` (`timestamp`, `channelid`, `command`) VALUES (FROM_UNIXTIME(%s), %s, %s)', (self.nowUnix(), uuid, theCommand))
    @defer.inlineCallbacks
    def handleClientVersion(self, session, version):
        """Look up (or insert) the client-version row and link the session to it."""
        # NOTE(review): (version) is not a tuple — likely intended (version,);
        # verify that MySQLdb accepts a bare string as the parameter sequence.
        r = yield self.db.runQuery('SELECT `id` FROM `clients` WHERE `version` = %s', (version))
        if r:
            id = int(r[0][0])
        else:
            yield self.db.runQuery('INSERT INTO `clients` (`version`) VALUES (%s)', (version))
            r = yield self.db.runQuery('SELECT LAST_INSERT_ID()')
            id = int(r[0][0])
        self.simpleQuery('UPDATE `sessions` SET `client` = %s WHERE `id` = %s', (id, session))
    def handleFileDownload(self, uuid, url, outfile):
        """Record a file download observed on a channel."""
        self.simpleQuery('INSERT INTO `downloads` (`channelid`, `timestamp`, `url`, `outfile`) VALUES (%s, FROM_UNIXTIME(%s), %s, %s)', (uuid, self.nowUnix(), url, outfile))
| Python |
#!/usr/bin/env python
#
# Copyright (c) 2009 Upi Tamminen <desaster@gmail.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The names of the author(s) may not be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
import os, sys, time, struct, string, getopt
# Record opcodes used in the binary tty log format.
OP_OPEN, OP_CLOSE, OP_WRITE, OP_EXEC = 1, 2, 3, 4
# Direction/stream type of an OP_WRITE record.
TYPE_INPUT, TYPE_OUTPUT, TYPE_INTERACT = 1, 2, 3
def playlog(fd, settings):
    """Replay a binary tty log to stdout, pacing output by the recorded
    timestamps.

    fd       -- open binary file positioned at the start of the log
    settings -- dict with keys 'tail', 'maxdelay', 'input_only',
                'both_dirs', 'colorify' (see __main__ for defaults)
    """
    # Fixed-size record header: op, tty id, payload length, direction,
    # seconds, microseconds (little-endian); payload of 'length' bytes follows.
    ssize = struct.calcsize('<iLiiLL')
    currtty, prevtime, prefdir = 0, 0, 0
    color = None
    while 1:
        try:
            (op, tty, length, dir, sec, usec) = \
                struct.unpack('<iLiiLL', fd.read(ssize))
            data = fd.read(length)
        except struct.error:
            # Short read == end of log. In tail mode keep polling for new
            # records (and stop inter-record delays); otherwise we're done.
            if settings['tail']:
                prevtime = 0
                time.sleep(0.1)
                settings['maxdelay'] = 0
                continue
            break
        if currtty == 0: currtty = tty  # lock on to the first tty seen
        if str(tty) == str(currtty) and op == OP_WRITE:
            # the first stream seen is considered 'output'
            if prefdir == 0:
                prefdir = dir
                # use the other direction
                if settings['input_only']:
                    prefdir = TYPE_INPUT
                    if dir == TYPE_INPUT: prefdir = TYPE_OUTPUT
            # ANSI colours: cyan for interact traffic, yellow for input;
            # only actually emitted below when 'colorify' is set.
            if dir == TYPE_INTERACT:
                color = '\033[36m'
            elif dir == TYPE_INPUT:
                color = '\033[33m'
            if dir == prefdir or settings['both_dirs']:
                curtime = float(sec) + float(usec) / 1000000
                if prevtime != 0:
                    # Sleep the recorded gap, capped at maxdelay.
                    sleeptime = curtime - prevtime
                    if sleeptime > settings['maxdelay']:
                        sleeptime = settings['maxdelay']
                    if settings['maxdelay'] > 0:
                        time.sleep(sleeptime)
                prevtime = curtime
                if settings['colorify'] and color:
                    sys.stdout.write(color)
                sys.stdout.write(data)
                if settings['colorify'] and color:
                    sys.stdout.write('\033[0m')
                    color = None
                sys.stdout.flush()
        elif str(tty) == str(currtty) and op == OP_CLOSE:
            break
def help(brief = 0):
    """Print usage information and exit with status 1.

    brief -- when truthy, print only the usage line, not the option list.

    NOTE: shadows the builtin help(); the name is kept because __main__
    calls it. The py2-only print statements were converted to
    single-argument print() calls, which produce identical output under
    Python 2 and also parse under Python 3.
    """
    print('Usage: %s [-bfhi] [-m secs] [-w file] <tty-log-file>\n' % \
        os.path.basename(sys.argv[0]))
    if not brief:
        print(' -f keep trying to read the log until it\'s closed')
        print(' -m <seconds> maximum delay in seconds, to avoid' + \
            ' boredom or fast-forward\n' + \
            ' to the end. (default is 3.0)')
        print(' -i show the input stream instead of output')
        print(' -b show both input and output streams')
        print(' -c colorify the output stream based on what streams are being received')
        print(' -h display this help\n')
    sys.exit(1)
if __name__ == '__main__':
    # Playback defaults, overridden by the command-line flags parsed below.
    settings = {
        'tail': 0,
        'maxdelay': 3.0,
        'input_only': 0,
        'both_dirs': 0,
        'colorify': 0,
    }
    try:
        # NOTE(review): '-w file' is accepted by the getopt spec but never
        # handled in the flag loop below.
        optlist, args = getopt.getopt(sys.argv[1:], 'fhibcm:w:', ['help'])
    except getopt.GetoptError, error:
        print 'Error: %s\n' % error
        help()
    # Map command-line flags onto the settings dict; help() exits.
    for o, a in optlist:
        if o == '-f': settings['tail'] = 1
        elif o == '-m': settings['maxdelay'] = float(a) # takes decimals
        elif o == '-i': settings['input_only'] = 1
        elif o == '-b': settings['both_dirs'] = 1
        elif o in ['-h', '--help']: help()
        elif o == '-c': settings['colorify'] = 1
    # Exactly one positional argument (the log file) is required.
    if len(args) < 1:
        help()
    try:
        logfd = open(args[0], 'rb')
    except IOError:
        print "Couldn't open log file!"
        sys.exit(2)
    playlog(logfd, settings)
# vim: set sw=4:
| Python |
# Copyright (c) 2013 Thomas Nicholson <tnnich@googlemail.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The names of the author(s) may not be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
from twisted.conch.ssh import factory, transport, service
from twisted.conch.ssh.transport import SSHCiphers
from twisted.python import log
from twisted.internet import reactor, defer, threads
from honssh import client, output, networking, honsshServer
from honssh.protocols import sftp, ssh
from kippo.core.config import config
from kippo.dblog import mysql
from hpfeeds import hpfeeds
import datetime, time, os, struct, re, subprocess, random
class HonsshServerTransport(honsshServer.HonsshServer):
    """Attacker-facing SSH transport: proxies traffic to the real honeypot,
    buffering packets until the honeypot-side client connection is ready."""
    cfg = config()
    def connectionMade(self):
        """Attacker connected: set up logging and networking, then dial the
        honeypot on port 22."""
        self.timeoutCount = 0
        self.interactors = []
        clientFactory = client.HonsshClientFactory()
        clientFactory.server = self
        self.factory.sessions[self.transport.sessionno] = self
        self.out = output.Output(self.factory.hpLog, self.factory.dbLog)
        self.net = networking.Networking()
        self.clientConnected = False
        self.delayedPackets = []
        self.endIP = self.transport.getPeer().host
        self.bindIP = self.net.setupNetworking(self.endIP)
        # Dial the honeypot bound to bindIP, reusing the attacker's source port.
        reactor.connectTCP(self.cfg.get('honeypot', 'honey_addr'), 22, clientFactory, bindAddress=(self.bindIP, self.transport.getPeer().port), timeout=10)
        # Poll in a worker thread until the client side is up (10s max),
        # then replay any buffered packets via clientConn.
        d = threads.deferToThread(self.checkClientConnected)
        d.addCallback(self.clientConn)
        self.out.connectionMade(self.endIP, self.transport.getPeer().port)
        self.sshParse = ssh.SSH(self, self.out)
        honsshServer.HonsshServer.connectionMade(self)
    def connectionLost(self, reason):
        """Attacker disconnected: tear down the client link, interactors,
        session bookkeeping and per-session networking."""
        try:
            self.client.loseConnection()
        except:
            pass
        for i in self.interactors:
            i.sessionClosed()
        if self.transport.sessionno in self.factory.sessions:
            del self.factory.sessions[self.transport.sessionno]
        honsshServer.HonsshServer.connectionLost(self, reason)
        self.out.connectionLost()
        self.net.removeNetworking(self.factory.sessions)
    def ssh_KEXINIT(self, packet):
        # Record the attacker's SSH client banner before key exchange starts.
        self.out.setVersion(self.otherVersionString)
        return honsshServer.HonsshServer.ssh_KEXINIT(self, packet)
    def dispatchMessage(self, messageNum, payload):
        """Once both directions are encrypted, feed packets to the parser;
        buffer them while the honeypot connection is still coming up."""
        if honsshServer.HonsshServer.isEncrypted(self, "both"):
            if not self.clientConnected:
                log.msg("[SERVER] CONNECTION TO HONEYPOT NOT READY, BUFFERING PACKET")
                self.delayedPackets.append([messageNum, payload])
            else:
                self.sshParse.parsePacket("[SERVER]", messageNum, payload)
        else:
            honsshServer.HonsshServer.dispatchMessage(self, messageNum, payload)
    def addInteractor(self, interactor):
        # Register a live interact (monitoring) session.
        self.interactors.append(interactor)
    def delInteractor(self, interactor):
        # Deregister an interact session.
        self.interactors.remove(interactor)
    def sendPacket(self, messageNum, payload):
        honsshServer.HonsshServer.sendPacket(self, messageNum, payload)
    def checkClientConnected(self):
        """Blocking poll (runs in a worker thread): wait up to ~10s, one
        second at a time, for the client side; returns the final flag."""
        while not self.clientConnected:
            time.sleep(1)
            self.timeoutCount = self.timeoutCount + 1
            if self.timeoutCount == 10:
                break
        self.timeoutCount = 0
        return self.clientConnected
    def clientConn(self, success):
        """Callback for checkClientConnected: replay buffered packets, or
        drop the attacker when the honeypot never answered."""
        if success:
            log.msg("[SERVER] CLIENT CONNECTED, REPLAYING BUFFERED PACKETS")
            for packet in self.delayedPackets:
                self.sshParse.parsePacket("[SERVER]", packet[0], packet[1])
        else:
            log.msg("[SERVER][ERROR] COULD NOT CONNECT TO HONEYPOT AFTER 10 SECONDS - DISCONNECTING CLIENT")
            self.loseConnection()
class HonsshServerFactory(factory.SSHFactory):
    """Factory for attacker-facing transports. At startup it opens a one-off
    'slim' client connection to the honeypot and starts the optional
    hpfeeds/MySQL loggers."""
    cfg = config()
    otherVersionString = ''  # honeypot's SSH banner; presumably filled in by the slim client — confirm in client.py
    sessions = {}
    hpLog = None
    dbLog = None
    def __init__(self):
        # One-off probe of the honeypot on port 22 via the slim client.
        clientFactory = client.HonsshSlimClientFactory()
        clientFactory.server = self
        reactor.connectTCP(self.cfg.get('honeypot', 'honey_addr'), 22, clientFactory)
        if self.cfg.get('hpfeeds', 'enabled') == 'true':
            hp = hpfeeds.HPLogger()
            self.hpLog = hp.start(self.cfg)
        if self.cfg.get('database_mysql', 'enabled') == 'true':
            db = mysql.DBLogger()
            self.dbLog = db.start(self.cfg)
    def buildProtocol(self, addr):
        """Create a server transport that presents the captured honeypot
        banner to the attacker as its own version string."""
        t = HonsshServerTransport()
        t.ourVersionString = self.otherVersionString
        t.factory = self
        t.factory.hpLog = self.hpLog
        t.supportedPublicKeys = self.privateKeys.keys()
        if not self.primes:
            log.msg('[SERVER] - disabling diffie-hellman-group-exchange because we cannot find moduli file')
            ske = t.supportedKeyExchanges[:]
            ske.remove('diffie-hellman-group-exchange-sha1')
            t.supportedKeyExchanges = ske
        return t
| Python |
# Copyright (c) 2013 Thomas Nicholson <tnnich@googlemail.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The names of the author(s) may not be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
from twisted.conch.ssh import transport
from twisted.python import log
class HonsshServer(transport.SSHServerTransport):
    """SSHServerTransport subclass with a banner-parsing dataReceived and a
    workaround for the 'bad packet length' disconnect."""
    def connectionMade(self):
        """
        Called when the connection is made to the other side. We sent our
        version and the MSG_KEXINIT packet.
        """
        self.transport.write('%s\r\n' % (self.ourVersionString,))
        # Start with the 'none' cipher suite until key exchange completes.
        self.currentEncryptions = transport.SSHCiphers('none', 'none', 'none', 'none')
        self.currentEncryptions.setKeys('', '', '', '', '', '')
    def dataReceived(self, data):
        """
        First, check for the version string (SSH-2.0-*). After that has been
        received, this method adds data to the buffer, and pulls out any
        packets.
        @type data: C{str}
        """
        self.buf = self.buf + data
        if not self.gotVersion:
            # Wait until a complete line containing 'SSH-' has arrived.
            if self.buf.find('\n', self.buf.find('SSH-')) == -1:
                return
            lines = self.buf.split('\n')
            for p in lines:
                if p.startswith('SSH-'):
                    self.gotVersion = True
                    self.otherVersionString = p.strip()
                    # Banner format is 'SSH-<protoversion>-<software>'.
                    remoteVersion = p.split('-')[1]
                    if remoteVersion not in self.supportedVersions:
                        self._unsupportedVersionReceived(remoteVersion)
                        return
                    # Drop everything up to and including the banner line,
                    # then start key exchange.
                    i = lines.index(p)
                    self.buf = '\n'.join(lines[i + 1:])
                    self.sendKexInit()
        packet = self.getPacket()
        while packet:
            messageNum = ord(packet[0])
            self.dispatchMessage(messageNum, packet[1:])
            packet = self.getPacket()
    def sendDisconnect(self, reason, desc):
        """
        http://kbyte.snowpenguin.org/portal/2013/04/30/kippo-protocol-mismatch-workaround/
        Workaround for the "bad packet length" error message.
        @param reason: the reason for the disconnect.  Should be one of the
                       DISCONNECT_* values.
        @type reason: C{int}
        @param desc: a descrption of the reason for the disconnection.
        @type desc: C{str}
        """
        if not 'bad packet length' in desc:
            # With python >= 3 we can use super?
            transport.SSHServerTransport.sendDisconnect(self, reason, desc)
        else:
            # Mimic OpenSSH's response to a non-SSH client instead of
            # sending an SSH-level disconnect packet.
            self.transport.write('Protocol mismatch.\n')
            log.msg('[SERVER] - Disconnecting with error, code %s\nreason: %s' % (reason, desc))
            self.transport.loseConnection()
| Python |
# Copyright (c) 2013 Thomas Nicholson <tnnich@googlemail.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The names of the author(s) may not be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
from twisted.python import log
from kippo.core.config import config
import subprocess
class Networking():
    """Per-attacker network plumbing. When [advNet] is enabled, creates a
    dummy 'honssh' interface carrying a fake source IP plus iptables
    SNAT/DNAT rules so honeypot-side traffic reflects the attacker's
    address; falls back to the static [honeypot] client_addr whenever a
    command fails or advNet is disabled."""
    cfg = config()
    theIP = None      # the attacker's real IP address
    theFakeIP = None  # fake IP derived from theIP, bound on the dummy interface
    def setupNetworking(self, theIP):
        """Create the dummy interface (advNet) and return the IP address to
        bind the honeypot-side client connection to."""
        if self.cfg.get('advNet', 'enabled') == 'true':
            self.theIP = theIP
            self.theFakeIP = self.getFakeIP(self.theIP)
            sp = self.runCommand('ip link add name honssh type dummy')
            result = sp.communicate()
            if sp.returncode != 0:
                if 'File exists' in result[0]:
                    log.msg("[ADV-NET] - HonSSH Interface already exists, not re-adding")
                    return self.addFakeIP()
                else:
                    log.msg('[ADV-NET] - Error creating HonSSH Interface - Using client_addr: ' + result[0])
                    return self.cfg.get('honeypot', 'client_addr')
            else:
                sp = self.runCommand('ip link set honssh up')
                result = sp.communicate()
                if sp.returncode != 0:
                    log.msg('[ADV-NET] - Error setting HonSSH Interface UP - Using client_addr: ' + result[0])
                    return self.cfg.get('honeypot', 'client_addr')
                else:
                    log.msg("[ADV-NET] - HonSSH Interface created")
                    return self.addFakeIP()
        else:
            log.msg("[ADV-NET] - Advanced Networking disabled - Using client_addr")
            return self.cfg.get('honeypot', 'client_addr')
    def addFakeIP(self):
        """Bind theFakeIP to the dummy interface and add the SNAT/DNAT rules;
        returns the IP to use (fake IP on success, client_addr on failure)."""
        sp = self.runCommand('ip addr add ' + self.theFakeIP + '/32 dev honssh')
        result = sp.communicate()
        if sp.returncode != 0:
            if 'File exists' in result[0]:
                log.msg("[ADV-NET] - Fake IP Address already exists, not re-adding")
                return self.theFakeIP
            else:
                log.msg('[ADV-NET] - Error adding IP address to HonSSH Interface - Using client_addr: ' + result[0])
                return self.cfg.get('honeypot', 'client_addr')
        else:
            sp = self.runCommand('iptables -t nat -A POSTROUTING -s ' + self.theFakeIP + '/32 -d ' + self.cfg.get('honeypot', 'honey_addr') + '/32 -p tcp --dport 22 -j SNAT --to ' + self.theIP)
            result = sp.communicate()
            if sp.returncode != 0:
                log.msg('[ADV-NET] - Error creating POSTROUTING Rule - Using client_addr: ' + result[0])
                return self.cfg.get('honeypot', 'client_addr')
            else:
                sp = self.runCommand('iptables -t nat -A PREROUTING -s ' + self.cfg.get('honeypot', 'honey_addr') + '/32 -d ' + self.theIP +'/32 -p tcp --sport 22 -j DNAT --to ' + self.theFakeIP)
                result = sp.communicate()
                if sp.returncode != 0:
                    log.msg('[ADV-NET] - Error creating PREROUTING Rule - Using client_addr: ' + result[0])
                    return self.cfg.get('honeypot', 'client_addr')
                else:
                    log.msg("[ADV-NET] - HonSSH FakeIP and iptables rules added")
                    return self.theFakeIP
    def removeFakeIP(self):
        """Remove the fake IP and this session's SNAT/DNAT rules
        (best-effort: failures are only logged)."""
        sp = self.runCommand('ip addr del ' + self.theFakeIP + '/32 dev honssh')
        result = sp.communicate()
        if sp.returncode != 0:
            log.msg('[ADV-NET] - Error removing IP address to HonSSH Interface: ' + result[0])
        sp = self.runCommand('iptables -t nat -D POSTROUTING -s ' + self.theFakeIP + '/32 -d ' + self.cfg.get('honeypot', 'honey_addr') + '/32 -p tcp --dport 22 -j SNAT --to ' + self.theIP)
        result = sp.communicate()
        if sp.returncode != 0:
            log.msg('[ADV-NET] - Error removing POSTROUTING Rule: ' + result[0])
        sp = self.runCommand('iptables -t nat -D PREROUTING -s ' + self.cfg.get('honeypot', 'honey_addr') + '/32 -d ' + self.theIP +'/32 -p tcp --sport 22 -j DNAT --to ' + self.theFakeIP)
        result = sp.communicate()
        if sp.returncode != 0:
            log.msg('[ADV-NET] - Error removing PREROUTING Rule: ' + result[0])
    def removeNetworking(self, sessions):
        """Tear down this session's fake IP; remove the dummy interface
        entirely once no sessions remain. When other sessions remain, only
        remove the fake IP if no other session shares this attacker IP."""
        if self.cfg.get('advNet', 'enabled') == 'true':
            if len(sessions) == 0:
                self.removeFakeIP()
                sp = self.runCommand('ip link del dev honssh')
                result = sp.communicate()
                if sp.returncode != 0:
                    log.msg("[ADV-NET] - Error removing HonSSH Interface: " + result[0])
            else:
                found = False
                for s in sessions:
                    session = sessions[s]
                    if session.endIP == self.theIP:
                        found = True
                        break
                if not found:
                    self.removeFakeIP()
    def getFakeIP(self, theIP):
        """Derive the fake IP by incrementing every octet of theIP by one,
        wrapping to 1 when an octet would reach 255.

        BUGFIX: the original compared octets as strings
        ("if ipBits[i] >= '255'"), which is lexicographic — e.g.
        '26' >= '255' and '3' >= '255' are both True — so most octets were
        incorrectly reset to 1. The comparison is now numeric.
        """
        ipBits = theIP.split('.')
        for i in range(0, len(ipBits)):
            octet = int(ipBits[i]) + 1
            if octet >= 255:  # numeric wrap, matching the intended check
                octet = 1
            ipBits[i] = str(octet)
        return '.'.join(ipBits)
    def runCommand(self, cmd):
        # NOTE(review): shell=True with IPs interpolated into the command
        # string; values originate from the peer address. Kept as-is, but
        # worth hardening to a list argv with shell=False.
        return subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
| Python |
# Copyright (c) 2013 Thomas Nicholson <tnnich@googlemail.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The names of the author(s) may not be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
from kippo.core.config import config
def attemptedLogin(username, password):
    """Speak an attempted login's credentials aloud via espeak when the
    [extras][voice] option is set to 'true'."""
    if config().get('extras', 'voice') == 'true':
        from espeak import espeak
        espeak.synth("Attempted login using: %s and %s" % (username, password))
def successLogin(endIP):
    """Announce a successful login's source IP via espeak when the
    [extras][voice] option is set to 'true'."""
    if config().get('extras', 'voice') == 'true':
        from espeak import espeak
        espeak.synth("Successful login from: %s" % endIP)
| Python |
# Copyright (c) 2013 Thomas Nicholson <tnnich@googlemail.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The names of the author(s) may not be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
from twisted.conch.ssh import transport, service
from twisted.python import log
from twisted.internet import protocol, defer
from kippo.core.config import config
import datetime, time, os, re, io, struct
class HonsshClientTransport(transport.SSHClientTransport):
    """Outbound SSH transport from HonSSH to the real honeypot.

    Acts as the 'client' half of the man-in-the-middle: once the
    connection is encrypted, every inbound packet is handed to the shared
    sshParse object instead of being dispatched locally, so it can be
    logged and relayed to the attacker-facing server transport.
    """
    def connectionMade(self):
        """Register this transport with the server side and inherit its config."""
        log.msg('[CLIENT] - New client connection')
        # Cross-link: the server transport forwards attacker packets to us,
        # and the shared parser needs a handle on this client transport.
        self.factory.server.client = self
        self.factory.server.sshParse.setClient(self)
        transport.SSHClientTransport.connectionMade(self)
        self.cfg = self.factory.server.cfg
        self.out = self.factory.server.out
    def verifyHostKey(self, pubKey, fingerprint):
        # Accept any honeypot host key unconditionally; the honeypot is
        # under our control so there is nothing to verify.
        return defer.succeed(True)
    def connectionSecure(self):
        """Called once key exchange completes; flags the client as ready."""
        self.factory.server.clientConnected = True
        log.msg('[CLIENT] - Client Connection Secured')
    def connectionLost(self, reason):
        """Log loss of the honeypot connection (address taken from config)."""
        transport.SSHClientTransport.connectionLost(self, reason)
        log.msg("[CLIENT] - Lost connection with the honeypot: %s" % self.cfg.get('honeypot', 'honey_addr'))
    def dispatchMessage(self, messageNum, payload):
        # Once both directions are encrypted, intercept every message and
        # route it through the shared parser (which logs and relays it);
        # before that, let twisted handle the key-exchange traffic normally.
        if transport.SSHClientTransport.isEncrypted(self, "both"):
            self.factory.server.sshParse.parsePacket('[CLIENT]', messageNum, payload)
        else:
            transport.SSHClientTransport.dispatchMessage(self, messageNum, payload)
##            for i in self.factory.server.interactors:
##                i.sessionWrite(data)
class HonsshClientFactory(protocol.ClientFactory):
    """Factory producing the full MITM client transport to the honeypot."""
    protocol = HonsshClientTransport
class HonsshSlimClientTransport(transport.SSHClientTransport):
    """Throwaway connection that only grabs the honeypot's SSH banner.

    Connects, reads until the version line ('SSH-...') arrives, copies it
    onto the server side (so HonSSH can present the same banner to
    attackers), then drops the connection.
    """
    def dataReceived(self, data):
        # self.buf / self.gotVersion are initialised by the base class.
        self.buf = self.buf + data
        if not self.gotVersion:
            # Wait until a full line following the 'SSH-' marker is buffered.
            if self.buf.find('\n', self.buf.find('SSH-')) == -1:
                return
            lines = self.buf.split('\n')
            for p in lines:
                if p.startswith('SSH-'):
                    self.gotVersion = True
                    self.otherVersionString = p.strip()
                    # Hand the banner to the server transport for replay.
                    self.factory.server.otherVersionString = self.otherVersionString
                    log.msg("[CLIENT] - " + self.factory.server.otherVersionString)
                    # Banner captured; this probe connection is done.
                    self.loseConnection()
class HonsshSlimClientFactory(protocol.ClientFactory):
    """Factory producing the banner-grabbing probe transport."""
    protocol = HonsshSlimClientTransport
| Python |
# Copyright (c) 2013 Thomas Nicholson <tnnich@googlemail.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The names of the author(s) may not be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
import sys
import datetime
import os
def log(logfile, message):
    """Append a timestamped line to *logfile*.

    Each line has the form 'YYYY-MM-DD HH:MM:SS - message'. If the file
    did not exist beforehand it is created and chmod'ed to 0644.

    Fixes over the original: uses open() in a with-block instead of the
    deprecated file() builtin, so the handle is closed even on error.
    """
    isNewFile = not os.path.isfile(logfile)
    timestamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    with open(logfile, 'a') as f:
        f.write(timestamp + " - " + message + "\n")
    if isNewFile:
        # World-readable log, matching the other log helpers in this module.
        os.chmod(logfile, 0o644)
def authLog(logfile, ip, username, password, success):
    """Record a connection or authentication attempt as a CSV line.

    With empty credentials only 'timestamp,ip' is written (a bare
    connection); otherwise 'timestamp,ip,username,password,flag' where
    flag is '1' for success and '0' for failure. A newly created file is
    chmod'ed to 0644.

    Fixes over the original: uses open() in a with-block instead of the
    deprecated file() builtin, so the handle is closed even on error.
    """
    isNewFile = not os.path.isfile(logfile)
    timestamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    with open(logfile, 'a') as f:
        if username == '' or password == '':
            f.write("%s,%s\n" % (timestamp, ip))
        else:
            auth = "1" if success else "0"
            f.write("%s,%s,%s,%s,%s\n" % (timestamp, ip, username, password, auth))
    if isNewFile:
        os.chmod(logfile, 0o644)
def downloadLog(dt, logfile, ip, link, outFile, theSize, theMD5):
    """Record a captured file download as a CSV line.

    Writes 'dt,ip,link,size,md5,outFile' to *logfile*; the timestamp *dt*
    is supplied by the caller. A newly created file is chmod'ed to 0644.

    Fixes over the original: uses open() in a with-block instead of the
    deprecated file() builtin, so the handle is closed even on error.
    """
    isNewFile = not os.path.isfile(logfile)
    with open(logfile, 'a') as f:
        f.write("%s,%s,%s,%s,%s,%s\n" % (dt, ip, link, theSize, theMD5, outFile))
    if isNewFile:
        os.chmod(logfile, 0o644)
def spoofLog(logfile, username, password, ip):
    """Track credentials accepted by login spoofing, one line per pair.

    Lines have the form 'username - password - ip [- ip ...]'. If the
    (username, password) pair already exists, *ip* is appended to its
    line unless already present; otherwise a new line is added. As in
    the original, the 0644 chmod is only applied when the file is first
    created.

    Fixes over the original: uses open() in with-blocks instead of the
    deprecated file() builtin, so handles are closed even on error (the
    read handle in the rewrite path was previously never closed).
    """
    if os.path.isfile(logfile):
        with open(logfile, 'r') as f:
            entries = [line.strip().split(' - ') for line in f.readlines()]
        found = False
        for entry in entries:
            if entry[0] == username and entry[1] == password:
                found = True
                # Fields from index 2 onward are the IPs already recorded.
                if ip not in entry[2:]:
                    entry.append(ip)
        # Rewrite the whole file with the (possibly extended) entries.
        with open(logfile, 'w') as f:
            for entry in entries:
                f.write(' - '.join(entry) + '\n')
            if not found:
                f.write("%s - %s - %s\n" % (username, password, ip))
    else:
        with open(logfile, 'a') as f:
            f.write("%s - %s - %s\n" % (username, password, ip))
        os.chmod(logfile, 0o644)
# Copyright (c) 2013 Thomas Nicholson <tnnich@googlemail.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The names of the author(s) may not be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
from honssh.protocols import baseProtocol, sftp, term, execTerm
from twisted.python import log
from kippo.core.config import config
import struct, uuid, random, os, ConfigParser, re
class SSH(baseProtocol.BaseProtocol):
    """Man-in-the-middle parser for the SSH transport/connection layer.

    Sits between the attacker-facing server transport and the honeypot
    client transport, decoding each message, logging credentials and
    channel activity, optionally rewriting packets (login spoofing,
    disabling public-key auth / X11 / port forwarding), then forwarding
    them to the opposite side.
    """
    channels = []
    username = ''
    password = ''
    # Shared config; read at class-definition time as in the original.
    cfg = config()

    # SSH message numbers (RFC 4250-4254) mapped to symbolic names.
    packetLayout = {
        1 : 'SSH_MSG_DISCONNECT', #['uint32', 'reason_code'], ['string', 'reason'], ['string', 'language_tag']
        2 : 'SSH_MSG_IGNORE', #['string', 'data']
        3 : 'SSH_MSG_UNIMPLEMENTED', #['uint32', 'seq_no']
        4 : 'SSH_MSG_DEBUG', #['boolean', 'always_display']
        5 : 'SSH_MSG_SERVICE_REQUEST', #['string', 'service_name']
        6 : 'SSH_MSG_SERVICE_ACCEPT', #['string', 'service_name']
        20 : 'SSH_MSG_KEXINIT', #['string', 'service_name']
        21 : 'SSH_MSG_NEWKEYS', #
        50 : 'SSH_MSG_USERAUTH_REQUEST', #['string', 'username'], ['string', 'service_name'], ['string', 'method_name']
        51 : 'SSH_MSG_USERAUTH_FAILURE', #['name-list', 'authentications'], ['boolean', 'partial_success']
        52 : 'SSH_MSG_USERAUTH_SUCCESS', #
        53 : 'SSH_MSG_USERAUTH_BANNER', #['string', 'message'], ['string', 'language_tag']
        80 : 'SSH_MSG_GLOBAL_REQUEST', #['string', 'request_name'], ['boolean', 'want_reply'] #tcpip-forward
        81 : 'SSH_MSG_REQUEST_SUCCESS', #
        82 : 'SSH_MSG_REQUEST_FAILURE', #
        90 : 'SSH_MSG_CHANNEL_OPEN', #['string', 'channel_type'], ['uint32', 'sender_channel'], ['uint32', 'initial_window_size'], ['uint32', 'maximum_packet_size'],
        91 : 'SSH_MSG_CHANNEL_OPEN_CONFIRMATION', #['uint32', 'recipient_channel'], ['uint32', 'sender_channel'], ['uint32', 'initial_window_size'], ['uint32', 'maximum_packet_size'],
        92 : 'SSH_MSG_CHANNEL_OPEN_FAILURE', #['uint32', 'recipient_channel'], ['uint32', 'reason_code'], ['string', 'reason'], ['string', 'language_tag']
        93 : 'SSH_MSG_CHANNEL_WINDOW_ADJUST', #['uint32', 'recipient_channel'], ['uint32', 'additional_bytes']
        94 : 'SSH_MSG_CHANNEL_DATA', #['uint32', 'recipient_channel'], ['string', 'data']
        95 : 'SSH_MSG_CHANNEL_EXTENDED_DATA', #['uint32', 'recipient_channel'], ['uint32', 'data_type_code'], ['string', 'data']
        96 : 'SSH_MSG_CHANNEL_EOF', #['uint32', 'recipient_channel']
        97 : 'SSH_MSG_CHANNEL_CLOSE', #['uint32', 'recipient_channel']
        98 : 'SSH_MSG_CHANNEL_REQUEST', #['uint32', 'recipient_channel'], ['string', 'request_type'], ['boolean', 'want_reply']
        99 : 'SSH_MSG_CHANNEL_SUCCESS', #
        100 : 'SSH_MSG_CHANNEL_FAILURE' #
    }

    def __init__(self, server, out):
        self.out = out
        self.server = server
        # Fresh per-instance list (shadows the class attribute).
        self.channels = []

    def setClient(self, client):
        """Attach the honeypot-facing client transport once it exists."""
        self.client = client

    def parsePacket(self, parent, messageNum, payload):
        """Decode one SSH message from *parent* ('[SERVER]' = attacker side,
        '[CLIENT]' = honeypot side), log/rewrite it as configured, then
        forward it to the opposite side unless a handler suppressed it.
        """
        self.data = payload
        self.packetSize = len(payload)
        self.sendOn = True
        packet = self.packetLayout[messageNum]

        if self.cfg.get('packets', 'enabled') == 'true':
            if parent == '[SERVER]':
                direction = 'CLIENT -> SERVER'
            else:
                direction = 'SERVER -> CLIENT'
            self.out.advancedLog(direction + ' - ' + packet.ljust(33) + ' - ' + repr(payload))

        # - UserAuth
        if packet == 'SSH_MSG_USERAUTH_REQUEST':
            self.username = self.extractString()
            service = self.extractString()
            authType = self.extractString()
            if authType == 'password':
                self.extractBool()
                # Remaining size before the password field; used below to
                # splice a replacement password into the raw payload.
                psize = self.packetSize
                self.password = self.extractString()
                if self.password != "":
                    if self.cfg.get('spoof', 'enabled') == 'true':
                        user = self.getUsers(self.username)
                        rand = 0
                        if user != None:
                            if user[2] == 'fixed':
                                passwords = re.sub(r'\s', '', user[3]).split(',')
                                if self.password in passwords:
                                    rand = 1
                            elif user[2] == 'random':
                                randomFactor = (100 / int(user[3])) + 1
                                rand = random.randrange(1, randomFactor)
                        found = False
                        logfile = self.cfg.get('folders', 'log_path') + "/spoof.log"
                        if os.path.isfile(logfile):
                            # BUGFIX: open() instead of the deprecated file()
                            # builtin; handle is closed in all cases.
                            f = open(logfile, 'r')
                            creds = f.read().splitlines()
                            f.close()
                            for cred in creds:
                                cred = cred.strip().split(' - ')
                                if cred[0] == self.username and cred[1] == self.password:
                                    # Previously-spoofed pair: always accept.
                                    rand = 1
                                    self.out.writePossibleLink(cred[2:])
                                    break
                        if rand == 1:
                            # Swap the attacker's password for the real one.
                            payload = payload[:0-psize] + self.stringToHex(user[1])
                            self.out.addConnectionString("[SSH  ] Spoofing Login - Changing %s to %s" % (self.password, user[1]))
                            self.out.writeSpoofPass(self.username, self.password)
            elif authType == 'publickey':
                if self.cfg.get('hp-restrict', 'disable_publicKey') == 'true':
                    # Reject key auth and tell the attacker only 'password'
                    # is available (51 = USERAUTH_FAILURE).
                    self.sendOn = False
                    self.server.sendPacket(51, self.stringToHex('password') + chr(0))

        elif packet == 'SSH_MSG_USERAUTH_FAILURE':
            authList = self.extractString()
            if 'publickey' in authList:
                if self.cfg.get('hp-restrict', 'disable_publicKey') == 'true':
                    log.msg("[SSH] - Detected Public Key Auth - Disabling!")
                    # Strip 'publickey' from the advertised methods.
                    payload = self.stringToHex('password') + chr(0)
            if self.username != '' and self.password != '':
                self.out.loginFailed(self.username, self.password)

        elif packet == 'SSH_MSG_USERAUTH_SUCCESS':
            if self.username != '' and self.password != '':
                self.out.loginSuccessful(self.username, self.password)
        # - End UserAuth

        # - Channels
        elif packet == 'SSH_MSG_CHANNEL_OPEN':
            type = self.extractString()
            id = self.extractInt(4)
            if type == 'session':
                self.createChannel(parent, id, type)
            elif type == 'x11':
                if self.cfg.get('hp-restrict', 'disable_x11') == 'true':
                    log.msg("[SSH] - Detected X11 Channel - Disabling!")
                    self.sendOn = False
                    self.sendBack(parent, 92, self.intToHex(id))
                else:
                    ##LOG X11 Channel opened - not logging
                    self.createChannel(parent, id, type, session=baseProtocol.BaseProtocol())
            elif type == 'direct-tcpip':
                if self.cfg.get('hp-restrict', 'disable_port_forwarding') == 'true':
                    log.msg("[SSH] - Detected Port Forwarding Channel - Disabling!")
                    self.sendOn = False
                    self.sendBack(parent, 92, self.intToHex(id) + self.intToHex(1) + self.stringToHex('open failed') + self.intToHex(0))
                else:
                    ##LOG PORT FORWARDING Channel opened - not logging
                    self.createChannel(parent, id, type, session=baseProtocol.BaseProtocol())
            else:
                ##UNKNOWN CHANNEL TYPE
                if type not in ['exit-status']:
                    log.msg("[SSH] - Unknown Channel Type Detected - " + type)

        elif packet == 'SSH_MSG_CHANNEL_OPEN_CONFIRMATION':
            channel = self.getChannel(self.extractInt(4), parent)
            senderID = self.extractInt(4) #SENDER
            # Record the opposite side's channel number so later packets
            # from either direction can be matched to this channel.
            if parent == '[SERVER]':
                channel['serverID'] = senderID
            elif parent == '[CLIENT]':
                channel['clientID'] = senderID
            ##CHANNEL OPENED

        elif packet == 'SSH_MSG_CHANNEL_OPEN_FAILURE':
            channel = self.getChannel(self.extractInt(4), parent)
            self.channels.remove(channel)
            ##CHANNEL FAILED TO OPEN

        elif packet == 'SSH_MSG_CHANNEL_REQUEST':
            channel = self.getChannel(self.extractInt(4), parent)
            type = self.extractString()
            theUUID = uuid.uuid4().hex
            if type == 'pty-req':
                channel['name'] = '[TERM' + str(channel['serverID']) + ']'
                self.out.channelOpened(theUUID, channel['name'])
                channel['session'] = term.Term(self.out, theUUID, channel['name'])
            elif type == 'exec':
                if self.cfg.get('hp-restrict','disable_exec') == 'true':
                    log.msg("[SSH] - Detected EXEC Channel Request - Disabling!")
                    self.sendOn = False
                    self.sendBack(parent, 100, self.intToHex(channel['serverID']))
                else:
                    channel['name'] = '[EXEC' + str(channel['serverID']) + ']'
                    self.extractBool()
                    command = self.extractString()
                    self.out.channelOpened(theUUID, channel['name'])
                    channel['session'] = execTerm.ExecTerm(self.out, theUUID, channel['name'], command)
            elif type == 'subsystem':
                self.extractBool()
                subsystem = self.extractString()
                if subsystem == 'sftp':
                    if self.cfg.get('hp-restrict','disable_sftp') == 'true':
                        log.msg("[SSH] - Detected SFTP Channel Request - Disabling!")
                        self.sendOn = False
                        self.sendBack(parent, 100, self.intToHex(channel['serverID']))
                    else:
                        channel['name'] = '[SFTP' + str(channel['serverID']) + ']'
                        self.out.channelOpened(theUUID, channel['name'])
                        channel['session'] = sftp.SFTP(self.out, theUUID, channel['name'])
                else:
                    ##UNKNOWN SUBSYSTEM
                    log.msg("[SSH] - Unknown Subsystem Type Detected - " + subsystem)
            elif type == 'x11-req':
                if self.cfg.get('hp-restrict', 'disable_x11') == 'true':
                    self.sendOn = False
                    # NOTE(review): replies with 82 (REQUEST_FAILURE); for a
                    # channel request 100 (CHANNEL_FAILURE) may be intended
                    # - confirm before changing.
                    self.sendBack(parent, 82, '')
            else:
                ##UNKNOWN CHANNEL REQUEST TYPE
                if type not in ['window-change', 'env', 'shell', 'exit-status']:
                    log.msg("[SSH] - Unknown Channel Request Type Detected - " + type)

        elif packet == 'SSH_MSG_CHANNEL_FAILURE':
            pass

        elif packet == 'SSH_MSG_CHANNEL_CLOSE':
            channel = self.getChannel(self.extractInt(4), parent)
            # Mark this side closed; only tear down once both sides have.
            channel[parent] = True
            if '[SERVER]' in channel and '[CLIENT]' in channel:
                ##CHANNEL CLOSED
                if channel['session'] is not None:
                    channel['session'].channelClosed()
                    self.out.channelClosed(channel['session'])
                self.channels.remove(channel)
        # - END Channels

        # - ChannelData
        elif packet == 'SSH_MSG_CHANNEL_DATA':
            channel = self.getChannel(self.extractInt(4), parent)
            channel['session'].parsePacket(parent, self.extractString())

        elif packet == 'SSH_MSG_CHANNEL_EXTENDED_DATA':
            channel = self.getChannel(self.extractInt(4), parent)
            self.extractInt(4)
            channel['session'].parsePacket(parent, self.extractString())
        # - END ChannelData

        elif packet == 'SSH_MSG_GLOBAL_REQUEST':
            type = self.extractString()
            if type == 'tcpip-forward':
                if self.cfg.get('hp-restrict', 'disable_port_forwarding') == 'true':
                    self.sendOn = False
                    self.sendBack(parent, 82, '')

        if self.sendOn:
            # Relay to the opposite side of the MITM.
            if parent == '[SERVER]':
                self.client.sendPacket(messageNum, payload)
            else:
                self.server.sendPacket(messageNum, payload)

    def sendBack(self, parent, messageNum, payload):
        """Send a reply back to the side the packet came FROM (used when a
        request has been intercepted instead of forwarded)."""
        if self.cfg.get('packets', 'enabled') == 'true':
            packet = self.packetLayout[messageNum]
            if parent == '[SERVER]':
                direction = 'HONSSH -> CLIENT'
            else:
                direction = 'HONSSH -> SERVER'
            self.out.advancedLog(direction + ' - ' + packet.ljust(33) + ' - ' + repr(payload))
        if parent == '[SERVER]':
            self.server.sendPacket(messageNum, payload)
        elif parent == '[CLIENT]':
            self.client.sendPacket(messageNum, payload)

    def createChannel(self, parent, id, type, session=None):
        """Start tracking a channel opened by *parent*; the opposite-side
        ID is filled in later by CHANNEL_OPEN_CONFIRMATION."""
        if parent == '[SERVER]':
            self.channels.append({'serverID':id, 'type': type, 'session':session})
        elif parent == '[CLIENT]':
            self.channels.append({'clientID':id, 'type': type, 'session':session})

    def getChannel(self, channelNum, parent):
        """Find a tracked channel by the recipient channel number.

        Packets from the client reference our server-side IDs and vice
        versa, hence the flipped search key. Returns None if no channel
        matches.
        """
        theChannel = None
        for channel in self.channels:
            if parent == '[CLIENT]':
                search = 'serverID'
            else:
                search = 'clientID'
            # .get(): the opposite-side ID may not be recorded yet for a
            # half-open channel (avoids KeyError).
            if channel.get(search) == channelNum:
                theChannel = channel
                break
        # BUGFIX: the original returned the loop variable 'channel', which
        # yielded the LAST channel in the list when no match was found
        # instead of None.
        return theChannel

    def getUsers(self, username):
        """Look up *username* in the spoofing users config.

        Returns [user, real_password, 'fixed', fake_passwords] or
        [user, real_password, 'random', random_chance], or None when the
        user is unknown or the config file is missing.
        """
        usersCfg = ConfigParser.ConfigParser()
        if os.path.exists(self.cfg.get('spoof','users_conf')):
            usersCfg.read(self.cfg.get('spoof','users_conf'))
            users = usersCfg.sections()
            for user in users:
                if user == username:
                    if usersCfg.has_option(user, 'fake_passwords'):
                        return [user, usersCfg.get(user, 'real_password'), 'fixed', usersCfg.get(user, 'fake_passwords')]
                    if usersCfg.has_option(user, 'random_chance'):
                        return [user, usersCfg.get(user, 'real_password'), 'random', usersCfg.get(user, 'random_chance')]
        else:
            log.msg("ERROR: users_conf does not exist")
        return None

    def stringToHex(self, message):
        """Encode *message* as an SSH wire-format string (uint32 length + bytes)."""
        b = message.encode('utf-8')
        size = struct.pack('>L',len(b))
        return size + b

    def intToHex(self, int):
        """Encode an integer as a big-endian uint32 (parameter name kept
        for backward compatibility although it shadows the builtin)."""
        return struct.pack('>L', int)
# Copyright (c) 2013 Thomas Nicholson <tnnich@googlemail.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The names of the author(s) may not be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
from twisted.python import log
from honssh.protocols import baseProtocol
import datetime, io
class SFTP(baseProtocol.BaseProtocol):
    """Passive parser for the SFTP subsystem inside an SSH channel.

    Reassembles SFTP packets from channel data in both directions, maps
    them to equivalent shell-style commands for logging, and (when
    passive downloads are enabled) captures uploaded file contents.
    """
    prevID = ''
    ID = ''
    handle = ''
    path = ''
    command = ''
    payloadSize = 0
    payloadOffset = 0
    theFile = ''

    # SFTP packet type numbers (draft-ietf-secsh-filexfer) to names.
    packetLayout = {
        1 : 'SSH_FXP_INIT', #['uint32', 'version'], [['string', 'extension_name'], ['string', 'extension_data']]]
        2 : 'SSH_FXP_VERSION', #[['uint32', 'version'], [['string', 'extension_name'], ['string', 'extension_data']]]
        3 : 'SSH_FXP_OPEN', #[['uint32', 'id'], ['string', 'filename'], ['uint32', 'pflags'], ['ATTRS', 'attrs']]
        4 : 'SSH_FXP_CLOSE', #[['uint32', 'id'], ['string', 'handle']]
        5 : 'SSH_FXP_READ', #[['uint32', 'id'], ['string', 'handle'], ['uint64', 'offset'], ['uint32', 'len']]
        6 : 'SSH_FXP_WRITE', #[['uint32', 'id'], ['string', 'handle'], ['uint64', 'offset'], ['string', 'data']]
        7 : 'SSH_FXP_LSTAT', #[['uint32', 'id'], ['string', 'path']]
        8 : 'SSH_FXP_FSTAT', #[['uint32', 'id'], ['string', 'handle']]
        9 : 'SSH_FXP_SETSTAT', #[['uint32', 'id'], ['string', 'path'], ['ATTRS', 'attrs']]
        10 : 'SSH_FXP_FSETSTAT', #[['uint32', 'id'], ['string', 'handle'], ['ATTRS', 'attrs']]
        11 : 'SSH_FXP_OPENDIR', #[['uint32', 'id'], ['string', 'path']]
        12 : 'SSH_FXP_READDIR', #[['uint32', 'id'], ['string', 'handle']]
        13 : 'SSH_FXP_REMOVE', #[['uint32', 'id'], ['string', 'filename']]
        14 : 'SSH_FXP_MKDIR', #[['uint32', 'id'], ['string', 'path'], ['ATTRS', 'attrs']]
        15 : 'SSH_FXP_RMDIR', #[['uint32', 'id'], ['string', 'path']]
        16 : 'SSH_FXP_REALPATH', #[['uint32', 'id'], ['string', 'path']]
        17 : 'SSH_FXP_STAT', #[['uint32', 'id'], ['string', 'path']]
        18 : 'SSH_FXP_RENAME', #[['uint32', 'id'], ['string', 'oldpath'], ['string', 'newpath']]
        19 : 'SSH_FXP_READLINK', #[['uint32', 'id'], ['string', 'path']]
        20 : 'SSH_FXP_SYMLINK', #[['uint32', 'id'], ['string', 'linkpath'], ['string', 'targetpath']]
        101 : 'SSH_FXP_STATUS', #[['uint32', 'id'], ['uint32', 'error_code'], ['string', 'error_message'], ['string', 'language']]
        102 : 'SSH_FXP_HANDLE', #[['uint32', 'id'], ['string', 'handle']]
        103 : 'SSH_FXP_DATA', #[['uint32', 'id'], ['string', 'data']]
        104 : 'SSH_FXP_NAME', #[['uint32', 'id'], ['uint32', 'count'], [['string', 'filename'], ['string', 'longname'], ['ATTRS', 'attrs']]]
        105 : 'SSH_FXP_ATTRS', #[['uint32', 'id'], ['ATTRS', 'attrs']]
        200 : 'SSH_FXP_EXTENDED', #[]
        201 : 'SSH_FXP_EXTENDED_REPLY' #[]
    }

    def __init__(self, out, uuid, chanName):
        self.name = chanName
        self.uuid = uuid
        self.out = out
        # Per-direction reassembly buffers (SFTP packets can span or share
        # SSH channel-data payloads).
        self.clientPacket = baseProtocol.BaseProtocol()
        self.serverPacket = baseProtocol.BaseProtocol()

    def parsePacket(self, parent, payload):
        """Accumulate channel data for *parent*'s direction until a whole
        SFTP packet is buffered, then hand it to handlePacket. Recurses
        when a payload contains the start of the next packet."""
        if parent == '[SERVER]':
            self.parentPacket = self.serverPacket
        elif parent == '[CLIENT]':
            self.parentPacket = self.clientPacket

        if self.parentPacket.packetSize == 0:
            # New packet: first 4 bytes are the SFTP packet length.
            self.parentPacket.packetSize = int(payload[:4].encode('hex'), 16) - len(payload[4:])
            payload = payload[4:]
            self.parentPacket.data = payload
            payload = ''
        else:
            if len(payload) > self.parentPacket.packetSize:
                # Payload completes this packet and starts the next one.
                self.parentPacket.data = self.parentPacket.data + payload[:self.parentPacket.packetSize]
                payload = payload[self.parentPacket.packetSize:]
                self.parentPacket.packetSize = 0
            else:
                self.parentPacket.packetSize = self.parentPacket.packetSize - len(payload)
                self.parentPacket.data = self.parentPacket.data + payload
                payload = ''

        if self.parentPacket.packetSize == 0:
            self.handlePacket(parent)

        if len(payload) != 0:
            self.parsePacket(parent, payload)

    def handlePacket(self, parent):
        """Decode one complete SFTP packet and translate it into a logged
        shell-style command (cd/ls/get/put/rm/...)."""
        self.packetSize = self.parentPacket.packetSize
        self.data = self.parentPacket.data

        sftpNum = self.extractInt(1)
        packet = self.packetLayout[sftpNum]
        # Request/response pairing: responses echo the request ID.
        self.prevID = self.ID
        self.ID = self.extractInt(4)

        if packet == 'SSH_FXP_OPENDIR':
            self.path = self.extractString()
        elif packet == 'SSH_FXP_REALPATH':
            self.path = self.extractString()
            self.command = 'cd ' + self.path
            log.msg(parent + '[SFTP] - Entered Command: ' + self.command)
            self.out.commandEntered(self.uuid, self.name, self.command)
        elif packet == 'SSH_FXP_OPEN':
            self.path = self.extractString()
            # pflags bit 1 = read (-> get), bit 2 = write (-> put).
            pflags = '{0:08b}'.format(self.extractInt(4))
            if pflags[6] == '1':
                self.command = 'put ' + self.path
                self.theFile = ''
            elif pflags[7] == '1':
                self.command = 'get ' + self.path
            else:
                #Unknown PFlag
                log.msg(parent + '[SFTP] - New SFTP pflag detected - Please raise a HonSSH issue on google code with the details: %s %s' % (pflags, self.data))
            log.msg(parent + '[SFTP] - Entered Command: ' + self.command)
            self.out.commandEntered(self.uuid, self.name, self.command)
        elif packet == 'SSH_FXP_READ':
            pass
        elif packet == 'SSH_FXP_WRITE':
            if self.handle == self.extractString():
                # Reconstruct the uploaded file from its write offsets.
                self.offset = self.extractInt(8)
                self.theFile = self.theFile[:self.offset] + self.extractData()
        elif packet == 'SSH_FXP_HANDLE':
            if self.ID == self.prevID:
                self.handle = self.extractString()
        elif packet == 'SSH_FXP_READDIR':
            if self.handle == self.extractString():
                self.command = 'ls ' + self.path
        elif packet == 'SSH_FXP_SETSTAT':
            self.path = self.extractString()
            self.command = self.extractAttrs() + ' ' + self.path
        elif packet == 'SSH_FXP_EXTENDED':
            cmd = self.extractString()
            self.path = self.extractString()
            if cmd == 'statvfs@openssh.com':
                self.command = 'df ' + self.path
            elif cmd == 'hardlink@openssh.com':
                self.command = 'ln ' + self.path + ' ' + self.extractString()
            elif cmd == 'posix-rename@openssh.com':
                self.command = 'mv ' + self.path + ' ' + self.extractString()
            else:
                #UNKNOWN COMMAND
                log.msg(parent + '[SFTP] - New SFTP Extended Command detected - Please raise a HonSSH issue on google code with the details: %s %s' % (cmd, self.data))
        elif packet == 'SSH_FXP_EXTENDED_REPLY':
            log.msg(parent + '[SFTP] - Entered Command: ' + self.command)
            self.out.commandEntered(self.uuid, self.name, self.command)
        elif packet == 'SSH_FXP_CLOSE':
            if self.handle == self.extractString():
                if 'get' in self.command:
                    log.msg(parent + '[SFTP] - Finished Downloading: ' + self.path)
                elif 'put' in self.command:
                    log.msg(parent + '[SFTP] - Finished Uploading: ' + self.path)
                    if self.out.cfg.get('download','passive') == 'true':
                        # Persist the captured upload to the downloads folder.
                        self.out.makeDownloadsFolder()
                        outfile = self.out.downloadFolder + datetime.datetime.now().strftime("%Y%m%d_%H%M%S") + "-" + self.path.split('/')[-1]
                        f = open(outfile, 'wb')
                        f.write(self.theFile)
                        f.close()
                        self.out.fileDownloaded((self.name, self.uuid, True, self.path.split('/')[-1], outfile, None))
        elif packet == 'SSH_FXP_SYMLINK':
            self.command = 'ln -s ' + self.extractString() + ' ' + self.extractString()
        elif packet == 'SSH_FXP_MKDIR':
            self.command = 'mkdir ' + self.extractString()
        elif packet == 'SSH_FXP_REMOVE':
            self.command = 'rm ' + self.extractString()
        elif packet == 'SSH_FXP_RMDIR':
            self.command = 'rmdir ' + self.extractString()
        elif packet == 'SSH_FXP_STATUS':
            if self.ID == self.prevID:
                code = self.extractInt(4)
                if code in [0, 1]:
                    # SSH_FX_OK / SSH_FX_EOF: command succeeded.
                    if 'get' not in self.command and 'put' not in self.command:
                        log.msg(parent + '[SFTP] - Entered Command: ' + self.command)
                        self.out.commandEntered(self.uuid, self.name, self.command)
                else:
                    message = self.extractString()
                    log.msg(parent + '[SFTP] - Failed Command: ' + self.command + ' Reason: ' + message)
                    self.out.commandEntered(self.uuid, self.name + ' [FAILED]', self.command)

    def extractAttrs(self):
        """Decode an ATTRS block into an equivalent chmod/chown command
        prefix (path is appended by the caller).

        BUGFIX: the original referenced the undefined name 'parent' in its
        log calls, raising NameError whenever the chmod or unknown-attr
        branches executed; the log prefix is now a literal.
        """
        cmd = ''
        flags = '{0:08b}'.format(self.extractInt(4))
        if flags[5] == '1':
            perms = '{0:09b}'.format(self.extractInt(4))
            log.msg('[SFTP] - PERMS:' + perms)
            chmod = str(int(perms[:3], 2)) + str(int(perms[3:6], 2)) + str(int(perms[6:], 2))
            cmd = 'chmod ' + chmod
        elif flags[6] == '1':
            user = str(self.extractInt(4))
            group = str(self.extractInt(4))
            cmd = 'chown ' + user + ':' + group
        else:
            #Unknown attribute
            log.msg('[SFTP] - New SFTP Attribute detected - Please raise a HonSSH issue on google code with the details: %s %s' % (flags, self.data))
        return cmd
'''
CLIENT SERVER
SSH_FXP_INIT -->
<-- SSH_FXP_VERSION
SSH_FXP_OPEN -->
<-- SSH_FXP_HANDLE (or SSH_FXP_STATUS if fail)
SSH_FXP_READ -->
<-- SSH_FXP_DATA (or SSH_FXP_STATUS if fail)
SSH_FXP_WRITE -->
<-- SSH_FXP_STATUS
SSH_FXP_REMOVE -->
<-- SSH_FXP_STATUS
SSH_FXP_RENAME -->
<-- SSH_FXP_STATUS
SSH_FXP_MKDIR -->
<-- SSH_FXP_STATUS
SSH_FXP_RMDIR -->
<-- SSH_FXP_STATUS
SSH_FXP_OPENDIR -->
<-- SSH_FXP_HANDLE (or SSH_FXP_STATUS if fail)
SSH_FXP_READDIR -->
<-- SSH_FXP_NAME (or SSH_FXP_STATUS if fail)
SSH_FXP_STAT --> //Follows symlinks
<-- SSH_FXP_ATTRS (or SSH_FXP_STATUS if fail)
SSH_FXP_LSTAT --> //Does not follow symlinks
<-- SSH_FXP_ATTRS (or SSH_FXP_STATUS if fail)
SSH_FXP_FSTAT --> //Works on an open file/handle not a file path like (L)STAT
<-- SSH_FXP_ATTRS (or SSH_FXP_STATUS if fail)
SSH_FXP_SETSTAT --> //Sets file attributes on path
<-- SSH_FXP_STATUS
SSH_FXP_FSETSTAT--> //Sets file attributes on a handle
<-- SSH_FXP_STATUS
SSH_FXP_READLINK --> //Used to find the target of a symlink
<-- SSH_FXP_NAME (or SSH_FXP_STATUS if fail)
SSH_FXP_SYMLINK --> //Used to create a symlink
<-- SSH_FXP_NAME (or SSH_FXP_STATUS if fail)
SSH_FXP_REALPATH --> //Relative path
<-- SSH_FXP_NAME (or SSH_FXP_STATUS if fail)
SSH_FXP_CLOSE --> //Closes handle not session
<-- SSH_FXP_STATUS
''' | Python |
# Copyright (c) 2013 Thomas Nicholson <tnnich@googlemail.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The names of the author(s) may not be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
from twisted.python import log
from honssh.protocols import baseProtocol
import re, io, datetime
class ExecTerm(baseProtocol.BaseProtocol):
    """Session parser for 'exec' channel requests.

    Plain commands are recorded to a TTY log; 'scp' commands switch to
    SCP mode, where the server-bound stream is scanned for the SCP file
    header ('Cmmmm size name') and the following file body is captured.
    """
    # Class-level defaults; SCP state is reset after each captured file.
    size = -1
    fileName = ''
    theFile = ''
    scp = False

    def __init__(self, out, uuid, chanName, command):
        self.name = chanName
        self.out = out
        self.uuid = uuid
        if not command.startswith('scp'):
            # Regular exec: open a TTY log and record the command.
            stamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S_%f")[:-3]
            self.ttylog_file = self.out.logLocation + stamp + '_' + self.name[1:-1] + '.tty'
            self.out.openTTY(self.ttylog_file)
            self.out.inputTTY(self.ttylog_file, 'INPUT: ' + command + '\n\n')
            self.processCommand(self.uuid, self.name, command)
        else:
            self.scp = True
            self.out.commandEntered(self.uuid, self.name + ' [SCP]', command)

    def channelClosed(self):
        """Close the TTY log (SCP sessions have none)."""
        if self.scp:
            return
        self.out.closeTTY(self.ttylog_file)

    def parsePacket(self, parent, payload):
        """Consume one chunk of channel data for this exec session."""
        self.data = payload
        if not self.scp:
            # Non-SCP exec: everything goes straight into the TTY log.
            self.out.inputTTY(self.ttylog_file, payload)
            return
        if parent != '[SERVER]':
            # Only the server-bound direction carries the SCP file stream.
            return
        if self.size == -1:
            # Waiting for the SCP header that announces size and name.
            header = re.match('C\d{4} (\d*) (.*)', self.data)
            if header:
                self.size = int(header.group(1))
                self.fileName = str(header.group(2))
            return
        # Mid-transfer: accumulate up to the announced number of bytes.
        chunk = self.data[:self.size]
        self.theFile = self.theFile + chunk
        self.size = self.size - len(chunk)
        if self.size != 0:
            return
        # Transfer complete: optionally persist the capture, then reset.
        if self.out.cfg.get('download','passive') == 'true':
            self.out.makeDownloadsFolder()
            outfile = self.out.downloadFolder + datetime.datetime.now().strftime("%Y%m%d_%H%M%S") + "-" + self.fileName
            f = open(outfile, 'wb')
            f.write(self.theFile)
            f.close()
            self.out.fileDownloaded((self.name + ' [SCP]', self.uuid, True, self.fileName, outfile, None))
        self.fileName = ''
        self.theFile = ''
        self.size = -1
# Copyright (c) 2013 Thomas Nicholson <tnnich@googlemail.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The names of the author(s) may not be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
from twisted.python import log
from honssh.protocols import baseProtocol
import datetime
import re
import binascii
class Term(baseProtocol.BaseProtocol):
    """Interactive shell session parser.

    Reconstructs the command line the attacker is typing by replaying the
    keystrokes sent in the [SERVER] direction and the echo/control
    sequences coming back in the [CLIENT] direction, and records the
    session to a tty log file.
    """
    command = ''      # command line reconstructed so far
    pointer = 0       # cursor position within self.command
    tabPress = False  # tab pressed; expect the completion text in the next echo
    upArrow = False   # up/down pressed; expect the shell to redraw the line

    def __init__(self, out, uuid, chanName):
        self.name = chanName
        self.uuid = uuid
        self.out = out
        # One tty log per channel, timestamped to the millisecond.
        self.ttylog_file = self.out.logLocation + datetime.datetime.now().strftime("%Y%m%d_%H%M%S_%f")[:-3] + '_' + self.name[1:-1] + '.tty'
        self.out.openTTY(self.ttylog_file)

    def channelClosed(self):
        self.out.closeTTY(self.ttylog_file)

    def parsePacket(self, parent, payload):
        self.data = payload
        if parent == '[SERVER]':
            # Keystrokes from the attacker: replay the line editing.
            while len(self.data) != 0:
                if self.data[:1] == '\x09': #If Tab Pressed
                    self.tabPress = True
                    self.data = self.data[1:]
                elif self.data[:1] == '\x7f' or self.data[:1] == '\x08': #If Backspace Pressed
                    if self.pointer != 0:
                        self.command = self.command[:self.pointer-1] + self.command[self.pointer:]
                        self.pointer = self.pointer - 1
                    self.data = self.data[1:]
                elif self.data[:1] == '\x0d' or self.data[:1] == '\x03': #if enter or ctrl+c
                    if self.data[:1] == '\x03':
                        self.command = self.command + "^C"
                    self.data = self.data[1:]
                    log.msg("[SERVER] - Entered command: %s" % (self.command))
                    self.processCommand(self.uuid, self.name, self.command)
                    self.command = ''
                    self.pointer = 0
                elif self.data[:3] == '\x1b\x4f\x48': #If Home Pressed
                    self.pointer = 0
                    self.data = self.data[3:]
                elif self.data[:3] == '\x1b\x4f\x46': #If End Pressed
                    self.pointer = len(self.command)
                    self.data = self.data[3:]
                elif self.data[:3] == '\x1b\x5b\x43': #If Right Pressed
                    if self.pointer != len(self.command):
                        self.pointer = self.pointer + 1
                    self.data = self.data[3:]
                elif self.data[:3] == '\x1b\x5b\x44': #If Left Pressed
                    if self.pointer != 0:
                        self.pointer = self.pointer - 1
                    self.data = self.data[3:]
                elif self.data[:3] == '\x1b\x5b\x41' or self.data[:3] == '\x1b\x5b\x42': #If up or down arrow
                    self.upArrow = True
                    self.data = self.data[3:]
                else:
                    # Printable character: insert at the cursor position.
                    self.command = self.command[:self.pointer] + self.data[:1] + self.command[self.pointer:]
                    self.pointer = self.pointer + 1
                    self.data = self.data[1:]
        elif parent == '[CLIENT]':
            self.out.inputTTY(self.ttylog_file, self.data) #Log to TTY File
            if self.tabPress:
                # After a tab the shell echoes the completed text; \x07
                # (bell) means there was no completion.
                if not self.data.startswith('\x0d'):
                    if self.data != '\x07':
                        self.command = self.command + self.data
                self.tabPress = False
            if self.upArrow:
                # After up/down the shell redraws the line; replay the
                # redraw sequence to rebuild the recalled command.
                while len(self.data) != 0:
                    if self.data[:1] == '\x08': #Backspace
                        self.command = self.command[:-1]
                        self.pointer = self.pointer - 1
                        self.data = self.data[1:]
                    elif self.data[:3] == '\x1b\x5b\x4b': #ESC[K - Clear Line
                        self.command = self.command[:self.pointer]
                        self.data = self.data[3:]
                    elif self.data[:1] == '\x0d':
                        self.pointer = 0
                        self.data = self.data[1:]
                    elif self.data[:3] == '\x1b\x5b\x43': #Right Arrow
                        self.pointer = self.pointer + 1
                        self.data = self.data[3:]
                    elif self.data[:2] == '\x1b\x5b' and len(self.data) > 3 and self.data[3] == '\x50':
                        # ESC[<n>P (delete char); length guard prevents the
                        # IndexError the original hit on buffers < 4 bytes.
                        self.data = self.data[4:]
                    elif self.data[:1] != '\x07' and self.data[:1] != '\x0d': #Needed?!
                        self.command = self.command[:self.pointer] + self.data[:1] + self.command[self.pointer:]
                        self.pointer = self.pointer + 1
                        self.data = self.data[1:]
                    else:
                        # Unhandled byte (e.g. \x07 bell): discard it.
                        # Previously no branch consumed it, so the loop
                        # spun forever.
                        self.data = self.data[1:]
                self.upArrow = False
# Copyright (c) 2013 Thomas Nicholson <tnnich@googlemail.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The names of the author(s) may not be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
import binascii

from twisted.python import log
class BaseProtocol():
    """Base class for channel protocol parsers.

    Holds the shared parse buffer (``data``/``packetSize``) and the
    helpers used to pull SSH wire-format fields off the front of it.
    """
    data = ''            # unconsumed bytes of the current packet
    packetSize = 0       # bytes believed to remain in the current packet
    name = ''            # channel name, set by subclasses
    uuid = ''            # channel uuid, set by subclasses
    ttylog_file = None   # path of the tty log, if the subclass opened one

    def __init__(self):
        pass

    def parsePacket(self, parent, theData):
        """Hook for subclasses; called with each packet's payload."""
        #log.msg(parent + ' ' + repr(theData))
        #log.msg(parent + ' ' + '\'\\x' + "\\x".join("{:02x}".format(ord(c)) for c in self.data) + '\'')
        pass

    def processCommand(self, uuid, name, command):
        """Forward an extracted command to the output handler."""
        self.out.commandEntered(uuid, name, command)

    def channelClosed(self):
        pass

    def extractInt(self, length):
        """Consume ``length`` bytes and return them as a big-endian int.

        Parameter renamed from ``len`` to stop shadowing the builtin;
        binascii.hexlify is byte-for-byte equivalent to the former
        ``.encode('hex')`` on Python 2 strings.
        """
        value = int(binascii.hexlify(self.data[:length]), 16)
        self.packetSize = self.packetSize - length
        self.data = self.data[length:]
        return value

    def extractString(self):
        """Consume a length-prefixed (uint32) string and return it."""
        length = self.extractInt(4)
        value = str(self.data[:length])
        self.packetSize = self.packetSize - length
        self.data = self.data[length:]
        return value

    def extractBool(self):
        """Consume one byte and return it as a bool."""
        value = self.extractInt(1)
        return bool(value)

    def extractData(self):
        """Consume a uint32 length then return ALL remaining buffer bytes.

        NOTE(review): the original deliberately returns the whole rest of
        the buffer rather than just ``length`` bytes; preserved as-is.
        """
        length = self.extractInt(4)
        self.packetSize = length
        value = self.data
        self.packetSize = self.packetSize - len(value)
        self.data = ''
        return value
# Copyright (c) 2013 Thomas Nicholson <tnnich@googlemail.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The names of the author(s) may not be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
from twisted.python import log
from twisted.internet import threads, reactor
from kippo.core.config import config
from honssh import txtlog
from kippo.core import ttylog
from kippo.dblog import mysql
from hpfeeds import hpfeeds
import datetime, time, os, struct, re, subprocess, uuid, GeoIP, getopt, hashlib
class Output():
    """Fans session events out to every enabled logging backend.

    The protocol layer calls into this class on connection, login,
    command, download and tty events; depending on honssh.cfg each event
    is written to text logs, MySQL, hpfeeds, email and/or handed to
    external app-hook scripts.
    """
    # Parsed honssh.cfg, shared by all Output instances.
    cfg = config()

    def __init__(self, hpLog, dbLog):
        # Raw client handles for hpfeeds and MySQL; wrapped per session in
        # connectionMade() when the corresponding output is enabled.
        self.hpLogClient = hpLog
        self.dbLogClient = dbLog

    def connectionMade(self, ip, port):
        """Initialise per-session state and log the incoming connection."""
        dt = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
        self.logLocation = self.cfg.get('folders', 'session_path') + "/" + ip + "/"
        self.downloadFolder = self.logLocation + 'downloads/'
        self.txtlog_file = self.logLocation + dt + ".log"
        self.endIP = ip
        self.endPort = port
        self.sessionID = uuid.uuid4().hex
        self.passwordTried = False
        self.loginSuccess = False
        self.ttyFiles = []
        if self.cfg.get('txtlog', 'enabled') == 'true':
            self.connectionString = '[POT ] ' + self.cfg.get('honeypot', 'sensor_name')
            self.addConnectionString('[SSH ] Incoming Connection from ' + ip + ':' + str(port))
            country = self.cname(ip)
            if country != None:
                # NOTE(review): cname() is called a second time here -
                # reusing 'country' would avoid a duplicate GeoIP lookup.
                self.connectionString = self.connectionString + ' - ' + self.cname(ip)
        if self.cfg.get('database_mysql', 'enabled') == 'true':
            self.dbLog = mysql.DBLogger()
            self.dbLog.setClient(self.dbLogClient, self.cfg)
        if self.cfg.get('hpfeeds', 'enabled') == 'true':
            self.hpLog = hpfeeds.HPLogger()
            self.hpLog.setClient(self.hpLogClient, self.cfg)
        if self.cfg.has_option('app_hooks', 'connection_made'):
            if self.cfg.get('app_hooks', 'connection_made') != '':
                # App hooks run as external commands in a worker thread.
                cmdString = self.cfg.get('app_hooks', 'connection_made') + " CONNECTION_MADE " + dt + " " + self.endIP + " " + str(port)
                threads.deferToThread(self.runCommand, cmdString)

    def connectionLost(self):
        """Flush per-session logs and fire hooks when the attacker disconnects."""
        log.msg("[OUTPUT] Lost Connection with the attacker: %s" % self.endIP)
        if not self.passwordTried:
            # Connection dropped before any auth attempt: record an empty one.
            if self.cfg.get('txtlog', 'enabled') == 'true':
                txtlog.authLog(self.cfg.get('folders', 'log_path') + "/" + datetime.datetime.now().strftime("%Y%m%d"), self.endIP, '', '', False)
        if self.loginSuccess:
            if self.cfg.get('txtlog', 'enabled') == 'true':
                if os.path.exists(self.txtlog_file):
                    txtlog.log(self.txtlog_file, '[SSH ] Lost Connection with ' + self.endIP)
            if self.cfg.get('database_mysql', 'enabled') == 'true':
                self.dbLog.handleConnectionLost(self.sessionID)
            if self.cfg.get('hpfeeds', 'enabled') == 'true':
                self.hpLog.handleConnectionLost()
            if self.cfg.get('email', 'attack') == 'true':
                # Email the session log with all tty logs attached.
                threads.deferToThread(self.email, 'HonSSH - Attack logged', self.txtlog_file, self.ttyFiles)
        dt = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
        if self.cfg.has_option('app_hooks', 'connection_lost'):
            if self.cfg.get('app_hooks', 'connection_lost') != '':
                cmdString = self.cfg.get('app_hooks', 'connection_lost') + " CONNECTION_LOST " + dt + " " + self.endIP
                threads.deferToThread(self.runCommand, cmdString)

    def setVersion(self, version):
        """Record the attacker's SSH client version banner."""
        self.version = version
        if self.cfg.get('txtlog', 'enabled') == 'true':
            self.connectionString = self.connectionString + ' - ' + version

    def loginSuccessful(self, username, password):
        """Log a successful authentication and open the session records."""
        self.passwordTried = True
        self.loginSuccess = True
        dt = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
        self.makeSessionFolder()
        if self.cfg.get('txtlog', 'enabled') == 'true':
            txtlog.authLog(self.cfg.get('folders', 'log_path') + "/" + datetime.datetime.now().strftime("%Y%m%d"), self.endIP, username, password, True)
            txtlog.log(self.txtlog_file, self.connectionString)
            txtlog.log(self.txtlog_file, '[SSH ] Login Successful: ' + username + ':' + password)
        if self.cfg.get('email', 'login') == 'true':
            threads.deferToThread(self.email, 'HonSSH - Login Successful', self.txtlog_file)
        if self.cfg.get('database_mysql', 'enabled') == 'true':
            self.dbLog.handleLoginSucceeded(username, password)
            self.dbLog.createSession(self.sessionID, self.endIP, self.endPort, self.cfg.get('honeypot', 'ssh_addr'), self.cfg.get('honeypot', 'ssh_port'))
            self.dbLog.handleClientVersion(self.sessionID, self.version)
        if self.cfg.get('hpfeeds', 'enabled') == 'true':
            self.hpLog.handleLoginSucceeded(username, password)
            self.hpLog.createSession(self.sessionID, self.endIP, self.endPort, self.cfg.get('honeypot', 'ssh_addr'), self.cfg.get('honeypot', 'ssh_port'))
            self.hpLog.handleClientVersion(self.version)
        if self.cfg.has_option('app_hooks', 'login_successful'):
            if self.cfg.get('app_hooks', 'login_successful') != '':
                cmdString = self.cfg.get('app_hooks', 'login_successful') + " LOGIN_SUCCESSFUL " + dt + " " + self.endIP + " " + username + " " + password
                threads.deferToThread(self.runCommand, cmdString)

    def loginFailed(self, username, password):
        """Log a failed authentication attempt to the enabled outputs."""
        self.passwordTried = True
        dt = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
        if self.cfg.get('txtlog', 'enabled') == 'true':
            txtlog.authLog(self.cfg.get('folders', 'log_path') + "/" + datetime.datetime.now().strftime("%Y%m%d"), self.endIP, username, password, False)
        if self.cfg.get('database_mysql', 'enabled') == 'true':
            self.dbLog.handleLoginFailed(username, password)
        if self.cfg.get('hpfeeds', 'enabled') == 'true':
            self.hpLog.handleLoginFailed(username, password)
        if self.cfg.has_option('app_hooks', 'login_failed'):
            if self.cfg.get('app_hooks', 'login_failed') != '':
                cmdString = self.cfg.get('app_hooks', 'login_failed') + " LOGIN_FAILED " + dt + " " + self.endIP + " " + username + " " + password
                threads.deferToThread(self.runCommand, cmdString)

    def commandEntered(self, uuid, channelName, theCommand):
        """Log an entered command; split compound commands and, when active
        download is enabled, fetch any wget'd URLs ourselves."""
        if self.cfg.get('txtlog', 'enabled') == 'true':
            theCMD = theCommand.replace('\n', '\\n')
            txtlog.log(self.txtlog_file, channelName + " Command Executed: %s" % (theCMD))
        if self.cfg.get('database_mysql', 'enabled') == 'true':
            self.dbLog.handleCommand(uuid, theCommand)
        if self.cfg.get('hpfeeds', 'enabled') == 'true':
            self.hpLog.handleCommand(uuid, theCommand)
        # Split on ; & | < > while keeping quoted strings intact, then on
        # newlines, so each simple command is handled individually.
        theCommandsSplit = re.findall(r'(?:[^;&|<>"\']|["\'](?:\\.|[^"\'])*[\'"])+', theCommand)
        theCMDs = []
        for cmd in theCommandsSplit:
            theCMDs.extend(cmd.split('\n'))
        for command in theCMDs:
            command = command.strip().rstrip()
            dt = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
            if self.cfg.has_option('app_hooks', 'command_entered'):
                if self.cfg.get('app_hooks', 'command_entered') != '':
                    cmdString = self.cfg.get('app_hooks', 'command_entered') + " COMMAND_ENTERED " + dt + " " + self.endIP + " '" + command + "'"
                    threads.deferToThread(self.runCommand, cmdString)
            if self.cfg.get('download','active') == 'true':
                if command.startswith('wget '):
                    # Strip the 'wget' token (the leading space is discarded
                    # by the tokenizer below), then parse its options with
                    # getopt using wget's full option table.
                    command = command[4:]
                    commandArgs = re.findall(r'(?:[^\s"]|"(?:\\.|[^"])*")+', command)
                    args, links = getopt.getopt(commandArgs, 'VhbdqvFcNS46xErkKmpHLnp:e:o:a:i:B:t:O:T:w:Q:P:U:l:A:R:D:I:X:', ['version','help','background','execute=','output-file=','append-output=','debug','quiet','verbose','report-speed=','input-file=','force-html','base=','config=','bind-address=','tries=','output-document=', 'backups=','continue','progress=','timestamping','no-use-server-timestamps','server-response','spider','timeout=','dns-timeout=','connect-timeout=','read-timeout=','limit-rate=','wait=','waitretry=', 'random-wait','no-proxy','quota=','no-dns-cache','restrict-file-names=','inet4-only','inet6-only','prefer-family=','retry-connrefused','user=','password=','ask-password','no-iri','local-encoding=','remote-encoding=','unlink','force-directories','protocol-directories','cut-dirs=','directory-prefix=','default-page=','adjust-extension','http-user=','http-password=','no-http-keep-alive','no-cache','no-cookies','load-cookies=','save-cookies=','keep-session-cookies','ignore-length','header=','max-redirect=','proxy-user=','proxy-password=','referer=','save-headers','user-agent=','post-data=','post-file=','method=','body-data=','body-file=','content-disposition','content-on-error','trust-server-names','auth-no-challenge','secure-protocol=','https-only','no-check-certificate','certificate=','certificate-type=','private-key=','private-key-type=','ca-certificate=','ca-directory=','random-file=','egd-file=','warc-file=','warc-header=','warc-max-size=','warc-cdx','warc-dedup=','no-warc-compression','no-warc-digests','no-warc-keep-log','warc-tempdir=','ftp-user=','ftp-password=','no-remove-listing','no-glob','no-passive-ftp','preserve-permissions','retr-symlinks','recursive','level=','delete-after','convert-links','backup-converted','mirror','page-requisites','strict-comments','accept=','reject=','accept-regex=','reject-regex=','regex-type=','domains=','exclude-domains=','follow-ftp','follow-tags=','ignore-tags=','ignore-case','span-hosts','relative','include-directories=','exclude-directories=','no-verbose','no-clobber','no-directories','no-host-directories','no-parent'])
                    username = ''
                    password = ''
                    for a in args:
                        if a[0] in ['user', 'http-user', 'ftp-user']:
                            username = a[1]
                        if a[0] in ['password', 'http-password', 'ftp-password']:
                            password = a[1]
                    for l in links:
                        self.activeDownload(channelName, uuid, l, username, password)

    def activeDownload(self, channelName, uuid, link, user, password):
        """Fetch 'link' with our own wget in a worker thread and record it."""
        dt = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
        self.makeDownloadsFolder()
        filename = dt + "-" + link.split("/")[-1]
        fileOut = self.cfg.get('folders', 'session_path') + '/' + self.endIP + '/downloads/' + filename
        wgetCommand = 'wget -O ' + fileOut + " "
        if user != '':
            wgetCommand = wgetCommand + '--user=' + user + ' '
        if password != '':
            wgetCommand = wgetCommand + '--password=' + password + ' '
        wgetCommand = wgetCommand + link
        d = threads.deferToThread(self.wget, channelName, uuid, wgetCommand, link, fileOut)
        d.addCallback(self.fileDownloaded)
        if self.cfg.has_option('app_hooks', 'download_started'):
            if self.cfg.get('app_hooks', 'download_started') != '':
                cmdString = self.cfg.get('app_hooks', 'download_started') + " DOWNLOAD_STARTED " + dt + " " + self.endIP + " " + link + " " + fileOut
                threads.deferToThread(self.runCommand, cmdString)

    def fileDownloaded(self, input):
        """Callback for finished downloads (active wget or passive scp).

        'input' is the tuple (channelName, uuid, success, link, file,
        wgetError) produced by wget()/ExecTerm.
        """
        dt = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
        channelName, uuid, success, link, file, wgetError = input
        if success:
            if self.cfg.get('txtlog', 'enabled') == 'true':
                threads.deferToThread(self.generateMD5, channelName, dt, self.cfg.get('folders', 'log_path') + '/downloads.log', self.endIP, link, file)
            if self.cfg.get('database_mysql', 'enabled') == 'true':
                self.dbLog.handleFileDownload(uuid, link, file)
            if self.cfg.has_option('app_hooks', 'download_finished'):
                if self.cfg.get('app_hooks', 'download_finished') != '':
                    cmdString = self.cfg.get('app_hooks', 'download_finished') + " DOWNLOAD_FINISHED " + dt + " " + self.endIP + " " + link + " " + file
                    threads.deferToThread(self.runCommand, cmdString)
        else:
            log.msg('[OUTPUT] FILE DOWNLOAD FAILED')
            log.msg('[OUTPUT] ' + wgetError)

    def channelOpened(self, uuid, channelName):
        """Record a newly opened SSH channel."""
        if self.cfg.get('txtlog', 'enabled') == 'true':
            txtlog.log(self.txtlog_file, channelName + ' Opened Channel')
        if self.cfg.get('database_mysql', 'enabled') == 'true':
            self.dbLog.channelOpened(self.sessionID, uuid, channelName)
        if self.cfg.get('hpfeeds', 'enabled') == 'true':
            self.hpLog.channelOpened(uuid, channelName)

    def channelClosed(self, channel):
        """Record a closed channel and remember its tty log for emailing."""
        if self.cfg.get('txtlog', 'enabled') == 'true':
            txtlog.log(self.txtlog_file, channel.name + ' Closed Channel')
        if self.cfg.get('database_mysql', 'enabled') == 'true':
            self.dbLog.channelClosed(channel.uuid, channel.ttylog_file)
        if self.cfg.get('hpfeeds', 'enabled') == 'true':
            self.hpLog.channelClosed(channel.uuid, channel.ttylog_file)
        if channel.ttylog_file != None:
            self.ttyFiles.append(channel.ttylog_file)

    def openTTY(self, ttylog_file):
        """Open a tty log file (kippo ttylog format)."""
        ttylog.ttylog_open(ttylog_file, time.time())

    def inputTTY(self, ttylog_file, data):
        """Append input data to a tty log file."""
        ttylog.ttylog_write(ttylog_file, len(data), ttylog.TYPE_INPUT, time.time(), data)

    def closeTTY(self, ttylog_file):
        """Close a tty log file."""
        ttylog.ttylog_close(ttylog_file, time.time())

    def genericLog(self, message):
        """Write an arbitrary message to the session text log."""
        self.makeSessionFolder()
        if self.cfg.get('txtlog', 'enabled') == 'true':
            txtlog.log(self.txtlog_file, message)

    def addConnectionString(self, message):
        """Append a timestamped line to the buffered connection summary."""
        self.connectionString = self.connectionString + '\n' + datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S") + ' - ' + message

    def writePossibleLink(self, ips):
        """Note other IPs that used the same credentials (possible link)."""
        if not self.endIP in ips:
            self.connectionString = self.connectionString + '\n' + datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S") + ' - [SSH ] Attempted login with the same username and password as ' + ', '.join(ips) + ' - Possible link'

    def errLog(self, message):
        """Write a message to the session's '-err' side log."""
        self.makeSessionFolder()
        txtlog.log(self.txtlog_file + "-err", message)

    def advancedLog(self, message):
        """Write a message to the session's '-adv' side log."""
        self.makeSessionFolder()
        txtlog.log(self.txtlog_file + "-adv", message)

    def writeSpoofPass(self, username, password):
        """Record credentials accepted by the password-spoofing feature."""
        txtlog.spoofLog(self.cfg.get('folders', 'log_path') + "/spoof.log", username, password, self.endIP)

    def makeSessionFolder(self):
        """Create the per-IP session folder (mode 0755) if missing."""
        if not os.path.exists(os.path.join(self.cfg.get('folders', 'session_path') + '/' + self.endIP)):
            os.makedirs(os.path.join(self.cfg.get('folders', 'session_path') + '/' + self.endIP))
            os.chmod(os.path.join(self.cfg.get('folders', 'session_path') + '/' + self.endIP),0755)

    def makeDownloadsFolder(self):
        """Create the per-IP downloads folder (mode 0755) if missing."""
        if not os.path.exists(self.cfg.get('folders', 'session_path') + '/' + self.endIP + '/downloads'):
            os.makedirs(self.cfg.get('folders', 'session_path') + '/' + self.endIP + '/downloads')
            os.chmod(self.cfg.get('folders', 'session_path') + '/' + self.endIP + '/downloads',0755)

    def email(self, subject, body, attachment=None):
        """Email the session log, optionally attaching tty log files.

        NOTE(review): 'body' is unused - the message text is always read
        from self.txtlog_file.
        """
        try:
            #Start send mail code - provided by flofrihandy, modified by peg
            import smtplib
            from email.mime.base import MIMEBase
            from email.mime.multipart import MIMEMultipart
            from email.mime.text import MIMEText
            from email import Encoders
            msg = MIMEMultipart()
            msg['Subject'] = subject
            msg['From'] = self.cfg.get('email', 'from')
            msg['To'] = self.cfg.get('email', 'to')
            fp = open(self.txtlog_file, 'rb')
            msg_text = MIMEText(fp.read())
            fp.close()
            msg.attach(msg_text)
            if attachment != None:
                for tty in attachment:
                    fp = open(tty, 'rb')
                    logdata = MIMEBase('application', "octet-stream")
                    logdata.set_payload(fp.read())
                    fp.close()
                    Encoders.encode_base64(logdata)
                    logdata.add_header('Content-Disposition', 'attachment', filename=os.path.basename(tty))
                    msg.attach(logdata)
            s = smtplib.SMTP(self.cfg.get('email', 'host'), int(self.cfg.get('email', 'port')))
            if self.cfg.get('email', 'username') != '' and self.cfg.get('email', 'password') != '':
                s.ehlo()
                if self.cfg.get('email', 'use_tls') == 'true':
                    s.starttls()
                if self.cfg.get('email', 'use_smtpauth') == 'true':
                    s.login(self.cfg.get('email', 'username'), self.cfg.get('email', 'password'))
            s.sendmail(msg['From'], msg['To'].split(','), msg.as_string())
            s.quit() #End send mail code
        except Exception, ex:
            log.msg('[OUTPUT][EMAIL][ERR] - ' + str(ex))

    def cname(self, ipv4_str): #Thanks Are.
        """Checks the ipv4_str against the GeoIP database. Returns the full country name of origin if
        the IPv4 address is found in the database. Returns None if not found."""
        geo = GeoIP.new(GeoIP.GEOIP_MEMORY_CACHE)
        country = geo.country_name_by_addr(ipv4_str)
        return country

    def generateMD5(self, channelName, dt, logPath, theIP, link, outFile):
        """Hash a downloaded file (MD5, read in 1MB chunks) and log it."""
        f = file(outFile, 'rb')
        md5 = hashlib.md5()
        while True:
            data = f.read(2**20)
            if not data:
                break
            md5.update(data)
        f.close()
        theMD5 = md5.hexdigest()
        theSize = os.path.getsize(outFile)
        txtlog.log(self.txtlog_file, channelName + ' Downloaded: ' + link + ' - Saved: ' + outFile + ' - Size: ' + str(theSize) + ' - MD5: ' + str(theMD5))
        txtlog.downloadLog(dt, logPath, theIP, link, outFile, theSize, theMD5)

    def wget(self, channelName, uuid, wgetCommand, link, fileOut):
        """Run the wget command; return the fileDownloaded() result tuple."""
        sp = subprocess.Popen(wgetCommand, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        result = sp.communicate()
        if sp.returncode == 0:
            return channelName, uuid, True, link, fileOut, None
        else:
            return channelName, uuid, False, link, None, result[0]

    def runCommand(self, command):
        """Run an app-hook command, logging it first (output discarded)."""
        log.msg('[APP-HOOKS] - ' + command)
        sp = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        sp.communicate()
| Python |
# Copyright (c) 2013 Thomas Nicholson <tnnich@googlemail.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The names of the author(s) may not be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
from twisted.internet import reactor
from twisted.conch.ssh.keys import Key
from twisted.python import log
from twisted.application import internet, service
import sys, os
from honssh import server, networking
from kippo.core.config import config
from kippo.core.config import validateConfig
if not os.path.exists('honssh.cfg'):
print 'ERROR: honssh.cfg is missing!'
sys.exit(1)
log.startLogging(sys.stdout, setStdout=0)
cfg = config()
if not validateConfig(cfg):
sys.exit(1)
ssh_addr = cfg.get('honeypot', 'ssh_addr')
if not os.path.exists(cfg.get('folders', 'log_path')):
os.makedirs(cfg.get('folders', 'log_path'))
os.chmod(cfg.get('folders', 'log_path'),0755)
if not os.path.exists(cfg.get('folders', 'session_path')):
os.makedirs(cfg.get('folders', 'session_path'))
os.chmod(cfg.get('folders', 'session_path'),0755)
with open(cfg.get('honeypot', 'private_key')) as privateBlobFile:
privateBlob = privateBlobFile.read()
privateKey = Key.fromString(data=privateBlob)
with open(cfg.get('honeypot', 'public_key')) as publicBlobFile:
publicBlob = publicBlobFile.read()
publicKey = Key.fromString(data=publicBlob)
serverFactory = server.HonsshServerFactory()
serverFactory.privateKeys = {'ssh-rsa': privateKey}
serverFactory.publicKeys = {'ssh-rsa': publicKey}
application = service.Application('honeypot')
service = internet.TCPServer(int(cfg.get('honeypot', 'ssh_port')), serverFactory, interface=ssh_addr)
service.setServiceParent(application)
#reactor.listenTCP(int(cfg.get('honeypot', 'ssh_port')), serverFactory, interface=ssh_addr)
#Interaction - Disabled in this release
#if cfg.get('interact', 'enabled')== 'true':
# iport = int(cfg.get('interact', 'port'))
# from kippo.core import interact
# from twisted.internet import protocol
# service = internet.TCPServer(iport, interact.makeInteractFactory(serverFactory), interface=cfg.get('interact', 'interface'))
# service.setServiceParent(application)
# #reactor.listenTCP(iport, interact.makeInteractFactory(serverFactory), interface=cfg.get('interact', 'interface'))
#reactor.run() | Python |
from distutils.core import setup
import os
# feapy's package sources live under ./src relative to the build directory.
pathtofeapy = os.path.join(os.getcwd(), 'src')

# Package metadata collected in one place before handing off to distutils.
_setup_args = dict(
    name='feapy',
    version='3.14159265',
    description='Finite Element Method Solver and Utilities',
    author='Jimmy Leta',
    author_email='jimmy.leta@gmail.com',
    url='http://code.google.com/p/feapy/',
    packages=['feapy'],
    package_dir={'feapy': pathtofeapy},
)
setup(**_setup_args)
| Python |
class SPConstraint:
    """Single-point (nodal) constraint.

    Each SPConstraint object is associated with a single node object and
    prescribes a constant value for one global degree of freedom of that
    node.  Only time-invariant constraints are allowed here; any
    constraint that varies with time must be implemented in subclasses
    of SPConstraint.
    """

    def __init__(self, constraint_id, node_id,
                 global_degree_of_freedom, value=0.0,
                 is_homogeneous=False):
        self.constraint_id = constraint_id
        self.node_id = node_id
        self.global_degree_of_freedom = global_degree_of_freedom
        # Prescribed value for the constrained DOF (0.0 = fixed support).
        self.value = value
        self.is_homogeneous = is_homogeneous

    def print_debug_info(self):
        """Print the constraint's node, DOF and value to stdout."""
        # Parenthesised print behaves identically under Python 2 (where the
        # original used print statements) and also runs under Python 3.
        print("node id: " + str(self.node_id))
        print("global dof: " + str(self.global_degree_of_freedom))
        print("value: " + str(self.value))
        print("")
class MPConstraint:
    """Multi-point constraint tying a constrained node's DOFs to a
    retained node through a constraint matrix."""

    def __init__(self, retained_node_id, constrained_node_id, constraint_matrix, constrained_dof):
        # Node whose DOFs remain in the system of equations.
        self.retained_node_id = retained_node_id
        # Node whose DOFs are expressed in terms of the retained node's.
        self.constrained_node_id = constrained_node_id
        self.constraint_matrix = constraint_matrix
        self.constrained_dof = constrained_dof
| Python |
from numpy import array, linspace, append
import feapy
# 3 x 5 structured grid covering a 0.25 x 1.5 rectangle.
x = linspace(0.0, 0.25, 3)
y = linspace(0.0, 1.5, 5)

mydomain = feapy.domain.Domain()

# Create the grid nodes, numbered row by row starting at 1.
node_id = 1
for yval in y:
    for xval in x:
        mydomain.add_node(feapy.node.Node(node_id, 2, \
            coordinates=array([xval, yval])))
        mydomain.nodes[node_id].print_nodal_coordinates()
        node_id += 1

# Eight four-node quads tiling the 2 x 4 grid of cells.
quad_connectivity = [
    [1, 2, 4, 5], [2, 3, 5, 6],
    [4, 5, 7, 8], [5, 6, 8, 9],
    [7, 8, 10, 11], [8, 9, 11, 12],
    [10, 11, 13, 14], [11, 12, 14, 15],
]
for element_id, node_ids in enumerate(quad_connectivity, start=1):
    mydomain.add_element(feapy.element.FourNodeQuad(element_id, element_node_ids=node_ids))

for element_id in mydomain.elements:
    mydomain.elements[element_id].print_debug_info()

# Zero-valued single-point constraints: (constraint id, node id, global dof).
for constraint_id, cnode_id, dof in [(1, 1, 1), (2, 10, 1), (3, 1, 2), (4, 3, 2)]:
    mydomain.add_sp_constraint(
        feapy.constraint.SPConstraint(constraint_id, cnode_id, dof, 0.0))

for constraint_id in mydomain.sp_constraints:
    mydomain.sp_constraints[constraint_id].print_debug_info()
# Add 6 four-node-quad elements.
#print mydomain.nodes[n+1].coordinates.size
#
#lengths = array([])
#area = 10.0
#youngs_modulus = 30e6
#for n in range(1,x.size):
#    length = x[n] - x[n-1]
#    element_nodes = [mydomain.nodes[n], mydomain.nodes[n+1]]
#    r = RodElement(n, youngs_modulus, area, length, element_nodes)
#    mydomain.add_element(r)
#for element_id in mydomain.elements:
#    mydomain.elements[element_id].print_debug_info()
| Python |
import sys
from numpy import zeros, array
class Node:
    """
    A finite-element node: spatial coordinates plus trial and committed
    kinematic state (displacement, velocity, acceleration) and the
    unbalanced load accumulated on the node.
    """

    def __init__(self, node_id, number_of_degrees_of_freedom,
                 coordinates=None):
        """
        node_id -- integer identifier of the node.
        number_of_degrees_of_freedom -- dof count carried by this node.
        coordinates -- numpy array of nodal coordinates; defaults to a
        fresh 2-D origin.
        """
        if coordinates is None:
            # Bug fix: the old default 'coordinates=zeros(2)' was created
            # once at definition time and shared between every
            # default-constructed node (mutable default argument).
            coordinates = zeros(2)
        self.node_id = node_id
        self.coordinates = coordinates
        self.number_of_degrees_of_freedom = number_of_degrees_of_freedom
        self.coordinate_system = 'cartesian'
        self.number_of_dimensions = coordinates.size
        self.mass = 0.0
        self.unbalanced_load = zeros(self.number_of_dimensions)
        self.trial_displacement = zeros(self.number_of_dimensions)
        self.trial_velocity = zeros(self.number_of_dimensions)
        self.trial_acceleration = zeros(self.number_of_dimensions)
        self.committed_displacement = zeros(self.number_of_dimensions)
        self.committed_velocity = zeros(self.number_of_dimensions)
        self.committed_acceleration = zeros(self.number_of_dimensions)

    def _require_matching_dimension(self, vector, name):
        # Shared guard: exit with a diagnostic when the vector's dimension
        # differs from the nodal coordinate dimension.  (One of the
        # original per-method messages was garbled: "must ual to".)
        if vector.size != self.coordinates.size:
            sys.stderr.write("The dimension of the " + name + " must be "
                             "equal to the dimension of the coordinates "
                             "vector\n")
            sys.exit(1)

    def add_unbalanced_load(self, load_vector, factor=1.0):
        """
        Add unbalanced load to the load vector.
        load_vector.size must equal coordinates.size.
        """
        self._require_matching_dimension(load_vector, "load vector")
        self.unbalanced_load = self.unbalanced_load + load_vector * factor

    def commit(self):
        """
        Update the values of committed values of nodal
        acceleration, velocity, and displacement.
        """
        self.committed_displacement = self.trial_displacement
        self.committed_velocity = self.trial_velocity
        self.committed_acceleration = self.trial_acceleration

    def increment_trial_acceleration(self, acceleration_vector):
        """Add acceleration_vector to the trial acceleration."""
        self._require_matching_dimension(acceleration_vector,
                                         "acceleration vector")
        self.trial_acceleration += acceleration_vector

    def increment_trial_displacement(self, displacement_vector):
        """Add displacement_vector to the trial displacement."""
        self._require_matching_dimension(displacement_vector,
                                         "displacement vector")
        self.trial_displacement += displacement_vector

    def increment_trial_velocity(self, velocity_vector):
        """Add velocity_vector to the trial velocity."""
        self._require_matching_dimension(velocity_vector, "velocity vector")
        self.trial_velocity += velocity_vector

    def print_nodal_coordinates(self):
        """Print the node id and its cartesian coordinates."""
        # print(...) with a single argument behaves identically under
        # Python 2 (parenthesized expression) and Python 3.
        print("Node ID: " + str(self.node_id))
        if self.coordinate_system == 'cartesian':
            print("x = " + str(self.coordinates[0]))
            if self.coordinates.size > 1:
                print("y = " + str(self.coordinates[1]))
            if self.coordinates.size > 2:
                print("z = " + str(self.coordinates[2]))
        print("")

    def zero_unbalanced_load(self):
        """Reset the unbalanced load to a zero vector."""
        # Bug fix: the original assigned the scalar 0.0, silently changing
        # the attribute's type from array to float.
        self.unbalanced_load = zeros(self.number_of_dimensions)
if __name__ == "__main__":
    # Smoke test of print_nodal_coordinates in 1, 2 and 3 dimensions.
    # Bug fix: the original calls omitted the required
    # number_of_degrees_of_freedom argument and raised TypeError.
    print("1D")
    n = Node(1, 1, coordinates=array([1.0]))
    n.print_nodal_coordinates()
    print("2D")
    n = Node(1, 2, coordinates=array([1.0, 0.0]))
    n.print_nodal_coordinates()
    print("3D")
    n = Node(1, 3, coordinates=array([1.0, 0.0, 0.0]))
    n.print_nodal_coordinates()
| Python |
from numpy import zeros
class Element:
    """Abstract base class shared by every finite-element type."""

    def __init__(self):
        # Concrete element classes overwrite these placeholders.
        self.element_nodes = []
        self.stiffness_matrix = None
        self.number_of_dof = 0

    def print_debug_info(self):
        """Virtual hook; concrete subclasses override it."""
        return None
class PiecewiseLinearElement:
    """
    Placeholder for a piecewise-linear element: stores its id and the
    global ids of its nodes.
    """

    def __init__(self, element_id, element_node_ids):
        # Bug fix: the original __init__ took no arguments yet referenced
        # the undefined names element_id / element_node_ids (NameError on
        # every construction).  Accept them as parameters, matching the
        # FourNodeQuad constructor.
        self.element_id = element_id
        self.element_node_ids = element_node_ids
class FourNodeQuad(Element):
    """Four-node quadrilateral element identified by global node ids."""
    # NOTE(review): Element.__init__ is deliberately not invoked, mirroring
    # the original implementation.

    def __init__(self, element_id, element_node_ids):
        self.element_id = element_id
        self.element_node_ids = element_node_ids
        self.centroid_coordinates = zeros(2)

    def print_debug_info(self):
        """Print the element id and the global id of each of its 4 nodes."""
        print("element id: " + str(self.element_id))
        for local in range(4):
            print("global id of node %d: %s"
                  % (local + 1, str(self.element_node_ids[local])))
        print("")
class RodElement(Element):
    """
    Two-node axial rod (truss bar) element with stiffness k = E*A/L.
    """

    def __init__(self, element_id, youngs_modulus, area,
                 length, element_nodes):
        """
        element_id -- integer identifier.
        youngs_modulus, area, length -- rod material/geometry (E, A, L).
        element_nodes -- the two Node objects joined by the rod.
        """
        self.element_id = element_id
        self.element_nodes = element_nodes
        self.youngs_modulus = youngs_modulus
        self.area = area
        self.length = length
        self.number_of_dof = 2
        self.set_stiffness()

    def print_debug_info(self):
        """Print the element id, node ids and stiffness matrix."""
        print("Element Type: Rod")
        print("")
        print("element id: " + str(self.element_id))
        print("")
        print("element node 1 id: " + str(self.element_nodes[0].node_id))
        print("element node 2 id: " + str(self.element_nodes[1].node_id))
        print("")
        print("element stiffness = " + str(self.stiffness_matrix))
        print("")

    def set_stiffness(self):
        """
        Assemble the 2x2 axial stiffness matrix (E*A/L) * [[1, -1], [-1, 1]].
        """
        stiffness = (self.youngs_modulus * self.area) / self.length
        # Bug fix: the original multiplied a float by a tuple of lists
        # (TypeError), and its sign pattern ([-1, 1], [1, -1]) was the
        # negative of the standard rod stiffness matrix.
        k = zeros((2, 2))
        k[0, 0] = k[1, 1] = stiffness
        k[0, 1] = k[1, 0] = -stiffness
        self.stiffness_matrix = k
| Python |
from numpy import zeros, matrix
# Bug fix: the module was imported as "from element import element"
# (lowercase), which raises ImportError -- the class is named Element.
from element import Element


class BeamElement(Element):
    """
    Two-node Euler-Bernoulli beam element (dofs: transverse displacement
    and rotation at each end).
    """

    def __init__(self, element_id, youngs_modulus, moment_of_inertia, length):
        self.element_id = element_id
        self.youngs_modulus = youngs_modulus
        self.moment_of_inertia = moment_of_inertia
        self.length = length
        # Populate stiffness_matrix at construction, consistent with
        # RodElement.
        self.set_stiffness()

    def calculate_stiffness_matrix(self, E, I, L):
        """
        Return the standard 4x4 Euler-Bernoulli beam stiffness matrix
        scaled by E*I/L**3.
        """
        # Bug fix: the bottom-right entry read 4.0*L*2 (i.e. 8L); the
        # matrix is symmetric with 4.0*L**2 on that diagonal entry.
        return (E * I / L ** 3) * matrix(
            [[12.0, 6.0 * L, -12.0, 6.0 * L],
             [6.0 * L, 4.0 * L ** 2, -6.0 * L, 2.0 * L ** 2],
             [-12.0, -6.0 * L, 12.0, -6.0 * L],
             [6.0 * L, 2.0 * L ** 2, -6.0 * L, 4.0 * L ** 2]])

    def set_stiffness(self):
        """Compute and store the element stiffness matrix."""
        self.stiffness_matrix = self.calculate_stiffness_matrix(
            self.youngs_modulus, self.moment_of_inertia, self.length)
| Python |
import node
import element
import constraint
import domain
if __name__ == "__main__":
    # Greet the user when the package is executed directly.
    print("feapy!")
| Python |
import sys
class Domain:
    """
    Contains dictionaries of elements and nodes which describe
    the problem domain.
    """

    def __init__(self):
        self.current_time = 0.0
        self.elements = {}         # element_id -> element object
        self.nodes = {}            # node_id -> node object
        self.sp_constraints = {}   # constraint_id -> single-point constraint
        # NOTE(review): this counter is never updated by add_sp_constraint;
        # confirm whether any caller relies on it before wiring it up.
        self.number_of_sp_constraints = 0
        self.has_domain_changed = False

    def add_element(self, element, check=False):
        """
        Adds an element object to the domain. If check is set to true, then
        verify that all of the node objects associated with the element
        exist in the domain.  Returns True when added, False otherwise.
        """
        if check:
            for node in element.element_nodes:
                if node.node_id not in self.nodes:
                    sys.stderr.write("Error: cannot add element\nNode "
                                     + str(node.node_id)
                                     + " is not in this domain\n")
                    return False
        self.elements[element.element_id] = element
        return True

    def add_node(self, node, check=False):
        """
        Adds a node object to the domain. If check is set to True, then verify
        that we're not overwriting an existing node object.  Returns True
        when added, False otherwise.
        """
        if check and node.node_id in self.nodes:
            sys.stderr.write("Warning - A node with node id: "
                             + str(node.node_id)
                             + " already exists in the domain.\n")
            return False
        self.nodes[node.node_id] = node
        return True

    def add_sp_constraint(self, sp_constraint, check=False):
        """
        Adds a single-point constraint, keyed by its constraint_id.  With
        check=True the referenced node must already exist in the domain.
        """
        if check:
            # Bug fix: the original tested sp_constraint.node.node_id while
            # its own error message used sp_constraint.node_id; SPConstraint
            # is constructed with a plain node id (see the driver script),
            # so use node_id consistently -- TODO confirm against
            # feapy.constraint.SPConstraint.
            if sp_constraint.node_id not in self.nodes:
                sys.stderr.write("Error: cannot add sp_constraint\nNode "
                                 + str(sp_constraint.node_id)
                                 + " is not in this domain\n")
                return False
        self.sp_constraints[sp_constraint.constraint_id] = sp_constraint
        return True

    def draw_elements(self):
        """
        Placeholder for element plotting; currently only creates an empty
        figure and returns None.
        """
        # Bug fix: figure() was previously called without being imported.
        from matplotlib.pyplot import figure
        fig = figure()
        ax = fig.add_subplot(111)
        return None

    def draw_nodes(self):
        """
        Creates an image file that illustrates the location of all of the
        nodes in the domain.
        """
        from matplotlib.pyplot import plot, savefig, figure, show, grid
        xdata = []
        ydata = []
        fig = figure()
        ax = fig.add_subplot(111)
        for node_id in self.nodes:
            x = self.nodes[node_id].coordinates[0]
            y = self.nodes[node_id].coordinates[1]
            xdata.append(x)
            ydata.append(y)
            ax.annotate(str(node_id), xy=(x, y), xycoords='data')
        ax.plot(xdata, ydata, 'o')
        ax.set_xlim(min(xdata) - 0.25, max(xdata) + 0.25)
        ax.set_ylim(min(ydata) - 0.25, max(ydata) + 0.25)
        grid()
        # Bug fix: save before show(); once the interactive window is
        # closed the figure may be torn down and nodes.png comes out empty.
        savefig('nodes.png')
        show()

    def remove_element(self, element_id):
        """
        Removes an element object from the domain; exits with an error when
        the element does not exist.
        """
        if element_id in self.elements:
            del self.elements[element_id]
        else:
            sys.stderr.write("Error: Cannot remove element " + str(element_id)
                             + " from domain.\nThis element does not exist.\n")
            sys.exit(1)

    def remove_node(self, node_id):
        """
        Removes a node object from the domain; exits with an error when the
        node does not exist.
        """
        if node_id in self.nodes:
            del self.nodes[node_id]
        else:
            # Bug fix: the original message was split across two statements
            # (missing line continuation), so the second half was a dead
            # unary-plus expression and never printed.
            sys.stderr.write("Error: Cannot remove node " + str(node_id)
                             + " from domain.\nThis node does not exist.\n")
            sys.exit(1)
| Python |
#!/usr/bin/python
# pep8.py - Check Python source code formatting, according to PEP 8
# Copyright (C) 2006 Johann C. Rocholl <johann@rocholl.net>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""
Check Python source code formatting, according to PEP 8:
http://www.python.org/dev/peps/pep-0008/
For usage and a list of options, try this:
$ python pep8.py -h
This program and its regression test suite live here:
http://github.com/jcrocholl/pep8
Groups of errors and warnings:
E errors
W warnings
100 indentation
200 whitespace
300 blank lines
400 imports
500 line length
600 deprecation
700 statements
You can add checks to this program by writing plugins. Each plugin is
a simple function that is called for each line of source code, either
physical or logical.
Physical line:
- Raw line of text from the input file.
Logical line:
- Multi-line statements converted to a single line.
- Stripped left and right.
- Contents of strings replaced with 'xxx' of same length.
- Comments removed.
The check function requests physical or logical lines by the name of
the first argument:
def maximum_line_length(physical_line)
def extraneous_whitespace(logical_line)
def blank_lines(logical_line, blank_lines, indent_level, line_number)
The last example above demonstrates how check plugins can request
additional information with extra arguments. All attributes of the
Checker object are available. Some examples:
lines: a list of the raw lines from the input file
tokens: the tokens that contribute to this logical line
line_number: line number in the input file
blank_lines: blank lines before this one
indent_char: first indentation character in this file (' ' or '\t')
indent_level: indentation (with tabs expanded to multiples of 8)
previous_indent_level: indentation on previous line
previous_logical: previous logical line
The docstring of each check function shall be the relevant part of
text from PEP 8. It is printed if the user enables --show-pep8.
Several docstrings contain examples directly from the PEP 8 document.
Okay: spam(ham[1], {eggs: 2})
E201: spam( ham[1], {eggs: 2})
These examples are verified automatically when pep8.py is run with the
--doctest option. You can add examples for your own check functions.
The format is simple: "Okay" or error/warning code followed by colon
and space, the rest of the line is example source code. If you put 'r'
before the docstring, you can use \n for newline, \t for tab and \s
for space.
"""
__version__ = '0.5.1dev'
import os
import sys
import re
import time
import inspect
import keyword
import tokenize
from optparse import OptionParser
from fnmatch import fnmatch
try:
    frozenset
except NameError:
    # Python 2.3 compatibility: frozenset became a builtin in 2.4.
    from sets import ImmutableSet as frozenset


# Defaults, overridable from the command line.
DEFAULT_EXCLUDE = '.svn,CVS,.bzr,.hg,.git'
DEFAULT_IGNORE = 'E24'
MAX_LINE_LENGTH = 79

# Pre-compiled regular expressions shared by the check functions.
INDENT_REGEX = re.compile(r'([ \t]*)')
RAISE_COMMA_REGEX = re.compile(r'raise\s+\w+\s*(,)')
# Parses "Okay: ..." / "E123: ..." example lines out of check docstrings.
SELFTEST_REGEX = re.compile(r'(Okay|[EW]\d{3}):\s(.*)')
# find_checks() uses this to read a check's error codes from its docstring.
ERRORCODE_REGEX = re.compile(r'[EW]\d{3}')
DOCSTRING_REGEX = re.compile(r'u?r?["\']')
# Bug fix: the second alternative must be TWO spaces ("\t|  "); with a
# single space every normally-spaced operator ("a = b") was reported as
# E221/E222 "multiple spaces".
WHITESPACE_AROUND_OPERATOR_REGEX = \
    re.compile('([^\w\s]*)\s*(\t|  )\s*([^\w\s]*)')
EXTRANEOUS_WHITESPACE_REGEX = re.compile(r'[[({] | []}),;:]')
WHITESPACE_AROUND_NAMED_PARAMETER_REGEX = \
    re.compile(r'[()]|\s=[^=]|[^=!<>]=\s')
WHITESPACE = ' \t'
BINARY_OPERATORS = frozenset(['**=', '*=', '+=', '-=', '!=', '<>',
    '%=', '^=', '&=', '|=', '==', '/=', '//=', '<=', '>=', '<<=', '>>=',
    '%', '^', '&', '|', '=', '/', '//', '<', '>', '<<'])
UNARY_OPERATORS = frozenset(['>>', '**', '*', '+', '-'])
OPERATORS = BINARY_OPERATORS | UNARY_OPERATORS
# Token types that never contribute to a logical line.
SKIP_TOKENS = frozenset([tokenize.COMMENT, tokenize.NL, tokenize.INDENT,
                         tokenize.DEDENT, tokenize.NEWLINE])
E225NOT_KEYWORDS = (frozenset(keyword.kwlist + ['print']) -
                    frozenset(['False', 'None', 'True']))
BENCHMARK_KEYS = ('directories', 'files', 'logical lines', 'physical lines')

# Populated by the command-line front end.
options = None
args = None
##############################################################################
# Plugins (check functions) for physical lines
##############################################################################
def tabs_or_spaces(physical_line, indent_char):
    r"""
    Never mix tabs and spaces.
    The most popular way of indenting Python is with spaces only. The
    second-most popular way is with tabs only. Code indented with a mixture
    of tabs and spaces should be converted to using spaces exclusively. When
    invoking the Python command line interpreter with the -t option, it issues
    warnings about code that illegally mixes tabs and spaces. When using -tt
    these warnings become errors. These options are highly recommended!
    Okay: if a == 0:\n        a = 1\n        b = 1
    E101: if a == 0:\n        a = 1\n\tb = 1
    """
    # Leading run of blanks/tabs (equivalent to matching r'([ \t]*)').
    body = physical_line.lstrip(' \t')
    indent = physical_line[:len(physical_line) - len(body)]
    for offset, char in enumerate(indent):
        if char != indent_char:
            return offset, "E101 indentation contains mixed spaces and tabs"
def tabs_obsolete(physical_line):
    r"""
    For new projects, spaces-only are strongly recommended over tabs. Most
    editors have features that make this easy to do.
    Okay: if True:\n    return
    W191: if True:\n\treturn
    """
    # Leading run of blanks/tabs (equivalent to matching r'([ \t]*)').
    body = physical_line.lstrip(' \t')
    indent = physical_line[:len(physical_line) - len(body)]
    if '\t' in indent:
        return indent.index('\t'), "W191 indentation contains tabs"
def trailing_whitespace(physical_line):
    r"""
    JCR: Trailing whitespace is superfluous.
    FBM: Except when it occurs as part of a blank line (i.e. the line is
    nothing but whitespace). According to Python docs[1] a line with only
    whitespace is considered a blank line, and is to be ignored. However,
    matching a blank line to its indentation level avoids mistakenly
    terminating a multi-line statement (e.g. class declaration) when
    pasting code into the standard Python interpreter.
    [1] http://docs.python.org/reference/lexical_analysis.html#blank-lines
    The warning returned varies on whether the line itself is blank, for easier
    filtering for those who want to indent their blank lines.
    Okay: spam(1)
    W291: spam(1)\s
    W293: class Foo(object):\n    \n    bang = 12
    """
    # Remove line terminators in the same order as the original:
    # newline (chr 10), carriage return (chr 13), then form feed (chr 12).
    for terminator in ('\n', '\r', '\x0c'):
        physical_line = physical_line.rstrip(terminator)
    stripped = physical_line.rstrip()
    if physical_line == stripped:
        return None
    if stripped:
        return len(stripped), "W291 trailing whitespace"
    return 0, "W293 blank line contains whitespace"
def trailing_blank_lines(physical_line, lines, line_number):
    r"""
    JCR: Trailing blank lines are superfluous.
    Okay: spam(1)
    W391: spam(1)\n
    """
    is_last_line = line_number == len(lines)
    if is_last_line and not physical_line.strip():
        return 0, "W391 blank line at end of file"
def missing_newline(physical_line):
    """
    JCR: The last line should have a newline.
    """
    # A line that is unchanged by rstrip() carries no trailing newline.
    stripped = physical_line.rstrip()
    if stripped == physical_line:
        return len(physical_line), "W292 no newline at end of file"
def maximum_line_length(physical_line):
    """
    Limit all lines to a maximum of 79 characters.
    There are still many devices around that are limited to 80 character
    lines; plus, limiting windows to 80 characters makes it possible to have
    several windows side-by-side. The default wrapping on such devices looks
    ugly. Therefore, please limit all lines to a maximum of 79 characters.
    For flowing long blocks of text (docstrings or comments), limiting the
    length to 72 characters is recommended.
    """
    line = physical_line.rstrip()
    length = len(line)
    if length > MAX_LINE_LENGTH:
        try:
            # The line could contain multi-byte characters: re-measure it
            # as decoded UTF-8 so we count characters, not bytes.
            if not hasattr(line, 'decode'):   # Python 3: str has no decode
                line = line.encode('latin-1')
            length = len(line.decode('utf-8'))
        except UnicodeDecodeError:
            # Not valid UTF-8; fall back to the raw byte length.
            pass
        if length > MAX_LINE_LENGTH:
            return MAX_LINE_LENGTH, "E501 line too long (%d characters)" % length
##############################################################################
# Plugins (check functions) for logical lines
##############################################################################
def blank_lines(logical_line, blank_lines, indent_level, line_number,
                previous_logical, previous_indent_level,
                blank_lines_before_comment):
    r"""
    Separate top-level function and class definitions with two blank lines.
    Method definitions inside a class are separated by a single blank line.
    Extra blank lines may be used (sparingly) to separate groups of related
    functions. Blank lines may be omitted between a bunch of related
    one-liners (e.g. a set of dummy implementations).
    Use blank lines in functions, sparingly, to indicate logical sections.
    Okay: def a():\n    pass\n\n\ndef b():\n    pass
    Okay: def a():\n    pass\n\n\n# Foo\n# Bar\n\ndef b():\n    pass
    E301: class Foo:\n    b = 0\n    def bar():\n        pass
    E302: def a():\n    pass\n\ndef b(n):\n    pass
    E303: def a():\n    pass\n\n\n\ndef b(n):\n    pass
    E303: def a():\n\n\n\n    pass
    E304: @decorator\n\ndef a():\n    pass
    """
    if line_number == 1:
        return  # Don't expect blank lines before the first line
    # Blank lines sitting above a comment block still count toward the
    # separation between the previous statement and this one.
    max_blank_lines = max(blank_lines, blank_lines_before_comment)
    if previous_logical.startswith('@'):
        # E304: a decorator must be immediately followed by its target.
        if max_blank_lines:
            return 0, "E304 blank lines found after function decorator"
    elif max_blank_lines > 2 or (indent_level and max_blank_lines == 2):
        # E303: more than two blanks anywhere, or two at nested level.
        return 0, "E303 too many blank lines (%d)" % max_blank_lines
    elif (logical_line.startswith('def ') or
          logical_line.startswith('class ') or
          logical_line.startswith('@')):
        if indent_level:
            # E301: a method needs one blank line, unless it opens its
            # block or directly follows a docstring.
            if not (max_blank_lines or previous_indent_level < indent_level or
                    DOCSTRING_REGEX.match(previous_logical)):
                return 0, "E301 expected 1 blank line, found 0"
        elif max_blank_lines != 2:
            # E302: top-level definitions need exactly two blank lines.
            return 0, "E302 expected 2 blank lines, found %d" % max_blank_lines
def extraneous_whitespace(logical_line):
    """
    Avoid extraneous whitespace in the following situations:
    - Immediately inside parentheses, brackets or braces.
    - Immediately before a comma, semicolon, or colon.
    Okay: spam(ham[1], {eggs: 2})
    E201: spam( ham[1], {eggs: 2})
    E201: spam(ham[ 1], {eggs: 2})
    E201: spam(ham[1], { eggs: 2})
    E202: spam(ham[1], {eggs: 2} )
    E202: spam(ham[1 ], {eggs: 2})
    E202: spam(ham[1], {eggs: 2 })
    E203: if x == 4: print x, y; x, y = y , x
    E203: if x == 4: print x, y ; x, y = y, x
    E203: if x == 4 : print x, y; x, y = y, x
    """
    line = logical_line
    # The regex yields two-character windows: "bracket + space" or
    # "space + bracket/separator".
    for match in EXTRANEOUS_WHITESPACE_REGEX.finditer(line):
        text = match.group()
        char = text.strip()
        found = match.start()
        if text == char + ' ' and char in '([{':
            # Space directly after an opening bracket.
            return found + 1, "E201 whitespace after '%s'" % char
        if text == ' ' + char and line[found - 1] != ',':
            # Space directly before a closer or separator; a preceding
            # comma (e.g. "(3, )") is tolerated here.
            if char in '}])':
                return found, "E202 whitespace before '%s'" % char
            if char in ',;:':
                return found, "E203 whitespace before '%s'" % char
def missing_whitespace(logical_line):
    """
    JCR: Each comma, semicolon or colon should be followed by whitespace.
    Okay: [a, b]
    Okay: (3,)
    Okay: a[1:4]
    Okay: a[:4]
    Okay: a[1:]
    Okay: a[1:4:2]
    E231: ['a','b']
    E231: foo(bar,baz)
    """
    pairs = zip(logical_line, logical_line[1:])
    for index, (char, following) in enumerate(pairs):
        if char not in ',;:' or following in ' \t':
            continue
        prefix = logical_line[:index]
        if char == ':' and prefix.count('[') > prefix.count(']'):
            continue  # Slice syntax, no space required
        if char == ',' and following == ')':
            continue  # Allow tuple with only one element: (3,)
        return index, "E231 missing whitespace after '%s'" % char
def indentation(logical_line, previous_logical, indent_char,
                indent_level, previous_indent_level):
    r"""
    Use 4 spaces per indentation level.
    For really old code that you don't want to mess up, you can continue to
    use 8-space tabs.
    Okay: a = 1
    Okay: if a == 0:\n    a = 1
    E111:   a = 1
    Okay: for item in items:\n    pass
    E112: for item in items:\npass
    Okay: a = 1\nb = 2
    E113: a = 1\n    b = 2
    """
    if indent_char == ' ' and indent_level % 4 != 0:
        return 0, "E111 indentation is not a multiple of four"
    # A trailing colon on the previous logical line opens a new block.
    expects_indent = previous_logical.endswith(':')
    went_deeper = indent_level > previous_indent_level
    if expects_indent and not went_deeper:
        return 0, "E112 expected an indented block"
    if went_deeper and not expects_indent:
        return 0, "E113 unexpected indentation"
def whitespace_before_parameters(logical_line, tokens):
    """
    Avoid extraneous whitespace in the following situations:
    - Immediately before the open parenthesis that starts the argument
    list of a function call.
    - Immediately before the open parenthesis that starts an indexing or
    slicing.
    Okay: spam(1)
    E211: spam (1)
    Okay: dict['key'] = list[index]
    E211: dict ['key'] = list[index]
    E211: dict['key'] = list [index]
    """
    # Walk consecutive token pairs: an opening '(' or '[' must directly
    # follow a name (or a closing bracket) with no gap between them.
    prev_type = tokens[0][0]
    prev_text = tokens[0][1]
    prev_end = tokens[0][3]
    for index in range(1, len(tokens)):
        token_type, text, start, end, line = tokens[index]
        if (token_type == tokenize.OP and
            text in '([' and
            # A gap between the previous token and the bracket is the
            # whitespace being flagged.
            start != prev_end and
            (prev_type == tokenize.NAME or prev_text in '}])') and
            # Syntax "class A (B):" is allowed, but avoid it
            (index < 2 or tokens[index - 2][1] != 'class') and
            # Allow "return (a.foo for a in range(5))"
            (not keyword.iskeyword(prev_text))):
            return prev_end, "E211 whitespace before '%s'" % text
        prev_type = token_type
        prev_text = text
        prev_end = end
def whitespace_around_operator(logical_line):
    """
    Avoid extraneous whitespace in the following situations:
    - More than one space around an assignment (or other) operator to
    align it with another.
    Okay: a = 12 + 3
    E221: a = 4  + 5
    E222: a = 4 +  5
    E223: a = 4\t+ 5
    E224: a = 4 +\t5
    """
    # Each match is (chars-before, tab-or-double-space, chars-after).
    for match in WHITESPACE_AROUND_OPERATOR_REGEX.finditer(logical_line):
        before, whitespace, after = match.groups()
        tab = whitespace == '\t'
        offset = match.start(2)
        if before in OPERATORS:
            # The excess whitespace follows an operator: E222/E224.
            return offset, (tab and "E224 tab after operator" or
                            "E222 multiple spaces after operator")
        elif after in OPERATORS:
            # The excess whitespace precedes an operator: E221/E223.
            return offset, (tab and "E223 tab before operator" or
                            "E221 multiple spaces before operator")
def missing_whitespace_around_operator(logical_line, tokens):
    r"""
    - Always surround these binary operators with a single space on
    either side: assignment (=), augmented assignment (+=, -= etc.),
    comparisons (==, <, >, !=, <>, <=, >=, in, not in, is, is not),
    Booleans (and, or, not).
    - Use spaces around arithmetic operators.
    Okay: i = i + 1
    Okay: submitted += 1
    Okay: x = x * 2 - 1
    Okay: hypot2 = x * x + y * y
    Okay: c = (a + b) * (a - b)
    Okay: foo(bar, key='word', *args, **kwargs)
    Okay: baz(**kwargs)
    Okay: negative = -1
    Okay: spam(-1)
    Okay: alpha[:-i]
    Okay: if not -5 < x < +5:\n    pass
    Okay: lambda *args, **kw: (args, kw)
    E225: i=i+1
    E225: submitted +=1
    E225: x = x*2 - 1
    E225: hypot2 = x*x + y*y
    E225: c = (a+b) * (a-b)
    E225: c = alpha -4
    E225: z = x **y
    """
    # need_space is set after seeing an operator that requires a space on
    # its right; the next token decides whether E225 is reported.
    parens = 0
    need_space = False
    prev_type = tokenize.OP
    prev_text = prev_end = None
    for token_type, text, start, end, line in tokens:
        if token_type in (tokenize.NL, tokenize.NEWLINE, tokenize.ERRORTOKEN):
            # ERRORTOKEN is triggered by backticks in Python 3000
            continue
        # Track call/lambda nesting: inside parentheses '=' marks a
        # keyword argument, not an assignment.
        if text in ('(', 'lambda'):
            parens += 1
        elif text == ')':
            parens -= 1
        if need_space:
            if start != prev_end:
                # Whitespace did follow the operator after all.
                need_space = False
            elif text == '>' and prev_text == '<':
                # Tolerate the "<>" operator, even if running Python 3
                pass
            else:
                # No space after the operator: report at its end position.
                return prev_end, "E225 missing whitespace around operator"
        elif token_type == tokenize.OP and prev_end is not None:
            if text == '=' and parens:
                # Allow keyword args or defaults: foo(bar=None).
                pass
            elif text in BINARY_OPERATORS:
                need_space = True
            elif text in UNARY_OPERATORS:
                # Allow unary operators: -123, -x, +1.
                # Allow argument unpacking: foo(*args, **kwargs).
                if prev_type == tokenize.OP:
                    if prev_text in '}])':
                        need_space = True
                elif prev_type == tokenize.NAME:
                    if prev_text not in E225NOT_KEYWORDS:
                        need_space = True
                else:
                    need_space = True
            if need_space and start == prev_end:
                # Operator glued to its left operand: no space before it.
                return prev_end, "E225 missing whitespace around operator"
        prev_type = token_type
        prev_text = text
        prev_end = end
def whitespace_around_comma(logical_line):
    """
    Avoid extraneous whitespace in the following situations:
    - More than one space around an assignment (or other) operator to
    align it with another.
    JCR: This should also be applied around comma etc.
    Note: these checks are disabled by default
    Okay: a = (1, 2)
    E241: a = (1,  2)
    E242: a = (1,\t2)
    """
    line = logical_line
    for separator in ',;:':
        # Bug fix: look for TWO spaces after the separator.  The original
        # searched for a single space, which would have reported every
        # normally-spaced "a, b" as E241.
        found = line.find(separator + '  ')
        if found > -1:
            return found + 1, "E241 multiple spaces after '%s'" % separator
        found = line.find(separator + '\t')
        if found > -1:
            return found + 1, "E242 tab after '%s'" % separator
def whitespace_around_named_parameter_equals(logical_line):
    """
    Don't use spaces around the '=' sign when used to indicate a
    keyword argument or a default parameter value.
    Okay: def complex(real, imag=0.0):
    Okay: return magic(r=real, i=imag)
    Okay: boolean(a == b)
    Okay: boolean(a != b)
    Okay: boolean(a <= b)
    Okay: boolean(a >= b)
    E251: def complex(real, imag = 0.0):
    E251: return magic(r = real, i = imag)
    """
    parens = 0
    # The regex matches parentheses (to track nesting) plus 3-character
    # windows of a spaced '=' that is not part of ==, !=, <=, >=.
    for match in WHITESPACE_AROUND_NAMED_PARAMETER_REGEX.finditer(
            logical_line):
        text = match.group()
        # A 3-character match inside parentheses is a spaced keyword /
        # default assignment.
        if parens and len(text) == 3:
            issue = "E251 no spaces around keyword / parameter equals"
            return match.start(), issue
        if text == '(':
            parens += 1
        elif text == ')':
            parens -= 1
def whitespace_before_inline_comment(logical_line, tokens):
    """
    Separate inline comments by at least two spaces.
    An inline comment is a comment on the same line as a statement. Inline
    comments should be separated by at least two spaces from the statement.
    They should start with a # and a single space.
    Okay: x = x + 1  # Increment x
    Okay: x = x + 1    # Increment x
    E261: x = x + 1 # Increment x
    E262: x = x + 1  #Increment x
    E262: x = x + 1  #  Increment x
    """
    prev_end = (0, 0)
    for token_type, text, start, end, line in tokens:
        if token_type == tokenize.NL:
            continue
        if token_type == tokenize.COMMENT:
            # A comment preceded only by whitespace on its own line is a
            # block comment, not an inline one.
            if not line[:start[1]].strip():
                continue
            if prev_end[0] == start[0] and start[1] < prev_end[1] + 2:
                return (prev_end,
                        "E261 at least two spaces before inline comment")
            # Bug fix: the first prefix test must look for '#  ' (two
            # spaces).  The original tested '# ' and therefore flagged
            # every well-formed inline comment as E262.
            if (len(text) > 1 and text.startswith('#  ')
                    or not text.startswith('# ')):
                return start, "E262 inline comment should start with '# '"
        else:
            prev_end = end
def imports_on_separate_lines(logical_line):
    r"""
    Imports should usually be on separate lines.
    Okay: import os\nimport sys
    E401: import sys, os
    Okay: from subprocess import Popen, PIPE
    Okay: from myclass import MyClass
    Okay: from foo.bar.yourclass import YourClass
    Okay: import myclass
    Okay: import foo.bar.yourclass
    """
    # Only plain "import a, b" is flagged; "from x import a, b" is fine.
    if not logical_line.startswith('import '):
        return None
    comma_position = logical_line.find(',')
    if comma_position > -1:
        return comma_position, "E401 multiple imports on one line"
def compound_statements(logical_line):
    r"""
    Compound statements (multiple statements on the same line) are
    generally discouraged.
    While sometimes it's okay to put an if/for/while with a small body
    on the same line, never do this for multi-clause statements. Also
    avoid folding such long lines!
    Okay: if foo == 'blah':\n    do_blah_thing()
    Okay: do_one()
    Okay: do_two()
    Okay: do_three()
    E701: if foo == 'blah': do_blah_thing()
    E701: for x in lst: total += x
    E701: while t < 10: t = delay()
    E701: if foo == 'blah': do_blah_thing()
    E701: else: do_non_blah_thing()
    E701: try: something()
    E701: finally: cleanup()
    E701: if foo == 'blah': one(); two(); three()
    E702: do_one(); do_two(); do_three()
    """
    colon = logical_line.find(':')
    if -1 < colon < len(logical_line) - 1:
        head = logical_line[:colon]
        inside_dict = head.count('{') > head.count('}')    # {'a': 1}
        inside_slice = head.count('[') > head.count(']')   # [1:2]
        is_lambda = re.search(r'\blambda\b', head)         # lambda x: x
        if not (inside_dict or inside_slice or is_lambda):
            return colon, "E701 multiple statements on one line (colon)"
    semicolon = logical_line.find(';')
    if semicolon > -1:
        return semicolon, "E702 multiple statements on one line (semicolon)"
def python_3000_has_key(logical_line):
    """
    The {}.has_key() method will be removed in the future version of
    Python. Use the 'in' operation instead, like:
    d = {"a": 1, "b": 2}
    if "b" in d:
        print d["b"]
    """
    position = logical_line.find('.has_key(')
    if position < 0:
        return None
    return position, "W601 .has_key() is deprecated, use 'in'"
def python_3000_raise_comma(logical_line):
    """
    When raising an exception, use "raise ValueError('message')"
    instead of the older form "raise ValueError, 'message'".
    The paren-using form is preferred because when the exception arguments
    are long or include string formatting, you don't need to use line
    continuation characters thanks to the containing parentheses. The older
    form will be removed in Python 3000.
    """
    # RAISE_COMMA_REGEX captures the comma of "raise Error, 'message'";
    # the reported offset is the comma's position.
    match = RAISE_COMMA_REGEX.match(logical_line)
    if match:
        return match.start(1), "W602 deprecated form of raising exception"
def python_3000_not_equal(logical_line):
    """
    != can also be written <>, but this is an obsolete usage kept for
    backwards compatibility only. New code should always use !=.
    The older syntax is removed in Python 3000.
    """
    position = logical_line.find('<>')
    if position >= 0:
        return position, "W603 '<>' is deprecated, use '!='"
def python_3000_backticks(logical_line):
    """
    Backticks are removed in Python 3000.
    Use repr() instead.
    """
    position = logical_line.find('`')
    if position >= 0:
        return position, "W604 backticks are deprecated, use 'repr()'"
##############################################################################
# Helper functions
##############################################################################
# Select a readlines() implementation for the running interpreter: under
# Python 2 ''.encode() is still a str so the comparison holds, while under
# Python 3 it is bytes and the comparison is False.
if '' == ''.encode():
    # Python 2: implicit encoding.
    def readlines(filename):
        # Return all lines of the file; the handle is closed by GC.
        return open(filename).readlines()
else:
    # Python 3: decode to latin-1.
    # This function is lazy, it does not read the encoding declaration.
    # XXX: use tokenize.detect_encoding()
    def readlines(filename):
        return open(filename, encoding='latin-1').readlines()
def expand_indent(line):
    """
    Return the amount of indentation.
    Tabs are expanded to the next multiple of 8.
    >>> expand_indent('    ')
    4
    >>> expand_indent('\\t')
    8
    >>> expand_indent('    \\t')
    8
    >>> expand_indent('        \\t')
    16
    """
    width = 0
    for character in line:
        if character == '\t':
            # Jump to the next tab stop (multiples of 8).
            width = (width // 8 + 1) * 8
        elif character == ' ':
            width += 1
        else:
            break
    return width
def mute_string(text):
    """
    Replace contents with 'xxx' to prevent syntax matching.
    >>> mute_string('"abc"')
    '"xxx"'
    >>> mute_string("'''abc'''")
    "'''xxx'''"
    >>> mute_string("r'abc'")
    "r'xxx'"
    """
    end = len(text) - 1
    # Skip any string-modifier prefix (e.g. u or r) before the quote.
    if text.endswith('"'):
        start = text.index('"') + 1
    elif text.endswith("'"):
        start = text.index("'") + 1
    else:
        start = 1
    # Triple-quoted strings open and close with three quote characters.
    if text[-3:] in ('"""', "'''"):
        start += 2
        end -= 2
    return text[:start] + 'x' * (end - start) + text[end:]
def message(text):
    """Print a report line to standard output."""
    # Earlier revisions wrote to stderr with an options.prog prefix:
    # print >> sys.stderr, options.prog + ': ' + text
    # print >> sys.stderr, text
    print(text)
##############################################################################
# Framework to run all checks
##############################################################################
def find_checks(argument_name):
    """
    Find all globally visible functions where the first argument name
    starts with argument_name.

    Returns a sorted list of (name, function, argument_names) tuples,
    excluding checks whose docstring advertises only ignored error codes.
    """
    checks = []
    for name, function in globals().items():
        if not inspect.isfunction(function):
            continue
        args = inspect.getargspec(function)[0]
        if args and args[0].startswith(argument_name):
            # Error codes mentioned in the docstring decide whether the
            # check is suppressed by --ignore; a check without codes is
            # always included (the [''] fallback).
            codes = ERRORCODE_REGEX.findall(inspect.getdoc(function) or '')
            for code in codes or ['']:
                if not code or not ignore_code(code):
                    checks.append((name, function, args))
                    break
    checks.sort()
    return checks
class Checker(object):
    """
    Load a Python source file, tokenize it, check coding style.

    Physical checks run on every raw input line; logical checks run on
    "logical lines" reassembled from the token stream (continuations and
    comments removed).
    """

    def __init__(self, filename, lines=None):
        # A filename of None means the caller supplies the lines directly
        # (used for stdin and for the self-tests).
        self.filename = filename
        if filename is None:
            self.filename = 'stdin'
            self.lines = lines or []
        elif lines is None:
            self.lines = readlines(filename)
        else:
            self.lines = lines
        options.counters['physical lines'] += len(self.lines)

    def readline(self):
        """
        Get the next line from the input buffer.
        """
        self.line_number += 1
        if self.line_number > len(self.lines):
            return ''
        return self.lines[self.line_number - 1]

    def readline_check_physical(self):
        """
        Check and return the next physical line. This method can be
        used to feed tokenize.generate_tokens.
        """
        line = self.readline()
        if line:
            self.check_physical(line)
        return line

    def run_check(self, check, argument_names):
        """
        Run a check plugin.
        """
        # Each argument name is an attribute of this Checker instance
        # (e.g. 'logical_line', 'indent_level'); gather them positionally.
        arguments = []
        for name in argument_names:
            arguments.append(getattr(self, name))
        return check(*arguments)

    def check_physical(self, line):
        """
        Run all physical checks on a raw input line.
        """
        self.physical_line = line
        # Remember the first indent character seen; the indentation
        # consistency checks compare against it.
        if self.indent_char is None and len(line) and line[0] in ' \t':
            self.indent_char = line[0]
        for name, check, argument_names in options.physical_checks:
            result = self.run_check(check, argument_names)
            if result is not None:
                offset, text = result
                self.report_error(self.line_number, offset, text, check)

    def build_tokens_line(self):
        """
        Build a logical line from tokens.

        Populates self.logical_line and self.mapping, which records
        (offset-in-logical-line, token) pairs so an offset reported by a
        check can be mapped back to a physical line/column.
        """
        self.mapping = []
        logical = []
        length = 0
        previous = None
        for token in self.tokens:
            token_type, text = token[0:2]
            if token_type in SKIP_TOKENS:
                continue
            if token_type == tokenize.STRING:
                # Hide string contents so checks cannot match inside them.
                text = mute_string(text)
            if previous:
                end_line, end = previous[3]
                start_line, start = token[2]
                if end_line != start_line:    # different row
                    prev_text = self.lines[end_line - 1][end - 1]
                    if prev_text == ',' or (prev_text not in '{[('
                                            and text not in '}])'):
                        # Continuation lines collapse to a single space.
                        logical.append(' ')
                        length += 1
                elif end != start:  # different column
                    # Preserve the exact whitespace between tokens.
                    fill = self.lines[end_line - 1][end:start]
                    logical.append(fill)
                    length += len(fill)
            self.mapping.append((length, token))
            logical.append(text)
            length += len(text)
            previous = token
        self.logical_line = ''.join(logical)
        assert self.logical_line.lstrip() == self.logical_line
        assert self.logical_line.rstrip() == self.logical_line

    def check_logical(self):
        """
        Build a line from tokens and run all logical checks on it.
        """
        options.counters['logical lines'] += 1
        self.build_tokens_line()
        # Indentation of the first physical line of this logical line.
        first_line = self.lines[self.mapping[0][1][2][0] - 1]
        indent = first_line[:self.mapping[0][1][2][1]]
        self.previous_indent_level = self.indent_level
        self.indent_level = expand_indent(indent)
        if options.verbose >= 2:
            print(self.logical_line[:80].rstrip())
        for name, check, argument_names in options.logical_checks:
            if options.verbose >= 4:
                print(' ' + name)
            result = self.run_check(check, argument_names)
            if result is not None:
                offset, text = result
                if isinstance(offset, tuple):
                    # The check reported an explicit (row, col) position.
                    original_number, original_offset = offset
                else:
                    # Map the logical-line offset back to a physical
                    # position via self.mapping.
                    for token_offset, token in self.mapping:
                        if offset >= token_offset:
                            original_number = token[2][0]
                            original_offset = (token[2][1]
                                               + offset - token_offset)
                self.report_error(original_number, original_offset,
                                  text, check)
        self.previous_logical = self.logical_line

    def check_all(self, expected=None, line_offset=0):
        """
        Run all checks on the input file.

        Returns the number of errors reported (expected/ignored errors
        are not counted).
        """
        self.expected = expected or ()
        self.line_offset = line_offset
        self.line_number = 0
        self.file_errors = 0
        self.indent_char = None
        self.indent_level = 0
        self.previous_logical = ''
        self.blank_lines = 0
        self.blank_lines_before_comment = 0
        self.tokens = []
        parens = 0
        for token in tokenize.generate_tokens(self.readline_check_physical):
            if options.verbose >= 3:
                if token[2][0] == token[3][0]:
                    pos = '[%s:%s]' % (token[2][1] or '', token[3][1])
                else:
                    pos = 'l.%s' % token[3][0]
                print('l.%s\t%s\t%s\t%r' %
                      (token[2][0], pos, tokenize.tok_name[token[0]],
                       token[1]))
            self.tokens.append(token)
            token_type, text = token[0:2]
            # Track bracket nesting: a NEWLINE inside brackets is a
            # continuation, not the end of a logical line.
            if token_type == tokenize.OP and text in '([{':
                parens += 1
            if token_type == tokenize.OP and text in '}])':
                parens -= 1
            if token_type == tokenize.NEWLINE and not parens:
                # End of a logical line: run the logical checks.
                self.check_logical()
                self.blank_lines = 0
                self.blank_lines_before_comment = 0
                self.tokens = []
            if token_type == tokenize.NL and not parens:
                if len(self.tokens) <= 1:
                    # The physical line contains only this token.
                    self.blank_lines += 1
                self.tokens = []
            if token_type == tokenize.COMMENT:
                source_line = token[4]
                token_start = token[2][1]
                if source_line[:token_start].strip() == '':
                    # Comment on a line by itself: remember preceding blanks.
                    self.blank_lines_before_comment = max(
                        self.blank_lines, self.blank_lines_before_comment)
                    self.blank_lines = 0
                if text.endswith('\n') and not parens:
                    # The comment also ends a physical line. This works around
                    # Python < 2.6 behaviour, which does not generate NL after
                    # a comment which is on a line by itself.
                    self.tokens = []
        return self.file_errors

    def report_error(self, line_number, offset, text, check):
        """
        Report an error, according to options.

        text starts with the error code (e.g. 'E501 ...'); counters and
        messages are keyed by that code.
        """
        code = text[:4]
        if ignore_code(code):
            return
        if options.quiet == 1 and not self.file_errors:
            # With -q, print each offending filename once.
            message(self.filename)
        if code in options.counters:
            options.counters[code] += 1
        else:
            options.counters[code] = 1
            options.messages[code] = text[5:]
        if options.quiet or code in self.expected:
            # Don't care about expected errors or warnings
            return
        self.file_errors += 1
        if options.counters[code] == 1 or options.repeat:
            message("%s:%s:%d: %s" %
                    (self.filename, self.line_offset + line_number,
                     offset + 1, text))
            if options.show_source:
                line = self.lines[line_number - 1]
                message(line.rstrip())
                message(' ' * offset + '^')
            if options.show_pep8:
                message(check.__doc__.lstrip('\n').rstrip())
def input_file(filename):
    """
    Run all checks on a Python source file.

    Returns:
        The number of errors reported for the file (previously dropped
        into an unused local variable).
    """
    if options.verbose:
        message('checking ' + filename)
    return Checker(filename).check_all()
def input_dir(dirname, runner=None):
    """
    Check all Python source files in this directory and all subdirectories.

    Args:
        dirname: root directory to walk.
        runner: callable applied to each matching file (default input_file).
    """
    dirname = dirname.rstrip('/')
    if excluded(dirname):
        return
    if runner is None:
        runner = input_file
    for root, dirs, files in os.walk(dirname):
        if options.verbose:
            message('directory ' + root)
        options.counters['directories'] += 1
        dirs.sort()
        # BUGFIX: prune excluded subdirectories with an in-place slice
        # assignment. The original called dirs.remove() while iterating
        # over dirs, which silently skips the entry that follows each
        # removal. Mutating in place is required so os.walk skips them.
        dirs[:] = [subdir for subdir in dirs if not excluded(subdir)]
        files.sort()
        for filename in files:
            if filename_match(filename) and not excluded(filename):
                options.counters['files'] += 1
                runner(os.path.join(root, filename))
def excluded(filename):
    """
    Check if options.exclude contains a pattern that matches filename.

    Returns:
        True when the basename matches an exclude pattern, False otherwise
        (the original implicitly returned None on no match).
    """
    basename = os.path.basename(filename)
    for pattern in options.exclude:
        if fnmatch(basename, pattern):
            # print basename, 'excluded because it matches', pattern
            return True
    return False
def filename_match(filename):
    """
    Check if options.filename contains a pattern that matches filename.
    If options.filename is unspecified, this always returns True.

    Returns:
        True/False explicitly (the original fell through and returned None
        on no match).
    """
    if not options.filename:
        return True
    for pattern in options.filename:
        if fnmatch(filename, pattern):
            return True
    return False
def ignore_code(code):
    """
    Check if options.ignore contains a prefix of the error code.
    If options.select contains a prefix of the error code, do not ignore it.

    Returns:
        True when the code should be ignored, False otherwise (the original
        implicitly returned None when neither list matched).
    """
    # Select takes precedence over ignore.
    for select in options.select:
        if code.startswith(select):
            return False
    for ignore in options.ignore:
        if code.startswith(ignore):
            return True
    return False
def reset_counters():
    """Drop all non-benchmark counters and all collected messages."""
    # Snapshot the stale keys first so the dict is not mutated mid-scan.
    stale = [key for key in options.counters if key not in BENCHMARK_KEYS]
    for key in stale:
        del options.counters[key]
    options.messages = {}
def get_error_statistics():
    """Return the statistics lines for error (E) codes."""
    return get_statistics(prefix='E')
def get_warning_statistics():
    """Return the statistics lines for warning (W) codes."""
    return get_statistics(prefix='W')
def get_statistics(prefix=''):
    """
    Get statistics for message codes that start with the prefix.

    prefix='' matches all errors and warnings
    prefix='E' matches all errors
    prefix='W' matches all warnings
    prefix='E4' matches all errors that have to do with imports
    """
    return ['%-7s %s %s' %
            (options.counters[key], key, options.messages[key])
            for key in sorted(options.messages.keys())
            if key.startswith(prefix)]
def get_count(prefix=''):
    """Return the total count of errors and warnings."""
    return sum(options.counters[key]
               for key in options.messages.keys()
               if key.startswith(prefix))
def print_statistics(prefix=''):
    """Print overall statistics (number of errors and warnings)."""
    for stat_line in get_statistics(prefix):
        print(stat_line)
def print_benchmark(elapsed):
    """
    Print benchmark numbers.

    Args:
        elapsed: wall-clock seconds the run took.
    """
    print('%-7.2f %s' % (elapsed, 'seconds elapsed'))
    # NOTE(review): divides by elapsed — an elapsed of 0 (possible on very
    # fast runs with a coarse clock) would raise ZeroDivisionError; confirm
    # whether a guard is wanted.
    for key in BENCHMARK_KEYS:
        print('%-7d %s per second (%d total)' % (
            options.counters[key] / elapsed, key,
            options.counters[key]))
def run_tests(filename):
    """
    Run all the tests from a file.

    A test file can provide many tests. Each test starts with a declaration.
    This declaration is a single line starting with '#:'.
    It declares codes of expected failures, separated by spaces or 'Okay'
    if no failure is expected.
    If the file does not contain such declaration, it should pass all tests.
    If the declaration is empty, following lines are not checked, until next
    declaration.

    Examples:
     * Only E224 and W701 are expected:  #: E224 W701
     * Following example is conform:     #: Okay
     * Don't check these lines:          #:
    """
    # A trailing '#:' sentinel flushes the final buffered test case.
    lines = readlines(filename) + ['#:\n']
    line_offset = 0
    codes = ['Okay']
    testcase = []
    for index, line in enumerate(lines):
        if not line.startswith('#:'):
            if codes:
                # Collect the lines of the test case
                testcase.append(line)
            continue
        if codes and index > 0:
            label = '%s:%s:1' % (filename, line_offset + 1)
            codes = [c for c in codes if c != 'Okay']
            # Run the checker
            errors = Checker(filename, testcase).check_all(codes, line_offset)
            # Check if the expected errors were found
            for code in codes:
                if not options.counters.get(code):
                    errors += 1
                    message('%s: error %s not found' % (label, code))
            if options.verbose and not errors:
                message('%s: passed (%s)' % (label, ' '.join(codes)))
            # Keep showing errors for multiple tests
            reset_counters()
        # output the real line numbers
        line_offset = index
        # configure the expected errors
        codes = line.split()[1:]
        # empty the test case buffer
        del testcase[:]
def selftest():
    """
    Test all check functions with test cases in docstrings.

    Each check function may embed lines like "E225: i=i+1" in its docstring;
    those are compiled into tiny source snippets and the checker is expected
    to report (or, for 'Okay', not report) the stated code.
    """
    count_passed = 0
    count_failed = 0
    checks = options.physical_checks + options.logical_checks
    for name, check, argument_names in checks:
        for line in check.__doc__.splitlines():
            line = line.lstrip()
            match = SELFTEST_REGEX.match(line)
            if match is None:
                continue
            code, source = match.groups()
            checker = Checker(None)
            # Expand the escaped \n / \t / \s placeholders used in
            # docstring test cases into real source lines.
            for part in source.split(r'\n'):
                part = part.replace(r'\t', '\t')
                part = part.replace(r'\s', ' ')
                checker.lines.append(part + '\n')
            options.quiet = 2
            checker.check_all()
            error = None
            if code == 'Okay':
                # 'Okay' snippets must produce no codes beyond the
                # always-present benchmark counters.
                if len(options.counters) > len(BENCHMARK_KEYS):
                    codes = [key for key in options.counters.keys()
                             if key not in BENCHMARK_KEYS]
                    error = "incorrectly found %s" % ', '.join(codes)
            elif not options.counters.get(code):
                error = "failed to find %s" % code
            # Reset the counters
            reset_counters()
            if not error:
                count_passed += 1
            else:
                count_failed += 1
                if len(checker.lines) == 1:
                    print("pep8.py: %s: %s" %
                          (error, checker.lines[0].rstrip()))
                else:
                    print("pep8.py: %s:" % error)
                    for line in checker.lines:
                        print(line.rstrip())
    if options.verbose:
        print("%d passed and %d failed." % (count_passed, count_failed))
        if count_failed:
            print("Test failed.")
        else:
            print("Test passed.")
def process_options(arglist=None):
    """
    Process options passed either via arglist or via command line args.

    Mutates the module-level `options` and `args` globals and also returns
    them. Comma-separated option strings (exclude/filename/select/ignore)
    are split into lists, and the check registries and counters are
    initialized here.
    """
    global options, args
    parser = OptionParser(version=__version__,
                          usage="%prog [options] input ...")
    parser.add_option('-v', '--verbose', default=0, action='count',
                      help="print status messages, or debug with -vv")
    parser.add_option('-q', '--quiet', default=0, action='count',
                      help="report only file names, or nothing with -qq")
    parser.add_option('-r', '--repeat', action='store_true',
                      help="show all occurrences of the same error")
    parser.add_option('--exclude', metavar='patterns', default=DEFAULT_EXCLUDE,
                      help="exclude files or directories which match these "
                           "comma separated patterns (default: %s)" %
                           DEFAULT_EXCLUDE)
    parser.add_option('--filename', metavar='patterns', default='*.py',
                      help="when parsing directories, only check filenames "
                           "matching these comma separated patterns (default: "
                           "*.py)")
    parser.add_option('--select', metavar='errors', default='',
                      help="select errors and warnings (e.g. E,W6)")
    parser.add_option('--ignore', metavar='errors', default='',
                      help="skip errors and warnings (e.g. E4,W)")
    parser.add_option('--show-source', action='store_true',
                      help="show source code for each error")
    parser.add_option('--show-pep8', action='store_true',
                      help="show text of PEP 8 for each error")
    parser.add_option('--statistics', action='store_true',
                      help="count errors and warnings")
    parser.add_option('--count', action='store_true',
                      help="print total number of errors and warnings "
                           "to standard error and set exit code to 1 if "
                           "total is not null")
    parser.add_option('--benchmark', action='store_true',
                      help="measure processing speed")
    parser.add_option('--testsuite', metavar='dir',
                      help="run regression tests from dir")
    parser.add_option('--doctest', action='store_true',
                      help="run doctest on myself")
    options, args = parser.parse_args(arglist)
    if options.testsuite:
        args.append(options.testsuite)
    if not args and not options.doctest:
        parser.error('input not specified')
    options.prog = os.path.basename(sys.argv[0])
    options.exclude = options.exclude.split(',')
    # Normalize exclude patterns: strip trailing slashes from directories.
    for index in range(len(options.exclude)):
        options.exclude[index] = options.exclude[index].rstrip('/')
    if options.filename:
        options.filename = options.filename.split(',')
    if options.select:
        options.select = options.select.split(',')
    else:
        options.select = []
    if options.ignore:
        options.ignore = options.ignore.split(',')
    elif options.select:
        # Ignore all checks which are not explicitly selected
        options.ignore = ['']
    elif options.testsuite or options.doctest:
        # For doctest and testsuite, all checks are required
        options.ignore = []
    else:
        # The default choice: ignore controversial checks
        options.ignore = DEFAULT_IGNORE.split(',')
    options.physical_checks = find_checks('physical_line')
    options.logical_checks = find_checks('logical_line')
    options.counters = dict.fromkeys(BENCHMARK_KEYS, 0)
    options.messages = {}
    return options, args
def _main():
    """
    Parse options and run checks on Python source.

    Exits with status 1 when --count is given and any errors were found.
    """
    options, args = process_options()
    if options.doctest:
        import doctest
        doctest.testmod(verbose=options.verbose)
        selftest()
    # --testsuite switches the per-file runner to the regression-test mode.
    if options.testsuite:
        runner = run_tests
    else:
        runner = input_file
    start_time = time.time()
    for path in args:
        if os.path.isdir(path):
            input_dir(path, runner=runner)
        elif not excluded(path):
            options.counters['files'] += 1
            runner(path)
    elapsed = time.time() - start_time
    if options.statistics:
        print_statistics()
    if options.benchmark:
        print_benchmark(elapsed)
    count = get_count()
    if count:
        if options.count:
            sys.stderr.write(str(count) + '\n')
        sys.exit(1)


if __name__ == '__main__':
    _main()
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generic object editor view that uses REST services."""
__author__ = 'Pavel Simakov (psimakov@google.com)'
import json
import os
import urllib
# A set of YUI and inputex modules required by the editor; needs to be
# optimized to load only what a specific schema requires. For now this is a
# static list injected verbatim into the editor template.
REQUIRED_MODULES = """
"querystring-stringify-simple",
"inputex-group", "inputex-select", "inputex-string", "inputex-form",
"inputex-radio", "inputex-date", "inputex-datepicker", "inputex-jsonschema",
"inputex-checkbox", "inputex-list", "inputex-color", "inputex-rte",
"inputex-textarea", "inputex-uneditable", "inputex-integer"
"""
class ObjectEditor(object):
    """Generic object editor powered by jsonschema."""

    @classmethod
    def format_annotations(cls, annotations):
        """Formats annotations into JavaScript.

        An annotation is a tuple of two elements. The first element is a
        list of key names forming xpath of a target schema element. The second
        is a dictionary, items of which must be attached to the target element.

        Args:
            annotations: an array of annotations

        Returns:
            The JavaScript representation of the annotations.
        """
        annotations_lines = []
        for item in annotations:
            # Build a JS member path like ['properties']['name'] from
            # the xpath key list.
            path = []
            for element in item[0]:
                path.append('[\'%s\']' % element)
            annotations_lines.append('schema.root%s = %s;' % (
                ''.join(path), json.dumps(item[1])))
        return '\n'.join(annotations_lines)

    @classmethod
    def get_html_for(
        cls, handler, schema_json, annotations, object_key, rest_url, exit_url,
        delete_url=None):
        """Creates an HTML code needed to embed and operate this form.

        This method creates an HTML, JS and CSS required to embed JSON
        schema-based object editor into a view.

        Args:
            handler: a BaseHandler class, which will host this HTML, JS and CSS
            schema_json: a text of JSON schema for the object being edited
            annotations: schema annotations dictionary
            object_key: a key of an object being edited
            rest_url: a REST endpoint for object GET/PUT operation
            exit_url: a URL to go to after the editor form is dismissed
            delete_url: optional URL for delete POST operation

        Returns:
            The HTML, JS and CSS text that will instantiate an object editor.
        """
        # extract label
        type_label = json.loads(schema_json)['description']
        if not type_label:
            type_label = 'Generic Object'

        # construct parameters
        get_url = rest_url
        get_args = {'key': object_key}
        post_url = rest_url
        post_args = {'key': object_key}

        template_values = {
            'schema': schema_json,
            'type_label': type_label,
            # NOTE(review): urllib.urlencode is Python 2 only.
            'get_url': '%s?%s' % (get_url, urllib.urlencode(get_args, True)),
            'save_url': post_url,
            'save_args': json.dumps(post_args),
            'exit_url': exit_url,
            'required_modules': REQUIRED_MODULES,
            'schema_annotations': cls.format_annotations(annotations)
        }
        if delete_url:
            template_values['delete_url'] = delete_url

        # The template lives next to this module.
        return handler.get_template(
            'oeditor.html',
            [os.path.dirname(__file__)]).render(template_values)
def create_bool_select_annotation(
    keys_list, label, true_label, false_label, description=None):
    """Build an inputex annotation that renders a bool as a <select>.

    Returns a (keys_list, attributes) annotation tuple mapping True/False
    to the given human-readable labels.
    """
    choices = [
        {'value': True, 'label': true_label},
        {'value': False, 'label': false_label}]
    properties = {'label': label, 'choices': choices}
    if description:
        properties['description'] = description
    return (keys_list, {'type': 'select', '_inputex': properties})
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Classes supporting configuration property editor and REST operations."""
__author__ = 'Pavel Simakov (psimakov@google.com)'
import cgi
import json
import urllib
from controllers.utils import BaseRESTHandler
from controllers.utils import XsrfTokenManager
from models import config
from models import models
from models import roles
from models import transforms
from modules.oeditor import oeditor
from google.appengine.api import users
from google.appengine.ext import db
# This is a template because the value type is not yet known.
SCHEMA_JSON_TEMPLATE = """
{
"id": "Configuration Property",
"type": "object",
"description": "Configuration Property Override",
"properties": {
"name" : {"type": "string"},
"value": {"optional": true, "type": "%s"},
"is_draft": {"type": "boolean"}
}
}
"""
# This is a template because the doc_string is not yet known.
SCHEMA_ANNOTATIONS_TEMPLATE = [
(['title'], 'Configuration Property Override'),
(['properties', 'name', '_inputex'], {
'label': 'Name', '_type': 'uneditable'}),
oeditor.create_bool_select_annotation(
['properties', 'is_draft'], 'Status', 'Pending', 'Active',
description='<strong>Active</strong>: This value is active and '
'overrides all other defaults.<br/><strong>Pending</strong>: This '
'value is not active yet, and the default settings still apply.')]
class ConfigPropertyRights(object):
    """Manages view/edit rights for configuration properties."""

    @classmethod
    def can_view(cls):
        # Viewing is as restricted as editing.
        return cls.can_edit()

    @classmethod
    def can_edit(cls):
        # Only super admins may change configuration overrides.
        return roles.Roles.is_super_admin()

    @classmethod
    def can_delete(cls):
        return cls.can_edit()

    @classmethod
    def can_add(cls):
        return cls.can_edit()
class ConfigPropertyEditor(object):
    """An editor for any configuration property.

    Mixed into an admin request handler; expects self.request,
    self.redirect, self.create_xsrf_token and self.render_page.
    """

    # Map of configuration property type into inputex type.
    type_map = {str: 'string', int: 'integer', bool: 'boolean'}

    @classmethod
    def get_schema_annotations(cls, config_property):
        """Gets editor specific schema annotations."""
        doc_string = '%s Default: \'%s\'.' % (
            config_property.doc_string, config_property.default_value)
        # Copy the shared template before appending the per-property entry.
        item_dict = [] + SCHEMA_ANNOTATIONS_TEMPLATE
        item_dict.append((
            ['properties', 'value', '_inputex'], {
                'label': 'Value', '_type': '%s' % cls.get_value_type(
                    config_property),
                'description': doc_string}))
        return item_dict

    @classmethod
    def get_value_type(cls, config_property):
        """Gets an editor specific type for the property.

        Raises:
            Exception: if the property's Python type has no inputex mapping.
        """
        # BUGFIX: use .get() so an unmapped type reaches the explicit error
        # below instead of raising a bare KeyError on the lookup; also use
        # '%' formatting (the original passed the value as a second
        # Exception argument instead of interpolating it).
        value_type = cls.type_map.get(config_property.value_type)
        if not value_type:
            raise Exception(
                'Unknown type: %s' % config_property.value_type)
        # Multiline strings get a textarea instead of a one-line input.
        if config_property.value_type == str and config_property.multiline:
            return 'text'
        return value_type

    @classmethod
    def get_schema_json(cls, config_property):
        """Gets JSON schema for configuration property."""
        return SCHEMA_JSON_TEMPLATE % cls.get_value_type(config_property)

    def get_config_edit(self):
        """Handles 'edit' property action."""
        key = self.request.get('name')
        if not key:
            # BUGFIX: redirect() does not abort the handler; return so we
            # don't keep rendering with no key.
            self.redirect('/admin?action=settings')
            return

        # BUGFIX: .get() instead of [] so an unknown key redirects rather
        # than raising KeyError.
        item = config.Registry.registered.get(key)
        if not item:
            self.redirect('/admin?action=settings')
            return

        template_values = {}
        template_values[
            'page_title'] = 'Course Builder - Edit Settings'

        exit_url = '/admin?action=settings#%s' % cgi.escape(key)
        rest_url = '/rest/config/item'
        delete_url = '/admin?%s' % urllib.urlencode({
            'action': 'config_reset',
            'name': key,
            'xsrf_token': cgi.escape(self.create_xsrf_token('config_reset'))})

        template_values['main_content'] = oeditor.ObjectEditor.get_html_for(
            self, ConfigPropertyEditor.get_schema_json(item),
            ConfigPropertyEditor.get_schema_annotations(item),
            key, rest_url, exit_url, delete_url)

        self.render_page(template_values)

    def post_config_override(self):
        """Handles 'override' property action."""
        name = self.request.get('name')

        # Find item in registry.
        item = None
        if name and name in config.Registry.registered.keys():
            item = config.Registry.registered[name]
        if not item:
            # BUGFIX: stop processing; the original fell through and
            # dereferenced item (None) below.
            self.redirect('/admin?action=settings')
            return

        # Add new entity if does not exist.
        try:
            entity = config.ConfigPropertyEntity.get_by_key_name(name)
        except db.BadKeyError:
            entity = None
        if not entity:
            entity = config.ConfigPropertyEntity(key_name=name)
            entity.value = str(item.value)
            entity.is_draft = True
            entity.put()

        models.EventEntity.record(
            'override-property', users.get_current_user(), json.dumps({
                'name': name, 'value': str(entity.value)}))

        self.redirect('/admin?%s' % urllib.urlencode(
            {'action': 'config_edit', 'name': name}))

    def post_config_reset(self):
        """Handles 'reset' property action."""
        name = self.request.get('name')

        # Find item in registry.
        item = None
        if name and name in config.Registry.registered.keys():
            item = config.Registry.registered[name]
        if not item:
            # BUGFIX: stop processing after the redirect.
            self.redirect('/admin?action=settings')
            return

        # Delete if exists.
        try:
            entity = config.ConfigPropertyEntity.get_by_key_name(name)
            if entity:
                old_value = entity.value
                entity.delete()

                models.EventEntity.record(
                    'delete-property', users.get_current_user(), json.dumps({
                        'name': name, 'value': str(old_value)}))
        except db.BadKeyError:
            pass

        self.redirect('/admin?action=settings')
class ConfigPropertyItemRESTHandler(BaseRESTHandler):
    """Provides REST API for a configuration property."""

    def get(self):
        """Handles REST GET verb and returns an object as JSON payload."""
        key = self.request.get('key')

        if not ConfigPropertyRights.can_view():
            transforms.send_json_response(
                self, 401, 'Access denied.', {'key': key})
            return

        item = None
        if key and key in config.Registry.registered.keys():
            item = config.Registry.registered[key]
        if not item:
            # BUGFIX: redirect() does not abort the handler; return so the
            # code below never runs with item == None.
            self.redirect('/admin?action=settings')
            return

        try:
            entity = config.ConfigPropertyEntity.get_by_key_name(key)
        except db.BadKeyError:
            entity = None
        if not entity:
            transforms.send_json_response(
                self, 404, 'Object not found.', {'key': key})
        else:
            entity_dict = {'name': key, 'is_draft': entity.is_draft}
            # Stored values are strings; convert back to the declared type.
            entity_dict['value'] = transforms.string_to_value(
                entity.value, item.value_type)
            json_payload = transforms.dict_to_json(
                entity_dict,
                json.loads(ConfigPropertyEditor.get_schema_json(item)))
            transforms.send_json_response(
                self, 200, 'Success.',
                payload_dict=json_payload,
                xsrf_token=XsrfTokenManager.create_xsrf_token(
                    'config-property-put'))

    def put(self):
        """Handles REST PUT verb with JSON payload."""
        request = json.loads(self.request.get('request'))
        key = request.get('key')

        if not self.assert_xsrf_token_or_fail(
                request, 'config-property-put', {'key': key}):
            return

        if not ConfigPropertyRights.can_edit():
            transforms.send_json_response(
                self, 401, 'Access denied.', {'key': key})
            return

        item = None
        if key and key in config.Registry.registered.keys():
            item = config.Registry.registered[key]
        if not item:
            # BUGFIX: stop processing after the redirect.
            self.redirect('/admin?action=settings')
            return

        try:
            entity = config.ConfigPropertyEntity.get_by_key_name(key)
        except db.BadKeyError:
            entity = None
        if not entity:
            # BUGFIX: get_by_key_name returns None for a missing entity
            # (no exception), so the original crashed with AttributeError
            # below; report 404 for that case as well.
            transforms.send_json_response(
                self, 404, 'Object not found.', {'key': key})
            return

        payload = request.get('payload')
        json_object = json.loads(payload)
        old_value = entity.value
        entity.value = str(item.value_type(json_object['value']))
        entity.is_draft = json_object['is_draft']
        entity.put()

        models.EventEntity.record(
            'put-property', users.get_current_user(), json.dumps({
                'name': key,
                'before': str(old_value), 'after': str(entity.value)}))

        transforms.send_json_response(self, 200, 'Saved.')
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Site administration functionality."""
__author__ = 'Pavel Simakov (psimakov@google.com)'
import cgi
import cStringIO
import datetime
import os
import sys
import time
import urllib
from appengine_config import PRODUCTION_MODE
from controllers import sites
from controllers.utils import ReflectiveRequestHandler
import jinja2
from models import config
from models import counters
from models import roles
from models.config import ConfigProperty
from modules.admin.config import ConfigPropertyEditor
import webapp2
from google.appengine.api import users
import google.appengine.api.app_identity as app
# A time this module was initialized.
BEGINNING_OF_TIME = time.time()
DELEGATED_ACCESS_IS_NOT_ALLOWED = """
You must be an actual admin user to continue.
Users with the delegated admin rights are not allowed."""
def evaluate_python_code(code):
    """Compiles and evaluates a Python script in a restricted environment.

    Args:
        code: source text to execute; CRLF line endings are normalized.

    Returns:
        An (output, success) tuple: everything the script wrote to stdout
        (plus an 'Error: ...' line on failure) and whether execution
        completed without raising.
    """
    # SECURITY: exec() runs the supplied code with full application
    # privileges against this module's globals; callers must gate this
    # behind admin-only access.
    code = code.replace('\r\n', '\n')

    save_stdout = sys.stdout
    results_io = cStringIO.StringIO()
    try:
        # Capture everything the script prints.
        sys.stdout = results_io
        try:
            compiled_code = compile(code, '<string>', 'exec')
            exec(compiled_code, globals())  # pylint: disable-msg=exec-statement
        except Exception as e:  # pylint: disable-msg=broad-except
            results_io.write('Error: %s' % e)
            # The finally clause below still restores stdout before this
            # return value leaves the function.
            return results_io.getvalue(), False
    finally:
        # Always restore stdout, even when the script raised.
        sys.stdout = save_stdout
    return results_io.getvalue(), True
class AdminHandler(
webapp2.RequestHandler, ReflectiveRequestHandler, ConfigPropertyEditor):
"""Handles all pages and actions required for administration of site."""
default_action = 'courses'
get_actions = [
default_action, 'settings', 'deployment', 'perf', 'config_edit',
'console']
post_actions = ['config_reset', 'config_override', 'console_run']
def can_view(self):
    """Checks if current user has viewing rights."""
    # Site administration is restricted to super admins only.
    return roles.Roles.is_super_admin()
def can_edit(self):
    """Checks if current user has editing rights."""
    # Editing rights currently mirror viewing rights.
    return self.can_view()
def get(self):
    """Enforces rights to all GET operations."""
    # Unauthorized users are bounced to the site root.
    if not self.can_view():
        self.redirect('/')
        return
    return super(AdminHandler, self).get()
def post(self):
    """Enforces rights to all POST operations."""
    # Unauthorized users are bounced to the site root.
    if not self.can_edit():
        self.redirect('/')
        return
    return super(AdminHandler, self).post()
def get_template(self, template_name, dirs):
    """Sets up an environment and Gets jinja template.

    Caller-supplied dirs are searched first, then this module's directory.
    """
    jinja_environment = jinja2.Environment(
        loader=jinja2.FileSystemLoader(dirs + [os.path.dirname(__file__)]))
    return jinja_environment.get_template(template_name)
def render_page(self, template_values):
    """Renders a page using provided template values.

    Fills in the shared navigation, user info and footer slots, then
    writes the rendered 'view.html' to the response.
    """
    # Link to the App Engine console: production uses the hosted
    # dashboard, the dev server exposes /_ah/admin.
    if PRODUCTION_MODE:
        app_id = app.get_application_id()
        console_link = """
            <a target="_blank"
                href="https://appengine.google.com/dashboard?app_id=s~%s">
              Google App Engine
            </a>
            """ % app_id
    else:
        console_link = """
            <a target="_blank" href="/_ah/admin">Google App Engine</a>
            """

    template_values['top_nav'] = """
        <a href="/admin">Courses</a>
        <a href="/admin?action=settings">Settings</a>
        <a href="/admin?action=perf">Metrics</a>
        <a href="/admin?action=deployment">Deployment</a>
        <a href="/admin?action=console">Console</a>
        %s
        """ % console_link
    template_values['user_nav'] = '%s | <a href="%s">Logout</a>' % (
        users.get_current_user().email(), users.create_logout_url('/'))
    template_values[
        'page_footer'] = 'Created on: %s' % datetime.datetime.now()

    self.response.write(
        self.get_template('view.html', []).render(template_values))
def render_dict(self, source_dict, title):
    """Renders a dictionary ordered by keys.

    Returns an HTML fragment: an <h3> title followed by an <ol> of
    'key: value' items, with keys and values HTML-escaped.
    """
    keys = sorted(source_dict.keys())

    content = []
    content.append('<h3>%s</h3>' % title)
    content.append('<ol>')
    for key in keys:
        value = source_dict[key]
        # ConfigProperty entries render their current value, not the object.
        if isinstance(value, ConfigProperty):
            value = value.value
        content.append(
            '<li>%s: %s</li>' % (cgi.escape(key), cgi.escape(str(value))))
    content.append('</ol>')
    return '\n'.join(content)
def format_title(self, text):
    """Return the standard admin page title with *text* appended."""
    return 'Course Builder > Admin > {0}'.format(text)
def get_perf(self):
    """Shows server performance counters page."""
    template_values = {}
    template_values['page_title'] = self.format_title('Metrics')

    perf_counters = {}

    # built in counters
    # NOTE(review): long() is Python 2 only.
    perf_counters['gcb-admin-uptime-sec'] = long(
        time.time() - BEGINNING_OF_TIME)

    # config counters
    perf_counters['gcb-config-overrides'] = len(
        config.Registry.get_overrides())
    perf_counters['gcb-config-age-sec'] = (
        long(time.time()) - config.Registry.last_update_time)
    perf_counters['gcb-config-update-time-sec'] = (
        config.Registry.last_update_time)
    perf_counters['gcb-config-update-index'] = config.Registry.update_index

    # add all registered counters
    all_counters = counters.Registry.registered.copy()
    for name in all_counters.keys():
        perf_counters[name] = all_counters[name].value

    template_values['main_content'] = self.render_dict(
        perf_counters, 'In-process Performance Counters')
    self.render_page(template_values)
def get_deployment(self):
    """Shows server environment and deployment information page.

    Renders the contents of app.yaml, the application identity and the
    server environment variables.
    """
    template_values = {}
    template_values['page_title'] = self.format_title('Deployment')

    # Yaml file content.
    yaml_content = []
    yaml_content.append('<h3>Contents of <code>app.yaml</code></h3>')
    yaml_content.append('<ol>')
    yaml_path = os.path.join(os.path.dirname(__file__), '../../app.yaml')
    # BUGFIX: close the file deterministically instead of leaking the
    # handle (the original called open(...).readlines() with no close).
    with open(yaml_path, 'r') as yaml_file:
        yaml_lines = yaml_file.readlines()
    for line in yaml_lines:
        yaml_content.append('<li>%s</li>' % cgi.escape(line))
    yaml_content.append('</ol>')
    yaml_content = ''.join(yaml_content)

    # Application identity.
    app_id = app.get_application_id()
    app_dict = {}
    app_dict['application_id'] = app_id
    app_dict['default_ver_hostname'] = app.get_default_version_hostname()

    template_values['main_content'] = self.render_dict(
        app_dict,
        'About the Application') + yaml_content + self.render_dict(
            os.environ, 'Server Environment Variables')

    self.render_page(template_values)
def get_settings(self):
    """Shows configuration properties information page."""
    template_values = {}
    template_values['page_title'] = self.format_title('Settings')
    content = []
    # Color legend: green marks a value overridden in the datastore,
    # blue marks a value supplied by the app.yaml environment.
    content.append("""
        <style>
          span.gcb-db-diff, td.gcb-db-diff {
              background-color: #A0FFA0;
          }
          span.gcb-env-diff, td.gcb-env-diff {
              background-color: #A0A0FF;
          }
        </style>
        """)
    content.append('<h3>All Settings</h3>')
    content.append('<table class="gcb-config">')
    content.append("""
        <tr>
          <th>Name</th>
          <th>Current Value</th>
          <th>Actions</th>
          <th>Description</th>
        </tr>
        """)

    def get_style_for(value, value_type):
        """Formats CSS style for given value."""
        style = ''
        # Center short scalar values (numbers, booleans, empties).
        if not value or value_type in [int, long, bool]:
            style = 'style="text-align: center;"'
        return style

    def get_action_html(caption, args, onclick=None):
        """Formats actions <a> link."""
        handler = ''
        if onclick:
            handler = 'onclick="%s"' % onclick
        return '<a %s class="gcb-button" href="/admin?%s">%s</a>' % (
            handler, urllib.urlencode(args), cgi.escape(caption))

    def get_actions(name, override):
        """Creates actions appropriate to an item."""
        actions = []
        if override:
            # Already overridden: link to the editor.
            actions.append(get_action_html('Edit', {
                'action': 'config_edit', 'name': name}))
        else:
            # Not yet overridden: overriding is state-changing, so it is a
            # POST form carrying an XSRF token rather than a plain link.
            actions.append("""
                <form action='/admin?%s' method='POST'>
                  <input type="hidden" name="xsrf_token" value="%s">
                  <button class="gcb-button" type="submit">
                    Override
                  </button></form>""" % (
                      urllib.urlencode(
                          {'action': 'config_override', 'name': name}),
                      cgi.escape(self.create_xsrf_token('config_override'))
                  ))
        return ''.join(actions)

    def get_doc_string(item, default_value):
        """Formats an item documentation string for display."""
        doc_string = item.doc_string
        if doc_string:
            doc_string = cgi.escape(doc_string)
        else:
            doc_string = 'No documentation available.'
        doc_string = ' %s Default: "%s".' % (doc_string, default_value)
        return doc_string

    overrides = config.Registry.get_overrides(True)
    registered = config.Registry.registered.copy()
    count = 0
    for name in sorted(registered.keys()):
        count += 1
        item = registered[name]
        default_value = item.default_value
        has_environ_value, environ_value = item.get_environ_value()
        # Pick a CSS class reflecting where the effective value came from:
        # datastore override (green), app.yaml environment (blue), or the
        # codebase default (no highlight).
        value = item.value
        class_current = 'class="gcb-db-diff"'
        if value == default_value:
            class_current = ''
        if has_environ_value and value == environ_value:
            class_current = 'class="gcb-env-diff"'
        if default_value:
            default_value = cgi.escape(str(default_value))
        if value:
            value = cgi.escape(str(value))
        style_current = get_style_for(value, item.value_type)
        # NOTE(review): item.name is rendered without cgi.escape, unlike
        # the other cells; presumably property names are trusted -- confirm.
        content.append("""
            <tr>
              <td style='white-space: nowrap;'>%s</td>
              <td %s %s>%s</td>
              <td style='white-space: nowrap;' align='center'>%s</td>
              <td>%s</td>
            </tr>
            """ % (
                item.name, class_current, style_current, value,
                get_actions(name, name in overrides),
                get_doc_string(item, default_value)))
    content.append("""
        <tr><td colspan="4" align="right">Total: %s item(s)</td></tr>
        """ % count)
    content.append('</table>')
    content.append("""
        <p><strong>Legend</strong>:
        For each property, the value shown corresponds to, in
        descending order of priority:
        <span class='gcb-db-diff'>
        [ the value set via this page ] </span>,
        <span class='gcb-env-diff'>
        [ the environment value in app.yaml ] </span>,
        and the [ default value ] in the Course Builder codebase.""")
    template_values['main_content'] = ''.join(content)
    self.render_page(template_values)
def get_courses(self):
    """Shows a list of all courses available on this site."""
    template_values = {}
    template_values['page_title'] = self.format_title('Courses')
    content = []
    content.append('<h3>All Courses</h3>')
    content.append('<table>')
    content.append("""
        <tr>
          <th>Course Title</th>
          <th>Context Path</th>
          <th>Content Location</th>
          <th>Datastore Namespace</th>
        </tr>
        """)
    courses = sites.get_all_courses()
    count = 0
    for course in courses:
        count += 1
        error = ''
        slug = course.get_slug()
        location = sites.abspath(course.get_home_folder(), '/')
        try:
            name = cgi.escape(course.get_environ()['course']['title'])
        except Exception as e:  # pylint: disable-msg=broad-except
            # A malformed course.yaml must not break the whole listing;
            # render the parse error inline next to the course instead.
            name = 'UNKNOWN COURSE'
            error = (
                '<p>Error in <strong>course.yaml</strong> file:<br/>'
                '<pre>\n%s\n%s\n</pre></p>' % (
                    e.__class__.__name__, cgi.escape(str(e))))
        # The root course is mounted at '/', where '%s/dashboard' would
        # produce a wrong '//dashboard' URL.
        if slug == '/':
            link = '/dashboard'
        else:
            link = '%s/dashboard' % slug
        link = '<a href="%s">%s</a>' % (link, name)
        content.append("""
            <tr>
              <td>%s%s</td>
              <td>%s</td>
              <td>%s</td>
              <td>%s</td>
            </tr>
            """ % (
                link, error, slug, location, course.get_namespace_name()))
    content.append("""
        <tr><td colspan="4" align="right">Total: %s item(s)</td></tr>
        """ % count)
    content.append('</table>')
    template_values['main_content'] = ''.join(content)
    self.render_page(template_values)
def get_console(self):
    """Shows interactive Python console page."""
    template_values = {}
    template_values['page_title'] = self.format_title('Console')
    # Check rights. The console can execute arbitrary code, so access is
    # limited to direct super admins; delegated admins get a refusal page.
    if not roles.Roles.is_direct_super_admin():
        template_values['main_content'] = DELEGATED_ACCESS_IS_NOT_ALLOWED
        self.render_page(template_values)
        return
    # The form POSTs to console_run and carries an XSRF token scoped to
    # that action.
    content = []
    content.append("""
        <p><i><strong>WARNING!</strong> The Interactive Console has the same
        access to the application's environment and services as a .py file
        inside the application itself. Be careful, because this means writes
        to your data store will be executed for real!</i></p>
        <p><strong>
        Input your Python code below and press "Run Program" to execute.
        </strong><p>
        <form action='/admin?action=console_run' method='POST'>
        <input type="hidden" name="xsrf_token" value="%s">
        <textarea
        style='width: 95%%; height: 200px;' name='code'></textarea>
        <p align='center'>
        <button class="gcb-button" type="submit">Run Program</button>
        </p>
        </form>""" % cgi.escape(self.create_xsrf_token('console_run')))
    template_values['main_content'] = ''.join(content)
    self.render_page(template_values)
def post_console_run(self):
    """Executes dynamically submitted Python code."""
    template_values = {}
    template_values['page_title'] = self.format_title('Execution Results')
    # Check rights; same restriction as get_console.
    if not roles.Roles.is_direct_super_admin():
        template_values['main_content'] = DELEGATED_ACCESS_IS_NOT_ALLOWED
        self.render_page(template_values)
        return
    # Execute code. evaluate_python_code is defined elsewhere in this
    # module; presumably it returns (captured_output, success_flag) --
    # TODO confirm against its definition.
    code = self.request.get('code')
    time_before = time.time()
    output, results = evaluate_python_code(code)
    duration = long(time.time() - time_before)
    status = 'FAILURE'
    if results:
        status = 'SUCCESS'
    # Render results: the submitted code (escaped, line-numbered), the
    # status/duration summary, and the captured program output.
    content = []
    content.append('<h3>Submitted Python Code</h3>')
    content.append('<ol>')
    for line in code.split('\n'):
        content.append('<li>%s</li>' % cgi.escape(line))
    content.append('</ol>')
    content.append("""
        <h3>Execution Results</h3>
        <ol>
          <li>Status: %s</li>
          <li>Duration (sec): %s</li>
        </ol>
        """ % (status, duration))
    content.append('<h3>Program Output</h3>')
    content.append(
        '<blockquote><pre>%s</pre></blockquote>' % cgi.escape(
            output))
    template_values['main_content'] = ''.join(content)
    self.render_page(template_values)
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Classes and methods to create and manage Announcements."""
__author__ = 'Saifu Angto (saifu@google.com)'
import datetime
import json
import urllib
from controllers.utils import BaseHandler
from controllers.utils import BaseRESTHandler
from controllers.utils import ReflectiveRequestHandler
from controllers.utils import XsrfTokenManager
from models import entities
from models import roles
from models.models import MemcacheManager
import models.transforms as transforms
import modules.announcements.samples as samples
from modules.oeditor import oeditor
from google.appengine.ext import db
# TODO(psimakov): we should really use an ordered dictionary, not plain text; it
# can't be just a normal dict because a dict iterates its items in undefined
# order; thus when we render a dict to JSON an order of fields will not match
# what we specify here; the final editor will also show the fields in an
# undefined order; for now we use the raw JSON, rather than the dict, but will
# move to an ordered dict later
SCHEMA_JSON = """
{
"id": "Announcement Entity",
"type": "object",
"description": "Announcement",
"properties": {
"key" : {"type": "string"},
"title": {"optional": true, "type": "string"},
"date": {"optional": true, "type": "date"},
"html": {"optional": true, "type": "text"},
"is_draft": {"type": "boolean"}
}
}
"""
SCHEMA_DICT = json.loads(SCHEMA_JSON)
# inputex specific schema annotations to control editor look and feel
SCHEMA_ANNOTATIONS_DICT = [
(['title'], 'Announcement'),
(['properties', 'key', '_inputex'], {
'label': 'ID', '_type': 'uneditable'}),
(['properties', 'date', '_inputex'], {
'label': 'Date', '_type': 'date', 'dateFormat': 'Y/m/d',
'valueFormat': 'Y/m/d'}),
(['properties', 'title', '_inputex'], {'label': 'Title'}),
(['properties', 'html', '_inputex'], {'label': 'Body', '_type': 'text'}),
oeditor.create_bool_select_annotation(
['properties', 'is_draft'], 'Status', 'Draft', 'Published')]
class AnnouncementsRights(object):
    """Manages view/edit rights for announcements."""

    @classmethod
    def can_view(cls, unused_handler):
        """Anyone, including anonymous users, may view announcements."""
        return True

    @classmethod
    def can_edit(cls, handler):
        """Only a course admin for the handler's course may edit."""
        return roles.Roles.is_course_admin(handler.app_context)

    @classmethod
    def can_delete(cls, handler):
        """Deleting requires the same rights as editing."""
        return cls.can_edit(handler)

    @classmethod
    def can_add(cls, handler):
        """Adding requires the same rights as editing."""
        return cls.can_edit(handler)

    @classmethod
    def apply_rights(cls, handler, items):
        """Filter out items that current user can't see."""
        # Editors see everything, drafts included; everyone else only
        # sees published (non-draft) items.
        if AnnouncementsRights.can_edit(handler):
            return items
        return [item for item in items if not item.is_draft]
class AnnouncementsHandler(BaseHandler, ReflectiveRequestHandler):
    """Handler for announcements."""

    # ReflectiveRequestHandler dispatches ?action=<name> to the matching
    # get_<name> / post_<name> method below.
    default_action = 'list'
    get_actions = [default_action, 'edit']
    post_actions = ['add', 'delete']

    @classmethod
    def get_child_routes(cls):
        """Add child handlers for REST."""
        return [('/rest/announcements/item', AnnouncementsItemRESTHandler)]

    def get_action_url(self, action, key=None):
        """Builds a canonical /announcements URL for an action and key."""
        args = {'action': action}
        if key:
            args['key'] = key
        return self.canonicalize_url(
            '/announcements?%s' % urllib.urlencode(args))

    def format_items_for_template(self, items):
        """Formats a list of entities into template values."""
        template_items = []
        for item in items:
            item = transforms.entity_to_dict(item)
            # add 'edit'/'delete' actions, visible only to editors; the
            # delete action is a POST and thus carries an XSRF token
            if AnnouncementsRights.can_edit(self):
                item['edit_action'] = self.get_action_url('edit', item['key'])
                item['delete_xsrf_token'] = self.create_xsrf_token('delete')
                item['delete_action'] = self.get_action_url(
                    'delete', item['key'])
            template_items.append(item)
        output = {}
        output['children'] = template_items
        # add 'add' action for editors
        if AnnouncementsRights.can_edit(self):
            output['add_xsrf_token'] = self.create_xsrf_token('add')
            output['add_action'] = self.get_action_url('add')
        return output

    def put_sample_announcements(self):
        """Loads sample data into a database."""
        items = []
        for item in samples.SAMPLE_ANNOUNCEMENTS:
            entity = AnnouncementEntity()
            transforms.dict_to_entity(entity, item)
            entity.put()
            items.append(entity)
        return items

    def get_list(self):
        """Shows a list of announcements."""
        if not self.personalize_page_and_get_enrolled():
            return
        items = AnnouncementEntity.get_announcements()
        # Seed an empty course with sample announcements the first time
        # an editor views the list.
        if not items and AnnouncementsRights.can_edit(self):
            items = self.put_sample_announcements()
        items = AnnouncementsRights.apply_rights(self, items)
        self.template_value['announcements'] = self.format_items_for_template(
            items)
        self.template_value['navbar'] = {'announcements': True}
        self.render('announcements.html')

    def get_edit(self):
        """Shows an editor for an announcement."""
        if not AnnouncementsRights.can_edit(self):
            self.error(401)
            return
        key = self.request.get('key')
        # On save/cancel the editor returns to the list, anchored at the
        # edited item; the key is URL-quoted to be fragment-safe.
        exit_url = self.canonicalize_url(
            '/announcements#%s' % urllib.quote(key, safe=''))
        rest_url = self.canonicalize_url('/rest/announcements/item')
        form_html = oeditor.ObjectEditor.get_html_for(
            self, SCHEMA_JSON, SCHEMA_ANNOTATIONS_DICT,
            key, rest_url, exit_url)
        self.template_value['navbar'] = {'announcements': True}
        self.template_value['content'] = form_html
        self.render('bare.html')

    def post_delete(self):
        """Deletes an announcement."""
        if not AnnouncementsRights.can_delete(self):
            self.error(401)
            return
        key = self.request.get('key')
        entity = AnnouncementEntity.get(key)
        # Deleting an already-missing entity is treated as success.
        if entity:
            entity.delete()
        self.redirect('/announcements')

    def post_add(self):
        """Adds a new announcement and redirects to an editor for it."""
        if not AnnouncementsRights.can_add(self):
            self.error(401)
            return
        # New announcements start as placeholder drafts so they are not
        # visible to students before being edited.
        entity = AnnouncementEntity()
        entity.title = 'Sample Announcement'
        entity.date = datetime.datetime.now().date()
        entity.html = 'Here is my announcement!'
        entity.is_draft = True
        entity.put()
        self.redirect(self.get_action_url('edit', entity.key()))
class AnnouncementsItemRESTHandler(BaseRESTHandler):
    """Provides REST API for an announcement."""

    def get(self):
        """Handles REST GET verb and returns an object as JSON payload."""
        key = self.request.get('key')
        try:
            entity = AnnouncementEntity.get(key)
        except db.BadKeyError:
            # A malformed key is reported the same way as a missing one.
            entity = None
        if not entity:
            transforms.send_json_response(
                self, 404, 'Object not found.', {'key': key})
            return
        # Draft items are hidden from users without edit rights.
        viewable = AnnouncementsRights.apply_rights(self, [entity])
        if not viewable:
            transforms.send_json_response(
                self, 401, 'Access denied.', {'key': key})
            return
        entity = viewable[0]
        json_payload = transforms.dict_to_json(transforms.entity_to_dict(
            entity), SCHEMA_DICT)
        # The response carries the XSRF token the client must echo back
        # in a subsequent PUT.
        transforms.send_json_response(
            self, 200, 'Success.',
            payload_dict=json_payload,
            xsrf_token=XsrfTokenManager.create_xsrf_token(
                'announcement-put'))

    def put(self):
        """Handles REST PUT verb with JSON payload."""
        request = json.loads(self.request.get('request'))
        key = request.get('key')
        # Validate the XSRF token issued by GET before checking rights or
        # touching the datastore.
        if not self.assert_xsrf_token_or_fail(
                request, 'announcement-put', {'key': key}):
            return
        if not AnnouncementsRights.can_edit(self):
            transforms.send_json_response(
                self, 401, 'Access denied.', {'key': key})
            return
        entity = AnnouncementEntity.get(key)
        if not entity:
            transforms.send_json_response(
                self, 404, 'Object not found.', {'key': key})
            return
        # Apply the schema-validated payload onto the entity and persist.
        payload = request.get('payload')
        transforms.dict_to_entity(entity, transforms.json_to_dict(
            json.loads(payload), SCHEMA_DICT))
        entity.put()
        transforms.send_json_response(self, 200, 'Saved.')
class AnnouncementEntity(entities.BaseEntity):
    """A class that represents a persistent database entity of announcement."""

    # Entity fields; title and html are unindexed since they are never
    # queried on, only displayed.
    title = db.StringProperty(indexed=False)
    date = db.DateProperty()
    html = db.TextProperty(indexed=False)
    is_draft = db.BooleanProperty()

    # Single shared memcache key under which the full announcements list
    # is cached; put()/delete() below invalidate it.
    memcache_key = 'announcements'

    @classmethod
    def get_announcements(cls, allow_cached=True):
        """Returns up to 1000 announcements, newest first, via memcache."""
        items = MemcacheManager.get(cls.memcache_key)
        if not allow_cached or items is None:
            items = AnnouncementEntity.all().order('-date').fetch(1000)

            # TODO(psimakov): prepare to exceed 1MB max item size
            # read more here: http://stackoverflow.com
            # /questions/5081502/memcache-1-mb-limit-in-google-app-engine
            MemcacheManager.set(cls.memcache_key, items)
        return items

    def put(self):
        """Do the normal put() and also invalidate memcache."""
        result = super(AnnouncementEntity, self).put()
        MemcacheManager.delete(self.memcache_key)
        return result

    def delete(self):
        """Do the normal delete() and invalidate memcache."""
        super(AnnouncementEntity, self).delete()
        MemcacheManager.delete(self.memcache_key)
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Sample announcements."""
__author__ = 'Pavel Simakov (psimakov@google.com)'
import datetime
SAMPLE_ANNOUNCEMENT_1 = {
'edit_url': None,
'title': 'Example Announcement',
'date': datetime.date(2012, 10, 6),
'is_draft': False,
'html': """
<br>Certificates will be e-mailed to qualifying participants by
Friday, October 12.
<br>
<br>Do you want to check your assessment scores? Visit the
<a href="student/home">"My profile"</a> page!</p>
"""}
SAMPLE_ANNOUNCEMENT_2 = {
'edit_url': None,
'title': 'Welcome to Class 6 and the Post-class Assessment',
'date': datetime.date(2012, 10, 5),
'is_draft': True,
'html': """
<br>Welcome to the final class! <a href="class?class=6"> Class 6</a>
focuses on combining the skills you have learned throughout the class
to maximize the effectiveness of your searches.
<br>
<br><b>Customize Your Experience</b>
<br>You can customize your experience in several ways:
<ul>
<li>You can watch the videos multiple times for a deeper understanding
of each lesson. </li>
<li>You can read the text version for each lesson. Click the button
above the video to access it.</li>
<li>Lesson activities are designed for multiple levels of experience.
The first question checks your recall of the material in the video;
the second question lets you verify your mastery of the lesson; the
third question is an opportunity to apply your skills and share your
experiences in the class forums. You can answer some or all of the
questions depending on your familiarity and interest in the topic.
Activities are not graded and do not affect your final grade. </li>
<li>We'll also post extra challenges in the forums for people who seek
additional opportunities to practice and test their new skills!</li>
</ul>
<br><b>Forum</b>
<br>Apply your skills, share with others, and connect with your peers
and course staff in the <a href="forum">forum.</a> Discuss your favorite
search tips and troubleshoot technical issues. We'll also post bonus
videos and challenges there!
<p> </p>
<p>For an optimal learning experience, please plan to use the most
recent version of your browser, as well as a desktop, laptop or a tablet
computer instead of your mobile phone.</p>
"""}
SAMPLE_ANNOUNCEMENTS = [SAMPLE_ANNOUNCEMENT_1, SAMPLE_ANNOUNCEMENT_2]
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Classes and methods to create and manage Courses."""
__author__ = 'Pavel Simakov (psimakov@google.com)'
import cgi
import datetime
import json
import os
from controllers import sites
from controllers.utils import ApplicationHandler
from controllers.utils import ReflectiveRequestHandler
import jinja2
from models import courses
from models import jobs
from models import roles
from models.models import Student
from google.appengine.api import users
from google.appengine.ext import db
class DashboardHandler(ApplicationHandler, ReflectiveRequestHandler):
    """Handles all pages and actions required for managing a course."""

    # ReflectiveRequestHandler dispatches ?action=<name> to get_<name> /
    # post_<name> methods below.
    default_action = 'outline'
    get_actions = [default_action, 'assets', 'settings', 'students']
    post_actions = ['compute_student_stats']

    def can_view(self):
        """Checks if current user has viewing rights."""
        return roles.Roles.is_super_admin()

    def get(self):
        """Enforces rights to all GET operations."""
        if not self.can_view():
            # Unauthorized users are bounced back to the course home page.
            self.redirect(self.app_context.get_slug())
            return
        return super(DashboardHandler, self).get()

    def get_template(self, template_name, dirs):
        """Sets up an environment and Gets jinja template."""
        # This module's own directory is always the template search
        # fallback after any caller-provided dirs.
        jinja_environment = jinja2.Environment(
            loader=jinja2.FileSystemLoader(dirs + [os.path.dirname(__file__)]))
        return jinja_environment.get_template(template_name)

    def render_page(self, template_values):
        """Renders a page using provided template values."""
        # Super admins additionally get a link to the site-wide admin UI.
        admin_menu = ''
        if roles.Roles.is_super_admin():
            admin_menu = '<a href="/admin">Admin</a>'
        template_values['top_nav'] = """
            <a href="dashboard">Outline</a>
            <a href="dashboard?action=assets">Assets</a>
            <a href="dashboard?action=settings">Settings</a>
            <a href="dashboard?action=students">Students</a>
            %s
            """ % admin_menu
        template_values['gcb_course_base'] = self.get_base_href(self)
        template_values['user_nav'] = '%s | <a href="%s">Logout</a>' % (
            users.get_current_user().email(), users.create_logout_url('/'))
        template_values[
            'page_footer'] = 'Created on: %s' % datetime.datetime.now()
        self.response.write(
            self.get_template('view.html', []).render(template_values))

    def format_title(self, text):
        """Formats standard title."""
        title = self.app_context.get_environ()['course']['title']
        return ('Course Builder > %s > Dashboard > %s' %
                (cgi.escape(title), text))

    def get_outline(self):
        """Renders course outline view."""
        template_values = {}
        template_values['page_title'] = self.format_title('Outline')
        course = courses.Course(self)
        lines = []
        lines.append(
            '<h3>Course Units, Lessons, Activities and Assessments</h3>')
        lines.append('<ul style="list-style: none;">')
        # Unit type codes: 'A' = assessment, 'O' = link-style unit,
        # 'U' = regular unit with nested lessons.
        for unit in course.get_units():
            if unit.type == 'A':
                lines.append('<li>')
                lines.append(
                    '<strong><a href="assessment?name=%s">%s</a></strong>' % (
                        unit.unit_id, cgi.escape(unit.title)))
                lines.append('</li>\n')
                continue
            if unit.type == 'O':
                lines.append('<li>')
                lines.append(
                    '<strong><a href="%s">%s</a></strong>' % (
                        unit.unit_id, cgi.escape(unit.title)))
                lines.append('</li>\n')
                continue
            if unit.type == 'U':
                lines.append('<li>')
                lines.append('<strong>Unit %s - %s</strong>' % (
                    unit.unit_id, cgi.escape(unit.title)))
                # NOTE(review): this inner check is redundant -- we are
                # already inside the unit.type == 'U' branch.
                if unit.type == 'U':
                    lines.append('<ol>')
                    # NOTE(review): lesson.title is rendered without
                    # cgi.escape, unlike unit.title above -- confirm
                    # whether lesson titles are trusted.
                    for lesson in course.get_lessons(unit.unit_id):
                        href = 'unit?unit=%s&lesson=%s' % (
                            unit.unit_id, lesson.id)
                        lines.append(
                            '<li><a href="%s">%s</a></li>\n' % (
                                href, lesson.title))
                    lines.append('</ol>')
                lines.append('</li>\n')
                continue
            raise Exception('Unknown unit type: %s.' % unit.type)
        lines.append('</ul>')
        lines = ''.join(lines)
        template_values['main_content'] = lines
        self.render_page(template_values)

    def get_settings(self):
        """Renders course settings view."""
        template_values = {}
        template_values['page_title'] = self.format_title('Settings')
        # Course identity.
        title = self.app_context.get_environ()['course']['title']
        location = sites.abspath(self.app_context.get_home_folder(), '/')
        yaml = self.app_context.get_config_filename()
        slug = self.app_context.get_slug()
        namespace = self.app_context.get_namespace_name()
        course_info = []
        course_info.append('<h3>About the Course</h3>')
        course_info.append("""
            <ol>
              <li>Course Title: %s</li>
              <li>Content Location: %s</li>
              <li>config.yaml: %s</li>
              <li>Context Path: %s</li>
              <li>Datastore Namespace: %s</li>
            </ol>
            """ % (title, location, yaml, slug, namespace))
        course_info = ''.join(course_info)
        # Yaml file content, read through the course's virtual file system.
        yaml_content = []
        yaml_content.append(
            '<h3>Contents of <code>course.yaml</code> file</h3>')
        yaml_content.append('<ol>')
        yaml_lines = self.app_context.fs.open(
            self.app_context.get_config_filename()).read().decode('utf-8')
        for line in yaml_lines.split('\n'):
            yaml_content.append('<li>%s</li>\n' % cgi.escape(line))
        yaml_content.append('</ol>')
        yaml_content = ''.join(yaml_content)
        template_values['main_content'] = course_info + yaml_content
        self.render_page(template_values)

    def list_and_format_file_list(self, subfolder, links=False):
        """Walks files in folders and renders their names."""
        home = sites.abspath(self.app_context.get_home_folder(), '/')
        files = self.app_context.fs.list(
            sites.abspath(self.app_context.get_home_folder(), subfolder))
        lines = []
        for abs_filename in sorted(files):
            # Show paths relative to the course home folder; optionally
            # make them clickable.
            filename = os.path.relpath(abs_filename, home)
            if links:
                lines.append(
                    '<li><a href="%s">%s</a></li>\n' % (filename, filename))
            else:
                lines.append('<li>%s</li>\n' % filename)
        return lines

    def get_assets(self):
        """Renders course assets view."""
        template_values = {}
        template_values['page_title'] = self.format_title('Assets')
        lines = []
        lines.append('<h3>Content Location</h3>')
        lines.append('<blockquote>%s</blockquote>' % sites.abspath(
            self.app_context.get_home_folder(), '/'))
        # Data files are listed as plain names; assets get links since
        # they are directly servable.
        lines.append('<h3>Course Data Files</h3>')
        lines.append('<ol>')
        lines += self.list_and_format_file_list('/data/')
        lines.append('</ol>')
        lines.append('<h3>Course Assets</h3>')
        lines.append('<ol>')
        lines += self.list_and_format_file_list('/assets/', True)
        lines.append('</ol>')
        lines = ''.join(lines)
        template_values['main_content'] = lines
        self.render_page(template_values)

    def get_students(self):
        """Renders course students view."""
        template_values = {}
        template_values['page_title'] = self.format_title('Students')
        # Placeholder content shown until a stats job has ever completed.
        details = """
            <h3>Enrollment Statistics</h3>
            <ul><li>pending</li></ul>
            <h3>Assessment Statistics</h3>
            <ul><li>pending</li></ul>
            """
        update_message = ''
        # Form that enqueues a fresh ComputeStudentStats run; hidden while
        # a run is already in progress (see below).
        update_action = """
            <form
                id='gcb-compute-student-stats'
                action='dashboard?action=compute_student_stats'
                method='POST'>
                <input type="hidden" name="xsrf_token" value="%s">
                <p>
                    <button class="gcb-button" type="submit">
                        Re-Calculate Now
                    </button>
                </p>
            </form>
            """ % self.create_xsrf_token('compute_student_stats')
        job = ComputeStudentStats(self.app_context).load()
        if not job:
            update_message = """
                Student statistics have not been calculated yet."""
        else:
            if job.status_code == jobs.STATUS_CODE_COMPLETED:
                # Completed run: unpack the JSON produced by
                # ComputeStudentStats.run().
                stats = json.loads(job.output)
                enrolled = stats['enrollment']['enrolled']
                unenrolled = stats['enrollment']['unenrolled']
                enrollment = []
                enrollment.append(
                    '<li>previously enrolled: %s</li>' % unenrolled)
                enrollment.append(
                    '<li>currently enrolled: %s</li>' % enrolled)
                enrollment.append(
                    '<li>total: %s</li>' % (unenrolled + enrolled))
                enrollment = ''.join(enrollment)
                # Per-assessment scores arrive as {name: (count, sum)}.
                assessment = []
                total = 0
                for key, value in stats['scores'].items():
                    total += value[0]
                    avg_score = 0
                    if value[0]:
                        avg_score = round(value[1] / value[0], 1)
                    assessment.append("""
                        <li>%s: completed %s, average score %s
                        """ % (key, value[0], avg_score))
                assessment.append('<li>total: %s</li>' % total)
                assessment = ''.join(assessment)
                details = """
                    <h3>Enrollment Statistics</h3>
                    <ul>%s</ul>
                    <h3>Assessment Statistics</h3>
                    <ul>%s</ul>
                    """ % (enrollment, assessment)
                update_message = """
                    Student statistics were last updated on
                    %s in about %s second(s).""" % (
                        job.updated_on, job.execution_time_sec)
            elif job.status_code == jobs.STATUS_CODE_FAILED:
                update_message = """
                    There was an error updating student statistics.
                    Here is the message:<br>
                    <blockquote>
                      <pre>\n%s</pre>
                    </blockquote>
                    """ % cgi.escape(job.output)
            else:
                # Job is queued or running: suppress the re-run button so
                # concurrent runs are not enqueued.
                update_action = ''
                update_message = """
                    Student statistics update started on %s and is running
                    now. Please come back shortly.""" % job.updated_on
        lines = []
        lines.append(details)
        lines.append(update_message)
        lines.append(update_action)
        lines = ''.join(lines)
        template_values['main_content'] = lines
        self.render_page(template_values)

    def post_compute_student_stats(self):
        """Submits a new student statistics calculation task."""
        job = ComputeStudentStats(self.app_context)
        job.submit()
        self.redirect('/dashboard?action=students')
class ScoresAggregator(object):
    """Aggregates scores statistics."""

    def __init__(self):
        # Keyed by assessment name; each value is a tuple of
        # (student_count, sum_of_scores).
        self.name_to_tuple = {}

    def visit(self, student):
        """Folds one student's JSON-encoded scores into the totals."""
        if not student.scores:
            return
        parsed = json.loads(student.scores)
        for name, score in parsed.items():
            count, score_sum = self.name_to_tuple.get(name, (0, 0))
            self.name_to_tuple[name] = (count + 1, score_sum + float(score))
class EnrollmentAggregator(object):
    """Aggregates enrollment statistics."""

    def __init__(self):
        # Running totals of currently vs. previously enrolled students.
        self.enrolled = 0
        self.unenrolled = 0

    def visit(self, student):
        """Counts one student into the appropriate bucket."""
        bucket = 'enrolled' if student.is_enrolled else 'unenrolled'
        setattr(self, bucket, getattr(self, bucket) + 1)
class ComputeStudentStats(jobs.DurableJob):
    """A job that computes student statistics."""

    def run(self):
        """Computes student statistics.

        Returns:
            A JSON-serializable dict with an 'enrollment' sub-dict
            (enrolled/unenrolled counts) and a 'scores' mapping of
            assessment name to (count, score_sum).
        """
        enrollment = EnrollmentAggregator()
        scores = ScoresAggregator()
        # Stream every Student row through both aggregators; the large
        # batch_size keeps datastore round trips to a minimum.
        query = db.GqlQuery(
            'SELECT * FROM %s' % Student().__class__.__name__,
            batch_size=10000)
        for student in query.run():
            enrollment.visit(student)
            scores.visit(student)
        data = {
            'enrollment': {
                'enrolled': enrollment.enrolled,
                'unenrolled': enrollment.unenrolled},
            'scores': scores.name_to_tuple}
        return data
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Main package for Course Builder, which handles URL routing."""
import os
import webapp2
# The following import is needed in order to add third-party libraries.
import appengine_config # pylint: disable-msg=unused-import
from controllers import assessments
from controllers import lessons
from controllers import sites
from controllers import utils
from modules.admin import admin
from modules.admin import config
from modules.announcements import announcements
from modules.dashboard import dashboard
# Routes served within a per-course context; ApplicationRequestHandler
# dispatches them for each registered course.
urls = [
    ('/', lessons.CourseHandler),
    ('/activity', lessons.ActivityHandler),
    ('/announcements', announcements.AnnouncementsHandler),
    ('/answer', assessments.AnswerHandler),
    ('/assessment', lessons.AssessmentHandler),
    ('/course', lessons.CourseHandler),
    ('/forum', utils.ForumHandler),
    ('/dashboard', dashboard.DashboardHandler),
    ('/preview', utils.PreviewHandler),
    ('/register', utils.RegisterHandler),
    ('/student/editstudent', utils.StudentEditStudentHandler),
    ('/student/home', utils.StudentProfileHandler),
    ('/student/unenroll', utils.StudentUnenrollHandler),
    ('/unit', lessons.UnitHandler)]

# Bind the course-scoped routes to the multi-course dispatcher.
sites.ApplicationRequestHandler.bind(urls)

# Serve the bundled inputex library directly out of its zip file.
inputex_handler = (
    '/static/inputex-3.1.0/(.*)', sites.make_zip_handler(
        os.path.join(appengine_config.BUNDLE_ROOT, 'lib/inputex-3.1.0.zip')))

# Site-wide (non course-scoped) admin routes.
admin_handlers = [
    ('/admin', admin.AdminHandler),
    ('/rest/config/item', config.ConfigPropertyItemRESTHandler)]

# Catch-all route; must come last so admin and static routes win first.
app_handler = (r'(.*)', sites.ApplicationRequestHandler)

webapp2_i18n_config = {'translations_path': os.path.join(
    appengine_config.BUNDLE_ROOT, 'modules/i18n/resources/locale')}

# Debug mode (detailed error pages) is enabled everywhere but production.
debug = not appengine_config.PRODUCTION_MODE

app = webapp2.WSGIApplication(
    admin_handlers + [inputex_handler] + [app_handler],
    config={'webapp2_extras.i18n': webapp2_i18n_config}, debug=debug)
| Python |
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Custom configurations and functions for Google App Engine."""
__author__ = 'psimakov@google.com (Pavel Simakov)'
import os
import sys
# Whether we are running in the production environment.
PRODUCTION_MODE = not os.environ.get(
'SERVER_SOFTWARE', 'Development').startswith('Development')
# this is the official location of this app for computing of all relative paths
BUNDLE_ROOT = os.path.dirname(__file__)
# Default namespace name is '' and not None.
DEFAULT_NAMESPACE_NAME = ''
# Third-party library zip files.
THIRD_PARTY_LIBS = ['babel-0.9.6.zip', 'gaepytz-2011h.zip']
def gcb_force_default_encoding(encoding):
    """Pin sys.getdefaultencoding() to the given codec name.

    Eclipse silently sets the default encoding to 'utf-8' while GAE forces
    'ascii'; controlling it explicitly keeps behavior consistent. No-op when
    the interpreter already uses the requested encoding.
    """
    if sys.getdefaultencoding() == encoding:
        return
    # setdefaultencoding() is deleted from sys during startup (Python 2);
    # reloading the module restores it so we can call it.
    reload(sys)
    sys.setdefaultencoding(encoding)
def gcb_init_third_party():
    """Prepend every bundled third-party zip library to sys.path.

    Raises:
        Exception: if any expected zip file is missing from BUNDLE_ROOT/lib.
    """
    for zip_name in THIRD_PARTY_LIBS:
        zip_path = os.path.join(BUNDLE_ROOT, 'lib/%s' % zip_name)
        if os.path.exists(zip_path):
            # Insert at the front so bundled versions win over system ones.
            sys.path.insert(0, zip_path)
        else:
            raise Exception('Library does not exist: %s' % zip_path)
def gcb_configure_dev_server_if_running():
    """Configure various aspects of development server if not production."""
    if not PRODUCTION_MODE:
        # These GAE SDK modules are only importable inside the dev server,
        # hence the deferred import.
        # pylint: disable-msg=g-import-not-at-top
        from google.appengine.api import apiproxy_stub_map
        from google.appengine.datastore import datastore_stub_util
        # Make dev_appserver run with PseudoRandomHRConsistencyPolicy, which we
        # believe is the best for localhost manual testing; normally
        # dev_appserver runs either under MasterSlave policy, which does not
        # allow XG transactions, or under TimeBasedHR policy, which serves
        # counter-intuitive dirty query results; this also matches policy for
        # the functional tests
        stub = apiproxy_stub_map.apiproxy.GetStub(
            'datastore_v3')
        # The stub may be absent (e.g. outside a configured dev server).
        if stub:
            # probability=1 makes every write immediately visible to queries,
            # i.e. a fully consistent HR datastore.
            policy = datastore_stub_util.PseudoRandomHRConsistencyPolicy(
                probability=1)
            stub.SetConsistencyPolicy(policy)
# Run at import time so every entry point that imports appengine_config gets
# the third-party libraries on sys.path and the dev-server tweaks applied.
gcb_init_third_party()
gcb_configure_dev_server_if_running()
| Python |
# coding: utf-8
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Runs all unit tests."""
__author__ = 'Pavel Simakov (psimakov@google.com)'
import os
import sys
import time
import unittest
import appengine_config
from controllers import sites
from controllers import utils
from models import config
from models import transforms
from tools import verify
class ShouldHaveFailedByNow(Exception):
    """Raised when a callable that was expected to fail completed normally."""
def assert_fails(function):
    """Asserts that calling *function* with no arguments raises an exception.

    Raises ShouldHaveFailedByNow when the call completes without raising;
    any other exception from the call is treated as the expected failure
    and swallowed.
    """
    try:
        function()
    except ShouldHaveFailedByNow:
        # Propagate our own sentinel untouched.
        raise
    except Exception:  # pylint: disable=broad-except
        # The call failed, which is exactly what we wanted.
        return
    raise ShouldHaveFailedByNow(
        'Expected to fail: %s().' % function.__name__)
class InvokeExistingUnitTest(unittest.TestCase):
    """Run all units tests declared elsewhere.

    NOTE: this suite is Python 2 code — it relies on `unicode`, `long` and
    appengine_config's reload(sys)-based default-encoding switch.
    """

    def test_existing_unit_tests(self):
        """Run all units tests declared elsewhere."""
        sites.run_all_unit_tests()
        config.run_all_unit_tests()
        verify.run_all_unit_tests()
        transforms.run_all_unit_tests()

    def test_xsrf_token_manager(self):
        """Test XSRF token operations."""
        # AUTH_DOMAIN must be set for the users service to mint identities.
        os.environ['AUTH_DOMAIN'] = 'test_domain'

        # Issues and verify anonymous user token.
        action = 'test-action'
        token = utils.XsrfTokenManager.create_xsrf_token(action)
        # Tokens have the form '<timestamp>/<digest>'.
        assert '/' in token
        assert utils.XsrfTokenManager.is_xsrf_token_valid(token, action)

        # Impersonate real user.
        os.environ['USER_EMAIL'] = 'test_email'
        os.environ['USER_ID'] = 'test_id'

        # Issues and verify real user token.
        action = 'test-action'
        token = utils.XsrfTokenManager.create_xsrf_token(action)
        assert '/' in token
        assert utils.XsrfTokenManager.is_xsrf_token_valid(token, action)

        # Check forged time stamp invalidates token.
        parts = token.split('/')
        assert len(parts) == 2
        forgery = '%s/%s' % (long(parts[0]) + 1000, parts[1])
        assert not forgery == token
        assert not utils.XsrfTokenManager.is_xsrf_token_valid(forgery, action)

        # Check token properly expires.
        action = 'test-action'
        time_in_the_past = long(
            time.time() - utils.XsrfTokenManager.XSRF_TOKEN_AGE_SECS)
        # Uses the private _create_token to back-date the issue time.
        old_token = utils.XsrfTokenManager._create_token(
            action, time_in_the_past)
        assert not utils.XsrfTokenManager.is_xsrf_token_valid(old_token, action)

        # Clean up.
        del os.environ['AUTH_DOMAIN']
        del os.environ['USER_EMAIL']
        del os.environ['USER_ID']

    def test_string_encoding(self):
        """Test our understanding of Python string encoding aspects.

        We were quite naive to believe Python solves all string encoding issues
        automatically. That is not completely true and we have to do a lot of
        manual work to get it right. Here we capture some of the patterns.
        """
        original_encoding = sys.getdefaultencoding()

        # Test with 'ascii' default encoding. Note that GAE runs in 'ascii',
        # and not in 'utf-8'. There is no way to override this currently.
        appengine_config.gcb_force_default_encoding('ascii')

        # Note that Python bravely ignores the file encoding declaration
        # 'coding: utf-8' at the top of this file. The intuitive behavior would
        # be to change the default encoding to 'utf-8' for all the code running
        # in the scope of this file.

        # Initialization.
        test_1 = 'My Test Title Мой заголовок теста'
        test_2 = u'My Test Title Мой заголовок теста'

        # Types: a byte string and a unicode string are distinct and unequal
        # under 'ascii' default encoding.
        assert isinstance(test_1, str)
        assert isinstance(test_2, unicode)
        assert test_1 != test_2

        # Conversions: decoding requires an explicit codec under 'ascii'.
        assert_fails(lambda: unicode(test_1))
        assert unicode(test_1, 'utf-8')
        assert isinstance(unicode(test_1, 'utf-8'), unicode)
        assert unicode(test_1, 'utf-8') == test_2

        # Expressions: mixing str and unicode implicitly decodes via the
        # default codec, which fails for non-ASCII bytes.
        assert_fails(lambda: test_1 + test_2)
        assert_fails(lambda: '%s %s' % (test_1, test_2))
        assert_fails(lambda: u'%s %s' % (test_1, test_2))  # Why does it fail?
        assert_fails(lambda: ''.join([test_1, test_2]))
        assert_fails(lambda: u''.join([test_1, test_2]))  # Why does it fail?
        ''.join([unicode(test_1, 'utf-8'), test_2])

        # Test with 'utf-8' default encoding.
        appengine_config.gcb_force_default_encoding('utf-8')

        # Initialization.
        test_1 = 'My Test Title Мой заголовок теста'
        test_2 = u'My Test Title Мой заголовок теста'

        # Types.
        assert isinstance(test_1, str)  # How can this be true?
        assert isinstance(test_2, unicode)
        assert test_1 == test_2  # Note '!=' above, and '==' here. Who knew!!!

        # Conversions now succeed because the implicit codec is utf-8.
        assert unicode(test_1) == test_2
        assert unicode(test_1, 'utf-8') == test_2

        # Expressions.
        assert test_1 + test_2
        assert '%s %s' % (test_1, test_2)
        assert u'%s %s' % (test_1, test_2)

        # Clean up.
        appengine_config.gcb_force_default_encoding(original_encoding)
# Allow running this suite directly from the command line.
if __name__ == '__main__':
    unittest.TextTestRunner().run(
        unittest.TestLoader().loadTestsFromTestCase(InvokeExistingUnitTest))
| Python |
# coding: utf-8
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests that walk through Course Builder pages."""
__author__ = 'Sean Lip'
import csv
import datetime
import json
import logging
import os
import re
import shutil
import urllib
import appengine_config
from controllers import lessons
from controllers import sites
from controllers import utils
from controllers.utils import XsrfTokenManager
from models import config
from models import jobs
from models import models
from models import vfs
from models.utils import get_all_scores
from models.utils import get_score
from modules.announcements.announcements import AnnouncementEntity
from tools import verify
import actions
from actions import assert_contains
from actions import assert_contains_all_of
from actions import assert_does_not_contain
from actions import assert_equals
from google.appengine.api import namespace_manager
class InfrastructureTest(actions.TestBase):
    """Test core infrastructure classes agnostic to specific user roles."""

    def test_utf8_datastore(self):
        """Test writing to and reading from datastore using UTF-8 content."""
        event = models.EventEntity()
        event.source = 'test-source'
        event.user_id = 'test-user-id'
        event.data = u'Test Data (тест данные)'
        event.put()

        # Round-trip the entity and verify the unicode payload survived.
        stored_event = models.EventEntity().get_by_id([event.key().id()])
        assert 1 == len(stored_event)
        assert event.data == stored_event[0].data

    def assert_queriable(self, entity, name, date_type=datetime.datetime):
        """Create some entities and check that single-property queries work."""
        for i in range(1, 32):
            # NOTE(review): date_type.__class__.__name__ evaluates to 'type'
            # (date_type is itself a class); 'date_type.__name__' was likely
            # intended. Harmless here since each entity kind has its own
            # keyspace — confirm before changing.
            item = entity(
                key_name='%s_%s' % (date_type.__class__.__name__, i))
            setattr(item, name, date_type(2012, 1, i))
            item.put()

        # Descending order.
        items = entity.all().order('-%s' % name).fetch(1000)
        assert len(items) == 31
        assert getattr(items[0], name) == date_type(2012, 1, 31)

        # Ascending order.
        items = entity.all().order('%s' % name).fetch(1000)
        assert len(items) == 31
        assert getattr(items[0], name) == date_type(2012, 1, 1)

    def test_indexed_properties(self):
        """Test whether entities support specific query types."""
        # A 'DateProperty' or 'DateTimeProperty' of each persistent entity must
        # be indexed. This is true even if the application doesn't execute any
        # queries relying on the index. The index is still critically important
        # for managing data, for example, for bulk data download or for
        # incremental computations. Using index, the entire table can be
        # processed in daily, weekly, etc. chunks and it is easy to query for
        # new data. If we did not have an index, chunking would have to be done
        # by the primary index, where it is impossible to separate recently
        # added/modified rows from the rest of the data. Having this index adds
        # to the cost of datastore writes, but we believe it is important to
        # have it. Below we check that all persistent date/datetime properties
        # are indexed.
        self.assert_queriable(AnnouncementEntity, 'date', datetime.date)
        self.assert_queriable(models.EventEntity, 'recorded_on')
        self.assert_queriable(models.Student, 'enrolled_on')
        self.assert_queriable(models.StudentAnswersEntity, 'updated_on')
        self.assert_queriable(jobs.DurableJobEntity, 'updated_on')

    def test_assets_and_date(self):
        """Verify semantics of all asset and data files."""
        # NOTE(review): method name likely meant 'test_assets_and_data';
        # renaming would change test discovery ids, so it is left as-is.

        def echo(unused_message):
            # Discard verifier progress messages.
            pass

        warnings, errors = verify.Verifier().load_and_verify_model(echo)
        assert not errors and not warnings

    def test_config_visible_from_any_namespace(self):
        """Test that ConfigProperty is visible from any namespace."""
        # Starts at the default; the override below must change it.
        assert (
            config.UPDATE_INTERVAL_SEC.value ==
            config.UPDATE_INTERVAL_SEC.default_value)
        new_value = config.UPDATE_INTERVAL_SEC.default_value + 5

        # Add datastore override for known property.
        prop = config.ConfigPropertyEntity(
            key_name=config.UPDATE_INTERVAL_SEC.name)
        prop.value = str(new_value)
        prop.is_draft = False
        prop.put()

        # Check visible from default namespace. Resetting last_update_time
        # forces the registry to re-read overrides from the datastore.
        config.Registry.last_update_time = 0
        assert config.UPDATE_INTERVAL_SEC.value == new_value

        # Check visible from another namespace.
        old_namespace = namespace_manager.get_namespace()
        try:
            namespace_manager.set_namespace(
                'ns-test_config_visible_from_any_namespace')
            config.Registry.last_update_time = 0
            assert config.UPDATE_INTERVAL_SEC.value == new_value
        finally:
            namespace_manager.set_namespace(old_namespace)
class AdminAspectTest(actions.TestBase):
    """Test site from the Admin perspective."""

    def test_python_console(self):
        """Test access rights to the Python console."""
        email = 'test_python_console@google.com'

        # Check normal user has no access (redirected away).
        actions.login(email)
        response = self.testapp.get('/admin?action=console')
        assert_equals(response.status_int, 302)
        response = self.testapp.post('/admin?action=console')
        assert_equals(response.status_int, 302)

        # Check delegated admin has no access.
        os.environ['gcb_admin_user_emails'] = '[%s]' % email
        actions.login(email)
        response = self.testapp.get('/admin?action=console')
        assert_equals(response.status_int, 200)
        assert_contains(
            'You must be an actual admin user to continue.', response.body)
        # NOTE(review): this repeats the GET above verbatim; the POST verb was
        # presumably meant here (mirroring the anonymous checks) — confirm.
        response = self.testapp.get('/admin?action=console')
        assert_equals(response.status_int, 200)
        assert_contains(
            'You must be an actual admin user to continue.', response.body)
        del os.environ['gcb_admin_user_emails']

        # Check actual admin has access and can execute code.
        actions.login(email, True)
        response = self.testapp.get('/admin?action=console')
        assert_equals(response.status_int, 200)
        response.form.set('code', 'print "foo" + "bar"')
        response = self.submit(response.form)
        assert_contains('foobar', response.body)

    def test_non_admin_has_no_access(self):
        """Test non admin has no access to pages or REST endpoints."""
        email = 'test_non_admin_has_no_access@google.com'
        actions.login(email)

        # Add datastore override.
        prop = config.ConfigPropertyEntity(
            key_name='gcb_config_update_interval_sec')
        prop.value = '5'
        prop.is_draft = False
        prop.put()

        # Check user has no access to specific pages and actions.
        response = self.testapp.get('/admin?action=settings')
        assert_equals(response.status_int, 302)
        response = self.testapp.get(
            '/admin?action=config_edit&name=gcb_admin_user_emails')
        assert_equals(response.status_int, 302)
        response = self.testapp.post(
            '/admin?action=config_reset&name=gcb_admin_user_emails')
        assert_equals(response.status_int, 302)

        # Check user has no rights to GET verb; REST endpoint answers 200
        # with an error payload rather than an HTTP error status.
        response = self.testapp.get(
            '/rest/config/item?key=gcb_config_update_interval_sec')
        assert_equals(response.status_int, 200)
        json_dict = json.loads(response.body)
        assert json_dict['status'] == 401
        assert json_dict['message'] == 'Access denied.'

        # Check user has no rights to PUT verb.
        payload_dict = {}
        payload_dict['value'] = '666'
        payload_dict['is_draft'] = False
        request = {}
        request['key'] = 'gcb_config_update_interval_sec'
        request['payload'] = json.dumps(payload_dict)

        # Check XSRF token is required.
        response = self.testapp.put('/rest/config/item?%s' % urllib.urlencode(
            {'request': json.dumps(request)}), {})
        assert_equals(response.status_int, 200)
        assert_contains('"status": 403', response.body)

        # Check user still has no rights to PUT verb even if he somehow
        # obtained a valid XSRF token.
        request['xsrf_token'] = XsrfTokenManager.create_xsrf_token(
            'config-property-put')
        response = self.testapp.put('/rest/config/item?%s' % urllib.urlencode(
            {'request': json.dumps(request)}), {})
        assert_equals(response.status_int, 200)
        json_dict = json.loads(response.body)
        assert json_dict['status'] == 401
        assert json_dict['message'] == 'Access denied.'

    def test_admin_list(self):
        """Test delegation of admin access to another user."""
        email = 'test_admin_list@google.com'
        actions.login(email)

        # Add environment variable override delegating admin to this user.
        os.environ['gcb_admin_user_emails'] = '[%s]' % email

        # Add datastore override.
        prop = config.ConfigPropertyEntity(
            key_name='gcb_config_update_interval_sec')
        prop.value = '5'
        prop.is_draft = False
        prop.put()

        # Check user has access now.
        response = self.testapp.get('/admin?action=settings')
        assert_equals(response.status_int, 200)

        # Check overrides are active and have proper management actions.
        assert_contains('gcb_admin_user_emails', response.body)
        assert_contains('[test_admin_list@google.com]', response.body)
        assert_contains(
            '/admin?action=config_override&name=gcb_admin_user_emails',
            response.body)
        assert_contains(
            '/admin?action=config_edit&name=gcb_config_update_interval_sec',
            response.body)

        # Check editor page has proper actions.
        response = self.testapp.get(
            '/admin?action=config_edit&name=gcb_config_update_interval_sec')
        assert_equals(response.status_int, 200)
        assert_contains('/admin?action=config_reset', response.body)
        assert_contains('name=gcb_config_update_interval_sec', response.body)

        # Remove override.
        del os.environ['gcb_admin_user_emails']

        # Check user has no access.
        response = self.testapp.get('/admin?action=settings')
        assert_equals(response.status_int, 302)

    def test_access_to_admin_pages(self):
        """Test access to admin pages."""
        # assert anonymous user has no access
        response = self.testapp.get('/admin?action=settings')
        assert_equals(response.status_int, 302)

        # assert admin user has access
        email = 'test_access_to_admin_pages@google.com'
        name = 'Test Access to Admin Pages'

        actions.login(email, True)
        actions.register(self, name)

        response = self.testapp.get('/admin')
        assert_contains('Power Searching with Google', response.body)
        assert_contains('All Courses', response.body)

        response = self.testapp.get('/admin?action=settings')
        assert_contains('gcb_admin_user_emails', response.body)
        assert_contains('gcb_config_update_interval_sec', response.body)
        assert_contains('All Settings', response.body)

        response = self.testapp.get('/admin?action=perf')
        assert_contains('gcb-admin-uptime-sec:', response.body)
        assert_contains('In-process Performance Counters', response.body)

        response = self.testapp.get('/admin?action=deployment')
        assert_contains('application_id: testbed-test', response.body)
        assert_contains('About the Application', response.body)

        actions.unregister(self)
        actions.logout()

        # assert not-admin user has no access
        actions.login(email)
        actions.register(self, name)

        response = self.testapp.get('/admin?action=settings')
        assert_equals(response.status_int, 302)

    def test_multiple_courses(self):
        """Test courses admin page with two courses configured."""
        courses = 'course:/foo:/foo-data, course:/bar:/bar-data:nsbar'
        os.environ[sites.GCB_COURSES_CONFIG_ENV_VAR_NAME] = courses

        email = 'test_multiple_courses@google.com'
        actions.login(email, True)

        response = self.testapp.get('/admin')
        assert_contains('Course Builder > Admin > Courses', response.body)
        assert_contains('Total: 2 item(s)', response.body)

        # Check course URLs.
        assert_contains('<a href="/foo/dashboard">', response.body)
        assert_contains('<a href="/bar/dashboard">', response.body)

        # Check content locations.
        assert_contains('/foo-data', response.body)
        assert_contains('/bar-data', response.body)

        # Check namespaces: derived for /foo, explicit ('nsbar') for /bar.
        assert_contains('gcb-course-foo-data', response.body)
        assert_contains('nsbar', response.body)
class CourseAuthorAspectTest(actions.TestBase):
    """Tests the site from the Course Author perspective."""

    def test_dashboard(self):
        """Test course dashboard."""
        email = 'test_dashboard@google.com'
        name = 'Test Dashboard'

        # Non-admin does't have access.
        actions.login(email)
        response = self.get('dashboard')
        assert_equals(response.status_int, 302)

        actions.register(self, name)
        # NOTE(review): 'response' is not re-fetched after registering, so
        # this re-asserts on the stale response above. A fresh
        # self.get('dashboard') was presumably intended — confirm.
        assert_equals(response.status_int, 302)
        actions.logout()

        # Admin has access.
        actions.login(email, True)
        response = self.get('dashboard')
        assert_contains('Google > Dashboard > Outline', response.body)

        # Tests outline view.
        response = self.get('dashboard')
        assert_contains('Unit 3 - Advanced techniques', response.body)

        # Test assets view.
        response = self.get('dashboard?action=assets')
        assert_contains('Google > Dashboard > Assets', response.body)
        assert_contains('data/lesson.csv', response.body)
        assert_contains('assets/css/main.css', response.body)
        assert_contains('assets/img/Image1.5.png', response.body)
        assert_contains('assets/js/activity-3.2.js', response.body)

        # Test settings view.
        response = self.get('dashboard?action=settings')
        assert_contains(
            'Google > Dashboard > Settings', response.body)
        assert_contains('course.yaml', response.body)
        assert_contains('title: \'Power Searching with Google\'', response.body)
        assert_contains('locale: \'en_US\'', response.body)

        # Tests student statistics view; stats start out uncomputed.
        response = self.get('dashboard?action=students')
        assert_contains(
            'Google > Dashboard > Students', response.body)
        assert_contains('have not been calculated yet', response.body)

        # Kick off the deferred stats computation job.
        compute_form = response.forms['gcb-compute-student-stats']
        response = self.submit(compute_form)
        assert_equals(response.status_int, 302)
        assert len(self.taskq.GetTasks('default')) == 1

        response = self.get('dashboard?action=students')
        assert_contains('is running', response.body)

        self.execute_all_deferred_tasks()

        response = self.get('dashboard?action=students')
        assert_contains('were last updated on', response.body)
        assert_contains('currently enrolled: 1', response.body)
        assert_contains('total: 1', response.body)

        # Tests assessment statistics: add five students with scores 0..4
        # directly in the course namespace.
        old_namespace = namespace_manager.get_namespace()
        namespace_manager.set_namespace(self.namespace)
        try:
            for i in range(5):
                student = models.Student(key_name='key-%s' % i)
                student.is_enrolled = True
                student.scores = json.dumps({'test-assessment': i})
                student.put()
        finally:
            namespace_manager.set_namespace(old_namespace)

        # Recompute and verify the aggregates (mean of 0..4 is 2.0).
        response = self.get('dashboard?action=students')
        compute_form = response.forms['gcb-compute-student-stats']
        response = self.submit(compute_form)

        self.execute_all_deferred_tasks()

        response = self.get('dashboard?action=students')
        assert_contains('currently enrolled: 6', response.body)
        assert_contains(
            'test-assessment: completed 5, average score 2.0', response.body)

    def test_trigger_sample_announcements(self):
        """Test course author can trigger adding sample announcements."""
        email = 'test_announcements@google.com'
        name = 'Test Announcements'

        actions.login(email, True)
        actions.register(self, name)

        # First admin visit seeds the sample announcements, drafts included.
        response = actions.view_announcements(self)
        assert_contains('Example Announcement', response.body)
        assert_contains('Welcome to the final class!', response.body)
        assert_does_not_contain('No announcements yet.', response.body)

    def test_manage_announcements(self):
        """Test course author can manage announcements."""
        email = 'test_announcements@google.com'
        name = 'Test Announcements'

        actions.login(email, True)
        actions.register(self, name)

        # add new
        response = actions.view_announcements(self)
        add_form = response.forms['gcb-add-announcement']
        response = self.submit(add_form)
        assert_equals(response.status_int, 302)

        # check added
        response = actions.view_announcements(self)
        assert_contains('Sample Announcement (Draft)', response.body)

        # delete draft
        response = actions.view_announcements(self)
        delete_form = response.forms['gcb-delete-announcement-1']
        response = self.submit(delete_form)
        assert_equals(response.status_int, 302)

        # check deleted
        assert_does_not_contain('Welcome to the final class!', response.body)

    def test_announcements_rest(self):
        """Test REST access to announcements."""
        email = 'test_announcements_rest@google.com'
        name = 'Test Announcements Rest'

        actions.login(email, True)
        actions.register(self, name)

        response = actions.view_announcements(self)
        assert_does_not_contain('My Test Title', response.body)

        # REST GET existing item; fetch(1) yields at most one item, so the
        # loop body runs once.
        items = AnnouncementEntity.all().fetch(1)
        for item in items:
            response = self.get('rest/announcements/item?key=%s' % item.key())
            json_dict = json.loads(response.body)
            assert json_dict['status'] == 200
            assert 'message' in json_dict
            assert 'payload' in json_dict

            payload_dict = json.loads(json_dict['payload'])
            assert 'title' in payload_dict
            assert 'date' in payload_dict

            # REST PUT item
            payload_dict['title'] = u'My Test Title Мой заголовок теста'
            payload_dict['date'] = '2012/12/31'
            payload_dict['is_draft'] = True
            request = {}
            request['key'] = str(item.key())
            request['payload'] = json.dumps(payload_dict)

            # Check XSRF is required.
            response = self.put('rest/announcements/item?%s' % urllib.urlencode(
                {'request': json.dumps(request)}), {})
            assert_equals(response.status_int, 200)
            assert_contains('"status": 403', response.body)

            # Check PUT works with the token issued by the GET above.
            request['xsrf_token'] = json_dict['xsrf_token']
            response = self.put('rest/announcements/item?%s' % urllib.urlencode(
                {'request': json.dumps(request)}), {})
            assert_equals(response.status_int, 200)
            assert_contains('"status": 200', response.body)

            # Confirm change is visible on the page.
            response = self.get('announcements')
            assert_contains(
                u'My Test Title Мой заголовок теста (Draft)', response.body)

        # REST GET not-existing item
        response = self.get('rest/announcements/item?key=not_existent_key')
        json_dict = json.loads(response.body)
        assert json_dict['status'] == 404
class StudentAspectTest(actions.TestBase):
    """Test the site from the Student perspective."""

    def test_view_announcements(self):
        """Test student aspect of announcements."""
        email = 'test_announcements@google.com'
        name = 'Test Announcements'

        actions.login(email)
        actions.register(self, name)

        # Check no announcements yet.
        response = actions.view_announcements(self)
        assert_does_not_contain('Example Announcement', response.body)
        assert_does_not_contain('Welcome to the final class!', response.body)
        assert_contains('No announcements yet.', response.body)
        actions.logout()

        # Login as admin and add announcements (admin visit seeds samples).
        actions.login('admin@sample.com', True)
        actions.register(self, 'admin')
        response = actions.view_announcements(self)
        actions.logout()

        # Check we can see non-draft announcements only.
        actions.login(email)
        response = actions.view_announcements(self)
        assert_contains('Example Announcement', response.body)
        assert_does_not_contain('Welcome to the final class!', response.body)
        assert_does_not_contain('No announcements yet.', response.body)

        # Check no access to draft announcements via REST handler.
        items = AnnouncementEntity.all().fetch(1000)
        for item in items:
            response = self.get('rest/announcements/item?key=%s' % item.key())
            if item.is_draft:
                json_dict = json.loads(response.body)
                assert json_dict['status'] == 401
            else:
                assert_equals(response.status_int, 200)

    def test_registration(self):
        """Test student registration."""
        email = 'test_registration@example.com'
        name1 = 'Test Student'
        name2 = 'John Smith'
        name3 = u'Pavel Simakov (тест данные)'

        actions.login(email)

        # Register, then verify the profile reflects each name change and
        # re-registration.
        actions.register(self, name1)
        actions.check_profile(self, name1)

        actions.change_name(self, name2)
        actions.unregister(self)

        actions.register(self, name3)
        actions.check_profile(self, name3)

    def test_registration_closed(self):
        """Test student registration when course is full."""
        email = 'test_registration_closed@example.com'
        name = 'Test Registration Closed'

        # Override course.yaml settings by patching app_context.
        get_environ_old = sites.ApplicationContext.get_environ

        def get_environ_new(self):
            environ = get_environ_old(self)
            environ['reg_form']['can_register'] = False
            return environ

        sites.ApplicationContext.get_environ = get_environ_new

        # Try to login and register; any failure of register() is the
        # expected outcome, while success raises ShouldHaveFailedByNow.
        actions.login(email)
        try:
            actions.register(self, name)
            raise actions.ShouldHaveFailedByNow(
                'Expected to fail: new registrations should not be allowed '
                'when registration is closed.')
        except actions.ShouldHaveFailedByNow as e:
            raise e
        except:
            pass

        # Clean up app_context.
        # NOTE(review): this cleanup is skipped when ShouldHaveFailedByNow
        # propagates above, leaking the monkey-patch into later tests; a
        # try/finally would be safer — confirm before changing.
        sites.ApplicationContext.get_environ = get_environ_old

    def test_permissions(self):
        """Test student permissions, and which pages they can view."""
        email = 'test_permissions@example.com'
        name = 'Test Permissions'

        actions.login(email)
        actions.register(self, name)
        actions.Permissions.assert_enrolled(self)

        actions.unregister(self)
        actions.Permissions.assert_unenrolled(self)

        actions.register(self, name)
        actions.Permissions.assert_enrolled(self)

    def test_login_and_logout(self):
        """Test if login and logout behave as expected."""
        email = 'test_login_logout@example.com'

        actions.Permissions.assert_logged_out(self)
        actions.login(email)

        actions.Permissions.assert_unenrolled(self)

        actions.logout()
        actions.Permissions.assert_logged_out(self)

    def test_lesson_activity_navigation(self):
        """Test navigation between lesson/activity pages."""
        email = 'test_lesson_activity_navigation@example.com'
        name = 'Test Lesson Activity Navigation'

        actions.login(email)
        actions.register(self, name)

        # First lesson: no 'Previous', has 'Next'.
        response = self.get('unit?unit=1&lesson=1')
        assert_does_not_contain('Previous Page', response.body)
        assert_contains('Next Page', response.body)

        # Middle lesson: both directions available.
        response = self.get('unit?unit=2&lesson=3')
        assert_contains('Previous Page', response.body)
        assert_contains('Next Page', response.body)

        # Last lesson: no 'Next', shows 'End'.
        response = self.get('unit?unit=3&lesson=5')
        assert_contains('Previous Page', response.body)
        assert_does_not_contain('Next Page', response.body)
        assert_contains('End', response.body)

    def test_attempt_activity_event(self):
        """Test activity attempt generates event."""
        email = 'test_attempt_activity_event@example.com'
        name = 'Test Attempt Activity Event'

        actions.login(email)
        actions.register(self, name)

        # Enable event recording.
        config.Registry.db_overrides[
            lessons.CAN_PERSIST_ACTIVITY_EVENTS.name] = True

        # Prepare event.
        request = {}
        request['source'] = 'test-source'
        request['payload'] = json.dumps({'Alice': u'Bob (тест данные)'})

        # Check XSRF token is required.
        response = self.post('rest/events?%s' % urllib.urlencode(
            {'request': json.dumps(request)}), {})
        assert_equals(response.status_int, 200)
        assert_contains('"status": 403', response.body)

        # Check PUT works.
        request['xsrf_token'] = XsrfTokenManager.create_xsrf_token(
            'event-post')
        response = self.post('rest/events?%s' % urllib.urlencode(
            {'request': json.dumps(request)}), {})
        assert_equals(response.status_int, 200)
        assert not response.body

        # Check event is properly recorded in the course namespace.
        old_namespace = namespace_manager.get_namespace()
        namespace_manager.set_namespace(self.namespace)
        try:
            events = models.EventEntity.all().fetch(1000)
            assert 1 == len(events)
            assert_contains(
                u'Bob (тест данные)', json.loads(events[0].data)['Alice'])
        finally:
            namespace_manager.set_namespace(old_namespace)

        # Clean up.
        config.Registry.db_overrides = {}

    def test_two_students_dont_see_each_other_pages(self):
        """Test a user can't see another user pages."""
        email1 = 'user1@foo.com'
        name1 = 'User 1'
        email2 = 'user2@foo.com'
        name2 = 'User 2'

        # Login as one user and view 'unit' and other pages, which are not
        # cached.
        actions.login(email1)
        actions.register(self, name1)
        actions.Permissions.assert_enrolled(self)
        response = actions.view_unit(self)
        assert_contains(email1, response.body)
        actions.logout()

        # Login as another user and check that 'unit' and other pages show
        # the correct new email.
        actions.login(email2)
        actions.register(self, name2)
        actions.Permissions.assert_enrolled(self)
        response = actions.view_unit(self)
        assert_contains(email2, response.body)
        actions.logout()

    def test_xsrf_defence(self):
        """Test defense against XSRF attack."""
        email = 'test_xsrf_defence@example.com'
        name = 'Test Xsrf Defence'

        actions.login(email)
        actions.register(self, name)

        # Tamper with the token on the profile edit form; the server must
        # reject the submission outright.
        response = self.get('student/home')
        response.form.set('name', 'My New Name')
        response.form.set('xsrf_token', 'bad token')

        response = response.form.submit(expect_errors=True)
        assert_equals(response.status_int, 403)
class StaticHandlerTest(actions.TestBase):
    """Check serving of static resources."""

    def _assert_shared_cacheable(self, url):
        """Fetches url and asserts it carries public, time-limited caching."""
        response = self.get(url)
        assert_equals(response.status_int, 200)
        cache_control = response.headers['Cache-Control']
        assert_contains('max-age=600', cache_control)
        assert_contains('public', cache_control)
        assert_does_not_contain('no-cache', cache_control)

    def test_static_files_cache_control(self):
        """Test static/zip handlers use proper Cache-Control headers."""
        # Resource served by the plain static file handler.
        self._assert_shared_cacheable('/assets/css/main.css')
        # Resource served out of a bundled zip archive.
        self._assert_shared_cacheable(
            '/static/inputex-3.1.0/src/inputex/assets/skins/sam/inputex.css')
class AssessmentTest(actions.TestBase):
    """Test for assessments."""

    def submit_assessment(self, name, args):
        """Test student taking an assessment.

        Args:
            name: assessment name suffix used in the page URL and script name.
            args: POST parameters, e.g. 'assessment_type', 'score', 'answers'.

        Returns:
            The response to the 'answer' POST.
        """
        response = self.get('assessment?name=%s' % name)
        assert_contains(
            '<script src="assets/js/assessment-%s.js"></script>' % name,
            response.body)

        # Extract XSRF token from the page.
        match = re.search(r'assessmentXsrfToken = [\']([^\']+)', response.body)
        assert match
        xsrf_token = match.group(1)
        args['xsrf_token'] = xsrf_token

        response = self.post('answer', args)
        assert_equals(response.status_int, 200)
        return response

    def test_course_pass(self):
        """Test student passing final exam."""
        email = 'test_pass@google.com'
        name = 'Test Pass'

        post = {'assessment_type': 'postcourse', 'score': '100.00'}

        # Register.
        actions.login(email)
        actions.register(self, name)

        # Submit answer.
        response = self.submit_assessment('Post', post)
        assert_equals(response.status_int, 200)
        # NOTE(review): a 100.00 postcourse submission reports 70% here --
        # presumably the overall score is weighted across assessments; confirm
        # against the scoring logic if this expectation ever changes.
        assert_contains('Your score is 70%', response.body)
        assert_contains('you have passed the course', response.body)

        # Check that the result shows up on the profile page.
        response = actions.check_profile(self, name)
        assert_contains('70', response.body)
        assert_contains('100', response.body)

    def test_assessments(self):
        """Test assessment scores are properly submitted and summarized."""
        email = 'test_assessments@google.com'
        name = 'Test Assessments'

        pre_answers = [{'foo': 'bar'}, {'Alice': u'Bob (тест данные)'}]
        pre = {
            'assessment_type': 'precourse', 'score': '1.00',
            'answers': json.dumps(pre_answers)}
        mid = {'assessment_type': 'midcourse', 'score': '2.00'}
        post = {'assessment_type': 'postcourse', 'score': '3.00'}
        second_mid = {'assessment_type': 'midcourse', 'score': '1.00'}
        second_post = {'assessment_type': 'postcourse', 'score': '100000'}

        # Register.
        actions.login(email)
        actions.register(self, name)

        # Datastore lookups below must run in the course's own namespace;
        # the finally block restores the caller's namespace.
        old_namespace = namespace_manager.get_namespace()
        namespace_manager.set_namespace(self.namespace)
        try:
            # Check that no scores exist right now.
            student = models.Student.get_enrolled_student_by_email(email)
            assert len(get_all_scores(student)) == 0  # pylint: disable=C6411

            # Submit assessments and check the numbers of scores recorded.
            self.submit_assessment('Pre', pre)
            student = models.Student.get_enrolled_student_by_email(email)
            assert len(get_all_scores(student)) == 1

            self.submit_assessment('Mid', mid)
            student = models.Student.get_enrolled_student_by_email(email)
            assert len(get_all_scores(student)) == 2

            self.submit_assessment('Post', post)
            student = models.Student.get_enrolled_student_by_email(email)
            # Check final score also includes overall_score.
            assert len(get_all_scores(student)) == 4

            # Check assessment answers.
            answers = json.loads(
                models.StudentAnswersEntity.get_by_key_name(
                    student.user_id).data)
            assert pre_answers == answers['precourse']
            # pylint: disable-msg=g-explicit-bool-comparison
            assert [] == answers['midcourse']
            assert [] == answers['postcourse']
            # pylint: enable-msg=g-explicit-bool-comparison

            # Check that scores are recorded properly.  The overall score is
            # expected to weight mid at 30% and post at 70%.
            student = models.Student.get_enrolled_student_by_email(email)
            assert int(get_score(student, 'precourse')) == 1
            assert int(get_score(student, 'midcourse')) == 2
            assert int(get_score(student, 'postcourse')) == 3
            assert (int(get_score(student, 'overall_score')) ==
                    int((0.30 * 2) + (0.70 * 3)))

            # Try posting a new midcourse exam with a lower score;
            # nothing should change.
            self.submit_assessment('Mid', second_mid)
            student = models.Student.get_enrolled_student_by_email(email)
            assert int(get_score(student, 'precourse')) == 1
            assert int(get_score(student, 'midcourse')) == 2
            assert int(get_score(student, 'postcourse')) == 3
            assert (int(get_score(student, 'overall_score')) ==
                    int((0.30 * 2) + (0.70 * 3)))

            # Now try posting a postcourse exam with a higher score and note
            # the changes.
            self.submit_assessment('Post', second_post)
            student = models.Student.get_enrolled_student_by_email(email)
            assert int(get_score(student, 'precourse')) == 1
            assert int(get_score(student, 'midcourse')) == 2
            assert int(get_score(student, 'postcourse')) == 100000
            assert (int(get_score(student, 'overall_score')) ==
                    int((0.30 * 2) + (0.70 * 100000)))
        finally:
            namespace_manager.set_namespace(old_namespace)
# TODO(psimakov): if mixin method names overlap, we don't run them all; must fix
class CourseUrlRewritingTest(
    StudentAspectTest, AssessmentTest, CourseAuthorAspectTest, AdminAspectTest):
    """Run existing tests using rewrite rules for '/courses/pswg' base URL."""

    def setUp(self):  # pylint: disable-msg=g-bad-name
        super(CourseUrlRewritingTest, self).setUp()
        self.base = '/courses/pswg'
        self.namespace = 'gcb-courses-pswg-tests-ns'
        # Register a single course mounted at the rewritten base URL.
        courses = 'course:%s:/:%s' % (self.base, self.namespace)
        os.environ[sites.GCB_COURSES_CONFIG_ENV_VAR_NAME] = courses

    def tearDown(self):  # pylint: disable-msg=g-bad-name
        del os.environ[sites.GCB_COURSES_CONFIG_ENV_VAR_NAME]
        super(CourseUrlRewritingTest, self).tearDown()

    def canonicalize(self, href, response=None):
        """Canonicalize URL's using either <base> or self.base."""
        # Check if already canonicalized.
        if href.startswith(
            self.base) or utils.ApplicationHandler.is_absolute(href):
            pass
        else:
            # Look for <base> tag in the response to compute the canonical URL.
            if response:
                return super(CourseUrlRewritingTest, self).canonicalize(
                    href, response)

            # Prepend self.base to compute the canonical URL.
            if not href.startswith('/'):
                href = '/%s' % href
            href = '%s%s' % (self.base, href)

        self.audit_url(href)
        return href
def remove_dir(dir_name):
    """Delete a directory, tolerating a directory that is already gone."""
    logging.info('removing folder: %s', dir_name)
    if not os.path.exists(dir_name):
        return
    shutil.rmtree(dir_name)
    # Fail loudly if the tree somehow survived the removal.
    if os.path.exists(dir_name):
        raise Exception('Failed to delete directory: %s' % dir_name)
def clean_dir(dir_name):
    """Clean a directory."""
    # Start from an empty slate: drop any previous contents first.
    remove_dir(dir_name)

    logging.info('creating folder: %s', dir_name)
    os.makedirs(dir_name)
    if os.path.exists(dir_name):
        return
    raise Exception('Failed to create directory: %s' % dir_name)
def clone_canonical_course_data(src, dst):
    """Makes a copy of canonical course content."""
    clean_dir(dst)

    # Course content lives in these three folders plus the top-level manifest.
    for folder in ('assets', 'data', 'views'):
        shutil.copytree(
            os.path.join(src, folder),
            os.path.join(dst, folder))
    shutil.copy(
        os.path.join(src, 'course.yaml'),
        os.path.join(dst, 'course.yaml'))

    # Make all files writable.
    for root, unused_dirs, files in os.walk(dst):
        for afile in files:
            os.chmod(os.path.join(root, afile), 0o777)
class GeneratedCourse(object):
    """A helper class for a dynamically generated course content."""

    @classmethod
    def set_data_home(cls, test):
        """All data for this test will be placed here."""
        cls.data_home = '/tmp/experimental/coursebuilder/test-data/{0}'.format(
            test.__class__.__name__)

    def __init__(self, ns):
        # 'ns' is the short course suffix ('a', 'b', 'ru', ...) used to derive
        # every other per-course value below.
        self.path = ns

    @property
    def namespace(self):
        return 'ns{0}'.format(self.path)

    @property
    def title(self):
        return u'Power title-{0} Searching with Google (тест данные)'.format(
            self.path)

    @property
    def unit_title(self):
        return u'Interpreting unit-title-{0} results (тест данные)'.format(
            self.path)

    @property
    def lesson_title(self):
        return u'Word lesson-title-{0} order matters (тест данные)'.format(
            self.path)

    @property
    def head(self):
        return '<!-- head-{0} -->'.format(self.path)

    @property
    def css(self):
        return '<!-- css-{0} -->'.format(self.path)

    @property
    def home(self):
        return os.path.join(self.data_home, 'data-{0}'.format(self.path))

    @property
    def email(self):
        return 'walk_the_course_named_{0}@google.com'.format(self.path)

    @property
    def name(self):
        return 'Walk The Course Named {0}'.format(self.path)
class MultipleCoursesTestBase(actions.TestBase):
    """Configures several courses for running concurrently."""

    def modify_file(self, filename, find, replace):
        """Read, modify and write back the file.

        Asserts that 'replace' is not already present and that the
        substitution of 'find' actually took place, so stale fixtures
        fail loudly.
        """
        # Use 'with' so handles close promptly even when an assertion below
        # fires; the previous version leaked both the read and write handles.
        with open(filename, 'r') as stream:
            text = stream.read().decode('utf-8')

        # Make sure target text is not in the file.
        assert replace not in text
        text = text.replace(find, replace)
        assert replace in text

        with open(filename, 'w') as stream:
            stream.write(text.encode('utf-8'))

    def modify_canonical_course_data(self, course):
        """Modify canonical content by adding unique bits to it."""
        self.modify_file(
            os.path.join(course.home, 'course.yaml'),
            'title: \'Power Searching with Google\'',
            'title: \'%s\'' % course.title)

        self.modify_file(
            os.path.join(course.home, 'data/unit.csv'),
            ',Interpreting results,',
            ',%s,' % course.unit_title)

        self.modify_file(
            os.path.join(course.home, 'data/lesson.csv'),
            ',Word order matters,',
            ',%s,' % course.lesson_title)

        self.modify_file(
            os.path.join(course.home, 'data/lesson.csv'),
            ',Interpreting results,',
            ',%s,' % course.unit_title)

        self.modify_file(
            os.path.join(course.home, 'views/base.html'),
            '<head>',
            '<head>\n%s' % course.head)

        self.modify_file(
            os.path.join(course.home, 'assets/css/main.css'),
            'html {',
            '%s\nhtml {' % course.css)

    def prepare_course_data(self, course):
        """Create unique course content for a course."""
        clone_canonical_course_data(self.bundle_root, course.home)
        self.modify_canonical_course_data(course)

    def setUp(self):  # pylint: disable-msg=g-bad-name
        """Configure the test."""
        super(MultipleCoursesTestBase, self).setUp()

        GeneratedCourse.set_data_home(self)

        self.course_a = GeneratedCourse('a')
        self.course_b = GeneratedCourse('b')
        self.course_ru = GeneratedCourse('ru')

        # Override BUNDLE_ROOT; tearDown() restores the saved value.
        self.bundle_root = appengine_config.BUNDLE_ROOT
        appengine_config.BUNDLE_ROOT = GeneratedCourse.data_home

        # Prepare course content.
        clean_dir(GeneratedCourse.data_home)
        self.prepare_course_data(self.course_a)
        self.prepare_course_data(self.course_b)
        self.prepare_course_data(self.course_ru)

        # Setup one course for I18N.
        self.modify_file(
            os.path.join(self.course_ru.home, 'course.yaml'),
            'locale: \'en_US\'',
            'locale: \'ru_RU\'')

        # Configure courses.
        courses = '%s, %s, %s' % (
            'course:/courses/a:/data-a:nsa',
            'course:/courses/b:/data-b:nsb',
            'course:/courses/ru:/data-ru:nsru')
        os.environ[sites.GCB_COURSES_CONFIG_ENV_VAR_NAME] = courses

    def tearDown(self):  # pylint: disable-msg=g-bad-name
        """Clean up."""
        del os.environ[sites.GCB_COURSES_CONFIG_ENV_VAR_NAME]
        appengine_config.BUNDLE_ROOT = self.bundle_root
        super(MultipleCoursesTestBase, self).tearDown()

    def walk_the_course(
        self, course, first_time=True, is_admin=False, logout=True):
        """Visit a course as a Student would."""

        # Check normal user has no access.
        actions.login(course.email, is_admin)

        # Test schedule.
        if first_time:
            response = self.testapp.get('/courses/%s/preview' % course.path)
        else:
            response = self.testapp.get('/courses/%s/course' % course.path)
        assert_contains(course.title, response.body)
        assert_contains(course.unit_title, response.body)
        assert_contains(course.head, response.body)

        # Tests static resource.
        response = self.testapp.get(
            '/courses/%s/assets/css/main.css' % course.path)
        assert_contains(course.css, response.body)

        if first_time:
            # Test registration.
            response = self.get('/courses/%s/register' % course.path)
            assert_contains(course.title, response.body)
            assert_contains(course.head, response.body)
            response.form.set('form01', course.name)
            response.form.action = '/courses/%s/register' % course.path
            response = self.submit(response.form)

            assert_contains(course.title, response.body)
            assert_contains(course.head, response.body)
            assert_contains(course.title, response.body)
            assert_contains(
                '//groups.google.com/group/My-Course-Announce', response.body)
            assert_contains(
                '//groups.google.com/group/My-Course', response.body)

        # Check lesson page.
        response = self.testapp.get(
            '/courses/%s/unit?unit=1&lesson=5' % course.path)
        assert_contains(course.title, response.body)
        assert_contains(course.lesson_title, response.body)
        assert_contains(course.head, response.body)

        if logout:
            actions.logout()
class MultipleCoursesTest(MultipleCoursesTestBase):
    """Test several courses running concurrently."""

    def test_courses_are_isolated(self):
        """Test each course serves its own assets, views and data."""

        # Pretend students visit courses.
        self.walk_the_course(self.course_a)
        self.walk_the_course(self.course_b)
        self.walk_the_course(self.course_a, False)
        self.walk_the_course(self.course_b, False)

        # Check course namespaced data.
        self.validate_course_data(self.course_a)
        self.validate_course_data(self.course_b)

        # Check default namespace.
        assert (
            namespace_manager.get_namespace() ==
            appengine_config.DEFAULT_NAMESPACE_NAME)

        # No student records may have leaked into the default namespace.
        assert not models.Student.all().fetch(1000)

    def validate_course_data(self, course):
        """Check course data is valid."""
        old_namespace = namespace_manager.get_namespace()
        namespace_manager.set_namespace(course.namespace)
        try:
            # Expect exactly the one student registered by walk_the_course().
            students = models.Student.all().fetch(1000)
            assert len(students) == 1
            for student in students:
                assert_equals(course.email, student.key().name())
                assert_equals(course.name, student.name)
        finally:
            # Always restore the caller's namespace.
            namespace_manager.set_namespace(old_namespace)
class I18NTest(MultipleCoursesTestBase):
    """Test courses running in different locales and containing I18N content."""

    def test_csv_supports_utf8(self):
        """Test UTF-8 content in CSV file is handled correctly."""
        title_ru = u'Найди факты быстрее'

        csv_file = os.path.join(self.course_ru.home, 'data/unit.csv')
        self.modify_file(
            csv_file, ',Find facts faster,', ',%s,' % title_ru)
        self.modify_file(
            os.path.join(self.course_ru.home, 'data/lesson.csv'),
            ',Find facts faster,', ',%s,' % title_ru)

        # Read the rows back; use 'with' so the file handle is closed instead
        # of leaked as in the previous csv.reader(open(...)) form.
        with open(csv_file) as stream:
            rows = list(csv.reader(stream))
        assert title_ru == rows[6][3].decode('utf-8')

        response = self.get('/courses/%s/preview' % self.course_ru.path)
        assert_contains(title_ru, response.body)

        # Tests student perspective.
        self.walk_the_course(self.course_ru, first_time=True)
        self.walk_the_course(self.course_ru, first_time=False)

        # Test course author dashboard.
        self.walk_the_course(
            self.course_ru, first_time=False, is_admin=True, logout=False)

        def assert_page_contains(page_name, text_array):
            """Fetch one dashboard page and check all texts appear on it."""
            dashboard_url = '/courses/%s/dashboard' % self.course_ru.path
            response = self.get('%s?action=%s' % (dashboard_url, page_name))
            for text in text_array:
                assert_contains(text, response.body)

        assert_page_contains('', [
            title_ru, self.course_ru.unit_title, self.course_ru.lesson_title])
        assert_page_contains(
            'assets', [self.course_ru.title, self.course_ru.home])
        assert_page_contains(
            'settings', [self.course_ru.title, self.course_ru.home])

        # Clean up.
        actions.logout()

    def test_i18n(self):
        """Test course is properly internationalized."""
        response = self.get('/courses/%s/preview' % self.course_ru.path)
        assert_contains_all_of(
            [u'Вход', u'Регистрация', u'Расписание', u'Курс'], response.body)
class VirtualFileSystemTest(
    StudentAspectTest, AssessmentTest, CourseAuthorAspectTest,
    StaticHandlerTest):
    """Run existing tests using virtual local file system."""

    def setUp(self):  # pylint: disable-msg=g-bad-name
        """Configure the test."""
        super(VirtualFileSystemTest, self).setUp()

        GeneratedCourse.set_data_home(self)

        # Override BUNDLE_ROOT; tearDown() restores the saved value.
        self.bundle_root = appengine_config.BUNDLE_ROOT
        appengine_config.BUNDLE_ROOT = GeneratedCourse.data_home

        # Prepare course content.
        home_folder = os.path.join(GeneratedCourse.data_home, 'data-v')
        clone_canonical_course_data(self.bundle_root, home_folder)

        # Configure course.
        self.namespace = 'nsv'
        os.environ[sites.GCB_COURSES_CONFIG_ENV_VAR_NAME] = (
            'course:/:/data-vfs:%s' % self.namespace)

        # Modify app_context filesystem to map /data-v to /data-vfs.
        def after_create(unused_cls, instance):
            # pylint: disable-msg=protected-access
            # NOTE(review): physical files were cloned into 'data-v' while the
            # course is configured to read 'data-vfs'; the read-only VFS
            # bridges the two -- confirm the argument order against
            # vfs.LocalReadOnlyFileSystem before changing this.
            instance._fs = vfs.LocalReadOnlyFileSystem(
                os.path.join(GeneratedCourse.data_home, 'data-vfs'),
                home_folder)

        sites.ApplicationContext.after_create = after_create

    def tearDown(self):  # pylint: disable-msg=g-bad-name
        """Clean up."""
        del os.environ[sites.GCB_COURSES_CONFIG_ENV_VAR_NAME]
        appengine_config.BUNDLE_ROOT = self.bundle_root
        super(VirtualFileSystemTest, self).tearDown()
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @author: psimakov@google.com (Pavel Simakov)
"""A collection of actions for testing Course Builder pages."""
import logging
import os
import re
import appengine_config
from controllers import sites
from controllers import utils
import main
import suite
from google.appengine.api import namespace_manager
# All URLs referred to from all the pages.
UNIQUE_URLS_FOUND = {}

# Marker comments the tests expect to find in every page rendered from the
# base template (see view_course/view_unit below).
BASE_HOOK_POINTS = [
    '<!-- base.before_head_tag_ends -->',
    '<!-- base.after_body_tag_begins -->',
    '<!-- base.after_navbar_begins -->',
    '<!-- base.before_navbar_ends -->',
    '<!-- base.after_top_content_ends -->',
    '<!-- base.after_main_content_ends -->',
    '<!-- base.before_body_tag_ends -->']

# Marker comments expected only on unit pages.
UNIT_HOOK_POINTS = [
    '<!-- unit.after_leftnav_begins -->',
    '<!-- unit.before_leftnav_ends -->',
    '<!-- unit.after_content_begins -->',
    '<!-- unit.before_content_ends -->']

# Marker comments expected only on the course preview page.
PREVIEW_HOOK_POINTS = [
    '<!-- preview.after_top_content_ends -->',
    '<!-- preview.after_main_content_ends -->']
class ShouldHaveFailedByNow(Exception):
    """Special exception raised when a prior method did not raise."""
class TestBase(suite.BaseTestClass):
    """Contains methods common to all tests."""

    def getApp(self):  # pylint: disable-msg=g-bad-name
        """Builds the WSGI app under test with all handlers bound."""
        main.debug = True
        sites.ApplicationRequestHandler.bind(main.urls)
        return main.app

    def assert_default_namespace(self):
        """Fails unless the current datastore namespace is the default one."""
        ns = namespace_manager.get_namespace()
        if not ns == appengine_config.DEFAULT_NAMESPACE_NAME:
            raise Exception('Expected default namespace, found: %s' % ns)

    def setUp(self):  # pylint: disable-msg=g-bad-name
        super(TestBase, self).setUp()
        self.assert_default_namespace()
        # Subclasses override these to exercise a course mounted off-root
        # (see CourseUrlRewritingTest).
        self.namespace = ''
        self.base = '/'

    def tearDown(self):  # pylint: disable-msg=g-bad-name
        # A test must not leak a namespace change into the next test.
        self.assert_default_namespace()
        super(TestBase, self).tearDown()

    def canonicalize(self, href, response=None):
        """Create absolute URL using <base> if defined, '/' otherwise."""
        if href.startswith('/') or utils.ApplicationHandler.is_absolute(href):
            pass
        else:
            base = '/'
            if response:
                # Honor the page's own <base href=...> when it is present.
                match = re.search(
                    r'<base href=[\'"]?([^\'" >]+)', response.body)
                if match and not href.startswith('/'):
                    base = match.groups()[0]
            href = '%s%s' % (base, href)
        self.audit_url(href)
        return href

    def audit_url(self, url):
        """Record for audit purposes the URL we encountered."""
        UNIQUE_URLS_FOUND[url] = True

    def hook_response(self, response):
        """Modify response.goto() to compute URL using <base>, if defined."""
        if response.status_int == 200:
            self.check_response_hrefs(response)

        # Wrap goto() so relative links resolve as a browser would.
        gotox = response.goto

        def new_goto(href, method='get', **args):
            return gotox(self.canonicalize(href), method, **args)

        response.goto = new_goto
        return response

    def check_response_hrefs(self, response):
        """Check response page URLs are properly formatted/canonicalized."""
        hrefs = re.findall(r'href=[\'"]?([^\'" >]+)', response.body)
        srcs = re.findall(r'src=[\'"]?([^\'" >]+)', response.body)
        for url in hrefs + srcs:
            # We expect all internal URLs to be relative: 'asset/css/main.css',
            # and use <base> tag. All others URLs must be whitelisted below.
            if url.startswith('/'):
                absolute = url.startswith('//')
                root = url == '/'
                canonical = url.startswith(self.base)
                allowed = url.startswith('/admin') or url.startswith('/_ah/')

                if not (absolute or root or canonical or allowed):
                    raise Exception('Invalid reference \'%s\' in:\n%s' % (
                        url, response.body))

            self.audit_url(self.canonicalize(url, response))

    def get(self, url):
        url = self.canonicalize(url)
        logging.info('HTTP Get: %s', url)
        response = self.testapp.get(url)
        return self.hook_response(response)

    def post(self, url, params):
        url = self.canonicalize(url)
        logging.info('HTTP Post: %s', url)
        response = self.testapp.post(url, params)
        return self.hook_response(response)

    def put(self, url, params):
        url = self.canonicalize(url)
        logging.info('HTTP Put: %s', url)
        response = self.testapp.put(url, params)
        return self.hook_response(response)

    def click(self, response, name):
        logging.info('Link click: %s', name)
        response = response.click(name)
        return self.hook_response(response)

    def submit(self, form):
        logging.info('Form submit: %s', form)
        response = form.submit()
        return self.hook_response(response)
def assert_equals(expected, actual):
    """Raises unless 'expected' equals 'actual'."""
    if expected == actual:
        return
    raise Exception('Expected \'%s\', does not match actual \'%s\'.' %
                    (expected, actual))
def to_unicode(text):
    """Converts text to Unicode if is not Unicode already."""
    # Python 2: byte strings are decoded as UTF-8; unicode passes through.
    if isinstance(text, unicode):
        return text
    return unicode(text, 'utf-8')
def assert_contains(needle, haystack):
    """Raises unless 'needle' occurs within 'haystack' (Unicode-safe)."""
    if to_unicode(needle) in to_unicode(haystack):
        return
    raise Exception('Can\'t find \'%s\' in \'%s\'.' % (needle, haystack))
def assert_contains_all_of(needles, haystack):
    """Raises unless every needle occurs within 'haystack'."""
    # Convert the haystack once; to_unicode() is a pure conversion.
    text = to_unicode(haystack)
    for needle in needles:
        if to_unicode(needle) not in text:
            raise Exception(
                'Can\'t find \'%s\' in \'%s\'.' % (needle, haystack))
def assert_does_not_contain(needle, haystack):
    """Raises if 'needle' occurs within 'haystack'."""
    if to_unicode(needle) not in to_unicode(haystack):
        return
    raise Exception('Found \'%s\' in \'%s\'.' % (needle, haystack))
def assert_contains_none_of(needles, haystack):
    """Raises if any needle occurs within 'haystack'."""
    # Convert the haystack once; to_unicode() is a pure conversion.
    text = to_unicode(haystack)
    for needle in needles:
        if to_unicode(needle) in text:
            raise Exception('Found \'%s\' in \'%s\'.' % (needle, haystack))
def assert_none_fail(browser, callbacks):
    """Invokes all callbacks and expects each one not to fail."""
    # Any exception raised by a callback propagates and fails the caller.
    for page_check in callbacks:
        page_check(browser)
def assert_all_fail(browser, callbacks):
    """Invokes all callbacks and expects each one to fail."""
    for callback in callbacks:
        try:
            callback(browser)
        except ShouldHaveFailedByNow:
            # Our own marker exception must never be swallowed.
            raise
        except Exception:
            # Any other exception is the expected outcome; move on.
            continue
        # The callback returned normally -- that is the real failure.
        raise ShouldHaveFailedByNow(
            'Expected to fail: %s().' % callback.__name__)
def login(email, is_admin=False):
    """Simulates a login by populating the user-service environment vars."""
    os.environ['USER_EMAIL'] = email
    os.environ['USER_ID'] = 'user1'
    os.environ['USER_IS_ADMIN'] = '1' if is_admin else '0'
def get_current_user_email():
    """Returns the simulated logged-in user's email; raises if logged out."""
    email = os.environ['USER_EMAIL']
    if email:
        return email
    raise Exception('No current user.')
def logout():
    """Clears the simulated login state set by login()."""
    for variable in ('USER_EMAIL', 'USER_ID', 'USER_IS_ADMIN'):
        del os.environ[variable]
def register(browser, name):
    """Registers a new student with the given name."""
    # An anonymous visit to the root is redirected.
    response = browser.get('/')
    assert_equals(response.status_int, 302)

    response = view_registration(browser)

    # 'form01' is the name field on the registration form.
    response.form.set('form01', name)
    response = browser.submit(response.form)

    assert_contains('Thank you for registering for', response.body)
    check_profile(browser, name)
def check_profile(browser, name):
    """Verifies the profile page shows the given name and current email."""
    response = view_my_profile(browser)
    assert_contains('Email', response.body)
    assert_contains(name, response.body)
    assert_contains(get_current_user_email(), response.body)
    return response
def view_registration(browser):
    """Opens the registration page and checks its extension marker exists."""
    response = browser.get('register')
    assert_contains('What is your name?', response.body)
    assert_contains_all_of([
        '<!-- reg_form.additional_registration_fields -->'], response.body)
    return response
def view_preview(browser):
    """Views /preview page."""
    response = browser.get('preview')
    assert_contains(' the stakes are high.', response.body)
    assert_contains(
        '<li><p class="top_content">Pre-course assessment</p></li>',
        response.body)

    # Preview pages carry their own hook markers but none of the unit ones.
    assert_contains_none_of(UNIT_HOOK_POINTS, response.body)
    assert_contains_all_of(PREVIEW_HOOK_POINTS, response.body)
    return response
def view_course(browser):
    """Views /course page."""
    response = browser.get('course')

    assert_contains(' the stakes are high.', response.body)
    assert_contains('<a href="assessment?name=Pre">Pre-course assessment</a>',
                    response.body)
    assert_contains(get_current_user_email(), response.body)

    # The course page carries the base hook markers but neither the unit
    # nor the preview ones.
    assert_contains_all_of(BASE_HOOK_POINTS, response.body)
    assert_contains_none_of(UNIT_HOOK_POINTS, response.body)
    assert_contains_none_of(PREVIEW_HOOK_POINTS, response.body)

    return response
def view_unit(browser):
    """Views /unit page."""
    response = browser.get('unit?unit=1&lesson=1')

    assert_contains('Unit 1 - Introduction', response.body)
    assert_contains('1.3 How search works', response.body)
    assert_contains('1.6 Finding text on a web page', response.body)
    assert_contains('http://www.youtube.com/embed/1ppwmxidyIE', response.body)
    assert_contains(get_current_user_email(), response.body)

    # Unit pages must carry both the base and the unit hook markers, but
    # not the preview-only ones.
    assert_contains_all_of(BASE_HOOK_POINTS, response.body)
    assert_contains_all_of(UNIT_HOOK_POINTS, response.body)
    assert_contains_none_of(PREVIEW_HOOK_POINTS, response.body)

    return response
def view_activity(browser):
    """Views an activity page and checks its script is wired in."""
    response = browser.get('activity?unit=1&lesson=2')
    assert_contains('<script src="assets/js/activity-1.2.js"></script>',
                    response.body)
    assert_contains(get_current_user_email(), response.body)
    return response
def view_announcements(browser):
    """Views the announcements page as the current user."""
    response = browser.get('announcements')
    assert_equals(response.status_int, 200)
    assert_contains(get_current_user_email(), response.body)
    return response
def view_my_profile(browser):
    """Views the student profile page."""
    response = browser.get('student/home')
    assert_contains('Date enrolled', response.body)
    assert_contains(get_current_user_email(), response.body)
    return response
def view_forum(browser):
    """Views the forum page and checks the embedded forum is configured."""
    response = browser.get('forum')
    assert_contains('document.getElementById("forum_embed").src =',
                    response.body)
    assert_contains(get_current_user_email(), response.body)
    return response
def view_assessments(browser):
    """Views each assessment page and checks its script is present."""
    for name in ['Pre', 'Mid', 'Fin']:
        response = browser.get('assessment?name=%s' % name)
        assert 'assets/js/assessment-%s.js' % name in response.body
        assert_equals(response.status_int, 200)
        assert_contains(get_current_user_email(), response.body)
def change_name(browser, new_name):
    """Submits the profile form with a new student name and verifies it."""
    response = browser.get('student/home')

    response.form.set('name', new_name)
    response = browser.submit(response.form)

    # A successful rename redirects back to the profile page.
    assert_equals(response.status_int, 302)
    check_profile(browser, new_name)
def unregister(browser):
    """Walks a student through the unenroll confirmation flow."""
    response = browser.get('student/home')
    response = browser.click(response, 'Unenroll')

    assert_contains('to unenroll from', response.body)
    browser.submit(response.form)
class Permissions(object):
    """Defines who can see what."""

    @classmethod
    def get_logged_out_allowed_pages(cls):
        """Returns all pages that a logged-out user can see."""
        return [view_preview]

    @classmethod
    def get_logged_out_denied_pages(cls):
        """Returns all pages that a logged-out user can't see."""
        return [view_announcements, view_forum, view_course, view_assessments,
                view_unit, view_activity, view_my_profile, view_registration]

    @classmethod
    def get_enrolled_student_allowed_pages(cls):
        """Returns all pages that a logged-in, enrolled student can see."""
        return [view_announcements, view_forum, view_course,
                view_assessments, view_unit, view_activity, view_my_profile]

    @classmethod
    def get_enrolled_student_denied_pages(cls):
        """Returns all pages that a logged-in, enrolled student can't see."""
        return [view_registration, view_preview]

    @classmethod
    def get_unenrolled_student_allowed_pages(cls):
        """Returns all pages that a logged-in, unenrolled student can see."""
        return [view_registration, view_preview]

    @classmethod
    def get_unenrolled_student_denied_pages(cls):
        """Returns all pages that a logged-in, unenrolled student can't see."""
        # Everything an enrolled student may see, minus the pages an
        # unenrolled student is explicitly allowed to view.
        allowed = cls.get_unenrolled_student_allowed_pages()
        return [page
                for page in cls.get_enrolled_student_allowed_pages()
                if page not in allowed]

    @classmethod
    def assert_logged_out(cls, browser):
        """Check that only pages for a logged-out user are visible."""
        assert_none_fail(browser, cls.get_logged_out_allowed_pages())
        assert_all_fail(browser, cls.get_logged_out_denied_pages())

    @classmethod
    def assert_enrolled(cls, browser):
        """Check that only pages for an enrolled student are visible."""
        assert_none_fail(browser, cls.get_enrolled_student_allowed_pages())
        assert_all_fail(browser, cls.get_enrolled_student_denied_pages())

    @classmethod
    def assert_unenrolled(cls, browser):
        """Check that only pages for an unenrolled student are visible."""
        assert_none_fail(browser, cls.get_unenrolled_student_allowed_pages())
        assert_all_fail(browser, cls.get_unenrolled_student_denied_pages())
| Python |
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Course Builder test suite.
This script runs all functional and units test in the Course Builder project.
Here is how to use the script:
- download WebTest Python package from a URL below and put
the files in a folder of your choice, for example: tmp/webtest:
http://pypi.python.org/packages/source/W/WebTest/WebTest-1.4.2.zip
- update your Python path:
PYTHONPATH=$PYTHONPATH:/tmp/webtest
- run this command from a command line:
python tests/suite.py
- review the output to make sure there are no errors or warnings
Good luck!
"""
__author__ = 'Sean Lip'
import base64
import os
import sys
import unittest
# The following import is needed in order to add third-party libraries.
import appengine_config # pylint: disable-msg=unused-import
import webtest
from google.appengine.datastore import datastore_stub_util
from google.appengine.ext import deferred
from google.appengine.ext import testbed
EXPECTED_TEST_COUNT = 67
def empty_environ():
    """Resets the request environment to a known logged-out baseline."""
    os.environ.update({
        'AUTH_DOMAIN': 'example.com',
        'SERVER_NAME': 'localhost',
        'HTTP_HOST': 'localhost',
        'SERVER_PORT': '8080',
        'USER_EMAIL': '',
        'USER_ID': '',
    })
class BaseTestClass(unittest.TestCase):
    """Base class for setting up and tearing down test cases."""

    def getApp(self):  # pylint: disable-msg=g-bad-name
        """Returns the main application to be tested."""
        raise Exception('Not implemented.')

    def setUp(self):  # pylint: disable-msg=g-bad-name
        empty_environ()

        # setup an app to be tested
        self.testapp = webtest.TestApp(self.getApp())
        self.testbed = testbed.Testbed()
        self.testbed.activate()

        # configure datastore policy to emulate instantaneously and globally
        # consistent HRD; we also patch dev_appserver in main.py to run under
        # the same policy
        policy = datastore_stub_util.PseudoRandomHRConsistencyPolicy(
            probability=1)

        # declare any relevant App Engine service stubs here
        self.testbed.init_user_stub()
        self.testbed.init_memcache_stub()
        self.testbed.init_datastore_v3_stub(consistency_policy=policy)
        self.testbed.init_taskqueue_stub()
        self.taskq = self.testbed.get_stub(testbed.TASKQUEUE_SERVICE_NAME)

    def tearDown(self):  # pylint: disable-msg=g-bad-name
        self.testbed.deactivate()

    def execute_all_deferred_tasks(self, queue_name='default'):
        """Executes all pending deferred tasks."""
        # Task bodies arrive base64-encoded from the taskqueue stub.
        for task in self.taskq.GetTasks(queue_name):
            deferred.run(base64.b64decode(task['body']))
def create_test_suite():
    """Loads all test classes from appropriate modules."""
    # Imported here, not at the top, so main() can run fix_sys_path() first.
    import tests.functional.tests as functional_tests  # pylint: disable=C6204
    import tests.unit.tests as unit_tests  # pylint: disable=C6204

    tests = []
    for item in [unit_tests, functional_tests]:
        tests += unittest.TestLoader().loadTestsFromModule(item)

    # Here is how to test just one test case:
    #     tests = unittest.TestLoader().loadTestsFromTestCase(
    #         functional_tests.MultipleCoursesTest)

    return unittest.TestLoader().suiteClass(tests)
def fix_sys_path():
    """Fix the sys.path to include GAE extra paths."""
    import dev_appserver  # pylint: disable=C6204

    # dev_appserver.fix_sys_path() prepends GAE paths to sys.path and hides
    # our classes like 'tests' behind other modules that have 'tests'.
    # Here, unlike dev_appserver, we append the path instead of prepending it,
    # so that our classes come first.
    sys.path.extend(dev_appserver.EXTRA_PATHS)
def main():
    """Starts in-process server and runs all test cases in this module."""
    fix_sys_path()
    result = unittest.TextTestRunner(verbosity=2).run(create_test_suite())

    # Guard against tests silently disappearing from discovery.
    if result.testsRun != EXPECTED_TEST_COUNT:
        raise Exception('Expected %s tests to be run, not %s.' %
                        (EXPECTED_TEST_COUNT, result.testsRun))

    if result.errors or result.failures:
        raise Exception(
            'Functional test suite failed: %s errors, %s failures of '
            ' %s tests run.' % (
                len(result.errors), len(result.failures), result.testsRun))

    # Imported late -- presumably it requires the paths set up by
    # fix_sys_path() above.
    import tests.functional.actions as actions  # pylint: disable-msg=g-import-not-at-top

    count = len(actions.UNIQUE_URLS_FOUND.keys())
    result.stream.writeln('INFO: Unique URLs found: %s' % count)
    result.stream.writeln('INFO: All %s tests PASSED!' % EXPECTED_TEST_COUNT)
if __name__ == '__main__':
    # Force a predictable default string encoding before running the suite.
    appengine_config.gcb_force_default_encoding('ascii')
    main()
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Set of converters between db models, Python and JSON dictionaries, etc."""
__author__ = 'Pavel Simakov (psimakov@google.com)'
import datetime
import json
from google.appengine.ext import db
SIMPLE_TYPES = (int, long, float, bool, dict, basestring, list)
SUPPORTED_TYPES = (db.GeoPt, datetime.date)
JSON_TYPES = ['string', 'date', 'text', 'boolean', 'integer']
JSON_DATE_FORMAT = '%Y/%m/%d'
def dict_to_json(source_dict, unused_schema):
    """Converts Python dictionary into JSON dictionary using schema."""
    result = {}
    for name, item in source_dict.items():
        if item is None or isinstance(item, SIMPLE_TYPES):
            result[name] = item
        elif isinstance(item, datetime.date):
            result[name] = item.strftime(JSON_DATE_FORMAT)
        elif isinstance(item, db.GeoPt):
            result[name] = {'lat': item.lat, 'lon': item.lon}
        else:
            raise ValueError(
                'Failed to encode key \'%s\' with value \'%s\'.' % (name, item))
    return result
def json_to_dict(source_dict, schema):
    """Converts JSON dictionary into Python dictionary using schema."""
    result = {}
    for name, attr in schema['properties'].items():
        if name not in source_dict:
            # Schema elements absent from the source are simply skipped.
            continue
        attr_type = attr['type']
        if attr_type not in JSON_TYPES:
            raise ValueError('Unsupported JSON type: %s' % attr_type)
        if attr_type == 'date':
            parsed = datetime.datetime.strptime(
                source_dict[name], JSON_DATE_FORMAT)
            result[name] = parsed.date()
        else:
            result[name] = source_dict[name]
    return result
def entity_to_dict(entity):
    """Puts model object attributes into a Python dictionary."""
    result = {}
    for name, prop in entity.properties().iteritems():
        value = getattr(entity, name)
        is_encodable = (
            value is None or
            isinstance(value, SIMPLE_TYPES) or
            isinstance(value, SUPPORTED_TYPES))
        if not is_encodable:
            raise ValueError('Failed to encode: %s' % prop)
        result[name] = value
    # explicitly add entity key as a 'string' attribute
    result['key'] = str(entity.key())
    return result
def dict_to_entity(entity, source_dict):
    """Sets model object attributes from a Python dictionary."""
    for name, value in source_dict.items():
        is_settable = (
            value is None or
            isinstance(value, SIMPLE_TYPES) or
            isinstance(value, SUPPORTED_TYPES))
        if not is_settable:
            raise ValueError('Failed to encode: %s' % value)
        setattr(entity, name, value)
    return entity
def string_to_value(string, value_type):
    """Converts string representation to a value of the given type."""
    if value_type == str:
        return string if string else ''
    if value_type == bool:
        # Only these spellings count as true; everything else is False.
        return string in ('1', 'True', 1)
    if value_type == int or value_type == long:
        # Empty/None maps to 0; anything else must parse as an integer.
        return long(string) if string else 0
    raise ValueError('Unknown type: %s' % value_type)
def value_to_string(value, value_type):
    """Converts value to a string representation."""
    if value_type == str:
        return value
    if value_type == bool:
        return 'True' if value else 'False'
    if value_type == int or value_type == long:
        return str(value)
    raise ValueError('Unknown type: %s' % value_type)
def send_json_response(
    handler, status_code, message, payload_dict=None, xsrf_token=None):
    """Formats and sends out a JSON REST response envelope and body."""
    envelope = {'status': status_code, 'message': message}
    if payload_dict:
        envelope['payload'] = json.dumps(payload_dict)
    if xsrf_token:
        envelope['xsrf_token'] = xsrf_token
    handler.response.write(json.dumps(envelope))
def run_all_unit_tests():
    """Runs all unit tests."""
    # bool -> string conversions.
    assert value_to_string(True, bool) == 'True'
    assert value_to_string(False, bool) == 'False'
    assert value_to_string(None, bool) == 'False'
    # string -> bool conversions: only '1', 'True' and 1 are truthy.
    assert string_to_value('True', bool)
    assert string_to_value('1', bool)
    assert string_to_value(1, bool)
    assert not string_to_value('False', bool)
    assert not string_to_value('0', bool)
    assert not string_to_value('5', bool)
    assert not string_to_value(0, bool)
    assert not string_to_value(5, bool)
    assert not string_to_value(None, bool)
    # string -> int conversions; empty/None maps to 0.
    assert string_to_value('15', int) == 15
    assert string_to_value(15, int) == 15
    assert string_to_value(None, int) == 0
    # string -> str conversions; None maps to the empty string.
    assert string_to_value('foo', str) == 'foo'
    assert string_to_value(None, str) == str('')
if __name__ == '__main__':
    # Allow this module's self-checks to run standalone.
    run_all_unit_tests()
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common classes and methods for managing long running jobs."""
__author__ = 'Pavel Simakov (psimakov@google.com)'
from datetime import datetime
import json
import logging
import time
import traceback
import entities
from google.appengine.api import namespace_manager
from google.appengine.ext import db
from google.appengine.ext import deferred
# A job can be in one of these states.
STATUS_CODE_NONE = 0       # job record created but never started
STATUS_CODE_STARTED = 1    # job is currently running
STATUS_CODE_COMPLETED = 2  # run() finished without raising
STATUS_CODE_FAILED = 3     # run() raised an exception
class DurableJob(object):
    """A class that represents a deferred durable job at runtime.

    Persistent status lives in a DurableJobEntity keyed by a name derived
    from the concrete class and the course namespace, so there is at most
    one status record per (job class, namespace) pair.
    """

    def __init__(self, app_context):
        self._namespace = app_context.get_namespace_name()
        self._job_name = 'job-%s-%s' % (
            self.__class__.__name__, self._namespace)

    def run(self):
        """Override this method to provide actual business logic."""

    def main(self):
        """Main method of the deferred task.

        Runs run() inside the job's namespace and records completion or
        failure (with traceback) in the job's datastore entity.
        """
        logging.info('Job started: %s', self._job_name)
        time_started = time.time()
        old_namespace = namespace_manager.get_namespace()
        try:
            namespace_manager.set_namespace(self._namespace)
            try:
                result = self.run()
                DurableJobEntity.complete_job(
                    self._job_name, json.dumps(result),
                    long(time.time() - time_started))
                logging.info('Job completed: %s', self._job_name)
            except Exception as e:
                logging.error(traceback.format_exc())
                logging.error('Job failed: %s\n%s', self._job_name, e)
                DurableJobEntity.fail_job(
                    self._job_name, traceback.format_exc(),
                    long(time.time() - time_started))
                # PermanentTaskFailure tells the deferred library not to
                # retry this task.
                raise deferred.PermanentTaskFailure(e)
        finally:
            # Always restore the caller's namespace, even on failure.
            namespace_manager.set_namespace(old_namespace)

    def submit(self):
        """Submits this job for deferred execution."""
        DurableJobEntity.create_job(self._job_name)
        deferred.defer(self.main)

    def load(self):
        """Loads the last known state of this job from the datastore."""
        return DurableJobEntity.get_by_name(self._job_name)
class DurableJobEntity(entities.BaseEntity):
    """A class that represents a persistent database entity of durable job.

    The entity key name is the job name, so exactly one status record
    exists per job.
    """

    updated_on = db.DateTimeProperty(indexed=True)
    execution_time_sec = db.IntegerProperty(indexed=False)
    status_code = db.IntegerProperty(indexed=False)
    output = db.TextProperty(indexed=False)

    @classmethod
    def get_by_name(cls, name):
        """Returns the job entity with this name, or None."""
        return DurableJobEntity.get_by_key_name(name)

    @classmethod
    def update(cls, name, status_code, output, execution_time_sec):
        """Updates job state in a datastore."""
        def mutation():
            job = DurableJobEntity.get_by_name(name)
            if not job:
                # update() never creates; the record must already exist
                # via create_job().
                logging.error('Job was not started or was deleted: %s', name)
                return
            job.updated_on = datetime.now()
            job.execution_time_sec = execution_time_sec
            job.status_code = status_code
            job.output = output
            job.put()
        # Transaction makes the read-modify-write atomic.
        db.run_in_transaction(mutation)

    @classmethod
    def create_job(cls, name):
        """Creates new or reset a state of existing job in a datastore."""
        def mutation():
            job = DurableJobEntity.get_by_name(name)
            if not job:
                job = DurableJobEntity(key_name=name)
            job.updated_on = datetime.now()
            job.execution_time_sec = 0
            job.status_code = STATUS_CODE_NONE
            job.output = None
            job.put()
        db.run_in_transaction(mutation)

    @classmethod
    def start_job(cls, name):
        """Marks the job as started."""
        return cls.update(name, STATUS_CODE_STARTED, None, 0)

    @classmethod
    def complete_job(cls, name, output, execution_time_sec):
        """Marks the job as successfully completed with the given output."""
        return cls.update(
            name, STATUS_CODE_COMPLETED, output, execution_time_sec)

    @classmethod
    def fail_job(cls, name, output, execution_time_sec):
        """Marks the job as failed; output carries the traceback text."""
        return cls.update(name, STATUS_CODE_FAILED, output, execution_time_sec)
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common classes and methods for managing persistent entities."""
__author__ = 'Pavel Simakov (psimakov@google.com)'
from counters import PerfCounter
from google.appengine.ext import db
# datastore performance counters, incremented by the BaseEntity wrappers
# below on every corresponding datastore operation
DB_QUERY = PerfCounter(
    'gcb-models-db-query',
    'A number of times a query()/all() was executed on a datastore.')
DB_GET = PerfCounter(
    'gcb-models-db-get',
    'A number of times an object was fetched from datastore.')
DB_PUT = PerfCounter(
    'gcb-models-db-put',
    'A number of times an object was put into datastore.')
DB_DELETE = PerfCounter(
    'gcb-models-db-delete',
    'A number of times an object was deleted from datastore.')
class BaseEntity(db.Model):
    """A common class to all datastore entities.

    Thin wrapper over db.Model that bumps the matching performance counter
    before delegating every query/get/put/delete to the base class.
    """

    @classmethod
    def all(cls, **kwds):
        """Counts, then delegates to db.Model.all()."""
        DB_QUERY.inc()
        return super(BaseEntity, cls).all(**kwds)

    @classmethod
    def get(cls, keys):
        """Counts, then delegates to db.Model.get()."""
        DB_GET.inc()
        return super(BaseEntity, cls).get(keys)

    @classmethod
    def get_by_key_name(cls, key_names):
        """Counts, then delegates to db.Model.get_by_key_name()."""
        DB_GET.inc()
        return super(BaseEntity, cls).get_by_key_name(key_names)

    def put(self):
        """Counts, then delegates to db.Model.put()."""
        DB_PUT.inc()
        return super(BaseEntity, self).put()

    def delete(self):
        """Counts, then delegates to db.Model.delete()."""
        DB_DELETE.inc()
        super(BaseEntity, self).delete()
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Manages performance counters of an application and/or its modules."""
__author__ = 'Pavel Simakov (psimakov@google.com)'
class PerfCounter(object):
    """A generic, in-process integer counter."""

    def __init__(self, name, doc_string):
        """Creates the counter and registers it under its unique name."""
        self._name = name
        self._doc_string = doc_string
        self._value = 0
        Registry.registered[self.name] = self

    def inc(self, increment=1):
        """Increments value by a given increment."""
        self._value = self._value + increment

    @property
    def name(self):
        """Unique name the counter is registered under."""
        return self._name

    @property
    def doc_string(self):
        """Human-readable description of what is being counted."""
        return self._doc_string

    @property
    def value(self):
        """Current accumulated count."""
        return self._value
class Registry(object):
    """Holds all registered counters, keyed by counter name."""

    # Shared, module-wide mapping of counter name -> PerfCounter instance.
    registered = {}
| Python |
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Core data model classes."""
__author__ = 'Pavel Simakov (psimakov@google.com)'
import appengine_config
from config import ConfigProperty
from counters import PerfCounter
from entities import BaseEntity
from google.appengine.api import memcache
from google.appengine.api import users
from google.appengine.ext import db
# The default amount of time to cache the items for in memcache.
DEFAULT_CACHE_TTL_SECS = 60 * 5

# Global memcache controls. Defaults to on in production mode and off in
# development (see the PRODUCTION_MODE default below).
CAN_USE_MEMCACHE = ConfigProperty(
    'gcb_can_use_memcache', bool, (
        'Whether or not to cache various objects in memcache. For production '
        'this value should be on to enable maximum performance. For '
        'development this value should be off so you can see your changes to '
        'course content instantaneously.'),
    appengine_config.PRODUCTION_MODE)

# performance counters for MemcacheManager operations
CACHE_PUT = PerfCounter(
    'gcb-models-cache-put',
    'A number of times an object was put into memcache.')
CACHE_HIT = PerfCounter(
    'gcb-models-cache-hit',
    'A number of times an object was found in memcache.')
CACHE_MISS = PerfCounter(
    'gcb-models-cache-miss',
    'A number of times an object was not found in memcache.')
CACHE_DELETE = PerfCounter(
    'gcb-models-cache-delete',
    'A number of times an object was deleted from memcache.')
class MemcacheManager(object):
    """Class that consolidates all memcache operations."""

    @classmethod
    def get(cls, key):
        """Gets an item from memcache if memcache is enabled.

        Returns None both when memcache is disabled and when the key is
        absent; callers cannot distinguish the two.
        """
        if not CAN_USE_MEMCACHE.value:
            return None
        value = memcache.get(key)
        # NOTE(review): a cached falsy value (0, '', False) is counted as a
        # miss here, although whatever was found is still returned.
        if value:
            CACHE_HIT.inc()
        else:
            CACHE_MISS.inc()
        return value

    @classmethod
    def set(cls, key, value, ttl=DEFAULT_CACHE_TTL_SECS):
        """Sets an item in memcache if memcache is enabled."""
        if CAN_USE_MEMCACHE.value:
            CACHE_PUT.inc()
            memcache.set(key, value, ttl)

    @classmethod
    def delete(cls, key):
        """Deletes an item from memcache if memcache is enabled."""
        if CAN_USE_MEMCACHE.value:
            CACHE_DELETE.inc()
            memcache.delete(key)
class Student(BaseEntity):
    """Student profile.

    Entities are keyed by the student's email address (see get_by_email);
    a copy of each entity is also cached in memcache under the same key.
    """

    enrolled_on = db.DateTimeProperty(auto_now_add=True, indexed=True)
    user_id = db.StringProperty(indexed=False)
    name = db.StringProperty(indexed=False)
    is_enrolled = db.BooleanProperty(indexed=False)
    # Each of the following is a string representation of a JSON dict.
    scores = db.TextProperty(indexed=False)

    def put(self):
        """Do the normal put() and also add the object to memcache."""
        result = super(Student, self).put()
        MemcacheManager.set(self.key().name(), self)
        return result

    def delete(self):
        """Do the normal delete() and also remove the object from memcache."""
        super(Student, self).delete()
        MemcacheManager.delete(self.key().name())

    @classmethod
    def get_by_email(cls, email):
        """Fetches a student by email; the email is the entity key name."""
        return Student.get_by_key_name(email.encode('utf8'))

    @classmethod
    def get_enrolled_student_by_email(cls, email):
        """Returns the enrolled student with this email, or None."""
        student = MemcacheManager.get(email)
        if not student:
            student = Student.get_by_email(email)
            # NOTE(review): a None result is also written to memcache, but a
            # later get() treats it as a miss, so negative lookups are not
            # effectively cached.
            MemcacheManager.set(email, student)
        if student and student.is_enrolled:
            return student
        else:
            return None

    @classmethod
    def rename_current(cls, new_name):
        """Gives student a new name."""
        user = users.get_current_user()
        if not user:
            raise Exception('No current user.')
        if new_name:
            # NOTE(review): assumes a Student entity exists for this user;
            # raises AttributeError otherwise - confirm callers guarantee it.
            student = Student.get_by_email(user.email())
            student.name = new_name
            student.put()

    @classmethod
    def set_enrollment_status_for_current(cls, is_enrolled):
        """Changes student enrollment status."""
        user = users.get_current_user()
        if not user:
            raise Exception('No current user.')
        # NOTE(review): same existence assumption as rename_current().
        student = Student.get_by_email(user.email())
        student.is_enrolled = is_enrolled
        student.put()
class EventEntity(BaseEntity):
    """Generic events.

    Each event has a 'source' that defines a place in a code where the event
    was recorded. Each event has a 'user_id' to represent an actor who
    triggered the event. The event 'data' is a JSON object, the format of
    which is defined elsewhere and depends on the type of the event.
    """

    recorded_on = db.DateTimeProperty(auto_now_add=True, indexed=True)
    source = db.StringProperty(indexed=False)
    user_id = db.StringProperty(indexed=False)
    # Each of the following is a string representation of a JSON dict.
    data = db.TextProperty(indexed=False)

    @classmethod
    def record(cls, source, user, data):
        """Records new event into a datastore."""
        entity = EventEntity()
        entity.source = source
        entity.user_id = user.user_id()
        entity.data = data
        entity.put()
class StudentAnswersEntity(BaseEntity):
    """Student answers to the assessments."""

    updated_on = db.DateTimeProperty(indexed=True)
    # Each of the following is a string representation of a JSON dict.
    data = db.TextProperty(indexed=False)
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Manages mapping of users to roles and roles to privileges."""
__author__ = 'Pavel Simakov (psimakov@google.com)'
import config
from google.appengine.api import users
GCB_ADMIN_LIST = config.ConfigProperty(
    'gcb_admin_user_emails', str, (
        'A list of email addresses for super-admin users. '
        'WARNING! Super-admin users have the highest level of access to your '
        'Google App Engine instance and to all data about all courses and '
        'students within that instance. Be very careful when modifying this '
        'property. Syntax: Surround each email address with [ and ]; for '
        'example, [test@example.com]. Separate the entries with either a new '
        'line or a space. Do not use regular expressions.'),
    '', multiline=True)

# Key of the course section within the app context environment dict.
KEY_COURSE = 'course'
# Key, within the course section, of the course-admin email list.
KEY_ADMIN_USER_EMAILS = 'admin_user_emails'
class Roles(object):
    """A class that provides information about user roles."""

    @classmethod
    def is_direct_super_admin(cls):
        """Checks if current user is a super admin, without delegation."""
        return users.is_current_user_admin()

    @classmethod
    def is_super_admin(cls):
        """Checks if current user is a super admin, possibly via delegation."""
        if cls.is_direct_super_admin():
            return True
        user = users.get_current_user()
        # Admin emails are stored bracketed, e.g. '[test@example.com]'.
        if user and '[%s]' % user.email() in GCB_ADMIN_LIST.value:
            return True
        return False

    @classmethod
    def is_course_admin(cls, app_context):
        """Checks if a user is a course admin, possibly via delegation."""
        if cls.is_super_admin():
            return True
        # Fix: fetch the environment once instead of calling get_environ()
        # twice (once for the membership test, once for the lookup).
        environ = app_context.get_environ()
        if KEY_COURSE in environ:
            course_environ = environ[KEY_COURSE]
            if KEY_ADMIN_USER_EMAILS in course_environ:
                allowed = course_environ[KEY_ADMIN_USER_EMAILS]
                user = users.get_current_user()
                if user and '[%s]' % user.email() in allowed:
                    return True
        return False
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common classes and methods for managing Courses."""
__author__ = 'Pavel Simakov (psimakov@google.com)'
import logging
import os
from tools import verify
from models import MemcacheManager
class Course(object):
    """Manages a course and all of its components."""

    # The course content files may change between deployment. To avoid reading
    # old cached values by the new version of the application we add deployment
    # version to the key. Now each version of the application can put/get its
    # own version of the course.
    memcache_key = 'course-%s' % os.environ.get('CURRENT_VERSION_ID')

    def __init__(self, handler):
        self._app_context = handler.app_context
        self._loaded = False  # True once units/lessons are populated
        self._units = []
        self._lessons = []
        self._unit_id_to_lessons = {}

    def _reindex(self):
        """Groups all lessons by unit_id."""
        for lesson in self._lessons:
            key = str(lesson.unit_id)
            if not key in self._unit_id_to_lessons:
                self._unit_id_to_lessons[key] = []
            self._unit_id_to_lessons[key].append(lesson)

    def _load_from_memcache(self):
        """Loads course representation from memcache.

        Failures are logged, not raised; when self._loaded stays False the
        caller falls back to _rebuild_from_source().
        """
        try:
            envelope = MemcacheManager.get(self.memcache_key)
            if envelope:
                self._units = envelope.units
                self._lessons = envelope.lessons
                self._reindex()
                self._loaded = True
        except Exception as e:  # pylint: disable-msg=broad-except
            logging.error(
                'Failed to load course \'%s\' from memcache. %s',
                self.memcache_key, e)

    def _save_to_memcache(self):
        """Saves course representation into memcache."""
        envelope = SerializableCourseEnvelope()
        envelope.units = self._units
        envelope.lessons = self._lessons
        MemcacheManager.set(self.memcache_key, envelope)

    def _rebuild_from_source(self):
        """Loads course data from persistence storage into this instance."""
        self._units, self._lessons = load_csv_course(self._app_context)
        self._reindex()
        self._loaded = True

    def _materialize(self):
        """Loads data from persistence into this instance.

        Tries memcache first, then the CSV source, refreshing memcache
        after a rebuild.
        """
        if not self._loaded:
            self._load_from_memcache()
            if not self._loaded:
                self._rebuild_from_source()
                self._save_to_memcache()
                # TODO(psimakov): and if loading fails, then what?

    def get_units(self):
        """Returns all units, loading course data on first use."""
        self._materialize()
        return self._units

    def get_lessons(self, unit_id):
        """Returns lessons for a unit; raises KeyError for an unknown id."""
        self._materialize()
        return self._unit_id_to_lessons[str(unit_id)]
class SerializableCourseEnvelope(object):
    """A serializable, data-only representation of a Course."""

    def __init__(self):
        """Starts with no lessons and no units."""
        self.lessons = []
        self.units = []
class Unit(object):
    """An object to represent a Unit."""

    def __init__(self):
        """Initializes numeric, string and boolean fields to defaults."""
        self.id = 0
        self.now_available = False
        # CSV-sourced string fields all start empty.
        self.type = self.unit_id = self.title = self.release_date = ''
class Lesson(object):
    """An object to represent a Lesson."""

    def __init__(self):
        """Initializes numeric and string fields to defaults."""
        self.unit_id = 0
        self.id = 0
        # CSV-sourced string fields all start empty.
        self.title = self.objectives = self.video = self.notes = ''
        self.duration = self.activity = self.activity_title = ''
def copy_attributes(source, target, converter):
    """Copies source object attributes into a target using a converter.

    Args:
        source: object whose attributes are read.
        target: object whose attributes are set.
        converter: dict mapping a source attribute name to a
            (target_name, target_type) pair; falsy entries are skipped.
    """
    for source_name, spec in converter.items():
        if not spec:
            continue
        target_name = spec[0]
        target_type = spec[1]
        raw = getattr(source, source_name)
        setattr(target, target_name, target_type(raw))
def load_csv_course(app_context):
    """Loads course data from the CSV files.

    Args:
        app_context: application context used to locate and open the
            unit.csv and lesson.csv files.

    Returns:
        A (units, lessons) tuple of lists of Unit and Lesson objects.
    """
    logging.info('Initializing datastore from CSV files')
    unit_file = os.path.join(app_context.get_data_home(), 'unit.csv')
    lesson_file = os.path.join(app_context.get_data_home(), 'lesson.csv')
    # Load and validate data from CSV files.
    units = verify.read_objects_from_csv_stream(
        app_context.fs.open(unit_file), verify.UNITS_HEADER, verify.Unit)
    lessons = verify.read_objects_from_csv_stream(
        app_context.fs.open(lesson_file), verify.LESSONS_HEADER, verify.Lesson)
    verifier = verify.Verifier()
    verifier.verify_unit_fields(units)
    verifier.verify_lesson_fields(lessons)
    verifier.verify_unit_lesson_relationships(units, lessons)
    assert verifier.errors == 0
    assert verifier.warnings == 0
    # Load data from CSV files into a datastore.
    # NOTE: the files are read a second time, now into this module's
    # Unit/Lesson types instead of the verifier's own types.
    new_units = []
    new_lessons = []
    units = verify.read_objects_from_csv_stream(
        app_context.fs.open(unit_file), verify.UNITS_HEADER, Unit)
    lessons = verify.read_objects_from_csv_stream(
        app_context.fs.open(lesson_file), verify.LESSONS_HEADER, Lesson)
    for unit in units:
        entity = Unit()
        copy_attributes(unit, entity, verify.UNIT_CSV_TO_DB_CONVERTER)
        new_units.append(entity)
    for lesson in lessons:
        entity = Lesson()
        copy_attributes(lesson, entity, verify.LESSON_CSV_TO_DB_CONVERTER)
        new_lessons.append(entity)
    return new_units, new_lessons
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Manages dynamic properties of an application and/or its modules.
An application must explicitly declare properties and provide a type, doc string
and default value for each. The default property values are overridden by
the new values found in the environment variable with the same name. Those are
further overridden by the values found in the datastore. We also try to do all
of this with performance in mind.
"""
__author__ = 'Pavel Simakov (psimakov@google.com)'
import logging
import os
import time
import appengine_config
import entities
import transforms
from google.appengine.api import namespace_manager
from google.appengine.ext import db
# The default update interval supported (seconds).
DEFAULT_UPDATE_INTERVAL = 60

# The longest update interval supported (seconds).
MAX_UPDATE_INTERVAL = 60 * 5

# Allowed property types for ConfigProperty declarations.
TYPE_INT = int
TYPE_STR = str
TYPE_BOOL = bool
ALLOWED_TYPES = frozenset([TYPE_INT, TYPE_STR, TYPE_BOOL])
class ConfigProperty(object):
    """A property with name, type, doc_string and a default value.

    Value resolution order: datastore override, then environment variable,
    then the declared default (see the 'value' property).
    """

    def __init__(
        self, name, value_type, doc_string,
        default_value=None, multiline=False):
        if not value_type in ALLOWED_TYPES:
            raise Exception('Bad value type: %s' % value_type)
        self._multiline = multiline
        self._name = name
        self._type = value_type
        self._doc_string = doc_string
        # NOTE(review): value_type(None) raises TypeError for int when no
        # default_value is supplied - confirm all call sites pass one.
        self._default_value = value_type(default_value)
        self._value = None
        # Self-registration makes the property visible to the Registry
        # (and thus to datastore overrides).
        Registry.registered[name] = self

    @property
    def multiline(self):
        return self._multiline

    @property
    def name(self):
        return self._name

    @property
    def value_type(self):
        return self._type

    @property
    def doc_string(self):
        return self._doc_string

    @property
    def default_value(self):
        return self._default_value

    def get_environ_value(self):
        """Tries to get value from the environment variables.

        Returns:
            A (has_value, value) pair; has_value is False when no usable
            environment override exists.
        """
        # Look for a name in lower or upper case.
        name = None
        if self._name.lower() in os.environ:
            name = self._name.lower()
        else:
            if self._name.upper() in os.environ:
                name = self._name.upper()
        if name:
            try:
                return True, transforms.string_to_value(
                    os.environ[name], self.value_type)
            except Exception:  # pylint: disable-msg=broad-except
                # A value that fails to cast is deleted from os.environ so
                # the bad entry is not retried on every access.
                logging.error(
                    'Property %s failed to cast to type %s; removing.',
                    self._name, self._type)
                del os.environ[name]
        return False, None

    @property
    def value(self):
        """Get the latest value from datastore, environment or use default."""
        # Try datastore overrides first.
        overrides = Registry.get_overrides()
        if overrides and self.name in overrides:
            return overrides[self.name]
        # Try environment variable overrides second.
        has_value, environ_value = self.get_environ_value()
        if has_value:
            return environ_value
        # Use default value last.
        return self._default_value
class Registry(object):
    """Holds all registered properties and their datastore overrides."""

    registered = {}      # property name -> ConfigProperty
    db_overrides = {}    # property name -> value loaded from the datastore
    update_interval = DEFAULT_UPDATE_INTERVAL
    last_update_time = 0  # unix time of the last (attempted) reload
    update_index = 0      # number of reload attempts so far

    @classmethod
    def get_overrides(cls, force_update=False):
        """Returns current property overrides, maybe cached.

        Returns None when datastore overrides are disabled entirely (the
        update interval is configured as 0 via environment or default).
        """
        # Check if datastore property overrides are enabled at all.
        has_value, environ_value = UPDATE_INTERVAL_SEC.get_environ_value()
        if (has_value and environ_value == 0) or (
                UPDATE_INTERVAL_SEC.default_value == 0):
            return
        # Check if cached values are still fresh.
        now = long(time.time())
        age = now - cls.last_update_time
        if force_update or age < 0 or age >= cls.update_interval:
            try:
                old_namespace = namespace_manager.get_namespace()
                try:
                    # Overrides always live in the default namespace.
                    namespace_manager.set_namespace(
                        appengine_config.DEFAULT_NAMESPACE_NAME)
                    cls.load_from_db()
                finally:
                    namespace_manager.set_namespace(old_namespace)
            except Exception as e:  # pylint: disable-msg=broad-except
                logging.error(
                    'Failed to load properties from a database: %s.', str(e))
            finally:
                # Avoid overload and update timestamp even if we failed.
                cls.last_update_time = now
                cls.update_index += 1
        return cls.db_overrides

    @classmethod
    def load_from_db(cls):
        """Loads dynamic properties from db."""
        logging.info('Reloading properties.')
        overrides = {}
        for item in ConfigPropertyEntity.all().fetch(1000):
            name = item.key().name()
            if not name in cls.registered:
                logging.error(
                    'Property is not registered (skipped): %s', name)
                continue
            target = cls.registered[name]
            if target and not item.is_draft:
                # Enforce value type.
                try:
                    value = transforms.string_to_value(
                        item.value, target.value_type)
                except Exception:  # pylint: disable-msg=broad-except
                    logging.error(
                        'Property %s failed to cast to a type %s; removing.',
                        target.name, target.value_type)
                    continue
                # Don't allow disabling of update interval from a database.
                if name == UPDATE_INTERVAL_SEC.name:
                    # Simplified 'value == 0 or value < 0' to 'value <= 0'.
                    if value <= 0 or value > MAX_UPDATE_INTERVAL:
                        # Bug fix: the log arguments were swapped, producing
                        # 'Bad value <name> for <value>'.
                        logging.error(
                            'Bad value %s for %s; discarded.', value, name)
                        continue
                    else:
                        cls.update_interval = value
                overrides[name] = value
        cls.db_overrides = overrides
class ConfigPropertyEntity(entities.BaseEntity):
    """A class that represents a named configuration property.

    The entity key name is the property name; entities marked is_draft are
    skipped when loading overrides (see Registry.load_from_db).
    """

    value = db.TextProperty(indexed=False)
    is_draft = db.BooleanProperty(indexed=False)
def run_all_unit_tests():
    """Runs all unit tests for this modules."""
    str_prop = ConfigProperty('gcb-str-prop', str, ('doc for str_prop'), 'foo')
    int_prop = ConfigProperty('gcb-int-prop', int, ('doc for int_prop'), 123)
    # Defaults are used while no override exists.
    assert str_prop.default_value == 'foo'
    assert str_prop.value == 'foo'
    assert int_prop.default_value == 123
    assert int_prop.value == 123
    # Check os.environ override works.
    os.environ[str_prop.name] = 'bar'
    assert str_prop.value == 'bar'
    del os.environ[str_prop.name]
    assert str_prop.value == 'foo'
    # Check os.environ override with type casting.
    os.environ[int_prop.name] = '12345'
    assert int_prop.value == 12345
    # Check setting of value is disallowed (read-only property).
    try:
        str_prop.value = 'foo'
        raise Exception()
    except AttributeError:
        pass
    # Check value of bad type is disregarded; the failed cast also removes
    # the bad entry from os.environ (see get_environ_value).
    os.environ[int_prop.name] = 'foo bar'
    assert int_prop.value == int_prop.default_value
# Declared after Registry because the ConfigProperty constructor registers
# each property in Registry.registered.
UPDATE_INTERVAL_SEC = ConfigProperty(
    'gcb_config_update_interval_sec', int, (
        'An update interval (in seconds) for reloading runtime properties '
        'from a datastore. Using this editor, you can set this value to an '
        'integer between 1 and 300. To completely disable reloading '
        'properties from a datastore, you must set the value to 0. However, '
        'you can only set the value to 0 by directly modifying the app.yaml '
        'file. Maximum value is "%s".' % MAX_UPDATE_INTERVAL),
    DEFAULT_UPDATE_INTERVAL)
if __name__ == '__main__':
    # Allow this module's self-checks to run standalone.
    run_all_unit_tests()
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper functions to work with various models."""
__author__ = 'Sean Lip (sll@google.com)'
import json
def get_all_scores(student):
    """Gets all score data for a student.

    Args:
        student: the student whose scores should be retrieved.

    Returns:
        a dict where the keys are the assessment/summary name, and the value
        is the assessment/summary score (if available).
    """
    # An empty or missing scores string means nothing has been recorded yet.
    return json.loads(student.scores) if student.scores else {}
def dict_get(dict_as_string, my_key):
    """Looks up one key in a JSON-serialized dict.

    Args:
        dict_as_string: a JSON-encoded dict, or an empty/None value.
        my_key: the key to look up.

    Returns:
        The value stored under my_key, or None if the serialized dict is
        empty or the key is absent.
    """
    # Guard clause: nothing serialized means nothing to look up.
    if not dict_as_string:
        return None
    return json.loads(dict_as_string).get(my_key)
def set_answer(answers, assessment_name, answer):
    """Stores the answer array for the given student and assessment.

    The caller must call answers.put() to commit.

    This does not do any type-checking on 'answer'; it just stores whatever
    is passed in.

    Args:
        answers: the StudentAnswers entity in which the answer should be stored.
        assessment_name: the name of the assessment.
        answer: an array containing the student's answers.
    """
    # Deserialize existing answers (an empty data field means none yet),
    # overwrite this assessment's entry, and re-serialize in place.
    existing = json.loads(answers.data) if answers.data else {}
    existing[assessment_name] = answer
    answers.data = json.dumps(existing)
def get_score(student, assessment_name):
    """Gets a student's score for a particular assessment.

    The caller must cast the score appropriately.

    Args:
        student: the student whose score should be retrieved.
        assessment_name: the name of the assessment.

    Returns:
        The student's score for this assessment, or None if not found.
    """
    # Inlined equivalent of dict_get(student.scores, assessment_name).
    if not student.scores:
        return None
    return json.loads(student.scores).get(assessment_name)
def set_score(student, assessment_name, score):
    """Stores the score for the given student and assessment.

    The caller must call student.put() to commit.

    This does not do any type-checking on 'score'; it just stores whatever
    is passed in.

    Args:
        student: the student whose answer should be stored.
        assessment_name: the name of the assessment.
        score: the student's score.
    """
    # Deserialize the existing score dict (empty field -> fresh dict),
    # record this assessment's score, and serialize back onto the entity.
    scores = json.loads(student.scores) if student.scores else {}
    scores[assessment_name] = score
    student.scores = json.dumps(scores)
| Python |
Subsets and Splits
SQL Console for ajibawa-2023/Python-Code-Large
Provides a useful breakdown of language distribution in the training data, showing which languages have the most samples and helping identify potential imbalances across different language groups.