code
stringlengths
1
1.72M
language
stringclasses
1 value
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
Twisted daemon helpers, specifically to parse out gFlags from twisted flags,
manage pid files and support syslogging.
"""

import gflags
import os
import signal
import sys
import time
from twisted.scripts import twistd
from twisted.python import log
from twisted.python import reflect
from twisted.python import runtime
from twisted.python import usage

from fractus import flags
from fractus import log as logging


# twistd's ServerOptions class lives in a platform-specific module, so
# pick the right one up front.
if runtime.platformType == "win32":
    from twisted.scripts._twistw import ServerOptions
else:
    from twisted.scripts._twistd_unix import ServerOptions


FLAGS = flags.FLAGS


class TwistdServerOptions(ServerOptions):
    """ServerOptions that silently accepts (and ignores) positional args."""

    def parseArgs(self, *args):
        # Deliberately a no-op: positional arguments are handled by the
        # wrapping code in serve(), not by twistd's own parser.
        return


class FlagParser(object):
    """Adapter that lets an arbitrary callable act as a gflags parser.

    gflags expects parser objects to expose ``syntactic_help`` and a
    ``Parse`` method; this wraps a plain function into that shape.
    """

    # this is a required attribute for gflags
    syntactic_help = ''

    def __init__(self, parser):
        # parser: callable taking the raw string and returning the parsed value
        self.parser = parser

    def Parse(self, s):
        return self.parser(s)


def WrapTwistedOptions(wrapped):
    """Build a subclass of *wrapped* (a twisted Options class) whose
    option storage is backed by gflags' FLAGS instead of twisted's own
    option dictionary.

    Twisted's declarative optFlags/optParameters/opt_* handlers are
    absorbed into FLAGS definitions at __init__ time, so the whole
    command line can be parsed once by gflags.
    """
    class TwistedOptionsToFlags(wrapped):
        subCommands = None

        def __init__(self):
            # NOTE(termie): _data exists because Twisted stuff expects
            #               to be able to set arbitrary things that are
            #               not actual flags
            self._data = {}
            self._flagHandlers = {}
            self._paramHandlers = {}

            # Absorb the twistd flags into our FLAGS
            self._absorbFlags()
            self._absorbParameters()
            self._absorbHandlers()

            super(TwistedOptionsToFlags, self).__init__()

        def _absorbFlags(self):
            """Turn every twisted optFlags entry into a boolean FLAG."""
            twistd_flags = []
            reflect.accumulateClassList(self.__class__, 'optFlags',
                                        twistd_flags)
            for flag in twistd_flags:
                key = flag[0].replace('-', '_')
                # Don't redefine a flag that already exists in FLAGS.
                if hasattr(FLAGS, key):
                    continue
                flags.DEFINE_boolean(key, None, str(flag[-1]))

        def _absorbParameters(self):
            """Turn every twisted optParameters entry into a FLAG.

            Entries that carry a custom coercion function (5th element)
            are wrapped in FlagParser so gflags can call them.
            """
            twistd_params = []
            reflect.accumulateClassList(self.__class__, 'optParameters',
                                        twistd_params)
            for param in twistd_params:
                key = param[0].replace('-', '_')
                if hasattr(FLAGS, key):
                    continue
                if len(param) > 4:
                    flags.DEFINE(FlagParser(param[4]),
                                 key, param[2], str(param[3]),
                                 serializer=gflags.ArgumentSerializer())
                else:
                    flags.DEFINE_string(key, param[2], str(param[3]))

        def _absorbHandlers(self):
            """Register opt_* methods as flag/parameter handlers."""
            twistd_handlers = {}
            reflect.addMethodNamesToDict(self.__class__, twistd_handlers,
                                         "opt_")

            # NOTE(termie): Much of the following is derived/copied from
            #               twisted.python.usage with the express purpose of
            #               providing compatibility
            for name in twistd_handlers.keys():
                method = getattr(self, 'opt_' + name)

                takesArg = not usage.flagFunction(method, name)
                doc = getattr(method, '__doc__', None)
                if not doc:
                    doc = 'undocumented'

                if not takesArg:
                    # Zero-argument handler: modelled as a boolean flag.
                    if name not in FLAGS:
                        flags.DEFINE_boolean(name, None, doc)
                    self._flagHandlers[name] = method
                else:
                    # One-argument handler: modelled as a string parameter.
                    if name not in FLAGS:
                        flags.DEFINE_string(name, None, doc)
                    self._paramHandlers[name] = method

        def _doHandlers(self):
            """Invoke the opt_* handlers for every flag/param that was set."""
            for flag, handler in self._flagHandlers.iteritems():
                if self[flag]:
                    handler()
            for param, handler in self._paramHandlers.iteritems():
                if self[param] is not None:
                    handler(self[param])

        def __str__(self):
            return str(FLAGS)

        def parseOptions(self, options=None):
            """Parse the command line via gflags, then run twisted's
            post-parse hooks.

            Returns the argument list as returned by FLAGS(), with
            element 0 being the (possibly faked) program name.
            """
            if options is None:
                options = sys.argv
            else:
                # FLAGS() expects argv[0] to be the program name; insert a
                # placeholder when we were handed a bare option list.
                options.insert(0, '')

            args = FLAGS(options)
            argv = args[1:]
            # ignore subcommands

            try:
                self.parseArgs(*argv)
            except TypeError:
                raise usage.UsageError(_("Wrong number of arguments."))

            self.postOptions()
            return args

        def parseArgs(self, *args):
            # TODO(termie): figure out a decent way of dealing with args
            #return
            super(TwistedOptionsToFlags, self).parseArgs(*args)

        def postOptions(self):
            # Fire the absorbed opt_* handlers before twisted's own
            # post-options processing.
            self._doHandlers()

            super(TwistedOptionsToFlags, self).postOptions()

        def __getitem__(self, key):
            # Prefer FLAGS; fall back to the arbitrary-data dict that
            # twisted internals write into.
            key = key.replace('-', '_')
            try:
                return getattr(FLAGS, key)
            except (AttributeError, KeyError):
                return self._data[key]

        def __setitem__(self, key, value):
            key = key.replace('-', '_')
            try:
                return setattr(FLAGS, key, value)
            except (AttributeError, KeyError):
                self._data[key] = value

        def get(self, key, default):
            key = key.replace('-', '_')
            try:
                return getattr(FLAGS, key)
            except (AttributeError, KeyError):
                # NOTE(review): the result of _data.get() is not returned,
                # so this path always yields None — looks like a missing
                # `return`; confirm before relying on get() for non-flag keys.
                self._data.get(key, default)

    return TwistedOptionsToFlags


def stop(pidfile):
    """
    Stop the daemon
    """
    # Get the pid from the pidfile
    try:
        pf = file(pidfile, 'r')
        pid = int(pf.read().strip())
        pf.close()
    except IOError:
        pid = None

    if not pid:
        message = _("pidfile %s does not exist. Daemon not running?\n")
        sys.stderr.write(message % pidfile)
        # Not an error in a restart
        return

    # Try killing the daemon process
    try:
        # Keep signalling until the process disappears; os.kill raises
        # OSError("No such process") once it is gone.
        while 1:
            os.kill(pid, signal.SIGTERM)
            time.sleep(0.1)
    except OSError, err:
        err = str(err)
        if err.find(_("No such process")) > 0:
            if os.path.exists(pidfile):
                os.remove(pidfile)
        else:
            print str(err)
            sys.exit(1)


def serve(filename):
    """Run *filename* as a twistd-managed daemon.

    Handles start/stop/restart subcommands, derives pidfile and syslog
    prefix names from the script name, then hands off to twistd.runApp.
    """
    logging.debug(_("Serving %s") % filename)
    name = os.path.basename(filename)
    OptionsClass = WrapTwistedOptions(TwistdServerOptions)
    options = OptionsClass()
    argv = options.parseOptions()
    FLAGS.python = filename
    FLAGS.no_save = True
    # Default the pidfile / prefix to <scriptname>-based values, replacing
    # twistd's generic defaults when present.
    if not FLAGS.pidfile:
        FLAGS.pidfile = '%s.pid' % name
    elif FLAGS.pidfile.endswith('twistd.pid'):
        FLAGS.pidfile = FLAGS.pidfile.replace('twistd.pid', '%s.pid' % name)
    if not FLAGS.prefix:
        FLAGS.prefix = name
    elif FLAGS.prefix.endswith('twisted'):
        FLAGS.prefix = FLAGS.prefix.replace('twisted', name)

    action = 'start'
    if len(argv) > 1:
        action = argv.pop()

    if action == 'stop':
        stop(FLAGS.pidfile)
        sys.exit()
    elif action == 'restart':
        stop(FLAGS.pidfile)
    elif action == 'start':
        pass
    else:
        print 'usage: %s [options] [start|stop|restart]' % argv[0]
        sys.exit(1)

    logging.basicConfig()
    logging.debug(_("Full set of FLAGS:"))
    for flag in FLAGS:
        logging.debug("%s : %s" % (flag, FLAGS.get(flag, None)))

    logging.audit(_("Starting %s"), name)
    twistd.runApp(options)
Python
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
Wrappers around standard crypto data elements.

Includes root and intermediate CAs, SSH key_pairs and x509 certificates.
"""

import base64
import gettext
import hashlib
import os
import shutil
import struct
import tempfile
import time

import utils

import M2Crypto

gettext.install('fractus', unicode=1)

from fractus import context
from fractus import db
from fractus import flags
from fractus import log as logging


LOG = logging.getLogger("fractus.crypto")


FLAGS = flags.FLAGS
flags.DEFINE_string('ca_file', 'cacert.pem', _('Filename of root CA'))
flags.DEFINE_string('key_file',
                    os.path.join('private', 'cakey.pem'),
                    _('Filename of private key'))
flags.DEFINE_string('crl_file', 'crl.pem',
                    _('Filename of root Certificate Revokation List'))
flags.DEFINE_string('keys_path', '$state_path/keys',
                    _('Where we keep our keys'))
flags.DEFINE_string('ca_path', '$state_path/CA',
                    _('Where we keep our root CA'))
flags.DEFINE_boolean('use_project_ca', False,
                     _('Should we use a CA for each project?'))
flags.DEFINE_string('user_cert_subject',
                    '/C=US/ST=California/L=MountainView/O=AnsoLabs/'
                    'OU=NovaDev/CN=%s-%s-%s',
                    _('Subject for certificate for users, '
                      '%s for project, user, timestamp'))
flags.DEFINE_string('project_cert_subject',
                    '/C=US/ST=California/L=MountainView/O=AnsoLabs/'
                    'OU=NovaDev/CN=project-ca-%s-%s',
                    _('Subject for certificate for projects, '
                      '%s for project, timestamp'))
flags.DEFINE_string('vpn_cert_subject',
                    '/C=US/ST=California/L=MountainView/O=AnsoLabs/'
                    'OU=NovaDev/CN=project-vpn-%s-%s',
                    _('Subject for certificate for vpns, '
                      '%s for project, timestamp'))


def ca_folder(project_id=None):
    """Return the CA directory: the per-project one when per-project CAs
    are enabled and a project is given, otherwise the root CA path."""
    if FLAGS.use_project_ca and project_id:
        return os.path.join(FLAGS.ca_path, 'projects', project_id)
    return FLAGS.ca_path


def ca_path(project_id=None):
    """Return the path of the CA certificate file for *project_id*."""
    return os.path.join(ca_folder(project_id), FLAGS.ca_file)


def key_path(project_id=None):
    """Return the path of the CA private key file for *project_id*."""
    return os.path.join(ca_folder(project_id), FLAGS.key_file)


def fetch_ca(project_id=None, chain=True):
    """Read and return CA certificate text.

    With chain=True (the default) the project CA is concatenated with the
    root CA; with chain=False only the project CA is returned.
    """
    if not FLAGS.use_project_ca:
        project_id = None
    buffer = ""
    if project_id:
        with open(ca_path(project_id), "r") as cafile:
            buffer += cafile.read()
        if not chain:
            return buffer
    with open(ca_path(None), "r") as cafile:
        buffer += cafile.read()
    return buffer


def generate_key_pair(bits=1024):
    """Generate an RSA ssh key pair via ssh-keygen.

    Returns (private_key, public_key, fingerprint) as read back from the
    files ssh-keygen produced in a throwaway temp directory.
    """
    # what is the magic 65537?

    tmpdir = tempfile.mkdtemp()
    keyfile = os.path.join(tmpdir, 'temp')
    utils.execute('ssh-keygen -q -b %d -N "" -f %s' % (bits, keyfile))
    (out, err) = utils.execute('ssh-keygen -q -l -f %s.pub' % (keyfile))
    fingerprint = out.split(' ')[1]
    private_key = open(keyfile).read()
    public_key = open(keyfile + '.pub').read()

    shutil.rmtree(tmpdir)
    # code below returns public key in pem format
    # key = M2Crypto.RSA.gen_key(bits, 65537, callback=lambda: None)
    # private_key = key.as_pem(cipher=None)
    # bio = M2Crypto.BIO.MemoryBuffer()
    # key.save_pub_key_bio(bio)
    # public_key = bio.read()
    # public_key, err = execute('ssh-keygen -y -f /dev/stdin', private_key)

    return (private_key, public_key, fingerprint)


def ssl_pub_to_ssh_pub(ssl_public_key, name='root', suffix='nova'):
    """Convert a PEM SSL public key into OpenSSH authorized_keys format.

    Builds the ssh-rsa wire blob (length-prefixed key type, then the
    exponent/modulus strings as M2Crypto hands them back) and base64s it.
    """
    buf = M2Crypto.BIO.MemoryBuffer(ssl_public_key)
    rsa_key = M2Crypto.RSA.load_pub_key_bio(buf)
    e, n = rsa_key.pub()

    key_type = 'ssh-rsa'

    key_data = struct.pack('>I', len(key_type))
    key_data += key_type
    # NOTE(review): e and n are concatenated as returned by rsa_key.pub();
    # this assumes they are already in the mpint encoding ssh expects.
    key_data += '%s%s' % (e, n)

    b64_blob = base64.b64encode(key_data)
    return '%s %s %s@%s\n' % (key_type, b64_blob, name, suffix)


def revoke_cert(project_id, file_name):
    """Revoke a cert by file name"""
    start = os.getcwd()
    os.chdir(ca_folder(project_id))
    # NOTE(vish): potential race condition here
    # NOTE(review): file_name is interpolated into a shell command; make
    # sure callers only pass trusted paths (shell-injection risk).
    utils.execute("openssl ca -config ./openssl.cnf -revoke '%s'" %
                  file_name)
    utils.execute("openssl ca -gencrl -config ./openssl.cnf -out '%s'" %
                  FLAGS.crl_file)
    os.chdir(start)


def revoke_certs_by_user(user_id):
    """Revoke all user certs"""
    admin = context.get_admin_context()
    for cert in db.certificate_get_all_by_user(admin, user_id):
        revoke_cert(cert['project_id'], cert['file_name'])


def revoke_certs_by_project(project_id):
    """Revoke all project certs"""
    # NOTE(vish): This is somewhat useless because we can just shut down
    #             the vpn.
    admin = context.get_admin_context()
    for cert in db.certificate_get_all_by_project(admin, project_id):
        revoke_cert(cert['project_id'], cert['file_name'])


def revoke_certs_by_user_and_project(user_id, project_id):
    """Revoke certs for user in project"""
    admin = context.get_admin_context()
    for cert in db.certificate_get_all_by_user(admin, user_id, project_id):
        revoke_cert(cert['project_id'], cert['file_name'])


def _project_cert_subject(project_id):
    """Helper to generate user cert subject"""
    return FLAGS.project_cert_subject % (project_id, utils.isotime())


def _vpn_cert_subject(project_id):
    """Helper to generate user cert subject"""
    return FLAGS.vpn_cert_subject % (project_id, utils.isotime())


def _user_cert_subject(user_id, project_id):
    """Helper to generate user cert subject"""
    return FLAGS.user_cert_subject % (project_id, user_id, utils.isotime())


def generate_x509_cert(user_id, project_id, bits=1024):
    """Generate and sign a cert for user in project"""
    subject = _user_cert_subject(user_id, project_id)
    tmpdir = tempfile.mkdtemp()
    keyfile = os.path.abspath(os.path.join(tmpdir, 'temp.key'))
    csrfile = os.path.join(tmpdir, 'temp.csr')
    utils.execute("openssl genrsa -out %s %s" % (keyfile, bits))
    utils.execute("openssl req -new -key %s -out %s -batch -subj %s" %
                  (keyfile, csrfile, subject))
    private_key = open(keyfile).read()
    csr = open(csrfile).read()
    shutil.rmtree(tmpdir)
    (serial, signed_csr) = sign_csr(csr, project_id)
    fname = os.path.join(ca_folder(project_id), "newcerts/%s.pem" % serial)
    # Record where the signed cert lives so it can be revoked later.
    cert = {'user_id': user_id,
            'project_id': project_id,
            'file_name': fname}
    db.certificate_create(context.get_admin_context(), cert)
    return (private_key, signed_csr)


def _ensure_project_folder(project_id):
    """Create the per-project intermediate CA folder if it doesn't exist."""
    if not os.path.exists(ca_path(project_id)):
        start = os.getcwd()
        os.chdir(ca_folder())
        utils.execute("sh geninter.sh %s %s" %
                      (project_id, _project_cert_subject(project_id)))
        os.chdir(start)


def generate_vpn_files(project_id):
    """Generate and sign the vpn server cert for *project_id*.

    No-op if the cert already exists.
    """
    project_folder = ca_folder(project_id)
    csr_fn = os.path.join(project_folder, "server.csr")
    crt_fn = os.path.join(project_folder, "server.crt")

    if os.path.exists(crt_fn):
        return
    _ensure_project_folder(project_id)
    start = os.getcwd()
    os.chdir(ca_folder())
    # TODO(vish): the shell scripts could all be done in python
    utils.execute("sh genvpn.sh %s %s" %
                  (project_id, _vpn_cert_subject(project_id)))
    with open(csr_fn, "r") as csrfile:
        csr_text = csrfile.read()
    (serial, signed_csr) = sign_csr(csr_text, project_id)
    with open(crt_fn, "w") as crtfile:
        crtfile.write(signed_csr)
    os.chdir(start)


def sign_csr(csr_text, project_id=None):
    """Sign a CSR with the appropriate (root or per-project) CA."""
    if not FLAGS.use_project_ca:
        project_id = None
    if not project_id:
        return _sign_csr(csr_text, ca_folder())
    _ensure_project_folder(project_id)
    # NOTE(review): project_folder is assigned but unused; _sign_csr is
    # called with ca_folder(project_id) directly.
    project_folder = ca_folder(project_id)
    return _sign_csr(csr_text, ca_folder(project_id))


def _sign_csr(csr_text, ca_folder):
    """Sign *csr_text* with the CA in *ca_folder*.

    Returns (serial, cert_text) of the newly issued certificate.
    """
    tmpfolder = tempfile.mkdtemp()
    inbound = os.path.join(tmpfolder, "inbound.csr")
    outbound = os.path.join(tmpfolder, "outbound.csr")
    csrfile = open(inbound, "w")
    csrfile.write(csr_text)
    csrfile.close()
    LOG.debug(_("Flags path: %s"), ca_folder)
    start = os.getcwd()
    # Change working dir to CA
    os.chdir(ca_folder)
    utils.execute("openssl ca -batch -out %s -config "
                  "./openssl.cnf -infiles %s" % (outbound, inbound))
    out, _err = utils.execute("openssl x509 -in %s -serial -noout" % outbound)
    serial = out.rpartition("=")[2]
    os.chdir(start)
    with open(outbound, "r") as crtfile:
        return (serial, crtfile.read())


def mkreq(bits, subject="foo", ca=0):
    """Create an X509 certificate request plus its private key.

    Returns (request, evp_pkey).
    """
    pk = M2Crypto.EVP.PKey()
    req = M2Crypto.X509.Request()
    rsa = M2Crypto.RSA.gen_key(bits, 65537, callback=lambda: None)
    pk.assign_rsa(rsa)
    rsa = None  # should not be freed here
    req.set_pubkey(pk)
    req.set_subject(subject)
    req.sign(pk, 'sha512')
    assert req.verify(pk)
    pk2 = req.get_pubkey()
    assert req.verify(pk2)
    return req, pk


def mkcacert(subject='nova', years=1):
    """Build a self-signed CA certificate valid for *years* years.

    Returns (cert, private_pkey, public_pkey).
    """
    req, pk = mkreq(2048, subject, ca=1)
    pkey = req.get_pubkey()
    sub = req.get_subject()
    cert = M2Crypto.X509.X509()
    cert.set_serial_number(1)
    cert.set_version(2)
    # FIXME subject is not set in mkreq yet
    cert.set_subject(sub)
    t = long(time.time()) + time.timezone
    now = M2Crypto.ASN1.ASN1_UTCTIME()
    now.set_time(t)
    nowPlusYear = M2Crypto.ASN1.ASN1_UTCTIME()
    nowPlusYear.set_time(t + (years * 60 * 60 * 24 * 365))
    cert.set_not_before(now)
    cert.set_not_after(nowPlusYear)
    issuer = M2Crypto.X509.X509_Name()
    issuer.C = "US"
    issuer.CN = subject
    cert.set_issuer(issuer)
    cert.set_pubkey(pkey)
    ext = M2Crypto.X509.new_extension('basicConstraints', 'CA:TRUE')
    cert.add_ext(ext)
    cert.sign(pk, 'sha512')

    # print 'cert', dir(cert)
    print cert.as_pem()
    print pk.get_rsa().as_pem()

    return cert, pk, pkey


# Copyright (c) 2006-2009 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
# http://code.google.com/p/boto


def compute_md5(fp):
    """
    :type fp: file
    :param fp: File pointer to the file to MD5 hash.  The file pointer will be
               reset to the beginning of the file before the method returns.

    :rtype: tuple
    :return: the hex digest version of the MD5 hash
    """
    m = hashlib.md5()
    fp.seek(0)
    # Hash in 8 KiB chunks so arbitrarily large files fit in memory.
    s = fp.read(8192)
    while s:
        m.update(s)
        s = fp.read(8192)
    hex_md5 = m.hexdigest()
    # size = fp.tell()
    fp.seek(0)
    return hex_md5
Python
# Copyright 2011 Fractus Labs (www.fractus-labs.com) # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ A fake (in-memory) hypervisor+api. Allows nova testing w/o a hypervisor. This module also documents the semantics of real hypervisor connections. """ from fractus import exception from nova.compute import power_state def get_os(_): # The read_only parameter is ignored. return FakeConnection.instance() class FakeConnection(object): """ The interface to this class talks in terms of 'instances' (Amazon EC2 and internal Nova terminology), by which we mean 'running virtual machine' (XenAPI terminology) or domain (Xen or libvirt terminology). An instance has an ID, which is the identifier chosen by Nova to represent the instance further up the stack. This is unfortunately also called a 'name' elsewhere. As far as this layer is concerned, 'instance ID' and 'instance name' are synonyms. Note that the instance ID or name is not human-readable or customer-controlled -- it's an internal ID chosen by Nova. At the nova.virt layer, instances do not have human-readable names at all -- such things are only known higher up the stack. Most virtualization platforms will also have their own identity schemes, to uniquely identify a VM or domain. These IDs must stay internal to the platform-specific layer, and never escape the connection interface. 
The platform-specific layer is responsible for keeping track of which instance ID maps to which platform-specific ID, and vice versa. In contrast, the list_disks and list_interfaces calls may return platform-specific IDs. These identify a specific virtual disk or specific virtual network interface, and these IDs are opaque to the rest of Nova. Some methods here take an instance of nova.compute.service.Instance. This is the datastructure used by nova.compute to store details regarding an instance, and pass them into this layer. This layer is responsible for translating that generic datastructure into terms that are specific to the virtualization platform. """ def __init__(self): self.instances = {} @classmethod def instance(cls): if not hasattr(cls, '_instance'): cls._instance = cls() return cls._instance def init_host(self, host): """ Initialize anything that is necessary for the driver to function, including catching up with currently running VM's on the given host. """ return def list_instances(self): """ Return the names of all the instances known to the virtualization layer, as a list. """ return self.instances.keys() def spawn(self, instance): """ Create a new instance/VM/domain on the virtualization platform. The given parameter is an instance of nova.compute.service.Instance. This function should use the data there to guide the creation of the new instance. The work will be done asynchronously. This function returns a task that allows the caller to detect when it is complete. Once this successfully completes, the instance should be running (power_state.RUNNING). If this fails, any partial instance should be completely cleaned up, and the virtualization platform should be in the state that it was before this call began. """ fake_instance = FakeInstance() self.instances[instance.name] = fake_instance fake_instance._state = power_state.RUNNING def snapshot(self, instance, name): """ Snapshots the specified instance. 
The given parameter is an instance of nova.compute.service.Instance, and so the instance is being specified as instance.name. The second parameter is the name of the snapshot. The work will be done asynchronously. This function returns a task that allows the caller to detect when it is complete. """ pass def reboot(self, instance): """ Reboot the specified instance. The given parameter is an instance of nova.compute.service.Instance, and so the instance is being specified as instance.name. The work will be done asynchronously. This function returns a task that allows the caller to detect when it is complete. """ pass def set_admin_password(self, instance, new_pass): """ Set the root password on the specified instance. The first parameter is an instance of nova.compute.service.Instance, and so the instance is being specified as instance.name. The second parameter is the value of the new password. The work will be done asynchronously. This function returns a task that allows the caller to detect when it is complete. """ pass def inject_file(self, instance, b64_path, b64_contents): """ Writes a file on the specified instance. The first parameter is an instance of nova.compute.service.Instance, and so the instance is being specified as instance.name. The second parameter is the base64-encoded path to which the file is to be written on the instance; the third is the contents of the file, also base64-encoded. The work will be done asynchronously. This function returns a task that allows the caller to detect when it is complete. """ pass def rescue(self, instance): """ Rescue the specified instance. """ pass def unrescue(self, instance): """ Unrescue the specified instance. """ pass def pause(self, instance, callback): """ Pause the specified instance. """ pass def unpause(self, instance, callback): """ Unpause the specified instance. 
""" pass def suspend(self, instance, callback): """ suspend the specified instance """ pass def resume(self, instance, callback): """ resume the specified instance """ pass def destroy(self, instance): """ Destroy (shutdown and delete) the specified instance. The given parameter is an instance of nova.compute.service.Instance, and so the instance is being specified as instance.name. The work will be done asynchronously. This function returns a task that allows the caller to detect when it is complete. """ del self.instances[instance.name] def attach_volume(self, instance_name, device_path, mountpoint): """Attach the disk at device_path to the instance at mountpoint""" return True def detach_volume(self, instance_name, mountpoint): """Detach the disk attached to the instance at mountpoint""" return True def get_info(self, instance_name): """ Get a block of information about the given instance. This is returned as a dictionary containing 'state': The power_state of the instance, 'max_mem': The maximum memory for the instance, in KiB, 'mem': The current memory the instance has, in KiB, 'num_cpu': The current number of virtual CPUs the instance has, 'cpu_time': The total CPU time used by the instance, in nanoseconds. This method should raise exception.NotFound if the hypervisor has no knowledge of the instance """ if instance_name not in self.instances: raise exception.NotFound(_("Instance %s Not Found") % instance_name) i = self.instances[instance_name] return {'state': i._state, 'max_mem': 0, 'mem': 0, 'num_cpu': 2, 'cpu_time': 0} def get_diagnostics(self, instance_name): pass def list_disks(self, instance_name): """ Return the IDs of all the virtual disks attached to the specified instance, as a list. These IDs are opaque to the caller (they are only useful for giving back to this layer as a parameter to disk_stats). These IDs only need to be unique for a given instance. 
Note that this function takes an instance ID, not a compute.service.Instance, so that it can be called by compute.monitor. """ return ['A_DISK'] def list_interfaces(self, instance_name): """ Return the IDs of all the virtual network interfaces attached to the specified instance, as a list. These IDs are opaque to the caller (they are only useful for giving back to this layer as a parameter to interface_stats). These IDs only need to be unique for a given instance. Note that this function takes an instance ID, not a compute.service.Instance, so that it can be called by compute.monitor. """ return ['A_VIF'] def block_stats(self, instance_name, disk_id): """ Return performance counters associated with the given disk_id on the given instance_name. These are returned as [rd_req, rd_bytes, wr_req, wr_bytes, errs], where rd indicates read, wr indicates write, req is the total number of I/O requests made, bytes is the total number of bytes transferred, and errs is the number of requests held up due to a full pipeline. All counters are long integers. This method is optional. On some platforms (e.g. XenAPI) performance statistics can be retrieved directly in aggregate form, without Nova having to do the aggregation. On those platforms, this method is unused. Note that this function takes an instance ID, not a compute.service.Instance, so that it can be called by compute.monitor. """ return [0L, 0L, 0L, 0L, null] def interface_stats(self, instance_name, iface_id): """ Return performance counters associated with the given iface_id on the given instance_id. These are returned as [rx_bytes, rx_packets, rx_errs, rx_drop, tx_bytes, tx_packets, tx_errs, tx_drop], where rx indicates receive, tx indicates transmit, bytes and packets indicate the total number of bytes or packets transferred, and errs and dropped is the total number of packets failed / dropped. All counters are long integers. This method is optional. On some platforms (e.g. 
XenAPI) performance statistics can be retrieved directly in aggregate form, without Nova having to do the aggregation. On those platforms, this method is unused. Note that this function takes an instance ID, not a compute.service.Instance, so that it can be called by compute.monitor. """ return [0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L] def get_console_output(self, instance): return 'FAKE CONSOLE OUTPUT' def get_ajax_console(self, instance): return 'http://fakeajaxconsole.com/?token=FAKETOKEN' def get_console_pool_info(self, console_type): return {'address': '127.0.0.1', 'username': 'fakeuser', 'password': 'fakepassword'} def refresh_security_group_rules(self, security_group_id): """This method is called after a change to security groups. All security groups and their associated rules live in the datastore, and calling this method should apply the updated rules to instances running the specified security group. An error should be raised if the operation cannot complete. """ return True def refresh_security_group_members(self, security_group_id): """This method is called when a security group is added to an instance. This message is sent to the virtualization drivers on hosts that are running an instance that belongs to a security group that has a rule that references the security group identified by `security_group_id`. It is the responsiblity of this method to make sure any rules that authorize traffic flow with members of the security group are updated and any new members can communicate, and any removed members cannot. Scenario: * we are running on host 'H0' and we have an instance 'i-0'. 
* instance 'i-0' is a member of security group 'speaks-b' * group 'speaks-b' has an ingress rule that authorizes group 'b' * another host 'H1' runs an instance 'i-1' * instance 'i-1' is a member of security group 'b' When 'i-1' launches or terminates we will recieve the message to update members of group 'b', at which time we will make any changes needed to the rules for instance 'i-0' to allow or deny traffic coming from 'i-1', depending on if it is being added or removed from the group. In this scenario, 'i-1' could just as easily have been running on our host 'H0' and this method would still have been called. The point was that this method isn't called on the host where instances of that group are running (as is the case with :method:`refresh_security_group_rules`) but is called where references are made to authorizing those instances. An error should be raised if the operation cannot complete. """ return True class FakeInstance(object): def __init__(self): self._state = power_state.NOSTATE
Python
# Copyright 2011 Fractus Labs (www.fractus-labs.com)
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Abstraction of the underlying os API."""

import sys

from fractus import flags
from fractus import log as logging
from fractus.os import fake
from fractus.os import linux_os
from fractus.os import windows_os


LOG = logging.getLogger("fractus.os.os")

FLAGS = flags.FLAGS


def get_os(read_only=False):
    """
    Returns an object representing the os of the node.

    This could be :mod:`fractus.os.fake.FakeConnection` in test mode,
    a os to linux via :mod:`linux_os`, or a connection to windows via
    :mod:`windows_os`.

    Any object returned here must conform to the interface documented by
    :mod:`FakeConnection`.

    **Related flags**

    :connection_type:  A string literal that falls through a if/elif structure
                       to determine what os mechanism to use.
                       Values may be

                            * fake
                            * linux
                            * windows

    Raises Exception for an unknown connection_type and exits the process
    when the selected driver yields no connection.
    """
    t = FLAGS.connection_type
    if t == 'fake':
        # NOTE: the fake module's entry point is get_os (it has no
        # get_connection), so call it directly.
        conn = fake.get_os(read_only)
    elif t == 'linux':
        # NOTE: previously referenced the undefined name `libvirt_conn`,
        # which raised NameError; route through the imported linux_os
        # module instead.
        # TODO(review): confirm linux_os exposes get_connection (vs get_os).
        conn = linux_os.get_connection(read_only)
    elif t == 'windows':
        # NOTE: previously referenced the undefined name `xenapi_conn`;
        # route through the imported windows_os module instead.
        # TODO(review): confirm windows_os exposes get_connection (vs get_os).
        conn = windows_os.get_connection(read_only)
    else:
        raise Exception('Unknown os type "%s"' % t)

    if conn is None:
        LOG.error(_('Failed to get os driver'))
        sys.exit(1)

    return conn
Python
# Copyright 2011 Fractus Labs (www.fractus-labs.com) # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License.
Python
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
Managers are responsible for a certain aspect of the system.  It is a logical
grouping of code relating to a portion of the system.  In general other
components should be using the manager to make changes to the components that
it is responsible for.

For example, other components that need to deal with volumes in some way,
should do so by calling methods on the VolumeManager instead of directly
changing fields in the database.  This allows us to keep all of the code
relating to volumes in the same place.

We have adopted a basic strategy of Smart managers and dumb data, which means
rather than attaching methods to data objects, components should call manager
methods that act on the data.

Methods on managers that can be executed locally should be called directly.
If a particular method must execute on a remote host, this should be done via
rpc to the service that wraps the manager

Managers should be responsible for most of the db access, and
non-implementation specific data.  Anything implementation specific that
can't be generalized should be done by the Driver.

In general, we prefer to have one manager with multiple drivers for different
implementations, but sometimes it makes sense to have multiple managers.  You
can think of it this way: Abstract different overall strategies at the manager
level(FlatNetwork vs VlanNetwork), and different implementations at the driver
level(LinuxNetDriver vs CiscoNetDriver).

Managers will often provide methods for initial setup of a host or periodic
tasks to a wrapping service.

This module provides Manager, a base class for managers.
"""

from fractus import flags
from fractus.db import base

FLAGS = flags.FLAGS


class Manager(base.Base):
    """Base class for all managers; wires up host identity and db access."""

    def __init__(self, host=None, db_driver=None):
        # Default to this node's configured hostname when none is given.
        if not host:
            host = FLAGS.host
        self.host = host
        # base.Base sets up the db driver used by subclasses.
        super(Manager, self).__init__(db_driver)

    def periodic_tasks(self, context=None):
        """Tasks to be run at a periodic interval"""
        pass

    def init_host(self):
        """Do any initialization that needs to be run if this is a
        standalone service. Child classes should override this method."""
        pass
Python
# Copyright 2011 Fractus Labs (www.fractus-labs.com)
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Package-level global flags are defined here, the rest are defined
where they're used.
"""

import getopt
import os
import socket
import string
import sys

import gflags


class FlagValues(gflags.FlagValues):
    """Extension of gflags.FlagValues that allows undefined and runtime flags.

    Unknown flags will be ignored when parsing the command line, but the
    command line will be kept so that it can be replayed if new flags are
    defined after the initial parsing.
    """

    def __init__(self, extra_context=None):
        gflags.FlagValues.__init__(self)
        # NOTE: state lives in __dict__ directly because gflags.FlagValues
        # overrides attribute access for flag lookups.
        self.__dict__['__dirty'] = []
        self.__dict__['__was_already_parsed'] = False
        self.__dict__['__stored_argv'] = []
        self.__dict__['__extra_context'] = extra_context

    def __call__(self, argv):
        # We're doing some hacky stuff here so that we don't have to copy
        # out all the code of the original verbatim and then tweak a few
        # lines.  We're hijacking the output of getopt so we can still
        # return the leftover args at the end.
        sneaky_unparsed_args = {"value": None}
        original_argv = list(argv)

        if self.IsGnuGetOpt():
            orig_getopt = getattr(getopt, 'gnu_getopt')
            orig_name = 'gnu_getopt'
        else:
            orig_getopt = getattr(getopt, 'getopt')
            orig_name = 'getopt'

        def _sneaky(*args, **kw):
            """Capture getopt's leftover args before returning them."""
            optlist, unparsed_args = orig_getopt(*args, **kw)
            sneaky_unparsed_args['value'] = unparsed_args
            return optlist, unparsed_args

        try:
            setattr(getopt, orig_name, _sneaky)
            args = gflags.FlagValues.__call__(self, argv)
        except gflags.UnrecognizedFlagError:
            # Undefined args were found, for now we don't care so just
            # act like everything went well
            # (these three lines are copied pretty much verbatim from the
            # end of the __call__ function we are wrapping)
            unparsed_args = sneaky_unparsed_args['value']
            if unparsed_args:
                if self.IsGnuGetOpt():
                    args = argv[:1] + unparsed_args
                else:
                    args = argv[:1] + original_argv[-len(unparsed_args):]
            else:
                args = argv[:1]
        finally:
            # Always restore the real getopt function.
            setattr(getopt, orig_name, orig_getopt)

        # Store the arguments for later, we'll need them for new flags
        # added at runtime
        self.__dict__['__stored_argv'] = original_argv
        self.__dict__['__was_already_parsed'] = True
        self.ClearDirty()
        return args

    def Reset(self):
        gflags.FlagValues.Reset(self)
        self.__dict__['__dirty'] = []
        self.__dict__['__was_already_parsed'] = False
        self.__dict__['__stored_argv'] = []

    def SetDirty(self, name):
        """Mark a flag as dirty so that accessing it will cause a reparse."""
        self.__dict__['__dirty'].append(name)

    def IsDirty(self, name):
        return name in self.__dict__['__dirty']

    def ClearDirty(self):
        # FIX: this previously assigned to '__is_dirty', a key nothing else
        # reads, so the real '__dirty' list (used by SetDirty/IsDirty) was
        # never emptied and every dirty flag triggered a reparse forever.
        self.__dict__['__dirty'] = []

    def WasAlreadyParsed(self):
        return self.__dict__['__was_already_parsed']

    def ParseNewFlags(self):
        """Replay the stored command line against flags defined at runtime."""
        if '__stored_argv' not in self.__dict__:
            return
        new_flags = FlagValues(self)
        for k in self.__dict__['__dirty']:
            new_flags[k] = gflags.FlagValues.__getitem__(self, k)

        new_flags(self.__dict__['__stored_argv'])
        for k in self.__dict__['__dirty']:
            setattr(self, k, getattr(new_flags, k))
        self.ClearDirty()

    def __setitem__(self, name, flag):
        gflags.FlagValues.__setitem__(self, name, flag)
        if self.WasAlreadyParsed():
            # Flag defined after parsing: reparse lazily on first access.
            self.SetDirty(name)

    def __getitem__(self, name):
        if self.IsDirty(name):
            self.ParseNewFlags()
        return gflags.FlagValues.__getitem__(self, name)

    def __getattr__(self, name):
        if self.IsDirty(name):
            self.ParseNewFlags()
        val = gflags.FlagValues.__getattr__(self, name)
        if type(val) is str:
            # String flag values may reference other flags with $name
            # templates (e.g. '$my_ip'); expand them on access.
            tmpl = string.Template(val)
            context = [self, self.__dict__['__extra_context']]
            return tmpl.substitute(StrWrapper(context))
        return val


class StrWrapper(object):
    """Wrapper around FlagValues objects.

    Wraps FlagValues objects for string.Template so that we're
    sure to return strings.
    """

    def __init__(self, context_objs):
        self.context_objs = context_objs

    def __getitem__(self, name):
        for context in self.context_objs:
            val = getattr(context, name, False)
            if val:
                return str(val)
        raise KeyError(name)


FLAGS = FlagValues()
gflags.FLAGS = FLAGS
gflags.DEFINE_flag(gflags.HelpFlag(), FLAGS)


def _wrapper(func):
    """Wrap a gflags DEFINE_* so it defaults to our FLAGS registry."""
    def _wrapped(*args, **kw):
        kw.setdefault('flag_values', FLAGS)
        func(*args, **kw)
    _wrapped.func_name = func.func_name
    return _wrapped


DEFINE = _wrapper(gflags.DEFINE)
DEFINE_string = _wrapper(gflags.DEFINE_string)
DEFINE_integer = _wrapper(gflags.DEFINE_integer)
DEFINE_bool = _wrapper(gflags.DEFINE_bool)
DEFINE_boolean = _wrapper(gflags.DEFINE_boolean)
DEFINE_float = _wrapper(gflags.DEFINE_float)
DEFINE_enum = _wrapper(gflags.DEFINE_enum)
DEFINE_list = _wrapper(gflags.DEFINE_list)
DEFINE_spaceseplist = _wrapper(gflags.DEFINE_spaceseplist)
DEFINE_multistring = _wrapper(gflags.DEFINE_multistring)
DEFINE_multi_int = _wrapper(gflags.DEFINE_multi_int)
DEFINE_flag = _wrapper(gflags.DEFINE_flag)

HelpFlag = gflags.HelpFlag
HelpshortFlag = gflags.HelpshortFlag
HelpXMLFlag = gflags.HelpXMLFlag


def DECLARE(name, module_string, flag_values=FLAGS):
    """Assert that `name` is a flag defined by `module_string`.

    Imports the module if needed; raises UnrecognizedFlag otherwise.
    """
    if module_string not in sys.modules:
        __import__(module_string, globals(), locals())
    if name not in flag_values:
        raise gflags.UnrecognizedFlag(
            "%s not defined by %s" % (name, module_string))


def _get_my_ip():
    """Returns the actual ip of the local machine."""
    try:
        # Connecting a UDP socket sends no packets but makes the kernel
        # pick the outbound interface, whose address we then read.
        csock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        csock.connect(('8.8.8.8', 80))
        (addr, port) = csock.getsockname()
        csock.close()
        return addr
    except socket.error:
        # No route at all; fall back to loopback.
        return "127.0.0.1"


# __GLOBAL FLAGS ONLY__
# Define any app-specific flags in their own files, docs at:
# http://code.google.com/p/python-gflags/source/browse/trunk/gflags.py#a9
DEFINE_string('my_ip', _get_my_ip(), 'host ip address')
DEFINE_list('region_list',
            [],
            'list of region=fqdn pairs separated by commas')
DEFINE_string('connection_type', 'libvirt', 'libvirt, xenapi or fake')
DEFINE_string('aws_access_key_id', 'admin', 'AWS Access ID')
DEFINE_string('aws_secret_access_key', 'admin', 'AWS Access Key')
DEFINE_integer('glance_port', 9292, 'glance port')
DEFINE_string('glance_host', '$my_ip', 'glance host')
DEFINE_integer('s3_port', 3333, 's3 port')
DEFINE_string('s3_host', '$my_ip', 's3 host (for infrastructure)')
DEFINE_string('s3_dmz', '$my_ip', 's3 dmz ip (for instances)')
DEFINE_string('compute_topic', 'compute', 'the topic compute nodes listen on')
DEFINE_string('console_topic', 'console',
              'the topic console proxy nodes listen on')
DEFINE_string('scheduler_topic', 'scheduler',
              'the topic scheduler nodes listen on')
DEFINE_string('volume_topic', 'volume', 'the topic volume nodes listen on')
DEFINE_string('network_topic', 'network', 'the topic network nodes listen on')
DEFINE_string('ajax_console_proxy_topic', 'ajax_proxy',
              'the topic ajax proxy nodes listen on')
DEFINE_string('ajax_console_proxy_url',
              'http://127.0.0.1:8000',
              'location of ajax console proxy, \
in the form "http://127.0.0.1:8000"')
# NOTE(review): an int default is passed to DEFINE_string here; this flag
# probably wants to be DEFINE_integer — confirm with callers before changing
# the flag's type.
DEFINE_string('ajax_console_proxy_port',
              8000, 'port that ajax_console_proxy binds')
DEFINE_bool('verbose', False, 'show debug output')
DEFINE_boolean('fake_rabbit', False, 'use a fake rabbit')
DEFINE_bool('fake_network', False,
            'should we use fake network devices and addresses')

# Message queue (rabbit) connection settings.
DEFINE_string('rabbit_host', 'localhost', 'rabbit host')
DEFINE_integer('rabbit_port', 5672, 'rabbit port')
DEFINE_string('rabbit_userid', 'guest', 'rabbit userid')
DEFINE_string('rabbit_password', 'guest', 'rabbit password')
DEFINE_string('rabbit_virtual_host', '/', 'rabbit virtual host')
DEFINE_integer('rabbit_retry_interval', 10, 'rabbit connection retry interval')
DEFINE_integer('rabbit_max_retries', 12, 'rabbit connection attempts')
DEFINE_string('control_exchange', 'nova', 'the main exchange to connect to')

# EC2-compatible API endpoint.  '$name' values are expanded lazily against
# other flags by FlagValues when read.
DEFINE_string('ec2_host', '$my_ip', 'ip of api server')
DEFINE_string('ec2_dmz_host', '$my_ip', 'internal ip of api server')
DEFINE_integer('ec2_port', 8773, 'cloud controller port')
DEFINE_string('ec2_scheme', 'http', 'prefix for ec2')
DEFINE_string('ec2_path', '/services/Cloud', 'suffix for ec2')

# OpenStack API endpoint.
DEFINE_string('osapi_host', '$my_ip', 'ip of api server')
DEFINE_string('osapi_scheme', 'http', 'prefix for openstack')
DEFINE_integer('osapi_port', 8774, 'OpenStack API port')
DEFINE_string('osapi_path', '/v1.0/', 'suffix for openstack')

# Defaults used when a request does not specify a value.
DEFINE_string('default_project', 'openstack', 'default project for openstack')
DEFINE_string('default_image', 'ami-11111',
              'default image to use, testing only')
DEFINE_string('default_instance_type', 'm1.small',
              'default instance type to use, testing only')
DEFINE_string('null_kernel', 'nokernel',
              'kernel image that indicates not to use a kernel,'
              ' but to use a raw disk image instead')

# Cloudpipe VPN image and per-project key naming.
DEFINE_string('vpn_image_id', 'ami-cloudpipe', 'AMI for cloudpipe vpn server')
DEFINE_string('vpn_key_suffix',
              '-vpn',
              'Suffix to add to project name for vpn key and secgroups')

DEFINE_integer('auth_token_ttl', 3600, 'Seconds for auth tokens to linger')

# Filesystem / logging locations.
DEFINE_string('state_path', os.path.join(os.path.dirname(__file__), '../'),
              "Top-level directory for maintaining nova's state")
DEFINE_string('logdir', None, 'output to a per-service log file in named '
              'directory')

# SQL database connection settings; '$state_path' expands like the
# endpoint flags above.
DEFINE_string('sql_connection',
              'sqlite:///$state_path/nova.sqlite',
              'connection string for sql database')
DEFINE_integer('sql_idle_timeout',
               3600,
               'timeout for idle sql database connections')
DEFINE_integer('sql_max_retries', 12, 'sql connection attempts')
DEFINE_integer('sql_retry_interval', 10, 'sql connection retry interval')

# Dotted-path class names of the manager each service loads.
DEFINE_string('compute_manager', 'nova.compute.manager.ComputeManager',
              'Manager for compute')
DEFINE_string('console_manager', 'nova.console.manager.ConsoleProxyManager',
              'Manager for console proxy')
DEFINE_string('network_manager', 'nova.network.manager.VlanManager',
              'Manager for network')
DEFINE_string('volume_manager', 'nova.volume.manager.VolumeManager',
              'Manager for volume')
DEFINE_string('scheduler_manager', 'nova.scheduler.manager.SchedulerManager',
              'Manager for scheduler')

# The service to use for image search and retrieval
DEFINE_string('image_service', 'nova.image.s3.S3ImageService',
              'The service to use for retrieving and searching for images.')

# Identity of this node.
DEFINE_string('host', socket.gethostname(), 'name of this node')
DEFINE_string('node_availability_zone', 'nova',
              'availability zone of this node')
Python
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Fake LDAP server for test harness.

This class does very little error checking, and knows nothing
about ldap class definitions.  It implements the minimum emulation
of the python ldap library to work with fractus.
"""

import fnmatch
import json


class Store(object):
    """Process-wide singleton access point for the fake backing store."""

    def __init__(self):
        # Direct construction is forbidden; go through instance() instead.
        if hasattr(self.__class__, '_instance'):
            raise Exception(_('Attempted to instantiate singleton'))

    @classmethod
    def instance(cls):
        """Return the shared _StorageDict, creating it on first use."""
        if not hasattr(cls, '_instance'):
            cls._instance = _StorageDict()
        return cls._instance


class _StorageDict(dict):
    """Dict exposing a small redis-like hash API for the fake LDAP data."""

    def keys(self, pat=None):
        """Return all keys, optionally filtered by glob pattern `pat`."""
        all_keys = super(_StorageDict, self).keys()
        if pat is None:
            return all_keys
        return fnmatch.filter(all_keys, pat)

    def delete(self, key):
        """Remove `key`; missing keys are silently ignored."""
        self.pop(key, None)

    def flushdb(self):
        """Drop every stored key."""
        self.clear()

    def hgetall(self, key):
        """Returns the hash for the given key; creates the hash if the key
        doesn't exist."""
        return self.setdefault(key, {})

    def hget(self, key, field):
        """Return one field of a hash, inserting an empty dict placeholder
        for a missing field (mirrors the original lazy-create behavior)."""
        return self.hgetall(key).setdefault(field, {})

    def hset(self, key, field, val):
        """Set a single field within the hash stored at `key`."""
        self.hgetall(key)[field] = val

    def hmset(self, key, value_dict):
        """Merge every field of `value_dict` into the hash at `key`."""
        self.hgetall(key).update(value_dict)


SCOPE_BASE = 0
SCOPE_ONELEVEL = 1  # Not implemented
SCOPE_SUBTREE = 2
MOD_ADD = 0
MOD_DELETE = 1
MOD_REPLACE = 2


class NO_SUCH_OBJECT(Exception):  # pylint: disable-msg=C0103
    """Duplicate exception class from real LDAP module."""
    pass


class OBJECT_CLASS_VIOLATION(Exception):  # pylint: disable-msg=C0103
    """Duplicate exception class from real LDAP module."""
    pass


def initialize(_uri):
    """Opens a fake connection with an LDAP server."""
    return FakeLDAP()


def _match_query(query, attrs):
    """Match an ldap query to an attribute dictionary.

    The characters &, |, and ! are supported in the query. No syntax checking
    is performed, so malformed queries will not work correctly.
    """
    # cut off the parentheses
    inner = query[1:-1]
    if inner.startswith('&'):
        # cut off the &
        l, r = _paren_groups(inner[1:])
        return _match_query(l, attrs) and _match_query(r, attrs)
    if inner.startswith('|'):
        # cut off the |
        l, r = _paren_groups(inner[1:])
        return _match_query(l, attrs) or _match_query(r, attrs)
    if inner.startswith('!'):
        # cut off the ! and the nested parentheses
        return not _match_query(query[2:-1], attrs)

    (k, _sep, v) = inner.partition('=')
    return _match(k, v, attrs)


def _paren_groups(source):
    """Split a string into parenthesized groups."""
    count = 0
    start = 0
    result = []
    for pos in xrange(len(source)):
        if source[pos] == '(':
            if count == 0:
                start = pos
            count += 1
        if source[pos] == ')':
            count -= 1
            if count == 0:
                result.append(source[start:pos + 1])
    return result


def _match(key, value, attrs):
    """Match a given key and value against an attribute list."""
    if key not in attrs:
        return False
    # This is a wild card search. Implemented as all or nothing for now.
    if value == "*":
        return True
    if key != "objectclass":
        return value in attrs[key]
    # it is an objectclass check, so check subclasses
    values = _subs(value)
    for v in values:
        if v in attrs[key]:
            return True
    return False


def _subs(value):
    """Returns a list of subclass strings.

    The strings represent the ldap objectclass plus any subclasses that
    inherit from it. Fakeldap doesn't know about the ldap object structure,
    so subclasses need to be defined manually in the dictionary below.
    """
    subs = {'groupOfNames': ['novaProject']}
    if value in subs:
        return [value] + subs[value]
    return [value]


def _from_json(encoded):
    """Convert attribute values from json representation.

    Args:
    encoded -- a json encoded string

    Returns a list of strings
    """
    return [str(x) for x in json.loads(encoded)]


def _to_json(unencoded):
    """Convert attribute values into json representation.

    Args:
    unencoded -- an unencoded string or list of strings.  If it
                 is a single string, it will be converted into a list.

    Returns a json string
    """
    return json.dumps(list(unencoded))


class FakeLDAP(object):
    """Fake LDAP connection."""

    def simple_bind_s(self, dn, password):
        """This method is ignored, but provided for compatibility."""
        pass

    def unbind_s(self):
        """This method is ignored, but provided for compatibility."""
        pass

    def add_s(self, dn, attr):
        """Add an object with the specified attributes at dn."""
        key = "%s%s" % (self.__prefix, dn)
        value_dict = dict([(k, _to_json(v)) for k, v in attr])
        Store.instance().hmset(key, value_dict)

    def delete_s(self, dn):
        """Remove the ldap object at specified dn."""
        Store.instance().delete("%s%s" % (self.__prefix, dn))

    def modify_s(self, dn, attrs):
        """Modify the object at dn using the attribute list.

        Args:
        dn -- a dn
        attrs -- a list of tuples in the following form:
            ([MOD_ADD | MOD_DELETE | MOD_REPLACE], attribute, value)
        """
        store = Store.instance()
        key = "%s%s" % (self.__prefix, dn)

        for cmd, k, v in attrs:
            values = _from_json(store.hget(key, k))
            if cmd == MOD_ADD:
                values.append(v)
            elif cmd == MOD_REPLACE:
                values = [v]
            else:
                values.remove(v)

            # FIX: the original bound hset()'s None return value back to
            # ``values``; hset is called purely for its side effect.
            store.hset(key, k, _to_json(values))

    def search_s(self, dn, scope, query=None, fields=None):
        """Search for all matching objects under dn using the query.

        Args:
        dn -- dn to search under
        scope -- only SCOPE_BASE and SCOPE_SUBTREE are supported
        query -- query to filter objects by
        fields -- fields to return. Returns all fields if not specified
        """
        if scope != SCOPE_BASE and scope != SCOPE_SUBTREE:
            raise NotImplementedError(str(scope))
        store = Store.instance()
        if scope == SCOPE_BASE:
            keys = ["%s%s" % (self.__prefix, dn)]
        else:
            keys = store.keys("%s*%s" % (self.__prefix, dn))

        objects = []
        for key in keys:
            # get the attributes from the store
            attrs = store.hgetall(key)
            # turn the values from the store into lists
            # pylint: disable-msg=E1103
            attrs = dict([(k, _from_json(v))
                          for k, v in attrs.iteritems()])
            # filter the objects by query
            if not query or _match_query(query, attrs):
                # filter the attributes by fields
                attrs = dict([(k, v) for k, v in attrs.iteritems()
                              if not fields or k in fields])
                objects.append((key[len(self.__prefix):], attrs))
            # pylint: enable-msg=E1103
        if not objects:
            raise NO_SUCH_OBJECT()
        return objects

    @property
    def __prefix(self):  # pylint: disable-msg=R0201
        """Get the prefix to use for all keys."""
        return 'ldap:'
Python
# Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # # PORTIONS OF THIS FILE ARE FROM: # http://code.google.com/p/boto # Copyright (c) 2006-2009 Mitch Garnaat http://garnaat.org/ # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. """ Utility class for parsing signed AMI manifests. 
""" import base64 import hashlib import hmac import urllib # NOTE(vish): for new boto import boto # NOTE(vish): for old boto import boto.utils from fractus import log as logging from fractus.exception import Error LOG = logging.getLogger('fractus.signer') class Signer(object): """Hacked up code from boto/connection.py""" def __init__(self, secret_key): self.hmac = hmac.new(secret_key, digestmod=hashlib.sha1) if hashlib.sha256: self.hmac_256 = hmac.new(secret_key, digestmod=hashlib.sha256) def s3_authorization(self, headers, verb, path): """Generate S3 authorization string.""" c_string = boto.utils.canonical_string(verb, path, headers) hmac_copy = self.hmac.copy() hmac_copy.update(c_string) b64_hmac = base64.encodestring(hmac_copy.digest()).strip() return b64_hmac def generate(self, params, verb, server_string, path): """Generate auth string according to what SignatureVersion is given.""" if params['SignatureVersion'] == '0': return self._calc_signature_0(params) if params['SignatureVersion'] == '1': return self._calc_signature_1(params) if params['SignatureVersion'] == '2': return self._calc_signature_2(params, verb, server_string, path) raise Error('Unknown Signature Version: %s' % params['SignatureVersion']) @staticmethod def _get_utf8_value(value): """Get the UTF8-encoded version of a value.""" if not isinstance(value, str) and not isinstance(value, unicode): value = str(value) if isinstance(value, unicode): return value.encode('utf-8') else: return value def _calc_signature_0(self, params): """Generate AWS signature version 0 string.""" s = params['Action'] + params['Timestamp'] self.hmac.update(s) keys = params.keys() keys.sort(cmp=lambda x, y: cmp(x.lower(), y.lower())) pairs = [] for key in keys: val = self._get_utf8_value(params[key]) pairs.append(key + '=' + urllib.quote(val)) return base64.b64encode(self.hmac.digest()) def _calc_signature_1(self, params): """Generate AWS signature version 1 string.""" keys = params.keys() keys.sort(cmp=lambda x, y: 
cmp(x.lower(), y.lower())) pairs = [] for key in keys: self.hmac.update(key) val = self._get_utf8_value(params[key]) self.hmac.update(val) pairs.append(key + '=' + urllib.quote(val)) return base64.b64encode(self.hmac.digest()) def _calc_signature_2(self, params, verb, server_string, path): """Generate AWS signature version 2 string.""" LOG.debug('using _calc_signature_2') string_to_sign = '%s\n%s\n%s\n' % (verb, server_string, path) if self.hmac_256: current_hmac = self.hmac_256 params['SignatureMethod'] = 'HmacSHA256' else: current_hmac = self.hmac params['SignatureMethod'] = 'HmacSHA1' keys = params.keys() keys.sort() pairs = [] for key in keys: val = self._get_utf8_value(params[key]) val = urllib.quote(val, safe='-_~') pairs.append(urllib.quote(key, safe='') + '=' + val) qs = '&'.join(pairs) LOG.debug('query string: %s', qs) string_to_sign += qs LOG.debug('string_to_sign: %s', string_to_sign) current_hmac.update(string_to_sign) b64 = base64.b64encode(current_hmac.digest()) LOG.debug('len(b64)=%d', len(b64)) LOG.debug('base64 encoded digest: %s', b64) return b64 if __name__ == '__main__': print Signer('foo').generate({'SignatureMethod': 'HmacSHA256', 'SignatureVersion': '2'}, 'get', 'server', '/foo')
Python
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
Auth driver using the DB as its backend.
"""

import sys

from fractus import context
from fractus import exception
from fractus import db


class DbDriver(object):
    """DB Auth driver

    Defines enter and exit and therefore supports the with/as syntax.
    """

    def __init__(self):
        # FIX(docstring): this previously said "Imports the LDAP module",
        # copy-pasted from the ldap driver; the DB driver needs no setup.
        """No setup required for the DB driver."""
        pass

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        pass

    def get_user(self, uid):
        """Retrieve user by id"""
        user = db.user_get(context.get_admin_context(), uid)
        return self._db_user_to_auth_user(user)

    def get_user_from_access_key(self, access):
        """Retrieve user by access key"""
        user = db.user_get_by_access_key(context.get_admin_context(), access)
        return self._db_user_to_auth_user(user)

    def get_project(self, pid):
        """Retrieve project by id"""
        project = db.project_get(context.get_admin_context(), pid)
        return self._db_project_to_auth_projectuser(project)

    def get_users(self):
        """Retrieve list of users"""
        return [self._db_user_to_auth_user(user)
                for user in db.user_get_all(context.get_admin_context())]

    def get_projects(self, uid=None):
        """Retrieve list of projects, optionally filtered by member uid"""
        if uid:
            result = db.project_get_by_user(context.get_admin_context(), uid)
        else:
            result = db.project_get_all(context.get_admin_context())
        return [self._db_project_to_auth_projectuser(proj)
                for proj in result]

    def create_user(self, name, access_key, secret_key, is_admin):
        """Create a user"""
        values = {'id': name,
                  'access_key': access_key,
                  'secret_key': secret_key,
                  'is_admin': is_admin}
        try:
            user_ref = db.user_create(context.get_admin_context(), values)
            return self._db_user_to_auth_user(user_ref)
        except exception.Duplicate:
            # FIX: the original bound the caught exception to an unused
            # ``e`` with the deprecated ``except X, e`` syntax.
            raise exception.Duplicate(_('User %s already exists') % name)

    def _db_user_to_auth_user(self, user_ref):
        """Flatten a db user row into the dict shape the auth layer uses."""
        return {'id': user_ref['id'],
                'name': user_ref['id'],
                'access': user_ref['access_key'],
                'secret': user_ref['secret_key'],
                'admin': user_ref['is_admin']}

    def _db_project_to_auth_projectuser(self, project_ref):
        """Flatten a db project row (with members) into an auth dict."""
        member_ids = [member['id'] for member in project_ref['members']]
        return {'id': project_ref['id'],
                'name': project_ref['name'],
                'project_manager_id': project_ref['project_manager'],
                'description': project_ref['description'],
                'member_ids': member_ids}

    def create_project(self, name, manager_uid,
                       description=None, member_uids=None):
        """Create a project"""
        manager = db.user_get(context.get_admin_context(), manager_uid)
        if not manager:
            raise exception.NotFound(_("Project can't be created because "
                                       "manager %s doesn't exist")
                                     % manager_uid)

        # description is a required attribute
        if description is None:
            description = name

        # First, we ensure that all the given users exist before we go
        # on to create the project. This way we won't have to destroy
        # the project again because a user turns out to be invalid.
        members = set([manager])
        if member_uids is not None:
            for member_uid in member_uids:
                member = db.user_get(context.get_admin_context(), member_uid)
                if not member:
                    raise exception.NotFound(_("Project can't be created "
                                               "because user %s doesn't"
                                               " exist") % member_uid)
                members.add(member)

        values = {'id': name,
                  'name': name,
                  'project_manager': manager['id'],
                  'description': description}

        try:
            project = db.project_create(context.get_admin_context(), values)
        except exception.Duplicate:
            raise exception.Duplicate(_("Project can't be created because "
                                        "project %s already exists") % name)

        for member in members:
            db.project_add_member(context.get_admin_context(),
                                  project['id'],
                                  member['id'])

        # This looks silly, but ensures that the members element has been
        # correctly populated
        project_ref = db.project_get(context.get_admin_context(),
                                     project['id'])
        return self._db_project_to_auth_projectuser(project_ref)

    def modify_project(self, project_id, manager_uid=None, description=None):
        """Modify an existing project"""
        if not manager_uid and not description:
            # Nothing to change.
            return
        values = {}
        if manager_uid:
            manager = db.user_get(context.get_admin_context(), manager_uid)
            if not manager:
                raise exception.NotFound(_("Project can't be modified because "
                                           "manager %s doesn't exist")
                                         % manager_uid)
            values['project_manager'] = manager['id']
        if description:
            values['description'] = description

        db.project_update(context.get_admin_context(), project_id, values)

    def add_to_project(self, uid, project_id):
        """Add user to project"""
        user, project = self._validate_user_and_project(uid, project_id)
        db.project_add_member(context.get_admin_context(),
                              project['id'], user['id'])

    def remove_from_project(self, uid, project_id):
        """Remove user from project"""
        user, project = self._validate_user_and_project(uid, project_id)
        db.project_remove_member(context.get_admin_context(),
                                 project['id'], user['id'])

    def is_in_project(self, uid, project_id):
        """Check if user is in project"""
        user, project = self._validate_user_and_project(uid, project_id)
        return user in project.members

    def has_role(self, uid, role, project_id=None):
        """Check if user has role

        If project is specified, it checks for local role, otherwise it
        checks for global role
        """
        return role in self.get_user_roles(uid, project_id)

    def add_role(self, uid, role, project_id=None):
        """Add role for user (or user and project)"""
        if not project_id:
            db.user_add_role(context.get_admin_context(), uid, role)
            return
        db.user_add_project_role(context.get_admin_context(),
                                 uid, project_id, role)

    def remove_role(self, uid, role, project_id=None):
        """Remove role for user (or user and project)"""
        if not project_id:
            db.user_remove_role(context.get_admin_context(), uid, role)
            return
        db.user_remove_project_role(context.get_admin_context(),
                                    uid, project_id, role)

    def get_user_roles(self, uid, project_id=None):
        """Retrieve list of roles for user (or user and project)"""
        if project_id is None:
            roles = db.user_get_roles(context.get_admin_context(), uid)
            return roles
        else:
            roles = db.user_get_roles_for_project(context.get_admin_context(),
                                                  uid, project_id)
            return roles

    def delete_user(self, id):
        """Delete a user

        NOTE: the parameter name shadows the builtin ``id``; kept for
        backward compatibility with keyword callers.
        """
        user = db.user_get(context.get_admin_context(), id)
        db.user_delete(context.get_admin_context(), user['id'])

    def delete_project(self, project_id):
        """Delete a project"""
        db.project_delete(context.get_admin_context(), project_id)

    def modify_user(self, uid, access_key=None, secret_key=None, admin=None):
        """Modify an existing user"""
        if not access_key and not secret_key and admin is None:
            # Nothing to change.
            return
        values = {}
        if access_key:
            values['access_key'] = access_key
        if secret_key:
            values['secret_key'] = secret_key
        if admin is not None:
            values['is_admin'] = admin
        db.user_update(context.get_admin_context(), uid, values)

    def _validate_user_and_project(self, user_id, project_id):
        """Fetch both rows, raising NotFound when either is missing."""
        user = db.user_get(context.get_admin_context(), user_id)
        if not user:
            raise exception.NotFound(_('User "%s" not found') % user_id)
        project = db.project_get(context.get_admin_context(), project_id)
        if not project:
            raise exception.NotFound(_('Project "%s" not found') % project_id)
        return user, project
Python
# Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Auth driver for ldap. Includes FakeLdapDriver. It should be easy to create a replacement for this driver supporting other backends by creating another class that exposes the same public methods. """ import sys from fractus import exception from fractus import flags from fractus import log as logging FLAGS = flags.FLAGS flags.DEFINE_integer('ldap_schema_version', 2, 'Current version of the LDAP schema') flags.DEFINE_string('ldap_url', 'ldap://localhost', 'Point this at your ldap server') flags.DEFINE_string('ldap_password', 'changeme', 'LDAP password') flags.DEFINE_string('ldap_user_dn', 'cn=Manager,dc=example,dc=com', 'DN of admin user') flags.DEFINE_string('ldap_user_id_attribute', 'uid', 'Attribute to use as id') flags.DEFINE_string('ldap_user_name_attribute', 'cn', 'Attribute to use as name') flags.DEFINE_string('ldap_user_unit', 'Users', 'OID for Users') flags.DEFINE_string('ldap_user_subtree', 'ou=Users,dc=example,dc=com', 'OU for Users') flags.DEFINE_boolean('ldap_user_modify_only', False, 'Modify attributes for users instead of creating/deleting') flags.DEFINE_string('ldap_project_subtree', 'ou=Groups,dc=example,dc=com', 'OU for Projects') flags.DEFINE_string('role_project_subtree', 'ou=Groups,dc=example,dc=com', 'OU for Roles') # NOTE(vish): mapping with these flags 
flags.DEFINE_string('ldap_cloudadmin',
                    'cn=cloudadmins,ou=Groups,dc=example,dc=com',
                    'cn for Cloud Admins')
flags.DEFINE_string('ldap_itsec', 'cn=itsec,ou=Groups,dc=example,dc=com',
                    'cn for ItSec')
flags.DEFINE_string('ldap_sysadmin',
                    'cn=sysadmins,ou=Groups,dc=example,dc=com',
                    'cn for Sysadmins')
flags.DEFINE_string('ldap_netadmin',
                    'cn=netadmins,ou=Groups,dc=example,dc=com',
                    'cn for NetAdmins')
flags.DEFINE_string('ldap_developer',
                    'cn=developers,ou=Groups,dc=example,dc=com',
                    'cn for Developers')

LOG = logging.getLogger("fractus.ldapdriver")


# TODO(vish): make an abstract base class with the same public methods
#             to define a set interface for AuthDrivers. I'm delaying
#             creating this now because I'm expecting an auth refactor
#             in which we may want to change the interface a bit more.


def _clean(attr):
    """Clean attr for insertion into ldap.

    None passes through; unicode values are coerced to byte strings
    because python-ldap expects str, not unicode.
    """
    if attr is None:
        return None
    if type(attr) is unicode:
        return str(attr)
    return attr


def sanitize(fn):
    """Decorator to sanitize all args before handing them to python-ldap."""
    def _wrapped(self, *args, **kwargs):
        args = [_clean(x) for x in args]
        # NOTE: iterate key/value pairs explicitly; iterating the dict
        # directly yields only keys and blew up with a ValueError on any
        # keyword argument.
        kwargs = dict((k, _clean(v)) for (k, v) in kwargs.iteritems())
        return fn(self, *args, **kwargs)
    _wrapped.func_name = fn.func_name
    return _wrapped


class LdapDriver(object):
    """Ldap Auth driver

    Defines enter and exit and therefore supports the with/as syntax.
    All public methods are @sanitize'd so arguments reach the directory
    as plain byte strings.
    """

    # Defaults are the v2 schema; __init__ swaps these class attributes
    # for the legacy names when ldap_schema_version == 1.
    project_pattern = '(owner=*)'
    isadmin_attribute = 'isFractusAdmin'
    project_attribute = 'owner'
    project_objectclass = 'groupOfNames'

    def __init__(self):
        """Imports the LDAP module"""
        self.ldap = __import__('ldap')
        self.conn = None
        if FLAGS.ldap_schema_version == 1:
            # NOTE(review): this mutates class attributes, so the legacy
            # schema applies process-wide once any driver is constructed —
            # presumably intentional; confirm before changing.
            LdapDriver.project_pattern = '(objectclass=fractusProject)'
            LdapDriver.isadmin_attribute = 'isAdmin'
            LdapDriver.project_attribute = 'projectManager'
            LdapDriver.project_objectclass = 'fractusProject'

    def __enter__(self):
        """Creates the connection to LDAP"""
        self.conn = self.ldap.initialize(FLAGS.ldap_url)
        self.conn.simple_bind_s(FLAGS.ldap_user_dn, FLAGS.ldap_password)
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        """Destroys the connection to LDAP"""
        self.conn.unbind_s()
        # Returning False propagates any exception from the with-block.
        return False

    @sanitize
    def get_user(self, uid):
        """Retrieve user by id; returns a dict or None."""
        attr = self.__get_ldap_user(uid)
        return self.__to_user(attr)

    @sanitize
    def get_user_from_access_key(self, access):
        """Retrieve user by access key"""
        query = '(accessKey=%s)' % access
        dn = FLAGS.ldap_user_subtree
        return self.__to_user(self.__find_object(dn, query))

    @sanitize
    def get_project(self, pid):
        """Retrieve project by id; returns a dict or None."""
        dn = self.__project_to_dn(pid)
        attr = self.__find_object(dn, LdapDriver.project_pattern)
        return self.__to_project(attr)

    @sanitize
    def get_users(self):
        """Retrieve list of users"""
        attrs = self.__find_objects(FLAGS.ldap_user_subtree,
                                    '(objectclass=fractusUser)')
        users = []
        for attr in attrs:
            user = self.__to_user(attr)
            # Malformed entries (missing keys) convert to None; skip them.
            if user is not None:
                users.append(user)
        return users

    @sanitize
    def get_projects(self, uid=None):
        """Retrieve list of projects, optionally only those uid belongs to."""
        pattern = LdapDriver.project_pattern
        if uid:
            pattern = "(&%s(member=%s))" % (pattern, self.__uid_to_dn(uid))
        attrs = self.__find_objects(FLAGS.ldap_project_subtree, pattern)
        return [self.__to_project(attr) for attr in attrs]

    @sanitize
    def create_user(self, name, access_key, secret_key, is_admin):
        """Create a user.

        Raises Duplicate if the user already exists; with
        ldap_user_modify_only set, only attributes on a pre-existing
        entry are updated (NotFound if no such entry).
        """
        if self.__user_exists(name):
            raise exception.Duplicate(_("LDAP user %s already exists") % name)
        if FLAGS.ldap_user_modify_only:
            if self.__ldap_user_exists(name):
                # Retrieve user by name
                user = self.__get_ldap_user(name)
                # Entry could be malformed, test for missing attrs.
                # Malformed entries are useless, replace attributes found.
                attr = []
                if 'secretKey' in user.keys():
                    attr.append((self.ldap.MOD_REPLACE, 'secretKey',
                                 [secret_key]))
                else:
                    attr.append((self.ldap.MOD_ADD, 'secretKey',
                                 [secret_key]))
                if 'accessKey' in user.keys():
                    attr.append((self.ldap.MOD_REPLACE, 'accessKey',
                                 [access_key]))
                else:
                    attr.append((self.ldap.MOD_ADD, 'accessKey',
                                 [access_key]))
                if LdapDriver.isadmin_attribute in user.keys():
                    attr.append((self.ldap.MOD_REPLACE,
                                 LdapDriver.isadmin_attribute,
                                 [str(is_admin).upper()]))
                else:
                    attr.append((self.ldap.MOD_ADD,
                                 LdapDriver.isadmin_attribute,
                                 [str(is_admin).upper()]))
                self.conn.modify_s(self.__uid_to_dn(name), attr)
                return self.get_user(name)
            else:
                raise exception.NotFound(_("LDAP object for %s doesn't exist")
                                         % name)
        else:
            attr = [
                ('objectclass', ['person',
                                 'organizationalPerson',
                                 'inetOrgPerson',
                                 'fractusUser']),
                ('ou', [FLAGS.ldap_user_unit]),
                (FLAGS.ldap_user_id_attribute, [name]),
                ('sn', [name]),
                (FLAGS.ldap_user_name_attribute, [name]),
                ('secretKey', [secret_key]),
                ('accessKey', [access_key]),
                (LdapDriver.isadmin_attribute, [str(is_admin).upper()]),
            ]
            self.conn.add_s(self.__uid_to_dn(name), attr)
            return self.__to_user(dict(attr))

    @sanitize
    def create_project(self, name, manager_uid, description=None,
                       member_uids=None):
        """Create a project.

        The manager is always added as a member ('member' is a required
        attribute of groupOfNames). Raises Duplicate/NotFound on bad input.
        """
        if self.__project_exists(name):
            raise exception.Duplicate(_("Project can't be created because "
                                        "project %s already exists") % name)
        if not self.__user_exists(manager_uid):
            raise exception.NotFound(_("Project can't be created because "
                                       "manager %s doesn't exist")
                                     % manager_uid)
        manager_dn = self.__uid_to_dn(manager_uid)
        # description is a required attribute
        if description is None:
            description = name
        members = []
        if member_uids is not None:
            for member_uid in member_uids:
                if not self.__user_exists(member_uid):
                    raise exception.NotFound(_("Project can't be created "
                                               "because user %s doesn't exist")
                                             % member_uid)
                members.append(self.__uid_to_dn(member_uid))
        # always add the manager as a member because members is required
        if not manager_dn in members:
            members.append(manager_dn)
        attr = [
            ('objectclass', [LdapDriver.project_objectclass]),
            ('cn', [name]),
            ('description', [description]),
            (LdapDriver.project_attribute, [manager_dn]),
            ('member', members)]
        dn = self.__project_to_dn(name, search=False)
        self.conn.add_s(dn, attr)
        return self.__to_project(dict(attr))

    @sanitize
    def modify_project(self, project_id, manager_uid=None, description=None):
        """Modify an existing project; no-op if nothing to change."""
        if not manager_uid and not description:
            return
        attr = []
        if manager_uid:
            if not self.__user_exists(manager_uid):
                raise exception.NotFound(_("Project can't be modified because "
                                           "manager %s doesn't exist")
                                         % manager_uid)
            manager_dn = self.__uid_to_dn(manager_uid)
            attr.append((self.ldap.MOD_REPLACE, LdapDriver.project_attribute,
                         manager_dn))
        if description:
            attr.append((self.ldap.MOD_REPLACE, 'description', description))
        dn = self.__project_to_dn(project_id)
        self.conn.modify_s(dn, attr)

    @sanitize
    def add_to_project(self, uid, project_id):
        """Add user to project"""
        dn = self.__project_to_dn(project_id)
        return self.__add_to_group(uid, dn)

    @sanitize
    def remove_from_project(self, uid, project_id):
        """Remove user from project"""
        dn = self.__project_to_dn(project_id)
        return self.__remove_from_group(uid, dn)

    @sanitize
    def is_in_project(self, uid, project_id):
        """Check if user is in project"""
        dn = self.__project_to_dn(project_id)
        return self.__is_in_group(uid, dn)

    @sanitize
    def has_role(self, uid, role, project_id=None):
        """Check if user has role

        If project is specified, it checks for local role, otherwise it
        checks for global role
        """
        role_dn = self.__role_to_dn(role, project_id)
        return self.__is_in_group(uid, role_dn)

    @sanitize
    def add_role(self, uid, role, project_id=None):
        """Add role for user (or user and project)"""
        role_dn = self.__role_to_dn(role, project_id)
        if not self.__group_exists(role_dn):
            # create the role if it doesn't exist; __create_group adds
            # uid as the initial member, so no separate add is needed
            description = '%s role for %s' % (role, project_id)
            self.__create_group(role_dn, role, uid, description)
        else:
            return self.__add_to_group(uid, role_dn)

    @sanitize
    def remove_role(self, uid, role, project_id=None):
        """Remove role for user (or user and project)"""
        role_dn = self.__role_to_dn(role, project_id)
        return self.__remove_from_group(uid, role_dn)

    @sanitize
    def get_user_roles(self, uid, project_id=None):
        """Retrieve list of roles for user (or user and project)"""
        if project_id is None:
            # NOTE(vish): This is unneccesarily slow, but since we can't
            #             guarantee that the global roles are located
            #             together in the ldap tree, we're doing this version.
            roles = []
            for role in FLAGS.allowed_roles:
                role_dn = self.__role_to_dn(role)
                if self.__is_in_group(uid, role_dn):
                    roles.append(role)
            return roles
        else:
            # Local roles are groups under the project DN that are not
            # themselves projects and list uid as a member.
            project_dn = self.__project_to_dn(project_id)
            query = ('(&(&(objectclass=groupOfNames)(!%s))(member=%s))' %
                     (LdapDriver.project_pattern, self.__uid_to_dn(uid)))
            roles = self.__find_objects(project_dn, query)
            return [role['cn'][0] for role in roles]

    @sanitize
    def delete_user(self, uid):
        """Delete a user, first detaching it from all roles and projects."""
        if not self.__user_exists(uid):
            raise exception.NotFound(_("User %s doesn't exist") % uid)
        self.__remove_from_all(uid)
        if FLAGS.ldap_user_modify_only:
            # Delete attributes
            attr = []
            # Retrieve user by name
            user = self.__get_ldap_user(uid)
            if 'secretKey' in user.keys():
                attr.append((self.ldap.MOD_DELETE, 'secretKey',
                             user['secretKey']))
            if 'accessKey' in user.keys():
                attr.append((self.ldap.MOD_DELETE, 'accessKey',
                             user['accessKey']))
            if LdapDriver.isadmin_attribute in user.keys():
                attr.append((self.ldap.MOD_DELETE,
                             LdapDriver.isadmin_attribute,
                             user[LdapDriver.isadmin_attribute]))
            self.conn.modify_s(self.__uid_to_dn(uid), attr)
        else:
            # Delete entry
            self.conn.delete_s(self.__uid_to_dn(uid))
@sanitize def delete_project(self, project_id): """Delete a project""" project_dn = self.__project_to_dn(project_id) self.__delete_roles(project_dn) self.__delete_group(project_dn) @sanitize def modify_user(self, uid, access_key=None, secret_key=None, admin=None): """Modify an existing user""" if not access_key and not secret_key and admin is None: return attr = [] if access_key: attr.append((self.ldap.MOD_REPLACE, 'accessKey', access_key)) if secret_key: attr.append((self.ldap.MOD_REPLACE, 'secretKey', secret_key)) if admin is not None: attr.append((self.ldap.MOD_REPLACE, LdapDriver.isadmin_attribute, str(admin).upper())) self.conn.modify_s(self.__uid_to_dn(uid), attr) def __user_exists(self, uid): """Check if user exists""" return self.get_user(uid) is not None def __ldap_user_exists(self, uid): """Check if the user exists in ldap""" return self.__get_ldap_user(uid) is not None def __project_exists(self, project_id): """Check if project exists""" return self.get_project(project_id) is not None def __get_ldap_user(self, uid): """Retrieve LDAP user entry by id""" dn = FLAGS.ldap_user_subtree query = ('(&(%s=%s)(objectclass=novaUser))' % (FLAGS.ldap_user_id_attribute, uid)) return self.__find_object(dn, query) def __find_object(self, dn, query=None, scope=None): """Find an object by dn and query""" objects = self.__find_objects(dn, query, scope) if len(objects) == 0: return None return objects[0] def __find_dns(self, dn, query=None, scope=None): """Find dns by query""" if scope is None: # One of the flags is 0! scope = self.ldap.SCOPE_SUBTREE try: res = self.conn.search_s(dn, scope, query) except self.ldap.NO_SUCH_OBJECT: return [] # Just return the DNs return [dn for dn, _attributes in res] def __find_objects(self, dn, query=None, scope=None): """Find objects by query""" if scope is None: # One of the flags is 0! 
scope = self.ldap.SCOPE_SUBTREE try: res = self.conn.search_s(dn, scope, query) except self.ldap.NO_SUCH_OBJECT: return [] # Just return the attributes return [attributes for dn, attributes in res] def __find_role_dns(self, tree): """Find dns of role objects in given tree""" query = ('(&(objectclass=groupOfNames)(!%s))' % LdapDriver.project_pattern) return self.__find_dns(tree, query) def __find_group_dns_with_member(self, tree, uid): """Find dns of group objects in a given tree that contain member""" query = ('(&(objectclass=groupOfNames)(member=%s))' % self.__uid_to_dn(uid)) dns = self.__find_dns(tree, query) return dns def __group_exists(self, dn): """Check if group exists""" query = '(objectclass=groupOfNames)' return self.__find_object(dn, query) is not None def __role_to_dn(self, role, project_id=None): """Convert role to corresponding dn""" if project_id is None: return FLAGS.__getitem__("ldap_%s" % role).value else: project_dn = self.__project_to_dn(project_id) return 'cn=%s,%s' % (role, project_dn) def __create_group(self, group_dn, name, uid, description, member_uids=None): """Create a group""" if self.__group_exists(group_dn): raise exception.Duplicate(_("Group can't be created because " "group %s already exists") % name) members = [] if member_uids is not None: for member_uid in member_uids: if not self.__user_exists(member_uid): raise exception.NotFound(_("Group can't be created " "because user %s doesn't exist") % member_uid) members.append(self.__uid_to_dn(member_uid)) dn = self.__uid_to_dn(uid) if not dn in members: members.append(dn) attr = [ ('objectclass', ['groupOfNames']), ('cn', [name]), ('description', [description]), ('member', members)] self.conn.add_s(group_dn, attr) def __is_in_group(self, uid, group_dn): """Check if user is in group""" if not self.__user_exists(uid): raise exception.NotFound(_("User %s can't be searched in group " "because the user doesn't exist") % uid) if not self.__group_exists(group_dn): return False res = 
self.__find_object(group_dn, '(member=%s)' % self.__uid_to_dn(uid), self.ldap.SCOPE_BASE) return res is not None def __add_to_group(self, uid, group_dn): """Add user to group""" if not self.__user_exists(uid): raise exception.NotFound(_("User %s can't be added to the group " "because the user doesn't exist") % uid) if not self.__group_exists(group_dn): raise exception.NotFound(_("The group at dn %s doesn't exist") % group_dn) if self.__is_in_group(uid, group_dn): raise exception.Duplicate(_("User %(uid)s is already a member of " "the group %(group_dn)s") % locals()) attr = [(self.ldap.MOD_ADD, 'member', self.__uid_to_dn(uid))] self.conn.modify_s(group_dn, attr) def __remove_from_group(self, uid, group_dn): """Remove user from group""" if not self.__group_exists(group_dn): raise exception.NotFound(_("The group at dn %s doesn't exist") % group_dn) if not self.__user_exists(uid): raise exception.NotFound(_("User %s can't be removed from the " "group because the user doesn't exist") % uid) if not self.__is_in_group(uid, group_dn): raise exception.NotFound(_("User %s is not a member of the group") % uid) # NOTE(vish): remove user from group and any sub_groups sub_dns = self.__find_group_dns_with_member(group_dn, uid) for sub_dn in sub_dns: self.__safe_remove_from_group(uid, sub_dn) def __safe_remove_from_group(self, uid, group_dn): """Remove user from group, deleting group if user is last member""" # FIXME(vish): what if deleted user is a project manager? attr = [(self.ldap.MOD_DELETE, 'member', self.__uid_to_dn(uid))] try: self.conn.modify_s(group_dn, attr) except self.ldap.OBJECT_CLASS_VIOLATION: LOG.debug(_("Attempted to remove the last member of a group. 
" "Deleting the group at %s instead."), group_dn) self.__delete_group(group_dn) def __remove_from_all(self, uid): """Remove user from all roles and projects""" if not self.__user_exists(uid): raise exception.NotFound(_("User %s can't be removed from all " "because the user doesn't exist") % uid) role_dns = self.__find_group_dns_with_member( FLAGS.role_project_subtree, uid) for role_dn in role_dns: self.__safe_remove_from_group(uid, role_dn) project_dns = self.__find_group_dns_with_member( FLAGS.ldap_project_subtree, uid) for project_dn in project_dns: self.__safe_remove_from_group(uid, project_dn) def __delete_group(self, group_dn): """Delete Group""" if not self.__group_exists(group_dn): raise exception.NotFound(_("Group at dn %s doesn't exist") % group_dn) self.conn.delete_s(group_dn) def __delete_roles(self, project_dn): """Delete all roles for project""" for role_dn in self.__find_role_dns(project_dn): self.__delete_group(role_dn) def __to_project(self, attr): """Convert ldap attributes to Project object""" if attr is None: return None member_dns = attr.get('member', []) return { 'id': attr['cn'][0], 'name': attr['cn'][0], 'project_manager_id': self.__dn_to_uid(attr[LdapDriver.project_attribute][0]), 'description': attr.get('description', [None])[0], 'member_ids': [self.__dn_to_uid(x) for x in member_dns]} def __uid_to_dn(self, uid, search=True): """Convert uid to dn""" # By default return a generated DN userdn = (FLAGS.ldap_user_id_attribute + '=%s,%s' % (uid, FLAGS.ldap_user_subtree)) if search: query = ('%s=%s' % (FLAGS.ldap_user_id_attribute, uid)) user = self.__find_dns(FLAGS.ldap_user_subtree, query) if len(user) > 0: userdn = user[0] return userdn def __project_to_dn(self, pid, search=True): """Convert pid to dn""" # By default return a generated DN projectdn = ('cn=%s,%s' % (pid, FLAGS.ldap_project_subtree)) if search: query = ('(&(cn=%s)%s)' % (pid, LdapDriver.project_pattern)) project = self.__find_dns(FLAGS.ldap_project_subtree, query) if 
len(project) > 0: projectdn = project[0] return projectdn @staticmethod def __to_user(attr): """Convert ldap attributes to User object""" if attr is None: return None if ('accessKey' in attr.keys() and 'secretKey' in attr.keys() \ and LdapDriver.isadmin_attribute in attr.keys()): return { 'id': attr[FLAGS.ldap_user_id_attribute][0], 'name': attr[FLAGS.ldap_user_name_attribute][0], 'access': attr['accessKey'][0], 'secret': attr['secretKey'][0], 'admin': (attr[LdapDriver.isadmin_attribute][0] == 'TRUE')} else: return None def __dn_to_uid(self, dn): """Convert user dn to uid""" query = '(objectclass=fractusUser)' user = self.__find_object(dn, query) return user[FLAGS.ldap_user_id_attribute][0] class FakeLdapDriver(LdapDriver): """Fake Ldap Auth driver""" def __init__(self): # pylint: disable-msg=W0231 __import__('fractus.auth.fakeldap') self.ldap = sys.modules['fractus.auth.fakeldap']
Python
# Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Fractus authentication management """ import os import shutil import string # pylint: disable-msg=W0402 import tempfile import uuid import zipfile from fractus import context from fractus import crypto from fractus import db from fractus import exception from fractus import flags from fractus import log as logging from fractus import utils from fractus.auth import signer FLAGS = flags.FLAGS flags.DEFINE_list('allowed_roles', ['cloudadmin', 'itsec', 'sysadmin', 'netadmin', 'developer'], 'Allowed roles for project') # NOTE(vish): a user with one of these roles will be a superuser and # have access to all api commands flags.DEFINE_list('superuser_roles', ['cloudadmin'], 'Roles that ignore authorization checking completely') # NOTE(vish): a user with one of these roles will have it for every # project, even if he or she is not a member of the project flags.DEFINE_list('global_roles', ['cloudadmin', 'itsec'], 'Roles that apply to all projects') flags.DEFINE_string('credentials_template', utils.abspath('auth/novarc.template'), 'Template for creating users rc file') flags.DEFINE_string('vpn_client_template', utils.abspath('cloudpipe/client.ovpn.template'), 'Template for creating users vpn file') flags.DEFINE_string('credential_vpn_file', 'nova-vpn.conf', 'Filename of certificate in 
credentials zip') flags.DEFINE_string('credential_key_file', 'pk.pem', 'Filename of private key in credentials zip') flags.DEFINE_string('credential_cert_file', 'cert.pem', 'Filename of certificate in credentials zip') flags.DEFINE_string('credential_rc_file', '%src', 'Filename of rc in credentials zip, %s will be ' 'replaced by name of the region (nova by default)') flags.DEFINE_string('auth_driver', 'nova.auth.dbdriver.DbDriver', 'Driver that auth manager uses') LOG = logging.getLogger('nova.auth.manager') class AuthBase(object): """Base class for objects relating to auth Objects derived from this class should be stupid data objects with an id member. They may optionally contain methods that delegate to AuthManager, but should not implement logic themselves. """ @classmethod def safe_id(cls, obj): """Safely get object id. This method will return the id of the object if the object is of this class, otherwise it will return the original object. This allows methods to accept objects or ids as paramaters. 
""" if isinstance(obj, cls): return obj.id else: return obj class User(AuthBase): """Object representing a user""" def __init__(self, id, name, access, secret, admin): AuthBase.__init__(self) self.id = id self.name = name self.access = access self.secret = secret self.admin = admin def is_superuser(self): return AuthManager().is_superuser(self) def is_admin(self): return AuthManager().is_admin(self) def has_role(self, role): return AuthManager().has_role(self, role) def add_role(self, role): return AuthManager().add_role(self, role) def remove_role(self, role): return AuthManager().remove_role(self, role) def is_project_member(self, project): return AuthManager().is_project_member(self, project) def is_project_manager(self, project): return AuthManager().is_project_manager(self, project) def __repr__(self): return "User('%s', '%s', '%s', '%s', %s)" % (self.id, self.name, self.access, self.secret, self.admin) class Project(AuthBase): """Represents a Project returned from the datastore""" def __init__(self, id, name, project_manager_id, description, member_ids): AuthBase.__init__(self) self.id = id self.name = name self.project_manager_id = project_manager_id self.description = description self.member_ids = member_ids @property def project_manager(self): return AuthManager().get_user(self.project_manager_id) @property def vpn_ip(self): ip, _port = AuthManager().get_project_vpn_data(self) return ip @property def vpn_port(self): _ip, port = AuthManager().get_project_vpn_data(self) return port def has_manager(self, user): return AuthManager().is_project_manager(user, self) def has_member(self, user): return AuthManager().is_project_member(user, self) def add_role(self, user, role): return AuthManager().add_role(user, role, self) def remove_role(self, user, role): return AuthManager().remove_role(user, role, self) def has_role(self, user, role): return AuthManager().has_role(user, role, self) def get_credentials(self, user): return AuthManager().get_credentials(user, 
self) def __repr__(self): return "Project('%s', '%s', '%s', '%s', %s)" % \ (self.id, self.name, self.project_manager_id, self.description, self.member_ids) class AuthManager(object): """Manager Singleton for dealing with Users, Projects, and Keypairs Methods accept objects or ids. AuthManager uses a driver object to make requests to the data backend. See ldapdriver for reference. AuthManager also manages associated data related to Auth objects that need to be more accessible, such as vpn ips and ports. """ _instance = None def __new__(cls, *args, **kwargs): """Returns the AuthManager singleton""" if not cls._instance or ('new' in kwargs and kwargs['new']): cls._instance = super(AuthManager, cls).__new__(cls) return cls._instance def __init__(self, driver=None, *args, **kwargs): """Inits the driver from parameter or flag __init__ is run every time AuthManager() is called, so we only reset the driver if it is not set or a new driver is specified. """ self.network_manager = utils.import_object(FLAGS.network_manager) if driver or not getattr(self, 'driver', None): self.driver = utils.import_class(driver or FLAGS.auth_driver) def authenticate(self, access, signature, params, verb='GET', server_string='127.0.0.1:8773', path='/', check_type='ec2', headers=None): """Authenticates AWS request using access key and signature If the project is not specified, attempts to authenticate to a project with the same name as the user. This way, older tools that have no project knowledge will still work. @type access: str @param access: Access key for user in the form "access:project". @type signature: str @param signature: Signature of the request. @type params: list of str @param params: Web paramaters used for the signature. @type verb: str @param verb: Web request verb ('GET' or 'POST'). @type server_string: str @param server_string: Web request server string. @type path: str @param path: Web request path. @type check_type: str @param check_type: Type of signature to check. 
'ec2' for EC2, 's3' for S3. Any other value will cause signature not to be checked. @type headers: list @param headers: HTTP headers passed with the request (only needed for s3 signature checks) @rtype: tuple (User, Project) @return: User and project that the request represents. """ # TODO(vish): check for valid timestamp (access_key, _sep, project_id) = access.partition(':') LOG.debug(_('Looking up user: %r'), access_key) user = self.get_user_from_access_key(access_key) LOG.debug('user: %r', user) if user == None: LOG.audit(_("Failed authorization for access key %s"), access_key) raise exception.NotFound(_('No user found for access key %s') % access_key) # NOTE(vish): if we stop using project name as id we need better # logic to find a default project for user if project_id == '': LOG.debug(_("Using project name = user name (%s)"), user.name) project_id = user.name project = self.get_project(project_id) if project == None: pjid = project_id uname = user.name LOG.audit(_("failed authorization: no project named %(pjid)s" " (user=%(uname)s)") % locals()) raise exception.NotFound(_('No project called %s could be found') % project_id) if not self.is_admin(user) and not self.is_project_member(user, project): uname = user.name uid = user.id pjname = project.name pjid = project.id LOG.audit(_("Failed authorization: user %(uname)s not admin" " and not member of project %(pjname)s") % locals()) raise exception.NotFound(_('User %(uid)s is not a member of' ' project %(pjid)s') % locals()) if check_type == 's3': sign = signer.Signer(user.secret.encode()) expected_signature = sign.s3_authorization(headers, verb, path) LOG.debug('user.secret: %s', user.secret) LOG.debug('expected_signature: %s', expected_signature) LOG.debug('signature: %s', signature) if signature != expected_signature: LOG.audit(_("Invalid signature for user %s"), user.name) raise exception.NotAuthorized(_('Signature does not match')) elif check_type == 'ec2': # NOTE(vish): hmac can't handle unicode, so encode 
ensures that # secret isn't unicode expected_signature = signer.Signer(user.secret.encode()).generate( params, verb, server_string, path) LOG.debug('user.secret: %s', user.secret) LOG.debug('expected_signature: %s', expected_signature) LOG.debug('signature: %s', signature) if signature != expected_signature: LOG.audit(_("Invalid signature for user %s"), user.name) raise exception.NotAuthorized(_('Signature does not match')) return (user, project) def get_access_key(self, user, project): """Get an access key that includes user and project""" if not isinstance(user, User): user = self.get_user(user) return "%s:%s" % (user.access, Project.safe_id(project)) def is_superuser(self, user): """Checks for superuser status, allowing user to bypass authorization @type user: User or uid @param user: User to check. @rtype: bool @return: True for superuser. """ if not isinstance(user, User): user = self.get_user(user) # NOTE(vish): admin flag on user represents superuser if user.admin: return True for role in FLAGS.superuser_roles: if self.has_role(user, role): return True def is_admin(self, user): """Checks for admin status, allowing user to access all projects @type user: User or uid @param user: User to check. @rtype: bool @return: True for admin. """ if not isinstance(user, User): user = self.get_user(user) if self.is_superuser(user): return True for role in FLAGS.global_roles: if self.has_role(user, role): return True def has_role(self, user, role, project=None): """Checks existence of role for user If project is not specified, checks for a global role. If project is specified, checks for the union of the global role and the project role. Role 'projectmanager' only works for projects and simply checks to see if the user is the project_manager of the specified project. It is the same as calling is_project_manager(user, project). @type user: User or uid @param user: User to check. @type role: str @param role: Role to check. 
@type project: Project or project_id @param project: Project in which to look for local role. @rtype: bool @return: True if the user has the role. """ with self.driver() as drv: if role == 'projectmanager': if not project: raise exception.Error(_("Must specify project")) return self.is_project_manager(user, project) global_role = drv.has_role(User.safe_id(user), role, None) if not global_role: return global_role if not project or role in FLAGS.global_roles: return global_role return drv.has_role(User.safe_id(user), role, Project.safe_id(project)) def add_role(self, user, role, project=None): """Adds role for user If project is not specified, adds a global role. If project is specified, adds a local role. The 'projectmanager' role is special and can't be added or removed. @type user: User or uid @param user: User to which to add role. @type role: str @param role: Role to add. @type project: Project or project_id @param project: Project in which to add local role. """ if role not in FLAGS.allowed_roles: raise exception.NotFound(_("The %s role can not be found") % role) if project is not None and role in FLAGS.global_roles: raise exception.NotFound(_("The %s role is global only") % role) uid = User.safe_id(user) pid = Project.safe_id(project) if project: LOG.audit(_("Adding role %(role)s to user %(uid)s" " in project %(pid)s") % locals()) else: LOG.audit(_("Adding sitewide role %(role)s to user %(uid)s") % locals()) with self.driver() as drv: drv.add_role(uid, role, pid) def remove_role(self, user, role, project=None): """Removes role for user If project is not specified, removes a global role. If project is specified, removes a local role. The 'projectmanager' role is special and can't be added or removed. @type user: User or uid @param user: User from which to remove role. @type role: str @param role: Role to remove. @type project: Project or project_id @param project: Project in which to remove local role. 
""" uid = User.safe_id(user) pid = Project.safe_id(project) if project: LOG.audit(_("Removing role %(role)s from user %(uid)s" " on project %(pid)s") % locals()) else: LOG.audit(_("Removing sitewide role %(role)s" " from user %(uid)s") % locals()) with self.driver() as drv: drv.remove_role(uid, role, pid) @staticmethod def get_roles(project_roles=True): """Get list of allowed roles""" if project_roles: return list(set(FLAGS.allowed_roles) - set(FLAGS.global_roles)) else: return FLAGS.allowed_roles def get_user_roles(self, user, project=None): """Get user global or per-project roles""" with self.driver() as drv: return drv.get_user_roles(User.safe_id(user), Project.safe_id(project)) def get_project(self, pid): """Get project object by id""" with self.driver() as drv: project_dict = drv.get_project(pid) if project_dict: return Project(**project_dict) def get_projects(self, user=None): """Retrieves list of projects, optionally filtered by user""" with self.driver() as drv: project_list = drv.get_projects(User.safe_id(user)) if not project_list: return [] return [Project(**project_dict) for project_dict in project_list] def create_project(self, name, manager_user, description=None, member_users=None): """Create a project @type name: str @param name: Name of the project to create. The name will also be used as the project id. @type manager_user: User or uid @param manager_user: This user will be the project manager. @type description: str @param project: Description of the project. If no description is specified, the name of the project will be used. @type member_users: list of User or uid @param: Initial project members. The project manager will always be added as a member, even if he isn't specified in this list. @rtype: Project @return: The new project. 
""" if member_users: member_users = [User.safe_id(u) for u in member_users] with self.driver() as drv: project_dict = drv.create_project(name, User.safe_id(manager_user), description, member_users) if project_dict: LOG.audit(_("Created project %(name)s with" " manager %(manager_user)s") % locals()) project = Project(**project_dict) return project def modify_project(self, project, manager_user=None, description=None): """Modify a project @type name: Project or project_id @param project: The project to modify. @type manager_user: User or uid @param manager_user: This user will be the new project manager. @type description: str @param project: This will be the new description of the project. """ LOG.audit(_("modifying project %s"), Project.safe_id(project)) if manager_user: manager_user = User.safe_id(manager_user) with self.driver() as drv: drv.modify_project(Project.safe_id(project), manager_user, description) def add_to_project(self, user, project): """Add user to project""" uid = User.safe_id(user) pid = Project.safe_id(project) LOG.audit(_("Adding user %(uid)s to project %(pid)s") % locals()) with self.driver() as drv: return drv.add_to_project(User.safe_id(user), Project.safe_id(project)) def is_project_manager(self, user, project): """Checks if user is project manager""" if not isinstance(project, Project): project = self.get_project(project) return User.safe_id(user) == project.project_manager_id def is_project_member(self, user, project): """Checks to see if user is a member of project""" if not isinstance(project, Project): project = self.get_project(project) return User.safe_id(user) in project.member_ids def remove_from_project(self, user, project): """Removes a user from a project""" uid = User.safe_id(user) pid = Project.safe_id(project) LOG.audit(_("Remove user %(uid)s from project %(pid)s") % locals()) with self.driver() as drv: return drv.remove_from_project(uid, pid) @staticmethod def get_project_vpn_data(project): """Gets vpn ip and port for project 
@type project: Project or project_id @param project: Project from which to get associated vpn data @rvalue: tuple of (str, str) @return: A tuple containing (ip, port) or None, None if vpn has not been allocated for user. """ network_ref = db.project_get_network(context.get_admin_context(), Project.safe_id(project), False) if not network_ref: return (None, None) return (network_ref['vpn_public_address'], network_ref['vpn_public_port']) def delete_project(self, project): """Deletes a project""" LOG.audit(_("Deleting project %s"), Project.safe_id(project)) with self.driver() as drv: drv.delete_project(Project.safe_id(project)) def get_user(self, uid): """Retrieves a user by id""" with self.driver() as drv: user_dict = drv.get_user(uid) if user_dict: return User(**user_dict) def get_user_from_access_key(self, access_key): """Retrieves a user by access key""" with self.driver() as drv: user_dict = drv.get_user_from_access_key(access_key) if user_dict: return User(**user_dict) def get_users(self): """Retrieves a list of all users""" with self.driver() as drv: user_list = drv.get_users() if not user_list: return [] return [User(**user_dict) for user_dict in user_list] def create_user(self, name, access=None, secret=None, admin=False): """Creates a user @type name: str @param name: Name of the user to create. @type access: str @param access: Access Key (defaults to a random uuid) @type secret: str @param secret: Secret Key (defaults to a random uuid) @type admin: bool @param admin: Whether to set the admin flag. The admin flag gives superuser status regardless of roles specifed for the user. @type create_project: bool @param: Whether to create a project for the user with the same name. @rtype: User @return: The new user. 
""" if access == None: access = str(uuid.uuid4()) if secret == None: secret = str(uuid.uuid4()) with self.driver() as drv: user_dict = drv.create_user(name, access, secret, admin) if user_dict: rv = User(**user_dict) rvname = rv.name rvadmin = rv.admin LOG.audit(_("Created user %(rvname)s" " (admin: %(rvadmin)r)") % locals()) return rv def delete_user(self, user): """Deletes a user Additionally deletes all users key_pairs""" uid = User.safe_id(user) LOG.audit(_("Deleting user %s"), uid) db.key_pair_destroy_all_by_user(context.get_admin_context(), uid) with self.driver() as drv: drv.delete_user(uid) def modify_user(self, user, access_key=None, secret_key=None, admin=None): """Modify credentials for a user""" uid = User.safe_id(user) if access_key: LOG.audit(_("Access Key change for user %s"), uid) if secret_key: LOG.audit(_("Secret Key change for user %s"), uid) if admin is not None: LOG.audit(_("Admin status set to %(admin)r" " for user %(uid)s") % locals()) with self.driver() as drv: drv.modify_user(uid, access_key, secret_key, admin) @staticmethod def get_key_pairs(context): return db.key_pair_get_all_by_user(context.elevated(), context.user_id) def get_credentials(self, user, project=None, use_dmz=True): """Get credential zip for user in project""" if not isinstance(user, User): user = self.get_user(user) if project is None: project = user.id pid = Project.safe_id(project) private_key, signed_cert = crypto.generate_x509_cert(user.id, pid) tmpdir = tempfile.mkdtemp() zf = os.path.join(tmpdir, "temp.zip") zippy = zipfile.ZipFile(zf, 'w') if use_dmz and FLAGS.region_list: regions = {} for item in FLAGS.region_list: region, _sep, region_host = item.partition("=") regions[region] = region_host else: regions = {'nova': FLAGS.ec2_host} for region, host in regions.iteritems(): rc = self.__generate_rc(user, pid, use_dmz, host) zippy.writestr(FLAGS.credential_rc_file % region, rc) zippy.writestr(FLAGS.credential_key_file, private_key) 
zippy.writestr(FLAGS.credential_cert_file, signed_cert) (vpn_ip, vpn_port) = self.get_project_vpn_data(project) if vpn_ip: configfile = open(FLAGS.vpn_client_template, "r") s = string.Template(configfile.read()) configfile.close() config = s.substitute(keyfile=FLAGS.credential_key_file, certfile=FLAGS.credential_cert_file, ip=vpn_ip, port=vpn_port) zippy.writestr(FLAGS.credential_vpn_file, config) else: LOG.warn(_("No vpn data for project %s"), pid) zippy.writestr(FLAGS.ca_file, crypto.fetch_ca(pid)) zippy.close() with open(zf, 'rb') as f: read_buffer = f.read() shutil.rmtree(tmpdir) return read_buffer def get_environment_rc(self, user, project=None, use_dmz=True): """Get credential zip for user in project""" if not isinstance(user, User): user = self.get_user(user) if project is None: project = user.id pid = Project.safe_id(project) return self.__generate_rc(user, pid, use_dmz) @staticmethod def __generate_rc(user, pid, use_dmz=True, host=None): """Generate rc file for user""" if use_dmz: ec2_host = FLAGS.ec2_dmz_host else: ec2_host = FLAGS.ec2_host # NOTE(vish): Always use the dmz since it is used from inside the # instance s3_host = FLAGS.s3_dmz if host: s3_host = host ec2_host = host rc = open(FLAGS.credentials_template).read() rc = rc % {'access': user.access, 'project': pid, 'secret': user.secret, 'ec2': '%s://%s:%s%s' % (FLAGS.ec2_scheme, ec2_host, FLAGS.ec2_port, FLAGS.ec2_path), 's3': 'http://%s:%s' % (s3_host, FLAGS.s3_port), 'os': '%s://%s:%s%s' % (FLAGS.osapi_scheme, ec2_host, FLAGS.osapi_port, FLAGS.osapi_path), 'user': user.name, 'nova': FLAGS.ca_file, 'cert': FLAGS.credential_cert_file, 'key': FLAGS.credential_key_file} return rc
Python
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
:mod:`fractus.auth` -- Authentication and Access Control
========================================================

.. automodule:: fractus.auth
   :platform: Unix
   :synopsis: User-and-Project based RBAC using LDAP, SAML.
.. moduleauthor:: Jesse Andrews <jesse@ansolabs.com>
.. moduleauthor:: Vishvananda Ishaya <vishvananda@yahoo.com>
.. moduleauthor:: Joshua McKenty <joshua@cognition.ca>
"""
Python
# Copyright 2011 Fractus Labs (www.fractus-labs.com) # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ :mod:`fractus` -- Data Center Automation ======================================== .. automodule:: fractus :platform: Unix :synopsis: Data Center Automation. .. moduleauthor:: Oscar Larrayoz <olarrayoz@fractus-labs.com> """ from exception import *
Python
# Copyright 2011 Fractus Labs (www.fractus-labs.com)
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Handles all requests relating to agent."""

import datetime
import re
import time

from fractus import db
from fractus import exception
from fractus import flags
from fractus import log as logging
# NOTE(review): the `nova` imports below look like leftovers from a partial
# migration to the `fractus` namespace -- confirm before consolidating.
from nova import network
from nova import quota
from fractus import rpc
from fractus import utils
from nova import volume
from nova.compute import instance_types
from fractus.db import base

FLAGS = flags.FLAGS
LOG = logging.getLogger('fractus.agent.api')


def generate_default_hostname(instance_id):
    """Default function to generate a hostname given an instance reference."""
    return str(instance_id)


class API(base.Base):
    """API for interacting with the compute manager."""

    def __init__(self, image_service=None, network_api=None, volume_api=None,
                 hostname_factory=generate_default_hostname, **kwargs):
        """Load (or accept injected) image/network/volume services.

        :param image_service: image service object; defaults to the class
                              named by ``FLAGS.image_service``
        :param network_api: network API object; defaults to ``network.API()``
        :param volume_api: volume API object; defaults to ``volume.API()``
        :param hostname_factory: callable mapping an instance id to a default
                                 hostname for newly created instances
        """
        if not image_service:
            image_service = utils.import_object(FLAGS.image_service)
        self.image_service = image_service
        if not network_api:
            network_api = network.API()
        self.network_api = network_api
        if not volume_api:
            volume_api = volume.API()
        self.volume_api = volume_api
        self.hostname_factory = hostname_factory
        super(API, self).__init__(**kwargs)

    def get_network_topic(self, context, instance_id):
        """Get the network topic for an instance.

        :raises: exception.NotFound if the instance does not exist
        :raises: exception.Error if the instance has no host assigned
        """
        try:
            instance = self.get(context, instance_id)
        except exception.NotFound:
            LOG.warning(_("Instance %d was not found in get_network_topic"),
                        instance_id)
            raise

        host = instance['host']
        if not host:
            raise exception.Error(_("Instance %d has no host") % instance_id)
        topic = self.db.queue_get_for(context, FLAGS.compute_topic, host)
        return rpc.call(context,
                        topic,
                        {"method": "get_network_topic", "args": {'fake': 1}})

    def create(self, context, instance_type,
               image_id, kernel_id=None, ramdisk_id=None,
               min_count=1, max_count=1,
               display_name='', display_description='',
               key_name=None, key_data=None, security_group='default',
               availability_zone=None, user_data=None):
        """Create the number of instances requested if quota and
        other arguments check out ok.

        :returns: a list of plain dicts, one per created instance
        :raises: quota.QuotaError when fewer than min_count instances are
                 allowed by the project quota
        """
        type_data = instance_types.INSTANCE_TYPES[instance_type]
        num_instances = quota.allowed_instances(context, max_count, type_data)
        if num_instances < min_count:
            pid = context.project_id
            LOG.warn(_("Quota exceeded for %(pid)s,"
                       " tried to run %(min_count)s instances") % locals())
            raise quota.QuotaError(_("Instance quota exceeded. You can only "
                                     "run %s more instances of this type.") %
                                   num_instances, "InstanceLimitExceeded")

        is_vpn = image_id == FLAGS.vpn_image_id
        if not is_vpn:
            image = self.image_service.show(context, image_id)
            if kernel_id is None:
                kernel_id = image.get('kernel_id', None)
            if ramdisk_id is None:
                ramdisk_id = image.get('ramdisk_id', None)
            # No kernel and ramdisk for raw images
            if kernel_id == str(FLAGS.null_kernel):
                kernel_id = None
                ramdisk_id = None
                LOG.debug(_("Creating a raw instance"))
            # Make sure we have access to kernel and ramdisk (if not raw).
            # NOTE(review): was a direct `logging.debug` with eager `%`
            # formatting; use the module LOG with lazy args like the rest
            # of this file.
            LOG.debug(_("Using Kernel=%s, Ramdisk=%s"),
                      kernel_id, ramdisk_id)
            if kernel_id:
                self.image_service.show(context, kernel_id)
            if ramdisk_id:
                self.image_service.show(context, ramdisk_id)

        # Normalize security_group to a list of names.
        if security_group is None:
            security_group = ['default']
        if not isinstance(security_group, list):
            security_group = [security_group]

        security_groups = []
        self.ensure_default_security_group(context)
        for security_group_name in security_group:
            group = db.security_group_get_by_name(context,
                                                  context.project_id,
                                                  security_group_name)
            security_groups.append(group['id'])

        if key_data is None and key_name:
            key_pair = db.key_pair_get(context, context.user_id, key_name)
            key_data = key_pair['public_key']

        base_options = {
            'reservation_id': utils.generate_uid('r'),
            'image_id': image_id,
            'kernel_id': kernel_id or '',
            'ramdisk_id': ramdisk_id or '',
            'state_description': 'scheduling',
            'user_id': context.user_id,
            'project_id': context.project_id,
            'launch_time': time.strftime('%Y-%m-%dT%H:%M:%SZ',
                                         time.gmtime()),
            'instance_type': instance_type,
            'memory_mb': type_data['memory_mb'],
            'vcpus': type_data['vcpus'],
            'local_gb': type_data['local_gb'],
            'display_name': display_name,
            'display_description': display_description,
            'user_data': user_data or '',
            'key_name': key_name,
            'key_data': key_data,
            'locked': False,
            'availability_zone': availability_zone}

        elevated = context.elevated()
        instances = []
        LOG.debug(_("Going to run %s instances..."), num_instances)
        for num in range(num_instances):
            instance = dict(mac_address=utils.generate_mac(),
                            launch_index=num,
                            **base_options)
            instance = self.db.instance_create(context, instance)
            instance_id = instance['id']

            elevated = context.elevated()
            if not security_groups:
                security_groups = []
            for security_group_id in security_groups:
                self.db.instance_add_security_group(elevated,
                                                    instance_id,
                                                    security_group_id)

            # Set sane defaults if not specified
            updates = dict(hostname=self.hostname_factory(instance_id))
            if (not hasattr(instance, 'display_name') or
                    instance.display_name is None):
                updates['display_name'] = "Server %s" % instance_id

            instance = self.update(context, instance_id, **updates)
            instances.append(instance)

            pid = context.project_id
            uid = context.user_id
            LOG.debug(_("Casting to scheduler for %(pid)s/%(uid)s's"
                        " instance %(instance_id)s") % locals())
            rpc.cast(context,
                     FLAGS.scheduler_topic,
                     {"method": "run_instance",
                      "args": {"topic": FLAGS.compute_topic,
                               "instance_id": instance_id,
                               "availability_zone": availability_zone}})

        for group_id in security_groups:
            self.trigger_security_group_members_refresh(elevated, group_id)

        return [dict(x.iteritems()) for x in instances]

    def ensure_default_security_group(self, context):
        """Create the 'default' security group for the security context
        if it does not already exist.

        :param context: the security context
        """
        try:
            db.security_group_get_by_name(context, context.project_id,
                                          'default')
        except exception.NotFound:
            values = {'name': 'default',
                      'description': 'default',
                      'user_id': context.user_id,
                      'project_id': context.project_id}
            db.security_group_create(context, values)

    def trigger_security_group_rules_refresh(self, context,
                                             security_group_id):
        """Called when a rule is added to or removed from a security_group.

        Casts a refresh request to every compute host that runs an
        instance belonging to the group.
        """
        security_group = self.db.security_group_get(context,
                                                    security_group_id)

        hosts = set()
        for instance in security_group['instances']:
            if instance['host'] is not None:
                hosts.add(instance['host'])

        for host in hosts:
            rpc.cast(context,
                     self.db.queue_get_for(context, FLAGS.compute_topic,
                                           host),
                     {"method": "refresh_security_group_rules",
                      "args": {"security_group_id": security_group.id}})

    def trigger_security_group_members_refresh(self, context, group_id):
        """Called when a security group gains a new or loses a member

        Sends an update request to each compute node for whom this is
        relevant."""
        # First, we get the security group rules that reference this group as
        # the grantee..
        security_group_rules = \
            self.db.security_group_rule_get_by_security_group_grantee(
                context, group_id)

        # ..then we distill the security groups to which they belong..
        security_groups = set()
        for rule in security_group_rules:
            security_group = self.db.security_group_get(
                context, rule['parent_group_id'])
            security_groups.add(security_group)

        # ..then we find the instances that are members of these groups..
        instances = set()
        for security_group in security_groups:
            for instance in security_group['instances']:
                instances.add(instance)

        # ...then we find the hosts where they live...
        hosts = set()
        for instance in instances:
            if instance['host']:
                hosts.add(instance['host'])

        # ...and finally we tell these nodes to refresh their view of this
        # particular security group.
        for host in hosts:
            rpc.cast(context,
                     self.db.queue_get_for(context, FLAGS.compute_topic,
                                           host),
                     {"method": "refresh_security_group_members",
                      "args": {"security_group_id": group_id}})

    def update(self, context, instance_id, **kwargs):
        """Updates the instance in the datastore.

        :param context: The security context
        :param instance_id: ID of the instance to update
        :param kwargs: All additional keyword args are treated
                       as data fields of the instance to be
                       updated

        :retval None
        """
        rv = self.db.instance_update(context, instance_id, kwargs)
        return dict(rv.iteritems())

    def delete(self, context, instance_id):
        """Terminate the given instance.

        A no-op if the instance is already being terminated.  When the
        instance has a host assigned, termination is cast to that host;
        otherwise the database record is destroyed directly.
        """
        LOG.debug(_("Going to try to terminate %s"), instance_id)
        try:
            instance = self.get(context, instance_id)
        except exception.NotFound:
            LOG.warning(_("Instance %d was not found during terminate"),
                        instance_id)
            raise

        if instance['state_description'] == 'terminating':
            LOG.warning(_("Instance %d is already being terminated"),
                        instance_id)
            return

        self.update(context,
                    instance['id'],
                    state_description='terminating',
                    state=0,
                    terminated_at=datetime.datetime.utcnow())

        host = instance['host']
        if host:
            self._cast_compute_message('terminate_instance', context,
                                       instance_id, host)
        else:
            self.db.instance_destroy(context, instance_id)

    def get(self, context, instance_id):
        """Get a single instance with the given ID."""
        rv = self.db.instance_get(context, instance_id)
        return dict(rv.iteritems())

    def get_all(self, context, project_id=None, reservation_id=None,
                fixed_ip=None):
        """Get all instances, possibly filtered by one of the
        given parameters. If there is no filter and the context is
        an admin, it will retrieve all instances in the system.
        """
        if reservation_id is not None:
            return self.db.instance_get_all_by_reservation(context,
                                                           reservation_id)
        if fixed_ip is not None:
            return self.db.fixed_ip_get_instance(context, fixed_ip)
        if project_id or not context.is_admin:
            if not context.project:
                return self.db.instance_get_all_by_user(context,
                                                        context.user_id)
            if project_id is None:
                project_id = context.project_id
            return self.db.instance_get_all_by_project(context, project_id)
        return self.db.instance_get_all(context)

    def _cast_compute_message(self, method, context, instance_id, host=None,
                              params=None):
        """Generic handler for RPC casts to compute.

        :param params: Optional dictionary of arguments to be passed to the
                       compute worker

        :retval None
        """
        if not params:
            params = {}
        if not host:
            instance = self.get(context, instance_id)
            host = instance['host']
        queue = self.db.queue_get_for(context, FLAGS.compute_topic, host)
        params['instance_id'] = instance_id
        kwargs = {'method': method, 'args': params}
        rpc.cast(context, queue, kwargs)

    def _call_compute_message(self, method, context, instance_id, host=None,
                              params=None):
        """Generic handler for RPC calls to compute.

        :param params: Optional dictionary of arguments to be passed to the
                       compute worker

        :retval: Result returned by compute worker
        """
        if not params:
            params = {}
        if not host:
            instance = self.get(context, instance_id)
            host = instance["host"]
        queue = self.db.queue_get_for(context, FLAGS.compute_topic, host)
        params['instance_id'] = instance_id
        kwargs = {'method': method, 'args': params}
        return rpc.call(context, queue, kwargs)

    def snapshot(self, context, instance_id, name):
        """Snapshot the given instance.

        :retval: A dict containing image metadata
        """
        data = {'name': name, 'is_public': False}
        image_meta = self.image_service.create(context, data)
        params = {'image_id': image_meta['id']}
        self._cast_compute_message('snapshot_instance', context, instance_id,
                                   params=params)
        return image_meta

    def reboot(self, context, instance_id):
        """Reboot the given instance."""
        self._cast_compute_message('reboot_instance', context, instance_id)

    def pause(self, context, instance_id):
        """Pause the given instance."""
        self._cast_compute_message('pause_instance', context, instance_id)

    def unpause(self, context, instance_id):
        """Unpause the given instance."""
        self._cast_compute_message('unpause_instance', context, instance_id)

    def get_diagnostics(self, context, instance_id):
        """Retrieve diagnostics for the given instance."""
        return self._call_compute_message(
            "get_diagnostics",
            context,
            instance_id)

    def get_actions(self, context, instance_id):
        """Retrieve actions for the given instance."""
        return self.db.instance_get_actions(context, instance_id)

    def suspend(self, context, instance_id):
        """suspend the instance with instance_id"""
        self._cast_compute_message('suspend_instance', context, instance_id)

    def resume(self, context, instance_id):
        """resume the instance with instance_id"""
        self._cast_compute_message('resume_instance', context, instance_id)

    def rescue(self, context, instance_id):
        """Rescue the given instance."""
        self._cast_compute_message('rescue_instance', context, instance_id)

    def unrescue(self, context, instance_id):
        """Unrescue the given instance."""
        self._cast_compute_message('unrescue_instance', context, instance_id)

    def set_admin_password(self, context, instance_id):
        """Set the root/admin password for the given instance."""
        self._cast_compute_message('set_admin_password', context, instance_id)

    def get_ajax_console(self, context, instance_id):
        """Get a url to an AJAX Console"""
        # NOTE(review): `instance` is unused but self.get() also validates
        # that the instance exists (raises NotFound) -- kept for that
        # side effect.
        instance = self.get(context, instance_id)
        output = self._call_compute_message('get_ajax_console',
                                            context,
                                            instance_id)
        rpc.cast(context, '%s' % FLAGS.ajax_console_proxy_topic,
                 {'method': 'authorize_ajax_console',
                  'args': {'token': output['token'], 'host': output['host'],
                           'port': output['port']}})
        return {'url': '%s?token=%s' % (FLAGS.ajax_console_proxy_url,
                                        output['token'])}

    def get_console_output(self, context, instance_id):
        """Get console output for an an instance"""
        return self._call_compute_message('get_console_output',
                                          context,
                                          instance_id)

    def lock(self, context, instance_id):
        """lock the instance with instance_id"""
        self._cast_compute_message('lock_instance', context, instance_id)

    def unlock(self, context, instance_id):
        """unlock the instance with instance_id"""
        self._cast_compute_message('unlock_instance', context, instance_id)

    def get_lock(self, context, instance_id):
        """return the boolean state of (instance with instance_id)'s lock"""
        instance = self.get(context, instance_id)
        return instance['locked']

    def attach_volume(self, context, instance_id, volume_id, device):
        """Attach a volume to the given instance at the given device path.

        :raises: exception.ApiError if the device name is not of the form
                 /dev/<x>d<y> (e.g. /dev/vdb)
        """
        if not re.match("^/dev/[a-z]d[a-z]+$", device):
            raise exception.ApiError(_("Invalid device specified: %s. "
                                       "Example device: /dev/vdb") % device)
        self.volume_api.check_attach(context, volume_id)
        instance = self.get(context, instance_id)
        host = instance['host']
        rpc.cast(context,
                 self.db.queue_get_for(context, FLAGS.compute_topic, host),
                 {"method": "attach_volume",
                  "args": {"volume_id": volume_id,
                           "instance_id": instance_id,
                           "mountpoint": device}})

    def detach_volume(self, context, volume_id):
        """Detach a volume from whichever instance it is attached to.

        :retval: the instance the volume was attached to
        :raises: exception.ApiError if the volume is not attached
        """
        instance = self.db.volume_get_instance(context.elevated(), volume_id)
        if not instance:
            raise exception.ApiError(_("Volume isn't attached to anything!"))
        self.volume_api.check_detach(context, volume_id)
        host = instance['host']
        rpc.cast(context,
                 self.db.queue_get_for(context, FLAGS.compute_topic, host),
                 {"method": "detach_volume",
                  "args": {"instance_id": instance['id'],
                           "volume_id": volume_id}})
        return instance

    def associate_floating_ip(self, context, instance_id, address):
        """Associate a floating ip with the instance's fixed ip."""
        instance = self.get(context, instance_id)
        self.network_api.associate_floating_ip(context, address,
                                               instance['fixed_ip'])
Python
# Copyright 2011 Fractus Labs (www.fractus-labs.com)
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
Handles all processes relating to nodes (managed machines).

The :py:class:`NodeManager` class is a :py:class:`fractus.manager.Manager`
that handles RPC calls relating to creating instances.  It is responsible
for node management.

**Related Flags**

:instances_path:  Where instances are kept on disk
:compute_driver:  Name of class that is used to handle virtualization, loaded
                  by :func:`nova.utils.import_object`
:volume_manager:  Name of class that handles persistent storage, loaded by
                  :func:`nova.utils.import_object`
:node_driver:  Name of class that handles os
"""

import base64
import datetime
import random
import string
import socket
import functools

from fractus import exception
from fractus import flags
from fractus import log as logging
from fractus import manager
from fractus import rpc
from fractus import utils
from fractus.agent import power_state

FLAGS = flags.FLAGS
flags.DEFINE_string('node_driver', 'fractus.os.os.get_os',
                    'Driver to manage os')
flags.DEFINE_string('instances_path', '$state_path/instances',
                    'where instances are stored on disk')
# NOTE(review): default still points at the nova namespace -- presumably a
# leftover from the nova->fractus migration; confirm before changing.
flags.DEFINE_string('compute_driver', 'nova.virt.connection.get_connection',
                    'Driver to use for controlling virtualization')
# NOTE(review): was DEFINE_string with a boolean default.  As a string flag,
# any value given on the command line (even "False") is a non-empty, truthy
# string, so the stub could never be switched off once set.  Declare it as
# the boolean flag it is used as (`if not FLAGS.stub_network`).
flags.DEFINE_bool('stub_network', False,
                  'Stub network related code')
flags.DEFINE_integer('password_length', 12,
                     'Length of generated admin passwords')
flags.DEFINE_string('console_host', socket.gethostname(), 'Console proxy host to use to connect to instances on' 'this host.') LOG = logging.getLogger('fractus.agent.manager') def checks_instance_lock(function): """ decorator used for preventing action against locked instances unless, of course, you happen to be admin """ @functools.wraps(function) def decorated_function(self, context, instance_id, *args, **kwargs): LOG.info(_("check_instance_lock: decorating: |%s|"), function, context=context) LOG.info(_("check_instance_lock: arguments: |%(self)s| |%(context)s|" " |%(instance_id)s|") % locals(), context=context) locked = self.get_lock(context, instance_id) admin = context.is_admin LOG.info(_("check_instance_lock: locked: |%s|"), locked, context=context) LOG.info(_("check_instance_lock: admin: |%s|"), admin, context=context) # if admin or unlocked call function otherwise log error if admin or not locked: LOG.info(_("check_instance_lock: executing: |%s|"), function, context=context) function(self, context, instance_id, *args, **kwargs) else: LOG.error(_("check_instance_lock: not executing |%s|"), function, context=context) return False return decorated_function class NodeManager(manager.Manager): """Manages the running instances from creation to destruction.""" def __init__(self, compute_driver=None, *args, **kwargs): """Load configuration options and connect to the hypervisor.""" # TODO(vish): sync driver creation logic with the rest of the system # and redocument the module docstring if not compute_driver: compute_driver = FLAGS.compute_driver self.driver = utils.import_object(compute_driver) self.network_manager = utils.import_object(FLAGS.network_manager) self.volume_manager = utils.import_object(FLAGS.volume_manager) super(NodeManager, self).__init__(*args, **kwargs) def init_host(self): """Do any initialization that needs to be run if this is a standalone service. 
""" self.driver.init_host(host=self.host) def _update_state(self, context, instance_id): """Update the state of an instance from the driver info.""" # FIXME(ja): include other fields from state? instance_ref = self.db.instance_get(context, instance_id) try: info = self.driver.get_info(instance_ref['name']) state = info['state'] except exception.NotFound: state = power_state.FAILED self.db.instance_set_state(context, instance_id, state) def get_console_topic(self, context, **kwargs): """Retrieves the console host for a project on this host Currently this is just set in the flags for each compute host.""" #TODO(mdragon): perhaps make this variable by console_type? return self.db.queue_get_for(context, FLAGS.console_topic, FLAGS.console_host) def get_network_topic(self, context, **kwargs): """Retrieves the network host for a project on this host""" # TODO(vish): This method should be memoized. This will make # the call to get_network_host cheaper, so that # it can pas messages instead of checking the db # locally. 
if FLAGS.stub_network: host = FLAGS.network_host else: host = self.network_manager.get_network_host(context) return self.db.queue_get_for(context, FLAGS.network_topic, host) def get_console_pool_info(self, context, console_type): return self.driver.get_console_pool_info(console_type) @exception.wrap_exception def refresh_security_group_rules(self, context, security_group_id, **kwargs): """This call passes straight through to the virtualization driver.""" return self.driver.refresh_security_group_rules(security_group_id) @exception.wrap_exception def refresh_security_group_members(self, context, security_group_id, **kwargs): """This call passes straight through to the virtualization driver.""" return self.driver.refresh_security_group_members(security_group_id) @exception.wrap_exception def run_instance(self, context, instance_id, **kwargs): """Launch a new instance with specified options.""" context = context.elevated() instance_ref = self.db.instance_get(context, instance_id) instance_ref.onset_files = kwargs.get('onset_files', []) if instance_ref['name'] in self.driver.list_instances(): raise exception.Error(_("Instance has already been created")) LOG.audit(_("instance %s: starting..."), instance_id, context=context) self.db.instance_update(context, instance_id, {'host': self.host}) self.db.instance_set_state(context, instance_id, power_state.NOSTATE, 'networking') is_vpn = instance_ref['image_id'] == FLAGS.vpn_image_id # NOTE(vish): This could be a cast because we don't do anything # with the address currently, but I'm leaving it as # a call to ensure that network setup completes. We # will eventually also need to save the address here. 
if not FLAGS.stub_network: address = rpc.call(context, self.get_network_topic(context), {"method": "allocate_fixed_ip", "args": {"instance_id": instance_id, "vpn": is_vpn}}) self.network_manager.setup_compute_network(context, instance_id) # TODO(vish) check to make sure the availability zone matches self.db.instance_set_state(context, instance_id, power_state.NOSTATE, 'spawning') try: self.driver.spawn(instance_ref) now = datetime.datetime.utcnow() self.db.instance_update(context, instance_id, {'launched_at': now}) except Exception: # pylint: disable-msg=W0702 LOG.exception(_("instance %s: Failed to spawn"), instance_id, context=context) self.db.instance_set_state(context, instance_id, power_state.SHUTDOWN) self._update_state(context, instance_id) @exception.wrap_exception @checks_instance_lock def terminate_instance(self, context, instance_id): """Terminate an instance on this machine.""" context = context.elevated() instance_ref = self.db.instance_get(context, instance_id) LOG.audit(_("Terminating instance %s"), instance_id, context=context) fixed_ip = instance_ref.get('fixed_ip') if not FLAGS.stub_network and fixed_ip: floating_ips = fixed_ip.get('floating_ips') or [] for floating_ip in floating_ips: address = floating_ip['address'] LOG.debug("Disassociating address %s", address, context=context) # NOTE(vish): Right now we don't really care if the ip is # disassociated. We may need to worry about # checking this later. network_topic = self.db.queue_get_for(context, FLAGS.network_topic, floating_ip['host']) rpc.cast(context, network_topic, {"method": "disassociate_floating_ip", "args": {"floating_address": address}}) address = fixed_ip['address'] if address: LOG.debug(_("Deallocating address %s"), address, context=context) # NOTE(vish): Currently, nothing needs to be done on the # network node until release. If this changes, # we will need to cast here. 
self.network_manager.deallocate_fixed_ip(context.elevated(), address) volumes = instance_ref.get('volumes') or [] for volume in volumes: self.detach_volume(context, instance_id, volume['id']) if instance_ref['state'] == power_state.SHUTOFF: self.db.instance_destroy(context, instance_id) raise exception.Error(_('trying to destroy already destroyed' ' instance: %s') % instance_id) self.driver.destroy(instance_ref) # TODO(ja): should we keep it in a terminated state for a bit? self.db.instance_destroy(context, instance_id) @exception.wrap_exception @checks_instance_lock def reboot_instance(self, context, instance_id): """Reboot an instance on this server.""" context = context.elevated() self._update_state(context, instance_id) instance_ref = self.db.instance_get(context, instance_id) LOG.audit(_("Rebooting instance %s"), instance_id, context=context) if instance_ref['state'] != power_state.RUNNING: state = instance_ref['state'] running = power_state.RUNNING LOG.warn(_('trying to reboot a non-running ' 'instance: %(instance_id)s (state: %(state)s ' 'expected: %(running)s)') % locals(), context=context) self.db.instance_set_state(context, instance_id, power_state.NOSTATE, 'rebooting') self.network_manager.setup_compute_network(context, instance_id) self.driver.reboot(instance_ref) self._update_state(context, instance_id) @exception.wrap_exception def snapshot_instance(self, context, instance_id, image_id): """Snapshot an instance on this server.""" context = context.elevated() instance_ref = self.db.instance_get(context, instance_id) #NOTE(sirp): update_state currently only refreshes the state field # if we add is_snapshotting, we will need this refreshed too, # potentially? 
self._update_state(context, instance_id) LOG.audit(_('instance %s: snapshotting'), instance_id, context=context) if instance_ref['state'] != power_state.RUNNING: state = instance_ref['state'] running = power_state.RUNNING LOG.warn(_('trying to snapshot a non-running ' 'instance: %(instance_id)s (state: %(state)s ' 'expected: %(running)s)') % locals()) self.driver.snapshot(instance_ref, image_id) @exception.wrap_exception @checks_instance_lock def set_admin_password(self, context, instance_id, new_pass=None): """Set the root/admin password for an instance on this server.""" context = context.elevated() instance_ref = self.db.instance_get(context, instance_id) instance_id = instance_ref['id'] instance_state = instance_ref['state'] expected_state = power_state.RUNNING if instance_state != expected_state: LOG.warn(_('trying to reset the password on a non-running ' 'instance: %(instance_id)s (state: %(instance_state)s ' 'expected: %(expected_state)s)') % locals()) LOG.audit(_('instance %s: setting admin password'), instance_ref['name']) if new_pass is None: # Generate a random password new_pass = utils.generate_password(FLAGS.password_length) self.driver.set_admin_password(instance_ref, new_pass) self._update_state(context, instance_id) @exception.wrap_exception @checks_instance_lock def inject_file(self, context, instance_id, path, file_contents): """Write a file to the specified path on an instance on this server""" context = context.elevated() instance_ref = self.db.instance_get(context, instance_id) instance_id = instance_ref['id'] instance_state = instance_ref['state'] expected_state = power_state.RUNNING if instance_state != expected_state: LOG.warn(_('trying to inject a file into a non-running ' 'instance: %(instance_id)s (state: %(instance_state)s ' 'expected: %(expected_state)s)') % locals()) # Files/paths *should* be base64-encoded at this point, but # double-check to make sure. 
b64_path = utils.ensure_b64_encoding(path) b64_contents = utils.ensure_b64_encoding(file_contents) plain_path = base64.b64decode(b64_path) nm = instance_ref['name'] msg = _('instance %(nm)s: injecting file to %(plain_path)s') % locals() LOG.audit(msg) self.driver.inject_file(instance_ref, b64_path, b64_contents) @exception.wrap_exception @checks_instance_lock def rescue_instance(self, context, instance_id): """Rescue an instance on this server.""" context = context.elevated() instance_ref = self.db.instance_get(context, instance_id) LOG.audit(_('instance %s: rescuing'), instance_id, context=context) self.db.instance_set_state(context, instance_id, power_state.NOSTATE, 'rescuing') self.network_manager.setup_compute_network(context, instance_id) self.driver.rescue(instance_ref) self._update_state(context, instance_id) @exception.wrap_exception @checks_instance_lock def unrescue_instance(self, context, instance_id): """Rescue an instance on this server.""" context = context.elevated() instance_ref = self.db.instance_get(context, instance_id) LOG.audit(_('instance %s: unrescuing'), instance_id, context=context) self.db.instance_set_state(context, instance_id, power_state.NOSTATE, 'unrescuing') self.driver.unrescue(instance_ref) self._update_state(context, instance_id) @staticmethod def _update_state_callback(self, context, instance_id, result): """Update instance state when async task completes.""" self._update_state(context, instance_id) @exception.wrap_exception @checks_instance_lock def pause_instance(self, context, instance_id): """Pause an instance on this server.""" context = context.elevated() instance_ref = self.db.instance_get(context, instance_id) LOG.audit(_('instance %s: pausing'), instance_id, context=context) self.db.instance_set_state(context, instance_id, power_state.NOSTATE, 'pausing') self.driver.pause(instance_ref, lambda result: self._update_state_callback(self, context, instance_id, result)) @exception.wrap_exception @checks_instance_lock def 
unpause_instance(self, context, instance_id): """Unpause a paused instance on this server.""" context = context.elevated() instance_ref = self.db.instance_get(context, instance_id) LOG.audit(_('instance %s: unpausing'), instance_id, context=context) self.db.instance_set_state(context, instance_id, power_state.NOSTATE, 'unpausing') self.driver.unpause(instance_ref, lambda result: self._update_state_callback(self, context, instance_id, result)) @exception.wrap_exception def get_diagnostics(self, context, instance_id): """Retrieve diagnostics for an instance on this server.""" instance_ref = self.db.instance_get(context, instance_id) if instance_ref["state"] == power_state.RUNNING: LOG.audit(_("instance %s: retrieving diagnostics"), instance_id, context=context) return self.driver.get_diagnostics(instance_ref) @exception.wrap_exception @checks_instance_lock def suspend_instance(self, context, instance_id): """ suspend the instance with instance_id """ context = context.elevated() instance_ref = self.db.instance_get(context, instance_id) LOG.audit(_('instance %s: suspending'), instance_id, context=context) self.db.instance_set_state(context, instance_id, power_state.NOSTATE, 'suspending') self.driver.suspend(instance_ref, lambda result: self._update_state_callback(self, context, instance_id, result)) @exception.wrap_exception @checks_instance_lock def resume_instance(self, context, instance_id): """ resume the suspended instance with instance_id """ context = context.elevated() instance_ref = self.db.instance_get(context, instance_id) LOG.audit(_('instance %s: resuming'), instance_id, context=context) self.db.instance_set_state(context, instance_id, power_state.NOSTATE, 'resuming') self.driver.resume(instance_ref, lambda result: self._update_state_callback(self, context, instance_id, result)) @exception.wrap_exception def lock_instance(self, context, instance_id): """ lock the instance with instance_id """ context = context.elevated() instance_ref = 
self.db.instance_get(context, instance_id) LOG.debug(_('instance %s: locking'), instance_id, context=context) self.db.instance_update(context, instance_id, {'locked': True}) @exception.wrap_exception def unlock_instance(self, context, instance_id): """ unlock the instance with instance_id """ context = context.elevated() instance_ref = self.db.instance_get(context, instance_id) LOG.debug(_('instance %s: unlocking'), instance_id, context=context) self.db.instance_update(context, instance_id, {'locked': False}) @exception.wrap_exception def get_lock(self, context, instance_id): """ return the boolean state of (instance with instance_id)'s lock """ context = context.elevated() LOG.debug(_('instance %s: getting locked state'), instance_id, context=context) instance_ref = self.db.instance_get(context, instance_id) return instance_ref['locked'] @checks_instance_lock def reset_network(self, context, instance_id): """ Reset networking on the instance. """ context = context.elevated() instance_ref = self.db.instance_get(context, instance_id) LOG.debug(_('instance %s: reset network'), instance_id, context=context) self.driver.reset_network(instance_ref) @exception.wrap_exception def get_console_output(self, context, instance_id): """Send the console output for an instance.""" context = context.elevated() instance_ref = self.db.instance_get(context, instance_id) LOG.audit(_("Get console output for instance %s"), instance_id, context=context) return self.driver.get_console_output(instance_ref) @exception.wrap_exception def get_ajax_console(self, context, instance_id): """Return connection information for an ajax console""" context = context.elevated() LOG.debug(_("instance %s: getting ajax console"), instance_id) instance_ref = self.db.instance_get(context, instance_id) return self.driver.get_ajax_console(instance_ref) @checks_instance_lock def attach_volume(self, context, instance_id, volume_id, mountpoint): """Attach a volume to an instance.""" context = context.elevated() 
instance_ref = self.db.instance_get(context, instance_id) LOG.audit(_("instance %(instance_id)s: attaching volume %(volume_id)s" " to %(mountpoint)s") % locals(), context=context) dev_path = self.volume_manager.setup_compute_volume(context, volume_id) try: self.driver.attach_volume(instance_ref['name'], dev_path, mountpoint) self.db.volume_attached(context, volume_id, instance_id, mountpoint) except Exception as exc: # pylint: disable-msg=W0702 # NOTE(vish): The inline callback eats the exception info so we # log the traceback here and reraise the same # ecxception below. LOG.exception(_("instance %(instance_id)s: attach failed" " %(mountpoint)s, removing") % locals(), context=context) self.volume_manager.remove_compute_volume(context, volume_id) raise exc return True @exception.wrap_exception @checks_instance_lock def detach_volume(self, context, instance_id, volume_id): """Detach a volume from an instance.""" context = context.elevated() instance_ref = self.db.instance_get(context, instance_id) volume_ref = self.db.volume_get(context, volume_id) mp = volume_ref['mountpoint'] LOG.audit(_("Detach volume %(volume_id)s from mountpoint %(mp)s" " on instance %(instance_id)s") % locals(), context=context) if instance_ref['name'] not in self.driver.list_instances(): LOG.warn(_("Detaching volume from unknown instance %s"), instance_id, context=context) else: self.driver.detach_volume(instance_ref['name'], volume_ref['mountpoint']) self.volume_manager.remove_compute_volume(context, volume_id) self.db.volume_detached(context, volume_id) return True
Python
# Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from fractus.agent.api import API
Python
# Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # Copyright 2010 OpenStack LLC. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Utility methods for working with WSGI servers """ import os import sys from xml.dom import minidom import eventlet import eventlet.wsgi eventlet.patcher.monkey_patch(all=False, socket=True, time=True) import routes import routes.middleware import webob import webob.dec import webob.exc from paste import deploy from fractus import flags from fractus import log as logging from fractus import utils FLAGS = flags.FLAGS class WritableLogger(object): """A thin wrapper that responds to `write` and logs.""" def __init__(self, logger, level=logging.DEBUG): self.logger = logger self.level = level def write(self, msg): self.logger.log(self.level, msg) class Server(object): """Server class to manage multiple WSGI sockets and applications.""" def __init__(self, threads=1000): logging.basicConfig() self.pool = eventlet.GreenPool(threads) def start(self, application, port, host='0.0.0.0', backlog=128): """Run a WSGI server with the given application.""" arg0 = sys.argv[0] logging.audit(_("Starting %(arg0)s on %(host)s:%(port)s") % locals()) socket = eventlet.listen((host, port), backlog=backlog) self.pool.spawn_n(self._run, application, socket) def wait(self): """Wait until all servers have completed running.""" try: self.pool.waitall() except 
KeyboardInterrupt: pass def _run(self, application, socket): """Start a WSGI server in a new green thread.""" logger = logging.getLogger('eventlet.wsgi.server') eventlet.wsgi.server(socket, application, custom_pool=self.pool, log=WritableLogger(logger)) class Application(object): """Base WSGI application wrapper. Subclasses need to implement __call__.""" @classmethod def factory(cls, global_config, **local_config): """Used for paste app factories in paste.deploy config fles. Any local configuration (that is, values under the [app:APPNAME] section of the paste config) will be passed into the `__init__` method as kwargs. A hypothetical configuration would look like: [app:wadl] latest_version = 1.3 paste.app_factory = nova.api.fancy_api:Wadl.factory which would result in a call to the `Wadl` class as import nova.api.fancy_api fancy_api.Wadl(latest_version='1.3') You could of course re-implement the `factory` method in subclasses, but using the kwarg passing it shouldn't be necessary. """ return cls(**local_config) def __call__(self, environ, start_response): r"""Subclasses will probably want to implement __call__ like this: @webob.dec.wsgify def __call__(self, req): # Any of the following objects work as responses: # Option 1: simple string res = 'message\n' # Option 2: a nicely formatted HTTP exception page res = exc.HTTPForbidden(detail='Nice try') # Option 3: a webob Response object (in case you need to play with # headers, or you want to be treated like an iterable, or or or) res = Response(); res.app_iter = open('somefile') # Option 4: any wsgi app to be run next res = self.application # Option 5: you can get a Response object for a wsgi app, too, to # play with headers etc res = req.get_response(self.application) # You can then just return your response... return res # ... or set req.response and return None. req.response = res See the end of http://pythonpaste.org/webob/modules/dec.html for more info. 
""" raise NotImplementedError(_("You must implement __call__")) class Middleware(Application): """Base WSGI middleware. These classes require an application to be initialized that will be called next. By default the middleware will simply call its wrapped app, or you can override __call__ to customize its behavior. """ @classmethod def factory(cls, global_config, **local_config): """Used for paste app factories in paste.deploy config fles. Any local configuration (that is, values under the [filter:APPNAME] section of the paste config) will be passed into the `__init__` method as kwargs. A hypothetical configuration would look like: [filter:analytics] redis_host = 127.0.0.1 paste.filter_factory = nova.api.analytics:Analytics.factory which would result in a call to the `Analytics` class as import nova.api.analytics analytics.Analytics(app_from_paste, redis_host='127.0.0.1') You could of course re-implement the `factory` method in subclasses, but using the kwarg passing it shouldn't be necessary. """ def _factory(app): return cls(app, **local_config) return _factory def __init__(self, application): self.application = application def process_request(self, req): """Called on each request. If this returns None, the next application down the stack will be executed. If it returns a response then that response will be returned and execution will stop here. 
""" return None def process_response(self, response): """Do whatever you'd like to the response.""" return response @webob.dec.wsgify def __call__(self, req): response = self.process_request(req) if response: return response response = req.get_response(self.application) return self.process_response(response) class Debug(Middleware): """Helper class that can be inserted into any WSGI application chain to get information about the request and response.""" @webob.dec.wsgify def __call__(self, req): print ("*" * 40) + " REQUEST ENVIRON" for key, value in req.environ.items(): print key, "=", value print resp = req.get_response(self.application) print ("*" * 40) + " RESPONSE HEADERS" for (key, value) in resp.headers.iteritems(): print key, "=", value print resp.app_iter = self.print_generator(resp.app_iter) return resp @staticmethod def print_generator(app_iter): """ Iterator that prints the contents of a wrapper string iterator when iterated. """ print ("*" * 40) + " BODY" for part in app_iter: sys.stdout.write(part) sys.stdout.flush() yield part print class Router(object): """ WSGI middleware that maps incoming requests to WSGI apps. """ def __init__(self, mapper): """ Create a router for the given routes.Mapper. Each route in `mapper` must specify a 'controller', which is a WSGI app to call. You'll probably want to specify an 'action' as well and have your controller be a wsgi.Controller, who will route the request to the action method. Examples: mapper = routes.Mapper() sc = ServerController() # Explicit mapping of one route to a controller+action mapper.connect(None, "/svrlist", controller=sc, action="list") # Actions are all implicitly defined mapper.resource("server", "servers", controller=sc) # Pointing to an arbitrary WSGI app. You can specify the # {path_info:.*} parameter so the target app can be handed just that # section of the URL. 
mapper.connect(None, "/v1.0/{path_info:.*}", controller=BlogApp()) """ self.map = mapper self._router = routes.middleware.RoutesMiddleware(self._dispatch, self.map) @webob.dec.wsgify def __call__(self, req): """ Route the incoming request to a controller based on self.map. If no match, return a 404. """ return self._router @staticmethod @webob.dec.wsgify def _dispatch(req): """ Called by self._router after matching the incoming request to a route and putting the information into req.environ. Either returns 404 or the routed WSGI app's response. """ match = req.environ['wsgiorg.routing_args'][1] if not match: return webob.exc.HTTPNotFound() app = match['controller'] return app class Controller(object): """ WSGI app that reads routing information supplied by RoutesMiddleware and calls the requested action method upon itself. All action methods must, in addition to their normal parameters, accept a 'req' argument which is the incoming webob.Request. They raise a webob.exc exception, or return a dict which will be serialized by requested content type. """ @webob.dec.wsgify def __call__(self, req): """ Call the method specified in req.environ by RoutesMiddleware. """ arg_dict = req.environ['wsgiorg.routing_args'][1] action = arg_dict['action'] method = getattr(self, action) del arg_dict['controller'] del arg_dict['action'] arg_dict['req'] = req result = method(**arg_dict) if type(result) is dict: return self._serialize(result, req) else: return result def _serialize(self, data, request): """ Serialize the given dict to the response type requested in request. Uses self._serialization_metadata if it exists, which is a dict mapping MIME types to information needed to serialize to that type. """ _metadata = getattr(type(self), "_serialization_metadata", {}) serializer = Serializer(request.environ, _metadata) return serializer.to_content_type(data) def _deserialize(self, data, request): """ Deserialize the request body to the response type requested in request. 
Uses self._serialization_metadata if it exists, which is a dict mapping MIME types to information needed to serialize to that type. """ _metadata = getattr(type(self), "_serialization_metadata", {}) serializer = Serializer(request.environ, _metadata) return serializer.deserialize(data) class Serializer(object): """ Serializes and deserializes dictionaries to certain MIME types. """ def __init__(self, environ, metadata=None): """ Create a serializer based on the given WSGI environment. 'metadata' is an optional dict mapping MIME types to information needed to serialize a dictionary to that type. """ self.metadata = metadata or {} req = webob.Request.blank('', environ) suffix = req.path_info.split('.')[-1].lower() if suffix == 'json': self.handler = self._to_json elif suffix == 'xml': self.handler = self._to_xml elif 'application/json' in req.accept: self.handler = self._to_json elif 'application/xml' in req.accept: self.handler = self._to_xml else: # This is the default self.handler = self._to_json def to_content_type(self, data): """ Serialize a dictionary into a string. The format of the string will be decided based on the Content Type requested in self.environ: by Accept: header, or by URL suffix. """ return self.handler(data) def deserialize(self, datastring): """ Deserialize a string to a dictionary. The string must be in the format of a supported MIME type. """ datastring = datastring.strip() try: is_xml = (datastring[0] == '<') if not is_xml: return utils.loads(datastring) return self._from_xml(datastring) except: return None def _from_xml(self, datastring): xmldata = self.metadata.get('application/xml', {}) plurals = set(xmldata.get('plurals', {})) node = minidom.parseString(datastring).childNodes[0] return {node.nodeName: self._from_xml_node(node, plurals)} def _from_xml_node(self, node, listnames): """ Convert a minidom node to a simple Python type. listnames is a collection of names of XML nodes whose subnodes should be considered list items. 
""" if len(node.childNodes) == 1 and node.childNodes[0].nodeType == 3: return node.childNodes[0].nodeValue elif node.nodeName in listnames: return [self._from_xml_node(n, listnames) for n in node.childNodes] else: result = dict() for attr in node.attributes.keys(): result[attr] = node.attributes[attr].nodeValue for child in node.childNodes: if child.nodeType != node.TEXT_NODE: result[child.nodeName] = self._from_xml_node(child, listnames) return result def _to_json(self, data): return utils.dumps(data) def _to_xml(self, data): metadata = self.metadata.get('application/xml', {}) # We expect data to contain a single key which is the XML root. root_key = data.keys()[0] doc = minidom.Document() node = self._to_xml_node(doc, metadata, root_key, data[root_key]) return node.toprettyxml(indent=' ') def _to_xml_node(self, doc, metadata, nodename, data): """Recursive method to convert data members to XML nodes.""" result = doc.createElement(nodename) if type(data) is list: singular = metadata.get('plurals', {}).get(nodename, None) if singular is None: if nodename.endswith('s'): singular = nodename[:-1] else: singular = 'item' for item in data: node = self._to_xml_node(doc, metadata, singular, item) result.appendChild(node) elif type(data) is dict: attrs = metadata.get('attributes', {}).get(nodename, {}) for k, v in data.items(): if k in attrs: result.setAttribute(k, str(v)) else: node = self._to_xml_node(doc, metadata, k, v) result.appendChild(node) else: # Type is atom node = doc.createTextNode(str(data)) result.appendChild(node) return result def load_paste_configuration(filename, appname): """Returns a paste configuration dict, or None.""" filename = os.path.abspath(filename) config = None try: config = deploy.appconfig("config:%s" % filename, name=appname) except LookupError: pass return config def load_paste_app(filename, appname): """Builds a wsgi app from a paste config, None if app not configured.""" filename = os.path.abspath(filename) app = None try: app = 
deploy.loadapp("config:%s" % filename, name=appname) except LookupError: pass return app def paste_config_to_flags(config, mixins): for k, v in mixins.iteritems(): value = config.get(k, v) converted_value = FLAGS[k].parser.Parse(value) setattr(FLAGS, k, converted_value)
Python
#!/usr/bin/env python # Last modified: July 23rd, 2009 """ pydiction.py 1.2 by Ryan Kulla (rkulla AT gmail DOT com). Description: Creates a Vim dictionary of Python module attributes for Vim's completion feature. The created dictionary file is used by the Vim ftplugin "python_pydiction.vim". Usage: pydiction.py <module> ... [-v] Example: The following will append all the "time" and "math" modules' attributes to a file, in the current directory, called "pydiction" with and without the "time." and "math." prefix: $ python pydiction.py time math To print the output just to stdout, instead of appending to the file, supply the -v option: $ python pydiction.py -v time math License: BSD. """ __author__ = "Ryan Kulla (rkulla AT gmail DOT com)" __version__ = "1.2" __copyright__ = "Copyright (c) 2003-2009 Ryan Kulla" import os import sys import types import shutil # Path/filename of the vim dictionary file to write to: PYDICTION_DICT = r'complete-dict' # Path/filename of the vim dictionary backup file: PYDICTION_DICT_BACKUP = r'complete-dict.last' # Sentintal to test if we should only output to stdout: STDOUT_ONLY = False def get_submodules(module_name, submodules): """Build a list of all the submodules of modules.""" # Try to import a given module, so we can dir() it: try: imported_module = my_import(module_name) except ImportError, err: return submodules mod_attrs = dir(imported_module) for mod_attr in mod_attrs: if type(getattr(imported_module, mod_attr)) is types.ModuleType: submodules.append(module_name + '.' 
+ mod_attr) return submodules def write_dictionary(module_name): """Write to module attributes to the vim dictionary file.""" prefix_on = '%s.%s' prefix_on_callable = '%s.%s(' prefix_off = '%s' prefix_off_callable = '%s(' try: imported_module = my_import(module_name) except ImportError, err: return mod_attrs = dir(imported_module) # Generate fully-qualified module names: write_to.write('\n--- import %s ---\n' % module_name) for mod_attr in mod_attrs: if callable(getattr(imported_module, mod_attr)): # If an attribute is callable, show an opening parentheses: format = prefix_on_callable else: format = prefix_on write_to.write(format % (module_name, mod_attr) + '\n') # Generate submodule names by themselves, for when someone does # "from foo import bar" and wants to complete bar.baz. # This works the same no matter how many .'s are in the module. if module_name.count('.'): # Get the "from" part of the module. E.g., 'xml.parsers' # if the module name was 'xml.parsers.expat': first_part = module_name[:module_name.rfind('.')] # Get the "import" part of the module. 
E.g., 'expat' # if the module name was 'xml.parsers.expat' second_part = module_name[module_name.rfind('.') + 1:] write_to.write('\n--- from %s import %s ---\n' % (first_part, second_part)) for mod_attr in mod_attrs: if callable(getattr(imported_module, mod_attr)): format = prefix_on_callable else: format = prefix_on write_to.write(format % (second_part, mod_attr) + '\n') # Generate non-fully-qualified module names: write_to.write('\n--- from %s import * ---\n' % module_name) for mod_attr in mod_attrs: if callable(getattr(imported_module, mod_attr)): format = prefix_off_callable else: format = prefix_off write_to.write(format % mod_attr + '\n') def my_import(name): """Make __import__ import "package.module" formatted names.""" mod = __import__(name) components = name.split('.') for comp in components[1:]: mod = getattr(mod, comp) return mod def remove_duplicates(seq, keep=()): """ Remove duplicates from a sequence while perserving order. The optional tuple argument "keep" can be given to specificy each string you don't want to be removed as a duplicate. """ seq2 = [] seen = set(); for i in seq: if i in (keep): seq2.append(i) continue elif i not in seen: seq2.append(i) seen.add(i) return seq2 def get_yesno(msg="[Y/n]?"): """ Returns True if user inputs 'n', 'Y', "yes", "Yes"... Returns False if user inputs 'n', 'N', "no", "No"... If they enter an invalid option it tells them so and asks again. Hitting Enter is equivalent to answering Yes. Takes an optional message to display, defaults to "[Y/n]?". """ while True: answer = raw_input(msg) if answer == '': return True elif len(answer): answer = answer.lower()[0] if answer == 'y': return True break elif answer == 'n': return False break else: print "Invalid option. Please try again." 
continue def main(write_to): """Generate a dictionary for Vim of python module attributes.""" submodules = [] for module_name in sys.argv[1:]: try: imported_module = my_import(module_name) except ImportError, err: print "Couldn't import: %s. %s" % (module_name, err) sys.argv.remove(module_name) cli_modules = sys.argv[1:] # Step through each command line argument: for module_name in cli_modules: print "Trying module: %s" % module_name submodules = get_submodules(module_name, submodules) # Step through the current module's submodules: for submodule_name in submodules: submodules = get_submodules(submodule_name, submodules) # Add the top-level modules to the list too: for module_name in cli_modules: submodules.append(module_name) submodules.sort() # Step through all of the modules and submodules to create the dict file: for submodule_name in submodules: write_dictionary(submodule_name) if STDOUT_ONLY: return # Close and Reopen the file for reading and remove all duplicate lines: write_to.close() print "Removing duplicates..." f = open(PYDICTION_DICT, 'r') file_lines = f.readlines() file_lines = remove_duplicates(file_lines, ('\n')) f.close() # Delete the original file: os.unlink(PYDICTION_DICT) # Recreate the file, this time it won't have any duplicates lines: f = open(PYDICTION_DICT, 'w') for attr in file_lines: f.write(attr) f.close() print "Done." if __name__ == '__main__': """Process the command line.""" if sys.version_info[0:2] < (2, 3): sys.exit("You need a Python 2.x version of at least Python 2.3") if len(sys.argv) <= 1: sys.exit("%s requires at least one argument. None given." 
% sys.argv[0]) if '-v' in sys.argv: write_to = sys.stdout sys.argv.remove('-v') STDOUT_ONLY = True elif os.path.exists(PYDICTION_DICT): # See if any of the given modules have already been pydiction'd: f = open(PYDICTION_DICT, 'r') file_lines = f.readlines() for module_name in sys.argv[1:]: for line in file_lines: if line.find('--- import %s ' % module_name) != -1: print '"%s" already exists in %s. Skipping...' % \ (module_name, PYDICTION_DICT) sys.argv.remove(module_name) break f.close() if len(sys.argv) < 2: # Check if there's still enough command-line arguments: sys.exit("Nothing new to do. Aborting.") if os.path.exists(PYDICTION_DICT_BACKUP): answer = get_yesno('Overwrite existing backup "%s" [Y/n]? ' % \ PYDICTION_DICT_BACKUP) if (answer): print "Backing up old dictionary to: %s" % \ PYDICTION_DICT_BACKUP try: shutil.copyfile(PYDICTION_DICT, PYDICTION_DICT_BACKUP) except IOError, err: print "Couldn't back up %s. %s" % (PYDICTION_DICT, err) else: print "Skipping backup..." print 'Appending to: "%s"' % PYDICTION_DICT else: print "Backing up current %s to %s" % \ (PYDICTION_DICT, PYDICTION_DICT_BACKUP) try: shutil.copyfile(PYDICTION_DICT, PYDICTION_DICT_BACKUP) except IOError, err: print "Couldn't back up %s. %s" % (PYDICTION_DICT, err) else: print 'Creating file: "%s"' % PYDICTION_DICT if not STDOUT_ONLY: write_to = open(PYDICTION_DICT, 'a') main(write_to)
Python
from xbee.zigbee import ZigBee
from xbee.base import XBeeBase
from xbee.frame import APIFrame
from twisted.protocols.basic import LineReceiver


class ZigBeeProtocol(LineReceiver, ZigBee):
    """Twisted serial protocol that feeds raw bytes into an XBee APIFrame.

    Incoming chunks are accumulated in an APIFrame; once a complete frame
    is available it is parsed and passed to handle_packet() (expected to be
    provided by a subclass) as a split response dict.
    """

    def __init__(self, escaped=True):
        self._escaped = escaped
        self.frame = APIFrame(escaped=self._escaped)
        # We consume raw API frames, not newline-delimited lines.
        self.setRawMode()

    def rawDataReceived(self, data):
        # A start byte at the head of the chunk marks the beginning of a
        # fresh frame, so discard whatever was buffered so far.
        if data[0] == APIFrame.START_BYTE:
            self.frame = APIFrame(escaped=self._escaped)

        for byte in data:
            self.frame.fill(byte)

            if self.frame.remaining_bytes() > 0:
                continue

            try:
                # Frame is complete: parse it and hand it to the subclass.
                self.frame.parse()
                handler = getattr(self, "handle_packet", None)
                return handler(self._split_response(self.frame.data))
            except ValueError:
                # Malformed frame — drop it and wait for the next start byte.
                self.frame = APIFrame(escaped=self._escaped)

    def _write(self, data):
        # Wrap the payload in an API frame and push it out the serial port.
        self.transport.write(APIFrame(data, self._escaped).output())
Python
#!/usr/bin/python
from twisted.internet import reactor, threads, task
from twisted.internet.protocol import Protocol, Factory, defer
from twisted.web import static
from twisted.web.server import Site
from twisted.web.resource import Resource
from websocket import WebSocketHandler, WebSocketSite
from xbeeService.protocol import ZigBeeProtocol
from twisted.internet.serialport import SerialPort
from time import strftime
import cgi

################################################################################
# Globals and init:
################################################################################
from AutoHomeConf import *  #the file with all your settings.

# Connected-client registries and timer bookkeeping shared by the
# TCP/websocket handlers below.
TCPClients = []
WebSockClients=[]
xbee=[]          # list of ZBHandler instances (populated in __init__)
timer = None
delimiter = None
timers={}        # named delayed-call handles created by WSHandler

################################################################################
# Handle XBEE I/O
################################################################################

################################################################################
# Dispatch addressed commands to zigbee ZB, after making sure frame starts correctly.
# eg: data = "2[f1]" will transmit "[f1]" to specific device
# eg: data = "2![f1]" will transmit "[f1]" to specific device without ack on zibee layer.
# eg: data = "[x]" will broadcast [x]
# note there are no acks on zibee layer during broadcast so ">[" is not valid anyway
################################################################################
class ZBHandler(ZigBeeProtocol):
    # Bridges the serial XBee link to the connected TCP/websocket clients.
    # Assumes AutoHomeConf defines ZB (name -> 64-bit address) and
    # ZB_reverse (address -> name) — TODO confirm against AutoHomeConf.

    def __init__(self, *args, **kwds):
        super(ZBHandler, self).__init__(*args, **kwds)
        # Register ourselves so FormPage/WSHandler can dispatch through us.
        xbee.append(self)

    def handle_packet(self, xbeePacketDictionary):
        # Called from the serial reader; all sends go back through
        # reactor.callFromThread so they run on the reactor thread.
        response = xbeePacketDictionary
        if response.get("source_addr_long", "default") in ZB_reverse:
            if response["id"] == "rx":
                # Silently respond "OK" to AT calls (when module starts up).
                if response["rf_data"]=="AT":
                    reactor.callFromThread(self.send, "tx", frame_id="\x01", dest_addr_long=response["source_addr_long"], dest_addr="\xff\xfe", data="OK")
                else:
                    # Forward device payloads to every connected client.
                    response = ZB_reverse[response["source_addr_long"]] + " DATA >> " \
                        + response["rf_data"]
                    print strftime("%Y-%m-%d %H:%M:%S").encode('utf8'), "<<< FROM:",response
                    broadcastToClients(response)
            elif 'samples' in response:
                # remove '-' in samples dict, eg: dio-0 > dio0 conforms with javascript scheme.
                response = str( ZB_reverse[response["source_addr_long"]] + " SAMPLE >> " \
                    + str(dict((str(key).replace('-',''), str(value)) \
                    for (key, value) in response["samples"][0].items())) \
                    ).replace("'",'"')
                print strftime("%Y-%m-%d %H:%M:%S").encode('utf8'), "<<< FROM:", response
                broadcastToClients(response)
            elif response["id"] == 'remote_at_response':
                response = ZB_reverse[response["source_addr_long"]] + " CMD >> " \
                    + str(response["command"])\
                    + " STATUS: " + str(response["status"].encode('hex'))
                print strftime("%Y-%m-%d %H:%M:%S").encode('utf8'), "<<< FROM:", response
                broadcastToClients(response)
        else:
            # Unknown source address: just log the raw packet.
            print response

    def dispatchZB(self, data):
        # Parse a client command string and transmit it over ZigBee.
        # NOTE(review): `type` shadows the builtin of the same name.
        if len(data) > 2:
            index=0
            frame_id='\x01'
            dest_addr = ZB["BC"]    # default: broadcast address
            type=None;
            print strftime("%Y-%m-%d %H:%M:%S").encode('utf8'), ">>> ", data,
            # First, make sure frame starts correctly and determin the addressing scheme to use:
            if data[0] == '[':      #No address specified: broadcast.
                index=0
                type="tx";
            elif data[0] == '(':    #No address specified: broadcast.
                index=0
                type="at";
            elif data[0] in ZB:     #Valid Address Specified.
                dest_addr=ZB[data[0]]
                if data[1:3] == '![':   #No ack transmit to specific address.
                    index=2
                    frame_id='\x00'
                    type="tx";
                elif data[1:3] == '!(':     #No ack transmit to specific address.
                    index=2
                    frame_id='\x00'
                    type="at";
                elif data[1] == '[':
                    type="tx"
                elif data[1] == '(':
                    type="at"
                    # NOTE(review): index is only advanced in this branch;
                    # the addressed-'[' case above appears to leave index at
                    # 0, which would include the address char in the payload
                    # — confirm intended behavior.
                    index=1
                else:
                    print "INVALID START OF FRAME"
                    return
            else:
                print "INVALID ADDRESS"
                return
            # Also, make sure frame ends correctly, only then send, otherwise just return.
            if (type=="tx" and data[-1] == ']'):
                reactor.callFromThread(self.send, 'tx', dest_addr_long=dest_addr, dest_addr='\xFF\xFE', frame_id=frame_id, data=data[index:])
                print ""
            elif (type=="at" and data[-1]==')'):
                # Remote AT format: (option:command:parameter), hex-encoded
                # option/parameter.
                parts= data[index+1:-1].split(":")
                if len(parts) !=3:
                    print "BAD COMMAND"
                    return
                option = parts[0]
                command = parts[1]
                parameter = parts[2]
                print "[Option = ", option, ", Command = ", command, ", Parameter = ", parameter, "]"
                reactor.callFromThread(self.send, 'remote_at', frame_id='A', dest_addr_long=dest_addr, dest_addr='\xFF\xFE', options=option.decode('hex'), command=command, parameter=parameter.decode('hex'))
            else:
                print "INVALID END OF FRAME"

# Open the serial link to the XBee coordinator (ports/speed from AutoHomeConf).
s = SerialPort(ZBHandler(escaped=False), ZB_PORT, reactor, ZB_SPEED)

################################################################################
# Send data to all TCP + Websocket clients.
################################################################################
def broadcastToClients(data, source=None, timestamp=False):
    # Fan `data` out to every connected client except `source` (the sender).
    if timestamp:
        data = strftime("%Y-%m-%d %H:%M:%S").encode('utf8') + ": " + data
    for client in TCPClients:
        if client != source:
            client.transport.write(data)
    for client in WebSockClients:
        if client != source:
            client.transport.write(data)

"""
################################################################################
# Handle TCP socket connections:
#   currently disabled. I will first find a good use for it (Android app?)
#   then work on it, then add ssl.
################################################################################
class TcpSocket(Protocol):
    def connectionMade(self):
        self.factory.clients.append(self)

    def connectionLost(self, reason):
        self.factory.clients.remove(self)

    def dataReceived(self, data):
        dispatchZB(data)

class TcpSocketFactory(Factory):
    protocol = TcpSocket

    def __init__(self):
        self.clients = TCPClients

reactor.listenTCP(TCP_PORT, TcpSocketFactory())
print "TCP socket listening on port: ", TCP_PORT
"""

################################################################################
# Set up web interface. This sets up the form handling section
# and the webserver root folder.
################################################################################
class FormPage(Resource):
    # POST endpoint ("/form") that authenticates by password and forwards
    # the "cmd" argument to every attached XBee handler.

    def dispatch(self, data):
        for x in xbee:
            x.dispatchZB(data)

    def render_POST(self, request):
        # NOTE(review): bitwise `&` is used between the two membership
        # tests; it works here because both operands are bools, but `and`
        # is the conventional form.
        if ('pass' in request.args) & ('cmd' in request.args):
            if cgi.escape(request.args["pass"][0]) == WEBSITE_PASSWORD:
                print "Authenticated ",
                data = cgi.escape(request.args["cmd"][0])
                #Handle a delayed request. ie, t180*4[l1] will send the command [l1] to device 4 in 2 minutes.
                if data[0] == 't':
                    delimiter= data.find("*")
                    if 1 <delimiter < data.find("[") -1:
                        if int(data[1:delimiter]):
                            # NOTE(review): `timer` is a local here; the
                            # handle is never kept, so this delayed call
                            # cannot be cancelled later — confirm intended.
                            timer = reactor.callLater(int(data[1:delimiter]), self.dispatch, data[delimiter+1:])
                else:
                    self.dispatch(data)
                return '<html><body>Submitted</body></html>'
            else:
                print "Wrong password in POST request"
                return '<html><body>Wrong PWD</body></html>'
        else:
            print "No command AND password in post request"
            return '<html><body>Not Submitted</body></html>'

root = static.File(WEBSITE_ROOT)
root.putChild("form", FormPage())
factory = Site(root)

#reactor.listenTCP(WEBSITE_PORT, factory)    #If you choose not to use ssl for https. Update index.html appropriately.
from twisted.internet import ssl
reactor.listenSSL(WEBSITE_PORT, factory, ssl.DefaultOpenSSLContextFactory(SSL_PRIVKEY, SSL_CERT,))
print "Web server listening on port: ", WEBSITE_PORT

################################################################################
# Run our websocket server which also serves a website, so the WEBSITE_ROOT is just served anyway.
# The prob is that WebSocketSite can't handle POST requests, so it can't be the only server.
################################################################################
class WSHandler(WebSocketHandler):
    # Websocket endpoint ("/ws"): first frame must be the password; after
    # that, frames are XBee commands (optionally named/delayed with
    # "t<secs>*<name>*<cmd>" or cancelled with "tx*<name>*").

    def __init__(self, transport):
        WebSocketHandler.__init__(self, transport)
        self.authenticated = False;

    def dispatch(self, data):
        for x in xbee:
            x.dispatchZB(data)

    def frameReceived(self, data):
        if not self.authenticated:
            if data==WEBSITE_PASSWORD:
                self.authenticated=True
                WebSockClients.append(self)
                print "Authenticated"
        else:
            #Handle a delayed request. ie, t180*4[l1] will send the command [l1] to device 4 in 2 minutes.
            if data[0] == 't':
                name=''
                delimiter1 = data.find("*")
                command=data[data.find("["):]
                # NOTE(review): find() returns -1 when '*' is absent, which
                # is truthy — `if delimiter1:` only excludes position 0.
                if delimiter1:
                    time=data[1:delimiter1]
                    delimiter2 = data.rfind("*",0,data.find("["))
                    if delimiter2:
                        name=data[delimiter1+1:delimiter2]
                if time=='x':
                    # "tx*<name>*" cancels a pending named timer.
                    try:
                        timers[name].cancel()
                        timers.pop(name)
                    except:
                        # NOTE(review): bare except also hides KeyError vs
                        # AlreadyCalled distinctions.
                        print "Failed to cancel timer " + name
                else:
                    timers[name]=reactor.callLater(int(time), self.dispatch, command)
                    print timers
            else:
                self.dispatch(data)

    def connectionMade(self):
        print 'Connected to client..',

    def connectionLost(self, reason):
        print 'Lost connection.'
        if self.authenticated:
            WebSockClients.remove(self)

root = static.File(WEBSITE_ROOT)
site = WebSocketSite(root)
site.addHandler('/ws', WSHandler)

#reactor.listenTCP(WEBSOCKET_PORT, site)     #If you choose not to use wss, update index.html appropriately.
reactor.listenSSL(WEBSOCKET_PORT, site, ssl.DefaultOpenSSLContextFactory(SSL_PRIVKEY, SSL_CERT,))
print "Web socket listening on port: ", WEBSOCKET_PORT

################################################################################
################################################################################
if __name__ == '__main__':
    # Start reactor:
    reactor.run()
Python
# -*- test-case-name: twisted.web.test.test_websocket -*-
# Copyright (c) 2009 Twisted Matrix Laboratories.
# See LICENSE for details.

"""
Note: This is from the associated branch for http://twistedmatrix.com/trac/ticket/4173
and includes support for the hixie-76 handshake.

WebSocket server protocol.

See U{http://tools.ietf.org/html/draft-hixie-thewebsocketprotocol} for the
current version of the specification.

@since: 10.1
"""

import base64
from hashlib import md5, sha1
import itertools
import struct

from twisted.internet import interfaces
from twisted.python import log
from twisted.web._newclient import makeStatefulDispatcher
from twisted.web.http import datetimeToString
from twisted.web.http import _IdentityTransferDecoder
from twisted.web.server import Request, Site, version, unquote
from zope.interface import implements

_ascii_numbers = frozenset(['0', '1', '2', '3', '4', '5', '6', '7', '8', '9'])

# hybi-10 frame opcodes.
(OPCODE_CONT, OPCODE_TEXT, OPCODE_BINARY, OPCODE_CLOSE, OPCODE_PING, OPCODE_PONG) = (0x0, 0x1, 0x2, 0x8, 0x9, 0xA)

ALL_OPCODES = (OPCODE_CONT, OPCODE_TEXT, OPCODE_BINARY, OPCODE_CLOSE, OPCODE_PING, OPCODE_PONG)

CONTROL_OPCODES = (OPCODE_CLOSE, OPCODE_PING, OPCODE_PONG)
DATA_OPCODES = (OPCODE_TEXT, OPCODE_BINARY)


class WebSocketRequest(Request):
    """
    A general purpose L{Request} supporting connection upgrade for WebSocket.
    """
    ACCEPT_GUID = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11"

    def process(self):
        connection = self.requestHeaders.getRawHeaders("Connection", [None])[0]
        upgrade = self.requestHeaders.getRawHeaders("Upgrade", [None])[0]

        # NOTE(review): only an exact "Upgrade" Connection header value is
        # accepted here (e.g. "keep-alive, Upgrade" falls through to plain
        # HTTP processing).
        if connection != "Upgrade":
            return Request.process(self)

        if upgrade not in ("WebSocket", "websocket"):
            return Request.process(self)

        return self.processWebSocket()

    def processWebSocket(self):
        """
        Process a specific web socket request.
        """
        # get site from channel
        self.site = self.channel.site

        # set various default headers
        self.setHeader("server", version)
        self.setHeader("date", datetimeToString())

        # Resource Identification
        self.prepath = []
        self.postpath = map(unquote, self.path[1:].split("/"))

        self.renderWebSocket()

    def _clientHandshake76(self):
        """
        Complete hixie-76 handshake, which consists of a challenge and
        response.

        If the request is not identified with a proper WebSocket handshake,
        the connection will be closed. Otherwise, the response to the
        handshake is sent and a C{WebSocketHandler} is created to handle the
        request.
        """
        def finish():
            self.channel.transport.loseConnection()
        if self.queued:
            return finish()

        secKey1 = self.requestHeaders.getRawHeaders("Sec-WebSocket-Key1", [])
        secKey2 = self.requestHeaders.getRawHeaders("Sec-WebSocket-Key2", [])

        if len(secKey1) != 1 or len(secKey2) != 1:
            return finish()

        # copied
        originHeaders = self.requestHeaders.getRawHeaders("Origin", [])
        if len(originHeaders) != 1:
            return finish()
        hostHeaders = self.requestHeaders.getRawHeaders("Host", [])
        if len(hostHeaders) != 1:
            return finish()
        handlerFactory = self.site.handlers.get(self.uri)
        if not handlerFactory:
            return finish()

        # key1 and key2 exist and are a string of characters

        # filter both keys to get a string with all numbers in order
        key1 = secKey1[0]
        key2 = secKey2[0]
        numBuffer1 = ''.join([x for x in key1 if x in _ascii_numbers])
        numBuffer2 = ''.join([x for x in key2 if x in _ascii_numbers])

        # make sure numbers actually exist
        if not numBuffer1 or not numBuffer2:
            return finish()

        # these should be int-like
        num1 = int(numBuffer1)
        num2 = int(numBuffer2)

        # count the number of spaces in each character string
        numSpaces1 = 0
        for x in key1:
            if x == ' ':
                numSpaces1 += 1

        numSpaces2 = 0
        for x in key2:
            if x == ' ':
                numSpaces2 += 1

        # there should be at least one space in each
        if numSpaces1 == 0 or numSpaces2 == 0:
            return finish()

        # get two resulting numbers, as specified in hixie-76
        # (integer division of the digit string by the space count)
        num1 = num1 / numSpaces1
        num2 = num2 / numSpaces2

        transport = WebSocketTransport(self)
        handler = handlerFactory(transport)
        transport._attachHandler(handler)

        self.channel.setRawMode()

        def finishHandshake(nonce):
            """ Receive nonce value from request body, and calculate repsonse. """
            protocolHeaders = self.requestHeaders.getRawHeaders(
                "WebSocket-Protocol", [])
            if len(protocolHeaders) not in (0, 1):
                return finish()
            if protocolHeaders:
                if protocolHeaders[0] not in self.site.supportedProtocols:
                    return finish()
                protocolHeader = protocolHeaders[0]
            else:
                protocolHeader = None

            originHeader = originHeaders[0]
            hostHeader = hostHeaders[0]
            self.startedWriting = True
            handshake = [
                "HTTP/1.1 101 Web Socket Protocol Handshake",
                "Upgrade: WebSocket",
                "Connection: Upgrade"]
            handshake.append("Sec-WebSocket-Origin: %s" % (originHeader))
            if self.isSecure():
                scheme = "wss"
            else:
                scheme = "ws"
            handshake.append(
                "Sec-WebSocket-Location: %s://%s%s" % (
                scheme, hostHeader, self.uri))

            if protocolHeader is not None:
                handshake.append("Sec-WebSocket-Protocol: %s" % protocolHeader)

            for header in handshake:
                self.write("%s\r\n" % header)

            self.write("\r\n")

            # concatenate num1 (32 bit in), num2 (32 bit int), nonce, and take md5 of result
            res = struct.pack('>II8s', num1, num2, nonce)
            server_response = md5(res).digest()
            self.write(server_response)

            # XXX we probably don't want to set _transferDecoder
            self.channel._transferDecoder = WebSocketFrameDecoder(
                self, handler)

            transport._connectionMade()

        # we need the nonce from the request body
        self.channel._transferDecoder = _IdentityTransferDecoder(0, lambda _ : None, finishHandshake)

    def _checkClientHandshake(self):
        """
        Verify client handshake, closing the connection in case of problem.

        @return: C{None} if a problem was detected, or a tuple of I{Origin}
            header, I{Host} header, I{WebSocket-Protocol} header, and
            C{WebSocketHandler} instance. The I{WebSocket-Protocol} header
            will be C{None} if not specified by the client.
        """
        def finish():
            self.channel.transport.loseConnection()
        if self.queued:
            return finish()

        originHeaders = self.requestHeaders.getRawHeaders("Origin", [])
        if len(originHeaders) != 1:
            return finish()
        hostHeaders = self.requestHeaders.getRawHeaders("Host", [])
        if len(hostHeaders) != 1:
            return finish()
        handlerFactory = self.site.handlers.get(self.uri)
        if not handlerFactory:
            return finish()
        transport = WebSocketTransport(self)
        handler = handlerFactory(transport)
        transport._attachHandler(handler)

        protocolHeaders = self.requestHeaders.getRawHeaders(
            "WebSocket-Protocol", [])
        if len(protocolHeaders) not in (0, 1):
            return finish()
        if protocolHeaders:
            if protocolHeaders[0] not in self.site.supportedProtocols:
                return finish()
            protocolHeader = protocolHeaders[0]
        else:
            protocolHeader = None
        return originHeaders[0], hostHeaders[0], protocolHeader, handler

    def _getOneHeader(self, name):
        # Return the header value only when exactly one instance is present.
        headers = self.requestHeaders.getRawHeaders(name)
        if not headers or len(headers) > 1:
            return None
        return headers[0]

    def _clientHandshakeHybi(self):
        """
        Initial handshake, as defined in hybi-10.

        If the client is not following the hybi-10 protocol or is requesting
        a version that's lower than what hybi-10 describes, the connection
        will be closed. Otherwise the appropriate transport and content
        decoders will be plugged in and the connection will be estabilished.
        """
        version = self._getOneHeader("Sec-WebSocket-Version")
        # we only speak version 8 of the protocol
        # NOTE(review): the upstream rejection (426 Upgrade Required) is
        # commented out below — any version is accepted here by design.
        if version != "8":
            print "client connecting to websocket with version:", version, "8 oficially supported. restriction over-rided"
            #self.setResponseCode(426, "Upgrade Required")
            #self.setHeader("Sec-WebSocket-Version", "8")
            #return self.finish()

        key = self._getOneHeader("Sec-WebSocket-Key")
        if not key:
            self.setResponseCode(400, "Bad Request")
            return self.finish()

        handlerFactory = self.site.handlers.get(self.uri)
        if not handlerFactory:
            self.setResponseCode(404, "Not Found")
            return self.finish()

        transport = WebSocketHybiTransport(self)
        handler = handlerFactory(transport)
        transport._attachHandler(handler)

        # Accept token: SHA1 of key + fixed GUID, base64-encoded.
        accept = base64.b64encode(sha1(key + self.ACCEPT_GUID).digest())
        self.startedWriting = True
        handshake = [
            "HTTP/1.1 101 Switching Protocols",
            "Upgrade: websocket",
            "Connection: Upgrade",
            "Sec-WebSocket-Accept: %s" % accept]

        for header in handshake:
            self.write("%s\r\n" % header)

        self.write("\r\n")

        self.channel.setRawMode()
        self.channel._transferDecoder = WebSocketHybiFrameDecoder(
            self, handler)
        handler.transport._connectionMade()

    def renderWebSocket(self):
        """
        Render a WebSocket request.

        If the request is not identified with a proper WebSocket handshake,
        the connection will be closed. Otherwise, the response to the
        handshake is sent and a C{WebSocketHandler} is created to handle the
        request.
        """
        # check for hybi handshake requests
        if self.requestHeaders.hasHeader("Sec-WebSocket-Version"):
            return self._clientHandshakeHybi()

        # check for post-75 handshake requests
        isSecHandshake = self.requestHeaders.getRawHeaders("Sec-WebSocket-Key1", [])
        if isSecHandshake:
            self._clientHandshake76()
        else:
            check = self._checkClientHandshake()
            if check is None:
                return
            originHeader, hostHeader, protocolHeader, handler = check

            self.startedWriting = True
            handshake = [
                "HTTP/1.1 101 Web Socket Protocol Handshake",
                "Upgrade: WebSocket",
                "Connection: Upgrade"]
            handshake.append("WebSocket-Origin: %s" % (originHeader))
            if self.isSecure():
                scheme = "wss"
            else:
                scheme = "ws"
            handshake.append(
                "WebSocket-Location: %s://%s%s" % (
                scheme, hostHeader, self.uri))

            if protocolHeader is not None:
                handshake.append("WebSocket-Protocol: %s" % protocolHeader)

            for header in handshake:
                self.write("%s\r\n" % header)

            self.write("\r\n")
            self.channel.setRawMode()

            # XXX we probably don't want to set _transferDecoder
            self.channel._transferDecoder = WebSocketFrameDecoder(
                self, handler)
            handler.transport._connectionMade()
        return


class WebSocketSite(Site):
    """
    @ivar handlers: a C{dict} of names to L{WebSocketHandler} factories.
    @type handlers: C{dict}
    @ivar supportedProtocols: a C{list} of supported I{WebSocket-Protocol}
        values. If a value is passed at handshake and doesn't figure in this
        list, the connection is closed.
    @type supportedProtocols: C{list}
    """
    requestFactory = WebSocketRequest

    def __init__(self, resource, logPath=None, timeout=60*60*12,
                 supportedProtocols=None):
        Site.__init__(self, resource, logPath, timeout)
        self.handlers = {}
        self.supportedProtocols = supportedProtocols or []

    def addHandler(self, name, handlerFactory):
        """
        Add or override a handler for the given C{name}.

        @param name: the resource name to be handled.
        @type name: C{str}
        @param handlerFactory: a C{WebSocketHandler} factory.
        @type handlerFactory: C{callable}
        """
        if not name.startswith("/"):
            raise ValueError("Invalid resource name.")
        self.handlers[name] = handlerFactory


class WebSocketTransport(object):
    """
    Transport abstraction over WebSocket, providing classic Twisted methods
    and callbacks.
    """
    implements(interfaces.ITransport)

    _handler = None

    def __init__(self, request):
        self._request = request
        # Tear down the handler when the underlying request finishes.
        self._request.notifyFinish().addErrback(self._connectionLost)

    def _attachHandler(self, handler):
        """
        Attach the given L{WebSocketHandler} to this transport.
        """
        self._handler = handler

    def _connectionMade(self):
        """
        Called when a connection is made.
        """
        self._handler.connectionMade()

    def _connectionLost(self, reason):
        """
        Forward connection lost event to the L{WebSocketHandler}.
        """
        self._handler.connectionLost(reason)
        # Break reference cycles between request, transport and handler.
        del self._request.transport
        del self._request
        del self._handler

    def getPeer(self):
        """
        Return a tuple describing the other side of the connection.

        @rtype: C{tuple}
        """
        return self._request.transport.getPeer()

    def getHost(self):
        """
        Similar to getPeer, but returns an address describing this side of
        the connection.

        @return: An L{IAddress} provider.
        """
        return self._request.transport.getHost()

    def write(self, frame):
        """
        Send the given frame to the connected client.

        @param frame: a I{UTF-8} encoded C{str} to send to the client.
        @type frame: C{str}
        """
        # hixie framing: 0x00 <payload> 0xff
        self._request.write("\x00%s\xff" % frame)

    def writeSequence(self, frames):
        """
        Send a sequence of frames to the connected client.
        """
        self._request.write("".join(["\x00%s\xff" % f for f in frames]))

    def loseConnection(self):
        """
        Close the connection.
        """
        self._request.transport.loseConnection()


class WebSocketHybiTransport(WebSocketTransport):
    """
    A WebSocket transport that speaks the hybi-10 protocol. The L{ITransport}
    methods are set up to send Text frames containing the payload. To have
    finer-grained control over the type of frame being sent, the transport
    provides a L{sendFrame} method.
    """
    def write(self, frame):
        """
        Treat the given frame as a text frame and send it to the client.

        @param frame: a I{UTF-8} encoded C{str} to send to the client.
        @type frame: C{str}
        """
        self.sendFrame(OPCODE_TEXT, frame)

    def writeSequence(self, frames):
        """
        Send a sequence of text frames to the connected client.
        """
        for frame in frames:
            self.sendFrame(OPCODE_TEXT, frame)

    def sendFrame(self, opcode, payload, fragmented=False):
        """
        Send a frame with the given opcode and payload to the client.

        If the L{fragmented} parameter is set, the message frame will contain
        a flag saying it's part of a fragmented payload, by default data is
        sent as a self-contained frame. Note that if you use fragmentation
        support, it is up to you to correctly set the first frame's opcode
        and then use L{OPCODE_CONT} on the following continuation frames.

        Payloads sent using this method are never masked.

        @param opcode: the opcode as defined in hybi-10
        @type opcode: C{int}

        @param payload: the frame's payload
        @type payload: C{str}

        @param fragmented: should the frame be marked as part of a
            fragmented payload
        @type fragmented: C{bool}
        """
        if opcode not in ALL_OPCODES:
            raise ValueError("Invalid opcode 0x%X" % opcode)

        length = len(payload)

        # there's always the header and at least one length field
        spec = ">BB"
        # FIN bit set unless the frame is part of a fragmented message.
        if fragmented:
            header = 0x00
        else:
            header = 0x80
        data = [header | opcode]

        # there's no masking, so the high bit of the first byte of length is
        # always 0
        if 125 < length <= 65535:
            # add a 16-bit int to the spec and append 126 value, which means
            # "interpret the next two bytes"
            spec += "H"
            data.append(126)
        elif length > 65535:
            # same for even longer frames
            spec += "Q"
            data.append(127)
        data.append(length)

        header = struct.pack(spec, *data)
        self._request.write(header + payload)


class WebSocketHandler(object):
    """
    Base class for handling WebSocket connections. It mainly provides a
    transport to send frames, and a callback called when frame are received,
    C{frameReceived}.

    @ivar transport: a C{WebSocketTransport} instance.
    @type: L{WebSocketTransport}
    """

    def __init__(self, transport):
        """
        Create the handler, with the given transport
        """
        self.transport = transport

    def frameReceived(self, frame):
        """
        Called when a frame is received.

        @param frame: a I{UTF-8} encoded C{str} sent by the client.
        @type frame: C{str}
        """

    def binaryFrameReceived(self, data):
        """
        Called when a binary is received via the hybi protocol.

        @param data: a binary C{str} sent by the client.
        @type data: C{str}
        """

    def pongReceived(self, data):
        """
        Called when a pong control message is received via the hybi
        protocol.

        @param data: the payload sent by the client.
        @type data: C{str}
        """

    def closeReceived(self, code, msg):
        """
        Called when a close control message is received via the hybi
        protocol.

        @param code: the status code of the close message, if present
        @type code: C{int} or C{None}

        @param msg: the I{UTF-8} encoded message sent by the client, if
            present
        @type msg: C{str} or C{None}
        """

    def frameLengthExceeded(self):
        """
        Called when too big a frame is received. The default behavior is to
        close the connection, but it can be customized to do something else.
        """
        self.transport.loseConnection()

    def connectionMade(self):
        """
        Called when a connection is made.
        """

    def connectionLost(self, reason):
        """
        Callback called when the underlying transport has detected that the
        connection is closed.
        """


class IncompleteFrame(Exception):
    """
    Not enough data to complete a WebSocket frame.
    """


class DecodingError(Exception):
    """
    The incoming data is not valid WebSocket protocol data.
    """


class WebSocketFrameDecoder(object):
    """
    Decode WebSocket frames and pass them to the attached C{WebSocketHandler}
    instance.

    @ivar MAX_LENGTH: maximum len of a text frame allowed, before calling
        C{frameLengthExceeded} on the handler.
@type MAX_LENGTH: C{int} @ivar MAX_BINARY_LENGTH: like C{MAX_LENGTH}, but for 0xff type frames @type MAX_BINARY_LENGTH: C{int} @ivar closing: a flag set when the closing handshake has been received @type closing: C{bool} @ivar request: C{Request} instance. @type request: L{twisted.web.server.Request} @ivar handler: L{WebSocketHandler} instance handling the request. @type handler: L{WebSocketHandler} @ivar _data: C{list} of C{str} buffering the received data. @type _data: C{list} of C{str} @ivar _currentFrameLength: length of the current handled frame, plus the additional leading byte. @type _currentFrameLength: C{int} """ MAX_LENGTH = 16384 MAX_BINARY_LENGTH = 2147483648 closing = False def __init__(self, request, handler): self.request = request self.handler = handler self.closing = False self._data = [] self._currentFrameLength = 0 self._state = "FRAME_START" def dataReceived(self, data): """ Parse data to read WebSocket frames. @param data: data received over the WebSocket connection. @type data: C{str} """ if not data or self.closing: return self._data.append(data) while self._data and not self.closing: try: self.consumeData(self._data[-1]) except IncompleteFrame: break except DecodingError: log.err() self.request.transport.loseConnection() break def consumeData(self, data): """ Process the last data chunk received. After processing is done, L{IncompleteFrame} should be raised or L{_addRemainingData} should be called. @param data: last chunk of data received. 
@type data: C{str} """ consumeData = makeStatefulDispatcher("consumeData", consumeData) def _consumeData_FRAME_START(self, data): self._currentFrameLength = 0 if data[0] == "\x00": self._state = "PARSING_TEXT_FRAME" elif data[0] == "\xff": self._state = "PARSING_LENGTH" else: raise DecodingError("Invalid frame type 0x%s" % data[0].encode("hex")) self._addRemainingData(data[1:]) def _consumeData_PARSING_TEXT_FRAME(self, data): endIndex = data.find("\xff") if endIndex == -1: self._currentFrameLength += len(data) else: self._currentFrameLength += endIndex self._currentFrameLength += endIndex # check length + 1 to account for the initial frame type byte if self._currentFrameLength + 1 > self.MAX_LENGTH: self.handler.frameLengthExceeded() if endIndex == -1: raise IncompleteFrame() frame = "".join(self._data[:-1]) + data[:endIndex] self.handler.frameReceived(frame) remainingData = data[endIndex + 1:] self._addRemainingData(remainingData) self._state = "FRAME_START" def _consumeData_PARSING_LENGTH(self, data): current = 0 available = len(data) while current < available: byte = ord(data[current]) length, more = byte & 0x7F, bool(byte & 0x80) if not length: self._closingHandshake() raise IncompleteFrame() self._currentFrameLength *= 128 self._currentFrameLength += length current += 1 if not more: if self._currentFrameLength > self.MAX_BINARY_LENGTH: self.handler.frameLengthExceeded() remainingData = data[current:] self._addRemainingData(remainingData) self._state = "PARSING_BINARY_FRAME" break else: raise IncompleteFrame() def _consumeData_PARSING_BINARY_FRAME(self, data): available = len(data) if self._currentFrameLength <= available: remainingData = data[self._currentFrameLength:] self._addRemainingData(remainingData) self._state = "FRAME_START" else: self._currentFrameLength -= available self._data[:] = [] def _addRemainingData(self, remainingData): if remainingData: self._data[:] = [remainingData] else: self._data[:] = [] def _closingHandshake(self): self.closing = True 
# send the closing handshake self.request.transport.write("\xff\x00") # discard all buffered data self._data[:] = [] class WebSocketHybiFrameDecoder(WebSocketFrameDecoder): def __init__(self, request, handler): WebSocketFrameDecoder.__init__(self, request, handler) self._opcode = None self._fragment_opcode = None self._fragments = [] self._state = "HYBI_FRAME_START" def _consumeData_HYBI_FRAME_START(self, data): self._opcode = None byte = ord(data[0]) fin, reserved, opcode = byte & 0x80, byte & 0x70, byte & 0x0F if reserved: raise DecodingError("Reserved bits set: 0x%02X" % byte) if opcode not in ALL_OPCODES: raise DecodingError("Invalid opcode 0x%X" % opcode) if not fin: # part of a fragmented frame if not self._fragment_opcode: # first of the fragmented frames, which determines the opcode if opcode not in DATA_OPCODES: raise DecodingError( "Fragmented frame with invalid opcode 0x%X" % opcode) # save the opcode for later use self._fragment_opcode = opcode else: # already reading a fragmet, and this is a fragmented frame, so # it has to use the continuation opcode if opcode != OPCODE_CONT: raise DecodingError( "Continuation frame with invalid opcode 0x%X" % opcode) else: # self-contained frame or last of the fragmented frames if self._fragment_opcode: # a fragmented frame is pending, so this can only be the end of # it or a control message if opcode not in CONTROL_OPCODES and opcode != OPCODE_CONT: raise DecodingError( "Final frame with invalid opcode 0x%X" % opcode) else: # no fragmented frames pending, so this cannot be a # continuation frame if opcode == OPCODE_CONT: raise DecodingError( "Final frame with invalid opcode 0x%X" % opcode) self._opcode = opcode self._state = "HYBI_PARSING_LENGTH" self._addRemainingData(data[1:]) def _consumeData_HYBI_PARSING_LENGTH(self, data): byte = ord(data[0]) masked, length = byte & 0x80, byte & 0x7F if not masked: raise DecodingError("Unmasked frame received") if length < 126: self._currentFrameLength = length self._state = 
"HYBI_MASKING_KEY" elif length == 126: self._state = "HYBI_PARSING_LENGTH_2" elif length == 127: self._state = "HYBI_PARSING_LENGTH_3" self._addRemainingData(data[1:]) def _consumeData_HYBI_PARSING_LENGTH_2(self, data): self._parse_length_spec(2, ">H") def _consumeData_HYBI_PARSING_LENGTH_3(self, data): self._parse_length_spec(8, ">Q", 0x7fffffffffffffff) def _parse_length_spec(self, needed, spec, limit=None): # if the accumulated data is not long enough to parse out the length, # keep on accumulating if sum(map(len, self._data)) < needed: raise IncompleteFrame() data = "".join(self._data) self._currentFrameLength = struct.unpack(spec, data[:needed])[0] if limit and self._currentFrameLength > limit: raise DecodingError( "Frame length exceeded: %r" % self._currentFrameLength) self._addRemainingData(data[needed:]) self._state = "HYBI_MASKING_KEY" def _consumeData_HYBI_MASKING_KEY(self, data): if sum(map(len, self._data)) < 4: raise IncompleteFrame() data = "".join(self._data) self._maskingKey = struct.unpack(">4B", data[:4]) self._addRemainingData(data[4:]) if self._currentFrameLength: self._state = "HYBI_PAYLOAD" else: # there will be no payload, notify the handler of an empty frame # and continue self._frameCompleted("", data[4:]) def _consumeData_HYBI_PAYLOAD(self, data): available = len(data) if self._currentFrameLength > available: self._currentFrameLength -= available raise IncompleteFrame() frame = "".join(self._data[:-1]) + data[:self._currentFrameLength] # unmask the frame bufferedPayload = itertools.chain(*self._data[:-1]) restOfPayload = data[:self._currentFrameLength] allData = itertools.chain(bufferedPayload, restOfPayload) key = itertools.cycle(self._maskingKey) def xor(c, k): return chr(ord(c) ^ k) unmasked = itertools.imap(xor, allData, key) frame = "".join(unmasked) remainingData = data[self._currentFrameLength:] self._frameCompleted(frame, remainingData) def _frameCompleted(self, frame, remainingData): # if it's part of a fragmented frame, store the 
payload if self._opcode is None: self._fragments.append(frame) # if it's the last of the fragmented frames, replace the opcode with # the original one from the fragment and the frame with the accumulated # payload if self._opcode == OPCODE_CONT: self._opcode = self._fragment_opcode self._fragments.append(frame) frame = "".join(self._fragments) self._fragment_opcode = None self._fragments[:] = [] if self._opcode == OPCODE_TEXT: # assume it's valid UTF-8 and let the client handle the rest if len(frame) > self.MAX_LENGTH: self.handler.frameLengthExceeded() self.handler.frameReceived(frame) elif self._opcode == OPCODE_BINARY: if len(frame) > self.MAX_BINARY_LENGTH: self.handler.frameLengthExceeded() self.handler.binaryFrameReceived(frame) elif self._opcode == OPCODE_PING: self.handler.transport.sendFrame(OPCODE_PONG, frame) elif self._opcode == OPCODE_PONG: self.handler.pongReceived(frame) self._state = "HYBI_FRAME_START" self._addRemainingData(remainingData) # if the opcode was CLOSE, initiate connection closing if self._opcode == OPCODE_CLOSE: self._hybiClose(frame) def _hybiClose(self, frame): self.closing = True # try to parse out the status code and message if len(frame) > 1: code = struct.unpack(">H", frame[:2])[0] msg = frame[2:] else: code, msg = None, None # let the handler know self.handler.closeReceived(code, msg) # send the closing handshake self.handler.transport.sendFrame(OPCODE_CLOSE, "") # discard all buffered data and lose connection self._data[:] = [] self.handler.transport.loseConnection() __all__ = ["WebSocketHandler", "WebSocketSite"]
Python
################################################################################
# Set up your parameters:
################################################################################

# TCP port the raw command listener binds to.
TCP_PORT = 4321

import hashlib
# MD5 digest of the website password ("mypass").  The literal is passed as
# bytes so this line behaves identically on Python 2 (where bytes is str) and
# Python 3 (where md5() rejects a plain str with TypeError).
WEBSITE_PASSWORD = hashlib.md5(b"mypass").hexdigest()

WEBSITE_ROOT = "/Users/ruzz/open-zb-home/Site"
WEBSITE_PORT = 8880
WEBSOCKET_PORT = 8881

# Serial device and baud rate of the locally attached XBee coordinator.
ZB_PORT = '/dev/tty.SLAB_USBtoUART'
ZB_SPEED = 57600

# 64-bit addresses of the XBee modules.  Change these to the values shown on
# the back of your own XBee modules.  "BC" is the broadcast address.
ZB = {
    "1": '\x00\x13\xA2\x00\x40\x3B\x8F\x4E',
    "2": '\x00\x13\xA2\x00\x40\x7A\x38\x58',
    "3": '\x00\x13\xA2\x00\x40\x76\x47\xB4',
    "4": '\x00\x13\xA2\x00\x40\x76\x47\xB6',
    "BC": '\x00\x00\x00\x00\x00\x00\xFF\xFF',
}

# Reverse lookup: 64-bit address -> human-readable module name.
ZB_reverse = {
    ZB["1"]: 'M1',
    ZB["2"]: 'AC',
    ZB["3"]: 'M2',
    ZB["4"]: 'ML',
    ZB["BC"]: 'BC',
}

############################
# To create key and certificate:
#   openssl genrsa > privkey.pem
# Then:
#   openssl req -new -x509 -key privkey.pem -out cacert.pem -days 1000
# Feel free to comment these out if you don't want to use ssl, also update the
# main script accordingly.
###############################
SSL_PRIVKEY = '/Users/ruzz/open-zb-home/privkey.pem'
SSL_CERT = '/Users/ruzz/open-zb-home/cacert.pem'
Python
#!/usr/bin/python

from optparse import OptionParser
import re
import subprocess
import sys

"""
This script generates a release note from the output of git log
between the specified tags.

Options:
--issues          Show output the commits with issues associated with them.
--issue-numbers   Show outputs issue numbers of the commits with issues
                  associated with them

Arguments:
since -- tag name
until -- tag name

Example Input:

   * <commit subject>
   +
   <commit message>

   Bug: issue 123
   Change-Id: <change id>
   Signed-off-by: <name>

Expected Output:

   * issue 123 <commit subject>
   +
   <commit message>
"""

# regex pattern to match following cases such as Bug: 123, Issue Bug: 123,
# Bug: GERRIT-123, Bug: issue 123, Bug issue: 123, issue: 123, issue: bug 123
p = re.compile('bug: GERRIT-|bug(:? issue)?:? |issue(:? bug)?:? ',
               re.IGNORECASE)

# regex matching commit footers such as "Change-Id:" / "Signed-off-by:"
_FOOTER_RE = re.compile(r'((\w+-)+\w+:)')


def format_issue_numbers(text):
    """Return only the issue numbers (one per line) found in *text*."""
    out = []
    for line in text.splitlines(True):
        if p.match(line):
            out.append(p.sub('', line))
    return ''.join(out)


def format_release_notes(text, issues_only=False):
    """Format *text* (git log output, see module docstring) as release notes.

    When *issues_only* is true, only commits with an associated issue are
    emitted.  Returns the formatted notes as a single string.
    """
    out = []
    subject = ""
    message = []
    is_issue = False

    def flush():
        # Emit the pending commit block, honoring the issues_only filter.
        if subject == "":
            return
        if issues_only and not is_issue:
            return
        out.append(subject)
        if message:
            # Clear + from last line in commit message
            message[-1] = '\n'
            out.extend(message)

    for line in text.splitlines(True):
        if p.match(line):
            # Move issue number to subject line (after the leading "* ")
            line = p.sub('issue ', line).replace('\n', ' ')
            subject = subject[:2] + line + subject[2:]
            is_issue = True
        elif line.startswith('* '):
            # Start of the next commit: write the previous block first
            flush()
            message = []
            subject = line
            is_issue = False
        elif _FOOTER_RE.match(line):
            # Remove commit footers
            continue
        elif line == '\n' and message and message[-1] != '+\n':
            # Don't add extra blank line if last one is already blank
            message.append('+\n')
        elif line != '\n':
            message.append(line)
    # BUG FIX: the original script only wrote a commit block upon seeing the
    # NEXT "* " line, so the final commit was silently dropped.  Flush it.
    flush()
    return ''.join(out)


def main(argv=None):
    """Parse command line arguments, run git log and print the notes."""
    parser = OptionParser(usage='usage: %prog [options] <since> <until>')
    parser.add_option('-i', '--issues', action='store_true',
                      dest='issues_only', default=False,
                      help='only output the commits with issues association')
    parser.add_option('-n', '--issue-numbers', action='store_true',
                      dest='issue_numbers_only', default=False,
                      help='only outputs issue numbers of the commits with '
                           'issues association')
    (options, args) = parser.parse_args(argv)
    if len(args) != 2:
        parser.error("wrong number of arguments")

    since_until = args[0] + '..' + args[1]
    proc = subprocess.Popen(['git', 'log', '--reverse', '--no-merges',
                             since_until, "--format=* %s%n+%n%b"],
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT)
    stdout_value = proc.communicate()[0]

    if options.issue_numbers_only:
        sys.stdout.write(format_issue_numbers(stdout_value))
    else:
        sys.stdout.write(format_release_notes(stdout_value,
                                              options.issues_only))


if __name__ == '__main__':
    main()
Python
#!/usr/bin/env python

import commands
import getopt
import sys

# Gerrit SSH endpoint used to post review scores.
SSH_USER = 'bot'
SSH_HOST = 'localhost'
SSH_PORT = 29418
SSH_COMMAND = 'ssh %s@%s -p %d gerrit approve ' \
    % (SSH_USER, SSH_HOST, SSH_PORT)

FAILURE_SCORE = '--code-review=-2'
FAILURE_MESSAGE = 'This commit message does not match the standard.' \
    + ' Please correct the commit message and upload a replacement patch.'

PASS_SCORE = '--code-review=0'
PASS_MESSAGE = ''


def main():
    """Validate the commit message of a Gerrit patch set and score it."""
    # All five options are required; collect them into a single dict.
    params = {'change': None, 'project': None, 'branch': None,
              'commit': None, 'patchset': None}
    try:
        opts, args = getopt.getopt(sys.argv[1:], '', \
            ['change=', 'project=', 'branch=', 'commit=', 'patchset='])
    except getopt.GetoptError as err:
        print('Error: %s' % (err))
        usage()
        sys.exit(-1)

    for arg, value in opts:
        name = arg[2:]
        if name in params:
            params[name] = value
        else:
            print('Error: option %s not recognized' % (arg))
            usage()
            sys.exit(-1)

    if None in params.values():
        usage()
        sys.exit(-1)

    commit = params['commit']

    command = 'git cat-file commit %s' % (commit)
    status, output = commands.getstatusoutput(command)
    if status != 0:
        print('Error running \'%s\'. status: %s, output:\n\n%s' % \
            (command, status, output))
        sys.exit(-1)

    # The commit message starts after the first blank line of the object.
    commitMessage = output[(output.find('\n\n') + 2):]
    commitLines = commitMessage.split('\n')

    # The summary must be a single line followed by a blank line.
    if len(commitLines) > 1 and len(commitLines[1]) != 0:
        fail(commit, 'Invalid commit summary. The summary must be '
             + 'one line followed by a blank line.')

    for lineno, line in enumerate(commitLines, 1):
        if len(line) > 80:
            fail(commit, 'Line %d is over 80 characters.' % lineno)

    passes(commit)


def usage():
    print('Usage:\n')
    print(sys.argv[0] + ' --change <change id> --project <project name> '
          + '--branch <branch> --commit <sha1> --patchset <patchset id>')


def fail(commit, message):
    """Score the commit -2 with an explanatory message, then exit."""
    escaped = _shell_escape(FAILURE_MESSAGE + '\n\n' + message)
    commands.getstatusoutput(SSH_COMMAND + FAILURE_SCORE + ' -m \\\"'
                             + escaped + '\\\" ' + commit)
    sys.exit(1)


def passes(commit):
    """Score the commit 0 to record that the check ran and passed."""
    commands.getstatusoutput(SSH_COMMAND + PASS_SCORE + ' -m \\\"'
                             + _shell_escape(PASS_MESSAGE) + ' \\\" ' + commit)


def _shell_escape(x):
    """Encode newlines so the message survives the nested ssh quoting."""
    pieces = []
    for c in x:
        if c == '\n':
            pieces.append('\\\"$\'\\n\'\\\"')
        else:
            pieces.append(c)
    return ''.join(pieces)


if __name__ == '__main__':
    main()
Python
#!/usr/bin/env python2.6
# Copyright (c) 2010, Code Aurora Forum. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#    Redistributions of source code must retain the above copyright
#    notice, this list of conditions and the following disclaimer.
#
#    Redistributions in binary form must reproduce the above
#    copyright notice, this list of conditions and the following
#    disclaimer in the documentation and/or other materials provided
#    with the distribution.
#
#    Neither the name of Code Aurora Forum, Inc. nor the names of its
#    contributors may be used to endorse or promote products derived
#    from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
# BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
# IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

# This script is designed to detect when a patchset uploaded to Gerrit is
# 'identical' (determined via git-patch-id) and reapply reviews onto the new
# patchset from the previous patchset.

# Get usage and help info by running: ./trivial_rebase.py --help
# Documentation is available here: https://www.codeaurora.org/xwiki/bin/QAEP/Gerrit

import json
from optparse import OptionParser
import subprocess
from sys import exit


class CheckCallError(OSError):
  """CheckCall() returned non-0."""
  def __init__(self, command, cwd, retcode, stdout, stderr=None):
    OSError.__init__(self, command, cwd, retcode, stdout, stderr)
    self.command = command
    self.cwd = cwd
    self.retcode = retcode
    self.stdout = stdout
    self.stderr = stderr


def CheckCall(command, cwd=None):
  """Like subprocess.check_call() but returns stdout.

  Works on python 2.4.  Raises CheckCallError on a non-zero exit code.
  """
  try:
    process = subprocess.Popen(command, cwd=cwd, stdout=subprocess.PIPE)
    std_out, std_err = process.communicate()
  except OSError as e:
    raise CheckCallError(command, cwd, e.errno, None)
  if process.returncode:
    raise CheckCallError(command, cwd, process.returncode, std_out, std_err)
  return std_out, std_err


def GsqlQuery(sql_query, server, port):
  """Runs a gerrit gsql query and returns the result"""
  gsql_cmd = ['ssh', '-p', port, server, 'gerrit', 'gsql', '--format',
              'JSON', '-c', sql_query]
  try:
    (gsql_out, gsql_stderr) = CheckCall(gsql_cmd)
  except CheckCallError as e:
    print("return code is %s" % e.retcode)
    print("stdout and stderr is\n%s%s" % (e.stdout, e.stderr))
    raise
  # gsql emits one JSON object per row; split the stream into row strings
  new_out = gsql_out.replace('}}\n', '}}\nsplit here\n')
  return new_out.split('split here\n')


def FindPrevRev(changeId, patchset, server, port):
  """Finds the revision of the previous patch set on the change"""
  sql_query = ("\"SELECT revision FROM patch_sets,changes WHERE "
               "patch_sets.change_id = changes.change_id AND "
               "patch_sets.patch_set_id = %s AND "
               "changes.change_key = \'%s\'\"" % ((patchset - 1), changeId))
  revisions = GsqlQuery(sql_query, server, port)
  json_dict = json.loads(revisions[0], strict=False)
  return json_dict["columns"]["revision"]


def GetApprovals(changeId, patchset, server, port):
  """Get all non-zero approvals on the patch set PRIOR to the given one.

  (The query uses patchset - 1.)  Returns a list of approval dicts.
  """
  sql_query = ("\"SELECT value,account_id,category_id FROM patch_set_approvals "
               "WHERE patch_set_id = %s AND change_id = (SELECT change_id FROM "
               "changes WHERE change_key = \'%s\') AND value <> 0\""
               % ((patchset - 1), changeId))
  gsql_out = GsqlQuery(sql_query, server, port)
  approvals = []
  for json_str in gsql_out:
    # NOTE: renamed from `dict` in the original, which shadowed the builtin
    row = json.loads(json_str, strict=False)
    if row["type"] == "row":
      approvals.append(row["columns"])
  return approvals


def GetEmailFromAcctId(account_id, server, port):
  """Returns the preferred email address associated with the account_id"""
  sql_query = ("\"SELECT preferred_email FROM accounts WHERE account_id = %s\""
               % account_id)
  email_addr = GsqlQuery(sql_query, server, port)
  json_dict = json.loads(email_addr[0], strict=False)
  return json_dict["columns"]["preferred_email"]


def GetPatchId(revision):
  """Return the `git patch-id` output for the given revision."""
  git_show_cmd = ['git', 'show', revision]
  patch_id_cmd = ['git', 'patch-id']
  patch_id_process = subprocess.Popen(patch_id_cmd, stdout=subprocess.PIPE,
                                      stdin=subprocess.PIPE)
  git_show_process = subprocess.Popen(git_show_cmd, stdout=subprocess.PIPE)
  return patch_id_process.communicate(git_show_process.communicate()[0])[0]


def SuExec(server, port, private_key, as_user, cmd):
  """Run a gerrit command over ssh impersonating as_user via suexec."""
  suexec_cmd = ['ssh', '-l', "Gerrit Code Review", '-p', port, server, '-i',
                private_key, 'suexec', '--as', as_user, '--', cmd]
  CheckCall(suexec_cmd)


def DiffCommitMessages(commit1, commit2):
  """Return True when the author/subject/body of the two commits differ."""
  log_cmd1 = ['git', 'log', '--pretty=format:"%an %ae%n%s%n%b"',
              commit1 + '^!']
  commit1_log = CheckCall(log_cmd1)
  log_cmd2 = ['git', 'log', '--pretty=format:"%an %ae%n%s%n%b"',
              commit2 + '^!']
  commit2_log = CheckCall(log_cmd2)
  if commit1_log != commit2_log:
    return True
  return False


def Main():
  """Detect a trivial rebase and re-apply the previous patch set's reviews."""
  server = 'localhost'
  usage = "usage: %prog <required options> [--server-port=PORT]"
  parser = OptionParser(usage=usage)
  parser.add_option("--change", dest="changeId", help="Change identifier")
  parser.add_option("--project", help="Project path in Gerrit")
  parser.add_option("--commit", help="Git commit-ish for this patchset")
  parser.add_option("--patchset", type="int", help="The patchset number")
  parser.add_option("--private-key-path", dest="private_key_path",
                    help="Full path to Gerrit SSH daemon's private host key")
  parser.add_option("--server-port", dest="port", default='29418',
                    help="Port to connect to Gerrit's SSH daemon "
                         "[default: %default]")

  (options, args) = parser.parse_args()

  if not options.changeId:
    parser.print_help()
    exit(0)

  if options.patchset == 1:
    # Nothing to detect on first patchset
    exit(0)
  prev_revision = None
  prev_revision = FindPrevRev(options.changeId, options.patchset, server,
                              options.port)
  if not prev_revision:
    # Couldn't find a previous revision
    exit(0)
  prev_patch_id = GetPatchId(prev_revision)
  cur_patch_id = GetPatchId(options.commit)
  if cur_patch_id.split()[0] != prev_patch_id.split()[0]:
    # patch-ids don't match
    exit(0)
  # Patch ids match. This is a trivial rebase.
  # In addition to patch-id we should check if the commit message changed. Most
  # approvers would want to re-review changes when the commit message changes.
  changed = DiffCommitMessages(prev_revision, options.commit)
  if changed:
    # Insert a comment into the change letting the approvers know only the
    # commit message changed
    comment_msg = ("\'--message=New patchset patch-id matches previous patchset"
                   ", but commit message has changed.'")
    comment_cmd = ['ssh', '-p', options.port, server, 'gerrit', 'approve',
                   '--project', options.project, comment_msg, options.commit]
    CheckCall(comment_cmd)
    exit(0)

  # Need to get all approvals on prior patch set, then suexec them onto
  # this patchset.
  approvals = GetApprovals(options.changeId, options.patchset, server,
                           options.port)
  gerrit_approve_msg = ("\'Automatically re-added by Gerrit trivial rebase "
                        "detection script.\'")
  for approval in approvals:
    # Note: Sites with different 'copy_min_score' values in the
    # approval_categories DB table might want different behavior here.
    # Additional categories should also be added if desired.
    if approval["category_id"] == "CRVW":
      approve_category = '--code-review'
    elif approval["category_id"] == "VRIF":
      # Don't re-add verifies
      #approve_category = '--verified'
      continue
    elif approval["category_id"] == "SUBM":
      # We don't care about previous submit attempts
      continue
    else:
      print("Unsupported category: %s" % approval)
      exit(0)

    score = approval["value"]
    gerrit_approve_cmd = ['gerrit', 'approve', '--project', options.project,
                          '--message', gerrit_approve_msg, approve_category,
                          score, options.commit]
    email_addr = GetEmailFromAcctId(approval["account_id"], server,
                                    options.port)
    SuExec(server, options.port, options.private_key_path, email_addr,
           ' '.join(gerrit_approve_cmd))
  exit(0)

if __name__ == "__main__":
  Main()
Python
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import cgi
import os

from google.appengine.api import users
from google.appengine.ext import webapp
from google.appengine.ext.webapp import util
from google.appengine.ext.webapp import template
from google.appengine.ext.webapp.util import run_wsgi_app
from google.appengine.ext import db
from google.appengine.ext.db import polymodel
from google.appengine.ext.db.metadata import Property


class AssetClass(polymodel.PolyModel):
    """Base polymodel for assets: a display name, description and rating."""

    display_name = db.StringProperty()
    description = db.StringProperty(multiline=True)
    rating = db.RatingProperty()

    def syncProperty(self, handler, property):
        """Copy one form field named *property* from the request onto self."""
        if property == 'rating':
            # ratings arrive as strings; store them as ints
            self.rating = int(handler.request.get(property))
        elif property in ('display_name', 'description'):
            setattr(self, property, handler.request.get(property))

    def makeId(self):
        """Set self.id to one more than the highest id of this kind.

        NOTE(review): scans every entity of the kind and is racy under
        concurrent writes -- fine for a demo, not for production.
        """
        query = db.GqlQuery("SELECT * FROM " + self.kind())
        highest = 0
        if query.count() > 0:
            for entity in query:
                highest = max(entity.id, highest)
        self.id = highest + 1


class Universe(AssetClass):
    id = db.IntegerProperty()


class Cateogry(AssetClass):
    # NOTE(review): class name is misspelled, but renaming it would change
    # the datastore kind, so it is preserved as-is.
    id = db.IntegerProperty()
    universe_ref = db.ReferenceProperty()


class Asset(db.Model):
    id = db.IntegerProperty()
    category_ref = db.ReferenceProperty()


class FrameIAM_Universe(webapp.RequestHandler):
    """POST handler creating a Universe entity from the submitted form."""

    def post(self):
        universe = Universe()
        universe.makeId()
        for field in ('display_name', 'description', 'rating'):
            universe.syncProperty(self, field)
        universe.put()
        self.redirect('/')


class MainPage(webapp.RequestHandler):
    """Render index.html with all universes and a login/logout link."""

    def get(self):
        universe_props = Property.all()
        universe_props.ancestor(Property.key_for_kind('AssetClass'))

        universes = Universe.all()
        universes.order('-id')

        if users.get_current_user():
            url = users.create_logout_url(self.request.uri)
            url_linktext = 'Logout'
        else:
            url = users.create_login_url(self.request.uri)
            url_linktext = 'Login'

        template_values = {
            'universe_props': universe_props,
            'universes': universes,
            'url': url,
            'url_linktext': url_linktext,
        }

        path = os.path.join(os.path.dirname(__file__), 'index.html')
        self.response.out.write(template.render(path, template_values))


application = webapp.WSGIApplication(
    [('/', MainPage), ('/universe', FrameIAM_Universe)],
    debug=True)


def main():
    run_wsgi_app(application)


if __name__ == "__main__":
    main()
Python
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import cgi
import os

from google.appengine.api import users
from google.appengine.ext import webapp
from google.appengine.ext.webapp import util
from google.appengine.ext.webapp import template
from google.appengine.ext.webapp.util import run_wsgi_app
from google.appengine.ext import db
from google.appengine.ext.db import polymodel
from google.appengine.ext.db.metadata import Property


class AssetClass(polymodel.PolyModel):
    """Base polymodel for assets: a display name, description and rating."""

    display_name = db.StringProperty()
    description = db.StringProperty(multiline=True)
    rating = db.RatingProperty()

    def syncProperty(self, handler, property):
        """Copy one form field named *property* from the request onto self."""
        if property == 'rating':
            # ratings arrive as strings; store them as ints
            self.rating = int(handler.request.get(property))
        elif property in ('display_name', 'description'):
            setattr(self, property, handler.request.get(property))

    def makeId(self):
        """Set self.id to one more than the highest id of this kind.

        NOTE(review): scans every entity of the kind and is racy under
        concurrent writes -- fine for a demo, not for production.
        """
        query = db.GqlQuery("SELECT * FROM " + self.kind())
        highest = 0
        if query.count() > 0:
            for entity in query:
                highest = max(entity.id, highest)
        self.id = highest + 1


class Universe(AssetClass):
    id = db.IntegerProperty()


class Cateogry(AssetClass):
    # NOTE(review): class name is misspelled, but renaming it would change
    # the datastore kind, so it is preserved as-is.
    id = db.IntegerProperty()
    universe_ref = db.ReferenceProperty()


class Asset(db.Model):
    id = db.IntegerProperty()
    category_ref = db.ReferenceProperty()


class FrameIAM_Universe(webapp.RequestHandler):
    """POST handler creating a Universe entity from the submitted form."""

    def post(self):
        universe = Universe()
        universe.makeId()
        for field in ('display_name', 'description', 'rating'):
            universe.syncProperty(self, field)
        universe.put()
        self.redirect('/')


class MainPage(webapp.RequestHandler):
    """Render index.html with all universes and a login/logout link."""

    def get(self):
        universe_props = Property.all()
        universe_props.ancestor(Property.key_for_kind('AssetClass'))

        universes = Universe.all()
        universes.order('-id')

        if users.get_current_user():
            url = users.create_logout_url(self.request.uri)
            url_linktext = 'Logout'
        else:
            url = users.create_login_url(self.request.uri)
            url_linktext = 'Login'

        template_values = {
            'universe_props': universe_props,
            'universes': universes,
            'url': url,
            'url_linktext': url_linktext,
        }

        path = os.path.join(os.path.dirname(__file__), 'index.html')
        self.response.out.write(template.render(path, template_values))


application = webapp.WSGIApplication(
    [('/', MainPage), ('/universe', FrameIAM_Universe)],
    debug=True)


def main():
    run_wsgi_app(application)


if __name__ == "__main__":
    main()
Python
#!/usr/bin/python # # Copyright (C) 2009 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # This file demonstrates how to use the Google Data API's Python client library # to interface with the Blogger service. There are examples for the following # operations: # # * Retrieving the list of all the user's blogs # * Retrieving all posts on a single blog # * Performing a date-range query for posts on a blog # * Creating draft posts and publishing posts # * Updating posts # * Retrieving comments # * Creating comments # * Deleting comments # * Deleting posts __author__ = 'lkeppler@google.com (Luke Keppler)' import gdata.blogger.client import gdata.client import gdata.sample_util import gdata.data import atom.data class BloggerExample: def __init__(self): """Creates a GDataService and provides ClientLogin auth details to it. The email and password are required arguments for ClientLogin. The 'source' defined below is an arbitrary string, but should be used to reference your name or the name of your organization, the app name and version, with '-' between each of the three values.""" # Authenticate using ClientLogin, AuthSub, or OAuth. self.client = gdata.blogger.client.BloggerClient() gdata.sample_util.authorize_client( self.client, service='blogger', source='Blogger_Python_Sample-2.0', scopes=['http://www.blogger.com/feeds/']) # Get the blog ID for the first blog. 
feed = self.client.get_blogs() self.blog_id = feed.entry[0].get_blog_id() def PrintUserBlogTitles(self): """Prints a list of all the user's blogs.""" # Request the feed. feed = self.client.get_blogs() # Print the results. print feed.title.text for entry in feed.entry: print "\t" + entry.title.text print def CreatePost(self, title, content, is_draft): """This method creates a new post on a blog. The new post can be stored as a draft or published based on the value of the is_draft parameter. The method creates an GDataEntry for the new post using the title, content, author_name and is_draft parameters. With is_draft, True saves the post as a draft, while False publishes the post. Then it uses the given GDataService to insert the new post. If the insertion is successful, the added post (GDataEntry) will be returned. """ return self.client.add_post(self.blog_id, title, content, draft=is_draft) def PrintAllPosts(self): """This method displays the titles of all the posts in a blog. First it requests the posts feed for the blogs and then it prints the results. """ # Request the feed. feed = self.client.get_posts(self.blog_id) # Print the results. print feed.title.text for entry in feed.entry: if not entry.title.text: print "\tNo Title" else: print "\t" + entry.title.text.encode('utf-8') print def PrintPostsInDateRange(self, start_time, end_time): """This method displays the title and modification time for any posts that have been created or updated in the period between the start_time and end_time parameters. The method creates the query, submits it to the GDataService, and then displays the results. Note that while the start_time is inclusive, the end_time is exclusive, so specifying an end_time of '2007-07-01' will include those posts up until 2007-6-30 11:59:59PM. The start_time specifies the beginning of the search period (inclusive), while end_time specifies the end of the search period (exclusive). """ # Create query and submit a request. 
query = gdata.blogger.client.Query(updated_min=start_time, updated_max=end_time, order_by='updated') print query.updated_min print query.order_by feed = self.client.get_posts(self.blog_id, query=query) # Print the results. print feed.title.text + " posts between " + start_time + " and " + end_time print feed.title.text for entry in feed.entry: if not entry.title.text: print "\tNo Title" else: print "\t" + entry.title.text print def UpdatePostTitle(self, entry_to_update, new_title): """This method updates the title of the given post. The GDataEntry object is updated with the new title, then a request is sent to the GDataService. If the insertion is successful, the updated post will be returned. Note that other characteristics of the post can also be modified by updating the values of the entry object before submitting the request. The entry_to_update is a GDatEntry containing the post to update. The new_title is the text to use for the post's new title. Returns: a GDataEntry containing the newly-updated post. """ # Set the new title in the Entry object entry_to_update.title = atom.data.Title(type='xhtml', text=new_title) return self.client.update(entry_to_update) def CreateComment(self, post_id, comment_text): """This method adds a comment to the specified post. First the comment feed's URI is built using the given post ID. Then a GDataEntry is created for the comment and submitted to the GDataService. The post_id is the ID of the post on which to post comments. The comment_text is the text of the comment to store. Returns: an entry containing the newly-created comment NOTE: This functionality is not officially supported yet. """ return self.client.add_comment(self.blog_id, post_id, comment_text) def PrintAllComments(self, post_id): """This method displays all the comments for the given post. First the comment feed's URI is built using the given post ID. Then the method requests the comments feed and displays the results. 
Takes the post_id of the post on which to view comments. """ feed = self.client.get_post_comments(self.blog_id, post_id) # Display the results print feed.title.text for entry in feed.entry: print "\t" + entry.title.text print "\t" + entry.updated.text print def DeleteComment(self, comment_entry): """This method removes the comment specified by the given edit_link_href, the URI for editing the comment. """ self.client.delete(comment_entry) def DeletePost(self, post_entry): """This method removes the post specified by the given edit_link_href, the URI for editing the post. """ self.client.delete(post_entry) def run(self): """Runs each of the example methods defined above, demonstrating how to interface with the Blogger service. """ # Demonstrate retrieving a list of the user's blogs. self.PrintUserBlogTitles() # Demonstrate how to create a draft post. draft_post = self.CreatePost('Snorkling in Aruba', '<p>We had <b>so</b> much fun snorkling in Aruba<p>', True) print 'Successfully created draft post: "' + draft_post.title.text + '".\n' # Delete the draft blog post. self.client.delete(draft_post) # Demonstrate how to publish a public post. public_post = self.CreatePost("Back from vacation", "<p>I didn't want to leave Aruba, but I ran out of money :(<p>", False) print "Successfully created public post: \"" + public_post.title.text + "\".\n" # Demonstrate various feed queries. print "Now listing all posts." self.PrintAllPosts() print "Now listing all posts between 2007-04-04 and 2007-04-23." self.PrintPostsInDateRange("2007-04-04", "2007-04-23") # Demonstrate updating a post's title. print "Now updating the title of the post we just created:" public_post = self.UpdatePostTitle(public_post, "The party's over") print "Successfully changed the post's title to \"" + public_post.title.text + "\".\n" # Demonstrate how to retrieve the comments for a post. 
# Get the post ID and build the comments feed URI for the specified post post_id = public_post.get_post_id() print "Now posting a comment on the post titled: \"" + public_post.title.text + "\"." comment = self.CreateComment(post_id, "Did you see any sharks?") print "Successfully posted \"" + comment.content.text + "\" on the post titled: \"" + public_post.title.text + "\".\n" comment_id = comment.GetCommentId() print "Now printing all comments" self.PrintAllComments(post_id) # Delete the comment we just posted print "Now deleting the comment we just posted" self.DeleteComment(comment) print "Successfully deleted comment." self.PrintAllComments(post_id) # Demonstrate deleting posts. print "Now deleting the post titled: \"" + public_post.title.text + "\"." self.DeletePost(public_post) print "Successfully deleted post." self.PrintAllPosts() def main(): """The main function runs the BloggerExample application. NOTE: It is recommended that you run this sample using a test account. """ sample = BloggerExample() sample.run() if __name__ == '__main__': main()
Python
__author__ = 'wiktorgworek@google.com (Wiktor Gworek)' import wsgiref.handlers import atom import os import cgi import gdata.blogger.service from oauth import OAuthDanceHandler, OAuthHandler, requiresOAuth from google.appengine.ext import webapp from google.appengine.ext.webapp import template class MainHandler(OAuthHandler): """Main handler. If user is not logged in via OAuth it will display welcome page. In other case user's blogs on Blogger will be displayed.""" def get(self): try: template_values = {'logged': self.client.has_access_token()} if template_values['logged']: feed = self.client.blogger.GetBlogFeed() blogs = [] for entry in feed.entry: blogs.append({ 'id': entry.GetBlogId(), 'title': entry.title.text, 'link': entry.GetHtmlLink().href, 'published': entry.published.text, 'updated': entry.updated.text }) template_values['blogs'] = blogs except gdata.service.RequestError, error: template_values['logged'] = False path = os.path.join(os.path.dirname(__file__), 'index.html') self.response.out.write(template.render(path, template_values)) class NewPostHandler(OAuthHandler): """Handles AJAX POST request to create a new post on a blog.""" @requiresOAuth def post(self): entry = atom.Entry(content=atom.Content(text=self.request.get('body'))) self.client.blogger.AddPost(entry, blog_id=self.request.get('id')) def main(): application = webapp.WSGIApplication([ (r'/oauth/(.*)', OAuthDanceHandler), ('/new_post', NewPostHandler), ('/', MainHandler), ], debug=True) wsgiref.handlers.CGIHandler().run(application) if __name__ == '__main__': main()
Python
"""Provides OAuth authorization. Main components are: * OAuthClient - provides logic for 3-legged OAuth protocol, * OAuthDanceHandler - wrapper for OAuthClient for handling OAuth requests, * OAuthHandler - from this handler should inherit all other handlers that want to be authenticated and have access to BloggerService. Be sure that you added @requiredOAuth on top of your request method (i.e. post, get). Request tokens are stored in OAuthRequestToken (explicite) and access tokens are stored in TokenCollection (implicit) provided by gdata.alt.appengine. Heavily used resources and ideas from: * http://github.com/tav/tweetapp, * Examples of OAuth from GData Python Client written by Eric Bidelman. """ __author__ = ('wiktorgworek (Wiktor Gworek), ' 'e.bidelman (Eric Bidelman)') import os import gdata.auth import gdata.client import gdata.alt.appengine import gdata.blogger.service from google.appengine.api import users from google.appengine.ext import db from google.appengine.ext import webapp from google.appengine.ext.webapp import template SETTINGS = { 'APP_NAME': 'YOUR_APPLICATION_NAME', 'CONSUMER_KEY': 'YOUR_CONSUMER_KEY', 'CONSUMER_SECRET': 'YOUR_CONSUMER_SECRET', 'SIG_METHOD': gdata.auth.OAuthSignatureMethod.HMAC_SHA1, 'SCOPES': gdata.service.CLIENT_LOGIN_SCOPES['blogger'] } # ------------------------------------------------------------------------------ # Data store models. # ------------------------------------------------------------------------------ class OAuthRequestToken(db.Model): """Stores OAuth request token.""" token_key = db.StringProperty(required=True) token_secret = db.StringProperty(required=True) created = db.DateTimeProperty(auto_now_add=True) # ------------------------------------------------------------------------------ # OAuth client. 
# ------------------------------------------------------------------------------ class OAuthClient(object): __public__ = ('request_token', 'callback', 'revoke_token') def __init__(self, handler): self.handler = handler self.blogger = gdata.blogger.service.BloggerService( source=SETTINGS['APP_NAME']) self.blogger.SetOAuthInputParameters(SETTINGS['SIG_METHOD'], SETTINGS['CONSUMER_KEY'], consumer_secret=SETTINGS['CONSUMER_SECRET']) gdata.alt.appengine.run_on_appengine(self.blogger) def has_access_token(self): """Checks if there is an access token in token store.""" access_token = self.blogger.token_store.find_token( '%20'.join(SETTINGS['SCOPES'])) return isinstance(access_token, gdata.auth.OAuthToken) def request_token(self): """Fetches a request token and redirects the user to the approval page.""" if users.get_current_user(): # 1.) REQUEST TOKEN STEP. Provide the data scope(s) and the page we'll # be redirected back to after the user grants access on the approval page. req_token = self.blogger.FetchOAuthRequestToken( scopes=SETTINGS['SCOPES'], oauth_callback=self.handler.request.uri.replace( 'request_token', 'callback')) # When using HMAC, persist the token secret in order to re-create an # OAuthToken object coming back from the approval page. db_token = OAuthRequestToken(token_key = req_token.key, token_secret=req_token.secret) db_token.put() # 2.) APPROVAL STEP. Redirect to user to Google's OAuth approval page. self.handler.redirect(self.blogger.GenerateOAuthAuthorizationURL()) def callback(self): """Invoked after we're redirected back from the approval page.""" oauth_token = gdata.auth.OAuthTokenFromUrl(self.handler.request.uri) if oauth_token: # Find request token saved by put() method. db_token = OAuthRequestToken.all().filter( 'token_key =', oauth_token.key).fetch(1)[0] oauth_token.secret = db_token.token_secret oauth_token.oauth_input_params = self.blogger.GetOAuthInputParameters() self.blogger.SetOAuthToken(oauth_token) # 3.) 
Exchange the authorized request token for an access token oauth_verifier = self.handler.request.get( 'oauth_verifier', default_value='') access_token = self.blogger.UpgradeToOAuthAccessToken( oauth_verifier=oauth_verifier) # Remember the access token in the current user's token store if access_token and users.get_current_user(): self.blogger.token_store.add_token(access_token) elif access_token: self.blogger.current_token = access_token self.blogger.SetOAuthToken(access_token) self.handler.redirect('/') def revoke_token(self): """Revokes the current user's OAuth access token.""" try: self.blogger.RevokeOAuthToken() except gdata.service.RevokingOAuthTokenFailed: pass except gdata.service.NonOAuthToken: pass self.blogger.token_store.remove_all_tokens() self.handler.redirect('/') # ------------------------------------------------------------------------------ # Request handlers. # ------------------------------------------------------------------------------ class OAuthDanceHandler(webapp.RequestHandler): """Handler for the 3 legged OAuth dance. This handler is responsible for fetching an initial OAuth request token, redirecting the user to the approval page. When the user grants access, they will be redirected back to this GET handler and their authorized request token will be exchanged for a long-lived access token.""" def __init__(self): super(OAuthDanceHandler, self).__init__() self.client = OAuthClient(self) def get(self, action=''): if action in self.client.__public__: self.response.out.write(getattr(self.client, action)()) else: self.response.out.write(self.client.request_token()) class OAuthHandler(webapp.RequestHandler): """All handlers requiring OAuth should inherit from this class.""" def __init__(self): super(OAuthHandler, self).__init__() self.client = OAuthClient(self) def requiresOAuth(fun): """Decorator for request handlers to gain authentication via OAuth. 
Must be used in a handler that inherits from OAuthHandler.""" def decorate(self, *args, **kwargs): if self.client.has_access_token(): try: fun(self, *args, **kwargs) except gdata.service.RequestError, error: if error.code in [401, 403]: self.redirect('/oauth/request_token') else: raise else: self.redirect('/oauth/request_token') return decorate
Python
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

__author__ = 'j.s@google.com (Jeff Scudder)'

import os
import wsgiref.handlers
from google.appengine.api import users
from google.appengine.ext import webapp
from google.appengine.ext.webapp import template
from google.appengine.ext.webapp.util import run_wsgi_app
import gdata.gauth
import gdata.data
# FIX: gdata.client is used below (GDClient) but was only available via
# transitive import; import it explicitly.
import gdata.client
import gdata.blogger.client


def get_auth_token(request):
  """Retrieves the AuthSub token for the current user.

  Will first check the request URL for a token request parameter indicating
  that the user has been sent to this page after authorizing the app.
  Auto-upgrades to a session token. If the token was not in the URL, which
  will usually be the case, looks for the token in the datastore.

  Returns:
    The token object if one was found for the current user. If there is no
    current user, it returns False; if there is a current user but no
    AuthSub token, it returns None.
  """
  current_user = users.get_current_user()
  if current_user is None or current_user.user_id() is None:
    return False

  # Look for the token string in the current page's URL.
  token_string, token_scopes = gdata.gauth.auth_sub_string_from_url(
      request.url)
  if token_string is None:
    # Try to find a previously obtained session token.
    return gdata.gauth.ae_load('blogger' + current_user.user_id())

  # If there was a new token in the current page's URL, convert it to a
  # long-lived session token and persist it to be used in future requests.
  single_use_token = gdata.gauth.AuthSubToken(token_string, token_scopes)

  # Create a client to make the HTTP request to upgrade the single use token
  # to a long lived session token.
  client = gdata.client.GDClient()
  session_token = client.upgrade_token(single_use_token)
  gdata.gauth.ae_save(session_token, 'blogger' + current_user.user_id())
  return session_token


class ListBlogs(webapp.RequestHandler):
  """Requests the list of the user's blogs from the Blogger API."""

  def get(self):
    template_values = {
        'sign_out': users.create_logout_url('/')
    }

    # See if we have an auth token for this user.
    token = get_auth_token(self.request)
    if token is None:
      # Signed in, but not yet authorized: send the user to AuthSub approval.
      template_values['auth_url'] = gdata.gauth.generate_auth_sub_url(
          self.request.url, ['http://www.blogger.com/feeds/'])
      path = os.path.join(os.path.dirname(__file__), 'auth_required.html')
      self.response.out.write(template.render(path, template_values))
      return
    elif token is False:
      # FIX: identity comparison for the False sentinel (was `token == False`).
      self.response.out.write(
          '<html><body><a href="%s">You must sign in first</a>'
          '</body></html>' % users.create_login_url('/blogs'))
      return

    client = gdata.blogger.client.BloggerClient()
    feed = client.get_blogs(auth_token=token)
    template_values['feed'] = feed
    path = os.path.join(os.path.dirname(__file__), 'list_blogs.html')
    self.response.out.write(template.render(path, template_values))


class WritePost(webapp.RequestHandler):
  """Shows the post editor (GET) and publishes the post to Blogger (POST)."""

  def get(self):
    template_values = {
        'sign_out': users.create_logout_url('/'),
        'blog_id': self.request.get('id')
    }

    # We should have an auth token for this user.
    token = get_auth_token(self.request)
    if not token:
      self.redirect('/blogs')
      return

    path = os.path.join(os.path.dirname(__file__), 'post_editor.html')
    self.response.out.write(template.render(path, template_values))

  def post(self):
    token = get_auth_token(self.request)
    if not token:
      self.redirect('/blogs')
      return

    draft = False
    if self.request.get('draft') == 'true':
      draft = True

    client = gdata.blogger.client.BloggerClient()
    new_post = client.add_post(
        self.request.get('blog_id'), self.request.get('title'),
        self.request.get('body'), draft=draft, auth_token=token)

    if not draft:
      self.response.out.write(
          'See your new post <a href="%s">here</a>.' % (
              new_post.find_alternate_link()))
    else:
      self.response.out.write(
          'This was a draft blog post, visit '
          '<a href="http://blogger.com/">blogger.com</a> to publish')


def main():
  application = webapp.WSGIApplication([('/blogs', ListBlogs),
                                        ('/write_post', WritePost)],
                                       debug=True)
  wsgiref.handlers.CGIHandler().run(application)


if __name__ == '__main__':
  main()
Python
#!/usr/bin/python # # Copyright (C) 2007 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # This file demonstrates how to use the Google Data API's Python client library # to interface with the Blogger service. There are examples for the following # operations: # # * Retrieving the list of all the user's blogs # * Retrieving all posts on a single blog # * Performing a date-range query for posts on a blog # * Creating draft posts and publishing posts # * Updating posts # * Retrieving comments # * Creating comments # * Deleting comments # * Deleting posts __author__ = 'lkeppler@google.com (Luke Keppler)' from gdata import service import gdata import atom import getopt import sys class BloggerExample: def __init__(self, email, password): """Creates a GDataService and provides ClientLogin auth details to it. The email and password are required arguments for ClientLogin. The 'source' defined below is an arbitrary string, but should be used to reference your name or the name of your organization, the app name and version, with '-' between each of the three values.""" # Authenticate using ClientLogin. self.service = service.GDataService(email, password) self.service.source = 'Blogger_Python_Sample-1.0' self.service.service = 'blogger' self.service.server = 'www.blogger.com' self.service.ProgrammaticLogin() # Get the blog ID for the first blog. 
feed = self.service.Get('/feeds/default/blogs') self_link = feed.entry[0].GetSelfLink() if self_link: self.blog_id = self_link.href.split('/')[-1] def PrintUserBlogTitles(self): """Prints a list of all the user's blogs.""" # Request the feed. query = service.Query() query.feed = '/feeds/default/blogs' feed = self.service.Get(query.ToUri()) # Print the results. print feed.title.text for entry in feed.entry: print "\t" + entry.title.text print def CreatePost(self, title, content, author_name, is_draft): """This method creates a new post on a blog. The new post can be stored as a draft or published based on the value of the is_draft parameter. The method creates an GDataEntry for the new post using the title, content, author_name and is_draft parameters. With is_draft, True saves the post as a draft, while False publishes the post. Then it uses the given GDataService to insert the new post. If the insertion is successful, the added post (GDataEntry) will be returned. """ # Create the entry to insert. entry = gdata.GDataEntry() entry.author.append(atom.Author(atom.Name(text=author_name))) entry.title = atom.Title(title_type='xhtml', text=title) entry.content = atom.Content(content_type='html', text=content) if is_draft: control = atom.Control() control.draft = atom.Draft(text='yes') entry.control = control # Ask the service to insert the new entry. return self.service.Post(entry, '/feeds/' + self.blog_id + '/posts/default') def PrintAllPosts(self): """This method displays the titles of all the posts in a blog. First it requests the posts feed for the blogs and then it prints the results. """ # Request the feed. feed = self.service.GetFeed('/feeds/' + self.blog_id + '/posts/default') # Print the results. 
print feed.title.text for entry in feed.entry: if not entry.title.text: print "\tNo Title" else: print "\t" + entry.title.text print def PrintPostsInDateRange(self, start_time, end_time): """This method displays the title and modification time for any posts that have been created or updated in the period between the start_time and end_time parameters. The method creates the query, submits it to the GDataService, and then displays the results. Note that while the start_time is inclusive, the end_time is exclusive, so specifying an end_time of '2007-07-01' will include those posts up until 2007-6-30 11:59:59PM. The start_time specifies the beginning of the search period (inclusive), while end_time specifies the end of the search period (exclusive). """ # Create query and submit a request. query = service.Query() query.feed = '/feeds/' + self.blog_id + '/posts/default' query.updated_min = start_time query.updated_max = end_time query.orderby = 'updated' feed = self.service.Get(query.ToUri()) # Print the results. print feed.title.text + " posts between " + start_time + " and " + end_time print feed.title.text for entry in feed.entry: if not entry.title.text: print "\tNo Title" else: print "\t" + entry.title.text print def UpdatePostTitle(self, entry_to_update, new_title): """This method updates the title of the given post. The GDataEntry object is updated with the new title, then a request is sent to the GDataService. If the insertion is successful, the updated post will be returned. Note that other characteristics of the post can also be modified by updating the values of the entry object before submitting the request. The entry_to_update is a GDatEntry containing the post to update. The new_title is the text to use for the post's new title. Returns: a GDataEntry containing the newly-updated post. 
""" # Set the new title in the Entry object entry_to_update.title = atom.Title('xhtml', new_title) # Grab the edit URI edit_uri = entry_to_update.GetEditLink().href return self.service.Put(entry_to_update, edit_uri) def CreateComment(self, post_id, comment_text): """This method adds a comment to the specified post. First the comment feed's URI is built using the given post ID. Then a GDataEntry is created for the comment and submitted to the GDataService. The post_id is the ID of the post on which to post comments. The comment_text is the text of the comment to store. Returns: an entry containing the newly-created comment NOTE: This functionality is not officially supported yet. """ # Build the comment feed URI feed_uri = '/feeds/' + self.blog_id + '/' + post_id + '/comments/default' # Create a new entry for the comment and submit it to the GDataService entry = gdata.GDataEntry() entry.content = atom.Content(content_type='xhtml', text=comment_text) return self.service.Post(entry, feed_uri) def PrintAllComments(self, post_id): """This method displays all the comments for the given post. First the comment feed's URI is built using the given post ID. Then the method requests the comments feed and displays the results. Takes the post_id of the post on which to view comments. """ # Build comment feed URI and request comments on the specified post feed_url = '/feeds/' + self.blog_id + '/comments/default' feed = self.service.Get(feed_url) # Display the results print feed.title.text for entry in feed.entry: print "\t" + entry.title.text print "\t" + entry.updated.text print def DeleteComment(self, post_id, comment_id): """This method removes the comment specified by the given edit_link_href, the URI for editing the comment. 
""" feed_uri = '/feeds/' + self.blog_id + '/' + post_id + '/comments/default/' + comment_id self.service.Delete(feed_uri) def DeletePost(self, edit_link_href): """This method removes the post specified by the given edit_link_href, the URI for editing the post. """ self.service.Delete(edit_link_href) def run(self): """Runs each of the example methods defined above, demonstrating how to interface with the Blogger service. """ # Demonstrate retrieving a list of the user's blogs. self.PrintUserBlogTitles() # Demonstrate how to create a draft post. draft_post = self.CreatePost("Snorkling in Aruba", "<p>We had <b>so</b> much fun snorkling in Aruba<p>", "Post author", True) print "Successfully created draft post: \"" + draft_post.title.text + "\".\n" # Demonstrate how to publish a public post. public_post = self.CreatePost("Back from vacation", "<p>I didn't want to leave Aruba, but I ran out of money :(<p>", "Post author", False) print "Successfully created public post: \"" + public_post.title.text + "\".\n" # Demonstrate various feed queries. print "Now listing all posts." self.PrintAllPosts() print "Now listing all posts between 2007-04-04 and 2007-04-23." self.PrintPostsInDateRange("2007-04-04", "2007-04-23") # Demonstrate updating a post's title. print "Now updating the title of the post we just created:" public_post = self.UpdatePostTitle(public_post, "The party's over") print "Successfully changed the post's title to \"" + public_post.title.text + "\".\n" # Demonstrate how to retrieve the comments for a post. # Get the post ID and build the comments feed URI for the specified post self_id = public_post.id.text tokens = self_id.split("-") post_id = tokens[-1] print "Now posting a comment on the post titled: \"" + public_post.title.text + "\"." 
comment = self.CreateComment(post_id, "Did you see any sharks?") print "Successfully posted \"" + comment.content.text + "\" on the post titled: \"" + public_post.title.text + "\".\n" comment_id = comment.GetEditLink().href.split("/")[-1] print "Now printing all comments" self.PrintAllComments(post_id) # Delete the comment we just posted print "Now deleting the comment we just posted" self.DeleteComment(post_id, comment_id) print "Successfully deleted comment." self.PrintAllComments(post_id) # Get the post's edit URI edit_uri = public_post.GetEditLink().href # Demonstrate deleting posts. print "Now deleting the post titled: \"" + public_post.title.text + "\"." self.DeletePost(edit_uri) print "Successfully deleted post." self.PrintAllPosts() def main(): """The main function runs the BloggerExample application with the provided username and password values. Authentication credentials are required. NOTE: It is recommended that you run this sample using a test account.""" # parse command line options try: opts, args = getopt.getopt(sys.argv[1:], "", ["email=", "password="]) except getopt.error, msg: print ('python BloggerExample.py --email [email] --password [password] ') sys.exit(2) email = '' password = '' # Process options for o, a in opts: if o == "--email": email = a elif o == "--password": password = a if email == '' or password == '': print ('python BloggerExample.py --email [email] --password [password]') sys.exit(2) sample = BloggerExample(email, password) sample.run() if __name__ == '__main__': main()
Python
#!/usr/bin/python2.4 # # Copyright 2011 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Sample app for Google Apps Provisioning API using OAuth. ProvisioningOAuthSample: Demonstrates the use of the Provisioning API with OAuth in a Google App Engine app. """ __author__ = 'pti@google.com (Prashant Tiwari)' import json import os import atom.http_interface import gdata.apps.service import gdata.auth from django.utils import simplejson from google.appengine.ext import webapp from google.appengine.ext.webapp import template from google.appengine.ext.webapp.util import run_wsgi_app from gdata.apps.service import AppsForYourDomainException INIT = { 'APP_NAME': 'googlecode-provisioningtester-v1', 'SCOPES': ['https://apps-apis.google.com/a/feeds'] } secret = None class MainPage(webapp.RequestHandler): """Defines the entry point for the App Engine app""" def get(self): global service try: service except: self.redirect('/login') return oauth_token = None json = None if self.request.get('two_legged_oauth'): two_legged_oauth = True app_title = 'Provisioning API Sample (2-legged OAuth)' start_over_text = 'Start over' else: two_legged_oauth = False app_title = 'Provisioning API Sample (3-legged OAuth)' start_over_text = 'Revoke token' try: if service: oauth_token = service.token_store.find_token('%20'.join(INIT['SCOPES'])) if isinstance(oauth_token, gdata.auth.OAuthToken) or isinstance( oauth_token, atom.http_interface.GenericToken): user_feed = 
service.RetrieveAllUsers() json = get_json_from_feed(user_feed) else: self.redirect('/login') return except AppsForYourDomainException, e: # Usually a Forbidden (403) when signed-in user isn't the admin. self.response.out.write(e.args[0].get('body')) else: template_values = { 'oauth_token': oauth_token, 'json': json, 'two_legged_oauth': two_legged_oauth, 'start_over_text': start_over_text, 'app_title': app_title } path = os.path.join(os.path.dirname(__file__), 'index.html') self.response.out.write(template.render(path, template_values)) def get_json_from_feed(user_feed): """Constructs and returns a JSON object from the given feed object Args: user_feed: A gdata.apps.UserFeed object Returns: A JSON object containing the first and last names of the domain users """ json = [] for entry in user_feed.entry: json.append({'given_name': entry.name.given_name, 'family_name': entry.name.family_name, 'username': entry.login.user_name, 'admin': entry.login.admin }) return simplejson.dumps(json) class DoLogin(webapp.RequestHandler): """Brings up the app's login page""" def get(self): path = os.path.join(os.path.dirname(__file__), 'login.html') self.response.out.write(template.render(path, None)) class DoAuth(webapp.RequestHandler): """Handles the entire OAuth flow for the app""" def post(self): global service global secret # Get instance of the AppsService for the given consumer_key (domain) service = gdata.apps.service.AppsService(source=INIT['APP_NAME'], domain=self.request.get('key')) two_legged_oauth = False if self.request.get('oauth') == 'two_legged_oauth': two_legged_oauth = True service.SetOAuthInputParameters( signature_method=gdata.auth.OAuthSignatureMethod.HMAC_SHA1, consumer_key=self.request.get('key'), consumer_secret=self.request.get('secret'), two_legged_oauth=two_legged_oauth) if two_legged_oauth: # Redirect to MainPage if 2-legged OAuth is requested self.redirect('/?two_legged_oauth=true') return request_token = service.FetchOAuthRequestToken( 
scopes=INIT['SCOPES'], oauth_callback=self.request.uri) secret = request_token.secret service.SetOAuthToken(request_token) # Send user to Google authorization page google_auth_page_url = service.GenerateOAuthAuthorizationURL() self.redirect(google_auth_page_url) def get(self): global service global secret # Extract the OAuth request token from the URL oauth_token = gdata.auth.OAuthTokenFromUrl(self.request.uri) if oauth_token: oauth_token.secret = secret oauth_token.oauth_input_params = service.GetOAuthInputParameters() service.SetOAuthToken(oauth_token) # Exchange the request token for an access token oauth_verifier = self.request.get('oauth_verifier', default_value='') access_token = service.UpgradeToOAuthAccessToken( oauth_verifier=oauth_verifier) # Store access_token to the service token_store for later access if access_token: service.current_token = access_token service.SetOAuthToken(access_token) self.redirect('/') class DoStartOver(webapp.RequestHandler): """Revokes the OAuth token if needed and starts over""" def get(self): global service two_legged_oauth = self.request.get('two_legged_oauth') # Revoke the token for 3-legged OAuth if two_legged_oauth != 'True': try: service.RevokeOAuthToken() except gdata.service.RevokingOAuthTokenFailed: pass except gdata.service.NonOAuthToken: pass finally: service.token_store.remove_all_tokens() service = None self.redirect('/') application = webapp.WSGIApplication([('/', MainPage), ('/do_auth', DoAuth), ('/start_over', DoStartOver), ('/login', DoLogin)], debug=True) def main(): run_wsgi_app(application) if __name__ == "__main__": main()
Python
#!/usr/bin/python
#
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Contains a Sample for Google Apps Admin Settings.

AdminSettingsSample: shows everything you ever wanted to know about
your Google Apps Domain but were afraid to ask.
"""

__author__ = 'jlee@pbu.edu'

import getopt
import getpass
import sys
import time

import gdata.apps.service
import gdata.apps.adminsettings.service


class AdminSettingsSample(object):
  """AdminSettingsSample object demos Admin Settings API."""

  def __init__(self, email, password, domain):
    """Constructor for the AdminSettingsSample object.

    Takes an email and password corresponding to a google apps admin
    account to demo the Admin Settings API.

    Args:
      email: [string] The e-mail address of the account to use for the
          sample.
      password: [string] The password corresponding to the account
          specified by the email parameter.
      domain: [string] The domain for the Profiles feed
    """
    self.gd_client = gdata.apps.adminsettings.service.AdminSettingsService()
    self.gd_client.domain = domain
    self.gd_client.email = email
    self.gd_client.password = password
    self.gd_client.source = 'GoogleInc-AdminSettingsPythonSample-1'
    # Authenticates via ClientLogin; raises gdata.service.BadAuthentication
    # on bad credentials (handled in main()).
    self.gd_client.ProgrammaticLogin()

  def Run(self):
    """Prints every Admin Settings API property for the domain.

    Pauses 1 sec in between calls to prevent quota warning.
    """
    print 'Google Apps Domain: ', self.gd_client.domain
    time.sleep(1)

    print 'Default Language: ', self.gd_client.GetDefaultLanguage()
    time.sleep(1)

    print 'Organization Name: ', self.gd_client.GetOrganizationName()
    time.sleep(1)

    print 'Maximum Users: ', self.gd_client.GetMaximumNumberOfUsers()
    time.sleep(1)

    print 'Current Users: ', self.gd_client.GetCurrentNumberOfUsers()
    time.sleep(1)

    print 'Domain is Verified: ', self.gd_client.IsDomainVerified()
    time.sleep(1)

    print 'Support PIN: ', self.gd_client.GetSupportPIN()
    time.sleep(1)

    print 'Domain Edition: ', self.gd_client.GetEdition()
    time.sleep(1)

    print 'Customer PIN: ', self.gd_client.GetCustomerPIN()
    time.sleep(1)

    print 'Domain Creation Time: ', self.gd_client.GetCreationTime()
    time.sleep(1)

    print 'Domain Country Code: ', self.gd_client.GetCountryCode()
    time.sleep(1)

    print 'Admin Secondary Email: ', self.gd_client.GetAdminSecondaryEmail()
    time.sleep(1)

    # CNAME/MX verification results come back as dicts keyed by the API's
    # property names.
    cnameverificationstatus = self.gd_client.GetCNAMEVerificationStatus()
    print 'CNAME Verification Record Name: ', cnameverificationstatus['recordName']
    print 'CNAME Verification Verified: ', cnameverificationstatus['verified']
    print 'CNAME Verification Method: ', cnameverificationstatus['verificationMethod']
    time.sleep(1)

    mxverificationstatus = self.gd_client.GetMXVerificationStatus()
    print 'MX Verification Verified: ', mxverificationstatus['verified']
    print 'MX Verification Method: ', mxverificationstatus['verificationMethod']
    time.sleep(1)

    ssosettings = self.gd_client.GetSSOSettings()
    print 'SSO Enabled: ', ssosettings['enableSSO']
    print 'SSO Signon Page: ', ssosettings['samlSignonUri']
    print 'SSO Logout Page: ', ssosettings['samlLogoutUri']
    print 'SSO Password Page: ', ssosettings['changePasswordUri']
    print 'SSO Whitelist IPs: ', ssosettings['ssoWhitelist']
    print 'SSO Use Domain Specific Issuer: ', ssosettings['useDomainSpecificIssuer']
    time.sleep(1)

    ssokey = self.gd_client.GetSSOKey()
    print 'SSO Key Modulus: ', ssokey['modulus']
    print 'SSO Key Exponent: ', ssokey['exponent']
    print 'SSO Key Algorithm: ', ssokey['algorithm']
    print 'SSO Key Format: ', ssokey['format']

    print 'User Migration Enabled: ', self.gd_client.IsUserMigrationEnabled()
    time.sleep(1)

    outboundgatewaysettings = self.gd_client.GetOutboundGatewaySettings()
    print 'Outbound Gateway Smart Host: ', outboundgatewaysettings['smartHost']
    print 'Outbound Gateway Mode: ', outboundgatewaysettings['smtpMode']


def main():
  """Demonstrates use of the Admin Settings API using the
  AdminSettingsSample object."""

  # Parse command line options.
  try:
    opts, args = getopt.getopt(sys.argv[1:], '', ['user=', 'pw=', 'domain='])
  except getopt.error, msg:
    print 'python adminsettings_example.py --user [username] --pw [password]'
    print ' --domain [domain]'
    sys.exit(2)

  user = ''
  pw = ''
  domain = ''
  # Process options.
  for option, arg in opts:
    if option == '--user':
      user = arg
    elif option == '--pw':
      pw = arg
    elif option == '--domain':
      domain = arg

  # Prompt interactively for anything not supplied on the command line.
  while not domain:
    print 'NOTE: Please run these tests only with a test account.'
    domain = raw_input('Please enter your apps domain: ')
  while not user:
    user = raw_input('Please enter a administrator account: ')+'@'+domain
  while not pw:
    pw = getpass.getpass('Please enter password: ')
    if not pw:
      print 'Password cannot be blank.'

  try:
    sample = AdminSettingsSample(user, pw, domain)
  except gdata.service.BadAuthentication:
    print 'Invalid user credentials given.'
    return

  sample.Run()


if __name__ == '__main__':
  main()
Python
#!/usr/bin/python # # Copyright 2011 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """A sample app for Google Apps Email Settings features. EmailSettingsSample: demonstrates getting and setting/updating email settings """ __author__ = 'Prashant Tiwari <pti@google.com>' from optparse import OptionParser from gdata.apps.emailsettings.client import EmailSettingsClient #defaults for sendAs alias settings SEND_AS_NAME = 'test-alias' #update SEND_AS_ADDRESS to a valid account on your domain SEND_AS_ADDRESS = 'johndoe@domain.com' SEND_AS_REPLY_TO = 'replyto@example.com' SEND_AS_MAKE_DEFAULT = False #defaults for label settings LABEL_NAME = 'label' #defaults for forwarding settings #update FORWARD_TO to a valid account on your domain FORWARD_TO = 'account@domain.com' FORWARDING_ACTION = 'ARCHIVE' #defaults for pop settings POP_ENABLE_FOR = 'MAIL_FROM_NOW_ON' POP_ACTION = 'ARCHIVE' #defaults for signature settings SIGNATURE = "<Insert witty signature here>" #defaults for vacation settings VACATION_SUBJECT = "On vacation" VACATION_MESSAGE = "I'm on vacation, will respond when I return." 
VACATION_CONTACTS_ONLY = True #defaults for filter settings FILTER_FROM = 'me@domain.com' FILTER_TO = 'you@domain.com' FILTER_SUBJECT = 'subject' FILTER_HAS_THE_WORD = 'has' FILTER_DOES_NOT_HAVE_THE_WORD = 'no' FILTER_HAS_ATTACHMENT = True FILTER_SHOULD_MARK_AS_READ = True FILTER_SHOULD_ARCHIVE = True FILTER_LABEL = 'label' #defaults for general settings GENERAL_PAGE_SIZE = '50' GENERAL_ENABLE_SHORTCUTS = True GENERAL_ENABLE_ARROWS = True GENERAL_ENABLE_SNIPPETS = True GENERAL_ENABLE_UNICODE = True #defaults for language settings LANGUAGE = 'en-US' parser = None options = None class EmailSettingsSample(object): """EmailsSettingsSample object demos the Email Settings API.""" def __init__(self, domain, email, password, app): """Constructor for the EmailSettingsSample object. Takes an email, password and an app id corresponding to a google apps admin account to demo the Email Settings API. Args: domain: [string] The domain name (e.g. domain.com) email: [string] The e-mail address of a domain admin account. password: [string] The domain admin's password. 
app: [string] The app name of the form companyName-applicationName-versionID """ self.client = EmailSettingsClient(domain=domain) self.client.ClientLogin(email=email, password=password, source=app) def run(self, username, setting, method, args): """Method that invokes the EmailSettingsClient services Args: username: [string] The name of the account for whom to get/set settings setting: [string] The email setting to be got/set/updated method: [string] Specifies the get or set method """ if setting == 'label': if method == 'get': print "getting labels for %s...\n" % (username) print self.client.RetrieveLabels(username=username) elif method == 'set': print "creating label for %s...\n" % (username) print self.client.CreateLabel(username=username, name=LABEL_NAME) else: print "deleting labels isn't supported" elif setting == 'forwarding': if method == 'get': print "getting forwarding for %s...\n" % (username) print self.client.RetrieveForwarding(username) elif method == 'set': print "updating forwarding settings for %s...\n" % (username) print self.client.UpdateForwarding(username=username, enable=not(options.disable), forward_to=FORWARD_TO, action=FORWARDING_ACTION) else: print "deleting forwarding settings isn't supported" elif setting == 'sendas': if method == 'get': print "getting sendAs alias for %s...\n" % (username) print self.client.RetrieveSendAs(username=username) elif method == 'set': print "creating sendAs alias for %s...\n" % (username) print self.client.CreateSendAs(username=username, name=SEND_AS_NAME, address=SEND_AS_ADDRESS, reply_to=SEND_AS_REPLY_TO, make_default=SEND_AS_MAKE_DEFAULT) else: print "deleting send-as settings isn't supported" elif setting == 'pop': if method == 'get': print "getting pop settings for %s...\n" % (username) print self.client.RetrievePop(username=username) elif method == 'set': print "updating pop settings for %s...\n" % (username) print self.client.UpdatePop(username=username, enable=not(options.disable), 
enable_for=POP_ENABLE_FOR, action=POP_ACTION) else: print "deleting pop settings isn't supported" elif setting == 'signature': if method == 'get': print "getting signature for %s...\n" % (username) print self.client.RetrieveSignature(username=username) elif method == 'set': print "updating signature for %s...\n" % (username) print self.client.UpdateSignature(username=username, signature=SIGNATURE) else: print "deleting signature settings isn't supported" elif setting == 'vacation': if method == 'get': print "getting vacation settings for %s...\n" % (username) print self.client.RetrieveVacation(username=username) elif method == 'set': print "updating vacation settings for %s...\n" % (username) print self.client.UpdateVacation(username=username, enable=not(options.disable), subject=VACATION_SUBJECT, message=VACATION_MESSAGE, contacts_only=VACATION_CONTACTS_ONLY) else: print "deleting vacation settings isn't supported" elif setting == 'imap': if method == 'get': print "getting imap settings for %s...\n" % (username) print self.client.RetrieveImap(username) elif setting == 'set': print "updating imap settings for %s...\n" % (username) print self.client.UpdateImap(username=username, enable=not(options.disable)) else: print "deleting imap settings isn't supported" elif setting == 'filter': if method == 'get': print "getting email filters is not yet possible\n" parser.print_help() elif method == 'set': print "creating an email filter for %s...\n" % (username) print self.client.CreateFilter(username=username, from_address=FILTER_FROM, to_address=FILTER_TO, subject=FILTER_SUBJECT, has_the_word=FILTER_HAS_THE_WORD, does_not_have_the_word= FILTER_DOES_NOT_HAVE_THE_WORD, has_attachments=FILTER_HAS_ATTACHMENT, label=FILTER_LABEL, mark_as_read=FILTER_SHOULD_MARK_AS_READ, archive=FILTER_SHOULD_ARCHIVE) else: print "deleting filters isn't supported" elif setting == 'general': if method == 'get': print "getting general email settings is not yet possible\n" parser.print_help() elif 
method == 'set': print "updating general settings for %s...\n" % (username) print self.client.UpdateGeneralSettings(username=username, page_size=GENERAL_PAGE_SIZE, shortcuts= GENERAL_ENABLE_SHORTCUTS, arrows= GENERAL_ENABLE_ARROWS, snippets= GENERAL_ENABLE_SNIPPETS, use_unicode= GENERAL_ENABLE_UNICODE) else: print "deleting general settings isn't supported" elif setting == 'language': if method == 'get': print "getting language settings is not yet possible\n" parser.print_help() elif method == 'set': print "updating language for %s...\n" % (username) print self.client.UpdateLanguage(username=username, language=LANGUAGE) else: print "deleting language settings isn't supported" elif setting == 'webclip': if method == 'get': print "getting webclip settings is not yet possible\n" parser.print_help() elif method == 'get': print "updating webclip settings for %s...\n" % (username) print self.client.UpdateWebclip(username=username, enable=not(options.disable)) else: print "deleting webclip settings isn't supported" elif setting == 'delegation': if method == 'get': print "getting email delegates for %s..." % (username) print self.client.RetrieveEmailDelegates(username=username) elif method == 'set': address = args['delegationId'] print "adding %s as an email delegate to %s..." % (address, username) print self.client.AddEmailDelegate(username=username, address=address) else: address = args['delegationId'] print "deleting %s as an email delegate for %s..." % (address, username) print self.client.DeleteEmailDelegate(username=username, address=address) else: parser.print_help() def main(): """Demos the Email Settings API using the EmailSettingsSample object.""" usage = 'usage: %prog [options]' global parser global options parser = OptionParser(usage=usage) parser.add_option('--domain', help="The Google Apps domain, e.g. 'domain.com'.") parser.add_option('--email', help="The admin's email account, e.g. 
'admin@domain.com'.") parser.add_option('--password', help="The admin's password.") parser.add_option('--app', help="The name of the app.") parser.add_option('--username', help="The user account on which to perform operations.") parser.add_option('--setting', choices=['filter', 'label', 'forwarding', 'sendas', 'pop', 'signature', 'vacation', 'imap', 'general', 'language', 'webclip', 'delegation'], help="The email setting to use. Choose from filter, label, \ forwarding, sendas, pop, signature, vacation, imap, \ general, language, webclip, and delegation.") parser.add_option('--method', default='get', choices=['get', 'set', 'delete'], help="Specify whether to get, set/update or delete \ setting. Choose between get (default), set, and delete.") parser.add_option('--disable', action="store_true", default=False, dest="disable", help="Disable a setting when using the set method with the\ --disable option. The default is to enable the setting.") parser.add_option('--delegationId', default=None, help="The emailId of the account to which email access has\ to be delegated. Required for adding or deleting an \ email delegate.") (options, args) = parser.parse_args() if (options.domain is None or options.email is None or options.password == None or options.username is None or options.app is None or options.setting is None or (options.setting == 'delegation' and options.method != 'get' and options.delegationId is None)): parser.print_help() return args = {'delegationId':options.delegationId} sample = EmailSettingsSample(options.domain, options.email, options.password, options.app) sample.run(options.username, options.setting, options.method, args) if __name__ == '__main__': main()
Python
#!/usr/bin/python2.4 # # Copyright 2011 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Sample app for Google Apps Email Migration features. EmailMigrationSample: Demonstrates the use of the Email Migration API """ __author__ = 'pti@google.com (Prashant Tiwari)' from optparse import OptionParser import os import string from gdata.apps.migration import service class EmailMigrationSample(object): """Sample application demonstrating use of the Email Migration API.""" def __init__(self, domain, email, password): """Constructor for the EmailMigrationSample object. Construct an EmailMigrationSample with the given args. Args: domain: The domain name ("domain.com") email: The email account of the user or the admin ("john@domain.com") password: The domain admin's password """ self.service = service.MigrationService( email=email, password=password, domain=domain, source='googlecode-migrationsample-v1') self.service.ProgrammaticLogin() # Sample mail properties self.mail_item_properties = ['IS_INBOX', 'IS_UNREAD'] self.mail_labels = ['EmailMigrationSample'] def Migrate(self, path): """Migrates messages at the given path. 
Args: path: The file or directory path where messages are stored """ if os.path.isfile(path): if os.path.splitext(path)[1] != '.txt': print "The input file is not a .txt file" return self._MigrateOneMail(path) elif os.path.isdir(path): if path.endswith(os.sep): path = path[0: len(path) - 1] txt_file_paths = [] filenames = os.listdir(path) for filename in filenames: # Filter out the non-txt files in the directory filepath = path + os.sep + filename if os.path.isfile(filepath) and os.path.splitext(filepath)[1] == '.txt': txt_file_paths.append(filepath) if not txt_file_paths: print "Found no .txt file in the directory" return elif len(txt_file_paths) == 1: # Don't use threading if there's only one txt file in the dir self._MigrateOneMail(txt_file_paths[0]) else: self._MigrateManyMails(txt_file_paths) def _MigrateOneMail(self, path): """Imports a single message via the ImportMail service. Args: path: The path of the message file """ print "Attempting to migrate 1 message..." content = self._ReadFileAsString(path) self.service.ImportMail(user_name=options.username, mail_message=content, mail_item_properties=self.mail_item_properties, mail_labels=self.mail_labels) print "Successfully migrated 1 message." def _MigrateManyMails(self, paths): """Imports several messages via the ImportMultipleMails service. Args: paths: List of paths of message files """ print "Attempting to migrate %d messages..." % (len(paths)) for path in paths: content = self._ReadFileAsString(path) self.service.AddMailEntry(mail_message=content, mail_item_properties=self.mail_item_properties, mail_labels=self.mail_labels, identifier=path) success = self.service.ImportMultipleMails(user_name=options.username) print "Successfully migrated %d of %d messages." 
% (success, len(paths)) def _ReadFileAsString(self, path): """Reads the file found at path into a string Args: path: The path of the message file Returns: The file contents as a string Raises: IOError: An error occurred while trying to read the file """ try: input_file = open(path, 'r') file_str = [] for eachline in input_file: file_str.append(eachline) input_file.close() return ''.join(file_str) except IOError, e: raise IOError(e.args[1] + ': ' + path) def main(): """Demonstrates the Email Migration API using EmailMigrationSample.""" usage = 'usage: %prog [options]' global options parser = OptionParser(usage=usage) parser.add_option('-d', '--domain', help="the Google Apps domain, e.g. 'domain.com'") parser.add_option('-e', '--email', help="the email account of the user or the admin, \ e.g. 'john.smith@domain.com'") parser.add_option('-p', '--password', help="the account password") parser.add_option('-u', '--username', help="the user account on which to perform operations. for\ non-admin users this will be their own account name. \ e.g. 'jane.smith'") parser.add_option('-f', '--file', help="the system path of an RFC822 format .txt file or\ directory containing multiple such files to be migrated") (options, args) = parser.parse_args() if (options.domain is None or options.email is None or options.password is None or options.username is None or options.file is None): parser.print_help() return options.file = options.file.strip() if not os.path.exists(options.file): print "Invalid file or directory path" return sample = EmailMigrationSample(domain=options.domain, email=options.email, password=options.password) sample.Migrate(options.file) if __name__ == '__main__': main()
Python
#!/usr/bin/python # # Copyright 2009 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. __author__ = 'e.bidelman (Eric Bidelman)' import getopt import mimetypes import os.path import sys import gdata.sample_util import gdata.sites.client import gdata.sites.data SOURCE_APP_NAME = 'googleInc-GoogleSitesAPIPythonLibSample-v1.1' MAIN_MENU = ['1) List site content', '2) List recent activity', '3) List revision history', '4) Create webpage', '5) Create web attachment', '6) Upload attachment', '7) Download attachment', '8) Delete item', '9) List sites', '10) Create a new site', "11) List site's sharing permissions", '12) Change settings', '13) Exit'] SETTINGS_MENU = ['1) Change current site.', '2) Change domain.'] class SitesExample(object): """Wrapper around the Sites API functionality.""" def __init__(self, site_name=None, site_domain=None, ssl=False, debug=False): if site_domain is None: site_domain = self.PromptDomain() if site_name is None: site_name = self.PromptSiteName() mimetypes.init() self.client = gdata.sites.client.SitesClient( source=SOURCE_APP_NAME, site=site_name, domain=site_domain) self.client.http_client.debug = debug self.client.ssl = ssl try: gdata.sample_util.authorize_client( self.client, service=self.client.auth_service, source=SOURCE_APP_NAME, scopes=['http://sites.google.com/feeds/', 'https://sites.google.com/feeds/']) except gdata.client.BadAuthentication: exit('Invalid user credentials given.') except gdata.client.Error: 
exit('Login Error') def PrintMainMenu(self): """Displays a menu of options for the user to choose from.""" print '\nSites API Sample' print '================================' print '\n'.join(MAIN_MENU) print '================================\n' def PrintSettingsMenu(self): """Displays a menu of settings for the user change.""" print '\nSites API Sample > Settings' print '================================' print '\n'.join(SETTINGS_MENU) print '================================\n' def GetMenuChoice(self, menu): """Retrieves the menu selection from the user. Args: menu: list The menu to get a selection from. Returns: The integer of the menu item chosen by the user. """ max_choice = len(menu) while True: user_input = raw_input(': ') try: num = int(user_input) except ValueError: continue if num <= max_choice and num > 0: return num def PromptSiteName(self): site_name = '' while not site_name: site_name = raw_input('site name: ') if not site_name: print 'Please enter the name of your Google Site.' return site_name def PromptDomain(self): return raw_input(('If your Site is hosted on a Google Apps domain, ' 'enter it (e.g. example.com): ')) or 'site' def GetChoiceSelection(self, feed, message): for i, entry in enumerate(feed.entry): print '%d.) %s' % (i + 1, entry.title.text) choice = 0 while not choice or not 0 <= choice <= len(feed.entry): choice = int(raw_input(message)) print return choice def PrintEntry(self, entry): print '%s [%s]' % (entry.title.text, entry.Kind()) if entry.page_name: print ' page name:\t%s' % entry.page_name.text if entry.content: print ' content\t%s...' 
% str(entry.content.html)[0:100] def PrintListItem(self, entry): print '%s [%s]' % (entry.title.text, entry.Kind()) for col in entry.field: print ' %s %s\t%s' % (col.index, col.name, col.text) def PrintListPage(self, entry): print '%s [%s]' % (entry.title.text, entry.Kind()) for col in entry.data.column: print ' %s %s' % (col.index, col.name) def PrintFileCabinetPage(self, entry): print '%s [%s]' % (entry.title.text, entry.Kind()) print ' page name:\t%s' % entry.page_name.text print ' content\t%s...' % str(entry.content.html)[0:100] def PrintAttachment(self, entry): print '%s [%s]' % (entry.title.text, entry.Kind()) if entry.summary is not None: print ' description:\t%s' % entry.summary.text print ' content\t%s, %s' % (entry.content.type, entry.content.src) def PrintWebAttachment(self, entry): print '%s [%s]' % (entry.title.text, entry.Kind()) if entry.summary.text is not None: print ' description:\t%s' % entry.summary.text print ' content src\t%s' % entry.content.src def Run(self): """Executes the demo application.""" try: while True: self.PrintMainMenu() choice = self.GetMenuChoice(MAIN_MENU) if choice == 1: kind_choice = raw_input('What kind (all|%s)?: ' % '|'.join( gdata.sites.data.SUPPORT_KINDS)) if kind_choice in gdata.sites.data.SUPPORT_KINDS: uri = '%s?kind=%s' % (self.client.make_content_feed_uri(), kind_choice) feed = self.client.GetContentFeed(uri=uri) else: feed = self.client.GetContentFeed() print "\nFetching content feed of '%s'...\n" % self.client.site for entry in feed.entry: kind = entry.Kind() if kind == 'attachment': self.PrintAttachment(entry) elif kind == 'webattachment': self.PrintWebAttachment(entry) elif kind == 'filecabinet': self.PrintFileCabinetPage(entry) elif kind == 'listitem': self.PrintListItem(entry) elif kind == 'listpage': self.PrintListPage(entry) else: self.PrintEntry(entry) print ' revision:\t%s' % entry.revision.text print ' updated:\t%s' % entry.updated.text parent_link = entry.FindParentLink() if parent_link: print ' parent 
link:\t%s' % parent_link if entry.GetAlternateLink(): print ' view in Sites:\t%s' % entry.GetAlternateLink().href if entry.feed_link: print ' feed of items:\t%s' % entry.feed_link.href if entry.IsDeleted(): print ' deleted:\t%s' % entry.IsDeleted() if entry.in_reply_to: print ' in reply to:\t%s' % entry.in_reply_to.href print elif choice == 2: print "\nFetching activity feed of '%s'..." % self.client.site feed = self.client.GetActivityFeed() for entry in feed.entry: print ' %s [%s on %s]' % (entry.title.text, entry.Kind(), entry.updated.text) elif choice == 3: print "\nFetching content feed of '%s'...\n" % self.client.site feed = self.client.GetContentFeed() try: selection = self.GetChoiceSelection( feed, 'Select a page to fetch revisions for: ') except TypeError: continue except ValueError: continue feed = self.client.GetRevisionFeed( feed.entry[selection - 1].GetNodeId()) for entry in feed.entry: print entry.title.text print ' new version on:\t%s' % entry.updated.text print ' view changes:\t%s' % entry.GetAlternateLink().href print ' current version:\t%s...' % str(entry.content.html)[0:100] print elif choice == 4: print "\nFetching content feed of '%s'...\n" % self.client.site feed = self.client.GetContentFeed() try: selection = self.GetChoiceSelection( feed, 'Select a parent to upload to (or hit ENTER for none): ') except ValueError: selection = None page_title = raw_input('Enter a page title: ') parent = None if selection is not None: parent = feed.entry[selection - 1] new_entry = self.client.CreatePage( 'webpage', page_title, '<b>Your html content</b>', parent=parent) if new_entry.GetAlternateLink(): print 'Created. 
View it at: %s' % new_entry.GetAlternateLink().href elif choice == 5: print "\nFetching filecabinets on '%s'...\n" % self.client.site uri = '%s?kind=%s' % (self.client.make_content_feed_uri(), 'filecabinet') feed = self.client.GetContentFeed(uri=uri) selection = self.GetChoiceSelection( feed, 'Select a filecabinet to create the web attachment on: ') url = raw_input('Enter the URL of the attachment: ') content_type = raw_input("Enter the attachment's mime type: ") title = raw_input('Enter a title for the web attachment: ') description = raw_input('Enter a description: ') parent_entry = None if selection is not None: parent_entry = feed.entry[selection - 1] self.client.CreateWebAttachment(url, content_type, title, parent_entry, description=description) print 'Created!' elif choice == 6: print "\nFetching filecainets on '%s'...\n" % self.client.site uri = '%s?kind=%s' % (self.client.make_content_feed_uri(), 'filecabinet') feed = self.client.GetContentFeed(uri=uri) selection = self.GetChoiceSelection( feed, 'Select a filecabinet to upload to: ') filepath = raw_input('Enter a filename: ') page_title = raw_input('Enter a title for the file: ') description = raw_input('Enter a description: ') filename = os.path.basename(filepath) file_ex = filename[filename.rfind('.'):] if not file_ex in mimetypes.types_map: content_type = raw_input( 'Unrecognized file extension. Please enter the mime type: ') else: content_type = mimetypes.types_map[file_ex] entry = None if selection is not None: entry = feed.entry[selection - 1] new_entry = self.client.UploadAttachment( filepath, entry, content_type=content_type, title=page_title, description=description) print 'Uploaded. 
View it at: %s' % new_entry.GetAlternateLink().href elif choice == 7: print "\nFetching all attachments on '%s'...\n" % self.client.site uri = '%s?kind=%s' % (self.client.make_content_feed_uri(), 'attachment') feed = self.client.GetContentFeed(uri=uri) selection = self.GetChoiceSelection( feed, 'Select an attachment to download: ') filepath = raw_input('Save as: ') entry = None if selection is not None: entry = feed.entry[selection - 1] self.client.DownloadAttachment(entry, filepath) print 'Downloaded.' elif choice == 8: print "\nFetching content feed of '%s'...\n" % self.client.site feed = self.client.GetContentFeed() selection = self.GetChoiceSelection(feed, 'Select a page to delete: ') entry = None if selection is not None: entry = feed.entry[selection - 1] self.client.Delete(entry) print 'Removed!' elif choice == 9: print ('\nFetching your list of sites for domain: %s...\n' % self.client.domain) feed = self.client.GetSiteFeed() for entry in feed.entry: print entry.title.text print ' site name: ' + entry.site_name.text if entry.summary.text: print ' summary: ' + entry.summary.text if entry.FindSourceLink(): print ' copied from site: ' + entry.FindSourceLink() print ' acl feed: %s\n' % entry.FindAclLink() elif choice == 10: title = raw_input('Enter a title: ') summary = raw_input('Enter a description: ') theme = raw_input('Theme name (ex. "default"): ') new_entry = self.client.CreateSite( title, description=summary, theme=theme) print 'Site created! 
View it at: ' + new_entry.GetAlternateLink().href elif choice == 11: print "\nFetching acl permissions of '%s'...\n" % self.client.site feed = self.client.GetAclFeed() for entry in feed.entry: print '%s (%s) - %s' % (entry.scope.value, entry.scope.type, entry.role.value) elif choice == 12: self.PrintSettingsMenu() settings_choice = self.GetMenuChoice(SETTINGS_MENU) if settings_choice == 1: self.client.site = self.PromptSiteName() elif settings_choice == 2: self.client.domain = self.PromptDomain() elif choice == 13: print 'Later!\n' return except gdata.client.RequestError, error: print error except KeyboardInterrupt: return def main(): """The main function runs the SitesExample application.""" print 'NOTE: Please run these tests only with a test account.\n' try: opts, args = getopt.getopt(sys.argv[1:], '', ['site=', 'domain=', 'ssl', 'debug']) except getopt.error, msg: print """python sites_sample.py --site [sitename] --domain [domain or "site"] --ssl [enables https if set] --debug [prints debug info if set]""" sys.exit(2) site = None domain = None debug = False ssl = False for option, arg in opts: if option == '--site': site = arg elif option == '--domain': domain = arg elif option == '--ssl': ssl = True elif option == '--debug': debug = True sample = SitesExample(site, domain, ssl=ssl, debug=debug) sample.Run() if __name__ == '__main__': main()
Python
#!/usr/bin/python # # Copyright (C) 2008 Yu-Jie Lin # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import gdata.webmastertools.service import gdata.service try: from xml.etree import ElementTree except ImportError: from elementtree import ElementTree import atom import getpass username = '' password = '' username = raw_input('Please enter your username: ') password = getpass.getpass() client = gdata.webmastertools.service.GWebmasterToolsService( email=username, password=password, source='PythonWebmasterToolsSample-1') print 'Logging in' client.ProgrammaticLogin() print 'Retrieving Sites feed' feed = client.GetSitesFeed() # Format the feed print print 'You have %d site(s), last updated at %s' % ( len(feed.entry), feed.updated.text) print print "%-25s %25s %25s" % ('Site', 'Last Updated', 'Last Crawled') print '='*80 def safeElementText(element): if hasattr(element, 'text'): return element.text return '' # Format each site for entry in feed.entry: print "%-25s %25s %25s" % ( entry.title.text.replace('http://', '')[:25], entry.updated.text[:25], safeElementText(entry.crawled)[:25]) print " Preferred: %-23s Indexed: %5s GeoLoc: %10s" % ( safeElementText(entry.preferred_domain)[:30], entry.indexed.text[:5], safeElementText(entry.geolocation)[:10]) print " Crawl rate: %-10s Verified: %5s" % ( safeElementText(entry.crawl_rate)[:10], entry.verified.text[:5]) print
Python
#!/usr/bin/python
#
# Copyright (C) 2008 Yu-Jie Lin
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Round-trips a site and a sitemap through Google Webmaster Tools.

Adds an example site, prints its settings, adds a sitemap for it, then
deletes both. Operations that require actual ownership of the site are
shown commented out.

NOTE(review): legacy Python 2 script (print statements, raw_input).
"""

import urllib
import gdata.webmastertools.service
import gdata.service
# ElementTree moved into the stdlib in Python 2.5; fall back to the
# standalone 'elementtree' package on older interpreters.
try:
  from xml.etree import ElementTree
except ImportError:
  from elementtree import ElementTree
import atom
import getpass


username = ''
password = ''

# Credentials are read interactively; getpass suppresses echo.
username = raw_input('Please enter your username: ')
password = getpass.getpass()

client = gdata.webmastertools.service.GWebmasterToolsService(
    email=username, password=password,
    source='PythonWebmasterToolsSample-1')

EXAMPLE_SITE = 'http://www.example.com/'
EXAMPLE_SITEMAP = 'http://www.example.com/sitemap-index.xml'


def safeElementText(element):
  """Return element.text, or '' when the element is absent/None."""
  if hasattr(element, 'text'):
    return element.text
  return ''


print 'Logging in'
client.ProgrammaticLogin()

print
print 'Adding site: %s' % EXAMPLE_SITE
entry = client.AddSite(EXAMPLE_SITE)

# Print the newly-added site the same way the list-sites sample does.
print
print "%-25s %25s %25s" % ('Site', 'Last Updated', 'Last Crawled')
print '='*80
print "%-25s %25s %25s" % (
    entry.title.text.replace('http://', '')[:25],
    entry.updated.text[:25],
    safeElementText(entry.crawled)[:25])
print "  Preferred: %-23s Indexed: %5s GeoLoc: %10s" % (
    safeElementText(entry.preferred_domain)[:30],
    entry.indexed.text[:5],
    safeElementText(entry.geolocation)[:10])
print "  Crawl rate: %-10s Verified: %5s" % (
    safeElementText(entry.crawl_rate)[:10],
    entry.verified.text[:5])

# Verifying a site. This sample won't do this since we don't own example.com
#client.VerifySite(EXAMPLE_SITE, 'htmlpage')

# The following needs the ownership of the site
#client.UpdateGeoLocation(EXAMPLE_SITE, 'US')
#client.UpdateCrawlRate(EXAMPLE_SITE, 'normal')
#client.UpdatePreferredDomain(EXAMPLE_SITE, 'preferwww')
#client.UpdateEnhancedImageSearch(EXAMPLE_SITE, 'true')

print
print 'Adding sitemap: %s' % EXAMPLE_SITEMAP
entry = client.AddSitemap(EXAMPLE_SITE, EXAMPLE_SITEMAP)
print entry.title.text.replace('http://', '')[:80]
print "  Last Updated   : %29s Status: %10s" % (
    entry.updated.text[:29], entry.sitemap_status.text[:10])
print "  Last Downloaded: %29s URL Count: %10s" % (
    safeElementText(entry.sitemap_last_downloaded)[:29],
    safeElementText(entry.sitemap_url_count)[:10])

# Add a mobile sitemap
#entry = client.AddMobileSitemap(EXAMPLE_SITE, 'http://.../sitemap-mobile-example.xml', 'XHTML')

# Add a news sitemap, your site must be included in Google News.
# See also http://google.com/support/webmasters/bin/answer.py?answer=42738
#entry = client.AddNewsSitemap(EXAMPLE_SITE, 'http://.../sitemap-news-example.xml', 'Label')

# Clean up: remove the sitemap first, then the site itself.
print
print 'Deleting sitemap: %s' % EXAMPLE_SITEMAP
client.DeleteSitemap(EXAMPLE_SITE, EXAMPLE_SITEMAP)

print
print 'Deleting site: %s' % EXAMPLE_SITE
client.DeleteSite(EXAMPLE_SITE)
print
Python
#!/usr/bin/python # # Copyright (C) 2008 Yu-Jie Lin # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import gdata.webmastertools.service import gdata.service try: from xml.etree import ElementTree except ImportError: from elementtree import ElementTree import atom import getpass username = '' password = '' site_uri = '' username = raw_input('Please enter your username: ') password = getpass.getpass() site_uri = raw_input('Please enter your site url: ') client = gdata.webmastertools.service.GWebmasterToolsService( email=username, password=password, source='PythonWebmasterToolsSample-1') print 'Logging in' client.ProgrammaticLogin() print 'Retrieving Sitemaps feed' feed = client.GetSitemapsFeed(site_uri) # Format the feed print print 'You have %d sitemap(s), last updated at %s' % ( len(feed.entry), feed.updated.text) print print '='*80 def safeElementText(element): if hasattr(element, 'text'): return element.text return '' # Format each site for entry in feed.entry: print entry.title.text.replace('http://', '')[:80] print " Last Updated : %29s Status: %10s" % ( entry.updated.text[:29], entry.sitemap_status.text[:10]) print " Last Downloaded: %29s URL Count: %10s" % ( safeElementText(entry.sitemap_last_downloaded)[:29], safeElementText(entry.sitemap_url_count)[:10]) print
Python
#!/usr/bin/python2.4
#
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Sample app for Google Apps Calendar Resource features.

  CalendarResourceSample: Demonstrates the use of the Calendar Resource API
"""

__author__ = 'pti@google.com (Prashant Tiwari)'

import getpass

from gdata.calendar_resource.client import CalendarResourceClient


class CalendarResourceSample(object):
  """Interactive CRUD wrapper around CalendarResourceClient."""

  def __init__(self, domain, email, password):
    """Constructor for the CalendarResourceSample object.

    Construct a CalendarResourceSample with the given args.

    Args:
      domain: The domain name ("domain.com")
      email: The email account of the user or the admin ("john@domain.com")
      password: The domain admin's password
    """
    self.client = CalendarResourceClient(domain=domain)
    # ClientLogin performs a network round-trip; raises on bad credentials.
    self.client.ClientLogin(email=email, password=password,
                            source='googlecode-calendarresourcesample-v1')

  def create(self, resource_properties):
    """Creates a calendar resource with the given resource_properties

    Args:
      resource_properties: A dictionary of calendar resource properties
          (keys: resource_id, resource_name, resource_description,
          resource_type — all strings).
    """
    print 'Creating a new calendar resource with id %s...' % (
        resource_properties['resource_id'])
    print self.client.CreateResource(
        resource_id=resource_properties['resource_id'],
        resource_common_name=resource_properties['resource_name'],
        resource_description=resource_properties['resource_description'],
        resource_type=resource_properties['resource_type'])

  def get(self, resource_id=None):
    """Retrieves the calendar resource with the given resource_id

    Args:
      resource_id: The optional calendar resource identifier; when falsy
          (None or ''), the full resource feed is fetched instead.
    """
    if resource_id:
      print 'Retrieving the calendar resource with id %s...' % (resource_id)
      print self.client.GetResource(resource_id=resource_id)
    else:
      print 'Retrieving all calendar resources...'
      print self.client.GetResourceFeed()

  def update(self, resource_properties):
    """Updates the calendar resource with the given resource_properties

    Args:
      resource_properties: A dictionary of calendar resource properties
          (same keys as create()).
    """
    print 'Updating the calendar resource with id %s...' % (
        resource_properties['resource_id'])
    print self.client.UpdateResource(
        resource_id=resource_properties['resource_id'],
        resource_common_name=resource_properties['resource_name'],
        resource_description=resource_properties['resource_description'],
        resource_type=resource_properties['resource_type'])

  def delete(self, resource_id):
    """Deletes the calendar resource with the given resource_id

    Args:
      resource_id: The unique calendar resource identifier
    """
    print 'Deleting the calendar resource with id %s...' % (resource_id)
    self.client.DeleteResource(resource_id)
    print 'Calendar resource successfully deleted.'


def main():
  """Demonstrates the Calendar Resource API using CalendarResourceSample."""
  domain = None
  admin_email = None
  admin_password = None
  do_continue = 'y'
  print("Google Apps Calendar Resource API Sample\n\n")
  # Loop until each required value is non-empty.
  while not domain:
    domain = raw_input('Google Apps domain: ')
  while not admin_email:
    admin_email = '%s@%s' % (raw_input('Administrator username: '), domain)
  while not admin_password:
    admin_password = getpass.getpass('Administrator password: ')
  sample = CalendarResourceSample(domain=domain, email=admin_email,
                                  password=admin_password)
  # call_service returns 'n' when the user asks to quit.
  while do_continue.lower() != 'n':
    do_continue = call_service(sample)


def call_service(sample):
  """Calls the service methods on the user input

  Returns:
    'y' to keep looping, 'n' to stop (also returned when the user quits).
  """
  operation = None
  while operation not in ['c', 'C', 'g', 'G', 'u', 'U', 'd', 'D', 'q', 'Q']:
    operation = raw_input('Do [c=create|g=get|u=update|d=delete|q=quit]: ')
  operation = operation.lower()
  if operation == 'q':
    return 'n'
  resource_properties = get_input(operation)
  if operation == 'c':
    sample.create(resource_properties)
  elif operation == 'g':
    sample.get(resource_properties['resource_id'])
  elif operation == 'u':
    sample.update(resource_properties)
  elif operation == 'd':
    sample.delete(resource_properties['resource_id'])
  # Empty input defaults to 'y' (continue).
  do_continue = None
  while do_continue not in ['', 'y', 'Y', 'n', 'N']:
    do_continue = raw_input('Want to continue (Y/n): ')
  if do_continue == '':
    do_continue = 'y'
  return do_continue.lower()


def get_input(operation):
  """Gets user input from console

  Args:
    operation: one of 'c', 'g', 'u', 'd' — controls which fields are
        prompted for.

  Returns:
    Dict with keys resource_id / resource_name / resource_description /
    resource_type; unprompted fields remain None.
  """
  resource_id = None
  resource_name = None
  resource_description = None
  resource_type = None
  if operation == 'g':
    # 'get' allows a blank id (meaning: fetch all resources).
    resource_id = raw_input('Resource id (leave blank to get all resources): ')
  else:
    while not resource_id:
      resource_id = raw_input('Resource id: ')
  if operation == 'c':
    resource_name = raw_input('Resource common name (recommended): ')
    resource_description = raw_input('Resource description (recommended): ')
    resource_type = raw_input('Resource type (recommended): ')
  elif operation == 'u':
    resource_name = raw_input(
        'New resource common name (leave blank if no change): ')
    resource_description = raw_input(
        'New resource description (leave blank if no change): ')
    resource_type = raw_input('New resource type (leave blank if no change): ')
  resource_properties = {'resource_id': resource_id,
                         'resource_name': resource_name,
                         'resource_description': resource_description,
                         'resource_type': resource_type}
  return resource_properties


if __name__ == '__main__':
  main()
Python
#!/usr/bin/python
#
# Copyright (C) 2007, 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

__author__ = ('api.jfisher (Jeff Fisher), '
              'e.bidelman (Eric Bidelman)')

import sys
import re
import os.path
import getopt
import getpass
import gdata.docs.service
import gdata.spreadsheet.service


def truncate(content, length=15, suffix='...'):
  """Truncate content to at most `length` chars, appending `suffix`."""
  if len(content) <= length:
    return content
  else:
    return content[:length] + suffix


class DocsSample(object):
  """A DocsSample object demonstrates the Document List feed."""

  def __init__(self, email, password):
    """Constructor for the DocsSample object.

    Takes an email and password corresponding to a gmail account to
    demonstrate the functionality of the Document List feed.

    Args:
      email: [string] The e-mail address of the account to use for the sample.
      password: [string] The password corresponding to the account specified by
          the email parameter.

    Returns:
      A DocsSample object used to run the sample demonstrating the
      functionality of the Document List feed.
    """
    source = 'Document List Python Sample'
    self.gd_client = gdata.docs.service.DocsService()
    self.gd_client.ClientLogin(email, password, source=source)

    # Setup a spreadsheets service for downloading spreadsheets
    self.gs_client = gdata.spreadsheet.service.SpreadsheetsService()
    self.gs_client.ClientLogin(email, password, source=source)

  def _PrintFeed(self, feed):
    """Prints out the contents of a feed to the console.

    Args:
      feed: A gdata.docs.DocumentListFeed instance.
    """
    print '\n'
    if not feed.entry:
      print 'No entries in feed.\n'
    # Fixed-width table header; titles are truncated to fit the column.
    print '%-18s %-12s %s' % ('TITLE', 'TYPE', 'RESOURCE ID')
    for entry in feed.entry:
      print '%-18s %-12s %s' % (truncate(entry.title.text.encode('UTF-8')),
                                entry.GetDocumentType(),
                                entry.resourceId.text)

  def _GetFileExtension(self, file_name):
    """Returns the uppercase file extension for a file.

    Args:
      file_name: [string] The basename of a filename.

    Returns:
      A string containing the file extension of the file, or False when
      the name has no 3+ letter extension.
    """
    match = re.search('.*\.([a-zA-Z]{3,}$)', file_name)
    if match:
      return match.group(1).upper()
    return False

  def _UploadMenu(self):
    """Prompts that enable a user to upload a file to the Document List feed."""
    file_path = ''
    file_path = raw_input('Enter path to file: ')

    if not file_path:
      return
    elif not os.path.isfile(file_path):
      print 'Not a valid file.'
      return

    file_name = os.path.basename(file_path)
    ext = self._GetFileExtension(file_name)

    if not ext or ext not in gdata.docs.service.SUPPORTED_FILETYPES:
      print 'File type not supported. Check the file extension.'
      return
    else:
      content_type = gdata.docs.service.SUPPORTED_FILETYPES[ext]

    title = ''
    while not title:
      title = raw_input('Enter name for document: ')

    try:
      ms = gdata.MediaSource(file_path=file_path, content_type=content_type)
    except IOError:
      print 'Problems reading file. Check permissions.'
      return

    # Pick a user-facing label from the extension; upload path is the same.
    if ext in ['CSV', 'ODS', 'XLS', 'XLSX']:
      print 'Uploading spreadsheet...'
    elif ext in ['PPT', 'PPS']:
      print 'Uploading presentation...'
    else:
      print 'Uploading word processor document...'

    entry = self.gd_client.Upload(ms, title)

    if entry:
      print 'Upload successful!'
      print 'Document now accessible at:', entry.GetAlternateLink().href
    else:
      print 'Upload error.'

  def _DownloadMenu(self):
    """Prompts that enable a user to download a local copy of a document."""
    resource_id = ''
    resource_id = raw_input('Enter an resource id: ')
    file_path = ''
    file_path = raw_input('Save file to: ')

    if not file_path or not resource_id:
      return

    file_name = os.path.basename(file_path)
    ext = self._GetFileExtension(file_name)

    if not ext or ext not in gdata.docs.service.SUPPORTED_FILETYPES:
      print 'File type not supported. Check the file extension.'
      return
    else:
      content_type = gdata.docs.service.SUPPORTED_FILETYPES[ext]

    # Resource ids look like 'doctype:id'; the prefix selects the service.
    doc_type = resource_id[:resource_id.find(':')]

    # When downloading a spreadsheet, the authenticated request needs to be
    # sent with the spreadsheet service's auth token.
    if doc_type == 'spreadsheet':
      print 'Downloading spreadsheet to %s...' % (file_path,)
      docs_token = self.gd_client.GetClientLoginToken()
      self.gd_client.SetClientLoginToken(self.gs_client.GetClientLoginToken())
      self.gd_client.Export(resource_id, file_path, gid=0)
      # Restore the Docs token so later calls keep working.
      self.gd_client.SetClientLoginToken(docs_token)
    else:
      print 'Downloading document to %s...' % (file_path,)
      self.gd_client.Export(resource_id, file_path)

  def _ListDocuments(self):
    """Retrieves and displays a list of documents based on the user's choice."""
    print 'Retrieve (all/document/folder/presentation/spreadsheet/pdf): '
    category = raw_input('Enter a category: ')

    if category == 'all':
      feed = self.gd_client.GetDocumentListFeed()
    elif category == 'folder':
      # Folders are hidden unless showfolders=true is passed.
      query = gdata.docs.service.DocumentQuery(categories=['folder'],
                                               params={'showfolders': 'true'})
      feed = self.gd_client.Query(query.ToUri())
    else:
      query = gdata.docs.service.DocumentQuery(categories=[category])
      feed = self.gd_client.Query(query.ToUri())

    self._PrintFeed(feed)

  def _ListAclPermissions(self):
    """Retrieves a list of a user's folders and displays them."""
    resource_id = raw_input('Enter an resource id: ')
    query = gdata.docs.service.DocumentAclQuery(resource_id)
    print '\nListing document permissions:'
    feed = self.gd_client.GetDocumentListAclFeed(query.ToUri())
    for acl_entry in feed.entry:
      print '%s - %s (%s)' % (acl_entry.role.value, acl_entry.scope.value,
                              acl_entry.scope.type)

  def _ModifyAclPermissions(self):
    """Create or updates the ACL entry on an existing document."""
    resource_id = raw_input('Enter an resource id: ')
    email = raw_input('Enter an email address: ')
    role_value = raw_input('Enter a permission (reader/writer/owner/remove): ')

    uri = gdata.docs.service.DocumentAclQuery(resource_id).ToUri()
    acl_feed = self.gd_client.GetDocumentListAclFeed(uri)

    # Look for an existing ACL entry for this email address.
    found_acl_entry = None
    for acl_entry in acl_feed.entry:
      if acl_entry.scope.value == email:
        found_acl_entry = acl_entry
        break

    if found_acl_entry:
      if role_value == 'remove':
        # delete ACL entry
        self.gd_client.Delete(found_acl_entry.GetEditLink().href)
      else:
        # update ACL entry
        found_acl_entry.role.value = role_value
        updated_entry = self.gd_client.Put(
            found_acl_entry, found_acl_entry.GetEditLink().href,
            converter=gdata.docs.DocumentListAclEntryFromString)
    else:
      # No existing entry: insert a new user-scoped ACL entry.
      scope = gdata.docs.Scope(value=email, type='user')
      role = gdata.docs.Role(value=role_value)
      acl_entry = gdata.docs.DocumentListAclEntry(scope=scope, role=role)
      inserted_entry = self.gd_client.Post(
          acl_entry, uri, converter=gdata.docs.DocumentListAclEntryFromString)

    # Re-fetch and print so the user sees the post-modification state.
    print '\nListing document permissions:'
    acl_feed = self.gd_client.GetDocumentListAclFeed(uri)
    for acl_entry in acl_feed.entry:
      print '%s - %s (%s)' % (acl_entry.role.value, acl_entry.scope.value,
                              acl_entry.scope.type)

  def _FullTextSearch(self):
    """Searches a user's documents for a text string.

    Provides prompts to search a user's documents and displays the results
    of such a search. The text_query parameter of the DocumentListQuery
    object corresponds to the contents of the q parameter in the feed.
    Note that this parameter searches the content of documents, not just
    their titles.
    """
    # NOTE(review): 'input' shadows the builtin; left as-is (doc-only edit).
    input = raw_input('Enter search term: ')
    query = gdata.docs.service.DocumentQuery(text_query=input)
    feed = self.gd_client.Query(query.ToUri())
    self._PrintFeed(feed)

  def _PrintMenu(self):
    """Displays a menu of options for the user to choose from."""
    print ('\nDocument List Sample\n'
           '1) List your documents.\n'
           '2) Search your documents.\n'
           '3) Upload a document.\n'
           '4) Download a document.\n'
           "5) List a document's permissions.\n"
           "6) Add/change a document's permissions.\n"
           '7) Exit.\n')

  def _GetMenuChoice(self, max):
    """Retrieves the menu selection from the user.

    Args:
      max: [int] The maximum number of allowed choices (inclusive)

    Returns:
      The integer of the menu item chosen by the user.
    """
    # NOTE(review): 'input' and 'max' shadow builtins; left as-is (doc-only).
    while True:
      input = raw_input('> ')

      try:
        num = int(input)
      except ValueError:
        print 'Invalid choice. Please choose a value between 1 and', max
        continue

      if num > max or num < 1:
        print 'Invalid choice. Please choose a value between 1 and', max
      else:
        return num

  def Run(self):
    """Prompts the user to choose funtionality to be demonstrated."""
    try:
      while True:
        self._PrintMenu()
        choice = self._GetMenuChoice(7)
        if choice == 1:
          self._ListDocuments()
        elif choice == 2:
          self._FullTextSearch()
        elif choice == 3:
          self._UploadMenu()
        elif choice == 4:
          self._DownloadMenu()
        elif choice == 5:
          self._ListAclPermissions()
        elif choice == 6:
          self._ModifyAclPermissions()
        elif choice == 7:
          print '\nGoodbye.'
          return
    except KeyboardInterrupt:
      print '\nGoodbye.'
      return


def main():
  """Demonstrates use of the Docs extension using the DocsSample object."""
  # Parse command line options
  try:
    opts, args = getopt.getopt(sys.argv[1:], '', ['user=', 'pw='])
  except getopt.error, msg:
    print 'python docs_example.py --user [username] --pw [password] '
    sys.exit(2)

  user = ''
  pw = ''
  key = ''  # NOTE(review): unused; left as-is (doc-only edit).

  # Process options
  for option, arg in opts:
    if option == '--user':
      user = arg
    elif option == '--pw':
      pw = arg

  while not user:
    print 'NOTE: Please run these tests only with a test account.'
    user = raw_input('Please enter your username: ')
  while not pw:
    pw = getpass.getpass()
    if not pw:
      print 'Password cannot be blank.'

  try:
    sample = DocsSample(user, pw)
  except gdata.service.BadAuthentication:
    print 'Invalid user credentials given.'
    return

  sample.Run()


if __name__ == '__main__':
  main()
Python
#!/usr/bin/python
#
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

__author__ = 'e.bidelman@google.com (Eric Bidelman)'

import getopt
import mimetypes
import os.path
import sys
import atom.data
import gdata.client
import gdata.data
import gdata.gauth
import gdata.docs.client
import gdata.docs.data
import gdata.sample_util

APP_NAME = 'GDataResumableUploadPySample-v1.0'


def get_mimetype(filename):
  """Return the content type for filename's extension, prompting on console
  when the extension is not in mimetypes.types_map."""
  file_ext = filename[filename.rfind('.'):]
  if file_ext in mimetypes.types_map:
    content_type = mimetypes.types_map[file_ext]
  else:
    content_type = raw_input(
        "Unrecognized file extension. Please enter the file's content type: ")
  return content_type


class ResumableUploadDemo(object):
  """Helper class to setup a resumable upload, and upload a file."""

  CREATE_SESSION_URI = '/feeds/upload/create-session/default/private/full'

  client = None  # A gdata.client.GDClient object.
  uploader = None  # A gdata.client.ResumableUploader object.

  def __init__(self, filepath, chunk_size=None, convert=None,
               host=None, ssl=False, debug=False):
    self.client = gdata.docs.client.DocsClient(source=APP_NAME)
    self.client.ssl = ssl
    self.client.http_client.debug = debug
    self.convert = convert

    if host:
      self.client.host = host

    # NOTE(review): self.chunk_size is only assigned when chunk_size is
    # truthy, yet it is read unconditionally below when constructing the
    # ResumableUploader. Calling this constructor with chunk_size=None or 0
    # raises AttributeError. main() always passes a value, so the bug is
    # latent — flagged here, not fixed (doc-only edit).
    if chunk_size:
      self.chunk_size = chunk_size

    # Authenticate the user with CLientLogin, OAuth, or AuthSub.
    try:
      gdata.sample_util.authorize_client(
          self.client, service=self.client.auth_service, source=APP_NAME,
          scopes=self.client.auth_scopes)
    except gdata.client.BadAuthentication:
      exit('Invalid user credentials given.')
    except gdata.client.Error:
      exit('Login Error')

    mimetypes.init()  # Register common mimetypes on system.

    self.f = open(filepath)
    content_type = get_mimetype(self.f.name)
    file_size = os.path.getsize(self.f.name)

    self.uploader = gdata.client.ResumableUploader(
        self.client, self.f, content_type, file_size,
        chunk_size=self.chunk_size, desired_class=gdata.docs.data.DocsEntry)

  def __del__(self):
    # Close the uploaded file's handle when the demo object is collected.
    if self.uploader is not None:
      self.uploader.file_handle.close()

  def UploadAutomaticChunks(self, new_entry):
    """Uploads an entire file, handing the chunking for you.

    Args:
      new_entry: gdata.data.docs.DocsEntry An object holding metadata to
          create the document with.

    Returns:
      A gdata.docs.data.DocsEntry of the created document on the server.
    """
    uri = self.CREATE_SESSION_URI

    # If convert=false is used on the initial request to start a resumable
    # upload, the document will be treated as arbitrary file upload.
    if self.convert is not None:
      uri += '?convert=' + self.convert

    return self.uploader.UploadFile(uri, entry=new_entry)

  def UploadInManualChunks(self, new_entry):
    """Uploads a file, demonstrating manually chunking the file.

    Args:
      new_entry: gdata.data.docs.DocsEntry An object holding metadata to
          create the document with.

    Returns:
      A gdata.docs.data.DocsEntry of the created document on the server.
    """
    uri = self.CREATE_SESSION_URI

    # If convert=false is used on the initial request to start a resumable
    # upload, the document will be treated as arbitrary file upload.
    if self.convert is not None:
      uri += '?convert=' + self.convert

    # Need to create the initial session manually.
    self.uploader._InitSession(uri, entry=new_entry)

    # UploadChunk returns None until the final chunk completes the entry.
    start_byte = 0
    entry = None

    while not entry:
      print 'Uploading bytes: %s-%s/%s' % (start_byte,
                                           self.uploader.chunk_size - 1,
                                           self.uploader.total_file_size)
      entry = self.uploader.UploadChunk(
          start_byte,
          self.uploader.file_handle.read(self.uploader.chunk_size))
      start_byte += self.uploader.chunk_size

    return entry

  def UploadUsingNormalPath(self):
    """Uploads a file using the standard DocList API upload path.

    This method is included to show the difference between the standard
    upload path and the resumable upload path. Also note, file uploads
    using this normal upload method max out ~10MB.

    Returns:
      A gdata.docs.data.DocsEntry of the created document on the server.
    """
    ms = gdata.data.MediaSource(
        file_handle=self.f, content_type=self.uploader.content_type,
        content_length=self.uploader.total_file_size)

    uri = self.client.DOCLIST_FEED_URI

    # If convert=false is used on the initial request to start a resumable
    # upload, the document will be treated as arbitrary file upload.
    if self.convert is not None:
      uri += '?convert=' + self.convert

    return self.client.Upload(ms, self.f.name, folder_or_uri=uri)


def main():
  """Parse flags, then run one manually-chunked resumable upload."""
  try:
    opts, args = getopt.getopt(
        sys.argv[1:], '',
        ['filepath=', 'convert=', 'chunk_size=', 'ssl', 'debug'])
  except getopt.error, msg:
    print '''python resumable_upload_sample.py
        --filepath= [file to upload]
        --convert= [document uploads will be converted to native Google Docs.
                    Possible values are 'true' and 'false'.]
        --ssl [enables HTTPS if set]
        --debug [prints debug info if set]'''
    print ('Example usage: python resumable_upload_sample.py '
           '--filepath=/path/to/test.doc --convert=true --ssl')
    sys.exit(2)

  filepath = None
  convert = 'true'  # Convert to Google Docs format by default
  chunk_size = gdata.client.ResumableUploader.DEFAULT_CHUNK_SIZE
  debug = False
  ssl = False

  for option, arg in opts:
    if option == '--filepath':
      filepath = arg
    elif option == '--convert':
      convert = arg.lower()
    elif option == '--chunk_size':
      chunk_size = int(arg)
    elif option == '--ssl':
      ssl = True
    elif option == '--debug':
      debug = True

  if filepath is None:
    filepath = raw_input('Enter path to a file: ')

  demo = ResumableUploadDemo(filepath, chunk_size=chunk_size, convert=convert,
                             ssl=ssl, debug=debug)

  title = raw_input('Enter title for the document: ')

  print 'Uploading %s ( %s ) @ %s bytes...' % (demo.uploader.file_handle.name,
                                               demo.uploader.content_type,
                                               demo.uploader.total_file_size)

  entry = demo.UploadInManualChunks(
      gdata.docs.data.DocsEntry(title=atom.data.Title(text=title)))

  print 'Done: %s' % demo.uploader.QueryUploadStatus()
  print 'Document uploaded: ' + entry.title.text
  print 'Quota used: %s' % entry.quota_bytes_used.text
  print 'file closed: %s' % demo.uploader.file_handle.closed


if __name__ == '__main__':
  main()
Python
#!/usr/bin/python
#
# Copyright (C) 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

__author__ = 'api.jscudder (Jeffrey Scudder)'

import sys
import getopt
import getpass
import atom
import gdata.contacts.data
import gdata.contacts.client


class ContactsSample(object):
  """ContactsSample object demonstrates operations with the Contacts feed."""

  def __init__(self, email, password):
    """Constructor for the ContactsSample object.

    Takes an email and password corresponding to a gmail account to
    demonstrate the functionality of the Contacts feed.

    Args:
      email: [string] The e-mail address of the account to use for the sample.
      password: [string] The password corresponding to the account specified by
          the email parameter.

    Returns:
      A ContactsSample object used to run the sample demonstrating the
      functionality of the Contacts feed.
    """
    self.gd_client = gdata.contacts.client.ContactsClient(source='GoogleInc-ContactsPythonSample-1')
    self.gd_client.ClientLogin(email, password, self.gd_client.source)

  def PrintFeed(self, feed, ctr=0):
    """Prints out the contents of a feed to the console.

    Args:
      feed: A gdata.contacts.ContactsFeed instance.
      ctr: [int] The number of entries in this feed previously printed. This
          allows continuous entry numbers when paging through a feed.

    Returns:
      The number of entries printed, including those previously printed as
      specified in ctr. This is for passing as an argument to ctr on
      successive calls to this method.
    """
    if not feed.entry:
      print '\nNo entries in feed.\n'
      return 0
    for i, entry in enumerate(feed.entry):
      print '\n%s %s' % (ctr+i+1, entry.title.text)
      if entry.content:
        print '    %s' % (entry.content.text)
      # Only the primary email address is printed.
      for email in entry.email:
        if email.primary and email.primary == 'true':
          print '    %s' % (email.address)
      # Show the contact groups that this contact is a member of.
      for group in entry.group_membership_info:
        print '    Member of group: %s' % (group.href)
      # Display extended properties.
      for extended_property in entry.extended_property:
        if extended_property.value:
          value = extended_property.value
        else:
          value = extended_property.GetXmlBlob()
        print '    Extended Property %s: %s' % (extended_property.name, value)
    return len(feed.entry) + ctr

  def PrintPaginatedFeed(self, feed, print_method):
    """ Print all pages of a paginated feed.

    This will iterate through a paginated feed, requesting each page and
    printing the entries contained therein.

    Args:
      feed: A gdata.contacts.ContactsFeed instance.
      print_method: The method which will be used to print each page of the
          feed. Must accept these two named arguments:
              feed: A gdata.contacts.ContactsFeed instance.
              ctr: [int] The number of entries in this feed previously
                  printed. This allows continuous entry numbers when paging
                  through a feed.
    """
    ctr = 0
    while feed:
      # Print contents of current feed
      ctr = print_method(feed=feed, ctr=ctr)
      # Prepare for next feed iteration
      next = feed.GetNextLink()
      feed = None
      if next:
        if self.PromptOperationShouldContinue():
          # Another feed is available, and the user has given us permission
          # to fetch it
          feed = self.gd_client.GetContacts(uri=next.href)
        else:
          # User has asked us to terminate
          feed = None

  def PromptOperationShouldContinue(self):
    """ Display a "Continue" prompt.

    This is used to give users a chance to break out of a loop, just in
    case they have too many contacts/groups.

    Returns:
      A boolean value, True if the current operation should continue, False if
      the current operation should terminate.
    """
    # NOTE(review): 'input is "N"' compares object identity, not equality.
    # It only appears to work because CPython interns short string literals;
    # these should be '==' comparisons. Left unchanged (doc-only edit).
    while True:
      input = raw_input("Continue [Y/n]? ")
      if input is 'N' or input is 'n':
        return False
      elif input is 'Y' or input is 'y' or input is '':
        return True

  def ListAllContacts(self):
    """Retrieves a list of contacts and displays name and primary email."""
    feed = self.gd_client.GetContacts()
    self.PrintPaginatedFeed(feed, self.PrintContactsFeed)

  def PrintGroupsFeed(self, feed, ctr):
    # Prints one page of a groups feed; returns the running entry count
    # (same contract as PrintFeed).
    if not feed.entry:
      print '\nNo groups in feed.\n'
      return 0
    for i, entry in enumerate(feed.entry):
      print '\n%s %s' % (ctr+i+1, entry.title.text)
      if entry.content:
        print '    %s' % (entry.content.text)
      # Display the group id which can be used to query the contacts feed.
      print '    Group ID: %s' % entry.id.text
      # Display extended properties.
      for extended_property in entry.extended_property:
        if extended_property.value:
          value = extended_property.value
        else:
          value = extended_property.GetXmlBlob()
        print '    Extended Property %s: %s' % (extended_property.name, value)
    return len(feed.entry) + ctr

  def PrintContactsFeed(self, feed, ctr):
    # Prints one page of a contacts feed; returns the running entry count
    # (same contract as PrintFeed).
    if not feed.entry:
      print '\nNo contacts in feed.\n'
      return 0
    for i, entry in enumerate(feed.entry):
      if not entry.name is None:
        # 'X is None and " " or X.text' is the old pre-ternary idiom for
        # '" " if X is None else X.text'.
        family_name = entry.name.family_name is None and " " or entry.name.family_name.text
        full_name = entry.name.full_name is None and " " or entry.name.full_name.text
        given_name = entry.name.given_name is None and " " or entry.name.given_name.text
        print '\n%s %s: %s - %s' % (ctr+i+1, full_name, given_name, family_name)
      else:
        print '\n%s %s (title)' % (ctr+i+1, entry.title.text)
      if entry.content:
        print '    %s' % (entry.content.text)
      for p in entry.structured_postal_address:
        print '    %s' % (p.formatted_address.text)
      # Display the group id which can be used to query the contacts feed.
      print '    Group ID: %s' % entry.id.text
      # Display extended properties.
      for extended_property in entry.extended_property:
        if extended_property.value:
          value = extended_property.value
        else:
          value = extended_property.GetXmlBlob()
        print '    Extended Property %s: %s' % (extended_property.name, value)
      for user_defined_field in entry.user_defined_field:
        print '    User Defined Field %s: %s' % (user_defined_field.key, user_defined_field.value)
    return len(feed.entry) + ctr

  def ListAllGroups(self):
    # Lists every contact group, paging with the same prompt as contacts.
    feed = self.gd_client.GetGroups()
    self.PrintPaginatedFeed(feed, self.PrintGroupsFeed)

  def CreateMenu(self):
    """Prompts that enable a user to create a contact."""
    name = raw_input('Enter contact\'s name: ')
    notes = raw_input('Enter notes for contact: ')
    primary_email = raw_input('Enter primary email address: ')

    new_contact = gdata.contacts.data.ContactEntry(name=gdata.data.Name(full_name=gdata.data.FullName(text=name)))
    new_contact.content = atom.data.Content(text=notes)
    # Create a work email address for the contact and use as primary.
    new_contact.email.append(gdata.data.Email(address=primary_email,
        primary='true', rel=gdata.data.WORK_REL))
    entry = self.gd_client.CreateContact(new_contact)

    if entry:
      print 'Creation successful!'
      print 'ID for the new contact:', entry.id.text
    else:
      print 'Upload error.'
def QueryMenu(self): """Prompts for updated-min query parameters and displays results.""" updated_min = raw_input( 'Enter updated min (example: 2007-03-16T00:00:00): ') query = gdata.contacts.client.ContactsQuery() query.updated_min = updated_min feed = self.gd_client.GetContacts(q=query) self.PrintFeed(feed) def QueryGroupsMenu(self): """Prompts for updated-min query parameters and displays results.""" updated_min = raw_input( 'Enter updated min (example: 2007-03-16T00:00:00): ') query = gdata.contacts.client.ContactsQuery(feed='/m8/feeds/groups/default/full') query.updated_min = updated_min feed = self.gd_client.GetGroups(q=query) self.PrintGroupsFeed(feed, 0) def _SelectContact(self): feed = self.gd_client.GetContacts() self.PrintFeed(feed) selection = 5000 while selection > len(feed.entry)+1 or selection < 1: selection = int(raw_input( 'Enter the number for the contact you would like to modify: ')) return feed.entry[selection-1] def UpdateContactMenu(self): selected_entry = self._SelectContact() new_name = raw_input('Enter a new name for the contact: ') if not selected_entry.name: selected_entry.name = gdata.data.Name() selected_entry.name.full_name = gdata.data.FullName(text=new_name) self.gd_client.Update(selected_entry) def DeleteContactMenu(self): selected_entry = self._SelectContact() self.gd_client.Delete(selected_entry) def PrintMenu(self): """Displays a menu of options for the user to choose from.""" print ('\nContacts Sample\n' '1) List all of your contacts.\n' '2) Create a contact.\n' '3) Query contacts on updated time.\n' '4) Modify a contact.\n' '5) Delete a contact.\n' '6) List all of your contact groups.\n' '7) Query your groups on updated time.\n' '8) Exit.\n') def GetMenuChoice(self, max): """Retrieves the menu selection from the user. Args: max: [int] The maximum number of allowed choices (inclusive) Returns: The integer of the menu item chosen by the user. 
""" while True: input = raw_input('> ') try: num = int(input) except ValueError: print 'Invalid choice. Please choose a value between 1 and', max continue if num > max or num < 1: print 'Invalid choice. Please choose a value between 1 and', max else: return num def Run(self): """Prompts the user to choose funtionality to be demonstrated.""" try: while True: self.PrintMenu() choice = self.GetMenuChoice(8) if choice == 1: self.ListAllContacts() elif choice == 2: self.CreateMenu() elif choice == 3: self.QueryMenu() elif choice == 4: self.UpdateContactMenu() elif choice == 5: self.DeleteContactMenu() elif choice == 6: self.ListAllGroups() elif choice == 7: self.QueryGroupsMenu() elif choice == 8: return except KeyboardInterrupt: print '\nGoodbye.' return def main(): """Demonstrates use of the Contacts extension using the ContactsSample object.""" # Parse command line options try: opts, args = getopt.getopt(sys.argv[1:], '', ['user=', 'pw=']) except getopt.error, msg: print 'python contacts_example.py --user [username] --pw [password]' sys.exit(2) user = '' pw = '' # Process options for option, arg in opts: if option == '--user': user = arg elif option == '--pw': pw = arg while not user: print 'NOTE: Please run these tests only with a test account.' user = raw_input('Please enter your username: ') while not pw: pw = getpass.getpass() if not pw: print 'Password cannot be blank.' try: sample = ContactsSample(user, pw) except gdata.client.BadAuthentication: print 'Invalid user credentials given.' return sample.Run() if __name__ == '__main__': main()
Python
#!/usr/bin/env python # # Copyright 2009 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Contains a Sample for Google Profiles. ProfilesSample: demonstrates operations with the Profiles feed. """ __author__ = 'jtoledo (Julian Toledo)' import getopt import getpass import sys import gdata.contacts import gdata.contacts.service class ProfilesSample(object): """ProfilesSample object demonstrates operations with the Profiles feed.""" def __init__(self, email, password, domain): """Constructor for the ProfilesSample object. Takes an email and password corresponding to a gmail account to demonstrate the functionality of the Profiles feed. Args: email: [string] The e-mail address of the account to use for the sample. password: [string] The password corresponding to the account specified by the email parameter. domain: [string] The domain for the Profiles feed """ self.gd_client = gdata.contacts.service.ContactsService( contact_list=domain) self.gd_client.email = email self.gd_client.password = password self.gd_client.source = 'GoogleInc-ProfilesPythonSample-1' self.gd_client.ProgrammaticLogin() def PrintFeed(self, feed, ctr=0): """Prints out the contents of a feed to the console. Args: feed: A gdata.profiles.ProfilesFeed instance. ctr: [int] The number of entries in this feed previously printed. This allows continuous entry numbers when paging through a feed. Returns: The number of entries printed, including those previously printed as specified in ctr. 
This is for passing as an ar1gument to ctr on successive calls to this method. """ if not feed.entry: print '\nNo entries in feed.\n' return 0 for entry in feed.entry: self.PrintEntry(entry) return len(feed.entry) + ctr def PrintEntry(self, entry): """Prints out the contents of a single Entry to the console. Args: entry: A gdata.contacts.ProfilesEntry """ print '\n%s' % (entry.title.text) for email in entry.email: if email.primary == 'true': print 'Email: %s (primary)' % (email.address) else: print 'Email: %s' % (email.address) if entry.nickname: print 'Nickname: %s' % (entry.nickname.text) if entry.occupation: print 'Occupation: %s' % (entry.occupation.text) if entry.gender: print 'Gender: %s' % (entry.gender.value) if entry.birthday: print 'Birthday: %s' % (entry.birthday.when) for relation in entry.relation: print 'Relation: %s %s' % (relation.rel, relation.text) for user_defined_field in entry.user_defined_field: print 'UserDefinedField: %s %s' % (user_defined_field.key, user_defined_field.value) for website in entry.website: print 'Website: %s %s' % (website.href, website.rel) for phone_number in entry.phone_number: print 'Phone Number: %s' % phone_number.text for organization in entry.organization: print 'Organization:' if organization.org_name: print ' Name: %s' % (organization.org_name.text) if organization.org_title: print ' Title: %s' % (organization.org_title.text) if organization.org_department: print ' Department: %s' % (organization.org_department.text) if organization.org_job_description: print ' Job Desc: %s' % (organization.org_job_description.text) def PrintPaginatedFeed(self, feed, print_method): """Print all pages of a paginated feed. This will iterate through a paginated feed, requesting each page and printing the entries contained therein. Args: feed: A gdata.contacts.ProfilesFeed instance. 
print_method: The method which will be used to print each page of the """ ctr = 0 while feed: # Print contents of current feed ctr = print_method(feed=feed, ctr=ctr) # Prepare for next feed iteration next = feed.GetNextLink() feed = None if next: if self.PromptOperationShouldContinue(): # Another feed is available, and the user has given us permission # to fetch it feed = self.gd_client.GetProfilesFeed(next.href) else: # User has asked us to terminate feed = None def PromptOperationShouldContinue(self): """Display a "Continue" prompt. This give is used to give users a chance to break out of a loop, just in case they have too many profiles/groups. Returns: A boolean value, True if the current operation should continue, False if the current operation should terminate. """ while True: key_input = raw_input('Continue [Y/n]? ') if key_input is 'N' or key_input is 'n': return False elif key_input is 'Y' or key_input is 'y' or key_input is '': return True def ListAllProfiles(self): """Retrieves a list of profiles and displays name and primary email.""" feed = self.gd_client.GetProfilesFeed() self.PrintPaginatedFeed(feed, self.PrintFeed) def SelectProfile(self): username = raw_input('Please enter your username for the profile: ') entry_uri = self.gd_client.GetFeedUri('profiles')+'/'+username try: entry = self.gd_client.GetProfile(entry_uri) self.PrintEntry(entry) except gdata.service.RequestError: print 'Invalid username for the profile.' def PrintMenu(self): """Displays a menu of options for the user to choose from.""" print ('\nProfiles Sample\n' '1) List all of your Profiles.\n' '2) Get a single Profile.\n' '3) Exit.\n') def GetMenuChoice(self, maximum): """Retrieves the menu selection from the user. Args: maximum: [int] The maximum number of allowed choices (inclusive) Returns: The integer of the menu item chosen by the user. """ while True: key_input = raw_input('> ') try: num = int(key_input) except ValueError: print 'Invalid choice. 
Please choose a value between 1 and', maximum continue if num > maximum or num < 1: print 'Invalid choice. Please choose a value between 1 and', maximum else: return num def Run(self): """Prompts the user to choose funtionality to be demonstrated.""" try: while True: self.PrintMenu() choice = self.GetMenuChoice(3) if choice == 1: self.ListAllProfiles() elif choice == 2: self.SelectProfile() elif choice == 3: return except KeyboardInterrupt: print '\nGoodbye.' return def main(): """Demonstrates use of the Profiles using the ProfilesSample object.""" # Parse command line options try: opts, args = getopt.getopt(sys.argv[1:], '', ['user=', 'pw=', 'domain=']) except getopt.error, msg: print 'python profiles_example.py --user [username] --pw [password]' print ' --domain [domain]' sys.exit(2) user = '' pw = '' domain = '' # Process options for option, arg in opts: if option == '--user': user = arg elif option == '--pw': pw = arg elif option == '--domain': domain = arg while not user: print 'NOTE: Please run these tests only with a test account.' user = raw_input('Please enter your email: ') while not pw: pw = getpass.getpass('Please enter password: ') if not pw: print 'Password cannot be blank.' while not domain: domain = raw_input('Please enter your Apps domain: ') try: sample = ProfilesSample(user, pw, domain) except gdata.service.BadAuthentication: print 'Invalid user credentials given.' return sample.Run() if __name__ == '__main__': main()
Python
#!/usr/bin/python # # Copyright (C) 2009 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. __author__ = 'api.rboyd@gmail.com (Ryan Boyd)' try: from xml.etree import ElementTree except ImportError: from elementtree import ElementTree import gdata.calendar.data import gdata.calendar.client import gdata.acl.data import atom import getopt import sys import string import time class CalendarExample: def __init__(self, email, password): """Creates a CalendarService and provides ClientLogin auth details to it. The email and password are required arguments for ClientLogin. The CalendarService automatically sets the service to be 'cl', as is appropriate for calendar. The 'source' defined below is an arbitrary string, but should be used to reference your name or the name of your organization, the app name and version, with '-' between each of the three values. The account_type is specified to authenticate either Google Accounts or Google Apps accounts. See gdata.service or http://code.google.com/apis/accounts/AuthForInstalledApps.html for more info on ClientLogin. NOTE: ClientLogin should only be used for installed applications and not for multi-user web applications.""" self.cal_client = gdata.calendar.client.CalendarClient(source='Google-Calendar_Python_Sample-1.0') self.cal_client.ClientLogin(email, password, self.cal_client.source); def _PrintUserCalendars(self): """Retrieves the list of calendars to which the authenticated user either owns or subscribes to. 
This is the same list as is represented in the Google Calendar GUI. Although we are only printing the title of the calendar in this case, other information, including the color of the calendar, the timezone, and more. See CalendarListEntry for more details on available attributes.""" feed = self.cal_client.GetAllCalendarsFeed() print 'Printing allcalendars: %s' % feed.title.text for i, a_calendar in zip(xrange(len(feed.entry)), feed.entry): print '\t%s. %s' % (i, a_calendar.title.text,) def _PrintOwnCalendars(self): """Retrieves the list of calendars to which the authenticated user owns -- Although we are only printing the title of the calendar in this case, other information, including the color of the calendar, the timezone, and more. See CalendarListEntry for more details on available attributes.""" feed = self.cal_client.GetOwnCalendarsFeed() print 'Printing owncalendars: %s' % feed.title.text for i, a_calendar in zip(xrange(len(feed.entry)), feed.entry): print '\t%s. %s' % (i, a_calendar.title.text,) def _PrintAllEventsOnDefaultCalendar(self): """Retrieves all events on the primary calendar for the authenticated user. In reality, the server limits the result set intially returned. You can use the max_results query parameter to allow the server to send additional results back (see query parameter use in DateRangeQuery for more info). Additionally, you can page through the results returned by using the feed.GetNextLink().href value to get the location of the next set of results.""" feed = self.cal_client.GetCalendarEventFeed() print 'Events on Primary Calendar: %s' % (feed.title.text,) for i, an_event in zip(xrange(len(feed.entry)), feed.entry): print '\t%s. %s' % (i, an_event.title.text,) for p, a_participant in zip(xrange(len(an_event.who)), an_event.who): print '\t\t%s. 
%s' % (p, a_participant.email,) print '\t\t\t%s' % (a_participant.value,) if a_participant.attendee_status: print '\t\t\t%s' % (a_participant.attendee_status.value,) def _FullTextQuery(self, text_query='Tennis'): """Retrieves events from the calendar which match the specified full-text query. The full-text query searches the title and content of an event, but it does not search the value of extended properties at the time of this writing. It uses the default (primary) calendar of the authenticated user and uses the private visibility/full projection feed. Please see: http://code.google.com/apis/calendar/reference.html#Feeds for more information on the feed types. Note: as we're not specifying any query parameters other than the full-text query, recurring events returned will not have gd:when elements in the response. Please see the Google Calendar API query paramters reference for more info: http://code.google.com/apis/calendar/reference.html#Parameters""" print 'Full text query for events on Primary Calendar: \'%s\'' % ( text_query,) query = gdata.calendar.client.CalendarEventQuery(text_query=text_query) feed = self.cal_client.GetCalendarEventFeed(q=query) for i, an_event in zip(xrange(len(feed.entry)), feed.entry): print '\t%s. %s' % (i, an_event.title.text,) print '\t\t%s. %s' % (i, an_event.content.text,) for a_when in an_event.when: print '\t\tStart time: %s' % (a_when.start,) print '\t\tEnd time: %s' % (a_when.end,) def _DateRangeQuery(self, start_date='2007-01-01', end_date='2007-07-01'): """Retrieves events from the server which occur during the specified date range. This uses the CalendarEventQuery class to generate the URL which is used to retrieve the feed. 
For more information on valid query parameters, see: http://code.google.com/apis/calendar/reference.html#Parameters""" print 'Date range query for events on Primary Calendar: %s to %s' % ( start_date, end_date,) query = gdata.calendar.client.CalendarEventQuery(start_min=start_date, start_max=end_date) feed = self.cal_client.GetCalendarEventFeed(q=query) for i, an_event in zip(xrange(len(feed.entry)), feed.entry): print '\t%s. %s' % (i, an_event.title.text,) for a_when in an_event.when: print '\t\tStart time: %s' % (a_when.start,) print '\t\tEnd time: %s' % (a_when.end,) def _InsertCalendar(self, title='Little League Schedule', description='This calendar contains practice and game times', time_zone='America/Los_Angeles', hidden=False, location='Oakland', color='#2952A3'): """Creates a new calendar using the specified data.""" print 'Creating new calendar with title "%s"' % title calendar = gdata.calendar.data.CalendarEntry() calendar.title = atom.data.Title(text=title) calendar.summary = atom.data.Summary(text=description) calendar.where.append(gdata.calendar.data.CalendarWhere(value=location)) calendar.color = gdata.calendar.data.ColorProperty(value=color) calendar.timezone = gdata.calendar.data.TimeZoneProperty(value=time_zone) if hidden: calendar.hidden = gdata.calendar.data.HiddenProperty(value='true') else: calendar.hidden = gdata.calendar.data.HiddenProperty(value='false') new_calendar = self.cal_client.InsertCalendar(new_calendar=calendar) return new_calendar def _UpdateCalendar(self, calendar, title='New Title', color=None): """Updates the title and, optionally, the color of the supplied calendar""" print 'Updating the calendar titled "%s" with the title "%s"' % ( calendar.title.text, title) calendar.title = atom.data.Title(text=title) if color is not None: calendar.color = gdata.calendar.data.ColorProperty(value=color) updated_calendar = self.cal_client.Update(calendar) return updated_calendar def _DeleteAllCalendars(self): """Deletes all calendars. 
Note: the primary calendar cannot be deleted""" feed = self.cal_client.GetOwnCalendarsFeed() for entry in feed.entry: print 'Deleting calendar: %s' % entry.title.text try: self.cal_client.Delete(entry.GetEditLink().href) except gdata.client.RequestError, msg: if msg.body.startswith('Cannot remove primary calendar'): print '\t%s' % msg.body else: print '\tUnexpected Error: %s' % msg.body def _InsertSubscription(self, id='python.gcal.test%40gmail.com'): """Subscribes to the calendar with the specified ID.""" print 'Subscribing to the calendar with ID: %s' % id calendar = gdata.calendar.data.CalendarEntry() calendar.id = atom.data.Id(text=id) returned_calendar = self.cal_client.InsertCalendarSubscription(calendar) return returned_calendar def _UpdateCalendarSubscription(self, id='python.gcal.test%40gmail.com', color=None, hidden=None, selected=None): """Updates the subscription to the calendar with the specified ID.""" print 'Updating the calendar subscription with ID: %s' % id calendar_url = ( 'http://www.google.com/calendar/feeds/default/allcalendars/full/%s' % id) calendar_entry = self.cal_client.GetCalendarEntry(calendar_url) if color is not None: calendar_entry.color = gdata.calendar.data.ColorProperty(value=color) if hidden is not None: if hidden: calendar_entry.hidden = gdata.calendar.data.HiddenProperty(value='true') else: calendar_entry.hidden = gdata.calendar.data.HiddenProperty(value='false') if selected is not None: if selected: calendar_entry.selected = gdata.calendar.data.SelectedProperty(value='true') else: calendar_entry.selected = gdata.calendar.data.SelectedProperty(value='false') updated_calendar = self.cal_client.Update(calendar_entry) return updated_calendar def _DeleteCalendarSubscription(self, id='python.gcal.test%40gmail.com'): """Deletes the subscription to the calendar with the specified ID.""" print 'Deleting the calendar subscription with ID: %s' % id calendar_url = ( 'http://www.google.com/calendar/feeds/default/allcalendars/full/%s' % id) 
calendar_entry = self.cal_client.GetCalendarEntry(calendar_url) self.cal_client.Delete(calendar_entry.GetEditLink().href) def _InsertEvent(self, title='Tennis with Beth', content='Meet for a quick lesson', where='On the courts', start_time=None, end_time=None, recurrence_data=None): """Inserts a basic event using either start_time/end_time definitions or gd:recurrence RFC2445 icalendar syntax. Specifying both types of dates is not valid. Note how some members of the CalendarEventEntry class use arrays and others do not. Members which are allowed to occur more than once in the calendar or GData "kinds" specifications are stored as arrays. Even for these elements, Google Calendar may limit the number stored to 1. The general motto to use when working with the Calendar data API is that functionality not available through the GUI will not be available through the API. Please see the GData Event "kind" document: http://code.google.com/apis/gdata/elements.html#gdEventKind for more information""" event = gdata.calendar.data.CalendarEventEntry() event.title = atom.data.Title(text=title) event.content = atom.data.Content(text=content) event.where.append(gdata.data.Where(value=where)) if recurrence_data is not None: # Set a recurring event event.recurrence = gdata.data.Recurrence(text=recurrence_data) else: if start_time is None: # Use current time for the start_time and have the event last 1 hour start_time = time.strftime('%Y-%m-%dT%H:%M:%S.000Z', time.gmtime()) end_time = time.strftime('%Y-%m-%dT%H:%M:%S.000Z', time.gmtime(time.time() + 3600)) event.when.append(gdata.data.When(start=start_time, end=end_time)) new_event = self.cal_client.InsertEvent(event) return new_event def _InsertSingleEvent(self, title='One-time Tennis with Beth', content='Meet for a quick lesson', where='On the courts', start_time=None, end_time=None): """Uses the _InsertEvent helper method to insert a single event which does not have any recurrence syntax specified.""" new_event = 
self._InsertEvent(title, content, where, start_time, end_time, recurrence_data=None) print 'New single event inserted: %s' % (new_event.id.text,) print '\tEvent edit URL: %s' % (new_event.GetEditLink().href,) print '\tEvent HTML URL: %s' % (new_event.GetHtmlLink().href,) return new_event def _InsertRecurringEvent(self, title='Weekly Tennis with Beth', content='Meet for a quick lesson', where='On the courts', recurrence_data=None): """Uses the _InsertEvent helper method to insert a recurring event which has only RFC2445 icalendar recurrence syntax specified. Note the use of carriage return/newline pairs at the end of each line in the syntax. Even when specifying times (as opposed to only dates), VTIMEZONE syntax is not required if you use a standard Java timezone ID. Please see the docs for more information on gd:recurrence syntax: http://code.google.com/apis/gdata/elements.html#gdRecurrence """ if recurrence_data is None: recurrence_data = ('DTSTART;VALUE=DATE:20070501\r\n' + 'DTEND;VALUE=DATE:20070502\r\n' + 'RRULE:FREQ=WEEKLY;BYDAY=Tu;UNTIL=20070904\r\n') new_event = self._InsertEvent(title, content, where, recurrence_data=recurrence_data, start_time=None, end_time=None) print 'New recurring event inserted: %s' % (new_event.id.text,) print '\tEvent edit URL: %s' % (new_event.GetEditLink().href,) print '\tEvent HTML URL: %s' % (new_event.GetHtmlLink().href,) return new_event def _InsertQuickAddEvent(self, content="Tennis with John today 3pm-3:30pm"): """Creates an event with the quick_add property set to true so the content is processed as quick add content instead of as an event description.""" event = gdata.calendar.data.CalendarEventEntry() event.content = atom.data.Content(text=content) event.quick_add = gdata.calendar.data.QuickAddProperty(value='true') new_event = self.cal_client.InsertEvent(event) return new_event def _InsertSimpleWebContentEvent(self): """Creates a WebContent object and embeds it in a WebContentLink. 
The WebContentLink is appended to the existing list of links in the event entry. Finally, the calendar client inserts the event.""" # Create a WebContent object url = 'http://www.google.com/logos/worldcup06.gif' web_content = gdata.calendar.data.WebContent(url=url, width='276', height='120') # Create a WebContentLink object that contains the WebContent object title = 'World Cup' href = 'http://www.google.com/calendar/images/google-holiday.gif' type = 'image/gif' web_content_link = gdata.calendar.data.WebContentLink(title=title, href=href, link_type=type, web_content=web_content) # Create an event that contains this web content event = gdata.calendar.data.CalendarEventEntry() event.link.append(web_content_link) print 'Inserting Simple Web Content Event' new_event = self.cal_client.InsertEvent(event) return new_event def _InsertWebContentGadgetEvent(self): """Creates a WebContent object and embeds it in a WebContentLink. The WebContentLink is appended to the existing list of links in the event entry. Finally, the calendar client inserts the event. 
Web content gadget events display Calendar Gadgets inside Google Calendar.""" # Create a WebContent object url = 'http://google.com/ig/modules/datetime.xml' web_content = gdata.calendar.data.WebContent(url=url, width='300', height='136') web_content.web_content_gadget_pref.append( gdata.calendar.data.WebContentGadgetPref(name='color', value='green')) # Create a WebContentLink object that contains the WebContent object title = 'Date and Time Gadget' href = 'http://gdata.ops.demo.googlepages.com/birthdayicon.gif' type = 'application/x-google-gadgets+xml' web_content_link = gdata.calendar.data.WebContentLink(title=title, href=href, link_type=type, web_content=web_content) # Create an event that contains this web content event = gdata.calendar.data.CalendarEventEntry() event.link.append(web_content_link) print 'Inserting Web Content Gadget Event' new_event = self.cal_client.InsertEvent(event) return new_event def _UpdateTitle(self, event, new_title='Updated event title'): """Updates the title of the specified event with the specified new_title. Note that the UpdateEvent method (like InsertEvent) returns the CalendarEventEntry object based upon the data returned from the server after the event is inserted. This represents the 'official' state of the event on the server. The 'edit' link returned in this event can be used for future updates. Due to the use of the 'optimistic concurrency' method of version control, most GData services do not allow you to send multiple update requests using the same edit URL. Please see the docs: http://code.google.com/apis/gdata/reference.html#Optimistic-concurrency """ previous_title = event.title.text event.title.text = new_title print 'Updating title of event from:\'%s\' to:\'%s\'' % ( previous_title, event.title.text,) return self.cal_client.Update(event) def _AddReminder(self, event, minutes=10): """Adds a reminder to the event. 
This uses the default reminder settings for the user to determine what type of notifications are sent (email, sms, popup, etc.) and sets the reminder for 'minutes' number of minutes before the event. Note: you can only use values for minutes as specified in the Calendar GUI.""" for a_when in event.when: if len(a_when.reminder) > 0: a_when.reminder[0].minutes = minutes else: a_when.reminder.append(gdata.data.Reminder(minutes=str(minutes))) print 'Adding %d minute reminder to event' % (minutes,) return self.cal_client.Update(event) def _AddExtendedProperty(self, event, name='http://www.example.com/schemas/2005#mycal.id', value='1234'): """Adds an arbitrary name/value pair to the event. This value is only exposed through the API. Extended properties can be used to store extra information needed by your application. The recommended format is used as the default arguments above. The use of the URL format is to specify a namespace prefix to avoid collisions between different applications.""" event.extended_property.append( gdata.calendar.data.CalendarExtendedProperty(name=name, value=value)) print 'Adding extended property to event: \'%s\'=\'%s\'' % (name, value,) return self.cal_client.Update(event) def _DeleteEvent(self, event): """Given an event object returned for the calendar server, this method deletes the event. The edit link present in the event is the URL used in the HTTP DELETE request.""" self.cal_client.Delete(event.GetEditLink().href) def _PrintAclFeed(self): """Sends a HTTP GET to the default ACL URL (http://www.google.com/calendar/feeds/default/acl/full) and displays the feed returned in the response.""" feed = self.cal_client.GetCalendarAclFeed() print feed.title.text for i, a_rule in zip(xrange(len(feed.entry)), feed.entry): print '\t%s. 
%s' % (i, a_rule.title.text,) print '\t\t Role: %s' % (a_rule.role.value,) print '\t\t Scope %s - %s' % (a_rule.scope.type, a_rule.scope.value) def _CreateAclRule(self, username): """Creates a ACL rule that grants the given user permission to view free/busy information on the default calendar. Note: It is not necessary to specify a title for the ACL entry. The server will set this to be the value of the role specified (in this case "freebusy").""" print 'Creating Acl rule for user: %s' % username rule = gdata.calendar.data.CalendarAclEntry() rule.scope = gdata.acl.data.AclScope(value=username, type="user") roleValue = "http://schemas.google.com/gCal/2005#%s" % ("freebusy") rule.role = gdata.acl.data.AclRole(value=roleValue) aclUrl = "https://www.google.com/calendar/feeds/default/acl/full" returned_rule = self.cal_client.InsertAclEntry(rule, aclUrl) def _RetrieveAclRule(self, username): """Builds the aclEntryUri or the entry created in the previous example. The sends a HTTP GET message and displays the entry returned in the response.""" aclEntryUri = "http://www.google.com/calendar/feeds/" aclEntryUri += "default/acl/full/user:%s" % (username) entry = self.cal_client.GetCalendarAclEntry(aclEntryUri) print '\t%s' % (entry.title.text,) print '\t\t Role: %s' % (entry.role.value,) print '\t\t Scope %s - %s' % (entry.scope.type, entry.scope.value) return entry def _UpdateAclRule(self, entry): """Modifies the value of the role in the given entry and POSTs the updated entry. Note that while the role of an ACL entry can be updated, the scope can not be modified.""" print 'Update Acl rule: %s' % (entry.GetEditLink().href) roleValue = "http://schemas.google.com/gCal/2005#%s" % ("read") entry.role = gdata.acl.data.AclRole(value=roleValue) returned_rule = self.cal_client.Update(entry) def _DeleteAclRule(self, entry): """Given an ACL entry returned for the calendar server, this method deletes the entry. 
The edit link present in the entry is the URL used in the HTTP DELETE request.""" self.cal_client.Delete(entry.GetEditLink().href) def _batchRequest(self, updateEntry, deleteEntry): """Execute a batch request to create, update and delete an entry.""" print 'Executing batch request to insert, update and delete entries.' # feed that holds all the batch rquest entries request_feed = gdata.calendar.data.CalendarEventFeed() # creating an event entry to insert insertEntry = gdata.calendar.data.CalendarEventEntry() insertEntry.title = atom.data.Title(text='Python: batch insert') insertEntry.content = atom.data.Content(text='my content') start_time = time.strftime('%Y-%m-%dT%H:%M:%S.000Z', time.gmtime()) end_time = time.strftime('%Y-%m-%dT%H:%M:%S.000Z', time.gmtime(time.time() + 3600)) insertEntry.when.append(gdata.calendar.data.When(start=start_time, end=end_time)) insertEntry.batch_id = gdata.data.BatchId(text='insert-request') # add the insert entry to the batch feed request_feed.AddInsert(entry=insertEntry) if updateEntry: updateEntry.batch_id = gdata.data.BatchId(text='update-request') updateEntry.title = atom.data.Title(text='Python: batch update') # add the update entry to the batch feed request_feed.AddUpdate(entry=updateEntry) if deleteEntry: deleteEntry.batch_id = gdata.data.BatchId(text='delete-request') # add the delete entry to the batch feed request_feed.AddDelete(entry=deleteEntry) # submit the batch request to the server response_feed = self.cal_client.ExecuteBatch(request_feed, gdata.calendar.client.DEFAULT_BATCH_URL) # iterate the response feed to get the operation status for entry in response_feed.entry: print '\tbatch id: %s' % (entry.batch_id.text,) print '\tstatus: %s' % (entry.batch_status.code,) print '\treason: %s' % (entry.batch_status.reason,) if entry.batch_id.text == 'insert-request': insertEntry = entry elif entry.batch_id.text == 'update-request': updateEntry = entry return (insertEntry, updateEntry) def Run(self, delete='false'): """Runs 
each of the example methods defined above. Note how the result of the _InsertSingleEvent call is used for updating the title and the result of updating the title is used for inserting the reminder and again with the insertion of the extended property. This is due to the Calendar's use of GData's optimistic concurrency versioning control system: http://code.google.com/apis/gdata/reference.html#Optimistic-concurrency """ # Getting feeds and query results self._PrintUserCalendars() self._PrintOwnCalendars() self._PrintAllEventsOnDefaultCalendar() self._FullTextQuery() self._DateRangeQuery() # Inserting and updating events see = self._InsertSingleEvent() see_u_title = self._UpdateTitle(see, 'New title for single event') see_u_reminder = self._AddReminder(see_u_title, minutes=30) see_u_ext_prop = self._AddExtendedProperty(see_u_reminder, name='propname', value='propvalue') ree = self._InsertRecurringEvent() simple_web_content_event = self._InsertSimpleWebContentEvent() web_content_gadget_event = self._InsertWebContentGadgetEvent() quick_add_event = self._InsertQuickAddEvent() # Access Control List examples self._PrintAclFeed() self._CreateAclRule("user@gmail.com") entry = self._RetrieveAclRule("user@gmail.com") self._UpdateAclRule(entry) self._DeleteAclRule(entry) # Creating, updating and deleting calendars inserted_calendar = self._InsertCalendar() updated_calendar = self._UpdateCalendar(calendar=inserted_calendar) # Insert Subscription inserted_subscription = self._InsertSubscription() updated_subscription = self._UpdateCalendarSubscription(selected=False) # Execute a batch request (quick_add_event, see_u_ext_prop) = self._batchRequest(see_u_ext_prop, quick_add_event) # Delete entries if delete argument='true' if delete == 'true': print 'Deleting created events' self.cal_client.Delete(see_u_ext_prop) self.cal_client.Delete(ree) self.cal_client.Delete(simple_web_content_event) self.cal_client.Delete(web_content_gadget_event) self.cal_client.Delete(quick_add_event) 
print 'Deleting subscriptions' self._DeleteCalendarSubscription() print 'Deleting all calendars' self._DeleteAllCalendars() def main(): """Runs the CalendarExample application with the provided username and and password values. Authentication credentials are required. NOTE: It is recommended that you run this sample using a test account.""" # parse command line options try: opts, args = getopt.getopt(sys.argv[1:], "", ["user=", "pw=", "delete="]) except getopt.error, msg: print ('python calendarExample.py --user [username] --pw [password] ' + '--delete [true|false] ') sys.exit(2) user = '' pw = '' delete = 'false' # Process options for o, a in opts: if o == "--user": user = a elif o == "--pw": pw = a elif o == "--delete": delete = a if user == '' or pw == '': print ('python calendarExample.py --user [username] --pw [password] ' + '--delete [true|false] ') sys.exit(2) sample = CalendarExample(user, pw) sample.Run(delete) if __name__ == '__main__': main()
Python
#!/usr/bin/python # # Copyright (C) 2007 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # This file demonstrates how to use the Google Data API's Python client library # to interface with the Codesearch service. __author__ = 'vbarathan@gmail.com (Prakash Barathan)' from gdata import service import gdata.codesearch.service import gdata import atom import getopt import sys class CodesearchExample: def __init__(self): """Creates a GData service instance to talk to Codesearch service.""" self.service = gdata.codesearch.service.CodesearchService( source='Codesearch_Python_Sample-1.0') def PrintCodeSnippets(self, query): """Prints the codesearch results for given query.""" feed = self.service.GetSnippetsFeed(query) print feed.title.text + " Results for '" + query + "'" print '============================================' for entry in feed.entry: print "" + entry.title.text for match in entry.match: print "\tline#" + match.line_number + ":" + match.text.replace('\n', '') print def main(): """The main function runs the CodesearchExample application with user specified query.""" # parse command line options try: opts, args = getopt.getopt(sys.argv[1:], "", ["query="]) except getopt.error, msg: print ('python CodesearchExample.py --query [query_text]') sys.exit(2) query = '' # Process options for o, a in opts: if o == "--query": query = a if query == '': print ('python CodesearchExample.py --query [query]') sys.exit(2) sample = CodesearchExample() 
sample.PrintCodeSnippets(query) if __name__ == '__main__': main()
Python
#!/usr/bin/python # # Copyright (C) 2007 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # # This sample uses the Google Spreadsheets data API and the Google # Calendar data API. The script pulls a list of birthdays from a # Google Spreadsheet and inserts them as webContent events in the # user's Google Calendar. # # The script expects a certain format in the spreadsheet: Name, # Birthday, Photo URL, and Edit URL as headers. Expected format # of the birthday is: MM/DD. Edit URL is to be left blank by the # user - the script uses this column to determine whether to insert # a new event or to update an event at the URL. # # See the spreadsheet below for an example: # http://spreadsheets.google.com/pub?key=pfMX-JDVnx47J0DxqssIQHg # __author__ = 'api.stephaniel@google.com (Stephanie Liu)' try: from xml.etree import ElementTree # for Python 2.5 users except: from elementtree import ElementTree import gdata.spreadsheet.service import gdata.calendar.service import gdata.calendar import gdata.service import atom.service import gdata.spreadsheet import atom import string import time import datetime import getopt import getpass import sys class BirthdaySample: # CONSTANTS: Expected column headers: name, birthday, photourl, editurl & # default calendar reminder set to 2 days NAME = "name" BIRTHDAY = "birthday" PHOTO_URL = "photourl" EDIT_URL = "editurl" REMINDER = 60 * 24 * 2 # minutes def __init__(self, email, password): """ Initializes spreadsheet and calendar clients. 
Creates SpreadsheetsService and CalendarService objects and authenticates to each with ClientLogin. For more information about ClientLogin authentication: http://code.google.com/apis/accounts/AuthForInstalledApps.html Args: email: string password: string """ self.s_client = gdata.spreadsheet.service.SpreadsheetsService() self.s_client.email = email self.s_client.password = password self.s_client.source = 'exampleCo-birthdaySample-1' self.s_client.ProgrammaticLogin() self.c_client = gdata.calendar.service.CalendarService() self.c_client.email = email self.c_client.password = password self.c_client.source = 'exampleCo-birthdaySample-1' self.c_client.ProgrammaticLogin() def _PrintFeed(self, feed): """ Prints out Spreadsheet feeds in human readable format. Generic function taken from spreadsheetsExample.py. Args: feed: SpreadsheetsCellsFeed, SpreadsheetsListFeed, SpreadsheetsWorksheetsFeed, or SpreadsheetsSpreadsheetsFeed """ for i, entry in enumerate(feed.entry): if isinstance(feed, gdata.spreadsheet.SpreadsheetsCellsFeed): print '%s %s\n' % (entry.title.text, entry.content.text) elif isinstance(feed, gdata.spreadsheet.SpreadsheetsListFeed): print '%s %s %s\n' % (i, entry.title.text, entry.content.text) else: print '%s %s\n' % (i, entry.title.text) def _PromptForSpreadsheet(self): """ Prompts user to select spreadsheet. Gets and displays titles of all spreadsheets for user to select. Generic function taken from spreadsheetsExample.py. Args: none Returns: spreadsheet ID that the user selected: string """ feed = self.s_client.GetSpreadsheetsFeed() self._PrintFeed(feed) input = raw_input('\nSelection: ') # extract and return the spreadsheet ID return feed.entry[string.atoi(input)].id.text.rsplit('/', 1)[1] def _PromptForWorksheet(self, key): """ Prompts user to select desired worksheet. Gets and displays titles of all worksheets for user to select. Generic function taken from spreadsheetsExample.py. 
Args: key: string Returns: the worksheet ID that the user selected: string """ feed = self.s_client.GetWorksheetsFeed(key) self._PrintFeed(feed) input = raw_input('\nSelection: ') # extract and return the worksheet ID return feed.entry[string.atoi(input)].id.text.rsplit('/', 1)[1] def _AddReminder(self, event, minutes): """ Adds a reminder to a calendar event. This function sets the reminder attribute of the CalendarEventEntry. The script sets it to 2 days by default, and this value is not settable by the user. However, it can easily be changed to take this option. Args: event: CalendarEventEntry minutes: int Returns: the updated event: CalendarEventEntry """ for a_when in event.when: if len(a_when.reminder) > 0: a_when.reminder[0].minutes = minutes else: a_when.reminder.append(gdata.calendar.Reminder(minutes=minutes)) return self.c_client.UpdateEvent(event.GetEditLink().href, event) def _CreateBirthdayWebContentEvent(self, name, birthday, photo_url): """ Create the birthday web content event. This function creates and populates a CalendarEventEntry. webContent specific attributes are set. To learn more about the webContent format: http://www.google.com/support/calendar/bin/answer.py?answer=48528 Args: name: string birthday: string - expected format (MM/DD) photo_url: string Returns: the webContent CalendarEventEntry """ title = "%s's Birthday!" % name content = "It's %s's Birthday!" 
% name month = string.atoi(birthday.split("/")[0]) day = string.atoi(birthday.split("/")[1]) # Get current year year = time.ctime()[-4:] year = string.atoi(year) # Calculate the "end date" for the all day event start_time = datetime.date(year, month, day) one_day = datetime.timedelta(days=1) end_time = start_time + one_day start_time_str = start_time.strftime("%Y-%m-%d") end_time_str = end_time.strftime("%Y-%m-%d") # Create yearly recurrence rule recurrence_data = ("DTSTART;VALUE=DATE:%s\r\n" "DTEND;VALUE=DATE:%s\r\n" "RRULE:FREQ=YEARLY;WKST=SU\r\n" % (start_time.strftime("%Y%m%d"), end_time.strftime("%Y%m%d"))) web_rel = "http://schemas.google.com/gCal/2005/webContent" icon_href = "http://www.perstephanie.com/images/birthdayicon.gif" icon_type = "image/gif" extension_text = ( 'gCal:webContent xmlns:gCal="http://schemas.google.com/gCal/2005"' ' url="%s" width="300" height="225"' % (photo_url)) event = gdata.calendar.CalendarEventEntry() event.title = atom.Title(text=title) event.content = atom.Content(text=content) event.recurrence = gdata.calendar.Recurrence(text=recurrence_data) event.when.append(gdata.calendar.When(start_time=start_time_str, end_time=end_time_str)) # Adding the webContent specific XML event.link.append(atom.Link(rel=web_rel, title=title, href=icon_href, link_type=icon_type)) event.link[0].extension_elements.append( atom.ExtensionElement(extension_text)) return event def _InsertBirthdayWebContentEvent(self, event): """ Insert event into the authenticated user's calendar. Args: event: CalendarEventEntry Returns: the newly created CalendarEventEntry """ edit_uri = '/calendar/feeds/default/private/full' return self.c_client.InsertEvent(event, edit_uri) def Run(self): """ Run sample. 
TODO: add exception handling Args: none """ key_id = self._PromptForSpreadsheet() wksht_id = self._PromptForWorksheet(key_id) feed = self.s_client.GetListFeed(key_id, wksht_id) found_name = False found_birthday = False found_photourl = False found_editurl = False # Check to make sure all headers are present # Need to find at least one instance of name, birthday, photourl # editurl if len(feed.entry) > 0: for name, custom in feed.entry[0].custom.iteritems(): if custom.column == self.NAME: found_name = True elif custom.column == self.BIRTHDAY: found_birthday = True elif custom.column == self.PHOTO_URL: found_photourl = True elif custom.column == self.EDIT_URL: found_editurl = True if not found_name and found_birthday and found_photourl and found_editurl: print ("ERROR - Unexpected number of column headers. Should have: %s," " %s, %s, and %s." % (self.NAME, self.BIRTHDAY, self.PHOTO_URL, self.EDIT_URL)) sys.exit(1) # For every row in the spreadsheet, grab all the data and either insert # a new event into the calendar, or update the existing event # Create dict to represent the row data to update edit link back to # Spreadsheet for entry in feed.entry: d = {} input_valid = True for name, custom in entry.custom.iteritems(): d[custom.column] = custom.text month = int(d[self.BIRTHDAY].split("/")[0]) day = int(d[self.BIRTHDAY].split("/")[1]) # Some input checking. Script will allow the insert to continue with # a missing name value. if d[self.NAME] is None: d[self.NAME] = " " if d[self.PHOTO_URL] is None: input_valid = False if d[self.BIRTHDAY] is None: input_valid = False elif not 1 <= month <= 12 or not 1 <= day <= 31: input_valid = False if d[self.EDIT_URL] is None and input_valid: event = self._CreateBirthdayWebContentEvent(d[self.NAME], d[self.BIRTHDAY], d[self.PHOTO_URL]) event = self._InsertBirthdayWebContentEvent(event) event = self._AddReminder(event, self.REMINDER) print "Added %s's birthday!" 
% d[self.NAME] elif input_valid: # Event already exists edit_link = d[self.EDIT_URL] event = self._CreateBirthdayWebContentEvent(d[self.NAME], d[self.BIRTHDAY], d[self.PHOTO_URL]) event = self.c_client.UpdateEvent(edit_link, event) event = self._AddReminder(event, self.REMINDER) print "Updated %s's birthday!" % d[self.NAME] if input_valid: d[self.EDIT_URL] = event.GetEditLink().href self.s_client.UpdateRow(entry, d) else: print "Warning - Skipping row, missing valid input." def main(): email = raw_input("Please enter your email: ") password = getpass.getpass("Please enter your password: ") sample = BirthdaySample(email, password) sample.Run() if __name__ == '__main__': main()
Python
#!/usr/bin/python # # Copyright (C) 2009 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. __author__ = 'e.bidelman (Eric Bidelman)' import gdata.contacts import gdata.contacts.service import gdata.docs import gdata.docs.service CONSUMER_KEY = 'yourdomain.com' CONSUMER_SECRET = 'YOUR_CONSUMER_KEY' SIG_METHOD = gdata.auth.OAuthSignatureMethod.HMAC_SHA1 requestor_id = 'any.user@yourdomain.com' # Contacts Data API ============================================================ contacts = gdata.contacts.service.ContactsService() contacts.SetOAuthInputParameters( SIG_METHOD, CONSUMER_KEY, consumer_secret=CONSUMER_SECRET, two_legged_oauth=True, requestor_id=requestor_id) # GET - fetch user's contact list print "\nList of contacts for %s:" % (requestor_id,) feed = contacts.GetContactsFeed() for entry in feed.entry: print entry.title.text # GET - fetch another user's contact list requestor_id = 'another_user@yourdomain.com' print "\nList of contacts for %s:" % (requestor_id,) contacts.GetOAuthInputParameters().requestor_id = requestor_id feed = contacts.GetContactsFeed() for entry in feed.entry: print entry.title.text # Google Documents List Data API =============================================== docs = gdata.docs.service.DocsService() docs.SetOAuthInputParameters( SIG_METHOD, CONSUMER_KEY, consumer_secret=CONSUMER_SECRET, two_legged_oauth=True, requestor_id=requestor_id) # POST - upload a document print "\nUploading document to %s's Google Documents account:" % (requestor_id,) ms 
= gdata.MediaSource( file_path='/path/to/test.txt', content_type=gdata.docs.service.SUPPORTED_FILETYPES['TXT']) # GET - fetch user's document list entry = docs.UploadDocument(ms, 'Company Perks') print 'Document now accessible online at:', entry.GetAlternateLink().href print "\nList of Google Documents for %s" % (requestor_id,) feed = docs.GetDocumentListFeed() for entry in feed.entry: print entry.title.text
Python
#!/usr/bin/python # # Copyright (C) 2009 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Note: # This sample demonstrates 2 Legged OAuth using v2 of the Google Data APIs. # See 2_legged_oauth.py for an example of using 2LO with v1.0 of the APIs. __author__ = 'e.bidelman (Eric Bidelman)' import gdata.gauth import gdata.contacts.client import gdata.docs.client SOURCE_APP_NAME = 'google-PyClient2LOSample-v2.0' CONSUMER_KEY = 'yourdomain.com' CONSUMER_SECRET = 'YOUR_CONSUMER_KEY' def PrintContacts(client): print '\nListing contacts for %s...' 
% client.auth_token.requestor_id feed = client.GetContacts() for entry in feed.entry: print entry.title.text # Contacts Data API Example ==================================================== requestor_id = 'any.user@' + CONSUMER_KEY two_legged_oauth_token = gdata.gauth.TwoLeggedOAuthHmacToken( CONSUMER_KEY, CONSUMER_SECRET, requestor_id) contacts_client = gdata.contacts.client.ContactsClient(source=SOURCE_APP_NAME) contacts_client.auth_token = two_legged_oauth_token # GET - fetch user's contact list PrintContacts(contacts_client) # GET - fetch another user's contact list contacts_client.auth_token.requestor_id = 'different.user' + CONSUMER_KEY PrintContacts(contacts_client) # Documents List Data API Example ============================================== docs_client = gdata.docs.client.DocsClient(source=SOURCE_APP_NAME) docs_client.auth_token = two_legged_oauth_token docs_client.ssl = True # POST - upload a document print "\nUploading doc to %s's account..." % docs_client.auth_token.requestor_id entry = docs_client.Upload('test.txt', 'MyDocTitle', content_type='text/plain') print 'Document now accessible online at:', entry.GetAlternateLink().href # GET - fetch the user's document list print '\nListing Google Docs for %s...' % docs_client.auth_token.requestor_id feed = docs_client.GetDocList() for entry in feed.entry: print entry.title.text
Python
#!/usr/bin/python # # Copyright (C) 2007 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. __author__ = 'kunalmshah.userid (Kunal Shah)' import sys import os.path import getopt import gdata.auth import gdata.docs.service class OAuthSample(object): """An OAuthSample object demonstrates the three-legged OAuth process.""" def __init__(self, consumer_key, consuer_secret): """Constructor for the OAuthSample object. Takes a consumer key and consumer secret, authenticates using OAuth mechanism and lists the document titles using Document List Data API. Uses HMAC-SHA1 signature method. Args: consumer_key: string Domain identifying third_party web application. consumer_secret: string Secret generated during registration. Returns: An OAuthSample object used to run the sample demonstrating the way to use OAuth authentication mode. """ self.consumer_key = consumer_key self.consumer_secret = consuer_secret self.gd_client = gdata.docs.service.DocsService() def _PrintFeed(self, feed): """Prints out the contents of a feed to the console. Args: feed: A gdata.docs.DocumentListFeed instance. """ if not feed.entry: print 'No entries in feed.\n' i = 1 for entry in feed.entry: print '%d. 
%s\n' % (i, entry.title.text.encode('UTF-8')) i += 1 def _ListAllDocuments(self): """Retrieves a list of all of a user's documents and displays them.""" feed = self.gd_client.GetDocumentListFeed() self._PrintFeed(feed) def Run(self): """Demonstrates usage of OAuth authentication mode and retrieves a list of documents using Document List Data API.""" print '\nSTEP 1: Set OAuth input parameters.' self.gd_client.SetOAuthInputParameters( gdata.auth.OAuthSignatureMethod.HMAC_SHA1, self.consumer_key, consumer_secret=self.consumer_secret) print '\nSTEP 2: Fetch OAuth Request token.' request_token = self.gd_client.FetchOAuthRequestToken() print 'Request Token fetched: %s' % request_token print '\nSTEP 3: Set the fetched OAuth token.' self.gd_client.SetOAuthToken(request_token) print 'OAuth request token set.' print '\nSTEP 4: Generate OAuth authorization URL.' auth_url = self.gd_client.GenerateOAuthAuthorizationURL() print 'Authorization URL: %s' % auth_url raw_input('Manually go to the above URL and authenticate.' 'Press a key after authorization.') print '\nSTEP 5: Upgrade to an OAuth access token.' self.gd_client.UpgradeToOAuthAccessToken() print 'Access Token: %s' % ( self.gd_client.token_store.find_token(request_token.scopes[0])) print '\nYour Documents:\n' self._ListAllDocuments() print 'STEP 6: Revoke the OAuth access token after use.' self.gd_client.RevokeOAuthToken() print 'OAuth access token revoked.' def main(): """Demonstrates usage of OAuth authentication mode. Prints a list of documents. This demo uses HMAC-SHA1 signature method. 
""" # Parse command line options try: opts, args = getopt.getopt(sys.argv[1:], '', ['consumer_key=', 'consumer_secret=']) except getopt.error, msg: print ('python oauth_example.py --consumer_key [oauth_consumer_key] ' '--consumer_secret [consumer_secret] ') sys.exit(2) consumer_key = '' consumer_secret = '' # Process options for option, arg in opts: if option == '--consumer_key': consumer_key = arg elif option == '--consumer_secret': consumer_secret = arg while not consumer_key: consumer_key = raw_input('Please enter consumer key: ') while not consumer_secret: consumer_secret = raw_input('Please enter consumer secret: ') sample = OAuthSample(consumer_key, consumer_secret) sample.Run() if __name__ == '__main__': main()
Python
#!/usr/bin/python # # Copyright (C) 2009 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. __author__ = 'e.bidelman (Eric Bidelman)' import cgi import os import gdata.auth import gdata.docs import gdata.docs.service import gdata.alt.appengine from appengine_utilities.sessions import Session from django.utils import simplejson from google.appengine.api import users from google.appengine.ext import webapp from google.appengine.ext.webapp import template from google.appengine.ext.webapp.util import run_wsgi_app SETTINGS = { 'APP_NAME': 'google-GDataOAuthAppEngine-v1', 'CONSUMER_KEY': 'YOUR_CONSUMER_KEY', 'CONSUMER_SECRET': 'YOUR_CONSUMER_SECRET', 'SIG_METHOD': gdata.auth.OAuthSignatureMethod.HMAC_SHA1, 'SCOPES': ['http://docs.google.com/feeds/', 'https://docs.google.com/feeds/'] } gdocs = gdata.docs.service.DocsService(source=SETTINGS['APP_NAME']) gdocs.SetOAuthInputParameters(SETTINGS['SIG_METHOD'], SETTINGS['CONSUMER_KEY'], consumer_secret=SETTINGS['CONSUMER_SECRET']) gdata.alt.appengine.run_on_appengine(gdocs) class MainPage(webapp.RequestHandler): """Main page displayed to user.""" # GET / def get(self): if not users.get_current_user(): self.redirect(users.create_login_url(self.request.uri)) access_token = gdocs.token_store.find_token('%20'.join(SETTINGS['SCOPES'])) if isinstance(access_token, gdata.auth.OAuthToken): form_action = '/fetch_data' form_value = 'Now fetch my docs!' 
revoke_token_link = True else: form_action = '/get_oauth_token' form_value = 'Give this website access to my Google Docs' revoke_token_link = None template_values = { 'form_action': form_action, 'form_value': form_value, 'user': users.get_current_user(), 'revoke_token_link': revoke_token_link, 'oauth_token': access_token, 'consumer': gdocs.GetOAuthInputParameters().GetConsumer(), 'sig_method': gdocs.GetOAuthInputParameters().GetSignatureMethod().get_name() } path = os.path.join(os.path.dirname(__file__), 'index.html') self.response.out.write(template.render(path, template_values)) class OAuthDance(webapp.RequestHandler): """Handler for the 3 legged OAuth dance, v1.0a.""" """This handler is responsible for fetching an initial OAuth request token, redirecting the user to the approval page. When the user grants access, they will be redirected back to this GET handler and their authorized request token will be exchanged for a long-lived access token.""" # GET /get_oauth_token def get(self): """Invoked after we're redirected back from the approval page.""" self.session = Session() oauth_token = gdata.auth.OAuthTokenFromUrl(self.request.uri) if oauth_token: oauth_token.secret = self.session['oauth_token_secret'] oauth_token.oauth_input_params = gdocs.GetOAuthInputParameters() gdocs.SetOAuthToken(oauth_token) # 3.) Exchange the authorized request token for an access token oauth_verifier = self.request.get('oauth_verifier', default_value='') access_token = gdocs.UpgradeToOAuthAccessToken( oauth_verifier=oauth_verifier) # Remember the access token in the current user's token store if access_token and users.get_current_user(): gdocs.token_store.add_token(access_token) elif access_token: gdocs.current_token = access_token gdocs.SetOAuthToken(access_token) self.redirect('/') # POST /get_oauth_token def post(self): """Fetches a request token and redirects the user to the approval page.""" self.session = Session() if users.get_current_user(): # 1.) REQUEST TOKEN STEP. 
Provide the data scope(s) and the page we'll # be redirected back to after the user grants access on the approval page. req_token = gdocs.FetchOAuthRequestToken( scopes=SETTINGS['SCOPES'], oauth_callback=self.request.uri) # When using HMAC, persist the token secret in order to re-create an # OAuthToken object coming back from the approval page. self.session['oauth_token_secret'] = req_token.secret # Generate the URL to redirect the user to. Add the hd paramter for a # better user experience. Leaving it off will give the user the choice # of what account (Google vs. Google Apps) to login with. domain = self.request.get('domain', default_value='default') approval_page_url = gdocs.GenerateOAuthAuthorizationURL( extra_params={'hd': domain}) # 2.) APPROVAL STEP. Redirect to user to Google's OAuth approval page. self.redirect(approval_page_url) class FetchData(OAuthDance): """Fetches the user's data.""" """This class inherits from OAuthDance in order to utilize OAuthDance.post() in case of a request error (e.g. 
the user has a bad token).""" # GET /fetch_data def get(self): self.redirect('/') # POST /fetch_data def post(self): """Fetches the user's data.""" try: feed = gdocs.GetDocumentListFeed() json = [] for entry in feed.entry: if entry.lastModifiedBy is not None: last_modified_by = entry.lastModifiedBy.email.text else: last_modified_by = '' if entry.lastViewed is not None: last_viewed = entry.lastViewed.text else: last_viewed = '' json.append({'title': entry.title.text, 'links': {'alternate': entry.GetHtmlLink().href}, 'published': entry.published.text, 'updated': entry.updated.text, 'resourceId': entry.resourceId.text, 'type': entry.GetDocumentType(), 'lastModifiedBy': last_modified_by, 'lastViewed': last_viewed }) self.response.out.write(simplejson.dumps(json)) except gdata.service.RequestError, error: OAuthDance.post(self) class RevokeToken(webapp.RequestHandler): # GET /revoke_token def get(self): """Revokes the current user's OAuth access token.""" try: gdocs.RevokeOAuthToken() except gdata.service.RevokingOAuthTokenFailed: pass gdocs.token_store.remove_all_tokens() self.redirect('/') def main(): application = webapp.WSGIApplication([('/', MainPage), ('/get_oauth_token', OAuthDance), ('/fetch_data', FetchData), ('/revoke_token', RevokeToken)], debug=True) run_wsgi_app(application)
Python
"""
Copyright (c) 2008, appengine-utilities project
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

- Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
- Neither the name of the appengine-utilities project nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
"""
from google.appengine.ext import db

from cache import Cache


class Paginator(object):
    """
    This class is used for maintaining pagination objects.
    """

    @classmethod
    def get(cls, count=10, q_filters=None, search=None, start=None,
            model=None, order='ASC', order_by='__key__'):
        """
        Queries the datastore on model, starting with start, ordered by
        order.  It retrieves count + 1 items; the extra item is used only
        to detect whether a next page exists.

        Arguments:
          count: The amount of entries to pull on query
          q_filters: dict of {property: value} equality filters (optional)
          search: Search is used for SearchableModel searches
          start: The order_by value to start the page from
          model: The Model object to query against.  This is not a
                 string, it must be a Model derived object.
          order: The order in which to pull the values ('ASC' or 'DESC')
          order_by: The attribute to order results by.  This defaults to
                    __key__

        Returns a dict:
          {"results": entities_pulled, "next": next_page_value_or_None,
           "previous": previous_page_value_or_None}
        """
        # BUG FIX: the original used a mutable default argument
        # (q_filters={}), which is shared across calls.
        if q_filters is None:
            q_filters = {}

        # argument validation
        if model is None:
            raise ValueError('You must pass a model to query')
        # a valid model object will have a gql method.
        if not callable(model.gql):
            raise TypeError('model must be a valid model object.')

        # Cache check.
        # NOTE(review): the key ignores count/start/order, and nothing in
        # this method ever stores a value under it, so the lookup can only
        # hit if other code populates the cache -- confirm intended behavior.
        cache_string = "gae_paginator_"
        for q_filter in q_filters:
            cache_string = cache_string + q_filter + "_" + \
                q_filters[q_filter] + "_"
        cache_string = cache_string + "index"
        c = Cache()
        if c.has_key(cache_string):
            return c[cache_string]

        # Build the forward query: equality filters, page-start bound,
        # optional full-text search, then ordering.
        query = model.all()
        for q_filter in q_filters:
            query.filter(q_filter + " = ", q_filters[q_filter])
        if start:
            # Page forward from `start`: strictly less-than for descending
            # order, strictly greater-than for ascending.
            if order.lower() == "desc":
                query.filter(order_by + " <", start)
            else:
                query.filter(order_by + " >", start)
        if search:
            query.search(search)
        if order.lower() == "desc":
            query.order("-" + order_by)
        else:
            query.order(order_by)

        # Fetch one extra entity purely to detect a next page.
        results = query.fetch(count + 1)

        # BUG FIX: `previous` (and `next`) were previously only assigned
        # inside the len == count + 1 branch, so a short result set raised
        # NameError at the return statement below.
        next = None
        previous = None
        if len(results) == count + 1:
            next = getattr(results[count - 1], order_by)
            # reverse the query to get the value for previous
            if start is not None:
                rquery = model.all()
                for q_filter in q_filters:
                    rquery.filter(q_filter + " = ", q_filters[q_filter])
                if search:
                    # BUG FIX: the original called query.search(search)
                    # here, applying the term to the wrong query object.
                    rquery.search(search)
                if order.lower() == "desc":
                    rquery.order(order_by)
                else:
                    rquery.order("-" + order_by)
                rresults = rquery.fetch(count)
                # NOTE(review): rresults is fetched but never used;
                # `previous` is taken from the forward results instead.
                # This looks suspicious but is preserved as-is -- confirm
                # the intended source of the previous-page value.
                previous = getattr(results[0], order_by)
        return {"results": results, "next": next, "previous": previous}
Python
# -*- coding: utf-8 -*-
"""
Copyright (c) 2008, appengine-utilities project
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

- Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
- Neither the name of the appengine-utilities project nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
"""

# main python imports
import datetime
import pickle
import random
import __main__

# google appengine import
from google.appengine.ext import db
from google.appengine.api import memcache

# settings
DEFAULT_TIMEOUT = 3600    # cache expires after one hour (3600 sec)
CLEAN_CHECK_PERCENT = 50  # 50% of all requests will clean the database
MAX_HITS_TO_CLEAN = 100   # the maximum number of cache hits to clean on attempt


def _delta_seconds(delta):
    # Full length of a timedelta in whole seconds. The previous code used
    # delta.seconds, which silently drops whole days from the TTL.
    return delta.days * 86400 + delta.seconds


class _AppEngineUtilities_Cache(db.Model):
    # It's up to the application to determine the format of their keys
    cachekey = db.StringProperty()
    createTime = db.DateTimeProperty(auto_now_add=True)
    timeout = db.DateTimeProperty()
    value = db.BlobProperty()


class Cache(object):
    """
    Cache is used for storing pregenerated output and/or objects in the Big
    Table datastore to minimize the amount of queries needed for page
    displays. The idea is that complex queries that generate the same
    results really should only be run once. Cache can be used to store
    pregenerated values made from queries (or other calls such as
    urlFetch()), or the query objects themselves.
    """

    def __init__(self, clean_check_percent=CLEAN_CHECK_PERCENT,
                 max_hits_to_clean=MAX_HITS_TO_CLEAN,
                 default_timeout=DEFAULT_TIMEOUT):
        """
        Initializer

        Args:
            clean_check_percent: how often cache initialization should run
                the cache cleanup
            max_hits_to_clean: maximum number of stale hits to clean
            default_timeout: default length (seconds) a cache item is good for
        """
        self.clean_check_percent = clean_check_percent
        self.max_hits_to_clean = max_hits_to_clean
        self.default_timeout = default_timeout

        # randomly run the stale-entry sweep on a fraction of constructions
        if random.randint(1, 100) < self.clean_check_percent:
            self._clean_cache()

        if 'AEU_Events' in __main__.__dict__:
            __main__.AEU_Events.fire_event('cacheInitialized')

    def _clean_cache(self):
        """
        Find and delete cache items whose timeout has passed. This helps
        keep the size of the overall datastore down.
        """
        query = _AppEngineUtilities_Cache.all()
        query.filter('timeout < ', datetime.datetime.now())
        results = query.fetch(self.max_hits_to_clean)
        # batch delete: one datastore round trip instead of one per entity
        db.delete(results)

    def _validate_key(self, key):
        # a key is required for every cache operation
        if key is None:
            raise KeyError

    def _validate_value(self, value):
        # None is not a storable value (get() uses it to mean "missing")
        if value is None:
            raise ValueError

    def _validate_timeout(self, timeout):
        """
        Normalize a timeout argument to an absolute datetime.

        Accepts None (use the instance default), an int number of seconds,
        or a datetime; raises TypeError/ValueError otherwise.
        """
        if timeout is None:
            # BUGFIX: use the instance's default_timeout; the original
            # ignored the constructor argument and always used the module
            # constant DEFAULT_TIMEOUT.
            timeout = datetime.datetime.now() + \
                datetime.timedelta(seconds=self.default_timeout)
        if isinstance(timeout, int):
            timeout = datetime.datetime.now() + \
                datetime.timedelta(seconds=timeout)
        if not isinstance(timeout, datetime.datetime):
            raise TypeError
        if timeout < datetime.datetime.now():
            raise ValueError
        return timeout

    def add(self, key=None, value=None, timeout=None):
        """
        Add an entry to the cache. Raises KeyError if one already exists.
        """
        self._validate_key(key)
        self._validate_value(value)
        timeout = self._validate_timeout(timeout)

        if key in self:
            raise KeyError

        cacheEntry = _AppEngineUtilities_Cache()
        cacheEntry.cachekey = key
        cacheEntry.value = pickle.dumps(value)
        cacheEntry.timeout = timeout

        # try to put the entry; if it fails, silently pass. Failures may
        # happen due to timeouts, the datastore being read-only for
        # maintenance, or other causes. The cache not being able to write
        # to the datastore should not break the application.
        try:
            cacheEntry.put()
        except Exception:
            pass

        memcache_timeout = timeout - datetime.datetime.now()
        memcache.set('cache-' + key, value, _delta_seconds(memcache_timeout))
        if 'AEU_Events' in __main__.__dict__:
            __main__.AEU_Events.fire_event('cacheAdded')

    def set(self, key=None, value=None, timeout=None):
        """
        Add an entry to the cache, overwriting an existing value if one
        already exists.
        """
        self._validate_key(key)
        self._validate_value(value)
        timeout = self._validate_timeout(timeout)

        cacheEntry = self._read(key)
        if not cacheEntry:
            cacheEntry = _AppEngineUtilities_Cache()
            cacheEntry.cachekey = key
        cacheEntry.value = pickle.dumps(value)
        cacheEntry.timeout = timeout

        # best-effort datastore write; see add() for rationale
        try:
            cacheEntry.put()
        except Exception:
            pass

        memcache_timeout = timeout - datetime.datetime.now()
        memcache.set('cache-' + key, value, _delta_seconds(memcache_timeout))
        if 'AEU_Events' in __main__.__dict__:
            __main__.AEU_Events.fire_event('cacheSet')

    def _read(self, key=None):
        """
        Return the cache entity for key, or None if missing/expired.

        Private because it returns a db.Model object and does not unpickle
        the stored value; __getitem__/get are the preferred access methods.
        """
        query = _AppEngineUtilities_Cache.all()
        query.filter('cachekey', key)
        query.filter('timeout > ', datetime.datetime.now())
        results = query.fetch(1)
        if not results:
            return None
        # BUGFIX: these events were placed after the return statement and
        # therefore never fired; moved before it.
        if 'AEU_Events' in __main__.__dict__:
            __main__.AEU_Events.fire_event('cacheReadFromDatastore')
        if 'AEU_Events' in __main__.__dict__:
            __main__.AEU_Events.fire_event('cacheRead')
        return results[0]

    def delete(self, key=None):
        """
        Delete the cache entry for key from memcache and the datastore.
        """
        memcache.delete('cache-' + key)
        result = self._read(key)
        if result:
            if 'AEU_Events' in __main__.__dict__:
                __main__.AEU_Events.fire_event('cacheDeleted')
            result.delete()

    def get(self, key):
        """
        Return the cached value associated with key.

        Checks memcache first, then the datastore (repopulating memcache
        on a datastore hit). Raises KeyError when the key is missing or
        expired.
        """
        mc = memcache.get('cache-' + key)
        if mc:
            if 'AEU_Events' in __main__.__dict__:
                __main__.AEU_Events.fire_event('cacheReadFromMemcache')
            if 'AEU_Events' in __main__.__dict__:
                __main__.AEU_Events.fire_event('cacheRead')
            return mc
        result = self._read(key)
        if result:
            timeout = result.timeout - datetime.datetime.now()
            value = pickle.loads(result.value)
            memcache.set('cache-' + key, value, _delta_seconds(timeout))
            return value
        raise KeyError

    def get_many(self, keys):
        """
        Returns a dict mapping each key in keys to its value. If the given
        key is missing, it will be missing from the response dict.
        """
        values = {}
        for key in keys:
            # BUGFIX: the original assigned an undefined name (`val`) and
            # let get()'s KeyError escape instead of omitting missing keys
            # as documented.
            try:
                values[key] = self.get(key)
            except KeyError:
                continue
        return values

    def __getitem__(self, key):
        """
        __getitem__ is necessary for this object to emulate a container.
        """
        return self.get(key)

    def __setitem__(self, key, value):
        """
        __setitem__ is necessary for this object to emulate a container.
        """
        return self.set(key, value)

    def __delitem__(self, key):
        """
        Implement the 'del' keyword.
        """
        return self.delete(key)

    def __contains__(self, key):
        """
        Implements the "in" operator.
        """
        try:
            self.__getitem__(key)
        except KeyError:
            return False
        return True

    def has_key(self, keyname):
        """
        Equivalent to k in a; use that form in new code.
        """
        return self.__contains__(keyname)
Python
# -*- coding: utf-8 -*- """ Copyright (c) 2008, appengine-utilities project All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - Neither the name of the appengine-utilities project nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
""" # main python imports import os import time import datetime import random import md5 import Cookie import pickle import __main__ from time import strftime import logging # google appengine imports from google.appengine.ext import db from google.appengine.api import memcache #django simplejson import, used for flash from django.utils import simplejson from rotmodel import ROTModel # settings, if you have these set elsewhere, such as your django settings file, # you'll need to adjust the values to pull from there. class _AppEngineUtilities_Session(db.Model): """ Model for the sessions in the datastore. This contains the identifier and validation information for the session. """ sid = db.StringListProperty() session_key = db.FloatProperty() ip = db.StringProperty() ua = db.StringProperty() last_activity = db.DateTimeProperty() dirty = db.BooleanProperty(default=False) working = db.BooleanProperty(default=False) deleted = db.BooleanProperty(default=False) # used for cases where # datastore delete doesn't # work def put(self): """ Extend put so that it writes vaules to memcache as well as the datastore, and keeps them in sync, even when datastore writes fails. """ if self.session_key: memcache.set("_AppEngineUtilities_Session_" + str(self.session_key), self) else: # new session, generate a new key, which will handle the put and set the memcache self.create_key() self.last_activity = datetime.datetime.now() try: self.dirty = False logging.info("doing a put") db.put(self) memcache.set("_AppEngineUtilities_Session_" + str(self.session_key), self) except: self.dirty = True memcache.set("_AppEngineUtilities_Session_" + str(self.session_key), self) return self @classmethod def get_session(cls, session_obj=None): """ Uses the passed sid to get a session object from memcache, or datastore if a valid one exists. 
""" if session_obj.sid == None: return None session_key = session_obj.sid.split('_')[0] session = memcache.get("_AppEngineUtilities_Session_" + str(session_key)) if session: if session.deleted == True: session.delete() return None if session.dirty == True and session.working != False: # the working bit is used to make sure multiple requests, which can happen # with ajax oriented sites, don't try to put at the same time session.working = True memcache.set("_AppEngineUtilities_Session_" + str(session_key), session) session.put() if session_obj.sid in session.sid: logging.info('grabbed session from memcache') sessionAge = datetime.datetime.now() - session.last_activity if sessionAge.seconds > session_obj.session_expire_time: session.delete() return None return session else: return None # Not in memcache, check datastore query = _AppEngineUtilities_Session.all() query.filter("sid = ", session_obj.sid) results = query.fetch(1) if len(results) > 0: sessionAge = datetime.datetime.now() - results[0].last_activity if sessionAge.seconds > self.session_expire_time: results[0].delete() return None memcache.set("_AppEngineUtilities_Session_" + str(session_key), results[0]) memcache.set("_AppEngineUtilities_SessionData_" + str(session_key), results[0].get_items_ds()) logging.info('grabbed session from datastore') return results[0] else: return None def get_items(self): """ Returns all the items stored in a session """ items = memcache.get("_AppEngineUtilities_SessionData_" + str(self.session_key)) if items: for item in items: if item.deleted == True: item.delete() items.remove(item) return items query = _AppEngineUtilities_SessionData.all() query.filter('session_key', self.session_key) results = query.fetch(1000) return results def get_item(self, keyname = None): """ Returns a single item from the memcache or datastore """ mc = memcache.get("_AppEngineUtilities_SessionData_" + str(self.session_key)) if mc: for item in mc: if item.keyname == keyname: if item.deleted == True: 
item.delete() return None return item query = _AppEngineUtilities_SessionData.all() query.filter("session_key = ", self.session_key) query.filter("keyname = ", keyname) results = query.fetch(1) if len(results) > 0: memcache.set("_AppEngineUtilities_SessionData_" + str(self.session_key), self.get_items_ds()) return results[0] return None def get_items_ds(self): """ This gets all the items straight from the datastore, does not interact with the memcache. """ query = _AppEngineUtilities_SessionData.all() query.filter('session_key', self.session_key) results = query.fetch(1000) return results def delete(self): try: query = _AppEngineUtilities_SessionData.all() query.filter("session_key = ", self.session_key) results = query.fetch(1000) db.delete(results) db.delete(self) memcache.delete_multi(["_AppEngineUtilities_Session_" + str(self.session_key), "_AppEngineUtilities_SessionData_" + str(self.session_key)]) except: mc = memcache.get("_AppEngineUtilities_Session_" + str(self.session_key)) mc.deleted = True memcache.set("_AppEngineUtilities_Session_" + str(self.session_key), mc) def create_key(self): """ Creates a unique key for the session. """ self.session_key = time.time() valid = False while valid == False: # verify session_key is unique if memcache.get("_AppEngineUtilities_Session_" + str(self.session_key)): self.session_key = self.session_key + 0.001 else: query = _AppEngineUtilities_Session.all() query.filter("session_key = ", self.session_key) results = query.fetch(1) if len(results) > 0: self.session_key = self.session_key + 0.001 else: try: self.put() memcache.set("_AppEngineUtilities_Session_" + str(self.session_key), self) except: self.dirty = True memcache.set("_AppEngineUtilities_Session_" + str(self.session_key), self) valid = True class _AppEngineUtilities_SessionData(db.Model): """ Model for the session data in the datastore. 
""" session_key = db.FloatProperty() keyname = db.StringProperty() content = db.BlobProperty() dirty = db.BooleanProperty(default=False) deleted = db.BooleanProperty(default=False) def put(self): """ Adds a keyname/value for session to the datastore and memcache """ # update or insert in datastore try: db.put(self) self.dirty = False except: self.dirty = True # update or insert in memcache mc_items = memcache.get("_AppEngineUtilities_SessionData_" + str(self.session_key)) if mc_items: value_updated = False for item in mc_items: if value_updated == True: break if item.keyname == self.keyname: logging.info("updating " + self.keyname) item.content = self.content memcache.set("_AppEngineUtilities_SessionData_" + str(self.session_key), mc_items) value_updated = True break if value_updated == False: #logging.info("adding " + self.keyname) mc_items.append(self) memcache.set("_AppEngineUtilities_SessionData_" + str(self.session_key), mc_items) def delete(self): """ Deletes an entity from the session in memcache and the datastore """ try: db.delete(self) except: self.deleted = True mc_items = memcache.get("_AppEngineUtilities_SessionData_" + str(self.session_key)) value_handled = False for item in mc_items: if value_handled == True: break if item.keyname == self.keyname: if self.deleted == True: item.deleted = True else: mc_items.remove(item) memcache.set("_AppEngineUtilities_SessionData_" + str(self.session_key), mc_items) class _DatastoreWriter(object): def put(self, keyname, value, session): """ Insert a keyname/value pair into the datastore for the session. Args: keyname: The keyname of the mapping. value: The value of the mapping. """ keyname = session._validate_key(keyname) if value is None: raise ValueError('You must pass a value to put.') # datestore write trumps cookie. If there is a cookie value # with this keyname, delete it so we don't have conflicting # entries. 
if session.cookie_vals.has_key(keyname): del(session.cookie_vals[keyname]) session.output_cookie[session.cookie_name + '_data'] = \ simplejson.dumps(session.cookie_vals) print session.output_cookie.output() sessdata = session._get(keyname=keyname) if sessdata is None: sessdata = _AppEngineUtilities_SessionData() sessdata.session_key = session.session.session_key sessdata.keyname = keyname sessdata.content = pickle.dumps(value) # UNPICKLING CACHE session.cache[keyname] = pickle.dumps(value) session.cache[keyname] = value sessdata.put() # todo _set_memcache() should be going away when this is done # session._set_memcache() class _CookieWriter(object): def put(self, keyname, value, session): """ Insert a keyname/value pair into the datastore for the session. Args: keyname: The keyname of the mapping. value: The value of the mapping. """ keyname = session._validate_key(keyname) if value is None: raise ValueError('You must pass a value to put.') # Use simplejson for cookies instead of pickle. session.cookie_vals[keyname] = value # update the requests session cache as well. session.cache[keyname] = value session.output_cookie[session.cookie_name + '_data'] = \ simplejson.dumps(session.cookie_vals) print session.output_cookie.output() class Session(object): """ Sessions used to maintain user presence between requests. Sessions store a unique id as a cookie in the browser and referenced in a datastore object. This maintains user presence by validating requests as visits from the same browser. You can add extra data to the session object by using it as a dictionary object. Values can be any python object that can be pickled. For extra performance, session objects are also store in memcache and kept consistent with the datastore. This increases the performance of read requests to session data. 
""" COOKIE_NAME = 'appengine-utilities-session-sid' # session token DEFAULT_COOKIE_PATH = '/' SESSION_EXPIRE_TIME = 7200 # sessions are valid for 7200 seconds (2 hours) CLEAN_CHECK_PERCENT = 50 # By default, 50% of all requests will clean the database INTEGRATE_FLASH = True # integrate functionality from flash module? CHECK_IP = True # validate sessions by IP CHECK_USER_AGENT = True # validate sessions by user agent SET_COOKIE_EXPIRES = True # Set to True to add expiration field to cookie SESSION_TOKEN_TTL = 5 # Number of seconds a session token is valid for. UPDATE_LAST_ACTIVITY = 60 # Number of seconds that may pass before # last_activity is updated WRITER = "datastore" # Use the datastore writer by default. cookie is the # other option. def __init__(self, cookie_path=DEFAULT_COOKIE_PATH, cookie_name=COOKIE_NAME, session_expire_time=SESSION_EXPIRE_TIME, clean_check_percent=CLEAN_CHECK_PERCENT, integrate_flash=INTEGRATE_FLASH, check_ip=CHECK_IP, check_user_agent=CHECK_USER_AGENT, set_cookie_expires=SET_COOKIE_EXPIRES, session_token_ttl=SESSION_TOKEN_TTL, last_activity_update=UPDATE_LAST_ACTIVITY, writer=WRITER): """ Initializer Args: cookie_name: The name for the session cookie stored in the browser. session_expire_time: The amount of time between requests before the session expires. clean_check_percent: The percentage of requests the will fire off a cleaning routine that deletes stale session data. integrate_flash: If appengine-utilities flash utility should be integrated into the session object. check_ip: If browser IP should be used for session validation check_user_agent: If the browser user agent should be used for sessoin validation. set_cookie_expires: True adds an expires field to the cookie so it saves even if the browser is closed. session_token_ttl: Number of sessions a session token is valid for before it should be regenerated. 
""" self.cookie_path = cookie_path self.cookie_name = cookie_name self.session_expire_time = session_expire_time self.integrate_flash = integrate_flash self.check_user_agent = check_user_agent self.check_ip = check_ip self.set_cookie_expires = set_cookie_expires self.session_token_ttl = session_token_ttl self.last_activity_update = last_activity_update self.writer = writer # make sure the page is not cached in the browser self.no_cache_headers() # Check the cookie and, if necessary, create a new one. self.cache = {} string_cookie = os.environ.get('HTTP_COOKIE', '') self.cookie = Cookie.SimpleCookie() self.output_cookie = Cookie.SimpleCookie() self.cookie.load(string_cookie) try: self.cookie_vals = \ simplejson.loads(self.cookie[self.cookie_name + '_data'].value) # sync self.cache and self.cookie_vals which will make those # values available for all gets immediately. for k in self.cookie_vals: self.cache[k] = self.cookie_vals[k] self.output_cookie[self.cookie_name + '_data'] = self.cookie[self.cookie_name + '_data'] # sync the input cookie with the output cookie except: self.cookie_vals = {} if writer == "cookie": pass else: self.sid = None new_session = True # do_put is used to determine if a datastore write should # happen on this request. 
do_put = False # check for existing cookie if self.cookie.get(cookie_name): self.sid = self.cookie[cookie_name].value self.session = _AppEngineUtilities_Session.get_session(self) # will return None if # sid expired if self.session: new_session = False if new_session: # start a new session self.session = _AppEngineUtilities_Session() self.session.put() self.sid = self.new_sid() if 'HTTP_USER_AGENT' in os.environ: self.session.ua = os.environ['HTTP_USER_AGENT'] else: self.session.ua = None if 'REMOTE_ADDR' in os.environ: self.session.ip = os.environ['REMOTE_ADDR'] else: self.session.ip = None self.session.sid = [self.sid] # do put() here to get the session key self.session.put() else: # check the age of the token to determine if a new one # is required duration = datetime.timedelta(seconds=self.session_token_ttl) session_age_limit = datetime.datetime.now() - duration if self.session.last_activity < session_age_limit: logging.info("UPDATING SID LA = " + str(self.session.last_activity) + " - TL = " + str(session_age_limit)) self.sid = self.new_sid() if len(self.session.sid) > 2: self.session.sid.remove(self.session.sid[0]) self.session.sid.append(self.sid) do_put = True else: self.sid = self.session.sid[-1] # check if last_activity needs updated ula = datetime.timedelta(seconds=self.last_activity_update) if datetime.datetime.now() > self.session.last_activity + ula: do_put = True self.output_cookie[cookie_name] = self.sid self.output_cookie[cookie_name]['path'] = cookie_path # UNPICKLING CACHE self.cache['sid'] = pickle.dumps(self.sid) self.cache['sid'] = self.sid if do_put: if self.sid != None or self.sid != "": logging.info("doing put") self.session.put() if self.set_cookie_expires: if not self.output_cookie.has_key(cookie_name + '_data'): self.output_cookie[cookie_name + '_data'] = "" self.output_cookie[cookie_name + '_data']['expires'] = \ self.session_expire_time print self.output_cookie.output() # fire up a Flash object if integration is enabled if 
self.integrate_flash: import flash self.flash = flash.Flash(cookie=self.cookie) # randomly delete old stale sessions in the datastore (see # CLEAN_CHECK_PERCENT variable) if random.randint(1, 100) < clean_check_percent: self._clean_old_sessions() def new_sid(self): """ Create a new session id. """ sid = str(self.session.session_key) + "_" +md5.new(repr(time.time()) + \ str(random.random())).hexdigest() return sid ''' # removed as model now has get_session classmethod def _get_session(self): """ Get the user's session from the datastore """ query = _AppEngineUtilities_Session.all() query.filter('sid', self.sid) if self.check_user_agent: query.filter('ua', os.environ['HTTP_USER_AGENT']) if self.check_ip: query.filter('ip', os.environ['REMOTE_ADDR']) results = query.fetch(1) if len(results) is 0: return None else: sessionAge = datetime.datetime.now() - results[0].last_activity if sessionAge.seconds > self.session_expire_time: results[0].delete() return None return results[0] ''' def _get(self, keyname=None): """ Return all of the SessionData object data from the datastore onlye, unless keyname is specified, in which case only that instance of SessionData is returned. Important: This does not interact with memcache and pulls directly from the datastore. This also does not get items from the cookie store. Args: keyname: The keyname of the value you are trying to retrieve. """ if keyname != None: return self.session.get_item(keyname) return self.session.get_items() """ OLD query = _AppEngineUtilities_SessionData.all() query.filter('session', self.session) if keyname != None: query.filter('keyname =', keyname) results = query.fetch(1000) if len(results) is 0: return None if keyname != None: return results[0] return results """ def _validate_key(self, keyname): """ Validate the keyname, making sure it is set and not a reserved name. 
""" if keyname is None: raise ValueError('You must pass a keyname for the session' + \ ' data content.') elif keyname in ('sid', 'flash'): raise ValueError(keyname + ' is a reserved keyname.') if type(keyname) != type([str, unicode]): return str(keyname) return keyname def _put(self, keyname, value): """ Insert a keyname/value pair into the datastore for the session. Args: keyname: The keyname of the mapping. value: The value of the mapping. """ if self.writer == "datastore": writer = _DatastoreWriter() else: writer = _CookieWriter() writer.put(keyname, value, self) def _delete_session(self): """ Delete the session and all session data. """ if hasattr(self, "session"): self.session.delete() self.cookie_vals = {} self.cache = {} self.output_cookie[self.cookie_name + '_data'] = \ simplejson.dumps(self.cookie_vals) print self.output_cookie.output() """ OLD if hasattr(self, "session"): sessiondata = self._get() # delete from datastore if sessiondata is not None: for sd in sessiondata: sd.delete() # delete from memcache memcache.delete('sid-'+str(self.session.key())) # delete the session now that all items that reference it are deleted. self.session.delete() # unset any cookie values that may exist self.cookie_vals = {} self.cache = {} self.output_cookie[self.cookie_name + '_data'] = \ simplejson.dumps(self.cookie_vals) print self.output_cookie.output() """ # if the event class has been loaded, fire off the sessionDeleted event if 'AEU_Events' in __main__.__dict__: __main__.AEU_Events.fire_event('sessionDelete') def delete(self): """ Delete the current session and start a new one. This is useful for when you need to get rid of all data tied to a current session, such as when you are logging out a user. """ self._delete_session() @classmethod def delete_all_sessions(cls): """ Deletes all sessions and session data from the data store and memcache: NOTE: This is not fully developed. It also will not delete any cookie data as this does not work for each incoming request. 
Keep this in mind if you are using the cookie writer. """ all_sessions_deleted = False all_data_deleted = False while not all_sessions_deleted: query = _AppEngineUtilities_Session.all() results = query.fetch(75) if len(results) is 0: all_sessions_deleted = True else: for result in results: result.delete() def _clean_old_sessions(self): """ Delete expired sessions from the datastore. This is only called for CLEAN_CHECK_PERCENT percent of requests because it could be rather intensive. """ duration = datetime.timedelta(seconds=self.session_expire_time) session_age = datetime.datetime.now() - duration query = _AppEngineUtilities_Session.all() query.filter('last_activity <', session_age) results = query.fetch(50) for result in results: """ OLD data_query = _AppEngineUtilities_SessionData.all() data_query.filter('session', result) data_results = data_query.fetch(1000) for data_result in data_results: data_result.delete() memcache.delete('sid-'+str(result.key())) """ result.delete() # Implement Python container methods def __getitem__(self, keyname): """ Get item from session data. keyname: The keyname of the mapping. """ # flash messages don't go in the datastore if self.integrate_flash and (keyname == 'flash'): return self.flash.msg if keyname in self.cache: # UNPICKLING CACHE return pickle.loads(str(self.cache[keyname])) return self.cache[keyname] if keyname in self.cookie_vals: return self.cookie_vals[keyname] if hasattr(self, "session"): data = self._get(keyname) if data: #UNPICKLING CACHE self.cache[keyname] = data.content self.cache[keyname] = pickle.loads(data.content) return pickle.loads(data.content) else: raise KeyError(str(keyname)) raise KeyError(str(keyname)) def __setitem__(self, keyname, value): """ Set item in session data. Args: keyname: They keyname of the mapping. value: The value of mapping. 
""" if self.integrate_flash and (keyname == 'flash'): self.flash.msg = value else: keyname = self._validate_key(keyname) self.cache[keyname] = value # self._set_memcache() # commented out because this is done in the datestore put return self._put(keyname, value) def delete_item(self, keyname, throw_exception=False): """ Delete item from session data, ignoring exceptions if necessary. Args: keyname: The keyname of the object to delete. throw_exception: false if exceptions are to be ignored. Returns: Nothing. """ if throw_exception: self.__delitem__(keyname) return None else: try: self.__delitem__(keyname) except KeyError: return None def __delitem__(self, keyname): """ Delete item from session data. Args: keyname: The keyname of the object to delete. """ bad_key = False sessdata = self._get(keyname = keyname) if sessdata is None: bad_key = True else: sessdata.delete() if keyname in self.cookie_vals: del self.cookie_vals[keyname] bad_key = False self.output_cookie[self.cookie_name + '_data'] = \ simplejson.dumps(self.cookie_vals) print self.output_cookie.output() if bad_key: raise KeyError(str(keyname)) if keyname in self.cache: del self.cache[keyname] def __len__(self): """ Return size of session. """ # check memcache first if hasattr(self, "session"): results = self._get() if results is not None: return len(results) + len(self.cookie_vals) else: return 0 return len(self.cookie_vals) def __contains__(self, keyname): """ Check if an item is in the session data. Args: keyname: The keyname being searched. """ try: r = self.__getitem__(keyname) except KeyError: return False return True def __iter__(self): """ Iterate over the keys in the session data. """ # try memcache first if hasattr(self, "session"): for k in self._get(): yield k.keyname for k in self.cookie_vals: yield k def __str__(self): """ Return string representation. 
""" #if self._get(): return '{' + ', '.join(['"%s" = "%s"' % (k, self[k]) for k in self]) + '}' #else: # return [] ''' OLD def _set_memcache(self): """ Set a memcache object with all the session data. Optionally you can add a key and value to the memcache for put operations. """ # Pull directly from the datastore in order to ensure that the # information is as up to date as possible. if self.writer == "datastore": data = {} sessiondata = self._get() if sessiondata is not None: for sd in sessiondata: data[sd.keyname] = pickle.loads(sd.content) memcache.set('sid-'+str(self.session.key()), data, \ self.session_expire_time) ''' def cycle_key(self): """ Changes the session id. """ self.sid = self.new_sid() if len(self.session.sid) > 2: self.session.sid.remove(self.session.sid[0]) self.session.sid.append(self.sid) def flush(self): """ Delete's the current session, creating a new one. """ self._delete_session() self.__init__() def no_cache_headers(self): """ Adds headers, avoiding any page caching in the browser. Useful for highly dynamic sites. """ print "Expires: Tue, 03 Jul 2001 06:00:00 GMT" print strftime("Last-Modified: %a, %d %b %y %H:%M:%S %Z") print "Cache-Control: no-store, no-cache, must-revalidate, max-age=0" print "Cache-Control: post-check=0, pre-check=0" print "Pragma: no-cache" def clear(self): """ Remove all items """ sessiondata = self._get() # delete from datastore if sessiondata is not None: for sd in sessiondata: sd.delete() # delete from memcache self.cache = {} self.cookie_vals = {} self.output_cookie[self.cookie_name + '_data'] = \ simplejson.dumps(self.cookie_vals) print self.output_cookie.output() def has_key(self, keyname): """ Equivalent to k in a, use that form in new code """ return self.__contains__(keyname) def items(self): """ A copy of list of (key, value) pairs """ op = {} for k in self: op[k] = self[k] return op def keys(self): """ List of keys. 
""" l = [] for k in self: l.append(k) return l def update(*dicts): """ Updates with key/value pairs from b, overwriting existing keys, returns None """ for dict in dicts: for k in dict: self._put(k, dict[k]) return None def values(self): """ A copy list of values. """ v = [] for k in self: v.append(self[k]) return v def get(self, keyname, default = None): """ a[k] if k in a, else x """ try: return self.__getitem__(keyname) except KeyError: if default is not None: return default return None def setdefault(self, keyname, default = None): """ a[k] if k in a, else x (also setting it) """ try: return self.__getitem__(keyname) except KeyError: if default is not None: self.__setitem__(keyname, default) return default return None @classmethod def check_token(cls, cookie_name=COOKIE_NAME, delete_invalid=True): """ Retrieves the token from a cookie and validates that it is a valid token for an existing cookie. Cookie validation is based on the token existing on a session that has not expired. This is useful for determining if datastore or cookie writer should be used in hybrid implementations. Args: cookie_name: Name of the cookie to check for a token. delete_invalid: If the token is not valid, delete the session cookie, to avoid datastore queries on future requests. Returns True/False NOTE: TODO This currently only works when the datastore is working, which of course is pointless for applications using the django middleware. This needs to be resolved before merging back into the main project. """ string_cookie = os.environ.get('HTTP_COOKIE', '') cookie = Cookie.SimpleCookie() cookie.load(string_cookie) if cookie.has_key(cookie_name): query = _AppEngineUtilities_Session.all() query.filter('sid', cookie[cookie_name].value) results = query.fetch(1) if len(results) > 0: return True else: if delete_invalid: output_cookie = Cookie.SimpleCookie() output_cookie[cookie_name] = cookie[cookie_name] output_cookie[cookie_name]['expires'] = 0 print output_cookie.output() return False
Python
""" Copyright (c) 2008, appengine-utilities project All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - Neither the name of the appengine-utilities project nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """ from google.appengine.ext import db class ROTModel(db.Model): """ ROTModel overrides the db.Model put function, having it retry up to 3 times when it encounters a datastore timeout. This is to try an maximize the chance the data makes it into the datastore when attempted. If it fails, it raises the db.Timeout error and the calling application will need to handle that. 
""" def put(self): count = 0 while count < 3: try: return db.Model.put(self) except db.Timeout: count += 1 else: raise db.Timeout()
Python
""" Copyright (c) 2008, appengine-utilities project All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - Neither the name of the appengine-utilities project nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """ import __main__ class Event(object): """ Event is a simple publish/subscribe based event dispatcher It sets itself to the __main__ function. In order to use it, you must import it and __main__ """ def __init__(self): self.events = [] def subscribe(self, event, callback, args = None): """ This method will subscribe a callback function to an event name. 
""" if not {"event": event, "callback": callback, "args": args, } \ in self.events: self.events.append({"event": event, "callback": callback, \ "args": args, }) def unsubscribe(self, event, callback, args = None): """ This method will unsubscribe a callback from an event. """ if {"event": event, "callback": callback, "args": args, }\ in self.events: self.events.remove({"event": event, "callback": callback,\ "args": args, }) def fire_event(self, event = None): """ This method is what a method uses to fire an event, initiating all registered callbacks """ for e in self.events: if e["event"] == event: if type(e["args"]) == type([]): e["callback"](*e["args"]) elif type(e["args"]) == type({}): e["callback"](**e["args"]) elif e["args"] == None: e["callback"]() else: e["callback"](e["args"]) """ Assign to the event class to __main__ """ __main__.AEU_Events = Event()
Python
""" Copyright (c) 2008, appengine-utilities project All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - Neither the name of the appengine-utilities project nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """ import os import sys import Cookie import pickle from time import strftime from django.utils import simplejson COOKIE_NAME = 'appengine-utilities-flash' class Flash(object): """ Send messages to the user between pages. When you instantiate the class, the attribute 'msg' will be set from the cookie, and the cookie will be deleted. If there is no flash cookie, 'msg' will default to None. To set a flash message for the next page, simply set the 'msg' attribute. 
Example psuedocode: if new_entity.put(): flash = Flash() flash.msg = 'Your new entity has been created!' return redirect_to_entity_list() Then in the template on the next page: {% if flash.msg %} <div class="flash-msg">{{ flash.msg }}</div> {% endif %} """ def __init__(self, cookie=None): """ Load the flash message and clear the cookie. """ self.no_cache_headers() # load cookie if cookie is None: browser_cookie = os.environ.get('HTTP_COOKIE', '') self.cookie = Cookie.SimpleCookie() self.cookie.load(browser_cookie) else: self.cookie = cookie # check for flash data if self.cookie.get(COOKIE_NAME): # set 'msg' attribute cookie_val = self.cookie[COOKIE_NAME].value # we don't want to trigger __setattr__(), which creates a cookie try: self.__dict__['msg'] = simplejson.loads(cookie_val) except: # not able to load the json, so do not set message. This should # catch for when the browser doesn't delete the cookie in time for # the next request, and only blanks out the content. pass # clear the cookie self.cookie[COOKIE_NAME] = '' self.cookie[COOKIE_NAME]['path'] = '/' self.cookie[COOKIE_NAME]['expires'] = 0 print self.cookie[COOKIE_NAME] else: # default 'msg' attribute to None self.__dict__['msg'] = None def __setattr__(self, name, value): """ Create a cookie when setting the 'msg' attribute. """ if name == 'cookie': self.__dict__['cookie'] = value elif name == 'msg': self.__dict__['msg'] = value self.__dict__['cookie'][COOKIE_NAME] = simplejson.dumps(value) self.__dict__['cookie'][COOKIE_NAME]['path'] = '/' print self.cookie else: raise ValueError('You can only set the "msg" attribute.') def no_cache_headers(self): """ Adds headers, avoiding any page caching in the browser. Useful for highly dynamic sites. """ print "Expires: Tue, 03 Jul 2001 06:00:00 GMT" print strftime("Last-Modified: %a, %d %b %y %H:%M:%S %Z") print "Cache-Control: no-store, no-cache, must-revalidate, max-age=0" print "Cache-Control: post-check=0, pre-check=0" print "Pragma: no-cache"
Python
""" Copyright (c) 2008, appengine-utilities project All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - Neither the name of the appengine-utilities project nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """ import os import cgi import re import datetime import pickle from google.appengine.ext import db from google.appengine.api import urlfetch from google.appengine.api import memcache APPLICATION_PORT = '8080' CRON_PORT = '8081' class _AppEngineUtilities_Cron(db.Model): """ Model for the tasks in the datastore. This contains the scheduling and url information, as well as a field that sets the next time the instance should run. 
""" cron_entry = db.StringProperty() next_run = db.DateTimeProperty() cron_compiled = db.BlobProperty() url = db.LinkProperty() class Cron(object): """ Cron is a scheduling utility built for appengine, modeled after crontab for unix systems. While true scheduled tasks are not possible within the Appengine environment currently, this is an attmempt to provide a request based alternate. You configure the tasks in an included interface, and the import the class on any request you want capable of running tasks. On each request where Cron is imported, the list of tasks that need to be run will be pulled and run. A task is a url within your application. It's important to make sure that these requests fun quickly, or you could risk timing out the actual request. See the documentation for more information on configuring your application to support Cron and setting up tasks. """ def __init__(self): # Check if any tasks need to be run query = _AppEngineUtilities_Cron.all() query.filter('next_run <= ', datetime.datetime.now()) results = query.fetch(1000) if len(results) > 0: one_second = datetime.timedelta(seconds = 1) before = datetime.datetime.now() for r in results: if re.search(':' + APPLICATION_PORT, r.url): r.url = re.sub(':' + APPLICATION_PORT, ':' + CRON_PORT, r.url) #result = urlfetch.fetch(r.url) diff = datetime.datetime.now() - before if int(diff.seconds) < 1: if memcache.add(str(r.key), "running"): result = urlfetch.fetch(r.url) r.next_run = self._get_next_run(pickle.loads(r.cron_compiled)) r.put() memcache.delete(str(r.key)) else: break def add_cron(self, cron_string): cron = cron_string.split(" ") if len(cron) is not 6: raise ValueError, 'Invalid cron string. 
Format: * * * * * url' cron = { 'min': cron[0], 'hour': cron[1], 'day': cron[2], 'mon': cron[3], 'dow': cron[4], 'url': cron[5], } cron_compiled = self._validate_cron(cron) next_run = self._get_next_run(cron_compiled) cron_entry = _AppEngineUtilities_Cron() cron_entry.cron_entry = cron_string cron_entry.next_run = next_run cron_entry.cron_compiled = pickle.dumps(cron_compiled) cron_entry.url = cron["url"] cron_entry.put() def _validate_cron(self, cron): """ Parse the field to determine whether it is an integer or lists, also converting strings to integers where necessary. If passed bad values, raises a ValueError. """ parsers = { 'dow': self._validate_dow, 'mon': self._validate_mon, 'day': self._validate_day, 'hour': self._validate_hour, 'min': self._validate_min, 'url': self. _validate_url, } for el in cron: parse = parsers[el] cron[el] = parse(cron[el]) return cron def _validate_type(self, v, t): """ Validates that the number (v) passed is in the correct range for the type (t). Raise ValueError, if validation fails. Valid ranges: day of week = 0-7 month = 1-12 day = 1-31 hour = 0-23 minute = 0-59 All can * which will then return the range for that entire type. """ if t == "dow": if v >= 0 and v <= 7: return [v] elif v == "*": return "*" else: raise ValueError, "Invalid day of week." elif t == "mon": if v >= 1 and v <= 12: return [v] elif v == "*": return range(1, 12) else: raise ValueError, "Invalid month." elif t == "day": if v >= 1 and v <= 31: return [v] elif v == "*": return range(1, 31) else: raise ValueError, "Invalid day." elif t == "hour": if v >= 0 and v <= 23: return [v] elif v == "*": return range(0, 23) else: raise ValueError, "Invalid hour." elif t == "min": if v >= 0 and v <= 59: return [v] elif v == "*": return range(0, 59) else: raise ValueError, "Invalid minute." def _validate_list(self, l, t): """ Validates a crontab list. Lists are numerical values seperated by a comma with no spaces. 
Ex: 0,5,10,15 Arguments: l: comma seperated list of numbers t: type used for validation, valid values are dow, mon, day, hour, min """ elements = l.split(",") return_list = [] # we have a list, validate all of them for e in elements: if "-" in e: return_list.extend(self._validate_range(e, t)) else: try: v = int(e) self._validate_type(v, t) return_list.append(v) except: raise ValueError, "Names are not allowed in lists." # return a list of integers return return_list def _validate_range(self, r, t): """ Validates a crontab range. Ranges are 2 numerical values seperated by a dash with no spaces. Ex: 0-10 Arguments: r: dash seperated list of 2 numbers t: type used for validation, valid values are dow, mon, day, hour, min """ elements = r.split('-') # a range should be 2 elements if len(elements) is not 2: raise ValueError, "Invalid range passed: " + str(r) # validate the minimum and maximum are valid for the type for e in elements: self._validate_type(int(e), t) # return a list of the numbers in the range. # +1 makes sure the end point is included in the return value return range(int(elements[0]), int(elements[1]) + 1) def _validate_step(self, s, t): """ Validates a crontab step. Steps are complicated. They can be based on a range 1-10/2 or just step through all valid */2. When parsing times you should always check for step first and see if it has a range or not, before checking for ranges because this will handle steps of ranges returning the final list. Steps of lists is not supported. 
Arguments: s: slash seperated string t: type used for validation, valid values are dow, mon, day, hour, min """ elements = s.split('/') # a range should be 2 elements if len(elements) is not 2: raise ValueError, "Invalid step passed: " + str(s) try: step = int(elements[1]) except: raise ValueError, "Invalid step provided " + str(s) r_list = [] # if the first element is *, use all valid numbers if elements[0] is "*" or elements[0] is "": r_list.extend(self._validate_type('*', t)) # check and see if there is a list of ranges elif "," in elements[0]: ranges = elements[0].split(",") for r in ranges: # if it's a range, we need to manage that if "-" in r: r_list.extend(self._validate_range(r, t)) else: try: r_list.extend(int(r)) except: raise ValueError, "Invalid step provided " + str(s) elif "-" in elements[0]: r_list.extend(self._validate_range(elements[0], t)) return range(r_list[0], r_list[-1] + 1, step) def _validate_dow(self, dow): """ """ # if dow is * return it. This is for date parsing where * does not mean # every day for crontab entries. if dow is "*": return dow days = { 'mon': 1, 'tue': 2, 'wed': 3, 'thu': 4, 'fri': 5, 'sat': 6, # per man crontab sunday can be 0 or 7. 'sun': [0, 7], } if dow in days: dow = days[dow] return [dow] # if dow is * return it. This is for date parsing where * does not mean # every day for crontab entries. 
elif dow is "*": return dow elif "/" in dow: return(self._validate_step(dow, "dow")) elif "," in dow: return(self._validate_list(dow, "dow")) elif "-" in dow: return(self._validate_range(dow, "dow")) else: valid_numbers = range(0, 8) if not int(dow) in valid_numbers: raise ValueError, "Invalid day of week " + str(dow) else: return [int(dow)] def _validate_mon(self, mon): months = { 'jan': 1, 'feb': 2, 'mar': 3, 'apr': 4, 'may': 5, 'jun': 6, 'jul': 7, 'aug': 8, 'sep': 9, 'oct': 10, 'nov': 11, 'dec': 12, } if mon in months: mon = months[mon] return [mon] elif mon is "*": return range(1, 13) elif "/" in mon: return(self._validate_step(mon, "mon")) elif "," in mon: return(self._validate_list(mon, "mon")) elif "-" in mon: return(self._validate_range(mon, "mon")) else: valid_numbers = range(1, 13) if not int(mon) in valid_numbers: raise ValueError, "Invalid month " + str(mon) else: return [int(mon)] def _validate_day(self, day): if day is "*": return range(1, 32) elif "/" in day: return(self._validate_step(day, "day")) elif "," in day: return(self._validate_list(day, "day")) elif "-" in day: return(self._validate_range(day, "day")) else: valid_numbers = range(1, 31) if not int(day) in valid_numbers: raise ValueError, "Invalid day " + str(day) else: return [int(day)] def _validate_hour(self, hour): if hour is "*": return range(0, 24) elif "/" in hour: return(self._validate_step(hour, "hour")) elif "," in hour: return(self._validate_list(hour, "hour")) elif "-" in hour: return(self._validate_range(hour, "hour")) else: valid_numbers = range(0, 23) if not int(hour) in valid_numbers: raise ValueError, "Invalid hour " + str(hour) else: return [int(hour)] def _validate_min(self, min): if min is "*": return range(0, 60) elif "/" in min: return(self._validate_step(min, "min")) elif "," in min: return(self._validate_list(min, "min")) elif "-" in min: return(self._validate_range(min, "min")) else: valid_numbers = range(0, 59) if not int(min) in valid_numbers: raise ValueError, 
"Invalid min " + str(min) else: return [int(min)] def _validate_url(self, url): # kludge for issue 842, right now we use request headers # to set the host. if url[0] is not "/": url = "/" + url url = 'http://' + str(os.environ['HTTP_HOST']) + url return url # content below is for when that issue gets fixed #regex = re.compile("^(http|https):\/\/([a-z0-9-]\.+)*", re.IGNORECASE) #if regex.match(url) is not None: # return url #else: # raise ValueError, "Invalid url " + url def _calc_month(self, next_run, cron): while True: if cron["mon"][-1] < next_run.month: next_run = next_run.replace(year=next_run.year+1, \ month=cron["mon"][0], \ day=1,hour=0,minute=0) else: if next_run.month in cron["mon"]: return next_run else: one_month = datetime.timedelta(months=1) next_run = next_run + one_month def _calc_day(self, next_run, cron): # start with dow as per cron if dow and day are set # then dow is used if it comes before day. If dow # is *, then ignore it. if str(cron["dow"]) != str("*"): # convert any integers to lists in order to easily compare values m = next_run.month while True: if next_run.month is not m: next_run = next_run.replace(hour=0, minute=0) next_run = self._calc_month(next_run, cron) if next_run.weekday() in cron["dow"] or next_run.day in cron["day"]: return next_run else: one_day = datetime.timedelta(days=1) next_run = next_run + one_day else: m = next_run.month while True: if next_run.month is not m: next_run = next_run.replace(hour=0, minute=0) next_run = self._calc_month(next_run, cron) # if cron["dow"] is next_run.weekday() or cron["day"] is next_run.day: if next_run.day in cron["day"]: return next_run else: one_day = datetime.timedelta(days=1) next_run = next_run + one_day def _calc_hour(self, next_run, cron): m = next_run.month d = next_run.day while True: if next_run.month is not m: next_run = next_run.replace(hour=0, minute=0) next_run = self._calc_month(next_run, cron) if next_run.day is not d: next_run = next_run.replace(hour=0) next_run = 
self._calc_day(next_run, cron) if next_run.hour in cron["hour"]: return next_run else: m = next_run.month d = next_run.day one_hour = datetime.timedelta(hours=1) next_run = next_run + one_hour def _calc_minute(self, next_run, cron): one_minute = datetime.timedelta(minutes=1) m = next_run.month d = next_run.day h = next_run.hour while True: if next_run.month is not m: next_run = next_run.replace(minute=0) next_run = self._calc_month(next_run, cron) if next_run.day is not d: next_run = next_run.replace(minute=0) next_run = self._calc_day(next_run, cron) if next_run.hour is not h: next_run = next_run.replace(minute=0) next_run = self._calc_day(next_run, cron) if next_run.minute in cron["min"]: return next_run else: m = next_run.month d = next_run.day h = next_run.hour next_run = next_run + one_minute def _get_next_run(self, cron): one_minute = datetime.timedelta(minutes=1) # go up 1 minute because it shouldn't happen right when added now = datetime.datetime.now() + one_minute next_run = now.replace(second=0, microsecond=0) # start with month, which will also help calculate year next_run = self._calc_month(next_run, cron) next_run = self._calc_day(next_run, cron) next_run = self._calc_hour(next_run, cron) next_run = self._calc_minute(next_run, cron) return next_run
Python
#!/usr/bin/python # # Copyright (C) 2009 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. __author__ = 'e.bidelman (Eric Bidelman)' import cgi import os import gdata.auth import gdata.docs import gdata.docs.service import gdata.alt.appengine from appengine_utilities.sessions import Session from django.utils import simplejson from google.appengine.api import users from google.appengine.ext import webapp from google.appengine.ext.webapp import template from google.appengine.ext.webapp.util import run_wsgi_app SETTINGS = { 'APP_NAME': 'google-GDataOAuthAppEngine-v1', 'CONSUMER_KEY': 'YOUR_CONSUMER_KEY', 'SIG_METHOD': gdata.auth.OAuthSignatureMethod.RSA_SHA1, 'SCOPES': ['http://docs.google.com/feeds/', 'https://docs.google.com/feeds/'] } f = open('/path/to/your/rsa_private_key.pem') RSA_KEY = f.read() f.close() gdocs = gdata.docs.service.DocsService(source=SETTINGS['APP_NAME']) gdocs.SetOAuthInputParameters(SETTINGS['SIG_METHOD'], SETTINGS['CONSUMER_KEY'], rsa_key=RSA_KEY) gdata.alt.appengine.run_on_appengine(gdocs) class MainPage(webapp.RequestHandler): """Main page displayed to user.""" # GET / def get(self): if not users.get_current_user(): self.redirect(users.create_login_url(self.request.uri)) access_token = gdocs.token_store.find_token('%20'.join(SETTINGS['SCOPES'])) if isinstance(access_token, gdata.auth.OAuthToken): form_action = '/fetch_data' form_value = 'Now fetch my docs!' 
revoke_token_link = True else: form_action = '/get_oauth_token' form_value = 'Give this website access to my Google Docs' revoke_token_link = None template_values = { 'form_action': form_action, 'form_value': form_value, 'user': users.get_current_user(), 'revoke_token_link': revoke_token_link, 'oauth_token': access_token, 'consumer': gdocs.GetOAuthInputParameters().GetConsumer(), 'sig_method': gdocs.GetOAuthInputParameters().GetSignatureMethod().get_name() } path = os.path.join(os.path.dirname(__file__), 'index.html') self.response.out.write(template.render(path, template_values)) class OAuthDance(webapp.RequestHandler): """Handler for the 3 legged OAuth dance, v1.0a.""" """This handler is responsible for fetching an initial OAuth request token, redirecting the user to the approval page. When the user grants access, they will be redirected back to this GET handler and their authorized request token will be exchanged for a long-lived access token.""" # GET /get_oauth_token def get(self): """Invoked after we're redirected back from the approval page.""" self.session = Session() oauth_token = gdata.auth.OAuthTokenFromUrl(self.request.uri) if oauth_token: oauth_token.oauth_input_params = gdocs.GetOAuthInputParameters() gdocs.SetOAuthToken(oauth_token) # 3.) Exchange the authorized request token for an access token oauth_verifier = self.request.get('oauth_verifier', default_value='') access_token = gdocs.UpgradeToOAuthAccessToken( oauth_verifier=oauth_verifier) # Remember the access token in the current user's token store if access_token and users.get_current_user(): gdocs.token_store.add_token(access_token) elif access_token: gdocs.current_token = access_token gdocs.SetOAuthToken(access_token) self.redirect('/') # POST /get_oauth_token def post(self): """Fetches a request token and redirects the user to the approval page.""" self.session = Session() if users.get_current_user(): # 1.) REQUEST TOKEN STEP. 
Provide the data scope(s) and the page we'll # be redirected back to after the user grants access on the approval page. req_token = gdocs.FetchOAuthRequestToken( scopes=SETTINGS['SCOPES'], oauth_callback=self.request.uri) # Generate the URL to redirect the user to. Add the hd paramter for a # better user experience. Leaving it off will give the user the choice # of what account (Google vs. Google Apps) to login with. domain = self.request.get('domain', default_value='default') approval_page_url = gdocs.GenerateOAuthAuthorizationURL( extra_params={'hd': domain}) # 2.) APPROVAL STEP. Redirect to user to Google's OAuth approval page. self.redirect(approval_page_url) class FetchData(OAuthDance): """Fetches the user's data.""" """This class inherits from OAuthDance in order to utilize OAuthDance.post() in case of a request error (e.g. the user has a bad token).""" # GET /fetch_data def get(self): self.redirect('/') # POST /fetch_data def post(self): """Fetches the user's data.""" try: feed = gdocs.GetDocumentListFeed() json = [] for entry in feed.entry: if entry.lastModifiedBy is not None: last_modified_by = entry.lastModifiedBy.email.text else: last_modified_by = '' if entry.lastViewed is not None: last_viewed = entry.lastViewed.text else: last_viewed = '' json.append({'title': entry.title.text, 'links': {'alternate': entry.GetHtmlLink().href}, 'published': entry.published.text, 'updated': entry.updated.text, 'resourceId': entry.resourceId.text, 'type': entry.GetDocumentType(), 'lastModifiedBy': last_modified_by, 'lastViewed': last_viewed }) self.response.out.write(simplejson.dumps(json)) except gdata.service.RequestError, error: OAuthDance.post(self) class RevokeToken(webapp.RequestHandler): # GET /revoke_token def get(self): """Revokes the current user's OAuth access token.""" try: gdocs.RevokeOAuthToken() except gdata.service.RevokingOAuthTokenFailed: pass gdocs.token_store.remove_all_tokens() self.redirect('/') def main(): application = 
webapp.WSGIApplication([('/', MainPage), ('/get_oauth_token', OAuthDance), ('/fetch_data', FetchData), ('/revoke_token', RevokeToken)], debug=True) run_wsgi_app(application)
Python
#!/usr/bin/python # # Copyright (C) 2007 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import gdata.base.service import gdata.service try: from xml.etree import ElementTree except ImportError: from elementtree import ElementTree import atom import gdata.base import getpass # Demonstrates item insertion with a dry run insert operation. The item will # NOT be added to Google Base. gb_client = gdata.base.service.GBaseService() gb_client.email = raw_input('Please enter your username: ') gb_client.password = getpass.getpass() print 'Logging in' gb_client.ProgrammaticLogin() # Create a test item which will be used in a dry run insert item = gdata.base.GBaseItem() item.author.append(atom.Author(name=atom.Name(text='Mr. Smith'))) item.title = atom.Title(text='He Jingxian\'s chicken') item.link.append(atom.Link(rel='alternate', link_type='text/html', href='http://www.host.com/123456jsh9')) item.label.append(gdata.base.Label(text='kung pao chicken')) item.label.append(gdata.base.Label(text='chinese cuisine')) item.label.append(gdata.base.Label(text='testrecipes')) item.item_type = gdata.base.ItemType(text='recipes') item.AddItemAttribute(name='cooking_time', value_type='intUnit', value='30 minutes') item.AddItemAttribute(name='main_ingredient', value='chicken') item.AddItemAttribute(name='main_ingredient', value='chili') # Make an insert request with the dry run flag set so that the item will not # actually be created. 
result = gb_client.InsertItem(item, url_params={'dry-run': 'true'}) # Send the XML from the server to standard out. print 'Here\'s the XML from the server\'s simulated insert' print str(result) print 'Done'
Python
#!/usr/bin/python # # Copyright (C) 2007 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import gdata.base.service import gdata.service try: from xml.etree import ElementTree except ImportError: from elementtree import ElementTree import atom import gdata.base # Demonstrates queries to the snippets feed and stepping through the results. gb_client = gdata.base.service.GBaseService() q = gdata.base.service.BaseQuery() q.feed = '/base/feeds/snippets' q['start-index'] = '1' q['max-results'] = '10' q.bq = raw_input('Please enter your Google Base query: ') feed = gb_client.QuerySnippetsFeed(q.ToUri()) while(int(q['start-index']) < 989): # Display the titles of the snippets. print 'Snippet query results items %s to %s' % (q['start-index'], int(q['start-index'])+10) for entry in feed.entry: print ' ', entry.title.text # Show the next 10 results from the snippets feed when the user presses # enter. nothing = raw_input('Press enter to see the next 10 results') q['start-index'] = str(int(q['start-index']) + 10) feed = gb_client.QuerySnippetsFeed(q.ToUri()) print 'You\'ve reached the upper limit of 1000 items. Goodbye :)'
Python
#!/usr/bin/python # # Copyright (C) 2007 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import gdata.base.service import gdata.service try: from xml.etree import ElementTree except ImportError: from elementtree import ElementTree import atom import gdata.base # Demonstrates queries to the attributes feed gb_client = gdata.base.service.GBaseService() q = gdata.base.service.BaseQuery() q.feed = '/base/feeds/attributes' q.bq = raw_input('Please item type to query for (ex: housing): ') print q.ToUri() feed = gb_client.QueryAttributesFeed(q.ToUri()) print feed.title.text for entry in feed.entry: for attr in entry.attribute: display_str = 'attribute name:%s, type:%s' % (attr.name, attr.type) values = '' for value in attr.value: values += '(' + value.text + ',' + value.count + ')' if values != '': display_str += ', values: %s' % values print ' ' + display_str
Python
#!/usr/bin/python # # Copyright (C) 2007 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import gdata.base.service import gdata.service try: from xml.etree import ElementTree except ImportError: from elementtree import ElementTree import atom import gdata.base # Demonstrates queries to the itemtypes feed for specified locale. gb_client = gdata.base.service.GBaseService() locale = raw_input('Please enter locale (ex: en_US): ') q = gdata.base.service.BaseQuery() q.feed = '/base/feeds/itemtypes/%s' % locale print q.ToUri() feed = gb_client.QueryItemTypesFeed(q.ToUri()) print feed.title.text for entry in feed.entry: print '\t' + entry.title.text for attr in entry.attributes.attribute: print '\t\tAttr name:%s, type:%s' % (attr.name, attr.type)
Python
#!/usr/bin/python
#
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Inserts a single product via the Shopping API for Content."""

import getpass

from gdata.contentforshopping.data import build_entry
from gdata.contentforshopping.client import ContentForShoppingClient

# Gather merchant information interactively.
account_id = raw_input('Merchant Account ID? ').strip()
email = raw_input('Google Email Address? ').strip()

# Create a client bound to the merchant account and log in.
client = ContentForShoppingClient(account_id)
client.client_login(email, getpass.getpass('Google Password? '),
                    'Shopping API for Content sample', 'structuredcontent')

# Describe the product; build_entry turns these fields into an entry.
product_fields = {
    'product_id': 'ipod2',
    'target_country': 'US',
    'content_language': 'EN',
    'title': 'iPod Nano 8GB',
    'content': 'A nice small mp3 player',
    'price': '149',
    'price_unit': 'USD',
    'shipping_price': '5',
    'shipping_price_unit': 'USD',
    'tax_rate': '17.5',
    'condition': 'new',
    'link': 'http://pseudoscience.co.uk/google4e823e35f032f011.html',
}
product_entry = build_entry(**product_fields)

# Post the new product to the service.
client.insert_product(product_entry)
Python
#!/usr/bin/python
#
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Inserts a batch of products (one per color) via the Shopping API."""

import getpass

from gdata.contentforshopping.data import build_entry
from gdata.contentforshopping.client import ContentForShoppingClient


def _build_ipod_entry(color):
  """Returns a product entry for an 8GB iPod Nano in the given color."""
  return build_entry(
      product_id='ipod%s' % color,
      target_country='US',
      content_language='EN',
      title='iPod Nano 8GB, %s' % color,
      content='A nice small mp3 player, in %s' % color,
      price='149',
      price_unit='USD',
      shipping_price='5',
      shipping_price_unit='USD',
      tax_rate='17.5',
      condition='new',
      link='http://pseudoscience.co.uk/google4e823e35f032f011.html',
      color=color,
  )


# Gather merchant information interactively.
account_id = raw_input('Merchant Account ID? ').strip()
email = raw_input('Google Email Address? ').strip()

# Create a client bound to the merchant account and log in.
client = ContentForShoppingClient(account_id)
client.client_login(email, getpass.getpass('Google Password? '),
                    'Shopping API for Content sample', 'structuredcontent')

# Build one product entry per color variant.
products = [_build_ipod_entry(color)
            for color in ['red', 'green', 'white', 'black', 'purple',
                          'brown', 'yellow', 'orange', 'magenta']]

# Post the whole batch to the service in a single request.
client.insert_products(products)
Python
#!/usr/bin/python # # Copyright 2009 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import getpass from gdata.contentforshopping.client import ContentForShoppingClient # Gather merchant information account_id = raw_input('Merchant Account ID? ').strip() email = raw_input('Google Email Address? ').strip() # Create a client client = ContentForShoppingClient(account_id) # Perform programmatic login client.client_login(email, getpass.getpass('Google Password? '), 'Shopping API for Content sample', 'structuredcontent') # Get the products list from the products feed product_feed = client.get_products() print 'Listing: %s result(s)' % product_feed.total_results.text # Each product is an element in the feed's entry (a list) for product in product_feed.entry: print '- %s: %s' % (product.title.text, product.content.text)
Python
#!/usr/bin/python # # Copyright 2009 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import getpass from atom.data import Title from gdata.contentforshopping.client import ContentForShoppingClient from gdata.contentforshopping.data import ClientAccount, AdultContent # Gather merchant information account_id = raw_input('Merchant Account ID? ').strip() email = raw_input('Google Email Address? ').strip() # Create a client client = ContentForShoppingClient(account_id) # Perform programmatic login client.client_login(email, getpass.getpass('Google Password? '), 'Shopping API for Content sample', 'structuredcontent') # Create 10 accounts for i in range(10): client_account = ClientAccount() client_account.title = Title('Test Account %s' % (i + 1)) client_account.adult_content = AdultContent('no') # Insert the client account client.insert_client_account(client_account) # Display something to the user print i + 1, '/', 10
Python
#!/usr/bin/python # # Copyright 2009 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import getpass from gdata.contentforshopping.client import ContentForShoppingClient # Gather merchant information account_id = raw_input('Merchant Account ID? ').strip() email = raw_input('Google Email Address? ').strip() # Create a client client = ContentForShoppingClient(account_id) # Perform programmatic login client.client_login(email, getpass.getpass('Google Password? '), 'Shopping API for Content sample', 'structuredcontent') # Get the feed of client accounts client_account_feed = client.get_client_accounts() # Display the title and self link for each client account for client_account in client_account_feed.entry: print client_account.title.text, client_account.GetSelfLink().href
Python
#!/usr/bin/python # # Copyright 2008 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Sample to demonstrate using secure AuthSub in the Google Data Python client. This sample focuses on the Google Health Data API because it requires the use of secure tokens. This samples makes queries against the H9 Developer's Sandbox (https://www.google.com/h9). To run this sample: 1.) Use Apache's mod_python 2.) Run from your local webserver (e.g. http://localhost/...) 3.) You need to have entered medication data into H9 HealthAubSubHelper: Class to handle secure AuthSub tokens. GetMedicationHTML: Returns the user's medication formatted in HTML. index: Main entry point for the web app. """ __author__ = 'e.bidelman@google.com (Eric Bidelman)' import os import sys import urllib import gdata.auth import gdata.service H9_PROFILE_FEED_URL = 'https://www.google.com/h9/feeds/profile/default' class HealthAuthSubHelper(object): """A secure AuthSub helper to interact with the Google Health Data API""" H9_AUTHSUB_HANDLER = 'https://www.google.com/h9/authsub' H9_SCOPE = 'https://www.google.com/h9/feeds/' def GetNextUrl(self, req): """Computes the current URL the web app is running from. Args: req: mod_python mp_request instance to build the URL from. Returns: A string representing the web app's URL. 
""" if req.is_https(): next_url = 'https://' else: next_url = 'http://' next_url += req.hostname + req.unparsed_uri return next_url def GenerateAuthSubRequestUrl(self, next, scopes=[H9_SCOPE], secure=True, session=True, extra_params=None, include_scopes_in_next=True): """Constructs the URL to the AuthSub token handler. Args: next: string The URL AuthSub will redirect back to. Use self.GetNextUrl() to return that URL. scopes: (optional) string or list of scopes the token will be valid for. secure: (optional) boolean True if the token should be a secure one session: (optional) boolean True if the token will be exchanged for a session token. extra_params: (optional) dict of additional parameters to pass to AuthSub. include_scopes_in_next: (optional) boolean True if the scopes in the scopes should be passed to AuthSub. Returns: A string (as a URL) to use for the AuthSubRequest endpoint. """ auth_sub_url = gdata.service.GenerateAuthSubRequestUrl( next, scopes, hd='default', secure=secure, session=session, request_url=self.H9_AUTHSUB_HANDLER, include_scopes_in_next=include_scopes_in_next) if extra_params: auth_sub_url = '%s&%s' % (auth_sub_url, urllib.urlencode(extra_params)) return auth_sub_url def SetPrivateKey(self, filename): """Reads the private key from the specified file. See http://code.google.com/apis/gdata/authsub.html#Registered for\ information on how to create a RSA private key/public cert pair. Args: filename: string .pem file the key is stored in. Returns: The private key as a string. Raises: IOError: The file could not be read or does not exist. """ try: f = open(filename) rsa_private_key = f.read() f.close() except IOError, (errno, strerror): raise 'I/O error(%s): %s' % (errno, strerror) self.rsa_key = rsa_private_key return rsa_private_key def GetMedicationHTML(feed): """Prints out the user's medication to the console. Args: feed: A gdata.GDataFeed instance. Returns: An HTML formatted string containing the user's medication data. 
""" if not feed.entry: return '<b>No entries in feed</b><br>' html = [] for entry in feed.entry: try: ccr = entry.FindExtensions('ContinuityOfCareRecord')[0] body = ccr.FindChildren('Body')[0] meds = body.FindChildren('Medications')[0].FindChildren('Medication') for med in meds: name = med.FindChildren('Product')[0].FindChildren('ProductName')[0] html.append('<li>%s</li>' % name.FindChildren('Text')[0].text) except: html.append('<b>No medication data in this profile</b><br>') return '<ul>%s</ul>' % ''.join(html) def index(req): req.content_type = 'text/html' authsub = HealthAuthSubHelper() client = gdata.service.GDataService(service='weaver') current_url = authsub.GetNextUrl(req) rsa_key = authsub.SetPrivateKey('/path/to/yourRSAPrivateKey.pem') # Strip token query parameter's value from URL if it exists token = gdata.auth.extract_auth_sub_token_from_url(current_url, rsa_key=rsa_key) if not token: """STEP 1: No single use token in the URL or a saved session token. Generate the AuthSub URL to fetch a single use token.""" params = {'permission': 1} authsub_url = authsub.GenerateAuthSubRequestUrl(current_url, extra_params=params) req.write('<a href="%s">Link your Google Health Profile</a>' % authsub_url) else: """STEP 2: A single use token was extracted from the URL. Upgrade the one time token to a session token.""" req.write('<b>Single use token</b>: %s<br>' % str(token)) client.UpgradeToSessionToken(token) # calls gdata.service.SetAuthSubToken() """STEP 3: Done with AuthSub :) Save the token for subsequent requests. 
Query the Health Data API""" req.write('<b>Token info</b>: %s<br>' % client.AuthSubTokenInfo()) req.write('<b>Session token</b>: %s<br>' % client.GetAuthSubToken()) # Query the Health Data API params = {'digest': 'true', 'strict': 'true'} uri = '%s?%s' % (H9_PROFILE_FEED_URL, urllib.urlencode(params)) feed = client.GetFeed(uri) req.write('<h4>Listing medications</h4>') req.write(GetMedicationHTML(feed)) """STEP 4: Revoke the session token.""" req.write('Revoked session token') client.RevokeAuthSubToken()
Python
#!/usr/bin/python # # Copyright 2009 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Sample Google Analytics Data Export API Data Feed application. This sample demonstrates how to make requests and retrieve the important information from the Google Analytics Data Export API Data Feed. This sample requires a Google Analytics username and password and uses the Client Login authorization routine. Class DataFeedDemo: Prints all the important Data Feed informantion. """ __author__ = 'api.nickm@google.com (Nick Mihailovski)' import gdata.analytics.client import gdata.sample_util def main(): """Main function for the sample.""" demo = DataFeedDemo() demo.PrintFeedDetails() demo.PrintDataSources() demo.PrintFeedAggregates() demo.PrintSegmentInfo() demo.PrintOneEntry() demo.PrintFeedTable() class DataFeedDemo(object): """Gets data from the Data Feed. Attributes: data_feed: Google Analytics AccountList returned form the API. 
""" def __init__(self): """Inits DataFeedDemo.""" SOURCE_APP_NAME = 'Google-dataFeedDemoPython-v2' my_client = gdata.analytics.client.AnalyticsClient(source=SOURCE_APP_NAME) try: gdata.sample_util.authorize_client( my_client, service=my_client.auth_service, source=SOURCE_APP_NAME, scopes=['https://www.google.com/analytics/feeds/']) except gdata.client.BadAuthentication: exit('Invalid user credentials given.') except gdata.client.Error: exit('Login Error') table_id = gdata.sample_util.get_param( name='table_id', prompt='Please enter your Google Analytics Table id (format ga:xxxx)') # DataFeedQuery simplifies constructing API queries and uri encodes params. data_query = gdata.analytics.client.DataFeedQuery({ 'ids': table_id, 'start-date': '2008-10-01', 'end-date': '2008-10-30', 'dimensions': 'ga:source,ga:medium', 'metrics': 'ga:visits', 'sort': '-ga:visits', 'filters': 'ga:medium==referral', 'max-results': '50'}) self.feed = my_client.GetDataFeed(data_query) def PrintFeedDetails(self): """Prints important Analytics related data found at the top of the feed.""" print '\n-------- Feed Data --------' print 'Feed Title = ' + self.feed.title.text print 'Feed Id = ' + self.feed.id.text print 'Total Results Found = ' + self.feed.total_results.text print 'Start Index = ' + self.feed.start_index.text print 'Results Returned = ' + self.feed.items_per_page.text print 'Start Date = ' + self.feed.start_date.text print 'End Date = ' + self.feed.end_date.text print 'Has Sampeld Data = ' + str(self.feed.HasSampledData()) def PrintDataSources(self): """Prints data found in the data source elements. This data has information about the Google Analytics account the referenced table ID belongs to. Note there is currently exactly one data source in the data feed. 
""" data_source = self.feed.data_source[0] print '\n-------- Data Source Data --------' print 'Table ID = ' + data_source.table_id.text print 'Table Name = ' + data_source.table_name.text print 'Web Property Id = ' + data_source.GetProperty('ga:webPropertyId').value print 'Profile Id = ' + data_source.GetProperty('ga:profileId').value print 'Account Name = ' + data_source.GetProperty('ga:accountName').value def PrintFeedAggregates(self): """Prints data found in the aggregates elements. This contains the sum of all the metrics defined in the query across. This sum spans all the rows matched in the feed.total_results property and not just the rows returned by the response. """ aggregates = self.feed.aggregates print '\n-------- Metric Aggregates --------' for met in aggregates.metric: print '' print 'Metric Name = ' + met.name print 'Metric Value = ' + met.value print 'Metric Type = ' + met.type print 'Metric CI = ' + met.confidence_interval def PrintSegmentInfo(self): """Prints segment information if the query has advanced segments defined.""" print '-------- Advanced Segments Information --------' if self.feed.segment: if segment.name: print 'Segment Name = ' + str(segment.name) if segment.id: print 'Segment Id = ' + str(segment.id) print 'Segment Definition = ' + segment.definition.text else: print 'No segments defined' def PrintOneEntry(self): """Prints all the important Google Analytics data found in an entry""" print '\n-------- One Entry --------' if len(self.feed.entry) == 0: print 'No entries found' return entry = self.feed.entry[0] print 'ID = ' + entry.id.text for dim in entry.dimension: print 'Dimension Name = ' + dim.name print 'Dimension Value = ' + dim.value for met in entry.metric: print 'Metric Name = ' + met.name print 'Metric Value = ' + met.value print 'Metric Type = ' + met.type print 'Metric CI = ' + met.confidence_interval def PrintFeedTable(self): """Prints all the entries as a table.""" print '\n-------- All Entries In a Table --------' for 
entry in self.feed.entry: for dim in entry.dimension: print ('Dimension Name = %s \t Dimension Value = %s' % (dim.name, dim.value)) for met in entry.metric: print ('Metric Name = %s \t Metric Value = %s' % (met.name, met.value)) print '---' if __name__ == '__main__': main()
Python
#!/usr/bin/python
#
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Google Analytics Management API Demo.

This script demonstrates how to retrieve the important data from the Google
Analytics Data Management API using the Python Client library. This example
requires a Google Analytics account with data and a username and password.

Each feed in the Management API is retrieved and printed using the
respective print method in ManagementFeedDemo. To simplify setting filters
and query parameters, each feed has its own query class. Check the
<code>gdata.analytics.client</code> module for more details on usage.

main: The main method of this example.
GetAnalyticsClient: Returns an authorized AnalyticsClient object.
Class ManagementFeedDemo: Prints all the important feed data.
"""

__author__ = 'api.nickm@google.com (Nick Mihailovski)'

import gdata.analytics.client
import gdata.sample_util

# '~all' asks the Management API for every account / web property /
# profile the authorized user has access to.
ACCOUNT_ID = '~all'
WEB_PROPERTY_ID = '~all'
PROFILE_ID = '~all'


def main():
  """Main example script. Un-comment each method to print the feed."""
  demo = ManagementFeedDemo(GetAnalyticsClient())
  demo.PrintAccountFeed()
  # demo.PrintWebPropertyFeed()
  # demo.PrintProfileFeed()
  # demo.PrintGoalFeed()
  # demo.PrintSegmentFeed()


def GetAnalyticsClient():
  """Returns an authorized GoogleAnalayticsClient object.

  Uses the Google Data python samples wrapper to prompt the user for
  credentials then tries to authorize the client object with the
  Google Analytics API.

  Returns:
    An authorized GoogleAnalyticsClient object.
  """
  SOURCE_APP_NAME = 'Analytics-ManagementAPI-Demo-v1'
  my_client = gdata.analytics.client.AnalyticsClient(source=SOURCE_APP_NAME)

  try:
    # Prompts on stdin for the authorization method and credentials.
    gdata.sample_util.authorize_client(
        my_client,
        service=my_client.auth_service,
        source=SOURCE_APP_NAME,
        scopes=['https://www.google.com/analytics/feeds/'])
  except gdata.client.BadAuthentication:
    exit('Invalid user credentials given.')
  except gdata.client.Error:
    exit('Login Error')

  return my_client


class ManagementFeedDemo(object):
  """The main demo for the management feed.

  Attributes:
    my_client: gdata.analytics.client The AnalyticsClient object for this
        demo.
  """

  def __init__(self, my_client):
    """Initializes the ManagementFeedDemo class.

    Args:
      my_client: gdata.analytics.client An authorized GoogleAnalyticsClient
          object.
    """
    self.my_client = my_client

  def PrintAccountFeed(self):
    """Requests and prints the important data in the Account Feed.

    Note: AccountQuery is used for the ManagementAPI. AccountFeedQuery is
    used for the Data Export API.
    """
    account_query = gdata.analytics.client.AccountQuery()
    results = self.my_client.GetManagementFeed(account_query)

    print '-------- Account Feed Data --------'
    if not results.entry:
      print 'no entries found'
    else:
      for entry in results.entry:
        print 'Account Name = ' + entry.GetProperty('ga:accountName').value
        print 'Account ID = ' + entry.GetProperty('ga:accountId').value
        # Each account entry links to its child web-properties feed.
        print 'Child Feed Link = ' + entry.GetChildLink('analytics#webproperties').href
        print

  def PrintWebPropertyFeed(self):
    """Requests and prints the important data in the Web Property Feed."""
    web_property_query = gdata.analytics.client.WebPropertyQuery(
        acct_id=ACCOUNT_ID)
    results = self.my_client.GetManagementFeed(web_property_query)

    print '-------- Web Property Feed Data --------'
    if not results.entry:
      print 'no entries found'
    else:
      for entry in results.entry:
        print 'Account ID = ' + entry.GetProperty('ga:accountId').value
        print 'Web Property ID = ' + entry.GetProperty('ga:webPropertyId').value
        # Each web property entry links to its child profiles feed.
        print 'Child Feed Link = ' + entry.GetChildLink('analytics#profiles').href
        print

  def PrintProfileFeed(self):
    """Requests and prints the important data in the Profile Feed.

    Note: TableId has a different namespace (dxp:) than all the other
    properties (ga:).
    """
    profile_query = gdata.analytics.client.ProfileQuery(
        acct_id=ACCOUNT_ID,
        web_prop_id=WEB_PROPERTY_ID)
    results = self.my_client.GetManagementFeed(profile_query)

    print '-------- Profile Feed Data --------'
    if not results.entry:
      print 'no entries found'
    else:
      for entry in results.entry:
        print 'Account ID = ' + entry.GetProperty('ga:accountId').value
        print 'Web Property ID = ' + entry.GetProperty('ga:webPropertyId').value
        print 'Profile ID = ' + entry.GetProperty('ga:profileId').value
        print 'Currency = ' + entry.GetProperty('ga:currency').value
        print 'Timezone = ' + entry.GetProperty('ga:timezone').value
        print 'TableId = ' + entry.GetProperty('dxp:tableId').value
        # Each profile entry links to its child goals feed.
        print 'Child Feed Link = ' + entry.GetChildLink('analytics#goals').href
        print

  def PrintGoalFeed(self):
    """Requests and prints the important data in the Goal Feed.

    Note: There are two types of goals, destination and engagement which
    need to be handled differently.
    """
    goal_query = gdata.analytics.client.GoalQuery(
        acct_id=ACCOUNT_ID,
        web_prop_id=WEB_PROPERTY_ID,
        profile_id=PROFILE_ID)
    results = self.my_client.GetManagementFeed(goal_query)

    print '-------- Goal Feed Data --------'
    if not results.entry:
      print 'no entries found'
    else:
      for entry in results.entry:
        print 'Goal Number = ' + entry.goal.number
        print 'Goal Name = ' + entry.goal.name
        print 'Goal Value = ' + entry.goal.value
        print 'Goal Active = ' + entry.goal.active

        # A goal carries either a destination or an engagement config;
        # dispatch to the matching print helper.
        if entry.goal.destination:
          self.PrintDestinationGoal(entry.goal.destination)
        elif entry.goal.engagement:
          self.PrintEngagementGoal(entry.goal.engagement)

  def PrintDestinationGoal(self, destination):
    """Prints the important information for destination goals including
    all the configured steps if they exist.

    Args:
      destination: gdata.data.Destination The destination goal
          configuration.
    """
    print '\t----- Destination Goal -----'
    print '\tExpression = ' + destination.expression
    print '\tMatch Type = ' + destination.match_type
    print '\tStep 1 Required = ' + destination.step1_required
    print '\tCase Sensitive = ' + destination.case_sensitive

    # Steps are optional; only funnel-configured goals have them.
    if destination.step:
      print '\t\t----- Destination Goal Steps -----'
      for step in destination.step:
        print '\t\tStep Number = ' + step.number
        print '\t\tStep Name = ' + step.name
        print '\t\tStep Path = ' + step.path
    print

  def PrintEngagementGoal(self, engagement):
    """Prints the important information for engagement goals.

    Args:
      engagement: gdata.data.Engagement The engagement goal configuration.
    """
    print '\t----- Engagement Goal -----'
    print '\tGoal Type = ' + engagement.type
    print '\tGoal Engagement = ' + engagement.comparison
    print '\tGoal Threshold = ' + engagement.threshold_value
    print

  def PrintSegmentFeed(self):
    """Requests and prints the important data in the Profile Feed."""
    adv_seg_query = gdata.analytics.client.AdvSegQuery()
    results = self.my_client.GetManagementFeed(adv_seg_query)

    print '-------- Advanced Segment Feed Data --------'
    if not results.entry:
      print 'no entries found'
    else:
      for entry in results.entry:
        print 'Segment ID = ' + entry.segment.id
        print 'Segment Name = ' + entry.segment.name
        print 'Segment Definition = ' + entry.segment.definition.text
        print


if __name__ == '__main__':
  main()
Python
#!/usr/bin/python # # Copyright 2009 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Sample Google Analytics Data Export API Account Feed application. This sample demonstrates how to retrieve the important data from the Google Analytics Data Export API Account feed using the Python Client library. This requires a Google Analytics username and password and uses the Client Login authorization routine. Class AccountFeedDemo: Prints all the import Account Feed data. """ __author__ = 'api.nickm@google.com (Nick Mihailovski)' import gdata.analytics.client import gdata.sample_util def main(): """Main fucntion for the sample.""" demo = AccountFeedDemo() demo.PrintFeedDetails() demo.PrintAdvancedSegments() demo.PrintCustomVarForOneEntry() demo.PrintGoalsForOneEntry() demo.PrintAccountEntries() class AccountFeedDemo(object): """Prints the Google Analytics account feed Attributes: account_feed: Google Analytics AccountList returned form the API. 
""" def __init__(self): """Inits AccountFeedDemo.""" SOURCE_APP_NAME = 'Google-accountFeedDemoPython-v1' my_client = gdata.analytics.client.AnalyticsClient(source=SOURCE_APP_NAME) try: gdata.sample_util.authorize_client( my_client, service=my_client.auth_service, source=SOURCE_APP_NAME, scopes=['https://www.google.com/analytics/feeds/']) except gdata.client.BadAuthentication: exit('Invalid user credentials given.') except gdata.client.Error: exit('Login Error') account_query = gdata.analytics.client.AccountFeedQuery() self.feed = my_client.GetAccountFeed(account_query) def PrintFeedDetails(self): """Prints important Analytics related data found at the top of the feed.""" print '-------- Important Feed Data --------' print 'Feed Title = ' + self.feed.title.text print 'Feed Id = ' + self.feed.id.text print 'Total Results Found = ' + self.feed.total_results.text print 'Start Index = ' + self.feed.start_index.text print 'Results Returned = ' + self.feed.items_per_page.text def PrintAdvancedSegments(self): """Prints the advanced segments for this user.""" print '-------- Advances Segments --------' if not self.feed.segment: print 'No advanced segments found' else: for segment in self.feed.segment: print 'Segment Name = ' + segment.name print 'Segment Id = ' + segment.id print 'Segment Definition = ' + segment.definition.text def PrintCustomVarForOneEntry(self): """Prints custom variable information for the first profile that has custom variable configured.""" print '-------- Custom Variables --------' if not self.feed.entry: print 'No entries found' else: for entry in self.feed.entry: if entry.custom_variable: for custom_variable in entry.custom_variable: print 'Custom Variable Index = ' + custom_variable.index print 'Custom Variable Name = ' + custom_variable.name print 'Custom Variable Scope = ' + custom_variable.scope return print 'No custom variables defined for this user' def PrintGoalsForOneEntry(self): """Prints All the goal information for one profile.""" print 
'-------- Goal Configuration --------' if not self.feed.entry: print 'No entries found' else: for entry in self.feed.entry: if entry.goal: for goal in entry.goal: print 'Goal Number = ' + goal.number print 'Goal Name = ' + goal.name print 'Goal Value = ' + goal.value print 'Goal Active = ' + goal.active if goal.destination: self.PrintDestinationGoal(goal.destination) elif goal.engagement: self.PrintEngagementGoal(goal.engagement) return def PrintDestinationGoal(self, destination): """Prints the important information for destination goals including all the configured steps if they exist. Args: destination: gdata.data.Destination The destination goal configuration. """ print '----- Destination Goal -----' print 'Expression = ' + destination.expression print 'Match Type = ' + destination.match_type print 'Step 1 Required = ' + destination.step1_required print 'Case Sensitive = ' + destination.case_sensitive # Print goal steps. if destination.step: print '----- Destination Goal Steps -----' for step in destination.step: print 'Step Number = ' + step.number print 'Step Name = ' + step.name print 'Step Path = ' + step.path def PrintEngagementGoal(self, engagement): """Prints the important information for engagement goals. Args: engagement: gdata.data.Engagement The engagement goal configuration. 
""" print '----- Engagement Goal -----' print 'Goal Type = ' + engagement.type print 'Goal Engagement = ' + engagement.comparison print 'Goal Threshold = ' + engagement.threshold_value def PrintAccountEntries(self): """Prints important Analytics data found in each account entry""" print '-------- First 1000 Profiles in Account Feed --------' if not self.feed.entry: print 'No entries found' else: for entry in self.feed.entry: print 'Web Property ID = ' + entry.GetProperty('ga:webPropertyId').value print 'Account Name = ' + entry.GetProperty('ga:accountName').value print 'Account Id = ' + entry.GetProperty('ga:accountId').value print 'Profile Name = ' + entry.title.text print 'Profile ID = ' + entry.GetProperty('ga:profileId').value print 'Table ID = ' + entry.table_id.text print 'Currency = ' + entry.GetProperty('ga:currency').value print 'TimeZone = ' + entry.GetProperty('ga:timezone').value if entry.custom_variable: print 'This profile has custom variables' if entry.goal: print 'This profile has goals' if __name__ == '__main__': main()
Python
#!/usr/bin/python # # Copyright (C) 2007 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. __author__ = 'api.laurabeth@gmail.com (Laura Beth Lincoln)' try: from xml.etree import ElementTree except ImportError: from elementtree import ElementTree import gdata.spreadsheet.service import gdata.service import atom.service import gdata.spreadsheet import atom import getopt import sys import string class SimpleCRUD: def __init__(self, email, password): self.gd_client = gdata.spreadsheet.service.SpreadsheetsService() self.gd_client.email = email self.gd_client.password = password self.gd_client.source = 'Spreadsheets GData Sample' self.gd_client.ProgrammaticLogin() self.curr_key = '' self.curr_wksht_id = '' self.list_feed = None def _PromptForSpreadsheet(self): # Get the list of spreadsheets feed = self.gd_client.GetSpreadsheetsFeed() self._PrintFeed(feed) input = raw_input('\nSelection: ') id_parts = feed.entry[string.atoi(input)].id.text.split('/') self.curr_key = id_parts[len(id_parts) - 1] def _PromptForWorksheet(self): # Get the list of worksheets feed = self.gd_client.GetWorksheetsFeed(self.curr_key) self._PrintFeed(feed) input = raw_input('\nSelection: ') id_parts = feed.entry[string.atoi(input)].id.text.split('/') self.curr_wksht_id = id_parts[len(id_parts) - 1] def _PromptForCellsAction(self): print ('dump\n' 'update {row} {col} {input_value}\n' '\n') input = raw_input('Command: ') command = input.split(' ', 1) if command[0] == 'dump': self._CellsGetAction() elif 
command[0] == 'update': parsed = command[1].split(' ', 2) if len(parsed) == 3: self._CellsUpdateAction(parsed[0], parsed[1], parsed[2]) else: self._CellsUpdateAction(parsed[0], parsed[1], '') else: self._InvalidCommandError(input) def _PromptForListAction(self): print ('dump\n' 'insert {row_data} (example: insert label=content)\n' 'update {row_index} {row_data}\n' 'delete {row_index}\n' 'Note: No uppercase letters in column names!\n' '\n') input = raw_input('Command: ') command = input.split(' ' , 1) if command[0] == 'dump': self._ListGetAction() elif command[0] == 'insert': self._ListInsertAction(command[1]) elif command[0] == 'update': parsed = command[1].split(' ', 1) self._ListUpdateAction(parsed[0], parsed[1]) elif command[0] == 'delete': self._ListDeleteAction(command[1]) else: self._InvalidCommandError(input) def _CellsGetAction(self): # Get the feed of cells feed = self.gd_client.GetCellsFeed(self.curr_key, self.curr_wksht_id) self._PrintFeed(feed) def _CellsUpdateAction(self, row, col, inputValue): entry = self.gd_client.UpdateCell(row=row, col=col, inputValue=inputValue, key=self.curr_key, wksht_id=self.curr_wksht_id) if isinstance(entry, gdata.spreadsheet.SpreadsheetsCell): print 'Updated!' def _ListGetAction(self): # Get the list feed self.list_feed = self.gd_client.GetListFeed(self.curr_key, self.curr_wksht_id) self._PrintFeed(self.list_feed) def _ListInsertAction(self, row_data): entry = self.gd_client.InsertRow(self._StringToDictionary(row_data), self.curr_key, self.curr_wksht_id) if isinstance(entry, gdata.spreadsheet.SpreadsheetsList): print 'Inserted!' def _ListUpdateAction(self, index, row_data): self.list_feed = self.gd_client.GetListFeed(self.curr_key, self.curr_wksht_id) entry = self.gd_client.UpdateRow( self.list_feed.entry[string.atoi(index)], self._StringToDictionary(row_data)) if isinstance(entry, gdata.spreadsheet.SpreadsheetsList): print 'Updated!' 
def _ListDeleteAction(self, index): self.list_feed = self.gd_client.GetListFeed(self.curr_key, self.curr_wksht_id) self.gd_client.DeleteRow(self.list_feed.entry[string.atoi(index)]) print 'Deleted!' def _StringToDictionary(self, row_data): dict = {} for param in row_data.split(): temp = param.split('=') dict[temp[0]] = temp[1] return dict def _PrintFeed(self, feed): for i, entry in enumerate(feed.entry): if isinstance(feed, gdata.spreadsheet.SpreadsheetsCellsFeed): print '%s %s\n' % (entry.title.text, entry.content.text) elif isinstance(feed, gdata.spreadsheet.SpreadsheetsListFeed): print '%s %s %s' % (i, entry.title.text, entry.content.text) # Print this row's value for each column (the custom dictionary is # built using the gsx: elements in the entry.) print 'Contents:' for key in entry.custom: print ' %s: %s' % (key, entry.custom[key].text) print '\n', else: print '%s %s\n' % (i, entry.title.text) def _InvalidCommandError(self, input): print 'Invalid input: %s\n' % (input) def Run(self): self._PromptForSpreadsheet() self._PromptForWorksheet() input = raw_input('cells or list? ') if input == 'cells': while True: self._PromptForCellsAction() elif input == 'list': while True: self._PromptForListAction() def main(): # parse command line options try: opts, args = getopt.getopt(sys.argv[1:], "", ["user=", "pw="]) except getopt.error, msg: print 'python spreadsheetExample.py --user [username] --pw [password] ' sys.exit(2) user = '' pw = '' key = '' # Process options for o, a in opts: if o == "--user": user = a elif o == "--pw": pw = a if user == '' or pw == '': print 'python spreadsheetExample.py --user [username] --pw [password] ' sys.exit(2) sample = SimpleCRUD(user, pw) sample.Run() if __name__ == '__main__': main()
Python
#!/usr/bin/env python # # Copyright 2007 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Tool for uploading diffs from a version control system to the codereview app. Usage summary: upload.py [options] [-- diff_options] Diff options are passed to the diff command of the underlying system. Supported version control systems: Git Mercurial Subversion It is important for Git/Mercurial users to specify a tree/node/branch to diff against by using the '--rev' option. """ # This code is derived from appcfg.py in the App Engine SDK (open source), # and from ASPN recipe #146306. import ConfigParser import cookielib import fnmatch import getpass import logging import mimetypes import optparse import os import re import socket import subprocess import sys import urllib import urllib2 import urlparse # The md5 module was deprecated in Python 2.5. try: from hashlib import md5 except ImportError: from md5 import md5 try: import readline except ImportError: pass # The logging verbosity: # 0: Errors only. # 1: Status messages. # 2: Info logs. # 3: Debug logs. verbosity = 1 # Max size of patch or base file. MAX_UPLOAD_SIZE = 900 * 1024 # Constants for version control names. Used by GuessVCSName. VCS_GIT = "Git" VCS_MERCURIAL = "Mercurial" VCS_SUBVERSION = "Subversion" VCS_UNKNOWN = "Unknown" # whitelist for non-binary filetypes which do not start with "text/" # .mm (Objective-C) shows up as application/x-freemind on my Linux box. 
TEXT_MIMETYPES = ['application/javascript', 'application/x-javascript', 'application/x-freemind'] VCS_ABBREVIATIONS = { VCS_MERCURIAL.lower(): VCS_MERCURIAL, "hg": VCS_MERCURIAL, VCS_SUBVERSION.lower(): VCS_SUBVERSION, "svn": VCS_SUBVERSION, VCS_GIT.lower(): VCS_GIT, } # The result of parsing Subversion's [auto-props] setting. svn_auto_props_map = None def GetEmail(prompt): """Prompts the user for their email address and returns it. The last used email address is saved to a file and offered up as a suggestion to the user. If the user presses enter without typing in anything the last used email address is used. If the user enters a new address, it is saved for next time we prompt. """ last_email_file_name = os.path.expanduser("~/.last_codereview_email_address") last_email = "" if os.path.exists(last_email_file_name): try: last_email_file = open(last_email_file_name, "r") last_email = last_email_file.readline().strip("\n") last_email_file.close() prompt += " [%s]" % last_email except IOError, e: pass email = raw_input(prompt + ": ").strip() if email: try: last_email_file = open(last_email_file_name, "w") last_email_file.write(email) last_email_file.close() except IOError, e: pass else: email = last_email return email def StatusUpdate(msg): """Print a status message to stdout. If 'verbosity' is greater than 0, print the message. Args: msg: The string to print. 
""" if verbosity > 0: print msg def ErrorExit(msg): """Print an error message to stderr and exit.""" print >>sys.stderr, msg sys.exit(1) class ClientLoginError(urllib2.HTTPError): """Raised to indicate there was an error authenticating with ClientLogin.""" def __init__(self, url, code, msg, headers, args): urllib2.HTTPError.__init__(self, url, code, msg, headers, None) self.args = args self.reason = args["Error"] class AbstractRpcServer(object): """Provides a common interface for a simple RPC server.""" def __init__(self, host, auth_function, host_override=None, extra_headers={}, save_cookies=False): """Creates a new HttpRpcServer. Args: host: The host to send requests to. auth_function: A function that takes no arguments and returns an (email, password) tuple when called. Will be called if authentication is required. host_override: The host header to send to the server (defaults to host). extra_headers: A dict of extra headers to append to every request. save_cookies: If True, save the authentication cookies to local disk. If False, use an in-memory cookiejar instead. Subclasses must implement this functionality. Defaults to False. """ self.host = host self.host_override = host_override self.auth_function = auth_function self.authenticated = False self.extra_headers = extra_headers self.save_cookies = save_cookies self.opener = self._GetOpener() if self.host_override: logging.info("Server: %s; Host: %s", self.host, self.host_override) else: logging.info("Server: %s", self.host) def _GetOpener(self): """Returns an OpenerDirector for making HTTP requests. Returns: A urllib2.OpenerDirector object. 
""" raise NotImplementedError() def _CreateRequest(self, url, data=None): """Creates a new urllib request.""" logging.debug("Creating request for: '%s' with payload:\n%s", url, data) req = urllib2.Request(url, data=data) if self.host_override: req.add_header("Host", self.host_override) for key, value in self.extra_headers.iteritems(): req.add_header(key, value) return req def _GetAuthToken(self, email, password): """Uses ClientLogin to authenticate the user, returning an auth token. Args: email: The user's email address password: The user's password Raises: ClientLoginError: If there was an error authenticating with ClientLogin. HTTPError: If there was some other form of HTTP error. Returns: The authentication token returned by ClientLogin. """ account_type = "GOOGLE" if self.host.endswith(".google.com"): # Needed for use inside Google. account_type = "HOSTED" req = self._CreateRequest( url="https://www.google.com/accounts/ClientLogin", data=urllib.urlencode({ "Email": email, "Passwd": password, "service": "ah", "source": "rietveld-codereview-upload", "accountType": account_type, }), ) try: response = self.opener.open(req) response_body = response.read() response_dict = dict(x.split("=") for x in response_body.split("\n") if x) return response_dict["Auth"] except urllib2.HTTPError, e: if e.code == 403: body = e.read() response_dict = dict(x.split("=", 1) for x in body.split("\n") if x) raise ClientLoginError(req.get_full_url(), e.code, e.msg, e.headers, response_dict) else: raise def _GetAuthCookie(self, auth_token): """Fetches authentication cookies for an authentication token. Args: auth_token: The authentication token returned by ClientLogin. Raises: HTTPError: If there was an error fetching the authentication cookies. """ # This is a dummy value to allow us to identify when we're successful. 
continue_location = "http://localhost/" args = {"continue": continue_location, "auth": auth_token} req = self._CreateRequest("http://%s/_ah/login?%s" % (self.host, urllib.urlencode(args))) try: response = self.opener.open(req) except urllib2.HTTPError, e: response = e if (response.code != 302 or response.info()["location"] != continue_location): raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg, response.headers, response.fp) self.authenticated = True def _Authenticate(self): """Authenticates the user. The authentication process works as follows: 1) We get a username and password from the user 2) We use ClientLogin to obtain an AUTH token for the user (see http://code.google.com/apis/accounts/AuthForInstalledApps.html). 3) We pass the auth token to /_ah/login on the server to obtain an authentication cookie. If login was successful, it tries to redirect us to the URL we provided. If we attempt to access the upload API without first obtaining an authentication cookie, it returns a 401 response (or a 302) and directs us to authenticate ourselves with ClientLogin. """ for i in range(3): credentials = self.auth_function() try: auth_token = self._GetAuthToken(credentials[0], credentials[1]) except ClientLoginError, e: if e.reason == "BadAuthentication": print >>sys.stderr, "Invalid username or password." continue if e.reason == "CaptchaRequired": print >>sys.stderr, ( "Please go to\n" "https://www.google.com/accounts/DisplayUnlockCaptcha\n" "and verify you are a human. Then try again.") break if e.reason == "NotVerified": print >>sys.stderr, "Account not verified." break if e.reason == "TermsNotAgreed": print >>sys.stderr, "User has not agreed to TOS." break if e.reason == "AccountDeleted": print >>sys.stderr, "The user account has been deleted." break if e.reason == "AccountDisabled": print >>sys.stderr, "The user account has been disabled." 
break if e.reason == "ServiceDisabled": print >>sys.stderr, ("The user's access to the service has been " "disabled.") break if e.reason == "ServiceUnavailable": print >>sys.stderr, "The service is not available; try again later." break raise self._GetAuthCookie(auth_token) return def Send(self, request_path, payload=None, content_type="application/octet-stream", timeout=None, **kwargs): """Sends an RPC and returns the response. Args: request_path: The path to send the request to, eg /api/appversion/create. payload: The body of the request, or None to send an empty request. content_type: The Content-Type header to use. timeout: timeout in seconds; default None i.e. no timeout. (Note: for large requests on OS X, the timeout doesn't work right.) kwargs: Any keyword arguments are converted into query string parameters. Returns: The response body, as a string. """ # TODO: Don't require authentication. Let the server say # whether it is necessary. if not self.authenticated: self._Authenticate() old_timeout = socket.getdefaulttimeout() socket.setdefaulttimeout(timeout) try: tries = 0 while True: tries += 1 args = dict(kwargs) url = "http://%s%s" % (self.host, request_path) if args: url += "?" + urllib.urlencode(args) req = self._CreateRequest(url=url, data=payload) req.add_header("Content-Type", content_type) try: f = self.opener.open(req) response = f.read() f.close() return response except urllib2.HTTPError, e: if tries > 3: raise elif e.code == 401 or e.code == 302: self._Authenticate() ## elif e.code >= 500 and e.code < 600: ## # Server Error - try again. 
## continue else: raise finally: socket.setdefaulttimeout(old_timeout) class HttpRpcServer(AbstractRpcServer): """Provides a simplified RPC-style interface for HTTP requests.""" def _Authenticate(self): """Save the cookie jar after authentication.""" super(HttpRpcServer, self)._Authenticate() if self.save_cookies: StatusUpdate("Saving authentication cookies to %s" % self.cookie_file) self.cookie_jar.save() def _GetOpener(self): """Returns an OpenerDirector that supports cookies and ignores redirects. Returns: A urllib2.OpenerDirector object. """ opener = urllib2.OpenerDirector() opener.add_handler(urllib2.ProxyHandler()) opener.add_handler(urllib2.UnknownHandler()) opener.add_handler(urllib2.HTTPHandler()) opener.add_handler(urllib2.HTTPDefaultErrorHandler()) opener.add_handler(urllib2.HTTPSHandler()) opener.add_handler(urllib2.HTTPErrorProcessor()) if self.save_cookies: self.cookie_file = os.path.expanduser("~/.codereview_upload_cookies") self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file) if os.path.exists(self.cookie_file): try: self.cookie_jar.load() self.authenticated = True StatusUpdate("Loaded authentication cookies from %s" % self.cookie_file) except (cookielib.LoadError, IOError): # Failed to load cookies - just ignore them. pass else: # Create an empty cookie file with mode 600 fd = os.open(self.cookie_file, os.O_CREAT, 0600) os.close(fd) # Always chmod the cookie file os.chmod(self.cookie_file, 0600) else: # Don't save cookies across runs of update.py. 
self.cookie_jar = cookielib.CookieJar() opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar)) return opener parser = optparse.OptionParser(usage="%prog [options] [-- diff_options]") parser.add_option("-y", "--assume_yes", action="store_true", dest="assume_yes", default=False, help="Assume that the answer to yes/no questions is 'yes'.") # Logging group = parser.add_option_group("Logging options") group.add_option("-q", "--quiet", action="store_const", const=0, dest="verbose", help="Print errors only.") group.add_option("-v", "--verbose", action="store_const", const=2, dest="verbose", default=1, help="Print info level logs (default).") group.add_option("--noisy", action="store_const", const=3, dest="verbose", help="Print all logs.") # Review server group = parser.add_option_group("Review server options") group.add_option("-s", "--server", action="store", dest="server", default="codereview.appspot.com", metavar="SERVER", help=("The server to upload to. The format is host[:port]. " "Defaults to '%default'.")) group.add_option("-e", "--email", action="store", dest="email", metavar="EMAIL", default=None, help="The username to use. 
Will prompt if omitted.") group.add_option("-H", "--host", action="store", dest="host", metavar="HOST", default=None, help="Overrides the Host header sent with all RPCs.") group.add_option("--no_cookies", action="store_false", dest="save_cookies", default=True, help="Do not save authentication cookies to local disk.") # Issue group = parser.add_option_group("Issue options") group.add_option("-d", "--description", action="store", dest="description", metavar="DESCRIPTION", default=None, help="Optional description when creating an issue.") group.add_option("-f", "--description_file", action="store", dest="description_file", metavar="DESCRIPTION_FILE", default=None, help="Optional path of a file that contains " "the description when creating an issue.") group.add_option("-r", "--reviewers", action="store", dest="reviewers", metavar="REVIEWERS", default=",joe.gregorio@gmail.com", help="Add reviewers (comma separated email addresses).") group.add_option("--cc", action="store", dest="cc", metavar="CC", default="gdata-python-client-library-contributors@googlegroups.com", help="Add CC (comma separated email addresses).") group.add_option("--private", action="store_true", dest="private", default=False, help="Make the issue restricted to reviewers and those CCed") # Upload options group = parser.add_option_group("Patch options") group.add_option("-m", "--message", action="store", dest="message", metavar="MESSAGE", default=None, help="A message to identify the patch. " "Will prompt if omitted.") group.add_option("-i", "--issue", type="int", action="store", metavar="ISSUE", default=None, help="Issue number to which to add. Defaults to new issue.") group.add_option("--base_url", action="store", dest="base_url", default=None, help="Base repository URL (listed as \"Base URL\" when " "viewing issue). 
If omitted, will be guessed automatically " "for SVN repos and left blank for others.") group.add_option("--download_base", action="store_true", dest="download_base", default=False, help="Base files will be downloaded by the server " "(side-by-side diffs may not work on files with CRs).") group.add_option("--rev", action="store", dest="revision", metavar="REV", default=None, help="Base revision/branch/tree to diff against. Use " "rev1:rev2 range to review already committed changeset.") group.add_option("--send_mail", action="store_true", dest="send_mail", default=True, help="Send notification email to reviewers.") group.add_option("--vcs", action="store", dest="vcs", metavar="VCS", default=None, help=("Version control system (optional, usually upload.py " "already guesses the right VCS).")) group.add_option("--emulate_svn_auto_props", action="store_true", dest="emulate_svn_auto_props", default=False, help=("Emulate Subversion's auto properties feature.")) def GetRpcServer(options): """Returns an instance of an AbstractRpcServer. Returns: A new AbstractRpcServer, on which RPC calls can be made. """ rpc_server_class = HttpRpcServer def GetUserCredentials(): """Prompts the user for a username and password.""" email = options.email if email is None: email = GetEmail("Email (login for uploading to %s)" % options.server) password = getpass.getpass("Password for %s: " % email) return (email, password) # If this is the dev_appserver, use fake authentication. host = (options.host or options.server).lower() if host == "localhost" or host.startswith("localhost:"): email = options.email if email is None: email = "test@example.com" logging.info("Using debug user %s. Override with --email" % email) server = rpc_server_class( options.server, lambda: (email, "password"), host_override=options.host, extra_headers={"Cookie": 'dev_appserver_login="%s:False"' % email}, save_cookies=options.save_cookies) # Don't try to talk to ClientLogin. 
server.authenticated = True return server return rpc_server_class(options.server, GetUserCredentials, host_override=options.host, save_cookies=options.save_cookies) def EncodeMultipartFormData(fields, files): """Encode form fields for multipart/form-data. Args: fields: A sequence of (name, value) elements for regular form fields. files: A sequence of (name, filename, value) elements for data to be uploaded as files. Returns: (content_type, body) ready for httplib.HTTP instance. Source: http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/146306 """ BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-' CRLF = '\r\n' lines = [] for (key, value) in fields: lines.append('--' + BOUNDARY) lines.append('Content-Disposition: form-data; name="%s"' % key) lines.append('') lines.append(value) for (key, filename, value) in files: lines.append('--' + BOUNDARY) lines.append('Content-Disposition: form-data; name="%s"; filename="%s"' % (key, filename)) lines.append('Content-Type: %s' % GetContentType(filename)) lines.append('') lines.append(value) lines.append('--' + BOUNDARY + '--') lines.append('') body = CRLF.join(lines) content_type = 'multipart/form-data; boundary=%s' % BOUNDARY return content_type, body def GetContentType(filename): """Helper to guess the content-type from the filename.""" return mimetypes.guess_type(filename)[0] or 'application/octet-stream' # Use a shell for subcommands on Windows to get a PATH search. use_shell = sys.platform.startswith("win") def RunShellWithReturnCode(command, print_output=False, universal_newlines=True, env=os.environ): """Executes a command and returns the output from stdout and the return code. Args: command: Command to execute. print_output: If True, the output is printed to stdout. If False, both stdout and stderr are ignored. universal_newlines: Use universal_newlines flag (default: True). 
Returns: Tuple (output, return code) """ logging.info("Running %s", command) p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=use_shell, universal_newlines=universal_newlines, env=env) if print_output: output_array = [] while True: line = p.stdout.readline() if not line: break print line.strip("\n") output_array.append(line) output = "".join(output_array) else: output = p.stdout.read() p.wait() errout = p.stderr.read() if print_output and errout: print >>sys.stderr, errout p.stdout.close() p.stderr.close() return output, p.returncode def RunShell(command, silent_ok=False, universal_newlines=True, print_output=False, env=os.environ): data, retcode = RunShellWithReturnCode(command, print_output, universal_newlines, env) if retcode: ErrorExit("Got error status from %s:\n%s" % (command, data)) if not silent_ok and not data: ErrorExit("No output from %s" % command) return data class VersionControlSystem(object): """Abstract base class providing an interface to the VCS.""" def __init__(self, options): """Constructor. Args: options: Command line options. """ self.options = options def GenerateDiff(self, args): """Return the current diff as a string. Args: args: Extra arguments to pass to the diff command. """ raise NotImplementedError( "abstract method -- subclass %s must override" % self.__class__) def GetUnknownFiles(self): """Return a list of files unknown to the VCS.""" raise NotImplementedError( "abstract method -- subclass %s must override" % self.__class__) def CheckForUnknownFiles(self): """Show an "are you sure?" prompt if there are unknown files.""" unknown_files = self.GetUnknownFiles() if unknown_files: print "The following files are not added to version control:" for line in unknown_files: print line prompt = "Are you sure to continue?(y/N) " answer = raw_input(prompt).strip() if answer != "y": ErrorExit("User aborted") def GetBaseFile(self, filename): """Get the content of the upstream version of a file. 
Returns: A tuple (base_content, new_content, is_binary, status) base_content: The contents of the base file. new_content: For text files, this is empty. For binary files, this is the contents of the new file, since the diff output won't contain information to reconstruct the current file. is_binary: True iff the file is binary. status: The status of the file. """ raise NotImplementedError( "abstract method -- subclass %s must override" % self.__class__) def GetBaseFiles(self, diff): """Helper that calls GetBase file for each file in the patch. Returns: A dictionary that maps from filename to GetBaseFile's tuple. Filenames are retrieved based on lines that start with "Index:" or "Property changes on:". """ files = {} for line in diff.splitlines(True): if line.startswith('Index:') or line.startswith('Property changes on:'): unused, filename = line.split(':', 1) # On Windows if a file has property changes its filename uses '\' # instead of '/'. filename = filename.strip().replace('\\', '/') files[filename] = self.GetBaseFile(filename) return files def UploadBaseFiles(self, issue, rpc_server, patch_list, patchset, options, files): """Uploads the base files (and if necessary, the current ones as well).""" def UploadFile(filename, file_id, content, is_binary, status, is_base): """Uploads a file to the server.""" file_too_large = False if is_base: type = "base" else: type = "current" if len(content) > MAX_UPLOAD_SIZE: print ("Not uploading the %s file for %s because it's too large." 
% (type, filename)) file_too_large = True content = "" checksum = md5(content).hexdigest() if options.verbose > 0 and not file_too_large: print "Uploading %s file for %s" % (type, filename) url = "/%d/upload_content/%d/%d" % (int(issue), int(patchset), file_id) form_fields = [("filename", filename), ("status", status), ("checksum", checksum), ("is_binary", str(is_binary)), ("is_current", str(not is_base)), ] if file_too_large: form_fields.append(("file_too_large", "1")) if options.email: form_fields.append(("user", options.email)) ctype, body = EncodeMultipartFormData(form_fields, [("data", filename, content)]) response_body = rpc_server.Send(url, body, content_type=ctype) if not response_body.startswith("OK"): StatusUpdate(" --> %s" % response_body) sys.exit(1) patches = dict() [patches.setdefault(v, k) for k, v in patch_list] for filename in patches.keys(): base_content, new_content, is_binary, status = files[filename] file_id_str = patches.get(filename) if file_id_str.find("nobase") != -1: base_content = None file_id_str = file_id_str[file_id_str.rfind("_") + 1:] file_id = int(file_id_str) if base_content != None: UploadFile(filename, file_id, base_content, is_binary, status, True) if new_content != None: UploadFile(filename, file_id, new_content, is_binary, status, False) def IsImage(self, filename): """Returns true if the filename has an image extension.""" mimetype = mimetypes.guess_type(filename)[0] if not mimetype: return False return mimetype.startswith("image/") def IsBinary(self, filename): """Returns true if the guessed mimetyped isnt't in text group.""" mimetype = mimetypes.guess_type(filename)[0] if not mimetype: return False # e.g. 
README, "real" binaries usually have an extension # special case for text files which don't start with text/ if mimetype in TEXT_MIMETYPES: return False return not mimetype.startswith("text/") class SubversionVCS(VersionControlSystem): """Implementation of the VersionControlSystem interface for Subversion.""" def __init__(self, options): super(SubversionVCS, self).__init__(options) if self.options.revision: match = re.match(r"(\d+)(:(\d+))?", self.options.revision) if not match: ErrorExit("Invalid Subversion revision %s." % self.options.revision) self.rev_start = match.group(1) self.rev_end = match.group(3) else: self.rev_start = self.rev_end = None # Cache output from "svn list -r REVNO dirname". # Keys: dirname, Values: 2-tuple (ouput for start rev and end rev). self.svnls_cache = {} # Base URL is required to fetch files deleted in an older revision. # Result is cached to not guess it over and over again in GetBaseFile(). required = self.options.download_base or self.options.revision is not None self.svn_base = self._GuessBase(required) def GuessBase(self, required): """Wrapper for _GuessBase.""" return self.svn_base def _GuessBase(self, required): """Returns the SVN base URL. Args: required: If true, exits if the url can't be guessed, otherwise None is returned. 
""" info = RunShell(["svn", "info"]) for line in info.splitlines(): words = line.split() if len(words) == 2 and words[0] == "URL:": url = words[1] scheme, netloc, path, params, query, fragment = urlparse.urlparse(url) username, netloc = urllib.splituser(netloc) if username: logging.info("Removed username from base URL") if netloc.endswith("svn.python.org"): if netloc == "svn.python.org": if path.startswith("/projects/"): path = path[9:] elif netloc != "pythondev@svn.python.org": ErrorExit("Unrecognized Python URL: %s" % url) base = "http://svn.python.org/view/*checkout*%s/" % path logging.info("Guessed Python base = %s", base) elif netloc.endswith("svn.collab.net"): if path.startswith("/repos/"): path = path[6:] base = "http://svn.collab.net/viewvc/*checkout*%s/" % path logging.info("Guessed CollabNet base = %s", base) elif netloc.endswith(".googlecode.com"): path = path + "/" base = urlparse.urlunparse(("http", netloc, path, params, query, fragment)) logging.info("Guessed Google Code base = %s", base) else: path = path + "/" base = urlparse.urlunparse((scheme, netloc, path, params, query, fragment)) logging.info("Guessed base = %s", base) return base if required: ErrorExit("Can't find URL in output from svn info") return None def GenerateDiff(self, args): cmd = ["svn", "diff"] if self.options.revision: cmd += ["-r", self.options.revision] cmd.extend(args) data = RunShell(cmd) count = 0 for line in data.splitlines(): if line.startswith("Index:") or line.startswith("Property changes on:"): count += 1 logging.info(line) if not count: ErrorExit("No valid patches found in output from svn diff") return data def _CollapseKeywords(self, content, keyword_str): """Collapses SVN keywords.""" # svn cat translates keywords but svn diff doesn't. As a result of this # behavior patching.PatchChunks() fails with a chunk mismatch error. # This part was originally written by the Review Board development team # who had the same problem (http://reviews.review-board.org/r/276/). 
# Mapping of keywords to known aliases svn_keywords = { # Standard keywords 'Date': ['Date', 'LastChangedDate'], 'Revision': ['Revision', 'LastChangedRevision', 'Rev'], 'Author': ['Author', 'LastChangedBy'], 'HeadURL': ['HeadURL', 'URL'], 'Id': ['Id'], # Aliases 'LastChangedDate': ['LastChangedDate', 'Date'], 'LastChangedRevision': ['LastChangedRevision', 'Rev', 'Revision'], 'LastChangedBy': ['LastChangedBy', 'Author'], 'URL': ['URL', 'HeadURL'], } def repl(m): if m.group(2): return "$%s::%s$" % (m.group(1), " " * len(m.group(3))) return "$%s$" % m.group(1) keywords = [keyword for name in keyword_str.split(" ") for keyword in svn_keywords.get(name, [])] return re.sub(r"\$(%s):(:?)([^\$]+)\$" % '|'.join(keywords), repl, content) def GetUnknownFiles(self): status = RunShell(["svn", "status", "--ignore-externals"], silent_ok=True) unknown_files = [] for line in status.split("\n"): if line and line[0] == "?": unknown_files.append(line) return unknown_files def ReadFile(self, filename): """Returns the contents of a file.""" file = open(filename, 'rb') result = "" try: result = file.read() finally: file.close() return result def GetStatus(self, filename): """Returns the status of a file.""" if not self.options.revision: status = RunShell(["svn", "status", "--ignore-externals", filename]) if not status: ErrorExit("svn status returned no output for %s" % filename) status_lines = status.splitlines() # If file is in a cl, the output will begin with # "\n--- Changelist 'cl_name':\n". See # http://svn.collab.net/repos/svn/trunk/notes/changelist-design.txt if (len(status_lines) == 3 and not status_lines[0] and status_lines[1].startswith("--- Changelist")): status = status_lines[2] else: status = status_lines[0] # If we have a revision to diff against we need to run "svn list" # for the old and the new revision and compare the results to get # the correct status for a file. 
else: dirname, relfilename = os.path.split(filename) if dirname not in self.svnls_cache: cmd = ["svn", "list", "-r", self.rev_start, dirname or "."] out, returncode = RunShellWithReturnCode(cmd) if returncode: ErrorExit("Failed to get status for %s." % filename) old_files = out.splitlines() args = ["svn", "list"] if self.rev_end: args += ["-r", self.rev_end] cmd = args + [dirname or "."] out, returncode = RunShellWithReturnCode(cmd) if returncode: ErrorExit("Failed to run command %s" % cmd) self.svnls_cache[dirname] = (old_files, out.splitlines()) old_files, new_files = self.svnls_cache[dirname] if relfilename in old_files and relfilename not in new_files: status = "D " elif relfilename in old_files and relfilename in new_files: status = "M " else: status = "A " return status def GetBaseFile(self, filename): status = self.GetStatus(filename) base_content = None new_content = None # If a file is copied its status will be "A +", which signifies # "addition-with-history". See "svn st" for more information. We need to # upload the original file or else diff parsing will fail if the file was # edited. if status[0] == "A" and status[3] != "+": # We'll need to upload the new content if we're adding a binary file # since diff's output won't contain it. mimetype = RunShell(["svn", "propget", "svn:mime-type", filename], silent_ok=True) base_content = "" is_binary = bool(mimetype) and not mimetype.startswith("text/") if is_binary and self.IsImage(filename): new_content = self.ReadFile(filename) elif (status[0] in ("M", "D", "R") or (status[0] == "A" and status[3] == "+") or # Copied file. (status[0] == " " and status[1] == "M")): # Property change. args = [] if self.options.revision: url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start) else: # Don't change filename, it's needed later. 
url = filename args += ["-r", "BASE"] cmd = ["svn"] + args + ["propget", "svn:mime-type", url] mimetype, returncode = RunShellWithReturnCode(cmd) if returncode: # File does not exist in the requested revision. # Reset mimetype, it contains an error message. mimetype = "" get_base = False is_binary = bool(mimetype) and not mimetype.startswith("text/") if status[0] == " ": # Empty base content just to force an upload. base_content = "" elif is_binary: if self.IsImage(filename): get_base = True if status[0] == "M": if not self.rev_end: new_content = self.ReadFile(filename) else: url = "%s/%s@%s" % (self.svn_base, filename, self.rev_end) new_content = RunShell(["svn", "cat", url], universal_newlines=True, silent_ok=True) else: base_content = "" else: get_base = True if get_base: if is_binary: universal_newlines = False else: universal_newlines = True if self.rev_start: # "svn cat -r REV delete_file.txt" doesn't work. cat requires # the full URL with "@REV" appended instead of using "-r" option. 
url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start) base_content = RunShell(["svn", "cat", url], universal_newlines=universal_newlines, silent_ok=True) else: base_content = RunShell(["svn", "cat", filename], universal_newlines=universal_newlines, silent_ok=True) if not is_binary: args = [] if self.rev_start: url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start) else: url = filename args += ["-r", "BASE"] cmd = ["svn"] + args + ["propget", "svn:keywords", url] keywords, returncode = RunShellWithReturnCode(cmd) if keywords and not returncode: base_content = self._CollapseKeywords(base_content, keywords) else: StatusUpdate("svn status returned unexpected output: %s" % status) sys.exit(1) return base_content, new_content, is_binary, status[0:5] class GitVCS(VersionControlSystem): """Implementation of the VersionControlSystem interface for Git.""" def __init__(self, options): super(GitVCS, self).__init__(options) # Map of filename -> (hash before, hash after) of base file. # Hashes for "no such file" are represented as None. self.hashes = {} # Map of new filename -> old filename for renames. self.renames = {} def GenerateDiff(self, extra_args): # This is more complicated than svn's GenerateDiff because we must convert # the diff output to include an svn-style "Index:" line as well as record # the hashes of the files, so we can upload them along with our diff. # Special used by git to indicate "no such content". NULL_HASH = "0"*40 extra_args = extra_args[:] if self.options.revision: extra_args = [self.options.revision] + extra_args # --no-ext-diff is broken in some versions of Git, so try to work around # this by overriding the environment (but there is still a problem if the # git config key "diff.external" is used). 
env = os.environ.copy() if 'GIT_EXTERNAL_DIFF' in env: del env['GIT_EXTERNAL_DIFF'] gitdiff = RunShell(["git", "diff", "--no-ext-diff", "--full-index", "-M"] + extra_args, env=env) def IsFileNew(filename): return filename in self.hashes and self.hashes[filename][0] is None def AddSubversionPropertyChange(filename): """Add svn's property change information into the patch if given file is new file. We use Subversion's auto-props setting to retrieve its property. See http://svnbook.red-bean.com/en/1.1/ch07.html#svn-ch-7-sect-1.3.2 for Subversion's [auto-props] setting. """ if self.options.emulate_svn_auto_props and IsFileNew(filename): svnprops = GetSubversionPropertyChanges(filename) if svnprops: svndiff.append("\n" + svnprops + "\n") svndiff = [] filecount = 0 filename = None for line in gitdiff.splitlines(): match = re.match(r"diff --git a/(.*) b/(.*)$", line) if match: # Add auto property here for previously seen file. if filename is not None: AddSubversionPropertyChange(filename) filecount += 1 # Intentionally use the "after" filename so we can show renames. filename = match.group(2) svndiff.append("Index: %s\n" % filename) if match.group(1) != match.group(2): self.renames[match.group(2)] = match.group(1) else: # The "index" line in a git diff looks like this (long hashes elided): # index 82c0d44..b2cee3f 100755 # We want to save the left hash, as that identifies the base file. match = re.match(r"index (\w+)\.\.(\w+)", line) if match: before, after = (match.group(1), match.group(2)) if before == NULL_HASH: before = None if after == NULL_HASH: after = None self.hashes[filename] = (before, after) svndiff.append(line + "\n") if not filecount: ErrorExit("No valid patches found in output from git diff") # Add auto property for the last seen file. 
assert filename is not None AddSubversionPropertyChange(filename) return "".join(svndiff) def GetUnknownFiles(self): status = RunShell(["git", "ls-files", "--exclude-standard", "--others"], silent_ok=True) return status.splitlines() def GetFileContent(self, file_hash, is_binary): """Returns the content of a file identified by its git hash.""" data, retcode = RunShellWithReturnCode(["git", "show", file_hash], universal_newlines=not is_binary) if retcode: ErrorExit("Got error status from 'git show %s'" % file_hash) return data def GetBaseFile(self, filename): hash_before, hash_after = self.hashes.get(filename, (None,None)) base_content = None new_content = None is_binary = self.IsBinary(filename) status = None if filename in self.renames: status = "A +" # Match svn attribute name for renames. if filename not in self.hashes: # If a rename doesn't change the content, we never get a hash. base_content = RunShell(["git", "show", "HEAD:" + filename]) elif not hash_before: status = "A" base_content = "" elif not hash_after: status = "D" else: status = "M" is_image = self.IsImage(filename) # Grab the before/after content if we need it. # We should include file contents if it's text or it's an image. if not is_binary or is_image: # Grab the base content if we don't have it already. if base_content is None and hash_before: base_content = self.GetFileContent(hash_before, is_binary) # Only include the "after" file if it's an image; otherwise it # it is reconstructed from the diff. 
if is_image and hash_after: new_content = self.GetFileContent(hash_after, is_binary) return (base_content, new_content, is_binary, status) class MercurialVCS(VersionControlSystem): """Implementation of the VersionControlSystem interface for Mercurial.""" def __init__(self, options, repo_dir): super(MercurialVCS, self).__init__(options) # Absolute path to repository (we can be in a subdir) self.repo_dir = os.path.normpath(repo_dir) # Compute the subdir cwd = os.path.normpath(os.getcwd()) assert cwd.startswith(self.repo_dir) self.subdir = cwd[len(self.repo_dir):].lstrip(r"\/") if self.options.revision: self.base_rev = self.options.revision else: self.base_rev = RunShell(["hg", "parent", "-q"]).split(':')[1].strip() def _GetRelPath(self, filename): """Get relative path of a file according to the current directory, given its logical path in the repo.""" assert filename.startswith(self.subdir), (filename, self.subdir) return filename[len(self.subdir):].lstrip(r"\/") def GenerateDiff(self, extra_args): # If no file specified, restrict to the current subdir extra_args = extra_args or ["."] cmd = ["hg", "diff", "--git", "-r", self.base_rev] + extra_args data = RunShell(cmd, silent_ok=True) svndiff = [] filecount = 0 for line in data.splitlines(): m = re.match("diff --git a/(\S+) b/(\S+)", line) if m: # Modify line to make it look like as it comes from svn diff. # With this modification no changes on the server side are required # to make upload.py work with Mercurial repos. # NOTE: for proper handling of moved/copied files, we have to use # the second filename. 
filename = m.group(2) svndiff.append("Index: %s" % filename) svndiff.append("=" * 67) filecount += 1 logging.info(line) else: svndiff.append(line) if not filecount: ErrorExit("No valid patches found in output from hg diff") return "\n".join(svndiff) + "\n" def GetUnknownFiles(self): """Return a list of files unknown to the VCS.""" args = [] status = RunShell(["hg", "status", "--rev", self.base_rev, "-u", "."], silent_ok=True) unknown_files = [] for line in status.splitlines(): st, fn = line.split(" ", 1) if st == "?": unknown_files.append(fn) return unknown_files def GetBaseFile(self, filename): # "hg status" and "hg cat" both take a path relative to the current subdir # rather than to the repo root, but "hg diff" has given us the full path # to the repo root. base_content = "" new_content = None is_binary = False oldrelpath = relpath = self._GetRelPath(filename) # "hg status -C" returns two lines for moved/copied files, one otherwise out = RunShell(["hg", "status", "-C", "--rev", self.base_rev, relpath]) out = out.splitlines() # HACK: strip error message about missing file/directory if it isn't in # the working copy if out[0].startswith('%s: ' % relpath): out = out[1:] if len(out) > 1: # Moved/copied => considered as modified, use old filename to # retrieve base contents oldrelpath = out[1].strip() status = "M" else: status, _ = out[0].split(' ', 1) if ":" in self.base_rev: base_rev = self.base_rev.split(":", 1)[0] else: base_rev = self.base_rev if status != "A": base_content = RunShell(["hg", "cat", "-r", base_rev, oldrelpath], silent_ok=True) is_binary = "\0" in base_content # Mercurial's heuristic if status != "R": new_content = open(relpath, "rb").read() is_binary = is_binary or "\0" in new_content if is_binary and base_content: # Fetch again without converting newlines base_content = RunShell(["hg", "cat", "-r", base_rev, oldrelpath], silent_ok=True, universal_newlines=False) if not is_binary or not self.IsImage(relpath): new_content = None return 
base_content, new_content, is_binary, status # NOTE: The SplitPatch function is duplicated in engine.py, keep them in sync. def SplitPatch(data): """Splits a patch into separate pieces for each file. Args: data: A string containing the output of svn diff. Returns: A list of 2-tuple (filename, text) where text is the svn diff output pertaining to filename. """ patches = [] filename = None diff = [] for line in data.splitlines(True): new_filename = None if line.startswith('Index:'): unused, new_filename = line.split(':', 1) new_filename = new_filename.strip() elif line.startswith('Property changes on:'): unused, temp_filename = line.split(':', 1) # When a file is modified, paths use '/' between directories, however # when a property is modified '\' is used on Windows. Make them the same # otherwise the file shows up twice. temp_filename = temp_filename.strip().replace('\\', '/') if temp_filename != filename: # File has property changes but no modifications, create a new diff. new_filename = temp_filename if new_filename: if filename and diff: patches.append((filename, ''.join(diff))) filename = new_filename diff = [line] continue if diff is not None: diff.append(line) if filename and diff: patches.append((filename, ''.join(diff))) return patches def UploadSeparatePatches(issue, rpc_server, patchset, data, options): """Uploads a separate patch for each file in the diff output. Returns a list of [patch_key, filename] for each file. 
""" patches = SplitPatch(data) rv = [] for patch in patches: if len(patch[1]) > MAX_UPLOAD_SIZE: print ("Not uploading the patch for " + patch[0] + " because the file is too large.") continue form_fields = [("filename", patch[0])] if not options.download_base: form_fields.append(("content_upload", "1")) files = [("data", "data.diff", patch[1])] ctype, body = EncodeMultipartFormData(form_fields, files) url = "/%d/upload_patch/%d" % (int(issue), int(patchset)) print "Uploading patch for " + patch[0] response_body = rpc_server.Send(url, body, content_type=ctype) lines = response_body.splitlines() if not lines or lines[0] != "OK": StatusUpdate(" --> %s" % response_body) sys.exit(1) rv.append([lines[1], patch[0]]) return rv def GuessVCSName(): """Helper to guess the version control system. This examines the current directory, guesses which VersionControlSystem we're using, and returns an string indicating which VCS is detected. Returns: A pair (vcs, output). vcs is a string indicating which VCS was detected and is one of VCS_GIT, VCS_MERCURIAL, VCS_SUBVERSION, or VCS_UNKNOWN. output is a string containing any interesting output from the vcs detection routine, or None if there is nothing interesting. """ # Mercurial has a command to get the base directory of a repository # Try running it, but don't die if we don't have hg installed. # NOTE: we try Mercurial first as it can sit on top of an SVN working copy. try: out, returncode = RunShellWithReturnCode(["hg", "root"]) if returncode == 0: return (VCS_MERCURIAL, out.strip()) except OSError, (errno, message): if errno != 2: # ENOENT -- they don't have hg installed. raise # Subversion has a .svn in all working directories. if os.path.isdir('.svn'): logging.info("Guessed VCS = Subversion") return (VCS_SUBVERSION, None) # Git has a command to test if you're in a git tree. # Try running it, but don't die if we don't have git installed. 
try: out, returncode = RunShellWithReturnCode(["git", "rev-parse", "--is-inside-work-tree"]) if returncode == 0: return (VCS_GIT, None) except OSError, (errno, message): if errno != 2: # ENOENT -- they don't have git installed. raise return (VCS_UNKNOWN, None) def GuessVCS(options): """Helper to guess the version control system. This verifies any user-specified VersionControlSystem (by command line or environment variable). If the user didn't specify one, this examines the current directory, guesses which VersionControlSystem we're using, and returns an instance of the appropriate class. Exit with an error if we can't figure it out. Returns: A VersionControlSystem instance. Exits if the VCS can't be guessed. """ vcs = options.vcs if not vcs: vcs = os.environ.get("CODEREVIEW_VCS") if vcs: v = VCS_ABBREVIATIONS.get(vcs.lower()) if v is None: ErrorExit("Unknown version control system %r specified." % vcs) (vcs, extra_output) = (v, None) else: (vcs, extra_output) = GuessVCSName() if vcs == VCS_MERCURIAL: if extra_output is None: extra_output = RunShell(["hg", "root"]).strip() return MercurialVCS(options, extra_output) elif vcs == VCS_SUBVERSION: return SubversionVCS(options) elif vcs == VCS_GIT: return GitVCS(options) ErrorExit(("Could not guess version control system. " "Are you in a working copy directory?")) def CheckReviewer(reviewer): """Validate a reviewer -- either a nickname or an email addres. Args: reviewer: A nickname or an email address. Calls ErrorExit() if it is an invalid email address. """ if "@" not in reviewer: return # Assume nickname parts = reviewer.split("@") if len(parts) > 2: ErrorExit("Invalid email address: %r" % reviewer) assert len(parts) == 2 if "." not in parts[1]: ErrorExit("Invalid email address: %r" % reviewer) def LoadSubversionAutoProperties(): """Returns the content of [auto-props] section of Subversion's config file as a dictionary. Returns: A dictionary whose key-value pair corresponds the [auto-props] section's key-value pair. 
In following cases, returns empty dictionary: - config file doesn't exist, or - 'enable-auto-props' is not set to 'true-like-value' in [miscellany]. """ # Todo(hayato): Windows users might use different path for configuration file. subversion_config = os.path.expanduser("~/.subversion/config") if not os.path.exists(subversion_config): return {} config = ConfigParser.ConfigParser() config.read(subversion_config) if (config.has_section("miscellany") and config.has_option("miscellany", "enable-auto-props") and config.getboolean("miscellany", "enable-auto-props") and config.has_section("auto-props")): props = {} for file_pattern in config.options("auto-props"): props[file_pattern] = ParseSubversionPropertyValues( config.get("auto-props", file_pattern)) return props else: return {} def ParseSubversionPropertyValues(props): """Parse the given property value which comes from [auto-props] section and returns a list whose element is a (svn_prop_key, svn_prop_value) pair. See the following doctest for example. >>> ParseSubversionPropertyValues('svn:eol-style=LF') [('svn:eol-style', 'LF')] >>> ParseSubversionPropertyValues('svn:mime-type=image/jpeg') [('svn:mime-type', 'image/jpeg')] >>> ParseSubversionPropertyValues('svn:eol-style=LF;svn:executable') [('svn:eol-style', 'LF'), ('svn:executable', '*')] """ key_value_pairs = [] for prop in props.split(";"): key_value = prop.split("=") assert len(key_value) <= 2 if len(key_value) == 1: # If value is not given, use '*' as a Subversion's convention. key_value_pairs.append((key_value[0], "*")) else: key_value_pairs.append((key_value[0], key_value[1])) return key_value_pairs def GetSubversionPropertyChanges(filename): """Return a Subversion's 'Property changes on ...' string, which is used in the patch file. Args: filename: filename whose property might be set by [auto-props] config. Returns: A string like 'Property changes on |filename| ...' if given |filename| matches any entries in [auto-props] section. None, otherwise. 
""" global svn_auto_props_map if svn_auto_props_map is None: svn_auto_props_map = LoadSubversionAutoProperties() all_props = [] for file_pattern, props in svn_auto_props_map.items(): if fnmatch.fnmatch(filename, file_pattern): all_props.extend(props) if all_props: return FormatSubversionPropertyChanges(filename, all_props) return None def FormatSubversionPropertyChanges(filename, props): """Returns Subversion's 'Property changes on ...' strings using given filename and properties. Args: filename: filename props: A list whose element is a (svn_prop_key, svn_prop_value) pair. Returns: A string which can be used in the patch file for Subversion. See the following doctest for example. >>> print FormatSubversionPropertyChanges('foo.cc', [('svn:eol-style', 'LF')]) Property changes on: foo.cc ___________________________________________________________________ Added: svn:eol-style + LF <BLANKLINE> """ prop_changes_lines = [ "Property changes on: %s" % filename, "___________________________________________________________________"] for key, value in props: prop_changes_lines.append("Added: " + key) prop_changes_lines.append(" + " + value) return "\n".join(prop_changes_lines) + "\n" def RealMain(argv, data=None): """The real main function. Args: argv: Command line arguments. data: Diff contents. If None (default) the diff is generated by the VersionControlSystem implementation returned by GuessVCS(). Returns: A 2-tuple (issue id, patchset id). The patchset id is None if the base files are not uploaded by this script (applies only to SVN checkouts). 
""" logging.basicConfig(format=("%(asctime).19s %(levelname)s %(filename)s:" "%(lineno)s %(message)s ")) os.environ['LC_ALL'] = 'C' options, args = parser.parse_args(argv[1:]) global verbosity verbosity = options.verbose if verbosity >= 3: logging.getLogger().setLevel(logging.DEBUG) elif verbosity >= 2: logging.getLogger().setLevel(logging.INFO) vcs = GuessVCS(options) base = options.base_url if isinstance(vcs, SubversionVCS): # Guessing the base field is only supported for Subversion. # Note: Fetching base files may become deprecated in future releases. guessed_base = vcs.GuessBase(options.download_base) if base: if guessed_base and base != guessed_base: print "Using base URL \"%s\" from --base_url instead of \"%s\"" % \ (base, guessed_base) else: base = guessed_base if not base and options.download_base: options.download_base = True logging.info("Enabled upload of base file") if not options.assume_yes: vcs.CheckForUnknownFiles() if data is None: data = vcs.GenerateDiff(args) files = vcs.GetBaseFiles(data) if verbosity >= 1: print "Upload server:", options.server, "(change with -s/--server)" if options.issue: prompt = "Message describing this patch set: " else: prompt = "New issue subject: " message = options.message or raw_input(prompt).strip() if not message: ErrorExit("A non-empty message is required") rpc_server = GetRpcServer(options) form_fields = [("subject", message)] if base: form_fields.append(("base", base)) if options.issue: form_fields.append(("issue", str(options.issue))) if options.email: form_fields.append(("user", options.email)) if options.reviewers: for reviewer in options.reviewers.split(','): CheckReviewer(reviewer) form_fields.append(("reviewers", options.reviewers)) if options.cc: for cc in options.cc.split(','): CheckReviewer(cc) form_fields.append(("cc", options.cc)) description = options.description if options.description_file: if options.description: ErrorExit("Can't specify description and description_file") file = 
open(options.description_file, 'r') description = file.read() file.close() if description: form_fields.append(("description", description)) # Send a hash of all the base file so the server can determine if a copy # already exists in an earlier patchset. base_hashes = "" for file, info in files.iteritems(): if not info[0] is None: checksum = md5(info[0]).hexdigest() if base_hashes: base_hashes += "|" base_hashes += checksum + ":" + file form_fields.append(("base_hashes", base_hashes)) if options.private: if options.issue: print "Warning: Private flag ignored when updating an existing issue." else: form_fields.append(("private", "1")) # If we're uploading base files, don't send the email before the uploads, so # that it contains the file status. if options.send_mail and options.download_base: form_fields.append(("send_mail", "1")) if not options.download_base: form_fields.append(("content_upload", "1")) if len(data) > MAX_UPLOAD_SIZE: print "Patch is large, so uploading file patches separately." 
uploaded_diff_file = [] form_fields.append(("separate_patches", "1")) else: uploaded_diff_file = [("data", "data.diff", data)] ctype, body = EncodeMultipartFormData(form_fields, uploaded_diff_file) response_body = rpc_server.Send("/upload", body, content_type=ctype) patchset = None if not options.download_base or not uploaded_diff_file: lines = response_body.splitlines() if len(lines) >= 2: msg = lines[0] patchset = lines[1].strip() patches = [x.split(" ", 1) for x in lines[2:]] else: msg = response_body else: msg = response_body StatusUpdate(msg) if not response_body.startswith("Issue created.") and \ not response_body.startswith("Issue updated."): sys.exit(0) issue = msg[msg.rfind("/")+1:] if not uploaded_diff_file: result = UploadSeparatePatches(issue, rpc_server, patchset, data, options) if not options.download_base: patches = result if not options.download_base: vcs.UploadBaseFiles(issue, rpc_server, patches, patchset, options, files) if options.send_mail: rpc_server.Send("/" + issue + "/mail", payload="") return issue, patchset def main(): try: RealMain(sys.argv) except KeyboardInterrupt: print StatusUpdate("Interrupted.") sys.exit(1) if __name__ == "__main__": main()
Python
#!/usr/bin/python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Installation script for the gdata Python client library."""

import sys

# BUG FIX: this script passes install_requires=, which is a setuptools-only
# keyword; plain distutils silently ignores it ("Unknown distribution
# option" warning), so the elementtree dependency below was never enforced.
# Prefer setuptools when available and fall back to distutils otherwise.
try:
  from setuptools import setup
except ImportError:
  from distutils.core import setup

# Python versions before 2.5 lack xml.etree; pull in the standalone
# elementtree package for them.
required = []
if sys.version_info[:3] < (2, 5, 0):
  required.append('elementtree')

setup(
    name='gdata',
    version='2.0.14',
    description='Python client library for Google data APIs',
    long_description = """\
The Google data Python client library makes it easy to interact with
Google services through the Google Data APIs. This library provides data
models and service modules for the following Google data services:
- Google Calendar data API
- Google Contacts data API
- Google Spreadsheets data API
- Google Document List data APIs
- Google Base data API
- Google Apps Provisioning API
- Google Apps Email Migration API
- Google Apps Email Settings API
- Picasa Web Albums Data API
- Google Code Search Data API
- YouTube Data API
- Google Webmaster Tools Data API
- Blogger Data API
- Google Health API
- Google Book Search API
- Google Analytics API
- Google Finance API
- Google Sites Data API
- Google Content API For Shopping
- Google App Marketplace API
- core Google data API functionality
The core Google data code provides sufficient functionality to use this
library with any Google data API (even if a module hasn't been written for
it yet). For example, this client can be used with the Notebook API. This
library may also be used with any Atom Publishing Protocol service (AtomPub).
""",
    author='Jeffrey Scudder',
    author_email='j.s@google.com',
    license='Apache 2.0',
    url='http://code.google.com/p/gdata-python-client/',
    packages=[
      'atom',
      'gdata',
      'gdata.Crypto',
      'gdata.Crypto.Cipher',
      'gdata.Crypto.Hash',
      'gdata.Crypto.Protocol',
      'gdata.Crypto.PublicKey',
      'gdata.Crypto.Util',
      'gdata.acl',
      'gdata.alt',
      'gdata.analytics',
      'gdata.apps',
      'gdata.apps.adminsettings',
      'gdata.apps.audit',
      'gdata.apps.emailsettings',
      'gdata.apps.groups',
      'gdata.apps.migration',
      'gdata.apps.organization',
      'gdata.base',
      'gdata.blogger',
      'gdata.books',
      'gdata.calendar',
      'gdata.calendar_resource',
      'gdata.codesearch',
      'gdata.contacts',
      'gdata.contentforshopping',
      'gdata.docs',
      'gdata.dublincore',
      'gdata.exif',
      'gdata.finance',
      'gdata.geo',
      'gdata.health',
      'gdata.media',
      'gdata.notebook',
      'gdata.oauth',
      'gdata.opensearch',
      'gdata.photos',
      'gdata.projecthosting',
      'gdata.sites',
      'gdata.spreadsheet',
      'gdata.spreadsheets',
      'gdata.tlslite',
      'gdata.tlslite.integration',
      'gdata.tlslite.utils',
      'gdata.webmastertools',
      'gdata.youtube',
    ],
    package_dir = {'gdata':'src/gdata', 'atom':'src/atom'},
    install_requires=required
)
Python
#!/usr/bin/python
#
# Copyright (C) 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Classes to interact with the Blogger server."""

__author__ = 'api.jscudder (Jeffrey Scudder)'

import gdata.service
import gdata.blogger


class BloggerService(gdata.service.GDataService):
  """GData service client for the Blogger API (v1-style interface)."""

  def __init__(self, email=None, password=None, source=None,
               server='www.blogger.com', **kwargs):
    """Creates a client for the Blogger service.

    Args:
      email: string (optional) The user's email address, used for
          authentication.
      password: string (optional) The user's password.
      source: string (optional) The name of the user's application.
      server: string (optional) The name of the server to which a connection
          will be opened. Default value: 'www.blogger.com'.
      **kwargs: The other parameters to pass to gdata.service.GDataService
          constructor.
    """
    gdata.service.GDataService.__init__(
        self, email=email, password=password, service='blogger',
        source=source, server=server, **kwargs)

  def GetBlogFeed(self, uri=None):
    """Retrieve a list of the blogs to which the current user may manage."""
    if not uri:
      uri = '/feeds/default/blogs'
    return self.Get(uri, converter=gdata.blogger.BlogFeedFromString)

  def GetBlogCommentFeed(self, blog_id=None, uri=None):
    """Retrieve a list of the comments for this blog."""
    if blog_id:
      uri = '/feeds/%s/comments/default' % blog_id
    return self.Get(uri, converter=gdata.blogger.CommentFeedFromString)

  def GetBlogPostFeed(self, blog_id=None, uri=None):
    """Retrieve the feed of posts for the blog with the given id or uri."""
    if blog_id:
      # An explicit blog_id overrides any uri that was passed in.
      uri = '/feeds/%s/posts/default' % blog_id
    return self.Get(uri, converter=gdata.blogger.BlogPostFeedFromString)

  def GetPostCommentFeed(self, blog_id=None, post_id=None, uri=None):
    """Retrieve a list of the comments for this particular blog post."""
    if blog_id and post_id:
      uri = '/feeds/%s/%s/comments/default' % (blog_id, post_id)
    return self.Get(uri, converter=gdata.blogger.CommentFeedFromString)

  def AddPost(self, entry, blog_id=None, uri=None):
    """Add a new post entry to the blog with the given id or feed uri."""
    if blog_id:
      uri = '/feeds/%s/posts/default' % blog_id
    return self.Post(entry, uri,
                     converter=gdata.blogger.BlogPostEntryFromString)

  def UpdatePost(self, entry, uri=None):
    """Overwrite an existing post; uses the entry's edit link when no uri."""
    if not uri:
      uri = entry.GetEditLink().href
    return self.Put(entry, uri,
                    converter=gdata.blogger.BlogPostEntryFromString)

  def DeletePost(self, entry=None, uri=None):
    """Delete a post, either by explicit uri or via the entry's edit link."""
    if not uri:
      # NOTE(review): raises AttributeError if both entry and uri are None.
      uri = entry.GetEditLink().href
    return self.Delete(uri)

  def AddComment(self, comment_entry, blog_id=None, post_id=None, uri=None):
    """Adds a new comment to the specified blog post."""
    if blog_id and post_id:
      uri = '/feeds/%s/%s/comments/default' % (blog_id, post_id)
    return self.Post(comment_entry, uri,
                     converter=gdata.blogger.CommentEntryFromString)

  def DeleteComment(self, entry=None, uri=None):
    """Delete a comment, either by explicit uri or via the entry's edit link."""
    if not uri:
      uri = entry.GetEditLink().href
    return self.Delete(uri)


class BlogQuery(gdata.service.Query):
  """Query over the feed of blogs belonging to the current user."""

  def __init__(self, feed=None, params=None, categories=None, blog_id=None):
    """Constructs a query object for the list of a user's Blogger blogs.

    Args:
      feed: str (optional) The beginning of the URL to be queried. If the
          feed is not set, and there is no blog_id passed in, the default
          value is used ('/feeds/default/blogs').
      params: dict (optional)
      categories: list (optional)
      blog_id: str (optional)
    """
    if not feed and blog_id:
      feed = '/feeds/default/blogs/%s' % blog_id
    elif not feed:
      feed = '/feeds/default/blogs'
    gdata.service.Query.__init__(self, feed=feed, params=params,
                                 categories=categories)


class BlogPostQuery(gdata.service.Query):
  """Query over a blog's post feed, or a single post when post_id is given."""

  def __init__(self, feed=None, params=None, categories=None, blog_id=None,
               post_id=None):
    # An explicit feed wins; otherwise build the uri from the ids provided.
    if not feed and blog_id and post_id:
      feed = '/feeds/%s/posts/default/%s' % (blog_id, post_id)
    elif not feed and blog_id:
      feed = '/feeds/%s/posts/default' % blog_id
    gdata.service.Query.__init__(self, feed=feed, params=params,
                                 categories=categories)


class BlogCommentQuery(gdata.service.Query):
  """Query over comments: per-blog, per-post, or a single comment by id."""

  def __init__(self, feed=None, params=None, categories=None, blog_id=None,
               post_id=None, comment_id=None):
    # Most specific combination of ids wins when no explicit feed is given.
    if not feed and blog_id and comment_id:
      feed = '/feeds/%s/comments/default/%s' % (blog_id, comment_id)
    elif not feed and blog_id and post_id:
      feed = '/feeds/%s/%s/comments/default' % (blog_id, post_id)
    elif not feed and blog_id:
      feed = '/feeds/%s/comments/default' % blog_id
    gdata.service.Query.__init__(self, feed=feed, params=params,
                                 categories=categories)
Python
#!/usr/bin/env python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Contains a client to communicate with the Blogger servers.

For documentation on the Blogger API, see:
http://code.google.com/apis/blogger/
"""

__author__ = 'j.s@google.com (Jeff Scudder)'

import gdata.client
import gdata.gauth
import gdata.blogger.data
import atom.data
import atom.http_core

# List user's blogs, takes a user ID, or 'default'.
BLOGS_URL = 'http://www.blogger.com/feeds/%s/blogs'
# Takes a blog ID.
BLOG_POST_URL = 'http://www.blogger.com/feeds/%s/posts/default'
# Takes a blog ID.
BLOG_PAGE_URL = 'http://www.blogger.com/feeds/%s/pages/default'
# Takes a blog ID and post ID.
BLOG_POST_COMMENTS_URL = 'http://www.blogger.com/feeds/%s/%s/comments/default'
# Takes a blog ID.
BLOG_COMMENTS_URL = 'http://www.blogger.com/feeds/%s/comments/default'
# Takes a blog ID.
BLOG_ARCHIVE_URL = 'http://www.blogger.com/feeds/%s/archive/full'


class BloggerClient(gdata.client.GDClient):
  """GData v2 client for the Blogger API.

  Each snake_case method has a CamelCase alias for backward compatibility
  with the older v1-style naming convention.
  """
  api_version = '2'
  auth_service = 'blogger'
  auth_scopes = gdata.gauth.AUTH_SCOPES['blogger']

  def get_blogs(self, user_id='default', auth_token=None,
                desired_class=gdata.blogger.data.BlogFeed, **kwargs):
    """Fetch the feed of blogs belonging to user_id ('default' = auth user)."""
    return self.get_feed(BLOGS_URL % user_id, auth_token=auth_token,
                         desired_class=desired_class, **kwargs)

  GetBlogs = get_blogs

  def get_posts(self, blog_id, auth_token=None,
                desired_class=gdata.blogger.data.BlogPostFeed, query=None,
                **kwargs):
    """Fetch the feed of posts for the given blog."""
    return self.get_feed(BLOG_POST_URL % blog_id, auth_token=auth_token,
                         desired_class=desired_class, query=query, **kwargs)

  GetPosts = get_posts

  def get_pages(self, blog_id, auth_token=None,
                desired_class=gdata.blogger.data.BlogPageFeed, query=None,
                **kwargs):
    """Fetch the feed of static pages for the given blog."""
    return self.get_feed(BLOG_PAGE_URL % blog_id, auth_token=auth_token,
                         desired_class=desired_class, query=query, **kwargs)

  GetPages = get_pages

  def get_post_comments(self, blog_id, post_id, auth_token=None,
                        desired_class=gdata.blogger.data.CommentFeed,
                        query=None, **kwargs):
    """Fetch the feed of comments on one post of the given blog."""
    return self.get_feed(BLOG_POST_COMMENTS_URL % (blog_id, post_id),
                         auth_token=auth_token, desired_class=desired_class,
                         query=query, **kwargs)

  GetPostComments = get_post_comments

  def get_blog_comments(self, blog_id, auth_token=None,
                        desired_class=gdata.blogger.data.CommentFeed,
                        query=None, **kwargs):
    """Fetch the feed of all comments across the given blog."""
    return self.get_feed(BLOG_COMMENTS_URL % blog_id, auth_token=auth_token,
                         desired_class=desired_class, query=query, **kwargs)

  GetBlogComments = get_blog_comments

  def get_blog_archive(self, blog_id, auth_token=None, **kwargs):
    """Fetch the full export/archive feed for the given blog."""
    return self.get_feed(BLOG_ARCHIVE_URL % blog_id, auth_token=auth_token,
                         **kwargs)

  GetBlogArchive = get_blog_archive

  def add_post(self, blog_id, title, body, labels=None, draft=False,
               auth_token=None, title_type='text', body_type='html',
               **kwargs):
    """Create a new post on the given blog; draft=True saves without publishing."""
    # Construct an atom Entry for the blog post to be sent to the server.
    new_entry = gdata.blogger.data.BlogPost(
        title=atom.data.Title(text=title, type=title_type),
        content=atom.data.Content(text=body, type=body_type))
    if labels:
      for label in labels:
        new_entry.add_label(label)
    if draft:
      # The app:control/app:draft element marks the post as unpublished.
      new_entry.control = atom.data.Control(draft=atom.data.Draft(text='yes'))
    return self.post(new_entry, BLOG_POST_URL % blog_id,
                     auth_token=auth_token, **kwargs)

  AddPost = add_post

  def add_page(self, blog_id, title, body, draft=False, auth_token=None,
               title_type='text', body_type='html', **kwargs):
    """Create a new static page on the given blog."""
    new_entry = gdata.blogger.data.BlogPage(
        title=atom.data.Title(text=title, type=title_type),
        content=atom.data.Content(text=body, type=body_type))
    if draft:
      new_entry.control = atom.data.Control(draft=atom.data.Draft(text='yes'))
    return self.post(new_entry, BLOG_PAGE_URL % blog_id,
                     auth_token=auth_token, **kwargs)

  AddPage = add_page

  def add_comment(self, blog_id, post_id, body, auth_token=None,
                  title_type='text', body_type='html', **kwargs):
    """Add a comment to the given post of the given blog."""
    new_entry = gdata.blogger.data.Comment(
        content=atom.data.Content(text=body, type=body_type))
    return self.post(new_entry, BLOG_POST_COMMENTS_URL % (blog_id, post_id),
                     auth_token=auth_token, **kwargs)

  AddComment = add_comment

  def update(self, entry, auth_token=None, **kwargs):
    """Overwrite an entry on the server, working around missing ETag support."""
    # The Blogger API does not currently support ETags, so for now remove
    # the ETag before performing an update.
    old_etag = entry.etag
    entry.etag = None
    response = gdata.client.GDClient.update(self, entry,
                                            auth_token=auth_token, **kwargs)
    # Restore the caller's entry object to its original state.
    entry.etag = old_etag
    return response

  Update = update

  def delete(self, entry_or_uri, auth_token=None, **kwargs):
    """Delete an entry, accepting either an entry object or a URI."""
    if isinstance(entry_or_uri, (str, unicode, atom.http_core.Uri)):
      return gdata.client.GDClient.delete(self, entry_or_uri,
                                          auth_token=auth_token, **kwargs)
    # The Blogger API does not currently support ETags, so for now remove
    # the ETag before performing a delete.
    old_etag = entry_or_uri.etag
    entry_or_uri.etag = None
    response = gdata.client.GDClient.delete(self, entry_or_uri,
                                            auth_token=auth_token, **kwargs)
    # TODO: if GDClient.delete raises and exception, the entry's etag may be
    # left as None. Should revisit this logic.
    entry_or_uri.etag = old_etag
    return response

  Delete = delete


class Query(gdata.client.Query):
  """Blogger query that adds the 'orderby' request parameter."""

  def __init__(self, order_by=None, **kwargs):
    gdata.client.Query.__init__(self, **kwargs)
    # order_by: e.g. 'published' or 'updated'; omitted from the URL if None.
    self.order_by = order_by

  def modify_request(self, http_request):
    gdata.client._add_query_param('orderby', self.order_by, http_request)
    gdata.client.Query.modify_request(self, http_request)

  ModifyRequest = modify_request
Python
#!/usr/bin/env python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Data model classes for parsing and generating XML for the Blogger API."""

__author__ = 'j.s@google.com (Jeff Scudder)'

import re
import urlparse
import atom.core
# BUGFIX: atom.data is used below (BlogPost.add_label constructs an
# atom.data.Category) but was never imported here; it only resolved when
# some other module had already imported it.  Import it explicitly.
import atom.data
import gdata.data

LABEL_SCHEME = 'http://www.blogger.com/atom/ns#'
THR_TEMPLATE = '{http://purl.org/syndication/thread/1.0}%s'

# Patterns are raw strings so backslash escapes (\w, \d) reach the regex
# engine verbatim instead of relying on Python's lenient string escapes.
BLOG_NAME_PATTERN = re.compile(r'(http://)(\w*)')
BLOG_ID_PATTERN = re.compile(r'(tag:blogger.com,1999:blog-)(\w*)')
BLOG_ID2_PATTERN = re.compile(r'tag:blogger.com,1999:user-(\d+)\.blog-(\d+)')
POST_ID_PATTERN = re.compile(
    r'(tag:blogger.com,1999:blog-)(\w*)(.post-)(\w*)')
PAGE_ID_PATTERN = re.compile(
    r'(tag:blogger.com,1999:blog-)(\w*)(.page-)(\w*)')
COMMENT_ID_PATTERN = re.compile(r'.*-(\w*)$')


class BloggerEntry(gdata.data.GDEntry):
  """Adds convenience methods inherited by all Blogger entries."""

  def get_blog_id(self):
    """Extracts the Blogger id of this blog.

    This method is useful when contructing URLs by hand. The blog id is
    often used in blogger operation URLs. This should not be confused with
    the id member of a BloggerBlog. The id element is the Atom id XML element.
    The blog id which this method returns is a part of the Atom id.

    Returns:
      The blog's unique id as a string.
    """
    if self.id.text:
      match = BLOG_ID_PATTERN.match(self.id.text)
      if match:
        return match.group(2)
      else:
        # Fall back to the newer 'user-N.blog-N' style Atom id.
        return BLOG_ID2_PATTERN.match(self.id.text).group(2)
    return None

  GetBlogId = get_blog_id

  def get_blog_name(self):
    """Finds the name of this blog as used in the 'alternate' URL.

    An alternate URL is in the form 'http://blogName.blogspot.com/'. For an
    entry representing the above example, this method would return 'blogName'.

    Returns:
      The blog's URL name component as a string.
    """
    for link in self.link:
      if link.rel == 'alternate':
        # Take the hostname's first dotted component, e.g. 'blogName'.
        return urlparse.urlparse(link.href)[1].split(".", 1)[0]
    return None

  GetBlogName = get_blog_name


class Blog(BloggerEntry):
  """Represents a blog which belongs to the user."""


class BlogFeed(gdata.data.GDFeed):
  """Feed whose entries are the user's blogs."""
  entry = [Blog]


class BlogPost(BloggerEntry):
  """Represents a single post on a blog."""

  def add_label(self, label):
    """Adds a label to the blog post.

    The label is represented by an Atom category element, so this method
    is shorthand for appending a new atom.Category object.

    Args:
      label: str
    """
    self.category.append(atom.data.Category(scheme=LABEL_SCHEME, term=label))

  AddLabel = add_label

  def get_post_id(self):
    """Extracts the postID string from the entry's Atom id.

    Returns: A string of digits which identify this post within the blog.
    """
    if self.id.text:
      return POST_ID_PATTERN.match(self.id.text).group(4)
    return None

  GetPostId = get_post_id


class BlogPostFeed(gdata.data.GDFeed):
  """Feed whose entries are a blog's posts."""
  entry = [BlogPost]


class BlogPage(BloggerEntry):
  """Represents a single page on a blog."""

  def get_page_id(self):
    """Extracts the pageID string from entry's Atom id.

    Returns: A string of digits which identify this post within the blog.
    """
    if self.id.text:
      return PAGE_ID_PATTERN.match(self.id.text).group(4)
    return None

  GetPageId = get_page_id


class BlogPageFeed(gdata.data.GDFeed):
  """Feed whose entries are a blog's static pages."""
  entry = [BlogPage]


class InReplyTo(atom.core.XmlElement):
  """thr:in-reply-to element linking a comment to the post it replies to."""
  _qname = THR_TEMPLATE % 'in-reply-to'
  href = 'href'
  ref = 'ref'
  source = 'source'
  type = 'type'


class Comment(BloggerEntry):
  """Blog post comment entry in a feed listing comments on a post or blog."""
  in_reply_to = InReplyTo

  def get_comment_id(self):
    """Extracts the commentID string from the entry's Atom id.

    Returns: A string of digits which identify this post within the blog.
    """
    if self.id.text:
      return COMMENT_ID_PATTERN.match(self.id.text).group(1)
    return None

  GetCommentId = get_comment_id


class CommentFeed(gdata.data.GDFeed):
  """Feed whose entries are comments."""
  entry = [Comment]
Python
#!/usr/bin/python
#
# Copyright (C) 2007, 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Contains extensions to Atom objects used with Blogger."""

__author__ = 'api.jscudder (Jeffrey Scudder)'

import atom
import gdata
import re

LABEL_SCHEME = 'http://www.blogger.com/atom/ns#'
THR_NAMESPACE = 'http://purl.org/syndication/thread/1.0'


class BloggerEntry(gdata.GDataEntry):
  """Adds convenience methods inherited by all Blogger entries."""
  # Patterns used to extract ids from the entry's Atom id text.
  blog_name_pattern = re.compile('(http://)(\w*)')
  blog_id_pattern = re.compile('(tag:blogger.com,1999:blog-)(\w*)')
  blog_id2_pattern = re.compile('tag:blogger.com,1999:user-(\d+)\.blog-(\d+)')

  def GetBlogId(self):
    """Extracts the Blogger id of this blog.

    This method is useful when contructing URLs by hand. The blog id is
    often used in blogger operation URLs. This should not be confused with
    the id member of a BloggerBlog. The id element is the Atom id XML element.
    The blog id which this method returns is a part of the Atom id.

    Returns:
      The blog's unique id as a string.
    """
    if self.id.text:
      match = self.blog_id_pattern.match(self.id.text)
      if match:
        return match.group(2)
      else:
        # Fall back to the newer 'user-N.blog-N' style Atom id.
        return self.blog_id2_pattern.match(self.id.text).group(2)
    return None

  def GetBlogName(self):
    """Finds the name of this blog as used in the 'alternate' URL.

    An alternate URL is in the form 'http://blogName.blogspot.com/'. For an
    entry representing the above example, this method would return 'blogName'.

    Returns:
      The blog's URL name component as a string.
    """
    for link in self.link:
      if link.rel == 'alternate':
        return self.blog_name_pattern.match(link.href).group(2)
    return None


class BlogEntry(BloggerEntry):
  """Describes a blog entry in the feed listing a user's blogs."""


def BlogEntryFromString(xml_string):
  # Parser helper used as a converter by BloggerService.
  return atom.CreateClassFromXMLString(BlogEntry, xml_string)


class BlogFeed(gdata.GDataFeed):
  """Describes a feed of a user's blogs."""
  # Override the entry child class so parsed entries are BlogEntry objects.
  _children = gdata.GDataFeed._children.copy()
  _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [BlogEntry])


def BlogFeedFromString(xml_string):
  return atom.CreateClassFromXMLString(BlogFeed, xml_string)


class BlogPostEntry(BloggerEntry):
  """Describes a blog post entry in the feed of a blog's posts."""
  post_id_pattern = re.compile('(tag:blogger.com,1999:blog-)(\w*)(.post-)(\w*)')

  def AddLabel(self, label):
    """Adds a label to the blog post.

    The label is represented by an Atom category element, so this method
    is shorthand for appending a new atom.Category object.

    Args:
      label: str
    """
    self.category.append(atom.Category(scheme=LABEL_SCHEME, term=label))

  def GetPostId(self):
    """Extracts the postID string from the entry's Atom id.

    Returns: A string of digits which identify this post within the blog.
    """
    if self.id.text:
      return self.post_id_pattern.match(self.id.text).group(4)
    return None


def BlogPostEntryFromString(xml_string):
  return atom.CreateClassFromXMLString(BlogPostEntry, xml_string)


class BlogPostFeed(gdata.GDataFeed):
  """Describes a feed of a blog's posts."""
  _children = gdata.GDataFeed._children.copy()
  _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [BlogPostEntry])


def BlogPostFeedFromString(xml_string):
  return atom.CreateClassFromXMLString(BlogPostFeed, xml_string)


class InReplyTo(atom.AtomBase):
  """thr:in-reply-to element linking a comment to the post it replies to."""
  _tag = 'in-reply-to'
  _namespace = THR_NAMESPACE
  _attributes = atom.AtomBase._attributes.copy()
  _attributes['href'] = 'href'
  _attributes['ref'] = 'ref'
  _attributes['source'] = 'source'
  _attributes['type'] = 'type'

  def __init__(self, href=None, ref=None, source=None, type=None,
               extension_elements=None, extension_attributes=None, text=None):
    self.href = href
    self.ref = ref
    self.source = source
    self.type = type
    self.extension_elements = extension_elements or []
    self.extension_attributes = extension_attributes or {}
    self.text = text


def InReplyToFromString(xml_string):
  return atom.CreateClassFromXMLString(InReplyTo, xml_string)


class CommentEntry(BloggerEntry):
  """Describes a blog post comment entry in the feed of a blog
  post's comments."""
  # Register the thr:in-reply-to child so it is parsed into in_reply_to.
  _children = BloggerEntry._children.copy()
  _children['{%s}in-reply-to' % THR_NAMESPACE] = ('in_reply_to', InReplyTo)

  comment_id_pattern = re.compile('.*-(\w*)$')

  def __init__(self, author=None, category=None, content=None,
               contributor=None, atom_id=None, link=None, published=None,
               rights=None, source=None, summary=None, control=None,
               title=None, updated=None, in_reply_to=None,
               extension_elements=None, extension_attributes=None, text=None):
    BloggerEntry.__init__(self, author=author, category=category,
                          content=content, contributor=contributor,
                          atom_id=atom_id, link=link, published=published,
                          rights=rights, source=source, summary=summary,
                          control=control, title=title, updated=updated,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes,
                          text=text)
    self.in_reply_to = in_reply_to

  def GetCommentId(self):
    """Extracts the commentID string from the entry's Atom id.

    Returns: A string of digits which identify this post within the blog.
    """
    if self.id.text:
      return self.comment_id_pattern.match(self.id.text).group(1)
    return None


def CommentEntryFromString(xml_string):
  return atom.CreateClassFromXMLString(CommentEntry, xml_string)


class CommentFeed(gdata.GDataFeed):
  """Describes a feed of a blog post's comments."""
  _children = gdata.GDataFeed._children.copy()
  _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [CommentEntry])


def CommentFeedFromString(xml_string):
  return atom.CreateClassFromXMLString(CommentFeed, xml_string)
Python
#!/usr/bin/python
#
# Copyright (C) 2008 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Allow Google Apps domain administrators to manage groups, group members
and group owners.

  GroupsService: Provides methods to manage groups, members and owners.
"""

__author__ = 'google-apps-apis@googlegroups.com'


import urllib
import gdata.apps
import gdata.apps.service
import gdata.service


API_VER = '2.0'
BASE_URL = '/a/feeds/group/' + API_VER + '/%s'
GROUP_MEMBER_URL = BASE_URL + '?member=%s'
GROUP_MEMBER_DIRECT_URL = GROUP_MEMBER_URL + '&directOnly=%s'
GROUP_ID_URL = BASE_URL + '/%s'
MEMBER_URL = BASE_URL + '/%s/member'
MEMBER_WITH_SUSPENDED_URL = MEMBER_URL + '?includeSuspendedUsers=%s'
MEMBER_ID_URL = MEMBER_URL + '/%s'
OWNER_URL = BASE_URL + '/%s/owner'
OWNER_WITH_SUSPENDED_URL = OWNER_URL + '?includeSuspendedUsers=%s'
OWNER_ID_URL = OWNER_URL + '/%s'

PERMISSION_OWNER = 'Owner'
PERMISSION_MEMBER = 'Member'
PERMISSION_DOMAIN = 'Domain'
PERMISSION_ANYONE = 'Anyone'


class GroupsService(gdata.apps.service.PropertyService):
  """Client for the Google Apps Groups service."""

  def _ServiceUrl(self, service_type, is_existed, group_id, member_id,
                  owner_email, direct_only=False, domain=None,
                  suspended_users=False):
    # Builds the feed URI for the requested operation.  service_type is one
    # of 'group', 'member' or 'owner'; is_existed selects the per-entity URI
    # (for get/update/delete) over the collection URI (for create/list).
    # NOTE(review): returns None for any other service_type value.
    if domain is None:
      domain = self.domain

    if service_type == 'group':
      if group_id != '' and is_existed:
        return GROUP_ID_URL % (domain, group_id)
      elif member_id != '':
        if direct_only:
          return GROUP_MEMBER_DIRECT_URL % (domain,
                                            urllib.quote_plus(member_id),
                                            self._Bool2Str(direct_only))
        else:
          return GROUP_MEMBER_URL % (domain, urllib.quote_plus(member_id))
      else:
        return BASE_URL % (domain)

    if service_type == 'member':
      if member_id != '' and is_existed:
        return MEMBER_ID_URL % (domain, group_id,
                                urllib.quote_plus(member_id))
      elif suspended_users:
        return MEMBER_WITH_SUSPENDED_URL % (domain, group_id,
                                            self._Bool2Str(suspended_users))
      else:
        return MEMBER_URL % (domain, group_id)

    if service_type == 'owner':
      if owner_email != '' and is_existed:
        return OWNER_ID_URL % (domain, group_id,
                               urllib.quote_plus(owner_email))
      elif suspended_users:
        return OWNER_WITH_SUSPENDED_URL % (domain, group_id,
                                           self._Bool2Str(suspended_users))
      else:
        return OWNER_URL % (domain, group_id)

  def _Bool2Str(self, b):
    # Converts a boolean to the lowercase 'true'/'false' string the API
    # expects; None passes through unchanged.
    if b is None:
      return None
    return str(b is True).lower()

  def _IsExisted(self, uri):
    # Probes the URI; a 'does not exist' API error means False, any other
    # API error is re-raised.
    try:
      self._GetProperties(uri)
      return True
    except gdata.apps.service.AppsForYourDomainException, e:
      if e.error_code == gdata.apps.service.ENTITY_DOES_NOT_EXIST:
        return False
      else:
        raise e

  def CreateGroup(self, group_id, group_name, description, email_permission):
    """Create a group.

    Args:
      group_id: The ID of the group (e.g. us-sales).
      group_name: The name of the group.
      description: A description of the group
      email_permission: The subscription permission of the group.

    Returns:
      A dict containing the result of the create operation.
    """
    uri = self._ServiceUrl('group', False, group_id, '', '')
    properties = {}
    properties['groupId'] = group_id
    properties['groupName'] = group_name
    properties['description'] = description
    properties['emailPermission'] = email_permission
    return self._PostProperties(uri, properties)

  def UpdateGroup(self, group_id, group_name, description, email_permission):
    """Update a group's name, description and/or permission.

    Args:
      group_id: The ID of the group (e.g. us-sales).
      group_name: The name of the group.
      description: A description of the group
      email_permission: The subscription permission of the group.

    Returns:
      A dict containing the result of the update operation.
    """
    uri = self._ServiceUrl('group', True, group_id, '', '')
    properties = {}
    properties['groupId'] = group_id
    properties['groupName'] = group_name
    properties['description'] = description
    properties['emailPermission'] = email_permission
    return self._PutProperties(uri, properties)

  def RetrieveGroup(self, group_id):
    """Retrieve a group based on its ID.

    Args:
      group_id: The ID of the group (e.g. us-sales).

    Returns:
      A dict containing the result of the retrieve operation.
    """
    uri = self._ServiceUrl('group', True, group_id, '', '')
    return self._GetProperties(uri)

  def RetrieveAllGroups(self):
    """Retrieve all groups in the domain.

    Args:
      None

    Returns:
      A list containing the result of the retrieve operation.
    """
    uri = self._ServiceUrl('group', True, '', '', '')
    return self._GetPropertiesList(uri)

  def RetrievePageOfGroups(self, start_group=None):
    """Retrieve one page of groups in the domain.

    Args:
      start_group: The key to continue for pagination through all groups.

    Returns:
      A feed object containing the result of the retrieve operation.
    """
    uri = self._ServiceUrl('group', True, '', '', '')
    if start_group is not None:
      uri += "?start="+start_group
    property_feed = self._GetPropertyFeed(uri)
    return property_feed

  def RetrieveGroups(self, member_id, direct_only=False):
    """Retrieve all groups that belong to the given member_id.

    Args:
      member_id: The member's email address (e.g. member@example.com).
      direct_only: Boolean whether only return groups that this member
          directly belongs to.

    Returns:
      A list containing the result of the retrieve operation.
    """
    uri = self._ServiceUrl('group', True, '', member_id, '',
                           direct_only=direct_only)
    return self._GetPropertiesList(uri)

  def DeleteGroup(self, group_id):
    """Delete a group based on its ID.

    Args:
      group_id: The ID of the group (e.g. us-sales).

    Returns:
      A dict containing the result of the delete operation.
    """
    uri = self._ServiceUrl('group', True, group_id, '', '')
    return self._DeleteProperties(uri)

  def AddMemberToGroup(self, member_id, group_id):
    """Add a member to a group.

    Args:
      member_id: The member's email address (e.g. member@example.com).
      group_id: The ID of the group (e.g. us-sales).

    Returns:
      A dict containing the result of the add operation.
    """
    uri = self._ServiceUrl('member', False, group_id, member_id, '')
    properties = {}
    properties['memberId'] = member_id
    return self._PostProperties(uri, properties)

  def IsMember(self, member_id, group_id):
    """Check whether the given member already exists in the given group.

    Args:
      member_id: The member's email address (e.g. member@example.com).
      group_id: The ID of the group (e.g. us-sales).

    Returns:
      True if the member exists in the group.  False otherwise.
    """
    uri = self._ServiceUrl('member', True, group_id, member_id, '')
    return self._IsExisted(uri)

  def RetrieveMember(self, member_id, group_id):
    """Retrieve the given member in the given group.

    Args:
      member_id: The member's email address (e.g. member@example.com).
      group_id: The ID of the group (e.g. us-sales).

    Returns:
      A dict containing the result of the retrieve operation.
    """
    uri = self._ServiceUrl('member', True, group_id, member_id, '')
    return self._GetProperties(uri)

  def RetrieveAllMembers(self, group_id, suspended_users=False):
    """Retrieve all members in the given group.

    Args:
      group_id: The ID of the group (e.g. us-sales).
      suspended_users: A boolean; should we include any suspended users in
        the membership list returned?

    Returns:
      A list containing the result of the retrieve operation.
    """
    uri = self._ServiceUrl('member', True, group_id, '', '',
                           suspended_users=suspended_users)
    return self._GetPropertiesList(uri)

  def RetrievePageOfMembers(self, group_id, suspended_users=False,
                            start=None):
    """Retrieve one page of members of a given group.

    Args:
      group_id: The ID of the group (e.g. us-sales).
      suspended_users: A boolean; should we include any suspended users in
        the membership list returned?
      start: The key to continue for pagination through all members.

    Returns:
      A feed object containing the result of the retrieve operation.
    """
    uri = self._ServiceUrl('member', True, group_id, '', '',
                           suspended_users=suspended_users)
    if start is not None:
      # The suspended_users URI already carries a '?' query string, so the
      # pagination key must be appended with '&' in that case.
      if suspended_users:
        uri += "&start="+start
      else:
        uri += "?start="+start
    property_feed = self._GetPropertyFeed(uri)
    return property_feed

  def RemoveMemberFromGroup(self, member_id, group_id):
    """Remove the given member from the given group.

    Args:
      member_id: The member's email address (e.g. member@example.com).
      group_id: The ID of the group (e.g. us-sales).

    Returns:
      A dict containing the result of the remove operation.
    """
    uri = self._ServiceUrl('member', True, group_id, member_id, '')
    return self._DeleteProperties(uri)

  def AddOwnerToGroup(self, owner_email, group_id):
    """Add an owner to a group.

    Args:
      owner_email: The email address of a group owner.
      group_id: The ID of the group (e.g. us-sales).

    Returns:
      A dict containing the result of the add operation.
    """
    uri = self._ServiceUrl('owner', False, group_id, '', owner_email)
    properties = {}
    properties['email'] = owner_email
    return self._PostProperties(uri, properties)

  def IsOwner(self, owner_email, group_id):
    """Check whether the given member an owner of the given group.

    Args:
      owner_email: The email address of a group owner.
      group_id: The ID of the group (e.g. us-sales).

    Returns:
      True if the member is an owner of the given group.  False otherwise.
    """
    uri = self._ServiceUrl('owner', True, group_id, '', owner_email)
    return self._IsExisted(uri)

  def RetrieveOwner(self, owner_email, group_id):
    """Retrieve the given owner in the given group.

    Args:
      owner_email: The email address of a group owner.
      group_id: The ID of the group (e.g. us-sales).

    Returns:
      A dict containing the result of the retrieve operation.
    """
    uri = self._ServiceUrl('owner', True, group_id, '', owner_email)
    return self._GetProperties(uri)

  def RetrieveAllOwners(self, group_id, suspended_users=False):
    """Retrieve all owners of the given group.

    Args:
      group_id: The ID of the group (e.g. us-sales).
      suspended_users: A boolean; should we include any suspended users in
        the ownership list returned?

    Returns:
      A list containing the result of the retrieve operation.
    """
    uri = self._ServiceUrl('owner', True, group_id, '', '',
                           suspended_users=suspended_users)
    return self._GetPropertiesList(uri)

  def RetrievePageOfOwners(self, group_id, suspended_users=False, start=None):
    """Retrieve one page of owners of the given group.

    Args:
      group_id: The ID of the group (e.g. us-sales).
      suspended_users: A boolean; should we include any suspended users in
        the ownership list returned?
      start: The key to continue for pagination through all owners.

    Returns:
      A feed object containing the result of the retrieve operation.
    """
    uri = self._ServiceUrl('owner', True, group_id, '', '',
                           suspended_users=suspended_users)
    if start is not None:
      # Same '?' vs '&' pagination handling as RetrievePageOfMembers.
      if suspended_users:
        uri += "&start="+start
      else:
        uri += "?start="+start
    property_feed = self._GetPropertyFeed(uri)
    return property_feed

  def RemoveOwnerFromGroup(self, owner_email, group_id):
    """Remove the given owner from the given group.

    Args:
      owner_email: The email address of a group owner.
      group_id: The ID of the group (e.g. us-sales).

    Returns:
      A dict containing the result of the remove operation.
    """
    uri = self._ServiceUrl('owner', True, group_id, '', owner_email)
    return self._DeleteProperties(uri)
Python
#!/usr/bin/python2.4
#
# Copyright 2008 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Contains the methods to import mail via Google Apps Email Migration API.

  MigrationService: Provides methods to import mail.
"""

__author__ = ('google-apps-apis@googlegroups.com',
              'pti@google.com (Prashant Tiwari)')


import base64
import threading
import time
from atom.service import deprecation
from gdata.apps import migration
from gdata.apps.migration import MailEntryProperties
import gdata.apps.service
import gdata.service


API_VER = '2.0'


class MigrationService(gdata.apps.service.AppsService):
  """Client for the EMAPI migration service.

  Use either ImportMail to import one message at a time, or AddMailEntry and
  ImportMultipleMails to import a bunch of messages at a time.
  """

  def __init__(self, email=None, password=None, domain=None, source=None,
               server='apps-apis.google.com', additional_headers=None):
    gdata.apps.service.AppsService.__init__(
        self, email=email, password=password, domain=domain, source=source,
        server=server, additional_headers=additional_headers)
    self.mail_batch = migration.BatchMailEventFeed()
    self.mail_entries = []
    # Lifetime total of failed single-message imports on this service
    # instance.  ImportMultipleMails snapshots this value so each call
    # reports only its own failures.
    self.exceptions = 0

  def _BaseURL(self):
    # All migration requests are scoped to the authenticated domain.
    return '/a/feeds/migration/%s/%s' % (API_VER, self.domain)

  def ImportMail(self, user_name, mail_message, mail_item_properties,
                 mail_labels):
    """Imports a single mail message.

    Args:
      user_name: The username to import messages to.
      mail_message: An RFC822 format email message.
      mail_item_properties: A list of Gmail properties to apply to the
          message.
      mail_labels: A list of labels to apply to the message.

    Returns:
      A MailEntry representing the successfully imported message.

    Raises:
      AppsForYourDomainException: An error occurred importing the message.
    """
    uri = '%s/%s/mail' % (self._BaseURL(), user_name)

    mail_entry = migration.MailEntry()
    # The API requires the raw RFC822 payload to be base64-encoded.
    mail_entry.rfc822_msg = migration.Rfc822Msg(text=(base64.b64encode(
        mail_message)))
    mail_entry.rfc822_msg.encoding = 'base64'
    mail_entry.mail_item_property = map(
        lambda x: migration.MailItemProperty(value=x), mail_item_properties)
    mail_entry.label = map(lambda x: migration.Label(label_name=x),
                           mail_labels)

    try:
      return migration.MailEntryFromString(str(self.Post(mail_entry, uri)))
    except gdata.service.RequestError as e:
      # Store the number of failed imports when importing several at a time
      self.exceptions += 1
      raise gdata.apps.service.AppsForYourDomainException(e.args[0])

  def AddBatchEntry(self, mail_message, mail_item_properties,
                    mail_labels):
    """Adds a message to the current batch that you later will submit.

    Deprecated, use AddMailEntry instead

    Args:
      mail_message: An RFC822 format email message.
      mail_item_properties: A list of Gmail properties to apply to the
          message.
      mail_labels: A list of labels to apply to the message.

    Returns:
      The length of the MailEntry representing the message.
    """
    deprecation("calling deprecated method AddBatchEntry")
    mail_entry = migration.BatchMailEntry()
    mail_entry.rfc822_msg = migration.Rfc822Msg(text=(base64.b64encode(
        mail_message)))
    mail_entry.rfc822_msg.encoding = 'base64'
    mail_entry.mail_item_property = map(
        lambda x: migration.MailItemProperty(value=x), mail_item_properties)
    mail_entry.label = map(lambda x: migration.Label(label_name=x),
                           mail_labels)
    self.mail_batch.AddBatchEntry(mail_entry)
    return len(str(mail_entry))

  def SubmitBatch(self, user_name):
    """Sends all the mail items you have added to the batch to the server.

    Deprecated, use ImportMultipleMails instead

    Args:
      user_name: The username to import messages to.

    Returns:
      An HTTPResponse from the web service call.

    Raises:
      AppsForYourDomainException: An error occurred importing the batch.
    """
    deprecation("calling deprecated method SubmitBatch")
    uri = '%s/%s/mail/batch' % (self._BaseURL(), user_name)
    try:
      self.result = self.Post(self.mail_batch, uri,
                              converter=migration.BatchMailEventFeedFromString)
    except gdata.service.RequestError as e:
      raise gdata.apps.service.AppsForYourDomainException(e.args[0])
    # Start a fresh batch so the submitted entries are not sent twice.
    self.mail_batch = migration.BatchMailEventFeed()
    return self.result

  def AddMailEntry(self, mail_message, mail_item_properties=None,
                   mail_labels=None, identifier=None):
    """Prepares a list of mail messages to import using ImportMultipleMails.

    Args:
      mail_message: An RFC822 format email message as a string.
      mail_item_properties: List of Gmail properties to apply to the message.
      mail_labels: List of Gmail labels to apply to the message.
      identifier: The optional file identifier string

    Returns:
      The number of email messages to be imported.
    """
    mail_entry_properties = MailEntryProperties(
        mail_message=mail_message,
        mail_item_properties=mail_item_properties,
        mail_labels=mail_labels,
        identifier=identifier)
    self.mail_entries.append(mail_entry_properties)
    return len(self.mail_entries)

  def ImportMultipleMails(self, user_name, threads_per_batch=20):
    """Launches separate threads to import every message added by AddMailEntry.

    Args:
      user_name: The user account name to import messages to.
      threads_per_batch: Number of messages to import at a time.

    Returns:
      The number of email messages that were successfully migrated
      by this call.

    Raises:
      Exception: An error occurred while importing mails.
    """
    num_entries = len(self.mail_entries)
    if not num_entries:
      return 0
    # Guard against a zero or negative batch size, which would otherwise
    # raise ZeroDivisionError in the batch computation below.
    threads_per_batch = max(1, threads_per_batch)
    # self.exceptions is a lifetime total for this service instance;
    # snapshot it so repeated calls only count their own failures.
    exceptions_before = self.exceptions
    threads = []
    for mail_entry_properties in self.mail_entries:
      # ImportMail increments self.exceptions on failure, which is how
      # failures inside worker threads are surfaced back to this method.
      t = threading.Thread(name=mail_entry_properties.identifier,
                           target=self.ImportMail,
                           args=(user_name,
                                 mail_entry_properties.mail_message,
                                 mail_entry_properties.mail_item_properties,
                                 mail_entry_properties.mail_labels))
      threads.append(t)
    try:
      # Determine the number of batches needed with threads_per_batch in each
      # (ceiling division; // keeps integer semantics on both Py2 and Py3).
      batches = num_entries // threads_per_batch + (
          0 if num_entries % threads_per_batch == 0 else 1)
      batch_min = 0
      # Start the threads, one batch at a time
      for batch in range(batches):
        batch_max = ((batch + 1) * threads_per_batch
                     if (batch + 1) * threads_per_batch < num_entries
                     else num_entries)
        for i in range(batch_min, batch_max):
          threads[i].start()
          # Brief pause between launches to avoid hammering the API.
          time.sleep(1)
        for i in range(batch_min, batch_max):
          threads[i].join()
        batch_min = batch_max
      self.mail_entries = []
    except Exception as e:
      raise Exception(e.args[0])
    else:
      # Successes = entries submitted minus failures recorded by ImportMail
      # during THIS call (delta against the snapshot taken above).
      return num_entries - (self.exceptions - exceptions_before)
Python
#!/usr/bin/python2.4
#
# Copyright 2008 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Contains objects used with Google Apps."""

__author__ = 'google-apps-apis@googlegroups.com'


import atom
import gdata


# XML namespaces which are often used in Google Apps entity.
APPS_NAMESPACE = 'http://schemas.google.com/apps/2006'
APPS_TEMPLATE = '{http://schemas.google.com/apps/2006}%s'


class Rfc822Msg(atom.AtomBase):
  """The Migration rfc822Msg element.

  Carries a raw RFC822 message; the 'encoding' XML attribute records how
  the text payload is encoded (always initialized to 'base64' here).
  """

  _tag = 'rfc822Msg'
  _namespace = APPS_NAMESPACE
  _children = atom.AtomBase._children.copy()
  _attributes = atom.AtomBase._attributes.copy()
  # Map the XML 'encoding' attribute onto the .encoding Python attribute.
  _attributes['encoding'] = 'encoding'

  def __init__(self, extension_elements=None,
               extension_attributes=None, text=None):
    self.text = text
    self.encoding = 'base64'
    self.extension_elements = extension_elements or []
    self.extension_attributes = extension_attributes or {}


def Rfc822MsgFromString(xml_string):
  """Parse in the Rfc822 message from the XML definition."""
  return atom.CreateClassFromXMLString(Rfc822Msg, xml_string)


class MailItemProperty(atom.AtomBase):
  """The Migration mailItemProperty element."""

  _tag = 'mailItemProperty'
  _namespace = APPS_NAMESPACE
  _children = atom.AtomBase._children.copy()
  _attributes = atom.AtomBase._attributes.copy()
  # Map the XML 'value' attribute onto the .value Python attribute.
  _attributes['value'] = 'value'

  def __init__(self, value=None, extension_elements=None,
               extension_attributes=None, text=None):
    self.value = value
    self.text = text
    self.extension_elements = extension_elements or []
    self.extension_attributes = extension_attributes or {}


def MailItemPropertyFromString(xml_string):
  """Parse in the MailItemProperty from the XML definition."""
  return atom.CreateClassFromXMLString(MailItemProperty, xml_string)


class Label(atom.AtomBase):
  """The Migration label element."""

  _tag = 'label'
  _namespace = APPS_NAMESPACE
  _children = atom.AtomBase._children.copy()
  _attributes = atom.AtomBase._attributes.copy()
  # Map the XML 'labelName' attribute onto the .label_name Python attribute.
  _attributes['labelName'] = 'label_name'

  def __init__(self, label_name=None,
               extension_elements=None, extension_attributes=None,
               text=None):
    self.label_name = label_name
    self.text = text
    self.extension_elements = extension_elements or []
    self.extension_attributes = extension_attributes or {}


def LabelFromString(xml_string):
  """Parse in the Label from the XML definition."""
  return atom.CreateClassFromXMLString(Label, xml_string)


class MailEntry(gdata.GDataEntry):
  """A Google Migration flavor of an Atom Entry.

  Adds rfc822Msg, mailItemProperty and label children on top of the
  standard GData entry.
  """

  _tag = 'entry'
  _namespace = atom.ATOM_NAMESPACE
  _children = gdata.GDataEntry._children.copy()
  _attributes = gdata.GDataEntry._attributes.copy()
  _children['{%s}rfc822Msg' % APPS_NAMESPACE] = ('rfc822_msg', Rfc822Msg)
  # List-valued children: an entry may carry several properties and labels.
  _children['{%s}mailItemProperty' % APPS_NAMESPACE] = ('mail_item_property',
                                                        [MailItemProperty])
  _children['{%s}label' % APPS_NAMESPACE] = ('label', [Label])

  def __init__(self, author=None, category=None, content=None,
               atom_id=None, link=None, published=None,
               title=None, updated=None,
               rfc822_msg=None, mail_item_property=None, label=None,
               extended_property=None,
               extension_elements=None, extension_attributes=None, text=None):
    gdata.GDataEntry.__init__(self, author=author, category=category,
                              content=content, atom_id=atom_id, link=link,
                              published=published, title=title,
                              updated=updated)
    # NOTE(review): unlike BatchMailEntry below, mail_item_property and
    # label are stored as passed (possibly None) rather than defaulting to
    # [] -- confirm callers always supply lists.
    self.rfc822_msg = rfc822_msg
    self.mail_item_property = mail_item_property
    self.label = label
    self.extended_property = extended_property or []
    self.text = text
    self.extension_elements = extension_elements or []
    self.extension_attributes = extension_attributes or {}


def MailEntryFromString(xml_string):
  """Parse in the MailEntry from the XML definition."""
  return atom.CreateClassFromXMLString(MailEntry, xml_string)


class BatchMailEntry(gdata.BatchEntry):
  """A Google Migration flavor of an Atom Entry.

  Like MailEntry but based on gdata.BatchEntry, so it also carries batch
  operation/id/status metadata for batched submissions.
  """

  _tag = gdata.BatchEntry._tag
  _namespace = gdata.BatchEntry._namespace
  _children = gdata.BatchEntry._children.copy()
  _attributes = gdata.BatchEntry._attributes.copy()
  _children['{%s}rfc822Msg' % APPS_NAMESPACE] = ('rfc822_msg', Rfc822Msg)
  _children['{%s}mailItemProperty' % APPS_NAMESPACE] = ('mail_item_property',
                                                        [MailItemProperty])
  _children['{%s}label' % APPS_NAMESPACE] = ('label', [Label])

  def __init__(self, author=None, category=None, content=None,
               atom_id=None, link=None, published=None,
               title=None, updated=None,
               rfc822_msg=None, mail_item_property=None, label=None,
               batch_operation=None, batch_id=None, batch_status=None,
               extended_property=None,
               extension_elements=None, extension_attributes=None, text=None):
    gdata.BatchEntry.__init__(self, author=author, category=category,
                              content=content, atom_id=atom_id, link=link,
                              published=published,
                              batch_operation=batch_operation,
                              batch_id=batch_id, batch_status=batch_status,
                              title=title, updated=updated)
    self.rfc822_msg = rfc822_msg or None
    self.mail_item_property = mail_item_property or []
    self.label = label or []
    self.extended_property = extended_property or []
    self.text = text
    self.extension_elements = extension_elements or []
    self.extension_attributes = extension_attributes or {}


def BatchMailEntryFromString(xml_string):
  """Parse in the BatchMailEntry from the XML definition."""
  return atom.CreateClassFromXMLString(BatchMailEntry, xml_string)


class BatchMailEventFeed(gdata.BatchFeed):
  """A Migration event feed flavor of an Atom Feed.

  Its entries are BatchMailEntry objects rather than plain batch entries.
  """

  _tag = gdata.BatchFeed._tag
  _namespace = gdata.BatchFeed._namespace
  _children = gdata.BatchFeed._children.copy()
  _attributes = gdata.BatchFeed._attributes.copy()
  # Override the entry child so feed parsing yields BatchMailEntry objects.
  _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [BatchMailEntry])

  def __init__(self, author=None, category=None, contributor=None,
               generator=None, icon=None, atom_id=None, link=None, logo=None,
               rights=None, subtitle=None, title=None, updated=None,
               entry=None, total_results=None, start_index=None,
               items_per_page=None, interrupted=None, extension_elements=None,
               extension_attributes=None, text=None):
    gdata.BatchFeed.__init__(self, author=author, category=category,
                             contributor=contributor, generator=generator,
                             icon=icon, atom_id=atom_id, link=link,
                             logo=logo, rights=rights, subtitle=subtitle,
                             title=title, updated=updated, entry=entry,
                             total_results=total_results,
                             start_index=start_index,
                             items_per_page=items_per_page,
                             interrupted=interrupted,
                             extension_elements=extension_elements,
                             extension_attributes=extension_attributes,
                             text=text)


class MailEntryProperties(object):
  """Represents a mail message and its attributes.

  Plain in-memory container used by MigrationService.AddMailEntry /
  ImportMultipleMails; not an XML-mapped atom class.
  """

  def __init__(self, mail_message=None, mail_item_properties=None,
               mail_labels=None, identifier=None):
    self.mail_message = mail_message
    self.mail_item_properties = mail_item_properties or []
    self.mail_labels = mail_labels or []
    self.identifier = identifier


def BatchMailEventFeedFromString(xml_string):
  """Parse in the BatchMailEventFeed from the XML definition."""
  return atom.CreateClassFromXMLString(BatchMailEventFeed, xml_string)
Python
#!/usr/bin/python # # Copyright (C) 2007 SIOS Technology, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. __author__ = 'tmatsuo@sios.com (Takashi MATSUO)' try: from xml.etree import cElementTree as ElementTree except ImportError: try: import cElementTree as ElementTree except ImportError: try: from xml.etree import ElementTree except ImportError: from elementtree import ElementTree import urllib import gdata import atom.service import gdata.service import gdata.apps import atom API_VER="2.0" HTTP_OK=200 UNKOWN_ERROR=1000 USER_DELETED_RECENTLY=1100 USER_SUSPENDED=1101 DOMAIN_USER_LIMIT_EXCEEDED=1200 DOMAIN_ALIAS_LIMIT_EXCEEDED=1201 DOMAIN_SUSPENDED=1202 DOMAIN_FEATURE_UNAVAILABLE=1203 ENTITY_EXISTS=1300 ENTITY_DOES_NOT_EXIST=1301 ENTITY_NAME_IS_RESERVED=1302 ENTITY_NAME_NOT_VALID=1303 INVALID_GIVEN_NAME=1400 INVALID_FAMILY_NAME=1401 INVALID_PASSWORD=1402 INVALID_USERNAME=1403 INVALID_HASH_FUNCTION_NAME=1404 INVALID_HASH_DIGGEST_LENGTH=1405 INVALID_EMAIL_ADDRESS=1406 INVALID_QUERY_PARAMETER_VALUE=1407 TOO_MANY_RECIPIENTS_ON_EMAIL_LIST=1500 DEFAULT_QUOTA_LIMIT='2048' class Error(Exception): pass class AppsForYourDomainException(Error): def __init__(self, response): Error.__init__(self, response) try: self.element_tree = ElementTree.fromstring(response['body']) self.error_code = int(self.element_tree[0].attrib['errorCode']) self.reason = self.element_tree[0].attrib['reason'] self.invalidInput = self.element_tree[0].attrib['invalidInput'] except: self.error_code = UNKOWN_ERROR 
class AppsService(gdata.service.GDataService):
  """Client for the Google Apps Provisioning service."""

  def __init__(self, email=None, password=None, domain=None, source=None,
               server='apps-apis.google.com', additional_headers=None,
               **kwargs):
    """Creates a client for the Google Apps Provisioning service.

    Args:
      email: string (optional) The user's email address, used for
          authentication.
      password: string (optional) The user's password.
      domain: string (optional) The Google Apps domain name.
      source: string (optional) The name of the user's application.
      server: string (optional) The name of the server to which a connection
          will be opened. Default value: 'apps-apis.google.com'.
      **kwargs: The other parameters to pass to gdata.service.GDataService
          constructor.
    """
    gdata.service.GDataService.__init__(
        self, email=email, password=password, service='apps', source=source,
        server=server, additional_headers=additional_headers, **kwargs)
    # The Provisioning API is HTTPS-only.
    self.ssl = True
    self.port = 443
    self.domain = domain

  def _baseURL(self):
    # All provisioning feeds hang off the per-domain base path.
    return "/a/feeds/%s" % self.domain

  def AddAllElementsFromAllPages(self, link_finder, func):
    """Retrieve all pages and add all elements.

    Follows the atom 'next' links from link_finder, fetching each page with
    converter func and appending its entries onto link_finder.entry.
    """
    next = link_finder.GetNextLink()
    while next is not None:
      next_feed = self.Get(next.href, converter=func)
      for a_entry in next_feed.entry:
        link_finder.entry.append(a_entry)
      next = next_feed.GetNextLink()
    return link_finder

  def RetrievePageOfEmailLists(self, start_email_list_name=None,
                               num_retries=gdata.service.DEFAULT_NUM_RETRIES,
                               delay=gdata.service.DEFAULT_DELAY,
                               backoff=gdata.service.DEFAULT_BACKOFF):
    """Retrieve one page of email lists, optionally starting at a name."""
    uri = "%s/emailList/%s" % (self._baseURL(), API_VER)
    if start_email_list_name is not None:
      uri += "?startEmailListName=%s" % start_email_list_name
    try:
      return gdata.apps.EmailListFeedFromString(str(self.GetWithRetries(
          uri, num_retries=num_retries, delay=delay, backoff=backoff)))
    except gdata.service.RequestError, e:
      raise AppsForYourDomainException(e.args[0])

  def GetGeneratorForAllEmailLists(
      self, num_retries=gdata.service.DEFAULT_NUM_RETRIES,
      delay=gdata.service.DEFAULT_DELAY,
      backoff=gdata.service.DEFAULT_BACKOFF):
    """Retrieve a generator for all emaillists in this domain."""
    first_page = self.RetrievePageOfEmailLists(num_retries=num_retries,
                                               delay=delay,
                                               backoff=backoff)
    return self.GetGeneratorFromLinkFinder(
        first_page, gdata.apps.EmailListRecipientFeedFromString,
        num_retries=num_retries, delay=delay, backoff=backoff)

  def RetrieveAllEmailLists(self):
    """Retrieve all email list of a domain."""
    ret = self.RetrievePageOfEmailLists()
    # pagination
    return self.AddAllElementsFromAllPages(
        ret, gdata.apps.EmailListFeedFromString)

  def RetrieveEmailList(self, list_name):
    """Retrieve a single email list by the list's name."""
    uri = "%s/emailList/%s/%s" % (
        self._baseURL(), API_VER, list_name)
    try:
      return self.Get(uri, converter=gdata.apps.EmailListEntryFromString)
    except gdata.service.RequestError, e:
      raise AppsForYourDomainException(e.args[0])

  def RetrieveEmailLists(self, recipient):
    """Retrieve All Email List Subscriptions for an Email Address."""
    uri = "%s/emailList/%s?recipient=%s" % (
        self._baseURL(), API_VER, recipient)
    try:
      ret = gdata.apps.EmailListFeedFromString(str(self.Get(uri)))
    except gdata.service.RequestError, e:
      raise AppsForYourDomainException(e.args[0])
    # pagination
    return self.AddAllElementsFromAllPages(
        ret, gdata.apps.EmailListFeedFromString)

  def RemoveRecipientFromEmailList(self, recipient, list_name):
    """Remove recipient from email list."""
    uri = "%s/emailList/%s/%s/recipient/%s" % (
        self._baseURL(), API_VER, list_name, recipient)
    try:
      self.Delete(uri)
    except gdata.service.RequestError, e:
      raise AppsForYourDomainException(e.args[0])

  def RetrievePageOfRecipients(self, list_name, start_recipient=None,
                               num_retries=gdata.service.DEFAULT_NUM_RETRIES,
                               delay=gdata.service.DEFAULT_DELAY,
                               backoff=gdata.service.DEFAULT_BACKOFF):
    """Retrieve one page of recipients of an email list."""
    uri = "%s/emailList/%s/%s/recipient" % (
        self._baseURL(), API_VER, list_name)
    if start_recipient is not None:
      uri += "?startRecipient=%s" % start_recipient
    try:
      return gdata.apps.EmailListRecipientFeedFromString(str(
          self.GetWithRetries(
              uri, num_retries=num_retries, delay=delay, backoff=backoff)))
    except gdata.service.RequestError, e:
      raise AppsForYourDomainException(e.args[0])

  def GetGeneratorForAllRecipients(
      self, list_name, num_retries=gdata.service.DEFAULT_NUM_RETRIES,
      delay=gdata.service.DEFAULT_DELAY,
      backoff=gdata.service.DEFAULT_BACKOFF):
    """Retrieve a generator for all recipients of a particular emaillist."""
    first_page = self.RetrievePageOfRecipients(list_name,
                                               num_retries=num_retries,
                                               delay=delay,
                                               backoff=backoff)
    return self.GetGeneratorFromLinkFinder(
        first_page, gdata.apps.EmailListRecipientFeedFromString,
        num_retries=num_retries, delay=delay, backoff=backoff)

  def RetrieveAllRecipients(self, list_name):
    """Retrieve all recipient of an email list."""
    ret = self.RetrievePageOfRecipients(list_name)
    # pagination
    return self.AddAllElementsFromAllPages(
        ret, gdata.apps.EmailListRecipientFeedFromString)

  def AddRecipientToEmailList(self, recipient, list_name):
    """Add a recipient to a email list."""
    uri = "%s/emailList/%s/%s/recipient" % (
        self._baseURL(), API_VER, list_name)
    recipient_entry = gdata.apps.EmailListRecipientEntry()
    recipient_entry.who = gdata.apps.Who(email=recipient)
    try:
      return gdata.apps.EmailListRecipientEntryFromString(
          str(self.Post(recipient_entry, uri)))
    except gdata.service.RequestError, e:
      raise AppsForYourDomainException(e.args[0])

  def DeleteEmailList(self, list_name):
    """Delete a email list."""
    uri = "%s/emailList/%s/%s" % (self._baseURL(), API_VER, list_name)
    try:
      self.Delete(uri)
    except gdata.service.RequestError, e:
      raise AppsForYourDomainException(e.args[0])

  def CreateEmailList(self, list_name):
    """Create a email list."""
    uri = "%s/emailList/%s" % (self._baseURL(), API_VER)
    email_list_entry = gdata.apps.EmailListEntry()
    email_list_entry.email_list = gdata.apps.EmailList(name=list_name)
    try:
      return gdata.apps.EmailListEntryFromString(
          str(self.Post(email_list_entry, uri)))
    except gdata.service.RequestError, e:
      raise AppsForYourDomainException(e.args[0])

  def DeleteNickname(self, nickname):
    """Delete a nickname."""
    uri = "%s/nickname/%s/%s" % (self._baseURL(), API_VER, nickname)
    try:
      self.Delete(uri)
    except gdata.service.RequestError, e:
      raise AppsForYourDomainException(e.args[0])

  def RetrievePageOfNicknames(self, start_nickname=None,
                              num_retries=gdata.service.DEFAULT_NUM_RETRIES,
                              delay=gdata.service.DEFAULT_DELAY,
                              backoff=gdata.service.DEFAULT_BACKOFF):
    """Retrieve one page of nicknames in the domain."""
    uri = "%s/nickname/%s" % (self._baseURL(), API_VER)
    if start_nickname is not None:
      uri += "?startNickname=%s" % start_nickname
    try:
      return gdata.apps.NicknameFeedFromString(str(self.GetWithRetries(
          uri, num_retries=num_retries, delay=delay, backoff=backoff)))
    except gdata.service.RequestError, e:
      raise AppsForYourDomainException(e.args[0])

  def GetGeneratorForAllNicknames(
      self, num_retries=gdata.service.DEFAULT_NUM_RETRIES,
      delay=gdata.service.DEFAULT_DELAY,
      backoff=gdata.service.DEFAULT_BACKOFF):
    """Retrieve a generator for all nicknames in this domain."""
    first_page = self.RetrievePageOfNicknames(num_retries=num_retries,
                                              delay=delay,
                                              backoff=backoff)
    return self.GetGeneratorFromLinkFinder(
        first_page, gdata.apps.NicknameFeedFromString,
        num_retries=num_retries, delay=delay, backoff=backoff)

  def RetrieveAllNicknames(self):
    """Retrieve all nicknames in the domain."""
    ret = self.RetrievePageOfNicknames()
    # pagination
    return self.AddAllElementsFromAllPages(
        ret, gdata.apps.NicknameFeedFromString)

  def GetGeneratorForAllNicknamesOfAUser(
      self, user_name, num_retries=gdata.service.DEFAULT_NUM_RETRIES,
      delay=gdata.service.DEFAULT_DELAY,
      backoff=gdata.service.DEFAULT_BACKOFF):
    """Retrieve a generator for all nicknames of a particular user."""
    uri = "%s/nickname/%s?username=%s" % (self._baseURL(), API_VER, user_name)
    try:
      first_page = gdata.apps.NicknameFeedFromString(str(self.GetWithRetries(
          uri, num_retries=num_retries, delay=delay, backoff=backoff)))
    except gdata.service.RequestError, e:
      raise AppsForYourDomainException(e.args[0])
    return self.GetGeneratorFromLinkFinder(
        first_page, gdata.apps.NicknameFeedFromString,
        num_retries=num_retries, delay=delay, backoff=backoff)

  def RetrieveNicknames(self, user_name):
    """Retrieve nicknames of the user."""
    uri = "%s/nickname/%s?username=%s" % (self._baseURL(), API_VER, user_name)
    try:
      ret = gdata.apps.NicknameFeedFromString(str(self.Get(uri)))
    except gdata.service.RequestError, e:
      raise AppsForYourDomainException(e.args[0])
    # pagination
    return self.AddAllElementsFromAllPages(
        ret, gdata.apps.NicknameFeedFromString)

  def RetrieveNickname(self, nickname):
    """Retrieve a nickname.

    Args:
      nickname: string The nickname to retrieve

    Returns:
      gdata.apps.NicknameEntry
    """
    uri = "%s/nickname/%s/%s" % (self._baseURL(), API_VER, nickname)
    try:
      return gdata.apps.NicknameEntryFromString(str(self.Get(uri)))
    except gdata.service.RequestError, e:
      raise AppsForYourDomainException(e.args[0])

  def CreateNickname(self, user_name, nickname):
    """Create a nickname for the given user."""
    uri = "%s/nickname/%s" % (self._baseURL(), API_VER)
    nickname_entry = gdata.apps.NicknameEntry()
    nickname_entry.login = gdata.apps.Login(user_name=user_name)
    nickname_entry.nickname = gdata.apps.Nickname(name=nickname)
    try:
      return gdata.apps.NicknameEntryFromString(
          str(self.Post(nickname_entry, uri)))
    except gdata.service.RequestError, e:
      raise AppsForYourDomainException(e.args[0])

  def DeleteUser(self, user_name):
    """Delete a user account."""
    uri = "%s/user/%s/%s" % (self._baseURL(), API_VER, user_name)
    try:
      return self.Delete(uri)
    except gdata.service.RequestError, e:
      raise AppsForYourDomainException(e.args[0])

  def UpdateUser(self, user_name, user_entry):
    """Update a user account with the given UserEntry."""
    uri = "%s/user/%s/%s" % (self._baseURL(), API_VER, user_name)
    try:
      return gdata.apps.UserEntryFromString(str(self.Put(user_entry, uri)))
    except gdata.service.RequestError, e:
      raise AppsForYourDomainException(e.args[0])

  def CreateUser(self, user_name, family_name, given_name, password,
                 suspended='false', quota_limit=None,
                 password_hash_function=None,
                 change_password=None):
    """Create a user account.

    suspended and change_password are string booleans ('true'/'false') as
    expected by the API, not Python bools.
    """
    uri = "%s/user/%s" % (self._baseURL(), API_VER)
    user_entry = gdata.apps.UserEntry()
    user_entry.login = gdata.apps.Login(
        user_name=user_name, password=password, suspended=suspended,
        hash_function_name=password_hash_function,
        change_password=change_password)
    user_entry.name = gdata.apps.Name(family_name=family_name,
                                      given_name=given_name)
    if quota_limit is not None:
      user_entry.quota = gdata.apps.Quota(limit=str(quota_limit))
    try:
      return gdata.apps.UserEntryFromString(str(self.Post(user_entry, uri)))
    except gdata.service.RequestError, e:
      raise AppsForYourDomainException(e.args[0])

  def SuspendUser(self, user_name):
    """Mark the user as suspended, returning the updated UserEntry."""
    user_entry = self.RetrieveUser(user_name)
    # Only issue the update if the state actually changes.
    if user_entry.login.suspended != 'true':
      user_entry.login.suspended = 'true'
      user_entry = self.UpdateUser(user_name, user_entry)
    return user_entry

  def RestoreUser(self, user_name):
    """Clear the user's suspended flag, returning the updated UserEntry."""
    user_entry = self.RetrieveUser(user_name)
    # Only issue the update if the state actually changes.
    if user_entry.login.suspended != 'false':
      user_entry.login.suspended = 'false'
      user_entry = self.UpdateUser(user_name, user_entry)
    return user_entry

  def RetrieveUser(self, user_name):
    """Retrieve an user account.

    Args:
      user_name: string The user name to retrieve

    Returns:
      gdata.apps.UserEntry
    """
    uri = "%s/user/%s/%s" % (self._baseURL(), API_VER, user_name)
    try:
      return gdata.apps.UserEntryFromString(str(self.Get(uri)))
    except gdata.service.RequestError, e:
      raise AppsForYourDomainException(e.args[0])

  def RetrievePageOfUsers(self, start_username=None,
                          num_retries=gdata.service.DEFAULT_NUM_RETRIES,
                          delay=gdata.service.DEFAULT_DELAY,
                          backoff=gdata.service.DEFAULT_BACKOFF):
    """Retrieve one page of users in this domain."""
    uri = "%s/user/%s" % (self._baseURL(), API_VER)
    if start_username is not None:
      uri += "?startUsername=%s" % start_username
    try:
      return gdata.apps.UserFeedFromString(str(self.GetWithRetries(
          uri, num_retries=num_retries, delay=delay, backoff=backoff)))
    except gdata.service.RequestError, e:
      raise AppsForYourDomainException(e.args[0])

  def GetGeneratorForAllUsers(self,
                              num_retries=gdata.service.DEFAULT_NUM_RETRIES,
                              delay=gdata.service.DEFAULT_DELAY,
                              backoff=gdata.service.DEFAULT_BACKOFF):
    """Retrieve a generator for all users in this domain."""
    first_page = self.RetrievePageOfUsers(num_retries=num_retries,
                                          delay=delay, backoff=backoff)
    return self.GetGeneratorFromLinkFinder(
        first_page, gdata.apps.UserFeedFromString,
        num_retries=num_retries, delay=delay, backoff=backoff)

  def RetrieveAllUsers(self):
    """Retrieve all users in this domain. OBSOLETE"""
    ret = self.RetrievePageOfUsers()
    # pagination
    return self.AddAllElementsFromAllPages(
        ret, gdata.apps.UserFeedFromString)


class PropertyService(gdata.service.GDataService):
  """Client for the Google Apps Property service.

  Base class for services (email settings, groups, ...) that exchange
  flat name/value property entries with the API; the _Get/_Post/_Put/
  _DeleteProperties helpers convert between dicts and PropertyEntry XML.
  """

  def __init__(self, email=None, password=None, domain=None, source=None,
               server='apps-apis.google.com', additional_headers=None):
    gdata.service.GDataService.__init__(self, email=email, password=password,
                                        service='apps', source=source,
                                        server=server,
                                        additional_headers=additional_headers)
    # The Provisioning API is HTTPS-only.
    self.ssl = True
    self.port = 443
    self.domain = domain

  def AddAllElementsFromAllPages(self, link_finder, func):
    """Retrieve all pages and add all elements."""
    next = link_finder.GetNextLink()
    while next is not None:
      next_feed = self.Get(next.href, converter=func)
      for a_entry in next_feed.entry:
        link_finder.entry.append(a_entry)
      next = next_feed.GetNextLink()
    return link_finder

  def _GetPropertyEntry(self, properties):
    # Build a PropertyEntry from a dict, skipping None names/values so
    # optional settings are simply omitted from the request.
    property_entry = gdata.apps.PropertyEntry()
    property = []
    for name, value in properties.iteritems():
      if name is not None and value is not None:
        property.append(gdata.apps.Property(name=name, value=value))
    property_entry.property = property
    return property_entry

  def _PropertyEntry2Dict(self, property_entry):
    # Inverse of _GetPropertyEntry: flatten a PropertyEntry into a dict.
    properties = {}
    for i, property in enumerate(property_entry.property):
      properties[property.name] = property.value
    return properties

  def _GetPropertyFeed(self, uri):
    try:
      return gdata.apps.PropertyFeedFromString(str(self.Get(uri)))
    except gdata.service.RequestError, e:
      raise gdata.apps.service.AppsForYourDomainException(e.args[0])

  def _GetPropertiesList(self, uri):
    property_feed = self._GetPropertyFeed(uri)
    # pagination
    property_feed = self.AddAllElementsFromAllPages(
        property_feed, gdata.apps.PropertyFeedFromString)
    properties_list = []
    for property_entry in property_feed.entry:
      properties_list.append(self._PropertyEntry2Dict(property_entry))
    return properties_list

  def _GetProperties(self, uri):
    try:
      return self._PropertyEntry2Dict(gdata.apps.PropertyEntryFromString(
          str(self.Get(uri))))
    except gdata.service.RequestError, e:
      raise gdata.apps.service.AppsForYourDomainException(e.args[0])

  def _PostProperties(self, uri, properties):
    property_entry = self._GetPropertyEntry(properties)
    try:
      return self._PropertyEntry2Dict(gdata.apps.PropertyEntryFromString(
          str(self.Post(property_entry, uri))))
    except gdata.service.RequestError, e:
      raise gdata.apps.service.AppsForYourDomainException(e.args[0])

  def _PutProperties(self, uri, properties):
    property_entry = self._GetPropertyEntry(properties)
    try:
      return self._PropertyEntry2Dict(gdata.apps.PropertyEntryFromString(
          str(self.Put(property_entry, uri))))
    except gdata.service.RequestError, e:
      raise gdata.apps.service.AppsForYourDomainException(e.args[0])

  def _DeleteProperties(self, uri):
    try:
      self.Delete(uri)
    except gdata.service.RequestError, e:
      raise gdata.apps.service.AppsForYourDomainException(e.args[0])


def _bool2str(b):
  # Convert a Python bool (or None) to the lowercase string boolean the
  # API expects; None passes through so optional settings are omitted.
  if b is None:
    return None
  return str(b is True).lower()
Python
#!/usr/bin/python
#
# Copyright (C) 2008 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Allow Google Apps domain administrators to set users' email settings.

  EmailSettingsService: Set various email settings.
"""

__author__ = 'google-apps-apis@googlegroups.com'


import gdata.apps
import gdata.apps.service
import gdata.service


# Version of the Email Settings API used when building request URIs.
API_VER='2.0'
# Forwarding and POP3 options.  KEEP/ARCHIVE/DELETE say what Google Mail does
# with its copy of a message after forwarding or POP retrieval; ALL_MAIL and
# MAIL_FROM_NOW_ON select which messages POP exposes.
KEEP='KEEP'
ARCHIVE='ARCHIVE'
DELETE='DELETE'
ALL_MAIL='ALL_MAIL'
MAIL_FROM_NOW_ON='MAIL_FROM_NOW_ON'


class EmailSettingsService(gdata.apps.service.PropertyService):
  """Client for the Google Apps Email Settings service.

  Each public method builds the per-user, per-setting URI and delegates to
  the inherited property helpers (_PostProperties for creates,
  _PutProperties for updates), which wrap the values as apps:property
  entries.
  """

  def _serviceUrl(self, setting_id, username, domain=None):
    """Build the Email Settings request URI for one user and settingID.

    Args:
      setting_id: string Key of the setting ('label', 'filter', ...).
      username: string User the setting applies to.
      domain: string (optional) Google Apps domain; defaults to the domain
          this service instance is bound to.

    Returns:
      The request path as a string.
    """
    if domain is None:
      domain = self.domain
    return '/a/feeds/emailsettings/%s/%s/%s/%s' % (API_VER, domain,
                                                   username, setting_id)

  def CreateLabel(self, username, label):
    """Create a label.

    Args:
      username: User to create label for.
      label: Label to create.

    Returns:
      A dict containing the result of the create operation.
    """
    uri = self._serviceUrl('label', username)
    properties = {'label': label}
    return self._PostProperties(uri, properties)

  def CreateFilter(self, username, from_=None, to=None, subject=None,
                   has_the_word=None, does_not_have_the_word=None,
                   has_attachment=None, label=None, should_mark_as_read=None,
                   should_archive=None):
    """Create a filter.

    Args:
      username: User to create filter for.
      from_: Filter from string.
      to: Filter to string.
      subject: Filter subject.
      has_the_word: Words to filter in.
      does_not_have_the_word: Words to filter out.
      has_attachment: Boolean for message having attachment.
      label: Label to apply.
      should_mark_as_read: Boolean for marking message as read.
      should_archive: Boolean for archiving message.

    Returns:
      A dict containing the result of the create operation.
    """
    uri = self._serviceUrl('filter', username)
    properties = {}
    # Property keys are the camelCase names the API protocol expects;
    # booleans are serialized to 'true'/'false' (or None) by _bool2str.
    properties['from'] = from_
    properties['to'] = to
    properties['subject'] = subject
    properties['hasTheWord'] = has_the_word
    properties['doesNotHaveTheWord'] = does_not_have_the_word
    properties['hasAttachment'] = gdata.apps.service._bool2str(has_attachment)
    properties['label'] = label
    properties['shouldMarkAsRead'] = gdata.apps.service._bool2str(
        should_mark_as_read)
    properties['shouldArchive'] = gdata.apps.service._bool2str(should_archive)
    return self._PostProperties(uri, properties)

  def CreateSendAsAlias(self, username, name, address, reply_to=None,
                        make_default=None):
    """Create alias to send mail as.

    Args:
      username: User to create alias for.
      name: Name of alias.
      address: Email address to send from.
      reply_to: Email address to reply to.
      make_default: Boolean for whether this is the new default sending
          alias.

    Returns:
      A dict containing the result of the create operation.
    """
    uri = self._serviceUrl('sendas', username)
    properties = {}
    properties['name'] = name
    properties['address'] = address
    properties['replyTo'] = reply_to
    properties['makeDefault'] = gdata.apps.service._bool2str(make_default)
    return self._PostProperties(uri, properties)

  def UpdateWebClipSettings(self, username, enable):
    """Update WebClip Settings.

    Args:
      username: User to update forwarding for.
      enable: Boolean whether to enable Web Clip.

    Returns:
      A dict containing the result of the update operation.
    """
    uri = self._serviceUrl('webclip', username)
    properties = {}
    properties['enable'] = gdata.apps.service._bool2str(enable)
    return self._PutProperties(uri, properties)

  def UpdateForwarding(self, username, enable, forward_to=None, action=None):
    """Update forwarding settings.

    Args:
      username: User to update forwarding for.
      enable: Boolean whether to enable this forwarding rule.
      forward_to: Email address to forward to.
      action: Action to take after forwarding.

    Returns:
      A dict containing the result of the update operation.
    """
    uri = self._serviceUrl('forwarding', username)
    properties = {}
    properties['enable'] = gdata.apps.service._bool2str(enable)
    # forwardTo/action are only sent when the rule is being enabled.
    if enable is True:
      properties['forwardTo'] = forward_to
      properties['action'] = action
    return self._PutProperties(uri, properties)

  def UpdatePop(self, username, enable, enable_for=None, action=None):
    """Update POP3 settings.

    Args:
      username: User to update POP3 settings for.
      enable: Boolean whether to enable POP3.
      enable_for: Which messages to make available via POP3.
      action: Action to take after user retrieves email via POP3.

    Returns:
      A dict containing the result of the update operation.
    """
    uri = self._serviceUrl('pop', username)
    properties = {}
    properties['enable'] = gdata.apps.service._bool2str(enable)
    # enableFor/action are only sent when POP3 is being enabled.
    if enable is True:
      properties['enableFor'] = enable_for
      properties['action'] = action
    return self._PutProperties(uri, properties)

  def UpdateImap(self, username, enable):
    """Update IMAP settings.

    Args:
      username: User to update IMAP settings for.
      enable: Boolean whether to enable IMAP.

    Returns:
      A dict containing the result of the update operation.
    """
    uri = self._serviceUrl('imap', username)
    properties = {'enable': gdata.apps.service._bool2str(enable)}
    return self._PutProperties(uri, properties)

  def UpdateVacation(self, username, enable, subject=None, message=None,
                     contacts_only=None):
    """Update vacation settings.

    Args:
      username: User to update vacation settings for.
      enable: Boolean whether to enable vacation responses.
      subject: Vacation message subject.
      message: Vacation message body.
      contacts_only: Boolean whether to send message only to contacts.

    Returns:
      A dict containing the result of the update operation.
    """
    uri = self._serviceUrl('vacation', username)
    properties = {}
    properties['enable'] = gdata.apps.service._bool2str(enable)
    # The responder body fields are only sent when it is being enabled.
    if enable is True:
      properties['subject'] = subject
      properties['message'] = message
      properties['contactsOnly'] = gdata.apps.service._bool2str(contacts_only)
    return self._PutProperties(uri, properties)

  def UpdateSignature(self, username, signature):
    """Update signature.

    Args:
      username: User to update signature for.
      signature: Signature string.

    Returns:
      A dict containing the result of the update operation.
    """
    uri = self._serviceUrl('signature', username)
    properties = {'signature': signature}
    return self._PutProperties(uri, properties)

  def UpdateLanguage(self, username, language):
    """Update user interface language.

    Args:
      username: User to update language for.
      language: Language code.

    Returns:
      A dict containing the result of the update operation.
    """
    uri = self._serviceUrl('language', username)
    properties = {'language': language}
    return self._PutProperties(uri, properties)

  def UpdateGeneral(self, username, page_size=None, shortcuts=None,
                    arrows=None, snippets=None, unicode=None):
    """Update general settings.

    Args:
      username: User to update general settings for.
      page_size: Number of messages to show.
      shortcuts: Boolean whether shortcuts are enabled.
      arrows: Boolean whether arrows are enabled.
      snippets: Boolean whether snippets are enabled.
      unicode: Boolean whether unicode is enabled.

    Returns:
      A dict containing the result of the update operation.
    """
    uri = self._serviceUrl('general', username)
    properties = {}
    # Unlike the other updaters, each field is optional and only included
    # when the caller supplied it, so unrelated settings stay untouched.
    if page_size != None:
      properties['pageSize'] = str(page_size)
    if shortcuts != None:
      properties['shortcuts'] = gdata.apps.service._bool2str(shortcuts)
    if arrows != None:
      properties['arrows'] = gdata.apps.service._bool2str(arrows)
    if snippets != None:
      properties['snippets'] = gdata.apps.service._bool2str(snippets)
    if unicode != None:
      properties['unicode'] = gdata.apps.service._bool2str(unicode)
    return self._PutProperties(uri, properties)
Python
#!/usr/bin/python2.4 # # Copyright 2010 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """EmailSettingsClient simplifies Email Settings API calls. EmailSettingsClient extends gdata.client.GDClient to ease interaction with the Google Apps Email Settings API. These interactions include the ability to create labels, filters, aliases, and update web-clip, forwarding, POP, IMAP, vacation-responder, signature, language, and general settings, and retrieve labels, send-as, forwarding, pop, imap, vacation and signature settings. """ __author__ = 'Claudio Cherubino <ccherubino@google.com>' import gdata.apps.emailsettings.data import gdata.client # Email Settings URI template # The strings in this template are eventually replaced with the API version, # Google Apps domain name, username, and settingID, respectively. 
EMAIL_SETTINGS_URI_TEMPLATE = '/a/feeds/emailsettings/%s/%s/%s/%s'

# The settingID value for the label requests
SETTING_ID_LABEL = 'label'
# The settingID value for the filter requests
SETTING_ID_FILTER = 'filter'
# The settingID value for the send-as requests
SETTING_ID_SENDAS = 'sendas'
# The settingID value for the webclip requests
SETTING_ID_WEBCLIP = 'webclip'
# The settingID value for the forwarding requests
SETTING_ID_FORWARDING = 'forwarding'
# The settingID value for the POP requests
SETTING_ID_POP = 'pop'
# The settingID value for the IMAP requests
SETTING_ID_IMAP = 'imap'
# The settingID value for the vacation responder requests
SETTING_ID_VACATION_RESPONDER = 'vacation'
# The settingID value for the signature requests
SETTING_ID_SIGNATURE = 'signature'
# The settingID value for the language requests
SETTING_ID_LANGUAGE = 'language'
# The settingID value for the general requests
SETTING_ID_GENERAL = 'general'
# The settingID value for the delegation requests
SETTING_ID_DELEGATION = 'delegation'

# The KEEP action for the email settings
ACTION_KEEP = 'KEEP'
# The ARCHIVE action for the email settings
ACTION_ARCHIVE = 'ARCHIVE'
# The DELETE action for the email settings
ACTION_DELETE = 'DELETE'

# The ALL_MAIL setting for POP enable_for property
POP_ENABLE_FOR_ALL_MAIL = 'ALL_MAIL'
# The MAIL_FROM_NOW_ON setting for POP enable_for property
POP_ENABLE_FOR_MAIL_FROM_NOW_ON = 'MAIL_FROM_NOW_ON'


class EmailSettingsClient(gdata.client.GDClient):
  """Client extension for the Google Email Settings API service.

  Attributes:
    host: string The hostname for the Email Settings API service.
    api_version: string The version of the Email Settings API.
  """

  host = 'apps-apis.google.com'
  api_version = '2.0'
  auth_service = 'apps'
  # NOTE(review): gdata.gauth is not imported directly in this module;
  # this presumably relies on gdata.client importing it -- confirm.
  auth_scopes = gdata.gauth.AUTH_SCOPES['apps']
  ssl = True

  def __init__(self, domain, auth_token=None, **kwargs):
    """Constructs a new client for the Email Settings API.

    Args:
      domain: string The Google Apps domain with Email Settings.
      auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or
          OAuthToken which authorizes this client to edit the email settings.
      kwargs: The other parameters to pass to the gdata.client.GDClient
          constructor.
    """
    gdata.client.GDClient.__init__(self, auth_token=auth_token, **kwargs)
    self.domain = domain

  def make_email_settings_uri(self, username, setting_id):
    """Creates the URI for the Email Settings API call.

    Using this client's Google Apps domain, create the URI to setup
    email settings for the given user in that domain. If params are provided,
    append them as GET params.

    Args:
      username: string The name of the user affected by this setting.
      setting_id: string The key of the setting to be configured.

    Returns:
      A string giving the URI for Email Settings API calls for this client's
      Google Apps domain.
    """
    uri = EMAIL_SETTINGS_URI_TEMPLATE % (self.api_version, self.domain,
                                         username, setting_id)
    return uri

  MakeEmailSettingsUri = make_email_settings_uri

  def create_label(self, username, name, **kwargs):
    """Creates a label with the given properties.

    Args:
      username: string The name of the user.
      name: string The name of the label.
      kwargs: The other parameters to pass to gdata.client.GDClient.post().

    Returns:
      gdata.apps.emailsettings.data.EmailSettingsLabel of the new resource.
    """
    uri = self.MakeEmailSettingsUri(username=username,
                                    setting_id=SETTING_ID_LABEL)
    new_label = gdata.apps.emailsettings.data.EmailSettingsLabel(
        uri=uri, name=name)
    return self.post(new_label, uri, **kwargs)

  CreateLabel = create_label

  def retrieve_labels(self, username, **kwargs):
    """Retrieves email labels for the specified username.

    Args:
      username: string The name of the user to get the labels for.

    Returns:
      A gdata.data.GDFeed of the user's email labels.
    """
    uri = self.MakeEmailSettingsUri(username=username,
                                    setting_id=SETTING_ID_LABEL)
    return self.GetFeed(uri, auth_token=None, query=None, **kwargs)

  RetrieveLabels = retrieve_labels

  def create_filter(self, username, from_address=None, to_address=None,
                    subject=None, has_the_word=None,
                    does_not_have_the_word=None, has_attachments=None,
                    label=None, mark_as_read=None, archive=None, **kwargs):
    """Creates a filter with the given properties.

    Args:
      username: string The name of the user.
      from_address: string The source email address for the filter.
      to_address: string (optional) The destination email address for
          the filter.
      subject: string (optional) The value the email must have in its
          subject to be filtered.
      has_the_word: string (optional) The value the email must have in its
          subject or body to be filtered.
      does_not_have_the_word: string (optional) The value the email cannot
          have in its subject or body to be filtered.
      has_attachments: string (optional) A boolean string representing
          whether the email must have an attachment to be filtered.
      label: string (optional) The name of the label to apply to
          messages matching the filter criteria.
      mark_as_read: Boolean (optional) Whether or not to mark messages
          matching the filter criteria as read.
      archive: Boolean (optional) Whether or not to move messages
          matching to Archived state.
      kwargs: The other parameters to pass to gdata.client.GDClient.post().

    Returns:
      gdata.apps.emailsettings.data.EmailSettingsFilter of the new resource.
    """
    uri = self.MakeEmailSettingsUri(username=username,
                                    setting_id=SETTING_ID_FILTER)
    new_filter = gdata.apps.emailsettings.data.EmailSettingsFilter(
        uri=uri, from_address=from_address, to_address=to_address,
        subject=subject, has_the_word=has_the_word,
        does_not_have_the_word=does_not_have_the_word,
        has_attachments=has_attachments, label=label,
        mark_as_read=mark_as_read, archive=archive)
    return self.post(new_filter, uri, **kwargs)

  CreateFilter = create_filter

  def create_send_as(self, username, name, address, reply_to=None,
                     make_default=None, **kwargs):
    """Creates a send-as alias with the given properties.

    Args:
      username: string The name of the user.
      name: string The name that will appear in the "From" field.
      address: string The email address that appears as the
          origination address for emails sent by this user.
      reply_to: string (optional) The address to be used as the reply-to
          address in email sent using the alias.
      make_default: Boolean (optional) Whether or not this alias should
          become the default alias for this user.
      kwargs: The other parameters to pass to gdata.client.GDClient.post().

    Returns:
      gdata.apps.emailsettings.data.EmailSettingsSendAsAlias of the
      new resource.
    """
    uri = self.MakeEmailSettingsUri(username=username,
                                    setting_id=SETTING_ID_SENDAS)
    new_alias = gdata.apps.emailsettings.data.EmailSettingsSendAsAlias(
        uri=uri, name=name, address=address,
        reply_to=reply_to, make_default=make_default)
    return self.post(new_alias, uri, **kwargs)

  CreateSendAs = create_send_as

  def retrieve_send_as(self, username, **kwargs):
    """Retrieves send-as aliases for the specified username.

    Args:
      username: string The name of the user to get the send-as for.

    Returns:
      A gdata.data.GDFeed of the user's send-as alias settings.
    """
    uri = self.MakeEmailSettingsUri(username=username,
                                    setting_id=SETTING_ID_SENDAS)
    return self.GetFeed(uri, auth_token=None, query=None, **kwargs)

  RetrieveSendAs = retrieve_send_as

  def update_webclip(self, username, enable, **kwargs):
    """Enable/Disable Google Mail web clip.

    Args:
      username: string The name of the user.
      enable: Boolean Whether to enable showing Web clips.
      kwargs: The other parameters to pass to the update method.

    Returns:
      gdata.apps.emailsettings.data.EmailSettingsWebClip of the
      updated resource.
    """
    uri = self.MakeEmailSettingsUri(username=username,
                                    setting_id=SETTING_ID_WEBCLIP)
    new_webclip = gdata.apps.emailsettings.data.EmailSettingsWebClip(
        uri=uri, enable=enable)
    return self.update(new_webclip, **kwargs)

  UpdateWebclip = update_webclip

  def update_forwarding(self, username, enable, forward_to=None,
                        action=None, **kwargs):
    """Update Google Mail Forwarding settings.

    Args:
      username: string The name of the user.
      enable: Boolean Whether to enable incoming email forwarding.
      forward_to: (optional) string The address email will be forwarded to.
      action: string (optional) The action to perform after forwarding
          an email (ACTION_KEEP, ACTION_ARCHIVE, ACTION_DELETE).
      kwargs: The other parameters to pass to the update method.

    Returns:
      gdata.apps.emailsettings.data.EmailSettingsForwarding of the
      updated resource.
    """
    uri = self.MakeEmailSettingsUri(username=username,
                                    setting_id=SETTING_ID_FORWARDING)
    new_forwarding = gdata.apps.emailsettings.data.EmailSettingsForwarding(
        uri=uri, enable=enable, forward_to=forward_to, action=action)
    return self.update(new_forwarding, **kwargs)

  UpdateForwarding = update_forwarding

  def retrieve_forwarding(self, username, **kwargs):
    """Retrieves forwarding settings for the specified username.

    Args:
      username: string The name of the user to get the forwarding
          settings for.

    Returns:
      A gdata.data.GDEntry of the user's email forwarding settings.
    """
    uri = self.MakeEmailSettingsUri(username=username,
                                    setting_id=SETTING_ID_FORWARDING)
    return self.GetEntry(uri, auth_token=None, query=None, **kwargs)

  RetrieveForwarding = retrieve_forwarding

  def update_pop(self, username, enable, enable_for=None, action=None,
                 **kwargs):
    """Update Google Mail POP settings.

    Args:
      username: string The name of the user.
      enable: Boolean Whether to enable incoming POP3 access.
      enable_for: string (optional) Whether to enable POP3 for all mail
          (POP_ENABLE_FOR_ALL_MAIL), or mail from now on
          (POP_ENABLE_FOR_MAIL_FROM_NOW_ON).
      action: string (optional) What Google Mail should do with its copy
          of the email after it is retrieved using POP
          (ACTION_KEEP, ACTION_ARCHIVE, ACTION_DELETE).
      kwargs: The other parameters to pass to the update method.

    Returns:
      gdata.apps.emailsettings.data.EmailSettingsPop of the updated resource.
    """
    uri = self.MakeEmailSettingsUri(username=username,
                                    setting_id=SETTING_ID_POP)
    new_pop = gdata.apps.emailsettings.data.EmailSettingsPop(
        uri=uri, enable=enable, enable_for=enable_for, action=action)
    return self.update(new_pop, **kwargs)

  UpdatePop = update_pop

  def retrieve_pop(self, username, **kwargs):
    """Retrieves POP settings for the specified username.

    Args:
      username: string The name of the user to get the POP settings for.

    Returns:
      A gdata.data.GDEntry of the user's POP settings.
    """
    uri = self.MakeEmailSettingsUri(username=username,
                                    setting_id=SETTING_ID_POP)
    return self.GetEntry(uri, auth_token=None, query=None, **kwargs)

  RetrievePop = retrieve_pop

  def update_imap(self, username, enable, **kwargs):
    """Update Google Mail IMAP settings.

    Args:
      username: string The name of the user.
      enable: Boolean Whether to enable IMAP access.
      kwargs: The other parameters to pass to the update method.

    Returns:
      gdata.apps.emailsettings.data.EmailSettingsImap of the updated resource.
    """
    uri = self.MakeEmailSettingsUri(username=username,
                                    setting_id=SETTING_ID_IMAP)
    new_imap = gdata.apps.emailsettings.data.EmailSettingsImap(
        uri=uri, enable=enable)
    return self.update(new_imap, **kwargs)

  UpdateImap = update_imap

  def retrieve_imap(self, username, **kwargs):
    """Retrieves imap settings for the specified username.

    Args:
      username: string The name of the user to get the imap settings for.

    Returns:
      A gdata.data.GDEntry of the user's IMAP settings.
    """
    uri = self.MakeEmailSettingsUri(username=username,
                                    setting_id=SETTING_ID_IMAP)
    return self.GetEntry(uri, auth_token=None, query=None, **kwargs)

  RetrieveImap = retrieve_imap

  def update_vacation(self, username, enable, subject=None, message=None,
                      contacts_only=None, **kwargs):
    """Update Google Mail vacation-responder settings.

    Args:
      username: string The name of the user.
      enable: Boolean Whether to enable the vacation responder.
      subject: string (optional) The subject line of the vacation responder
          autoresponse.
      message: string (optional) The message body of the vacation responder
          autoresponse.
      contacts_only: Boolean (optional) Whether to only send autoresponses
          to known contacts.
      kwargs: The other parameters to pass to the update method.

    Returns:
      gdata.apps.emailsettings.data.EmailSettingsVacationResponder of the
      updated resource.
    """
    uri = self.MakeEmailSettingsUri(username=username,
                                    setting_id=SETTING_ID_VACATION_RESPONDER)
    new_vacation = gdata.apps.emailsettings.data.EmailSettingsVacationResponder(
        uri=uri, enable=enable, subject=subject,
        message=message, contacts_only=contacts_only)
    return self.update(new_vacation, **kwargs)

  UpdateVacation = update_vacation

  def retrieve_vacation(self, username, **kwargs):
    """Retrieves vacation settings for the specified username.

    Args:
      username: string The name of the user to get the vacation
          settings for.

    Returns:
      A gdata.data.GDEntry of the user's vacation auto-responder settings.
    """
    uri = self.MakeEmailSettingsUri(username=username,
                                    setting_id=SETTING_ID_VACATION_RESPONDER)
    return self.GetEntry(uri, auth_token=None, query=None, **kwargs)

  RetrieveVacation = retrieve_vacation

  def update_signature(self, username, signature, **kwargs):
    """Update Google Mail signature.

    Args:
      username: string The name of the user.
      signature: string The signature to be appended to outgoing messages.
      kwargs: The other parameters to pass to the update method.

    Returns:
      gdata.apps.emailsettings.data.EmailSettingsSignature of the
      updated resource.
    """
    uri = self.MakeEmailSettingsUri(username=username,
                                    setting_id=SETTING_ID_SIGNATURE)
    new_signature = gdata.apps.emailsettings.data.EmailSettingsSignature(
        uri=uri, signature=signature)
    return self.update(new_signature, **kwargs)

  UpdateSignature = update_signature

  def retrieve_signature(self, username, **kwargs):
    """Retrieves signature settings for the specified username.

    Args:
      username: string The name of the user to get the signature
          settings for.

    Returns:
      A gdata.data.GDEntry of the user's signature settings.
    """
    uri = self.MakeEmailSettingsUri(username=username,
                                    setting_id=SETTING_ID_SIGNATURE)
    return self.GetEntry(uri, auth_token=None, query=None, **kwargs)

  RetrieveSignature = retrieve_signature

  def update_language(self, username, language, **kwargs):
    """Update Google Mail language settings.

    Args:
      username: string The name of the user.
      language: string The language tag for Google Mail's display language.
      kwargs: The other parameters to pass to the update method.

    Returns:
      gdata.apps.emailsettings.data.EmailSettingsLanguage of the
      updated resource.
    """
    uri = self.MakeEmailSettingsUri(username=username,
                                    setting_id=SETTING_ID_LANGUAGE)
    new_language = gdata.apps.emailsettings.data.EmailSettingsLanguage(
        uri=uri, language=language)
    return self.update(new_language, **kwargs)

  UpdateLanguage = update_language

  def update_general_settings(self, username, page_size=None, shortcuts=None,
                              arrows=None, snippets=None, use_unicode=None,
                              **kwargs):
    """Update Google Mail general settings.

    Args:
      username: string The name of the user.
      page_size: int (optional) The number of conversations to be shown per
          page.
      shortcuts: Boolean (optional) Whether to enable keyboard shortcuts.
      arrows: Boolean (optional) Whether to display arrow-shaped personal
          indicators next to email sent specifically to the user.
      snippets: Boolean (optional) Whether to display snippets of the
          messages in the inbox and when searching.
      use_unicode: Boolean (optional) Whether to use UTF-8 (unicode)
          encoding for all outgoing messages.
      kwargs: The other parameters to pass to the update method.

    Returns:
      gdata.apps.emailsettings.data.EmailSettingsGeneral of the
      updated resource.
    """
    uri = self.MakeEmailSettingsUri(username=username,
                                    setting_id=SETTING_ID_GENERAL)
    new_general = gdata.apps.emailsettings.data.EmailSettingsGeneral(
        uri=uri, page_size=page_size, shortcuts=shortcuts,
        arrows=arrows, snippets=snippets, use_unicode=use_unicode)
    return self.update(new_general, **kwargs)

  UpdateGeneralSettings = update_general_settings

  def add_email_delegate(self, username, address, **kwargs):
    """Add an email delegate to the mail account.

    Args:
      username: string The name of the user.
      address: string The email address of the delegated account.
    """
    uri = self.MakeEmailSettingsUri(username=username,
                                    setting_id=SETTING_ID_DELEGATION)
    new_delegation = gdata.apps.emailsettings.data.EmailSettingsDelegation(
        uri=uri, address=address)
    return self.post(new_delegation, uri, **kwargs)

  AddEmailDelegate = add_email_delegate

  def retrieve_email_delegates(self, username, **kwargs):
    """Retrieve a feed of the email delegates for the specified username.

    Args:
      username: string The name of the user to get the email delegates for.

    Returns:
      A gdata.data.GDFeed of the user's email delegates.
    """
    uri = self.MakeEmailSettingsUri(username=username,
                                    setting_id=SETTING_ID_DELEGATION)
    return self.GetFeed(uri, auth_token=None, query=None, **kwargs)

  RetrieveEmailDelegates = retrieve_email_delegates

  def delete_email_delegate(self, username, address, **kwargs):
    """Delete an email delegate from the specified account.

    Args:
      username: string The name of the user.
      address: string The email address of the delegated account.
    """
    uri = self.MakeEmailSettingsUri(username=username,
                                    setting_id=SETTING_ID_DELEGATION)
    # The delegate's address is appended as the final path segment of the
    # delegation URI for the DELETE request.
    uri = uri + '/' + address
    return self.delete(uri, **kwargs)

  DeleteEmailDelegate = delete_email_delegate
Python
#!/usr/bin/python
#
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Data model classes for the Email Settings API."""

__author__ = 'Claudio Cherubino <ccherubino@google.com>'

import atom.data
import gdata.apps
import gdata.apps_property
import gdata.data

# This is required to work around a naming conflict between the Google
# Spreadsheets API and Python's built-in property function
pyproperty = property

# The apps:property name of the label property
LABEL_NAME = 'label'

# The apps:property from of the filter property
FILTER_FROM_NAME = 'from'
# The apps:property to of the filter property
FILTER_TO_NAME = 'to'
# The apps:property subject of the filter property
FILTER_SUBJECT_NAME = 'subject'
# The apps:property hasTheWord of the filter property
FILTER_HAS_THE_WORD_NAME = 'hasTheWord'
# The apps:property doesNotHaveTheWord of the filter property
FILTER_DOES_NOT_HAVE_THE_WORD_NAME = 'doesNotHaveTheWord'
# The apps:property hasAttachment of the filter property
FILTER_HAS_ATTACHMENTS_NAME = 'hasAttachment'
# The apps:property label of the filter action property
FILTER_LABEL = 'label'
# The apps:property shouldMarkAsRead of the filter action property
FILTER_MARK_AS_READ = 'shouldMarkAsRead'
# The apps:property shouldArchive of the filter action property
FILTER_ARCHIVE = 'shouldArchive'

# The apps:property name of the send-as alias property
SENDAS_ALIAS_NAME = 'name'
# The apps:property address of the send-as alias property
SENDAS_ALIAS_ADDRESS = 'address'
# The apps:property replyTo of the send-as alias property
SENDAS_ALIAS_REPLY_TO = 'replyTo'
# The apps:property makeDefault of the send-as alias property
SENDAS_ALIAS_MAKE_DEFAULT = 'makeDefault'

# The apps:property enable of the webclip property
WEBCLIP_ENABLE = 'enable'

# The apps:property enable of the forwarding property
FORWARDING_ENABLE = 'enable'
# The apps:property forwardTo of the forwarding property
FORWARDING_TO = 'forwardTo'
# The apps:property action of the forwarding property
FORWARDING_ACTION = 'action'

# The apps:property enable of the POP property
POP_ENABLE = 'enable'
# The apps:property enableFor of the POP property
POP_ENABLE_FOR = 'enableFor'
# The apps:property action of the POP property
POP_ACTION = 'action'

# The apps:property enable of the IMAP property
IMAP_ENABLE = 'enable'

# The apps:property enable of the vacation responder property
VACATION_RESPONDER_ENABLE = 'enable'
# The apps:property subject of the vacation responder property
VACATION_RESPONDER_SUBJECT = 'subject'
# The apps:property message of the vacation responder property
VACATION_RESPONDER_MESSAGE = 'message'
# The apps:property contactsOnly of the vacation responder property
VACATION_RESPONDER_CONTACTS_ONLY = 'contactsOnly'

# The apps:property signature of the signature property
SIGNATURE_VALUE = 'signature'

# The apps:property language of the language property
LANGUAGE_TAG = 'language'

# The apps:property pageSize of the general settings property
GENERAL_PAGE_SIZE = 'pageSize'
# The apps:property shortcuts of the general settings property
GENERAL_SHORTCUTS = 'shortcuts'
# The apps:property arrows of the general settings property
GENERAL_ARROWS = 'arrows'
# The apps:property snippets of the general settings property
GENERAL_SNIPPETS = 'snippets'
# The apps:property unicode of the general settings property
GENERAL_UNICODE = 'unicode'

# The apps:property delegationId of the email delegation property
DELEGATION_ID = "delegationId"
# The apps:property address of the email delegation property
DELEGATION_ADDRESS = 'address'
# The apps:property delegate of the email delegation property
DELEGATION_DELEGATE = "delegate"
# The apps:property status of the email delegation property
DELEGATION_STATUS = "status"


class EmailSettingsEntry(gdata.data.GDEntry):
  """Represents an Email Settings entry in object form.

  Subclasses expose typed accessors over the flat list of apps:property
  name/value pairs carried in self.property.
  """

  property = [gdata.apps_property.AppsProperty]

  def _GetProperty(self, name):
    """Get the apps:property value with the given name.

    Args:
      name: string Name of the apps:property value to get.

    Returns:
      The apps:property value with the given name, or None if the name
      was invalid.
    """
    value = None
    for p in self.property:
      if p.name == name:
        value = p.value
        break
    return value

  def _SetProperty(self, name, value):
    """Set the apps:property value with the given name to the given value.

    Args:
      name: string Name of the apps:property value to set.
      value: string Value to give the apps:property value with the given
          name.
    """
    # Overwrite an existing property of the same name in place; only
    # append a new AppsProperty when none exists yet.
    found = False
    for i in range(len(self.property)):
      if self.property[i].name == name:
        self.property[i].value = value
        found = True
        break
    if not found:
      self.property.append(gdata.apps_property.AppsProperty(name=name,
                                                            value=value))

  def find_edit_link(self):
    # Email Settings entries are edited at their own URI, so the entry's
    # uri doubles as its edit link.
    return self.uri


class EmailSettingsLabel(EmailSettingsEntry):
  """Represents a Label in object form."""

  def GetName(self):
    """Get the name of the Label object.

    Returns:
      The name of this Label object as a string or None.
    """
    return self._GetProperty(LABEL_NAME)

  def SetName(self, value):
    """Set the name of this Label object.

    Args:
      value: string The new label name to give this object.
    """
    self._SetProperty(LABEL_NAME, value)

  name = pyproperty(GetName, SetName)

  def __init__(self, uri=None, name=None, *args, **kwargs):
    """Constructs a new EmailSettingsLabel object with the given arguments.

    Args:
      uri: string (optional) The uri of this object for HTTP requests.
      name: string (optional) The name to give this new object.
args: The other parameters to pass to gdata.entry.GDEntry constructor. kwargs: The other parameters to pass to gdata.entry.GDEntry constructor. """ super(EmailSettingsLabel, self).__init__(*args, **kwargs) if uri: self.uri = uri if name: self.name = name class EmailSettingsFilter(EmailSettingsEntry): """Represents an Email Settings Filter in object form.""" def GetFrom(self): """Get the From value of the Filter object. Returns: The From value of this Filter object as a string or None. """ return self._GetProperty(FILTER_FROM_NAME) def SetFrom(self, value): """Set the From value of this Filter object. Args: value: string The new From value to give this object. """ self._SetProperty(FILTER_FROM_NAME, value) from_address = pyproperty(GetFrom, SetFrom) def GetTo(self): """Get the To value of the Filter object. Returns: The To value of this Filter object as a string or None. """ return self._GetProperty(FILTER_TO_NAME) def SetTo(self, value): """Set the To value of this Filter object. Args: value: string The new To value to give this object. """ self._SetProperty(FILTER_TO_NAME, value) to_address = pyproperty(GetTo, SetTo) def GetSubject(self): """Get the Subject value of the Filter object. Returns: The Subject value of this Filter object as a string or None. """ return self._GetProperty(FILTER_SUBJECT_NAME) def SetSubject(self, value): """Set the Subject value of this Filter object. Args: value: string The new Subject value to give this object. """ self._SetProperty(FILTER_SUBJECT_NAME, value) subject = pyproperty(GetSubject, SetSubject) def GetHasTheWord(self): """Get the HasTheWord value of the Filter object. Returns: The HasTheWord value of this Filter object as a string or None. """ return self._GetProperty(FILTER_HAS_THE_WORD_NAME) def SetHasTheWord(self, value): """Set the HasTheWord value of this Filter object. Args: value: string The new HasTheWord value to give this object. 
""" self._SetProperty(FILTER_HAS_THE_WORD_NAME, value) has_the_word = pyproperty(GetHasTheWord, SetHasTheWord) def GetDoesNotHaveTheWord(self): """Get the DoesNotHaveTheWord value of the Filter object. Returns: The DoesNotHaveTheWord value of this Filter object as a string or None. """ return self._GetProperty(FILTER_DOES_NOT_HAVE_THE_WORD_NAME) def SetDoesNotHaveTheWord(self, value): """Set the DoesNotHaveTheWord value of this Filter object. Args: value: string The new DoesNotHaveTheWord value to give this object. """ self._SetProperty(FILTER_DOES_NOT_HAVE_THE_WORD_NAME, value) does_not_have_the_word = pyproperty(GetDoesNotHaveTheWord, SetDoesNotHaveTheWord) def GetHasAttachments(self): """Get the HasAttachments value of the Filter object. Returns: The HasAttachments value of this Filter object as a string or None. """ return self._GetProperty(FILTER_HAS_ATTACHMENTS_NAME) def SetHasAttachments(self, value): """Set the HasAttachments value of this Filter object. Args: value: string The new HasAttachments value to give this object. """ self._SetProperty(FILTER_HAS_ATTACHMENTS_NAME, value) has_attachments = pyproperty(GetHasAttachments, SetHasAttachments) def GetLabel(self): """Get the Label value of the Filter object. Returns: The Label value of this Filter object as a string or None. """ return self._GetProperty(FILTER_LABEL) def SetLabel(self, value): """Set the Label value of this Filter object. Args: value: string The new Label value to give this object. """ self._SetProperty(FILTER_LABEL, value) label = pyproperty(GetLabel, SetLabel) def GetMarkAsRead(self): """Get the MarkAsRead value of the Filter object. Returns: The MarkAsRead value of this Filter object as a string or None. """ return self._GetProperty(FILTER_MARK_AS_READ) def SetMarkAsRead(self, value): """Set the MarkAsRead value of this Filter object. Args: value: string The new MarkAsRead value to give this object. 
""" self._SetProperty(FILTER_MARK_AS_READ, value) mark_as_read = pyproperty(GetMarkAsRead, SetMarkAsRead) def GetArchive(self): """Get the Archive value of the Filter object. Returns: The Archive value of this Filter object as a string or None. """ return self._GetProperty(FILTER_ARCHIVE) def SetArchive(self, value): """Set the Archive value of this Filter object. Args: value: string The new Archive value to give this object. """ self._SetProperty(FILTER_ARCHIVE, value) archive = pyproperty(GetArchive, SetArchive) def __init__(self, uri=None, from_address=None, to_address=None, subject=None, has_the_word=None, does_not_have_the_word=None, has_attachments=None, label=None, mark_as_read=None, archive=None, *args, **kwargs): """Constructs a new EmailSettingsFilter object with the given arguments. Args: uri: string (optional) The uri of of this object for HTTP requests. from_address: string (optional) The source email address for the filter. to_address: string (optional) The destination email address for the filter. subject: string (optional) The value the email must have in its subject to be filtered. has_the_word: string (optional) The value the email must have in its subject or body to be filtered. does_not_have_the_word: string (optional) The value the email cannot have in its subject or body to be filtered. has_attachments: Boolean (optional) Whether or not the email must have an attachment to be filtered. label: string (optional) The name of the label to apply to messages matching the filter criteria. mark_as_read: Boolean (optional) Whether or not to mark messages matching the filter criteria as read. archive: Boolean (optional) Whether or not to move messages matching to Archived state. args: The other parameters to pass to gdata.entry.GDEntry constructor. kwargs: The other parameters to pass to gdata.entry.GDEntry constructor. 
""" super(EmailSettingsFilter, self).__init__(*args, **kwargs) if uri: self.uri = uri if from_address: self.from_address = from_address if to_address: self.to_address = to_address if subject: self.subject = subject if has_the_word: self.has_the_word = has_the_word if does_not_have_the_word: self.does_not_have_the_word = does_not_have_the_word if has_attachments is not None: self.has_attachments = str(has_attachments) if label: self.label = label if mark_as_read is not None: self.mark_as_read = str(mark_as_read) if archive is not None: self.archive = str(archive) class EmailSettingsSendAsAlias(EmailSettingsEntry): """Represents an Email Settings send-as Alias in object form.""" def GetName(self): """Get the Name of the send-as Alias object. Returns: The Name of this send-as Alias object as a string or None. """ return self._GetProperty(SENDAS_ALIAS_NAME) def SetName(self, value): """Set the Name of this send-as Alias object. Args: value: string The new Name to give this object. """ self._SetProperty(SENDAS_ALIAS_NAME, value) name = pyproperty(GetName, SetName) def GetAddress(self): """Get the Address of the send-as Alias object. Returns: The Address of this send-as Alias object as a string or None. """ return self._GetProperty(SENDAS_ALIAS_ADDRESS) def SetAddress(self, value): """Set the Address of this send-as Alias object. Args: value: string The new Address to give this object. """ self._SetProperty(SENDAS_ALIAS_ADDRESS, value) address = pyproperty(GetAddress, SetAddress) def GetReplyTo(self): """Get the ReplyTo address of the send-as Alias object. Returns: The ReplyTo address of this send-as Alias object as a string or None. """ return self._GetProperty(SENDAS_ALIAS_REPLY_TO) def SetReplyTo(self, value): """Set the ReplyTo address of this send-as Alias object. Args: value: string The new ReplyTo address to give this object. 
""" self._SetProperty(SENDAS_ALIAS_REPLY_TO, value) reply_to = pyproperty(GetReplyTo, SetReplyTo) def GetMakeDefault(self): """Get the MakeDefault value of the send-as Alias object. Returns: The MakeDefault value of this send-as Alias object as a string or None. """ return self._GetProperty(SENDAS_ALIAS_MAKE_DEFAULT) def SetMakeDefault(self, value): """Set the MakeDefault value of this send-as Alias object. Args: value: string The new MakeDefault valueto give this object.WebClip """ self._SetProperty(SENDAS_ALIAS_MAKE_DEFAULT, value) make_default = pyproperty(GetMakeDefault, SetMakeDefault) def __init__(self, uri=None, name=None, address=None, reply_to=None, make_default=None, *args, **kwargs): """Constructs a new EmailSettingsSendAsAlias object with the given arguments. Args: uri: string (optional) The uri of of this object for HTTP requests. name: string (optional) The name that will appear in the "From" field for this user. address: string (optional) The email address that appears as the origination address for emails sent by this user. reply_to: string (optional) The address to be used as the reply-to address in email sent using the alias. make_default: Boolean (optional) Whether or not this alias should become the default alias for this user. args: The other parameters to pass to gdata.entry.GDEntry constructor. kwargs: The other parameters to pass to gdata.entry.GDEntry constructor. """ super(EmailSettingsSendAsAlias, self).__init__(*args, **kwargs) if uri: self.uri = uri if name: self.name = name if address: self.address = address if reply_to: self.reply_to = reply_to if make_default is not None: self.make_default = str(make_default) class EmailSettingsWebClip(EmailSettingsEntry): """Represents a WebClip in object form.""" def GetEnable(self): """Get the Enable value of the WebClip object. Returns: The Enable value of this WebClip object as a string or None. 
""" return self._GetProperty(WEBCLIP_ENABLE) def SetEnable(self, value): """Set the Enable value of this WebClip object. Args: value: string The new Enable value to give this object. """ self._SetProperty(WEBCLIP_ENABLE, value) enable = pyproperty(GetEnable, SetEnable) def __init__(self, uri=None, enable=None, *args, **kwargs): """Constructs a new EmailSettingsWebClip object with the given arguments. Args: uri: string (optional) The uri of of this object for HTTP requests. enable: Boolean (optional) Whether to enable showing Web clips. args: The other parameters to pass to gdata.entry.GDEntry constructor. kwargs: The other parameters to pass to gdata.entry.GDEntry constructor. """ super(EmailSettingsWebClip, self).__init__(*args, **kwargs) if uri: self.uri = uri if enable is not None: self.enable = str(enable) class EmailSettingsForwarding(EmailSettingsEntry): """Represents Forwarding settings in object form.""" def GetEnable(self): """Get the Enable value of the Forwarding object. Returns: The Enable value of this Forwarding object as a string or None. """ return self._GetProperty(FORWARDING_ENABLE) def SetEnable(self, value): """Set the Enable value of this Forwarding object. Args: value: string The new Enable value to give this object. """ self._SetProperty(FORWARDING_ENABLE, value) enable = pyproperty(GetEnable, SetEnable) def GetForwardTo(self): """Get the ForwardTo value of the Forwarding object. Returns: The ForwardTo value of this Forwarding object as a string or None. """ return self._GetProperty(FORWARDING_TO) def SetForwardTo(self, value): """Set the ForwardTo value of this Forwarding object. Args: value: string The new ForwardTo value to give this object. """ self._SetProperty(FORWARDING_TO, value) forward_to = pyproperty(GetForwardTo, SetForwardTo) def GetAction(self): """Get the Action value of the Forwarding object. Returns: The Action value of this Forwarding object as a string or None. 
""" return self._GetProperty(FORWARDING_ACTION) def SetAction(self, value): """Set the Action value of this Forwarding object. Args: value: string The new Action value to give this object. """ self._SetProperty(FORWARDING_ACTION, value) action = pyproperty(GetAction, SetAction) def __init__(self, uri=None, enable=None, forward_to=None, action=None, *args, **kwargs): """Constructs a new EmailSettingsForwarding object with the given arguments. Args: uri: string (optional) The uri of of this object for HTTP requests. enable: Boolean (optional) Whether to enable incoming email forwarding. forward_to: string (optional) The address email will be forwarded to. action: string (optional) The action to perform after forwarding an email ("KEEP", "ARCHIVE", "DELETE"). args: The other parameters to pass to gdata.entry.GDEntry constructor. kwargs: The other parameters to pass to gdata.entry.GDEntry constructor. """ super(EmailSettingsForwarding, self).__init__(*args, **kwargs) if uri: self.uri = uri if enable is not None: self.enable = str(enable) if forward_to: self.forward_to = forward_to if action: self.action = action class EmailSettingsPop(EmailSettingsEntry): """Represents POP settings in object form.""" def GetEnable(self): """Get the Enable value of the POP object. Returns: The Enable value of this POP object as a string or None. """ return self._GetProperty(POP_ENABLE) def SetEnable(self, value): """Set the Enable value of this POP object. Args: value: string The new Enable value to give this object. """ self._SetProperty(POP_ENABLE, value) enable = pyproperty(GetEnable, SetEnable) def GetEnableFor(self): """Get the EnableFor value of the POP object. Returns: The EnableFor value of this POP object as a string or None. """ return self._GetProperty(POP_ENABLE_FOR) def SetEnableFor(self, value): """Set the EnableFor value of this POP object. Args: value: string The new EnableFor value to give this object. 
""" self._SetProperty(POP_ENABLE_FOR, value) enable_for = pyproperty(GetEnableFor, SetEnableFor) def GetPopAction(self): """Get the Action value of the POP object. Returns: The Action value of this POP object as a string or None. """ return self._GetProperty(POP_ACTION) def SetPopAction(self, value): """Set the Action value of this POP object. Args: value: string The new Action value to give this object. """ self._SetProperty(POP_ACTION, value) action = pyproperty(GetPopAction, SetPopAction) def __init__(self, uri=None, enable=None, enable_for=None, action=None, *args, **kwargs): """Constructs a new EmailSettingsPOP object with the given arguments. Args: uri: string (optional) The uri of of this object for HTTP requests. enable: Boolean (optional) Whether to enable incoming POP3 access. enable_for: string (optional) Whether to enable POP3 for all mail ("ALL_MAIL"), or mail from now on ("MAIL_FROM_NOW_ON"). action: string (optional) What Google Mail should do with its copy of the email after it is retrieved using POP ("KEEP", "ARCHIVE", or "DELETE"). args: The other parameters to pass to gdata.entry.GDEntry constructor. kwargs: The other parameters to pass to gdata.entry.GDEntry constructor. """ super(EmailSettingsPop, self).__init__(*args, **kwargs) if uri: self.uri = uri if enable is not None: self.enable = str(enable) if enable_for: self.enable_for = enable_for if action: self.action = action class EmailSettingsImap(EmailSettingsEntry): """Represents IMAP settings in object form.""" def GetEnable(self): """Get the Enable value of the IMAP object. Returns: The Enable value of this IMAP object as a string or None. """ return self._GetProperty(IMAP_ENABLE) def SetEnable(self, value): """Set the Enable value of this IMAP object. Args: value: string The new Enable value to give this object. 
""" self._SetProperty(IMAP_ENABLE, value) enable = pyproperty(GetEnable, SetEnable) def __init__(self, uri=None, enable=None, *args, **kwargs): """Constructs a new EmailSettingsImap object with the given arguments. Args: uri: string (optional) The uri of of this object for HTTP requests. enable: Boolean (optional) Whether to enable IMAP access. args: The other parameters to pass to gdata.entry.GDEntry constructor. kwargs: The other parameters to pass to gdata.entry.GDEntry constructor. """ super(EmailSettingsImap, self).__init__(*args, **kwargs) if uri: self.uri = uri if enable is not None: self.enable = str(enable) class EmailSettingsVacationResponder(EmailSettingsEntry): """Represents Vacation Responder settings in object form.""" def GetEnable(self): """Get the Enable value of the Vacation Responder object. Returns: The Enable value of this Vacation Responder object as a string or None. """ return self._GetProperty(VACATION_RESPONDER_ENABLE) def SetEnable(self, value): """Set the Enable value of this Vacation Responder object. Args: value: string The new Enable value to give this object. """ self._SetProperty(VACATION_RESPONDER_ENABLE, value) enable = pyproperty(GetEnable, SetEnable) def GetSubject(self): """Get the Subject value of the Vacation Responder object. Returns: The Subject value of this Vacation Responder object as a string or None. """ return self._GetProperty(VACATION_RESPONDER_SUBJECT) def SetSubject(self, value): """Set the Subject value of this Vacation Responder object. Args: value: string The new Subject value to give this object. """ self._SetProperty(VACATION_RESPONDER_SUBJECT, value) subject = pyproperty(GetSubject, SetSubject) def GetMessage(self): """Get the Message value of the Vacation Responder object. Returns: The Message value of this Vacation Responder object as a string or None. """ return self._GetProperty(VACATION_RESPONDER_MESSAGE) def SetMessage(self, value): """Set the Message value of this Vacation Responder object. 
Args: value: string The new Message value to give this object. """ self._SetProperty(VACATION_RESPONDER_MESSAGE, value) message = pyproperty(GetMessage, SetMessage) def GetContactsOnly(self): """Get the ContactsOnly value of the Vacation Responder object. Returns: The ContactsOnly value of this Vacation Responder object as a string or None. """ return self._GetProperty(VACATION_RESPONDER_CONTACTS_ONLY) def SetContactsOnly(self, value): """Set the ContactsOnly value of this Vacation Responder object. Args: value: string The new ContactsOnly value to give this object. """ self._SetProperty(VACATION_RESPONDER_CONTACTS_ONLY, value) contacts_only = pyproperty(GetContactsOnly, SetContactsOnly) def __init__(self, uri=None, enable=None, subject=None, message=None, contacts_only=None, *args, **kwargs): """Constructs a new EmailSettingsVacationResponder object with the given arguments. Args: uri: string (optional) The uri of of this object for HTTP requests. enable: Boolean (optional) Whether to enable the vacation responder. subject: string (optional) The subject line of the vacation responder autoresponse. message: string (optional) The message body of the vacation responder autoresponse. contacts_only: Boolean (optional) Whether to only send autoresponses to known contacts. args: The other parameters to pass to gdata.entry.GDEntry constructor. kwargs: The other parameters to pass to gdata.entry.GDEntry constructor. """ super(EmailSettingsVacationResponder, self).__init__(*args, **kwargs) if uri: self.uri = uri if enable is not None: self.enable = str(enable) if subject: self.subject = subject if message: self.message = message if contacts_only is not None: self.contacts_only = str(contacts_only) class EmailSettingsSignature(EmailSettingsEntry): """Represents a Signature in object form.""" def GetValue(self): """Get the value of the Signature object. Returns: The value of this Signature object as a string or None. 
""" value = self._GetProperty(SIGNATURE_VALUE) if value == ' ': # hack to support empty signature return '' else: return value def SetValue(self, value): """Set the name of this Signature object. Args: value: string The new signature value to give this object. """ if value == '': # hack to support empty signature value = ' ' self._SetProperty(SIGNATURE_VALUE, value) signature_value = pyproperty(GetValue, SetValue) def __init__(self, uri=None, signature=None, *args, **kwargs): """Constructs a new EmailSettingsSignature object with the given arguments. Args: uri: string (optional) The uri of of this object for HTTP requests. signature: string (optional) The signature to be appended to outgoing messages. args: The other parameters to pass to gdata.entry.GDEntry constructor. kwargs: The other parameters to pass to gdata.entry.GDEntry constructor. """ super(EmailSettingsSignature, self).__init__(*args, **kwargs) if uri: self.uri = uri if signature is not None: self.signature_value = signature class EmailSettingsLanguage(EmailSettingsEntry): """Represents Language Settings in object form.""" def GetLanguage(self): """Get the tag of the Language object. Returns: The tag of this Language object as a string or None. """ return self._GetProperty(LANGUAGE_TAG) def SetLanguage(self, value): """Set the tag of this Language object. Args: value: string The new tag value to give this object. """ self._SetProperty(LANGUAGE_TAG, value) language_tag = pyproperty(GetLanguage, SetLanguage) def __init__(self, uri=None, language=None, *args, **kwargs): """Constructs a new EmailSettingsLanguage object with the given arguments. Args: uri: string (optional) The uri of of this object for HTTP requests. language: string (optional) The language tag for Google Mail's display language. args: The other parameters to pass to gdata.entry.GDEntry constructor. kwargs: The other parameters to pass to gdata.entry.GDEntry constructor. 
""" super(EmailSettingsLanguage, self).__init__(*args, **kwargs) if uri: self.uri = uri if language: self.language_tag = language class EmailSettingsGeneral(EmailSettingsEntry): """Represents General Settings in object form.""" def GetPageSize(self): """Get the Page Size value of the General Settings object. Returns: The Page Size value of this General Settings object as a string or None. """ return self._GetProperty(GENERAL_PAGE_SIZE) def SetPageSize(self, value): """Set the Page Size value of this General Settings object. Args: value: string The new Page Size value to give this object. """ self._SetProperty(GENERAL_PAGE_SIZE, value) page_size = pyproperty(GetPageSize, SetPageSize) def GetShortcuts(self): """Get the Shortcuts value of the General Settings object. Returns: The Shortcuts value of this General Settings object as a string or None. """ return self._GetProperty(GENERAL_SHORTCUTS) def SetShortcuts(self, value): """Set the Shortcuts value of this General Settings object. Args: value: string The new Shortcuts value to give this object. """ self._SetProperty(GENERAL_SHORTCUTS, value) shortcuts = pyproperty(GetShortcuts, SetShortcuts) def GetArrows(self): """Get the Arrows value of the General Settings object. Returns: The Arrows value of this General Settings object as a string or None. """ return self._GetProperty(GENERAL_ARROWS) def SetArrows(self, value): """Set the Arrows value of this General Settings object. Args: value: string The new Arrows value to give this object. """ self._SetProperty(GENERAL_ARROWS, value) arrows = pyproperty(GetArrows, SetArrows) def GetSnippets(self): """Get the Snippets value of the General Settings object. Returns: The Snippets value of this General Settings object as a string or None. """ return self._GetProperty(GENERAL_SNIPPETS) def SetSnippets(self, value): """Set the Snippets value of this General Settings object. Args: value: string The new Snippets value to give this object. 
""" self._SetProperty(GENERAL_SNIPPETS, value) snippets = pyproperty(GetSnippets, SetSnippets) def GetUnicode(self): """Get the Unicode value of the General Settings object. Returns: The Unicode value of this General Settings object as a string or None. """ return self._GetProperty(GENERAL_UNICODE) def SetUnicode(self, value): """Set the Unicode value of this General Settings object. Args: value: string The new Unicode value to give this object. """ self._SetProperty(GENERAL_UNICODE, value) use_unicode = pyproperty(GetUnicode, SetUnicode) def __init__(self, uri=None, page_size=None, shortcuts=None, arrows=None, snippets=None, use_unicode=None, *args, **kwargs): """Constructs a new EmailSettingsGeneral object with the given arguments. Args: uri: string (optional) The uri of of this object for HTTP requests. page_size: int (optional) The number of conversations to be shown per page. shortcuts: Boolean (optional) Whether to enable keyboard shortcuts. arrows: Boolean (optional) Whether to display arrow-shaped personal indicators next to email sent specifically to the user. snippets: Boolean (optional) Whether to display snippets of the messages in the inbox and when searching. use_unicode: Boolean (optional) Whether to use UTF-8 (unicode) encoding for all outgoing messages. args: The other parameters to pass to gdata.entry.GDEntry constructor. kwargs: The other parameters to pass to gdata.entry.GDEntry constructor. """ super(EmailSettingsGeneral, self).__init__(*args, **kwargs) if uri: self.uri = uri if page_size is not None: self.page_size = str(page_size) if shortcuts is not None: self.shortcuts = str(shortcuts) if arrows is not None: self.arrows = str(arrows) if snippets is not None: self.snippets = str(snippets) if use_unicode is not None: self.use_unicode = str(use_unicode) class EmailSettingsDelegation(EmailSettingsEntry): """Represents an Email Settings delegation entry in object form.""" def GetAddress(self): """Get the email address of the delegated user. 
Returns: The email address of the delegated user as a string or None. """ return self._GetProperty(DELEGATION_ADDRESS) def SetAddress(self, value): """Set the email address of of the delegated user. Args: value: string The email address of another user on the same domain """ self._SetProperty(DELEGATION_ADDRESS, value) address = pyproperty(GetAddress, SetAddress) def __init__(self, uri=None, address=None, *args, **kwargs): """Constructs a new EmailSettingsDelegation object with the given arguments. Args: uri: string (optional) The uri of of this object for HTTP requests. address: string The email address of the delegated user. """ super(EmailSettingsDelegation, self).__init__(*args, **kwargs) if uri: self.uri = uri if address: self.address = address
Python
#!/usr/bin/python # # Copyright (C) 2008 Google # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License.
Python
#!/usr/bin/python
#
# Copyright (C) 2008 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Allow Google Apps domain administrators to manage organization units and
organization users.

  OrganizationService: Provides methods to manage organization units and
                       organization users.
"""

__author__ = 'Alexandre Vivien (alex@simplecode.fr)'


import gdata.apps
import gdata.apps.service
import gdata.service


API_VER = '2.0'
CUSTOMER_BASE_URL = '/a/feeds/customer/2.0/customerId'
BASE_UNIT_URL = '/a/feeds/orgunit/' + API_VER + '/%s'
UNIT_URL = BASE_UNIT_URL + '/%s'
UNIT_ALL_URL = BASE_UNIT_URL + '?get=all'
UNIT_CHILD_URL = BASE_UNIT_URL + '?get=children&orgUnitPath=%s'
BASE_USER_URL = '/a/feeds/orguser/' + API_VER + '/%s'
USER_URL = BASE_USER_URL + '/%s'
USER_ALL_URL = BASE_USER_URL + '?get=all'
USER_CHILD_URL = BASE_USER_URL + '?get=children&orgUnitPath=%s'


class OrganizationService(gdata.apps.service.PropertyService):
  """Client for the Google Apps Organizations service."""

  def _Bool2Str(self, b):
    """Convert a boolean (or None) to the lowercase string the API expects.

    Args:
      b: Boolean or None.

    Returns:
      'true' or 'false' as a string, or None when b is None.
    """
    if b is None:
      return None
    return str(b is True).lower()

  def RetrieveCustomerId(self):
    """Retrieve the Customer ID for the account of the authenticated
       administrator making this request.

    Args:
      None.

    Returns:
      A dict containing the result of the retrieve operation.
    """
    uri = CUSTOMER_BASE_URL
    return self._GetProperties(uri)

  def CreateOrgUnit(self, customer_id, name, parent_org_unit_path='/',
                    description='', block_inheritance=False):
    """Create an Organization Unit.

    Args:
      customer_id: The ID of the Google Apps customer.
      name: The simple organization unit text name, not the full path name.
      parent_org_unit_path: The full path of the parental tree to this
                            organization unit (default: '/').
                            Note: Each element of the path MUST be URL
                            encoded (example:
                            finance%2Forganization/suborganization)
      description: The human readable text description of the organization
                   unit (optional).
      block_inheritance: This parameter blocks policy setting inheritance
                         from organization units higher in the organization
                         tree (default: False).

    Returns:
      A dict containing the result of the create operation.
    """
    uri = BASE_UNIT_URL % (customer_id)
    properties = {}
    properties['name'] = name
    properties['parentOrgUnitPath'] = parent_org_unit_path
    properties['description'] = description
    properties['blockInheritance'] = self._Bool2Str(block_inheritance)
    return self._PostProperties(uri, properties)

  def UpdateOrgUnit(self, customer_id, org_unit_path, name=None,
                    parent_org_unit_path=None, description=None,
                    block_inheritance=None):
    """Update an Organization Unit.

    Args:
      customer_id: The ID of the Google Apps customer.
      org_unit_path: The organization's full path name.
                     Note: Each element of the path MUST be URL encoded
                     (example: finance%2Forganization/suborganization)
      name: The simple organization unit text name, not the full path name.
      parent_org_unit_path: The full path of the parental tree to this
                            organization unit.
                            Note: Each element of the path MUST be URL
                            encoded (example:
                            finance%2Forganization/suborganization)
      description: The human readable text description of the organization
                   unit.
      block_inheritance: This parameter blocks policy setting inheritance
                         from organization units higher in the organization
                         tree.

    Returns:
      A dict containing the result of the update operation.
    """
    uri = UNIT_URL % (customer_id, org_unit_path)
    properties = {}
    if name:
      properties['name'] = name
    if parent_org_unit_path:
      properties['parentOrgUnitPath'] = parent_org_unit_path
    if description:
      properties['description'] = description
    # Test against None (not truthiness) so an explicit
    # block_inheritance=False is sent to the API rather than silently
    # dropped; only an omitted (None) value leaves the setting unchanged.
    if block_inheritance is not None:
      properties['blockInheritance'] = self._Bool2Str(block_inheritance)
    return self._PutProperties(uri, properties)

  def MoveUserToOrgUnit(self, customer_id, org_unit_path, users_to_move):
    """Move a user to an Organization Unit.

    Args:
      customer_id: The ID of the Google Apps customer.
      org_unit_path: The organization's full path name.
                     Note: Each element of the path MUST be URL encoded
                     (example: finance%2Forganization/suborganization)
      users_to_move: Email addresses list of users to move.
                     Note: You can move a maximum of 25 users at one time.

    Returns:
      A dict containing the result of the update operation.
    """
    uri = UNIT_URL % (customer_id, org_unit_path)
    properties = {}
    if users_to_move and isinstance(users_to_move, list):
      properties['usersToMove'] = ', '.join(users_to_move)
    return self._PutProperties(uri, properties)

  def RetrieveOrgUnit(self, customer_id, org_unit_path):
    """Retrieve an OrgUnit based on its path.

    Args:
      customer_id: The ID of the Google Apps customer.
      org_unit_path: The organization's full path name.
                     Note: Each element of the path MUST be URL encoded
                     (example: finance%2Forganization/suborganization)

    Returns:
      A dict containing the result of the retrieve operation.
    """
    uri = UNIT_URL % (customer_id, org_unit_path)
    return self._GetProperties(uri)

  def DeleteOrgUnit(self, customer_id, org_unit_path):
    """Delete an OrgUnit based on its path.

    Args:
      customer_id: The ID of the Google Apps customer.
      org_unit_path: The organization's full path name.
                     Note: Each element of the path MUST be URL encoded
                     (example: finance%2Forganization/suborganization)

    Returns:
      A dict containing the result of the delete operation.
    """
    uri = UNIT_URL % (customer_id, org_unit_path)
    return self._DeleteProperties(uri)

  def RetrieveAllOrgUnits(self, customer_id):
    """Retrieve all OrgUnits in the customer's domain.

    Args:
      customer_id: The ID of the Google Apps customer.

    Returns:
      A list containing the result of the retrieve operation.
    """
    uri = UNIT_ALL_URL % (customer_id)
    return self._GetPropertiesList(uri)

  def RetrievePageOfOrgUnits(self, customer_id, startKey=None):
    """Retrieve one page of OrgUnits in the customer's domain.

    Args:
      customer_id: The ID of the Google Apps customer.
      startKey: The key to continue for pagination through all OrgUnits.

    Returns:
      A feed object containing the result of the retrieve operation.
    """
    uri = UNIT_ALL_URL % (customer_id)
    if startKey is not None:
      uri += "&startKey=" + startKey
    property_feed = self._GetPropertyFeed(uri)
    return property_feed

  def RetrieveSubOrgUnits(self, customer_id, org_unit_path):
    """Retrieve all Sub-OrgUnits of the provided OrgUnit.

    Args:
      customer_id: The ID of the Google Apps customer.
      org_unit_path: The organization's full path name.
                     Note: Each element of the path MUST be URL encoded
                     (example: finance%2Forganization/suborganization)

    Returns:
      A list containing the result of the retrieve operation.
    """
    uri = UNIT_CHILD_URL % (customer_id, org_unit_path)
    return self._GetPropertiesList(uri)

  def RetrieveOrgUser(self, customer_id, user_email):
    """Retrieve the OrgUnit of the user.

    Args:
      customer_id: The ID of the Google Apps customer.
      user_email: The email address of the user.

    Returns:
      A dict containing the result of the retrieve operation.
    """
    uri = USER_URL % (customer_id, user_email)
    return self._GetProperties(uri)

  def UpdateOrgUser(self, customer_id, user_email, org_unit_path):
    """Update the OrgUnit of an OrgUser.

    Args:
      customer_id: The ID of the Google Apps customer.
      user_email: The email address of the user.
      org_unit_path: The new organization's full path name.
                     Note: Each element of the path MUST be URL encoded
                     (example: finance%2Forganization/suborganization)

    Returns:
      A dict containing the result of the update operation.
    """
    uri = USER_URL % (customer_id, user_email)
    properties = {}
    if org_unit_path:
      properties['orgUnitPath'] = org_unit_path
    return self._PutProperties(uri, properties)

  def RetrieveAllOrgUsers(self, customer_id):
    """Retrieve all OrgUsers in the customer's domain.

    Args:
      customer_id: The ID of the Google Apps customer.

    Returns:
      A list containing the result of the retrieve operation.
    """
    uri = USER_ALL_URL % (customer_id)
    return self._GetPropertiesList(uri)

  def RetrievePageOfOrgUsers(self, customer_id, startKey=None):
    """Retrieve one page of OrgUsers in the customer's domain.

    Args:
      customer_id: The ID of the Google Apps customer.
      startKey: The key to continue for pagination through all OrgUsers.

    Returns:
      A feed object containing the result of the retrieve operation.
    """
    uri = USER_ALL_URL % (customer_id)
    if startKey is not None:
      uri += "&startKey=" + startKey
    property_feed = self._GetPropertyFeed(uri)
    return property_feed

  def RetrieveOrgUnitUsers(self, customer_id, org_unit_path):
    """Retrieve all OrgUsers of the provided OrgUnit.

    Args:
      customer_id: The ID of the Google Apps customer.
      org_unit_path: The organization's full path name.
                     Note: Each element of the path MUST be URL encoded
                     (example: finance%2Forganization/suborganization)

    Returns:
      A list containing the result of the retrieve operation.
    """
    uri = USER_CHILD_URL % (customer_id, org_unit_path)
    return self._GetPropertiesList(uri)

  def RetrieveOrgUnitPageOfUsers(self, customer_id, org_unit_path,
                                 startKey=None):
    """Retrieve one page of OrgUsers of the provided OrgUnit.

    Args:
      customer_id: The ID of the Google Apps customer.
      org_unit_path: The organization's full path name.
                     Note: Each element of the path MUST be URL encoded
                     (example: finance%2Forganization/suborganization)
      startKey: The key to continue for pagination through all OrgUsers.

    Returns:
      A feed object containing the result of the retrieve operation.
    """
    uri = USER_CHILD_URL % (customer_id, org_unit_path)
    if startKey is not None:
      uri += "&startKey=" + startKey
    property_feed = self._GetPropertyFeed(uri)
    return property_feed
Python
#!/usr/bin/python
#
# Copyright (C) 2007 SIOS Technology, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Contains objects used with Google Apps."""

__author__ = 'tmatsuo@sios.com (Takashi MATSUO)'


import atom
import gdata


# XML namespaces which are often used in Google Apps entity.
APPS_NAMESPACE = 'http://schemas.google.com/apps/2006'
APPS_TEMPLATE = '{http://schemas.google.com/apps/2006}%s'


class EmailList(atom.AtomBase):
  """The Google Apps EmailList element."""

  _tag = 'emailList'
  _namespace = APPS_NAMESPACE
  _children = atom.AtomBase._children.copy()
  _attributes = atom.AtomBase._attributes.copy()
  _attributes['name'] = 'name'

  def __init__(self, name=None, extension_elements=None,
               extension_attributes=None, text=None):
    self.name = name
    self.text = text
    self.extension_elements = extension_elements or []
    self.extension_attributes = extension_attributes or {}


def EmailListFromString(xml_string):
  return atom.CreateClassFromXMLString(EmailList, xml_string)


class Who(atom.AtomBase):
  """The Google Apps Who element."""

  _tag = 'who'
  _namespace = gdata.GDATA_NAMESPACE
  _children = atom.AtomBase._children.copy()
  _attributes = atom.AtomBase._attributes.copy()
  _attributes['rel'] = 'rel'
  _attributes['email'] = 'email'

  def __init__(self, rel=None, email=None, extension_elements=None,
               extension_attributes=None, text=None):
    self.rel = rel
    self.email = email
    self.text = text
    self.extension_elements = extension_elements or []
    self.extension_attributes = extension_attributes or {}


def WhoFromString(xml_string):
  return atom.CreateClassFromXMLString(Who, xml_string)


class Login(atom.AtomBase):
  """The Google Apps Login element."""

  _tag = 'login'
  _namespace = APPS_NAMESPACE
  _children = atom.AtomBase._children.copy()
  _attributes = atom.AtomBase._attributes.copy()
  _attributes['userName'] = 'user_name'
  _attributes['password'] = 'password'
  _attributes['suspended'] = 'suspended'
  _attributes['admin'] = 'admin'
  _attributes['changePasswordAtNextLogin'] = 'change_password'
  _attributes['agreedToTerms'] = 'agreed_to_terms'
  _attributes['ipWhitelisted'] = 'ip_whitelisted'
  _attributes['hashFunctionName'] = 'hash_function_name'

  def __init__(self, user_name=None, password=None, suspended=None,
               ip_whitelisted=None, hash_function_name=None,
               admin=None, change_password=None, agreed_to_terms=None,
               extension_elements=None, extension_attributes=None,
               text=None):
    self.user_name = user_name
    self.password = password
    self.suspended = suspended
    self.admin = admin
    self.change_password = change_password
    self.agreed_to_terms = agreed_to_terms
    self.ip_whitelisted = ip_whitelisted
    self.hash_function_name = hash_function_name
    self.text = text
    self.extension_elements = extension_elements or []
    self.extension_attributes = extension_attributes or {}


def LoginFromString(xml_string):
  return atom.CreateClassFromXMLString(Login, xml_string)


class Quota(atom.AtomBase):
  """The Google Apps Quota element."""

  _tag = 'quota'
  _namespace = APPS_NAMESPACE
  _children = atom.AtomBase._children.copy()
  _attributes = atom.AtomBase._attributes.copy()
  _attributes['limit'] = 'limit'

  def __init__(self, limit=None, extension_elements=None,
               extension_attributes=None, text=None):
    self.limit = limit
    self.text = text
    self.extension_elements = extension_elements or []
    self.extension_attributes = extension_attributes or {}


def QuotaFromString(xml_string):
  return atom.CreateClassFromXMLString(Quota, xml_string)


class Name(atom.AtomBase):
  """The Google Apps Name element."""

  _tag = 'name'
  _namespace = APPS_NAMESPACE
  _children = atom.AtomBase._children.copy()
  _attributes = atom.AtomBase._attributes.copy()
  _attributes['familyName'] = 'family_name'
  _attributes['givenName'] = 'given_name'

  def __init__(self, family_name=None, given_name=None,
               extension_elements=None, extension_attributes=None, text=None):
    self.family_name = family_name
    self.given_name = given_name
    self.text = text
    self.extension_elements = extension_elements or []
    self.extension_attributes = extension_attributes or {}


def NameFromString(xml_string):
  return atom.CreateClassFromXMLString(Name, xml_string)


class Nickname(atom.AtomBase):
  """The Google Apps Nickname element."""

  _tag = 'nickname'
  _namespace = APPS_NAMESPACE
  _children = atom.AtomBase._children.copy()
  _attributes = atom.AtomBase._attributes.copy()
  _attributes['name'] = 'name'

  def __init__(self, name=None, extension_elements=None,
               extension_attributes=None, text=None):
    self.name = name
    self.text = text
    self.extension_elements = extension_elements or []
    self.extension_attributes = extension_attributes or {}


def NicknameFromString(xml_string):
  return atom.CreateClassFromXMLString(Nickname, xml_string)


class NicknameEntry(gdata.GDataEntry):
  """A Google Apps flavor of an Atom Entry for Nickname."""

  _tag = 'entry'
  _namespace = atom.ATOM_NAMESPACE
  _children = gdata.GDataEntry._children.copy()
  _attributes = gdata.GDataEntry._attributes.copy()
  _children['{%s}login' % APPS_NAMESPACE] = ('login', Login)
  _children['{%s}nickname' % APPS_NAMESPACE] = ('nickname', Nickname)

  def __init__(self, author=None, category=None, content=None,
               atom_id=None, link=None, published=None,
               title=None, updated=None,
               login=None, nickname=None,
               extended_property=None,
               extension_elements=None, extension_attributes=None, text=None):
    gdata.GDataEntry.__init__(self, author=author, category=category,
                              content=content, atom_id=atom_id, link=link,
                              published=published, title=title,
                              updated=updated)
    self.login = login
    self.nickname = nickname
    self.extended_property = extended_property or []
    self.text = text
    self.extension_elements = extension_elements or []
    self.extension_attributes = extension_attributes or {}


def NicknameEntryFromString(xml_string):
  return atom.CreateClassFromXMLString(NicknameEntry, xml_string)


class NicknameFeed(gdata.GDataFeed, gdata.LinkFinder):
  """A Google Apps Nickname feed flavor of an Atom Feed."""

  _tag = 'feed'
  _namespace = atom.ATOM_NAMESPACE
  _children = gdata.GDataFeed._children.copy()
  _attributes = gdata.GDataFeed._attributes.copy()
  _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [NicknameEntry])

  def __init__(self, author=None, category=None, contributor=None,
               generator=None, icon=None, atom_id=None, link=None, logo=None,
               rights=None, subtitle=None, title=None, updated=None,
               entry=None, total_results=None, start_index=None,
               items_per_page=None, extension_elements=None,
               extension_attributes=None, text=None):
    gdata.GDataFeed.__init__(self, author=author, category=category,
                             contributor=contributor, generator=generator,
                             icon=icon, atom_id=atom_id, link=link,
                             logo=logo, rights=rights, subtitle=subtitle,
                             title=title, updated=updated, entry=entry,
                             total_results=total_results,
                             start_index=start_index,
                             items_per_page=items_per_page,
                             extension_elements=extension_elements,
                             extension_attributes=extension_attributes,
                             text=text)


def NicknameFeedFromString(xml_string):
  return atom.CreateClassFromXMLString(NicknameFeed, xml_string)


class UserEntry(gdata.GDataEntry):
  """A Google Apps flavor of an Atom Entry."""

  _tag = 'entry'
  _namespace = atom.ATOM_NAMESPACE
  _children = gdata.GDataEntry._children.copy()
  _attributes = gdata.GDataEntry._attributes.copy()
  _children['{%s}login' % APPS_NAMESPACE] = ('login', Login)
  _children['{%s}name' % APPS_NAMESPACE] = ('name', Name)
  _children['{%s}quota' % APPS_NAMESPACE] = ('quota', Quota)
  # This child may already be defined in GDataEntry, confirm before removing.
  _children['{%s}feedLink' % gdata.GDATA_NAMESPACE] = ('feed_link',
                                                       [gdata.FeedLink])
  _children['{%s}who' % gdata.GDATA_NAMESPACE] = ('who', Who)

  def __init__(self, author=None, category=None, content=None,
               atom_id=None, link=None, published=None,
               title=None, updated=None,
               login=None, name=None, quota=None, who=None, feed_link=None,
               extended_property=None,
               extension_elements=None, extension_attributes=None, text=None):
    gdata.GDataEntry.__init__(self, author=author, category=category,
                              content=content, atom_id=atom_id, link=link,
                              published=published, title=title,
                              updated=updated)
    self.login = login
    self.name = name
    self.quota = quota
    self.who = who
    self.feed_link = feed_link or []
    self.extended_property = extended_property or []
    self.text = text
    self.extension_elements = extension_elements or []
    self.extension_attributes = extension_attributes or {}


def UserEntryFromString(xml_string):
  return atom.CreateClassFromXMLString(UserEntry, xml_string)


class UserFeed(gdata.GDataFeed, gdata.LinkFinder):
  """A Google Apps User feed flavor of an Atom Feed."""

  _tag = 'feed'
  _namespace = atom.ATOM_NAMESPACE
  _children = gdata.GDataFeed._children.copy()
  _attributes = gdata.GDataFeed._attributes.copy()
  _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [UserEntry])

  def __init__(self, author=None, category=None, contributor=None,
               generator=None, icon=None, atom_id=None, link=None, logo=None,
               rights=None, subtitle=None, title=None, updated=None,
               entry=None, total_results=None, start_index=None,
               items_per_page=None, extension_elements=None,
               extension_attributes=None, text=None):
    gdata.GDataFeed.__init__(self, author=author, category=category,
                             contributor=contributor, generator=generator,
                             icon=icon, atom_id=atom_id, link=link,
                             logo=logo, rights=rights, subtitle=subtitle,
                             title=title, updated=updated, entry=entry,
                             total_results=total_results,
                             start_index=start_index,
                             items_per_page=items_per_page,
                             extension_elements=extension_elements,
                             extension_attributes=extension_attributes,
                             text=text)


def UserFeedFromString(xml_string):
  return atom.CreateClassFromXMLString(UserFeed, xml_string)


class EmailListEntry(gdata.GDataEntry):
  """A Google Apps EmailList flavor of an Atom Entry."""

  _tag = 'entry'
  _namespace = atom.ATOM_NAMESPACE
  _children = gdata.GDataEntry._children.copy()
  _attributes = gdata.GDataEntry._attributes.copy()
  _children['{%s}emailList' % APPS_NAMESPACE] = ('email_list', EmailList)
  # Might be able to remove this _children entry.
  _children['{%s}feedLink' % gdata.GDATA_NAMESPACE] = ('feed_link',
                                                       [gdata.FeedLink])

  def __init__(self, author=None, category=None, content=None,
               atom_id=None, link=None, published=None,
               title=None, updated=None,
               email_list=None, feed_link=None,
               extended_property=None,
               extension_elements=None, extension_attributes=None, text=None):
    gdata.GDataEntry.__init__(self, author=author, category=category,
                              content=content, atom_id=atom_id, link=link,
                              published=published, title=title,
                              updated=updated)
    self.email_list = email_list
    self.feed_link = feed_link or []
    self.extended_property = extended_property or []
    self.text = text
    self.extension_elements = extension_elements or []
    self.extension_attributes = extension_attributes or {}


def EmailListEntryFromString(xml_string):
  return atom.CreateClassFromXMLString(EmailListEntry, xml_string)


class EmailListFeed(gdata.GDataFeed, gdata.LinkFinder):
  """A Google Apps EmailList feed flavor of an Atom Feed."""

  _tag = 'feed'
  _namespace = atom.ATOM_NAMESPACE
  _children = gdata.GDataFeed._children.copy()
  _attributes = gdata.GDataFeed._attributes.copy()
  _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [EmailListEntry])

  def __init__(self, author=None, category=None, contributor=None,
               generator=None, icon=None, atom_id=None, link=None, logo=None,
               rights=None, subtitle=None, title=None, updated=None,
               entry=None, total_results=None, start_index=None,
               items_per_page=None, extension_elements=None,
               extension_attributes=None, text=None):
    gdata.GDataFeed.__init__(self, author=author, category=category,
                             contributor=contributor, generator=generator,
                             icon=icon, atom_id=atom_id, link=link,
                             logo=logo, rights=rights, subtitle=subtitle,
                             title=title, updated=updated, entry=entry,
                             total_results=total_results,
                             start_index=start_index,
                             items_per_page=items_per_page,
                             extension_elements=extension_elements,
                             extension_attributes=extension_attributes,
                             text=text)


def EmailListFeedFromString(xml_string):
  return atom.CreateClassFromXMLString(EmailListFeed, xml_string)


class EmailListRecipientEntry(gdata.GDataEntry):
  """A Google Apps EmailListRecipient flavor of an Atom Entry."""

  _tag = 'entry'
  _namespace = atom.ATOM_NAMESPACE
  _children = gdata.GDataEntry._children.copy()
  _attributes = gdata.GDataEntry._attributes.copy()
  _children['{%s}who' % gdata.GDATA_NAMESPACE] = ('who', Who)

  def __init__(self, author=None, category=None, content=None,
               atom_id=None, link=None, published=None,
               title=None, updated=None,
               who=None,
               extended_property=None,
               extension_elements=None, extension_attributes=None, text=None):
    gdata.GDataEntry.__init__(self, author=author, category=category,
                              content=content, atom_id=atom_id, link=link,
                              published=published, title=title,
                              updated=updated)
    self.who = who
    self.extended_property = extended_property or []
    self.text = text
    self.extension_elements = extension_elements or []
    self.extension_attributes = extension_attributes or {}


def EmailListRecipientEntryFromString(xml_string):
  return atom.CreateClassFromXMLString(EmailListRecipientEntry, xml_string)


class EmailListRecipientFeed(gdata.GDataFeed, gdata.LinkFinder):
  """A Google Apps EmailListRecipient feed flavor of an Atom Feed."""

  _tag = 'feed'
  _namespace = atom.ATOM_NAMESPACE
  _children = gdata.GDataFeed._children.copy()
  _attributes = gdata.GDataFeed._attributes.copy()
  _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry',
                                                  [EmailListRecipientEntry])

  def __init__(self, author=None, category=None, contributor=None,
               generator=None, icon=None, atom_id=None, link=None, logo=None,
               rights=None, subtitle=None, title=None, updated=None,
               entry=None, total_results=None, start_index=None,
               items_per_page=None, extension_elements=None,
               extension_attributes=None, text=None):
    gdata.GDataFeed.__init__(self, author=author, category=category,
                             contributor=contributor, generator=generator,
                             icon=icon, atom_id=atom_id, link=link,
                             logo=logo, rights=rights, subtitle=subtitle,
                             title=title, updated=updated, entry=entry,
                             total_results=total_results,
                             start_index=start_index,
                             items_per_page=items_per_page,
                             extension_elements=extension_elements,
                             extension_attributes=extension_attributes,
                             text=text)


def EmailListRecipientFeedFromString(xml_string):
  return atom.CreateClassFromXMLString(EmailListRecipientFeed, xml_string)


class Property(atom.AtomBase):
  """The Google Apps Property element."""

  _tag = 'property'
  _namespace = APPS_NAMESPACE
  _children = atom.AtomBase._children.copy()
  _attributes = atom.AtomBase._attributes.copy()
  _attributes['name'] = 'name'
  _attributes['value'] = 'value'

  def __init__(self, name=None, value=None, extension_elements=None,
               extension_attributes=None, text=None):
    self.name = name
    self.value = value
    self.text = text
    self.extension_elements = extension_elements or []
    self.extension_attributes = extension_attributes or {}


def PropertyFromString(xml_string):
  return atom.CreateClassFromXMLString(Property, xml_string)


class PropertyEntry(gdata.GDataEntry):
  """A Google Apps Property flavor of an Atom Entry."""

  _tag = 'entry'
  _namespace = atom.ATOM_NAMESPACE
  _children = gdata.GDataEntry._children.copy()
  _attributes = gdata.GDataEntry._attributes.copy()
  _children['{%s}property' % APPS_NAMESPACE] = ('property', [Property])

  def __init__(self, author=None, category=None, content=None,
               atom_id=None, link=None, published=None,
               title=None, updated=None,
               property=None,
               extended_property=None,
               extension_elements=None, extension_attributes=None, text=None):
    gdata.GDataEntry.__init__(self, author=author, category=category,
                              content=content, atom_id=atom_id, link=link,
                              published=published, title=title,
                              updated=updated)
    self.property = property
    self.extended_property = extended_property or []
    self.text = text
    self.extension_elements = extension_elements or []
    self.extension_attributes = extension_attributes or {}


def PropertyEntryFromString(xml_string):
  return atom.CreateClassFromXMLString(PropertyEntry, xml_string)


class PropertyFeed(gdata.GDataFeed, gdata.LinkFinder):
  """A Google Apps Property feed flavor of an Atom Feed."""

  _tag = 'feed'
  _namespace = atom.ATOM_NAMESPACE
  _children = gdata.GDataFeed._children.copy()
  _attributes = gdata.GDataFeed._attributes.copy()
  _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [PropertyEntry])

  def __init__(self, author=None, category=None, contributor=None,
               generator=None, icon=None, atom_id=None, link=None, logo=None,
               rights=None, subtitle=None, title=None, updated=None,
               entry=None, total_results=None, start_index=None,
               items_per_page=None, extension_elements=None,
               extension_attributes=None, text=None):
    gdata.GDataFeed.__init__(self, author=author, category=category,
                             contributor=contributor, generator=generator,
                             icon=icon, atom_id=atom_id, link=link,
                             logo=logo, rights=rights, subtitle=subtitle,
                             title=title, updated=updated, entry=entry,
                             total_results=total_results,
                             start_index=start_index,
                             items_per_page=items_per_page,
                             extension_elements=extension_elements,
                             extension_attributes=extension_attributes,
                             text=text)


def PropertyFeedFromString(xml_string):
  return atom.CreateClassFromXMLString(PropertyFeed, xml_string)
Python
#!/usr/bin/python # # Copyright (C) 2008 Google, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Allow Google Apps domain administrators to set domain admin settings. AdminSettingsService: Set admin settings.""" __author__ = 'jlee@pbu.edu' import gdata.apps import gdata.apps.service import gdata.service API_VER='2.0' class AdminSettingsService(gdata.apps.service.PropertyService): """Client for the Google Apps Admin Settings service.""" def _serviceUrl(self, setting_id, domain=None): if domain is None: domain = self.domain return '/a/feeds/domain/%s/%s/%s' % (API_VER, domain, setting_id) def genericGet(self, location): """Generic HTTP Get Wrapper Args: location: relative uri to Get Returns: A dict containing the result of the get operation.""" uri = self._serviceUrl(location) try: return self._GetProperties(uri) except gdata.service.RequestError, e: raise AppsForYourDomainException(e.args[0]) def GetDefaultLanguage(self): """Gets Domain Default Language Args: None Returns: Default Language as a string. 
All possible values are listed at: http://code.google.com/apis/apps/email_settings/developers_guide_protocol.html#GA_email_language_tags""" result = self.genericGet('general/defaultLanguage') return result['defaultLanguage'] def UpdateDefaultLanguage(self, defaultLanguage): """Updates Domain Default Language Args: defaultLanguage: Domain Language to set possible values are at: http://code.google.com/apis/apps/email_settings/developers_guide_protocol.html#GA_email_language_tags Returns: A dict containing the result of the put operation""" uri = self._serviceUrl('general/defaultLanguage') properties = {'defaultLanguage': defaultLanguage} return self._PutProperties(uri, properties) def GetOrganizationName(self): """Gets Domain Default Language Args: None Returns: Organization Name as a string.""" result = self.genericGet('general/organizationName') return result['organizationName'] def UpdateOrganizationName(self, organizationName): """Updates Organization Name Args: organizationName: Name of organization Returns: A dict containing the result of the put operation""" uri = self._serviceUrl('general/organizationName') properties = {'organizationName': organizationName} return self._PutProperties(uri, properties) def GetMaximumNumberOfUsers(self): """Gets Maximum Number of Users Allowed Args: None Returns: An integer, the maximum number of users""" result = self.genericGet('general/maximumNumberOfUsers') return int(result['maximumNumberOfUsers']) def GetCurrentNumberOfUsers(self): """Gets Current Number of Users Args: None Returns: An integer, the current number of users""" result = self.genericGet('general/currentNumberOfUsers') return int(result['currentNumberOfUsers']) def IsDomainVerified(self): """Is the domain verified Args: None Returns: Boolean, is domain verified""" result = self.genericGet('accountInformation/isVerified') if result['isVerified'] == 'true': return True else: return False def GetSupportPIN(self): """Gets Support PIN Args: None Returns: A string, 
the Support PIN""" result = self.genericGet('accountInformation/supportPIN') return result['supportPIN'] def GetEdition(self): """Gets Google Apps Domain Edition Args: None Returns: A string, the domain's edition (premier, education, partner)""" result = self.genericGet('accountInformation/edition') return result['edition'] def GetCustomerPIN(self): """Gets Customer PIN Args: None Returns: A string, the customer PIN""" result = self.genericGet('accountInformation/customerPIN') return result['customerPIN'] def GetCreationTime(self): """Gets Domain Creation Time Args: None Returns: A string, the domain's creation time""" result = self.genericGet('accountInformation/creationTime') return result['creationTime'] def GetCountryCode(self): """Gets Domain Country Code Args: None Returns: A string, the domain's country code. Possible values at: http://www.iso.org/iso/country_codes/iso_3166_code_lists/english_country_names_and_code_elements.htm""" result = self.genericGet('accountInformation/countryCode') return result['countryCode'] def GetAdminSecondaryEmail(self): """Gets Domain Admin Secondary Email Address Args: None Returns: A string, the secondary email address for domain admin""" result = self.genericGet('accountInformation/adminSecondaryEmail') return result['adminSecondaryEmail'] def UpdateAdminSecondaryEmail(self, adminSecondaryEmail): """Gets Domain Creation Time Args: adminSecondaryEmail: string, secondary email address of admin Returns: A dict containing the result of the put operation""" uri = self._serviceUrl('accountInformation/adminSecondaryEmail') properties = {'adminSecondaryEmail': adminSecondaryEmail} return self._PutProperties(uri, properties) def GetDomainLogo(self): """Gets Domain Logo This function does not make use of the Google Apps Admin Settings API, it does an HTTP Get of a url specific to the Google Apps domain. It is included for completeness sake. 
Args: None Returns: binary image file""" import urllib url = 'http://www.google.com/a/cpanel/'+self.domain+'/images/logo.gif' response = urllib.urlopen(url) return response.read() def UpdateDomainLogo(self, logoImage): """Update Domain's Custom Logo Args: logoImage: binary image data Returns: A dict containing the result of the put operation""" from base64 import base64encode uri = self._serviceUrl('appearance/customLogo') properties = {'logoImage': base64encode(logoImage)} return self._PutProperties(uri, properties) def GetCNAMEVerificationStatus(self): """Gets Domain CNAME Verification Status Args: None Returns: A dict {recordName, verified, verifiedMethod}""" return self.genericGet('verification/cname') def UpdateCNAMEVerificationStatus(self, verified): """Updates CNAME Verification Status Args: verified: boolean, True will retry verification process Returns: A dict containing the result of the put operation""" uri = self._serviceUrl('verification/cname') properties = self.GetCNAMEVerificationStatus() properties['verified'] = verified return self._PutProperties(uri, properties) def GetMXVerificationStatus(self): """Gets Domain MX Verification Status Args: None Returns: A dict {verified, verifiedMethod}""" return self.genericGet('verification/mx') def UpdateMXVerificationStatus(self, verified): """Updates MX Verification Status Args: verified: boolean, True will retry verification process Returns: A dict containing the result of the put operation""" uri = self._serviceUrl('verification/mx') properties = self.GetMXVerificationStatus() properties['verified'] = verified return self._PutProperties(uri, properties) def GetSSOSettings(self): """Gets Domain Single Sign-On Settings Args: None Returns: A dict {samlSignonUri, samlLogoutUri, changePasswordUri, enableSSO, ssoWhitelist, useDomainSpecificIssuer}""" return self.genericGet('sso/general') def UpdateSSOSettings(self, enableSSO=None, samlSignonUri=None, samlLogoutUri=None, changePasswordUri=None, ssoWhitelist=None, 
useDomainSpecificIssuer=None): """Update SSO Settings. Args: enableSSO: boolean, SSO Master on/off switch samlSignonUri: string, SSO Login Page samlLogoutUri: string, SSO Logout Page samlPasswordUri: string, SSO Password Change Page ssoWhitelist: string, Range of IP Addresses which will see SSO useDomainSpecificIssuer: boolean, Include Google Apps Domain in Issuer Returns: A dict containing the result of the update operation. """ uri = self._serviceUrl('sso/general') #Get current settings, replace Nones with '' properties = self.GetSSOSettings() if properties['samlSignonUri'] == None: properties['samlSignonUri'] = '' if properties['samlLogoutUri'] == None: properties['samlLogoutUri'] = '' if properties['changePasswordUri'] == None: properties['changePasswordUri'] = '' if properties['ssoWhitelist'] == None: properties['ssoWhitelist'] = '' #update only the values we were passed if enableSSO != None: properties['enableSSO'] = gdata.apps.service._bool2str(enableSSO) if samlSignonUri != None: properties['samlSignonUri'] = samlSignonUri if samlLogoutUri != None: properties['samlLogoutUri'] = samlLogoutUri if changePasswordUri != None: properties['changePasswordUri'] = changePasswordUri if ssoWhitelist != None: properties['ssoWhitelist'] = ssoWhitelist if useDomainSpecificIssuer != None: properties['useDomainSpecificIssuer'] = gdata.apps.service._bool2str(useDomainSpecificIssuer) return self._PutProperties(uri, properties) def GetSSOKey(self): """Gets Domain Single Sign-On Signing Key Args: None Returns: A dict {modulus, exponent, algorithm, format}""" return self.genericGet('sso/signingkey') def UpdateSSOKey(self, signingKey): """Update SSO Settings. 
Args: signingKey: string, public key to be uploaded Returns: A dict containing the result of the update operation.""" uri = self._serviceUrl('sso/signingkey') properties = {'signingKey': signingKey} return self._PutProperties(uri, properties) def IsUserMigrationEnabled(self): """Is User Migration Enabled Args: None Returns: boolean, is user migration enabled""" result = self.genericGet('email/migration') if result['enableUserMigration'] == 'true': return True else: return False def UpdateUserMigrationStatus(self, enableUserMigration): """Update User Migration Status Args: enableUserMigration: boolean, user migration enable/disable Returns: A dict containing the result of the update operation.""" uri = self._serviceUrl('email/migration') properties = {'enableUserMigration': enableUserMigration} return self._PutProperties(uri, properties) def GetOutboundGatewaySettings(self): """Get Outbound Gateway Settings Args: None Returns: A dict {smartHost, smtpMode}""" uri = self._serviceUrl('email/gateway') try: return self._GetProperties(uri) except gdata.service.RequestError, e: raise AppsForYourDomainException(e.args[0]) except TypeError: #if no outbound gateway is set, we get a TypeError, #catch it and return nothing... 
return {'smartHost': None, 'smtpMode': None} def UpdateOutboundGatewaySettings(self, smartHost=None, smtpMode=None): """Update Outbound Gateway Settings Args: smartHost: string, ip address or hostname of outbound gateway smtpMode: string, SMTP or SMTP_TLS Returns: A dict containing the result of the update operation.""" uri = self._serviceUrl('email/gateway') #Get current settings, replace Nones with '' properties = GetOutboundGatewaySettings() if properties['smartHost'] == None: properties['smartHost'] = '' if properties['smtpMode'] == None: properties['smtpMode'] = '' #If we were passed new values for smartHost or smtpMode, update them if smartHost != None: properties['smartHost'] = smartHost if smtpMode != None: properties['smtpMode'] = smtpMode return self._PutProperties(uri, properties) def AddEmailRoute(self, routeDestination, routeRewriteTo, routeEnabled, bounceNotifications, accountHandling): """Adds Domain Email Route Args: routeDestination: string, destination ip address or hostname routeRewriteTo: boolean, rewrite smtp envelop To: routeEnabled: boolean, enable disable email routing bounceNotifications: boolean, send bound notificiations to sender accountHandling: string, which to route, "allAccounts", "provisionedAccounts", "unknownAccounts" Returns: A dict containing the result of the update operation.""" uri = self._serviceUrl('emailrouting') properties = {} properties['routeDestination'] = routeDestination properties['routeRewriteTo'] = gdata.apps.service._bool2str(routeRewriteTo) properties['routeEnabled'] = gdata.apps.service._bool2str(routeEnabled) properties['bounceNotifications'] = gdata.apps.service._bool2str(bounceNotifications) properties['accountHandling'] = accountHandling return self._PostProperties(uri, properties)
Python
#!/usr/bin/python # # Copyright (C) 2008 Google # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License.
Python
# Copyright (C) 2008 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Allow Google Apps domain administrators to audit user data.

  AuditService: Set auditing.
"""

__author__ = 'jlee@pbu.edu'

from base64 import b64encode

import gdata.apps
import gdata.apps.service
import gdata.service


class AuditService(gdata.apps.service.PropertyService):
  """Client for the Google Apps Audit service."""

  def _serviceUrl(self, setting_id, domain=None, user=None):
    """Builds an audit feed URI for the given setting, domain and user.

    Args:
      setting_id: string, the audit feed name, e.g. 'mail/monitor'.
      domain: string (optional), defaults to the service's own domain.
      user: string (optional), appended as a final path segment if given.

    Returns:
      The relative feed URI string.
    """
    if domain is None:
      domain = self.domain
    if user is None:
      return '/a/feeds/compliance/audit/%s/%s' % (setting_id, domain)
    else:
      return '/a/feeds/compliance/audit/%s/%s/%s' % (setting_id, domain, user)

  def updatePGPKey(self, pgpkey):
    """Updates the public PGP key Google uses to encrypt audit data.

    Args:
      pgpkey: string, ASCII text of the PGP public key to be used.

    Returns:
      A dict containing the result of the POST operation.
    """
    uri = self._serviceUrl('publickey')
    # The key must be transmitted base64-encoded.
    b64pgpkey = b64encode(pgpkey)
    properties = {}
    properties['publicKey'] = b64pgpkey
    return self._PostProperties(uri, properties)

  def createEmailMonitor(self, source_user, destination_user, end_date,
                         begin_date=None, incoming_headers_only=False,
                         outgoing_headers_only=False, drafts=False,
                         drafts_headers_only=False, chats=False,
                         chats_headers_only=False):
    """Creates an email monitor, forwarding the source_user's emails/chats.

    Args:
      source_user: string, the user whose email will be audited.
      destination_user: string, the user to receive the audited email.
      end_date: string, the date the audit will end, in
          "yyyy-MM-dd HH:mm" format; required.
      begin_date: string, the date the audit will start, in
          "yyyy-MM-dd HH:mm" format; leave blank to use the current time.
      incoming_headers_only: boolean, whether to audit only the headers of
          mail delivered to the source user.
      outgoing_headers_only: boolean, whether to audit only the headers of
          mail sent from the source user.
      drafts: boolean, whether to audit draft messages of the source user.
      drafts_headers_only: boolean, whether to audit only the headers of
          mail drafts saved by the user.
      chats: boolean, whether to audit archived chats of the source user.
      chats_headers_only: boolean, whether to audit only the headers of
          archived chats of the source user.

    Returns:
      A dict containing the result of the POST operation.
    """
    uri = self._serviceUrl('mail/monitor', user=source_user)
    properties = {}
    properties['destUserName'] = destination_user
    if begin_date is not None:
      properties['beginDate'] = begin_date
    properties['endDate'] = end_date
    if incoming_headers_only:
      properties['incomingEmailMonitorLevel'] = 'HEADER_ONLY'
    else:
      properties['incomingEmailMonitorLevel'] = 'FULL_MESSAGE'
    if outgoing_headers_only:
      properties['outgoingEmailMonitorLevel'] = 'HEADER_ONLY'
    else:
      properties['outgoingEmailMonitorLevel'] = 'FULL_MESSAGE'
    # Draft and chat levels are only sent when auditing of them is enabled.
    if drafts:
      if drafts_headers_only:
        properties['draftMonitorLevel'] = 'HEADER_ONLY'
      else:
        properties['draftMonitorLevel'] = 'FULL_MESSAGE'
    if chats:
      if chats_headers_only:
        properties['chatMonitorLevel'] = 'HEADER_ONLY'
      else:
        properties['chatMonitorLevel'] = 'FULL_MESSAGE'
    return self._PostProperties(uri, properties)

  def getEmailMonitors(self, user):
    """Gets the email monitors for the given user.

    Args:
      user: string, the user to retrieve email monitors for.

    Returns:
      list results of the GET operation.
    """
    uri = self._serviceUrl('mail/monitor', user=user)
    return self._GetPropertiesList(uri)

  def deleteEmailMonitor(self, source_user, destination_user):
    """Deletes the email monitor for the given user.

    Args:
      source_user: string, the user who is being monitored.
      destination_user: string, the user who receives the monitored emails.

    Returns:
      Nothing.
    """
    uri = self._serviceUrl('mail/monitor',
                           user=source_user + '/' + destination_user)
    try:
      return self._DeleteProperties(uri)
    except gdata.service.RequestError as e:
      # Bug fix: AppsForYourDomainException was referenced unqualified but is
      # defined in gdata.apps.service; the bare name raised NameError here.
      raise gdata.apps.service.AppsForYourDomainException(e.args[0])

  def createAccountInformationRequest(self, user):
    """Creates a request for account auditing details.

    Args:
      user: string, the user to request account information for.

    Returns:
      A dict containing the result of the POST operation.
    """
    uri = self._serviceUrl('account', user=user)
    properties = {}
    # XML body is left empty.
    try:
      return self._PostProperties(uri, properties)
    except gdata.service.RequestError as e:
      raise gdata.apps.service.AppsForYourDomainException(e.args[0])

  def getAccountInformationRequestStatus(self, user, request_id):
    """Gets the status of an account auditing request.

    Args:
      user: string, the user whose account auditing details were requested.
      request_id: string, the request_id.

    Returns:
      A dict containing the result of the GET operation.
    """
    uri = self._serviceUrl('account', user=user + '/' + request_id)
    try:
      return self._GetProperties(uri)
    except gdata.service.RequestError as e:
      raise gdata.apps.service.AppsForYourDomainException(e.args[0])

  def getAllAccountInformationRequestsStatus(self):
    """Gets the status of all account auditing requests for the domain.

    Returns:
      list results of the GET operation.
    """
    uri = self._serviceUrl('account')
    return self._GetPropertiesList(uri)

  def deleteAccountInformationRequest(self, user, request_id):
    """Deletes the request for account auditing information.

    Args:
      user: string, the user whose account auditing details were requested.
      request_id: string, the request_id.

    Returns:
      Nothing.
    """
    uri = self._serviceUrl('account', user=user + '/' + request_id)
    try:
      return self._DeleteProperties(uri)
    except gdata.service.RequestError as e:
      raise gdata.apps.service.AppsForYourDomainException(e.args[0])

  def createMailboxExportRequest(self, user, begin_date=None, end_date=None,
                                 include_deleted=False, search_query=None,
                                 headers_only=False):
    """Creates a mailbox export request.

    Args:
      user: string, the user whose mailbox export is being requested.
      begin_date: string, date of earliest emails to export; optional,
          defaults to the date of account creation.
          Format is 'yyyy-MM-dd HH:mm'.
      end_date: string, date of latest emails to export; optional,
          defaults to the current date. Format is 'yyyy-MM-dd HH:mm'.
      include_deleted: boolean, whether to include deleted emails in the
          export; mutually exclusive with search_query.
      search_query: string, gmail-style search query; matched emails will
          be exported; mutually exclusive with include_deleted.
      headers_only: boolean, whether to export only message headers
          instead of full messages.

    Returns:
      A dict containing the result of the POST operation.
    """
    uri = self._serviceUrl('mail/export', user=user)
    properties = {}
    if begin_date is not None:
      properties['beginDate'] = begin_date
    if end_date is not None:
      properties['endDate'] = end_date
    # NOTE(review): include_deleted defaults to False so this test is always
    # true and 'includeDeleted' is always sent; preserved as-is since the
    # server-side contract for omitting it is not visible here.
    if include_deleted is not None:
      properties['includeDeleted'] = gdata.apps.service._bool2str(
          include_deleted)
    if search_query is not None:
      properties['searchQuery'] = search_query
    if headers_only is True:
      properties['packageContent'] = 'HEADER_ONLY'
    else:
      properties['packageContent'] = 'FULL_MESSAGE'
    return self._PostProperties(uri, properties)

  def getMailboxExportRequestStatus(self, user, request_id):
    """Gets the status of a mailbox export request.

    Args:
      user: string, the user whose mailbox export was requested.
      request_id: string, the request_id.

    Returns:
      A dict containing the result of the GET operation.
    """
    uri = self._serviceUrl('mail/export', user=user + '/' + request_id)
    try:
      return self._GetProperties(uri)
    except gdata.service.RequestError as e:
      raise gdata.apps.service.AppsForYourDomainException(e.args[0])

  def getAllMailboxExportRequestsStatus(self):
    """Gets the status of all mailbox export requests for the domain.

    Returns:
      list results of the GET operation.
    """
    uri = self._serviceUrl('mail/export')
    return self._GetPropertiesList(uri)

  def deleteMailboxExportRequest(self, user, request_id):
    """Deletes the request for a mailbox export.

    Args:
      user: string, the user whose mailbox export was requested.
      request_id: string, the request_id.

    Returns:
      Nothing.
    """
    uri = self._serviceUrl('mail/export', user=user + '/' + request_id)
    try:
      return self._DeleteProperties(uri)
    except gdata.service.RequestError as e:
      raise gdata.apps.service.AppsForYourDomainException(e.args[0])
Python
#!/usr/bin/python
#
# Copyright (C) 2007 - 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import cgi
import math
import random
import re
import time
import types
import urllib
import atom.http_interface
import atom.token_store
import atom.url
import gdata.oauth as oauth
import gdata.oauth.rsa as oauth_rsa
import gdata.tlslite.utils.keyfactory as keyfactory
import gdata.tlslite.utils.cryptomath as cryptomath
import gdata.gauth

__author__ = 'api.jscudder (Jeff Scudder)'

# Prefixes for the Authorization header value of each auth mechanism.
PROGRAMMATIC_AUTH_LABEL = 'GoogleLogin auth='
AUTHSUB_AUTH_LABEL = 'AuthSub token='

"""This module provides functions and objects used with Google authentication.

Details on Google authorization mechanisms used with the Google Data APIs can
be found here:
http://code.google.com/apis/gdata/auth.html
http://code.google.com/apis/accounts/

The essential functions are the following.
Related to ClientLogin:
  generate_client_login_request_body: Constructs the body of an HTTP request to
                                      obtain a ClientLogin token for a specific
                                      service.
  extract_client_login_token: Creates a ClientLoginToken with the token from a
                              success response to a ClientLogin request.
  get_captcha_challenge: If the server responded to the ClientLogin request
                         with a CAPTCHA challenge, this method extracts the
                         CAPTCHA URL and identifying CAPTCHA token.

Related to AuthSub:
  generate_auth_sub_url: Constructs a full URL for an AuthSub request. The
                         user's browser must be sent to this Google Accounts
                         URL and redirected back to the app to obtain the
                         AuthSub token.
  extract_auth_sub_token_from_url: Once the user's browser has been redirected
                                   back to the web app, use this function to
                                   create an AuthSubToken with the correct
                                   authorization token and scope.
  token_from_http_body: Extracts the AuthSubToken value string from the
                        server's response to an AuthSub session token upgrade
                        request.
"""


def generate_client_login_request_body(email, password, service, source,
    account_type='HOSTED_OR_GOOGLE', captcha_token=None,
    captcha_response=None):
  """Creates the body of the authentication request.

  See http://code.google.com/apis/accounts/AuthForInstalledApps.html#Request
  for more details.

  Args:
    email: str
    password: str
    service: str
    source: str
    account_type: str (optional) Default is 'HOSTED_OR_GOOGLE', other valid
        values are 'GOOGLE' and 'HOSTED'.
    captcha_token: str (optional)
    captcha_response: str (optional)

  Returns:
    The HTTP body to send in a request for a client login token.
  """
  return gdata.gauth.generate_client_login_request_body(email, password,
      service, source, account_type, captcha_token, captcha_response)


GenerateClientLoginRequestBody = generate_client_login_request_body


def GenerateClientLoginAuthToken(http_body):
  """Returns the token value to use in Authorization headers.

  Reads the token from the server's response to a Client Login request and
  creates the header value to use in requests.

  Args:
    http_body: str The body of the server's HTTP response to a Client Login
        request.

  Returns:
    The value half of an Authorization header.
  """
  token = get_client_login_token(http_body)
  if token:
    # Consistency fix: use the module constant instead of repeating the
    # 'GoogleLogin auth=' literal; the resulting string is identical.
    return '%s%s' % (PROGRAMMATIC_AUTH_LABEL, token)
  return None


def get_client_login_token(http_body):
  """Returns the token value for a ClientLoginToken.

  Reads the token from the server's response to a Client Login request and
  creates the token value string to use in requests.

  Args:
    http_body: str The body of the server's HTTP response to a Client Login
        request.

  Returns:
    The token value string for a ClientLoginToken.
  """
  return gdata.gauth.get_client_login_token_string(http_body)


def extract_client_login_token(http_body, scopes):
  """Parses the server's response and returns a ClientLoginToken.

  Args:
    http_body: str The body of the server's HTTP response to a Client Login
        request. It is assumed that the login request was successful.
    scopes: list containing atom.url.Urls or strs. The scopes list contains
        all of the partial URLs under which the client login token is valid.
        For example, if scopes contains ['http://example.com/foo'] then the
        client login token would be valid for http://example.com/foo/bar/baz.

  Returns:
    A ClientLoginToken which is valid for the specified scopes.
  """
  token_string = get_client_login_token(http_body)
  token = ClientLoginToken(scopes=scopes)
  token.set_token_string(token_string)
  return token


def get_captcha_challenge(http_body,
    captcha_base_url='http://www.google.com/accounts/'):
  """Returns the URL and token for a CAPTCHA challenge issued by the server.

  Args:
    http_body: str The body of the HTTP response from the server which
        contains the CAPTCHA challenge.
    captcha_base_url: str This function returns a full URL for viewing the
        challenge image which is built from the server's response. This
        base_url is used as the beginning of the URL because the server
        only provides the end of the URL. For example the server provides
        'Captcha?ctoken=Hi...N' and the URL for the image is
        'http://www.google.com/accounts/Captcha?ctoken=Hi...N'

  Returns:
    A dictionary containing the information needed to respond to the CAPTCHA
    challenge, the image URL and the ID token of the challenge. The
    dictionary is in the form:
    {'token': string identifying the CAPTCHA image,
     'url': string containing the URL of the image}
    Returns None if there was no CAPTCHA challenge in the response.
  """
  return gdata.gauth.get_captcha_challenge(http_body, captcha_base_url)


GetCaptchaChallenge = get_captcha_challenge


def GenerateOAuthRequestTokenUrl(
    oauth_input_params, scopes,
    request_token_url='https://www.google.com/accounts/OAuthGetRequestToken',
    extra_parameters=None):
  """Generate a URL at which a request for an OAuth request token is sent.

  Args:
    oauth_input_params: OAuthInputParams OAuth input parameters.
    scopes: list of strings The URLs of the services to be accessed.
    request_token_url: string The beginning of the request token URL. This is
        normally 'https://www.google.com/accounts/OAuthGetRequestToken' or
        '/accounts/OAuthGetRequestToken'.
    extra_parameters: dict (optional) key-value pairs as any additional
        parameters to be included in the URL and signature while making a
        request for fetching an OAuth request token. All the OAuth parameters
        are added by default. But if provided through this argument, any
        default parameters will be overwritten. For e.g. a default parameter
        oauth_version 1.0 can be overwritten if
        extra_parameters = {'oauth_version': '2.0'}

  Returns:
    atom.url.Url OAuth request token URL.
  """
  scopes_string = ' '.join([str(scope) for scope in scopes])
  parameters = {'scope': scopes_string}
  if extra_parameters:
    parameters.update(extra_parameters)
  oauth_request = oauth.OAuthRequest.from_consumer_and_token(
      oauth_input_params.GetConsumer(), http_url=request_token_url,
      parameters=parameters)
  oauth_request.sign_request(oauth_input_params.GetSignatureMethod(),
                             oauth_input_params.GetConsumer(), None)
  return atom.url.parse_url(oauth_request.to_url())


def GenerateOAuthAuthorizationUrl(
    request_token,
    authorization_url='https://www.google.com/accounts/OAuthAuthorizeToken',
    callback_url=None, extra_params=None,
    include_scopes_in_callback=False,
    scopes_param_prefix='oauth_token_scope'):
  """Generates URL at which user will login to authorize the request token.

  Args:
    request_token: gdata.auth.OAuthToken OAuth request token.
    authorization_url: string The beginning of the authorization URL. This is
        normally 'https://www.google.com/accounts/OAuthAuthorizeToken' or
        '/accounts/OAuthAuthorizeToken'.
    callback_url: string (optional) The URL user will be sent to after
        logging in and granting access.
    extra_params: dict (optional) Additional parameters to be sent.
    include_scopes_in_callback: Boolean (default=False) if set to True, and
        if 'callback_url' is present, the 'callback_url' will be modified to
        include the scope(s) from the request token as a URL parameter. The
        key for the 'callback' URL's scope parameter will be
        OAUTH_SCOPE_URL_PARAM_NAME. The benefit of including the scope URL
        as a parameter to the 'callback' URL is that the page which receives
        the OAuth token will be able to tell which URLs the token grants
        access to.
    scopes_param_prefix: string (default='oauth_token_scope') The URL
        parameter key which maps to the list of valid scopes for the token.
        This URL parameter will be included in the callback URL along with
        the scopes of the token as value if include_scopes_in_callback=True.

  Returns:
    atom.url.Url OAuth authorization URL.
  """
  scopes = request_token.scopes
  if isinstance(scopes, list):
    scopes = ' '.join(scopes)
  if include_scopes_in_callback and callback_url:
    # Append the scopes to the callback URL so the receiving page can tell
    # which services the token is valid for.
    if callback_url.find('?') > -1:
      callback_url += '&'
    else:
      callback_url += '?'
    callback_url += urllib.urlencode({scopes_param_prefix: scopes})
  oauth_token = oauth.OAuthToken(request_token.key, request_token.secret)
  oauth_request = oauth.OAuthRequest.from_token_and_callback(
      token=oauth_token, callback=callback_url,
      http_url=authorization_url, parameters=extra_params)
  return atom.url.parse_url(oauth_request.to_url())


def GenerateOAuthAccessTokenUrl(
    authorized_request_token, oauth_input_params,
    access_token_url='https://www.google.com/accounts/OAuthGetAccessToken',
    oauth_version='1.0', oauth_verifier=None):
  """Generates URL at which the authorized request token is exchanged.

  Args:
    authorized_request_token: gdata.auth.OAuthToken OAuth authorized request
        token.
    oauth_input_params: OAuthInputParams OAuth input parameters.
    access_token_url: string The beginning of the authorization URL. This is
        normally 'https://www.google.com/accounts/OAuthGetAccessToken' or
        '/accounts/OAuthGetAccessToken'.
    oauth_version: str (default='1.0') oauth_version parameter.
    oauth_verifier: str (optional) If present, it is assumed that the client
        will use the OAuth v1.0a protocol which includes passing the
        oauth_verifier (as returned by the SP) in the access token step.

  Returns:
    atom.url.Url OAuth access token URL.
  """
  oauth_token = oauth.OAuthToken(authorized_request_token.key,
                                 authorized_request_token.secret)
  parameters = {'oauth_version': oauth_version}
  if oauth_verifier is not None:
    parameters['oauth_verifier'] = oauth_verifier
  oauth_request = oauth.OAuthRequest.from_consumer_and_token(
      oauth_input_params.GetConsumer(), token=oauth_token,
      http_url=access_token_url, parameters=parameters)
  oauth_request.sign_request(oauth_input_params.GetSignatureMethod(),
                             oauth_input_params.GetConsumer(), oauth_token)
  return atom.url.parse_url(oauth_request.to_url())


def GenerateAuthSubUrl(next, scope, secure=False, session=True,
    request_url='https://www.google.com/accounts/AuthSubRequest',
    domain='default'):
  """Generate a URL at which the user will login and be redirected back.

  Users enter their credentials on a Google login page and a token is sent
  to the URL specified in next. See documentation for AuthSub login at:
  http://code.google.com/apis/accounts/AuthForWebApps.html

  Args:
    request_url: str The beginning of the request URL. This is normally
        'http://www.google.com/accounts/AuthSubRequest' or
        '/accounts/AuthSubRequest'.
    next: string The URL user will be sent to after logging in.
    scope: string The URL of the service to be accessed.
    secure: boolean (optional) Determines whether or not the issued token is
        a secure token.
    session: boolean (optional) Determines whether or not the issued token
        can be upgraded to a session token.
    domain: str (optional) The Google Apps domain for this account. If this
        is not a Google Apps account, use 'default' which is the default
        value.
  """
  # Translate True/False values for parameters into numeric values accepted
  # by the AuthSub service.
  if secure:
    secure = 1
  else:
    secure = 0
  if session:
    session = 1
  else:
    session = 0
  request_params = urllib.urlencode({'next': next, 'scope': scope,
                                     'secure': secure, 'session': session,
                                     'hd': domain})
  if request_url.find('?') == -1:
    return '%s?%s' % (request_url, request_params)
  else:
    # The request URL already contained URL parameters so we should add
    # the parameters using the & separator.
    return '%s&%s' % (request_url, request_params)


def generate_auth_sub_url(next, scopes, secure=False, session=True,
    request_url='https://www.google.com/accounts/AuthSubRequest',
    domain='default', scopes_param_prefix='auth_sub_scopes'):
  """Constructs a URL string for requesting a multiscope AuthSub token.

  The generated token will contain a URL parameter to pass along the
  requested scopes to the next URL. When the Google Accounts page redirects
  the browser to the 'next' URL, it appends the single use AuthSub token
  value to the URL as a URL parameter with the key 'token'. However, the
  information about which scopes were requested is not included by Google
  Accounts. This method adds the scopes to the next URL before making the
  request so that the redirect will be sent to a page, and both the token
  value and the list of scopes can be extracted from the request URL.

  Args:
    next: atom.url.URL or string The URL user will be sent to after
        authorizing this web application to access their data.
    scopes: list containing strings The URLs of the services to be accessed.
    secure: boolean (optional) Determines whether or not the issued token is
        a secure token.
    session: boolean (optional) Determines whether or not the issued token
        can be upgraded to a session token.
    request_url: atom.url.Url or str The beginning of the request URL. This
        is normally 'http://www.google.com/accounts/AuthSubRequest' or
        '/accounts/AuthSubRequest'.
    domain: The domain which the account is part of. This is used for Google
        Apps accounts, the default value is 'default' which means that the
        requested account is a Google Account (@gmail.com for example).
    scopes_param_prefix: str (optional) The requested scopes are added as a
        URL parameter to the next URL so that the page at the 'next' URL can
        extract the token value and the valid scopes from the URL. The key
        for the URL parameter defaults to 'auth_sub_scopes'.

  Returns:
    An atom.url.Url which the user's browser should be directed to in order
    to authorize this application to access their information.
  """
  if isinstance(next, (str, unicode)):
    next = atom.url.parse_url(next)
  scopes_string = ' '.join([str(scope) for scope in scopes])
  next.params[scopes_param_prefix] = scopes_string
  if isinstance(request_url, (str, unicode)):
    request_url = atom.url.parse_url(request_url)
  request_url.params['next'] = str(next)
  request_url.params['scope'] = scopes_string
  if session:
    request_url.params['session'] = 1
  else:
    request_url.params['session'] = 0
  if secure:
    request_url.params['secure'] = 1
  else:
    request_url.params['secure'] = 0
  request_url.params['hd'] = domain
  return request_url


def AuthSubTokenFromUrl(url):
  """Extracts the AuthSub token from the URL.

  Used after the AuthSub redirect has sent the user to the 'next' page and
  appended the token to the URL. This function returns the value to be used
  in the Authorization header.

  Args:
    url: str The URL of the current page which contains the AuthSub token as
        a URL parameter.
  """
  token = TokenFromUrl(url)
  if token:
    # Consistency fix: use the module constant instead of repeating the
    # 'AuthSub token=' literal; the resulting string is identical.
    return '%s%s' % (AUTHSUB_AUTH_LABEL, token)
  return None


def TokenFromUrl(url):
  """Extracts the AuthSub token from the URL.

  Returns the raw token value.

  Args:
    url: str The URL or the query portion of the URL string (after the ?) of
        the current page which contains the AuthSub token as a URL parameter.
  """
  if url.find('?') > -1:
    query_params = url.split('?')[1]
  else:
    query_params = url
  for pair in query_params.split('&'):
    if pair.startswith('token='):
      return pair[6:]
  return None


def extract_auth_sub_token_from_url(url,
    scopes_param_prefix='auth_sub_scopes', rsa_key=None):
  """Creates an AuthSubToken and sets the token value and scopes from the URL.

  After the Google Accounts AuthSub pages redirect the user's browser back to
  the web application (using the 'next' URL from the request) the web app must
  extract the token from the current page's URL. The token is provided as a
  URL parameter named 'token' and if generate_auth_sub_url was used to create
  the request, the token's valid scopes are included in a URL parameter whose
  name is specified in scopes_param_prefix.

  Args:
    url: atom.url.Url or str representing the current URL. The token value
        and valid scopes should be included as URL parameters.
    scopes_param_prefix: str (optional) The URL parameter key which maps to
        the list of valid scopes for the token.

  Returns:
    An AuthSubToken with the token value from the URL and set to be valid for
    the scopes passed in on the URL. If no scopes were included in the URL,
    the AuthSubToken defaults to being valid for no scopes. If there was no
    'token' parameter in the URL, this function returns None.
  """
  if isinstance(url, (str, unicode)):
    url = atom.url.parse_url(url)
  if 'token' not in url.params:
    return None
  scopes = []
  if scopes_param_prefix in url.params:
    scopes = url.params[scopes_param_prefix].split(' ')
  token_value = url.params['token']
  if rsa_key:
    token = SecureAuthSubToken(rsa_key, scopes=scopes)
  else:
    token = AuthSubToken(scopes=scopes)
  token.set_token_string(token_value)
  return token


def AuthSubTokenFromHttpBody(http_body):
  """Extracts the AuthSub token from an HTTP body string.

  Used to find the new session token after making a request to upgrade a
  single use AuthSub token.

  Args:
    http_body: str The response from the server which contains the AuthSub
        key. For example, this function would find the new session token
        from the server's response to an upgrade token request.

  Returns:
    The header value to use for Authorization which contains the AuthSub
    token.
  """
  token_value = token_from_http_body(http_body)
  if token_value:
    return '%s%s' % (AUTHSUB_AUTH_LABEL, token_value)
  return None


def token_from_http_body(http_body):
  """Extracts the AuthSub token from an HTTP body string.

  Used to find the new session token after making a request to upgrade a
  single use AuthSub token.

  Args:
    http_body: str The response from the server which contains the AuthSub
        key. For example, this function would find the new session token
        from the server's response to an upgrade token request.

  Returns:
    The raw token value to use in an AuthSubToken object.
  """
  for response_line in http_body.splitlines():
    if response_line.startswith('Token='):
      # Strip off Token= and return the token value string.
      return response_line[6:]
  return None


TokenFromHttpBody = token_from_http_body


def OAuthTokenFromUrl(url, scopes_param_prefix='oauth_token_scope'):
  """Creates an OAuthToken and sets token key and scopes (if present) from URL.

  After the Google Accounts OAuth pages redirect the user's browser back to
  the web application (using the 'callback' URL from the request) the web app
  can extract the token from the current page's URL. The token is same as the
  request token, but it is either authorized (if user grants access) or
  unauthorized (if user denies access). The token is provided as a URL
  parameter named 'oauth_token' and if it was chosen to use
  GenerateOAuthAuthorizationUrl with include_scopes_in_param=True, the
  token's valid scopes are included in a URL parameter whose name is
  specified in scopes_param_prefix.

  Args:
    url: atom.url.Url or str representing the current URL. The token value
        and valid scopes should be included as URL parameters.
    scopes_param_prefix: str (optional) The URL parameter key which maps to
        the list of valid scopes for the token.

  Returns:
    An OAuthToken with the token key from the URL and set to be valid for
    the scopes passed in on the URL. If no scopes were included in the URL,
    the OAuthToken defaults to being valid for no scopes. If there was no
    'oauth_token' parameter in the URL, this function returns None.
  """
  if isinstance(url, (str, unicode)):
    url = atom.url.parse_url(url)
  if 'oauth_token' not in url.params:
    return None
  scopes = []
  if scopes_param_prefix in url.params:
    scopes = url.params[scopes_param_prefix].split(' ')
  token_key = url.params['oauth_token']
  token = OAuthToken(key=token_key, scopes=scopes)
  return token


def OAuthTokenFromHttpBody(http_body):
  """Parses the HTTP response body and returns an OAuth token.

  The returned OAuth token will just have key and secret parameters set.
  It won't have any knowledge about the scopes or oauth_input_params. It is
  your responsibility to make it aware of the remaining parameters.

  Returns:
    OAuthToken OAuth token.
  """
  token = oauth.OAuthToken.from_string(http_body)
  oauth_token = OAuthToken(key=token.key, secret=token.secret)
  return oauth_token


class OAuthSignatureMethod(object):
  """Holds valid OAuth signature methods.

  RSA_SHA1: Class to build signature according to RSA-SHA1 algorithm.
  HMAC_SHA1: Class to build signature according to HMAC-SHA1 algorithm.
  """

  HMAC_SHA1 = oauth.OAuthSignatureMethod_HMAC_SHA1

  class RSA_SHA1(oauth_rsa.OAuthSignatureMethod_RSA_SHA1):
    """Provides implementation for abstract methods to return RSA certs."""

    def __init__(self, private_key, public_cert):
      self.private_key = private_key
      self.public_cert = public_cert

    def _fetch_public_cert(self, unused_oauth_request):
      return self.public_cert

    def _fetch_private_cert(self, unused_oauth_request):
      return self.private_key


class OAuthInputParams(object):
  """Stores OAuth input parameters.

  This class is a store for OAuth input parameters viz. consumer key and
  secret, signature method and RSA key.
  """

  def __init__(self, signature_method, consumer_key, consumer_secret=None,
               rsa_key=None, requestor_id=None):
    """Initializes object with parameters required for using OAuth mechanism.

    NOTE: Though consumer_secret and rsa_key are optional, either of the two
    is required depending on the value of the signature_method.

    Args:
      signature_method: class which provides implementation for strategy
          class oauth.oauth.OAuthSignatureMethod. Signature method to be
          used for signing each request. Valid implementations are provided
          as the constants defined by gdata.auth.OAuthSignatureMethod.
          Currently they are gdata.auth.OAuthSignatureMethod.RSA_SHA1 and
          gdata.auth.OAuthSignatureMethod.HMAC_SHA1. Instead of passing in
          the strategy class, you may pass in a string for 'RSA_SHA1' or
          'HMAC_SHA1'. If you plan to use OAuth on App Engine (or another
          WSGI environment) I recommend specifying signature method using a
          string (the only options are 'RSA_SHA1' and 'HMAC_SHA1'). In these
          environments there are sometimes issues with pickling an object in
          which a member references a class or function. Storing a string to
          refer to the signature method mitigates complications when
          pickling.
      consumer_key: string Domain identifying third-party web application.
      consumer_secret: string (optional) Secret generated during
          registration. Required only for HMAC_SHA1 signature method.
      rsa_key: string (optional) Private key required for RSA_SHA1 signature
          method.
      requestor_id: string (optional) User email address to make requests on
          their behalf. This parameter should only be set when performing
          2-legged OAuth requests.
    """
    if (signature_method == OAuthSignatureMethod.RSA_SHA1
        or signature_method == 'RSA_SHA1'):
      self.__signature_strategy = 'RSA_SHA1'
    elif (signature_method == OAuthSignatureMethod.HMAC_SHA1
          or signature_method == 'HMAC_SHA1'):
      self.__signature_strategy = 'HMAC_SHA1'
    else:
      self.__signature_strategy = signature_method
    self.rsa_key = rsa_key
    self._consumer = oauth.OAuthConsumer(consumer_key, consumer_secret)
    self.requestor_id = requestor_id

  def __get_signature_method(self):
    if self.__signature_strategy == 'RSA_SHA1':
      return OAuthSignatureMethod.RSA_SHA1(self.rsa_key, None)
    elif self.__signature_strategy == 'HMAC_SHA1':
      return OAuthSignatureMethod.HMAC_SHA1()
    else:
      return self.__signature_strategy()

  def __set_signature_method(self, signature_method):
    if (signature_method == OAuthSignatureMethod.RSA_SHA1
        or signature_method == 'RSA_SHA1'):
      self.__signature_strategy = 'RSA_SHA1'
    elif (signature_method == OAuthSignatureMethod.HMAC_SHA1
          or signature_method == 'HMAC_SHA1'):
      self.__signature_strategy = 'HMAC_SHA1'
    else:
      self.__signature_strategy = signature_method

  _signature_method = property(__get_signature_method, __set_signature_method,
      doc="""Returns object capable of signing the request using RSA or HMAC.

      Replaces the _signature_method member to avoid pickle errors.""")

  def GetSignatureMethod(self):
    """Gets the OAuth signature method.

    Returns:
      object of supertype <oauth.oauth.OAuthSignatureMethod>
    """
    return self._signature_method

  def GetConsumer(self):
    """Gets the OAuth consumer.

    Returns:
      object of type <oauth.oauth.Consumer>
    """
    return self._consumer


class ClientLoginToken(atom.http_interface.GenericToken):
  """Stores the Authorization header in auth_header and adds to requests.

  This token will add its Authorization header to an HTTP request as it is
  made. This token class is simple but some Token classes must calculate
  portions of the Authorization header based on the request being made,
  which is why the token is responsible for making requests via an
  http_client parameter.

  Args:
    auth_header: str The value for the Authorization header.
    scopes: list of str or atom.url.Url specifying the beginnings of URLs
        for which this token can be used. For example, if scopes contains
        'http://example.com/foo', then this token can be used for a request
        to 'http://example.com/foo/bar' but it cannot be used for a request
        to 'http://example.com/baz'
  """

  def __init__(self, auth_header=None, scopes=None):
    self.auth_header = auth_header
    self.scopes = scopes or []

  def __str__(self):
    return self.auth_header

  def perform_request(self, http_client, operation, url, data=None,
                      headers=None):
    """Sets the Authorization header and makes the HTTP request."""
    if headers is None:
      headers = {'Authorization': self.auth_header}
    else:
      headers['Authorization'] = self.auth_header
    return http_client.request(operation, url, data=data, headers=headers)

  def get_token_string(self):
    """Removes PROGRAMMATIC_AUTH_LABEL to give just the token value."""
    return self.auth_header[len(PROGRAMMATIC_AUTH_LABEL):]

  def set_token_string(self, token_string):
    self.auth_header = '%s%s' % (PROGRAMMATIC_AUTH_LABEL, token_string)

  def valid_for_scope(self, url):
    """Tells the caller if the token authorizes access to the desired URL."""
    if isinstance(url, (str, unicode)):
      url = atom.url.parse_url(url)
    for scope in self.scopes:
      if scope == atom.token_store.SCOPE_ALL:
        return True
      if isinstance(scope, (str, unicode)):
        scope = atom.url.parse_url(scope)
      if scope == url:
        return True
      # Check the host and the path, but ignore the port and protocol.
      elif scope.host == url.host and not scope.path:
        return True
      elif scope.host == url.host and scope.path and not url.path:
        continue
      elif scope.host == url.host and url.path.startswith(scope.path):
        return True
    return False


class AuthSubToken(ClientLoginToken):

  def get_token_string(self):
    """Removes AUTHSUB_AUTH_LABEL to give just the token value."""
    return self.auth_header[len(AUTHSUB_AUTH_LABEL):]

  def set_token_string(self, token_string):
    self.auth_header = '%s%s' % (AUTHSUB_AUTH_LABEL, token_string)


class OAuthToken(atom.http_interface.GenericToken):
  """Stores the token key, token secret and scopes for which token is valid.

  This token adds the authorization header to each request made. It
  re-calculates the authorization header for every request since the OAuth
  signature to be added to the authorization header is dependent on the
  request parameters.

  Attributes:
    key: str The value for the OAuth token i.e. token key.
    secret: str The value for the OAuth token secret.
    scopes: list of str or atom.url.Url specifying the beginnings of URLs
        for which this token can be used. For example, if scopes contains
        'http://example.com/foo', then this token can be used for a request
        to 'http://example.com/foo/bar' but it cannot be used for a request
        to 'http://example.com/baz'
    oauth_input_params: OAuthInputParams OAuth input parameters.
  """

  def __init__(self, key=None, secret=None, scopes=None,
               oauth_input_params=None):
    self.key = key
    self.secret = secret
    self.scopes = scopes or []
    self.oauth_input_params = oauth_input_params

  def __str__(self):
    return self.get_token_string()

  def get_token_string(self):
    """Returns the token string.

    The token string returned is of format
    oauth_token=[0]&oauth_token_secret=[1], where [0] and [1] are some
    strings.

    Returns:
      A token string of format oauth_token=[0]&oauth_token_secret=[1], where
      [0] and [1] are some strings. If self.secret is absent, it just
      returns oauth_token=[0]. If self.key is absent, it just returns
      oauth_token_secret=[1]. If both are absent, it returns None.
    """
    if self.key and self.secret:
      return urllib.urlencode({'oauth_token': self.key,
                               'oauth_token_secret': self.secret})
    elif self.key:
      return 'oauth_token=%s' % self.key
    elif self.secret:
      return 'oauth_token_secret=%s' % self.secret
    else:
      return None

  def set_token_string(self, token_string):
    """Sets the token key and secret from the token string.

    Args:
      token_string: str Token string of form
          oauth_token=[0]&oauth_token_secret=[1]. If oauth_token is not
          present, self.key will be None. If oauth_token_secret is not
          present, self.secret will be None.
    """
    token_params = cgi.parse_qs(token_string, keep_blank_values=False)
    if 'oauth_token' in token_params:
      self.key = token_params['oauth_token'][0]
    if 'oauth_token_secret' in token_params:
      self.secret = token_params['oauth_token_secret'][0]

  def GetAuthHeader(self, http_method, http_url, realm=''):
    """Get the authentication header.

    Args:
      http_method: string HTTP method i.e. operation e.g. GET, POST, PUT,
          etc.
      http_url: string or atom.url.Url HTTP URL to which request is made.
      realm: string (default='') realm parameter to be included in the
          authorization header.

    Returns:
      dict Header to be sent with every subsequent request after
      authentication.
    """
    # NOTE(review): the body of this method was lost in source truncation.
    # Fail loudly rather than silently returning None; restore the
    # implementation from the upstream gdata.auth module.
    raise NotImplementedError(
        'GetAuthHeader body missing from truncated source; restore from '
        'upstream gdata.auth')
""" if isinstance(http_url, types.StringTypes): http_url = atom.url.parse_url(http_url) header = None token = None if self.key or self.secret: token = oauth.OAuthToken(self.key, self.secret) oauth_request = oauth.OAuthRequest.from_consumer_and_token( self.oauth_input_params.GetConsumer(), token=token, http_url=str(http_url), http_method=http_method, parameters=http_url.params) oauth_request.sign_request(self.oauth_input_params.GetSignatureMethod(), self.oauth_input_params.GetConsumer(), token) header = oauth_request.to_header(realm=realm) header['Authorization'] = header['Authorization'].replace('+', '%2B') return header def perform_request(self, http_client, operation, url, data=None, headers=None): """Sets the Authorization header and makes the HTTP request.""" if not headers: headers = {} if self.oauth_input_params.requestor_id: url.params['xoauth_requestor_id'] = self.oauth_input_params.requestor_id headers.update(self.GetAuthHeader(operation, url)) return http_client.request(operation, url, data=data, headers=headers) def valid_for_scope(self, url): if isinstance(url, (str, unicode)): url = atom.url.parse_url(url) for scope in self.scopes: if scope == atom.token_store.SCOPE_ALL: return True if isinstance(scope, (str, unicode)): scope = atom.url.parse_url(scope) if scope == url: return True # Check the host and the path, but ignore the port and protocol. elif scope.host == url.host and not scope.path: return True elif scope.host == url.host and scope.path and not url.path: continue elif scope.host == url.host and url.path.startswith(scope.path): return True return False class SecureAuthSubToken(AuthSubToken): """Stores the rsa private key, token, and scopes for the secure AuthSub token. This token adds the authorization header to each request made. It re-calculates authorization header for every request since the secure AuthSub signature to be added to the authorization header is dependent on the request parameters. 
Attributes: rsa_key: string The RSA private key in PEM format that the token will use to sign requests token_string: string (optional) The value for the AuthSub token. scopes: list of str or atom.url.Url specifying the beginnings of URLs for which this token can be used. For example, if scopes contains 'http://example.com/foo', then this token can be used for a request to 'http://example.com/foo/bar' but it cannot be used for a request to 'http://example.com/baz' """ def __init__(self, rsa_key, token_string=None, scopes=None): self.rsa_key = keyfactory.parsePEMKey(rsa_key) self.token_string = token_string or '' self.scopes = scopes or [] def __str__(self): return self.get_token_string() def get_token_string(self): return str(self.token_string) def set_token_string(self, token_string): self.token_string = token_string def GetAuthHeader(self, http_method, http_url): """Generates the Authorization header. The form of the secure AuthSub Authorization header is Authorization: AuthSub token="token" sigalg="sigalg" data="data" sig="sig" and data represents a string in the form data = http_method http_url timestamp nonce Args: http_method: string HTTP method i.e. operation e.g. GET, POST, PUT, etc. http_url: string or atom.url.Url HTTP URL to which request is made. Returns: dict Header to be sent with every subsequent request after authentication. 
""" timestamp = int(math.floor(time.time())) nonce = '%lu' % random.randrange(1, 2**64) data = '%s %s %d %s' % (http_method, str(http_url), timestamp, nonce) sig = cryptomath.bytesToBase64(self.rsa_key.hashAndSign(data)) header = {'Authorization': '%s"%s" data="%s" sig="%s" sigalg="rsa-sha1"' % (AUTHSUB_AUTH_LABEL, self.token_string, data, sig)} return header def perform_request(self, http_client, operation, url, data=None, headers=None): """Sets the Authorization header and makes the HTTP request.""" if not headers: headers = {} headers.update(self.GetAuthHeader(operation, url)) return http_client.request(operation, url, data=data, headers=headers)
Python
#!/usr/bin/python # # Copyright (C) 2009 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Contains the data classes of the Google Access Control List (ACL) Extension""" __author__ = 'j.s@google.com (Jeff Scudder)' import atom.core import atom.data import gdata.data import gdata.opensearch.data GACL_TEMPLATE = '{http://schemas.google.com/acl/2007}%s' class AclRole(atom.core.XmlElement): """Describes the role of an entry in an access control list.""" _qname = GACL_TEMPLATE % 'role' value = 'value' class AclScope(atom.core.XmlElement): """Describes the scope of an entry in an access control list.""" _qname = GACL_TEMPLATE % 'scope' type = 'type' value = 'value' class AclWithKey(atom.core.XmlElement): """Describes a key that can be used to access a document.""" _qname = GACL_TEMPLATE % 'withKey' key = 'key' role = AclRole class AclEntry(gdata.data.GDEntry): """Describes an entry in a feed of an access control list (ACL).""" scope = AclScope role = AclRole with_key = AclWithKey class AclFeed(gdata.data.GDFeed): """Describes a feed of an access control list (ACL).""" entry = [AclEntry]
Python
#!/usr/bin/python # # Copyright (C) 2009 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License.
Python
#!/usr/bin/python # # Copyright 2009 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """SitesClient extends gdata.client.GDClient to streamline Sites API calls.""" __author__ = 'e.bidelman (Eric Bidelman)' import atom.data import gdata.client import gdata.sites.data import gdata.gauth # Feed URI templates CONTENT_FEED_TEMPLATE = '/feeds/content/%s/%s/' REVISION_FEED_TEMPLATE = '/feeds/revision/%s/%s/' ACTIVITY_FEED_TEMPLATE = '/feeds/activity/%s/%s/' SITE_FEED_TEMPLATE = '/feeds/site/%s/' ACL_FEED_TEMPLATE = '/feeds/acl/site/%s/%s/' class SitesClient(gdata.client.GDClient): """Client extension for the Google Sites API service.""" host = 'sites.google.com' # default server for the API domain = 'site' # default site domain name api_version = '1.1' # default major version for the service. auth_service = 'jotspot' auth_scopes = gdata.gauth.AUTH_SCOPES['jotspot'] ssl = True def __init__(self, site=None, domain=None, auth_token=None, **kwargs): """Constructs a new client for the Sites API. Args: site: string (optional) Name (webspace) of the Google Site domain: string (optional) Domain of the (Google Apps hosted) Site. If no domain is given, the Site is assumed to be a consumer Google Site, in which case the value 'site' is used. auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or OAuthToken which authorizes this client to edit the user's data. kwargs: The other parameters to pass to gdata.client.GDClient constructor. 
""" gdata.client.GDClient.__init__(self, auth_token=auth_token, **kwargs) self.site = site if domain is not None: self.domain = domain def __make_kind_category(self, label): if label is None: return None return atom.data.Category( scheme=gdata.sites.data.SITES_KIND_SCHEME, term='%s#%s' % (gdata.sites.data.SITES_NAMESPACE, label), label=label) __MakeKindCategory = __make_kind_category def __upload(self, entry, media_source, auth_token=None, **kwargs): """Uploads an attachment file to the Sites API. Args: entry: gdata.sites.data.ContentEntry The Atom XML to include. media_source: gdata.data.MediaSource The file payload to be uploaded. auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or OAuthToken which authorizes this client to edit the user's data. kwargs: Other parameters to pass to gdata.client.post(). Returns: The created entry. """ uri = self.make_content_feed_uri() return self.post(entry, uri, media_source=media_source, auth_token=auth_token, **kwargs) def _get_file_content(self, uri): """Fetches the file content from the specified URI. Args: uri: string The full URL to fetch the file contents from. Returns: The binary file content. Raises: gdata.client.RequestError: on error response from server. 
""" server_response = self.request('GET', uri) if server_response.status != 200: raise gdata.client.RequestError, {'status': server_response.status, 'reason': server_response.reason, 'body': server_response.read()} return server_response.read() _GetFileContent = _get_file_content def make_content_feed_uri(self): return CONTENT_FEED_TEMPLATE % (self.domain, self.site) MakeContentFeedUri = make_content_feed_uri def make_revision_feed_uri(self): return REVISION_FEED_TEMPLATE % (self.domain, self.site) MakeRevisionFeedUri = make_revision_feed_uri def make_activity_feed_uri(self): return ACTIVITY_FEED_TEMPLATE % (self.domain, self.site) MakeActivityFeedUri = make_activity_feed_uri def make_site_feed_uri(self, site_name=None): if site_name is not None: return (SITE_FEED_TEMPLATE % self.domain) + site_name else: return SITE_FEED_TEMPLATE % self.domain MakeSiteFeedUri = make_site_feed_uri def make_acl_feed_uri(self): return ACL_FEED_TEMPLATE % (self.domain, self.site) MakeAclFeedUri = make_acl_feed_uri def get_content_feed(self, uri=None, auth_token=None, **kwargs): """Retrieves the content feed containing the current state of site. Args: uri: string (optional) A full URI to query the Content feed with. auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or OAuthToken which authorizes this client to edit the user's data. kwargs: Other parameters to pass to self.get_feed(). Returns: gdata.sites.data.ContentFeed """ if uri is None: uri = self.make_content_feed_uri() return self.get_feed(uri, desired_class=gdata.sites.data.ContentFeed, auth_token=auth_token, **kwargs) GetContentFeed = get_content_feed def get_revision_feed(self, entry_or_uri_or_id, auth_token=None, **kwargs): """Retrieves the revision feed containing the revision history for a node. Args: entry_or_uri_or_id: string or gdata.sites.data.ContentEntry A full URI, content entry node ID, or a content entry object of the entry to retrieve revision information for. 
auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or OAuthToken which authorizes this client to edit the user's data. kwargs: Other parameters to pass to self.get_feed(). Returns: gdata.sites.data.RevisionFeed """ uri = self.make_revision_feed_uri() if isinstance(entry_or_uri_or_id, gdata.sites.data.ContentEntry): uri = entry_or_uri_or_id.FindRevisionLink() elif entry_or_uri_or_id.find('/') == -1: uri += entry_or_uri_or_id else: uri = entry_or_uri_or_id return self.get_feed(uri, desired_class=gdata.sites.data.RevisionFeed, auth_token=auth_token, **kwargs) GetRevisionFeed = get_revision_feed def get_activity_feed(self, uri=None, auth_token=None, **kwargs): """Retrieves the activity feed containing recent Site activity. Args: uri: string (optional) A full URI to query the Activity feed. auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or OAuthToken which authorizes this client to edit the user's data. kwargs: Other parameters to pass to self.get_feed(). Returns: gdata.sites.data.ActivityFeed """ if uri is None: uri = self.make_activity_feed_uri() return self.get_feed(uri, desired_class=gdata.sites.data.ActivityFeed, auth_token=auth_token, **kwargs) GetActivityFeed = get_activity_feed def get_site_feed(self, uri=None, auth_token=None, **kwargs): """Retrieves the site feed containing a list of sites a user has access to. Args: uri: string (optional) A full URI to query the site feed. auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or OAuthToken which authorizes this client to edit the user's data. kwargs: Other parameters to pass to self.get_feed(). Returns: gdata.sites.data.SiteFeed """ if uri is None: uri = self.make_site_feed_uri() return self.get_feed(uri, desired_class=gdata.sites.data.SiteFeed, auth_token=auth_token, **kwargs) GetSiteFeed = get_site_feed def get_acl_feed(self, uri=None, auth_token=None, **kwargs): """Retrieves the acl feed containing a site's sharing permissions. 
Args: uri: string (optional) A full URI to query the acl feed. auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or OAuthToken which authorizes this client to edit the user's data. kwargs: Other parameters to pass to self.get_feed(). Returns: gdata.sites.data.AclFeed """ if uri is None: uri = self.make_acl_feed_uri() return self.get_feed(uri, desired_class=gdata.sites.data.AclFeed, auth_token=auth_token, **kwargs) GetAclFeed = get_acl_feed def create_site(self, title, description=None, source_site=None, theme=None, uri=None, auth_token=None, **kwargs): """Creates a new Google Site. Note: This feature is only available to Google Apps domains. Args: title: string Title for the site. description: string (optional) A description/summary for the site. source_site: string (optional) The site feed URI of the site to copy. This parameter should only be specified when copying a site. theme: string (optional) The name of the theme to create the site with. uri: string (optional) A full site feed URI to override where the site is created/copied. By default, the site will be created under the currently set domain (e.g. self.domain). auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or OAuthToken which authorizes this client to edit the user's data. kwargs: Other parameters to pass to gdata.client.post(). Returns: gdata.sites.data.SiteEntry of the created site. """ new_entry = gdata.sites.data.SiteEntry(title=atom.data.Title(text=title)) if description is not None: new_entry.summary = gdata.sites.data.Summary(text=description) # Add the source link if we're making a copy of a site. 
if source_site is not None: source_link = atom.data.Link(rel=gdata.sites.data.SITES_SOURCE_LINK_REL, type='application/atom+xml', href=source_site) new_entry.link.append(source_link) if theme is not None: new_entry.theme = gdata.sites.data.Theme(text=theme) if uri is None: uri = self.make_site_feed_uri() return self.post(new_entry, uri, auth_token=auth_token, **kwargs) CreateSite = create_site def create_page(self, kind, title, html='', page_name=None, parent=None, auth_token=None, **kwargs): """Creates a new page (specified by kind) on a Google Site. Args: kind: string The type of page/item to create. For example, webpage, listpage, comment, announcementspage, filecabinet, etc. The full list of supported kinds can be found in gdata.sites.gdata.SUPPORT_KINDS. title: string Title for the page. html: string (optional) XHTML for the page's content body. page_name: string (optional) The URL page name to set. If not set, the title will be normalized and used as the page's URL path. parent: string or gdata.sites.data.ContentEntry (optional) The parent entry or parent link url to create the page under. auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or OAuthToken which authorizes this client to edit the user's data. kwargs: Other parameters to pass to gdata.client.post(). Returns: gdata.sites.data.ContentEntry of the created page. """ new_entry = gdata.sites.data.ContentEntry( title=atom.data.Title(text=title), kind=kind, content=gdata.sites.data.Content(text=html)) if page_name is not None: new_entry.page_name = gdata.sites.data.PageName(text=page_name) # Add parent link to entry if it should be uploaded as a subpage. 
if isinstance(parent, gdata.sites.data.ContentEntry): parent_link = atom.data.Link(rel=gdata.sites.data.SITES_PARENT_LINK_REL, type='application/atom+xml', href=parent.GetSelfLink().href) new_entry.link.append(parent_link) elif parent is not None: parent_link = atom.data.Link(rel=gdata.sites.data.SITES_PARENT_LINK_REL, type='application/atom+xml', href=parent) new_entry.link.append(parent_link) return self.post(new_entry, self.make_content_feed_uri(), auth_token=auth_token, **kwargs) CreatePage = create_page def create_webattachment(self, src, content_type, title, parent, description=None, auth_token=None, **kwargs): """Creates a new webattachment within a filecabinet. Args: src: string The url of the web attachment. content_type: string The MIME type of the web attachment. title: string The title to name the web attachment. parent: string or gdata.sites.data.ContentEntry (optional) The parent entry or url of the filecabinet to create the attachment under. description: string (optional) A summary/description for the attachment. auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or OAuthToken which authorizes this client to edit the user's data. kwargs: Other parameters to pass to gdata.client.post(). Returns: gdata.sites.data.ContentEntry of the created page. 
""" new_entry = gdata.sites.data.ContentEntry( title=atom.data.Title(text=title), kind='webattachment', content=gdata.sites.data.Content(src=src, type=content_type)) if isinstance(parent, gdata.sites.data.ContentEntry): link = atom.data.Link(rel=gdata.sites.data.SITES_PARENT_LINK_REL, type='application/atom+xml', href=parent.GetSelfLink().href) elif parent is not None: link = atom.data.Link(rel=gdata.sites.data.SITES_PARENT_LINK_REL, type='application/atom+xml', href=parent) new_entry.link.append(link) # Add file decription if it was specified if description is not None: new_entry.summary = gdata.sites.data.Summary(type='text', text=description) return self.post(new_entry, self.make_content_feed_uri(), auth_token=auth_token, **kwargs) CreateWebAttachment = create_webattachment def upload_attachment(self, file_handle, parent, content_type=None, title=None, description=None, folder_name=None, auth_token=None, **kwargs): """Uploads an attachment to a parent page. Args: file_handle: MediaSource or string A gdata.data.MediaSource object containing the file to be uploaded or the full path name to the file on disk. parent: gdata.sites.data.ContentEntry or string The parent page to upload the file to or the full URI of the entry's self link. content_type: string (optional) The MIME type of the file (e.g 'application/pdf'). This should be provided if file is not a MediaSource object. title: string (optional) The title to name the attachment. If not included, the filepath or media source's filename is used. description: string (optional) A summary/description for the attachment. folder_name: string (optional) The name of an existing folder to upload the attachment to. This only applies when the parent parameter points to a filecabinet entry. auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or OAuthToken which authorizes this client to edit the user's data. kwargs: Other parameters to pass to self.__upload(). 
Returns: A gdata.sites.data.ContentEntry containing information about the created attachment. """ if isinstance(parent, gdata.sites.data.ContentEntry): link = atom.data.Link(rel=gdata.sites.data.SITES_PARENT_LINK_REL, type='application/atom+xml', href=parent.GetSelfLink().href) else: link = atom.data.Link(rel=gdata.sites.data.SITES_PARENT_LINK_REL, type='application/atom+xml', href=parent) if not isinstance(file_handle, gdata.data.MediaSource): ms = gdata.data.MediaSource(file_path=file_handle, content_type=content_type) else: ms = file_handle # If no title specified, use the file name if title is None: title = ms.file_name new_entry = gdata.sites.data.ContentEntry(kind='attachment') new_entry.title = atom.data.Title(text=title) new_entry.link.append(link) # Add file decription if it was specified if description is not None: new_entry.summary = gdata.sites.data.Summary(type='text', text=description) # Upload the attachment to a filecabinet folder? if parent.Kind() == 'filecabinet' and folder_name is not None: folder_category = atom.data.Category( scheme=gdata.sites.data.FOLDER_KIND_TERM, term=folder_name) new_entry.category.append(folder_category) return self.__upload(new_entry, ms, auth_token=auth_token, **kwargs) UploadAttachment = upload_attachment def download_attachment(self, uri_or_entry, file_path): """Downloads an attachment file to disk. Args: uri_or_entry: string The full URL to download the file from. file_path: string The full path to save the file to. Raises: gdata.client.RequestError: on error response from server. """ uri = uri_or_entry if isinstance(uri_or_entry, gdata.sites.data.ContentEntry): uri = uri_or_entry.content.src f = open(file_path, 'wb') try: f.write(self._get_file_content(uri)) except gdata.client.RequestError, e: f.close() raise e f.flush() f.close() DownloadAttachment = download_attachment
Python
#!/usr/bin/python
#
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Data model classes for parsing and generating XML for the Sites Data API."""

__author__ = 'e.bidelman (Eric Bidelman)'

import atom.core
import atom.data
import gdata.acl.data
import gdata.data

# XML Namespaces used in Google Sites entities.
SITES_NAMESPACE = 'http://schemas.google.com/sites/2008'
SITES_TEMPLATE = '{http://schemas.google.com/sites/2008}%s'
SPREADSHEETS_NAMESPACE = 'http://schemas.google.com/spreadsheets/2006'
SPREADSHEETS_TEMPLATE = '{http://schemas.google.com/spreadsheets/2006}%s'
# NOTE(review): the Dublin Core terms namespace is usually written with a
# trailing slash ('http://purl.org/dc/terms/') -- confirm against the wire
# format before changing this constant.
DC_TERMS_TEMPLATE = '{http://purl.org/dc/terms}%s'
THR_TERMS_TEMPLATE = '{http://purl.org/syndication/thread/1.0}%s'
XHTML_NAMESPACE = 'http://www.w3.org/1999/xhtml'
XHTML_TEMPLATE = '{http://www.w3.org/1999/xhtml}%s'

# Link "rel" values relating a content entry to its parent, revisions,
# and (for copied sites) its source site.
SITES_PARENT_LINK_REL = SITES_NAMESPACE + '#parent'
SITES_REVISION_LINK_REL = SITES_NAMESPACE + '#revision'
SITES_SOURCE_LINK_REL = SITES_NAMESPACE + '#source'

# Category scheme/terms identifying the kind of a Sites entry.
SITES_KIND_SCHEME = 'http://schemas.google.com/g/2005#kind'
ANNOUNCEMENT_KIND_TERM = SITES_NAMESPACE + '#announcement'
ANNOUNCEMENT_PAGE_KIND_TERM = SITES_NAMESPACE + '#announcementspage'
ATTACHMENT_KIND_TERM = SITES_NAMESPACE + '#attachment'
COMMENT_KIND_TERM = SITES_NAMESPACE + '#comment'
FILECABINET_KIND_TERM = SITES_NAMESPACE + '#filecabinet'
LISTITEM_KIND_TERM = SITES_NAMESPACE + '#listitem'
LISTPAGE_KIND_TERM = SITES_NAMESPACE + '#listpage'
WEBPAGE_KIND_TERM = SITES_NAMESPACE + '#webpage'
WEBATTACHMENT_KIND_TERM = SITES_NAMESPACE + '#webattachment'
FOLDER_KIND_TERM = SITES_NAMESPACE + '#folder'
TAG_KIND_TERM = SITES_NAMESPACE + '#tag'

# Short names of every entry kind the API supports.
SUPPORT_KINDS = [
    'announcement', 'announcementspage', 'attachment', 'comment',
    'filecabinet', 'listitem', 'listpage', 'webpage', 'webattachment', 'tag'
    ]


class Revision(atom.core.XmlElement):
  """Google Sites <sites:revision>."""
  _qname = SITES_TEMPLATE % 'revision'


class PageName(atom.core.XmlElement):
  """Google Sites <sites:pageName>."""
  _qname = SITES_TEMPLATE % 'pageName'


class SiteName(atom.core.XmlElement):
  """Google Sites <sites:siteName>."""
  _qname = SITES_TEMPLATE % 'siteName'


class Theme(atom.core.XmlElement):
  """Google Sites <sites:theme>."""
  _qname = SITES_TEMPLATE % 'theme'


class Deleted(atom.core.XmlElement):
  """Google Sites <gd:deleted>."""
  _qname = gdata.data.GDATA_TEMPLATE % 'deleted'


class Publisher(atom.core.XmlElement):
  """Google Sites <dc:publisher>."""
  _qname = DC_TERMS_TEMPLATE % 'publisher'


class Worksheet(atom.core.XmlElement):
  """Google Sites List Page <gs:worksheet>."""
  _qname = SPREADSHEETS_TEMPLATE % 'worksheet'
  # Maps the 'name' XML attribute.
  name = 'name'


class Header(atom.core.XmlElement):
  """Google Sites List Page <gs:header>."""
  _qname = SPREADSHEETS_TEMPLATE % 'header'
  row = 'row'


class Column(atom.core.XmlElement):
  """Google Sites List Page <gs:column>."""
  _qname = SPREADSHEETS_TEMPLATE % 'column'
  index = 'index'
  name = 'name'


class Data(atom.core.XmlElement):
  """Google Sites List Page <gs:data>."""
  _qname = SPREADSHEETS_TEMPLATE % 'data'
  startRow = 'startRow'
  # Repeated <gs:column> children.
  column = [Column]


class Field(atom.core.XmlElement):
  """Google Sites List Item <gs:field>."""
  _qname = SPREADSHEETS_TEMPLATE % 'field'
  index = 'index'
  name = 'name'


class InReplyTo(atom.core.XmlElement):
  """Google Sites List Item <thr:in-reply-to>."""
  _qname = THR_TERMS_TEMPLATE % 'in-reply-to'
  href = 'href'
  ref = 'ref'
  source = 'source'
  type = 'type'


class Content(atom.data.Content):
  """Google Sites version of <atom:content> that encapsulates XHTML."""

  def __init__(self, html=None, type=None, **kwargs):
    # Default the content type to 'xhtml' when raw HTML is supplied.
    if type is None and html:
      type = 'xhtml'
    super(Content, self).__init__(type=type, **kwargs)
    if html is not None:
      self.html = html

  def _get_html(self):
    # The parsed XHTML tree is kept as the single child element.
    if self.children:
      return self.children[0]
    else:
      return ''

  def _set_html(self, html):
    if not html:
      self.children = []
      return
    # Accept either a raw string (parsed here) or a pre-built element tree.
    if type(html) == str:
      html = atom.core.parse(html)
      if not html.namespace:
        html.namespace = XHTML_NAMESPACE
    self.children = [html]

  html = property(_get_html, _set_html)


class Summary(atom.data.Summary):
  """Google Sites version of <atom:summary>."""

  def __init__(self, html=None, type=None, text=None, **kwargs):
    # Default the type to 'xhtml' when raw HTML is supplied.
    if type is None and html:
      type = 'xhtml'
    super(Summary, self).__init__(type=type, text=text, **kwargs)
    if html is not None:
      self.html = html

  def _get_html(self):
    if self.children:
      return self.children[0]
    else:
      return ''

  def _set_html(self, html):
    if not html:
      self.children = []
      return
    if type(html) == str:
      html = atom.core.parse(html)
      if not html.namespace:
        html.namespace = XHTML_NAMESPACE
    self.children = [html]

  html = property(_get_html, _set_html)


class BaseSiteEntry(gdata.data.GDEntry):
  """Google Sites Entry."""

  def __init__(self, kind=None, **kwargs):
    super(BaseSiteEntry, self).__init__(**kwargs)
    # Tag the entry with its kind category (e.g. 'webpage', 'attachment').
    if kind is not None:
      self.category.append(
          atom.data.Category(scheme=SITES_KIND_SCHEME,
                             term='%s#%s' % (SITES_NAMESPACE, kind),
                             label=kind))

  def __find_category_scheme(self, scheme):
    # Returns the first category matching the given scheme, or None.
    for category in self.category:
      if category.scheme == scheme:
        return category
    return None

  def kind(self):
    # Strips the '<namespace>#' prefix off the kind category's term.
    kind = self.__find_category_scheme(SITES_KIND_SCHEME)
    if kind is not None:
      return kind.term[len(SITES_NAMESPACE) + 1:]
    else:
      return None

  Kind = kind

  def get_node_id(self):
    # The node ID is the last path segment of the entry's atom:id.
    return self.id.text[self.id.text.rfind('/') + 1:]

  GetNodeId = get_node_id

  def find_parent_link(self):
    return self.find_url(SITES_PARENT_LINK_REL)

  FindParentLink = find_parent_link

  def is_deleted(self):
    return self.deleted is not None

  IsDeleted = is_deleted


class ContentEntry(BaseSiteEntry):
  """Google Sites Content Entry."""
  content = Content
  deleted = Deleted
  publisher = Publisher
  in_reply_to = InReplyTo
  worksheet = Worksheet
  header = Header
  data = Data
  field = [Field]
  revision = Revision
  page_name = PageName
  feed_link = gdata.data.FeedLink

  # NOTE: the misspelling ('revison') is part of the public API surface and
  # must be preserved; the correctly spelled CamelCase alias is below.
  def find_revison_link(self):
    return self.find_url(SITES_REVISION_LINK_REL)

  FindRevisionLink = find_revison_link


class ContentFeed(gdata.data.GDFeed):
  """Google Sites Content Feed.

  The Content feed is a feed containing the current, editable site content.
  """
  entry = [ContentEntry]

  def __get_entry_type(self, kind):
    # Filters the feed's entries down to those of the requested kind.
    matches = []
    for entry in self.entry:
      if entry.Kind() == kind:
        matches.append(entry)
    return matches

  def get_announcements(self):
    return self.__get_entry_type('announcement')

  GetAnnouncements = get_announcements

  def get_announcement_pages(self):
    return self.__get_entry_type('announcementspage')

  GetAnnouncementPages = get_announcement_pages

  def get_attachments(self):
    return self.__get_entry_type('attachment')

  GetAttachments = get_attachments

  def get_comments(self):
    return self.__get_entry_type('comment')

  GetComments = get_comments

  def get_file_cabinets(self):
    return self.__get_entry_type('filecabinet')

  GetFileCabinets = get_file_cabinets

  def get_list_items(self):
    return self.__get_entry_type('listitem')

  GetListItems = get_list_items

  def get_list_pages(self):
    return self.__get_entry_type('listpage')

  GetListPages = get_list_pages

  def get_webpages(self):
    return self.__get_entry_type('webpage')

  GetWebpages = get_webpages

  def get_webattachments(self):
    return self.__get_entry_type('webattachment')

  GetWebattachments = get_webattachments


class ActivityEntry(BaseSiteEntry):
  """Google Sites Activity Entry."""
  summary = Summary


class ActivityFeed(gdata.data.GDFeed):
  """Google Sites Activity Feed.

  The Activity feed is a feed containing recent Site activity.
  """
  entry = [ActivityEntry]


class RevisionEntry(BaseSiteEntry):
  """Google Sites Revision Entry."""
  content = Content


class RevisionFeed(gdata.data.GDFeed):
  """Google Sites Revision Feed.

  The Revision feed is a feed containing the revision history of a
  content entry.
  """
  entry = [RevisionEntry]


class SiteEntry(gdata.data.GDEntry):
  """Google Sites Site Feed Entry."""
  site_name = SiteName
  theme = Theme

  def find_source_link(self):
    return self.find_url(SITES_SOURCE_LINK_REL)

  FindSourceLink = find_source_link


class SiteFeed(gdata.data.GDFeed):
  """Google Sites Site Feed.

  The Site feed can be used to list a user's sites and create new sites.
  """
  entry = [SiteEntry]


class AclEntry(gdata.acl.data.AclEntry):
  """Google Sites ACL Entry."""


class AclFeed(gdata.acl.data.AclFeed):
  """Google Sites ACL Feed.

  The ACL feed can be used to modify the sharing permissions of a Site.
  """
  entry = [AclEntry]
Python
#!/usr/bin/python
"""
Extend gdata.service.GDataService to support authenticated CRUD ops on
Books API

http://code.google.com/apis/books/docs/getting-started.html
http://code.google.com/apis/books/docs/gdata/developers_guide_protocol.html

TODO: (here and __init__)
    * search based on label, review, or other annotations (possible?)
    * edit (specifically, Put requests) seem to fail to effect a change

Problems With API:
    * Adding a book with a review to the library adds a note, not a review.
      This does not get included in the returned item. You see this by
      looking at My Library through the website.
    * Editing a review never edits a review (unless it is freshly added, but
      see above). More generally,
    * a Put request with changed annotations (label/rating/review) does NOT
      change the data. Note: Put requests only work on the href from
      GetEditLink (as per the spec). Do not try to PUT to the annotate or
      library feeds; this will cause a 400 Invalid URI Bad Request response.
      Attempting to Post to one of the feeds with the updated annotations
      does not update them. See the following for (hopefully) a follow up:
      google.com/support/forum/p/booksearch-apis/thread?tid=27fd7f68de438fc8
    * Attempts to work around the edit problem continue to fail. For example,
      removing the item, editing the data, and re-adding the item gives us
      only our originally added data (annotations). This occurs even if we
      completely shut python down, refetch the book from the public feed, and
      re-add it. There is some kind of persistence going on that I cannot
      change. This is likely due to the annotations being cached in the
      annotation feed and the inability to edit (see Put, above)
    * GetAnnotationLink has www.books.... as the server, but hitting www...
      results in a bad URI error.
    * Spec indicates there may be multiple labels, but there does not seem to
      be a way to get the server to accept multiple labels, nor does the web
      interface have an obvious way to have multiple labels. Multiple labels
      are never returned.
""" __author__ = "James Sams <sams.james@gmail.com>" __copyright__ = "Apache License v2.0" from shlex import split import gdata.service try: import books except ImportError: import gdata.books as books BOOK_SERVER = "books.google.com" GENERAL_FEED = "/books/feeds/volumes" ITEM_FEED = "/books/feeds/volumes/" LIBRARY_FEED = "/books/feeds/users/%s/collections/library/volumes" ANNOTATION_FEED = "/books/feeds/users/%s/volumes" PARTNER_FEED = "/books/feeds/p/%s/volumes" BOOK_SERVICE = "print" ACCOUNT_TYPE = "HOSTED_OR_GOOGLE" class BookService(gdata.service.GDataService): def __init__(self, email=None, password=None, source=None, server=BOOK_SERVER, account_type=ACCOUNT_TYPE, exception_handlers=tuple(), **kwargs): """source should be of form 'ProgramCompany - ProgramName - Version'""" gdata.service.GDataService.__init__(self, email=email, password=password, service=BOOK_SERVICE, source=source, server=server, **kwargs) self.exception_handlers = exception_handlers def search(self, q, start_index="1", max_results="10", min_viewability="none", feed=GENERAL_FEED, converter=books.BookFeed.FromString): """ Query the Public search feed. q is either a search string or a gdata.service.Query instance with a query set. min_viewability must be "none", "partial", or "full". If you change the feed to a single item feed, note that you will probably need to change the converter to be Book.FromString """ if not isinstance(q, gdata.service.Query): q = gdata.service.Query(text_query=q) if feed: q.feed = feed q['start-index'] = start_index q['max-results'] = max_results q['min-viewability'] = min_viewability return self.Get(uri=q.ToUri(),converter=converter) def search_by_keyword(self, q='', feed=GENERAL_FEED, start_index="1", max_results="10", min_viewability="none", **kwargs): """ Query the Public Search Feed by keyword. Non-keyword strings can be set in q. This is quite fragile. Is there a function somewhere in the Google library that will parse a query the same way that Google does? 
Legal Identifiers are listed below and correspond to their meaning at http://books.google.com/advanced_book_search: all_words exact_phrase at_least_one without_words title author publisher subject isbn lccn oclc seemingly unsupported: publication_date: a sequence of two, two tuples: ((min_month,min_year),(max_month,max_year)) where month is one/two digit month, year is 4 digit, eg: (('1','2000'),('10','2003')). Lower bound is inclusive, upper bound is exclusive """ for k, v in kwargs.items(): if not v: continue k = k.lower() if k == 'all_words': q = "%s %s" % (q, v) elif k == 'exact_phrase': q = '%s "%s"' % (q, v.strip('"')) elif k == 'at_least_one': q = '%s %s' % (q, ' '.join(['OR "%s"' % x for x in split(v)])) elif k == 'without_words': q = '%s %s' % (q, ' '.join(['-"%s"' % x for x in split(v)])) elif k in ('author','title', 'publisher'): q = '%s %s' % (q, ' '.join(['in%s:"%s"'%(k,x) for x in split(v)])) elif k == 'subject': q = '%s %s' % (q, ' '.join(['%s:"%s"' % (k,x) for x in split(v)])) elif k == 'isbn': q = '%s ISBN%s' % (q, v) elif k == 'issn': q = '%s ISSN%s' % (q,v) elif k == 'oclc': q = '%s OCLC%s' % (q,v) else: raise ValueError("Unsupported search keyword") return self.search(q.strip(),start_index=start_index, feed=feed, max_results=max_results, min_viewability=min_viewability) def search_library(self, q, id='me', **kwargs): """Like search, but in a library feed. Default is the authenticated user's feed. Change by setting id.""" if 'feed' in kwargs: raise ValueError("kwarg 'feed' conflicts with library_id") feed = LIBRARY_FEED % id return self.search(q, feed=feed, **kwargs) def search_library_by_keyword(self, id='me', **kwargs): """Hybrid of search_by_keyword and search_library """ if 'feed' in kwargs: raise ValueError("kwarg 'feed' conflicts with library_id") feed = LIBRARY_FEED % id return self.search_by_keyword(feed=feed,**kwargs) def search_annotations(self, q, id='me', **kwargs): """Like search, but in an annotation feed. 
Default is the authenticated user's feed. Change by setting id.""" if 'feed' in kwargs: raise ValueError("kwarg 'feed' conflicts with library_id") feed = ANNOTATION_FEED % id return self.search(q, feed=feed, **kwargs) def search_annotations_by_keyword(self, id='me', **kwargs): """Hybrid of search_by_keyword and search_annotations """ if 'feed' in kwargs: raise ValueError("kwarg 'feed' conflicts with library_id") feed = ANNOTATION_FEED % id return self.search_by_keyword(feed=feed,**kwargs) def add_item_to_library(self, item): """Add the item, either an XML string or books.Book instance, to the user's library feed""" feed = LIBRARY_FEED % 'me' return self.Post(data=item, uri=feed, converter=books.Book.FromString) def remove_item_from_library(self, item): """ Remove the item, a books.Book instance, from the authenticated user's library feed. Using an item retrieved from a public search will fail. """ return self.Delete(item.GetEditLink().href) def add_annotation(self, item): """ Add the item, either an XML string or books.Book instance, to the user's annotation feed. """ # do not use GetAnnotationLink, results in 400 Bad URI due to www return self.Post(data=item, uri=ANNOTATION_FEED % 'me', converter=books.Book.FromString) def edit_annotation(self, item): """ Send an edited item, a books.Book instance, to the user's annotation feed. Note that whereas extra annotations in add_annotations, minus ratings which are immutable once set, are simply added to the item in the annotation feed, if an annotation has been removed from the item, sending an edit request will remove that annotation. This should not happen with add_annotation. 
""" return self.Put(data=item, uri=item.GetEditLink().href, converter=books.Book.FromString) def get_by_google_id(self, id): return self.Get(ITEM_FEED + id, converter=books.Book.FromString) def get_library(self, id='me',feed=LIBRARY_FEED, start_index="1", max_results="100", min_viewability="none", converter=books.BookFeed.FromString): """ Return a generator object that will return gbook.Book instances until the search feed no longer returns an item from the GetNextLink method. Thus max_results is not the maximum number of items that will be returned, but rather the number of items per page of searches. This has been set high to reduce the required number of network requests. """ q = gdata.service.Query() q.feed = feed % id q['start-index'] = start_index q['max-results'] = max_results q['min-viewability'] = min_viewability x = self.Get(uri=q.ToUri(), converter=converter) while 1: for entry in x.entry: yield entry else: l = x.GetNextLink() if l: # hope the server preserves our preferences x = self.Get(uri=l.href, converter=converter) else: break def get_annotations(self, id='me', start_index="1", max_results="100", min_viewability="none", converter=books.BookFeed.FromString): """ Like get_library, but for the annotation feed """ return self.get_library(id=id, feed=ANNOTATION_FEED, max_results=max_results, min_viewability = min_viewability, converter=converter)
Python