code stringlengths 1 1.72M | language stringclasses 1 value |
|---|---|
'''
Created on Feb 20, 2011
@author: t-bone
'''
def distinct(list):
    """Return the elements of *list* with duplicates removed, preserving
    first-occurrence order.

    The parameter name shadows the builtin ``list``; it is kept for
    backward compatibility with keyword callers.  Hashable elements are
    deduplicated via a set in O(n); unhashable elements (e.g. lists)
    fall back to the original linear membership scan.
    """
    seen = set()
    distinct_results = []
    for obj in list:
        try:
            if obj in seen:
                continue
            seen.add(obj)
        except TypeError:
            # unhashable: fall back to O(n) scan of the results so far
            if obj in distinct_results:
                continue
        distinct_results.append(obj)
    return distinct_results
'''
Created on Feb 4, 2011
@author: t-bone
'''
#from google.appengine.api import urlfetch
#if result.status_code == 200:
# parseCSV(result.content)
#import datetime
#from google.appengine.ext import db
#from google.appengine.tools import bulkloader
import csv
from datastore.models import Price, DeliveryPoint, Market, EndOfDay, Delivery, Volatility
import time
import datetime
def price(file):
    """Load end-of-day price rows from *file* (an open CSV file-like object).

    Expected columns per row: end-of-day date (mm/dd/yyyy), delivery-point
    nickname, delivery date (mm/dd/yyyy), mid, bid, offer.  One Market
    entity is written to the datastore per row.  Always returns True.
    """
    price_reader = csv.reader(file, delimiter=',', quotechar="'")
    #price_reader = csv.reader(open(file, 'rb'), delimiter=',', quotechar="'")
    #price_reader = file
    for row in price_reader:
        # Fetch-or-create the EndOfDay entity for this row's date.
        eod = time.strptime(row[0],"%m/%d/%Y")
        eod = datetime.date(eod[0],eod[1],eod[2])
        eod_query = EndOfDay.all()
        eod_instance = eod_query.filter("date =",eod).get()
        if not(eod_instance):
            eod_instance = EndOfDay(date=eod)
            eod_instance.put()
        # Resolve the delivery point by nickname.
        # NOTE(review): if the nickname is unknown, .get() below yields None
        # and the Market is stored without a delivery point -- confirm intent.
        delivery_point = DeliveryPoint.all()
        delivery_point_nickname = row[1]
        delivery_point.filter("nickname =", delivery_point_nickname)
        # A fresh Delivery entity is created for every row here, whereas
        # volatility() reuses a matching existing one -- confirm intent.
        delivery = time.strptime(row[2],"%m/%d/%Y")
        delivery = datetime.date(delivery[0],delivery[1],delivery[2])
        delivery_instance = Delivery(date=delivery,volume=1.0)
        delivery_instance.put()
        price_instance = Price(mid=float(row[3]),
                               bid=float(row[4]),
                               offer=float(row[5]))
        price_instance.put()
        market_instance = Market(eod=eod_instance,
                                 delivery_point = delivery_point.get(),
                                 price = price_instance,
                                 delivery = delivery_instance)
        market_instance.put()
    return True
def volatility(file):
    """Load end-of-day volatility rows from *file* (an open CSV file-like
    object).

    Same column layout as price(): eod date, delivery-point nickname,
    delivery date, mid, bid, offer.  Attaches the volatility to an existing
    Market entity when one matches, otherwise creates a new one.
    Always returns True.
    """
    reader = csv.reader(file, delimiter=',', quotechar="'")
    for row in reader:
        # Fetch-or-create the EndOfDay entity for this row's date.
        eod = time.strptime(row[0],"%m/%d/%Y")
        eod = datetime.date(eod[0],eod[1],eod[2])
        eod_query = EndOfDay.all()
        eod_instance = eod_query.filter("date =",eod).get()
        if not(eod_instance):
            eod_instance = EndOfDay(date=eod)
            eod_instance.put()
        # Resolve the delivery point by nickname (None if unknown).
        delivery_point = DeliveryPoint.all()
        delivery_point_nickname = row[1]
        delivery_point.filter("nickname =", delivery_point_nickname)
        delivery_point_instance = delivery_point.get()
        # moneyness is hard-coded to at-the-money (0.0) for now.
        volatility_instance = Volatility(mid=float(row[3]),
                                         bid=float(row[4]),
                                         offer=float(row[5]),
                                         moneyness=0.0)
        volatility_instance.put()
        # Fetch-or-create the Delivery entity for this delivery date.
        delivery = time.strptime(row[2],"%m/%d/%Y")
        delivery = datetime.date(delivery[0],delivery[1],delivery[2])
        delivery_query = Delivery.all().filter("date =",delivery).filter("volume =",1.0)
        delivery_instance = delivery_query.get()
        if not(delivery_instance):
            delivery_instance = Delivery(date=delivery,volume=1.0)
            delivery_instance.put()
        # Attach to a matching existing Market row, else create a new one.
        market = Market.all().filter("eod =",eod_instance)
        market.filter("delivery_point =",delivery_point_instance)
        market.filter("delivery =",delivery_instance)
        market_instance = market.get()
        if market_instance:
            market_instance.volatility = volatility_instance
        else:
            market_instance = Market(eod=eod_instance,
                                     delivery_point = delivery_point_instance,
                                     volatility = volatility_instance,
                                     delivery = delivery_instance)
        market_instance.put()
    return True
def interest_rate(file):
    """Placeholder for interest-rate CSV loading; not implemented yet."""
    pass
#price_loader()
#def prices():
#result = urlfetch.fetch(url="ftp://ftp.cmegroup.com/datamine_sample_data/eod/ECLXF001.csv",)
#print result.status_code | Python |
'''
Created on Mar 6, 2011
@author: t-bone
'''
def HVaR():
    """Placeholder for historical value-at-risk; not implemented yet."""
    pass
'''
Created on Mar 6, 2011
@author: t-bone
'''
def CaR():
    """Placeholder for capital-at-risk; not implemented yet."""
    pass
'''
Created on Mar 6, 2011
@author: t-bone
'''
import math
import random
from stats.general import percentile
from datastore import models
def VaR(portfolio, market, percent=.975, nsim=100):
    """Monte-Carlo value-at-risk of *portfolio* under *market*.

    Fixes over the previous revision: the ``market`` argument is no longer
    clobbered by a freshly constructed Market, ``nsim`` is a defaulted
    parameter instead of an undefined name (NameError before), and the
    computed VaR is actually returned.

    Returns the loss at the ``percent`` confidence level (positive = loss).
    """
    mtm = portfolio.eval(market)        # base mark-to-market
    pnl = []
    for _ in range(nsim):
        sim_market = market.shock()     # one simulated market scenario
        # NOTE(review): assumes eval() also refreshes portfolio.mtm as a
        # side effect, as the original loop body relied on -- confirm.
        portfolio.eval(sim_market)
        pnl.append(portfolio.mtm - mtm)
    return -percentile(pnl, 1 - percent)
class Market(object):
    """Snapshot of all forward curves loaded from the datastore."""

    def __init__(self):
        # BUG FIX: ``.all().filter`` was referenced without being called,
        # so the loop iterated over a bound method.  Iterate the query.
        forward_curve_query = models.ForwardCurve.all()
        self.curves = []
        for forward_curve in forward_curve_query:
            self.curves.append(ForwardCurve(forward_curve.name,
                                            forward_curve.structure))

    def shock(self):
        """Simulate lognormally shocked spot levels.

        Returns a list of ``nrisk_factor`` lists, each holding ``nsim``
        simulated levels.  NOTE(review): the risk-factor count, spots and
        vols are still hard-coded pending a real market-data hookup.
        """
        nrisk_factor = 2
        nsim = 100
        S0 = [50, 10]
        sigma = [.5, .7]
        # BUG FIX: S started as [[]] (one inner list, so S[1] raised
        # IndexError) and the update attempted arithmetic on list slices.
        # Draw the normals per factor, then map through the lognormal.
        S = [[random.gauss(0, 1) for _ in range(nsim)]
             for _ in range(nrisk_factor)]
        for i in range(nrisk_factor):
            S[i] = [S0[i] * math.exp(sigma[i] * z - (sigma[i] ** 2) / 2)
                    for z in S[i]]
        return S
class ForwardCurve(object):
    """A named forward curve built from (start_date, end_date) pairs."""
    def __init__(self,name,structure):
        self.name = name
        self.points = []
        for start_date,end_date in structure:
            # NOTE(review): CurvePoint.__init__ requires five arguments
            # (forward_curve, start_date, end_date, start_eod, end_eod) but
            # only three are passed here -- this raises TypeError as written.
            self.points.append(CurvePoint(self,start_date,end_date))
class CurvePoint(object):
    """Mid-price history for one curve point over a window of EOD dates."""
    def __init__(self,forward_curve,start_date,end_date,start_eod,end_eod):
        # Keys of all EndOfDay entities within [start_eod, end_eod].
        eod_query = models.EndOfDay.all(keys_only=True).filter('date >=',start_eod).filter('date <=',end_eod)
        # NOTE(review): start_date/end_date are accepted but never used.
        market_query = models.Market.all().filter('forward_curve =',forward_curve.name)
        market_query.filter('eod in',[eod for eod in eod_query])
        self.price = [market.price.mid for market in market_query]
#====================================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# ====================================================================
#
# This software consists of voluntary contributions made by many
# individuals on behalf of the Apache Software Foundation. For more
# information on the Apache Software Foundation, please see
# <http://www.apache.org/>.
#
import os
import re
import tempfile
import shutil
# Directory names to skip while walking the tree.
# BUG FIX: escape the dot in '.svn' so e.g. 'Xsvn' no longer matches.
ignore_pattern = re.compile(r'^(\.svn|target|bin|classes)')
# BUG FIX: anchor the extension so names like 'Foo.javax' no longer match;
# raw strings keep the regex escapes out of Python's hands.
java_pattern = re.compile(r'^.*\.java$')
# Import line that marks a source file as needing rewriting.
annot_pattern = re.compile(r'import org\.apache\.http\.annotation\.')
def process_dir(dir):
    """Recursively walk *dir*, rewriting every .java source file found.

    Directories matching ignore_pattern (VCS/build output) are skipped.
    """
    for entry in os.listdir(dir):
        path = os.path.join(dir, entry)
        if os.path.isdir(path):
            if not ignore_pattern.match(entry):
                process_dir(path)
        elif java_pattern.match(entry):
            process_source(path)
def process_source(filename):
    """Rewrite *filename*, replacing ``org.apache.http.annotation`` imports
    with ``net.jcip.annotations`` imports.

    The rewrite goes through a temp file; the original is replaced only
    when at least one line actually changed, otherwise the temp file is
    discarded.
    """
    tmpfd, tmpfile = tempfile.mkstemp()
    try:
        changed = False
        dst = os.fdopen(tmpfd, 'w')
        try:
            src = open(filename)
            try:
                for line in src:
                    if annot_pattern.match(line):
                        changed = True
                        line = line.replace('import org.apache.http.annotation.', 'import net.jcip.annotations.')
                    dst.write(line)
            finally:
                src.close()
        finally:
            dst.close()
        if changed:
            shutil.move(tmpfile, filename)
        else:
            os.remove(tmpfile)
    except:
        # BUG FIX: the previous bare except silently swallowed every error
        # (and unconditionally removed a temp file that may already have
        # been moved).  Clean up what is left, then re-raise.
        if os.path.exists(tmpfile):
            os.remove(tmpfile)
        raise
# Only walk the tree when executed as a script, not on import.
if __name__ == '__main__':
    process_dir('.')
| Python |
if False: # set to True to insert test data
    # Purge existing catalogue data first so the inserts start clean.
    store(store.product.id > 0).delete()
    store(store.category.id > 0).delete()
    # Seed one category, a handful of products, options, and merchandising
    # links -- but only when the product table is empty.
    if len(store(store.product.id > 0).select()) == 0:
        fantasy_id = store.category.insert(name='Fantasy', description='Fantasy books', small_image='testdata/hp1.jpg')
        hp1 = store.product.insert(name="Harry Potter and the Sorcerer's Stone", category=fantasy_id, price=7.91, small_image='testdata/hp1.jpg')
        hp2 = store.product.insert(name="Harry Potter and the Chamber of Secrets", category=fantasy_id, price=8.91, small_image='testdata/hp2.jpg')
        hp3 = store.product.insert(name="Harry Potter and the Prisoner of Azkaban", category=fantasy_id, price=8.91, small_image='testdata/hp3.jpg')
        hp4 = store.product.insert(name="Harry Potter and the Goblet of Fire", category=fantasy_id, price=9.91, small_image='testdata/hp4.jpg')
        hp5 = store.product.insert(name="Harry Potter and the Order of the Phoenix", category=fantasy_id, price=9.91, small_image='testdata/hp5.jpg')
        hp6 = store.product.insert(name="Harry Potter and the Half-Blood Prince", category=fantasy_id, price=9.91, small_image='testdata/hp6.jpg')
        # Options are add-ons purchasable with a product.
        store.option.insert(product=hp1, description='Bookmark', price=1.5)
        store.option.insert(product=hp1, description='Wizard hat', price=12)
        # Cross-sell the first book against every sequel.
        for p2 in (hp2, hp3, hp4, hp5, hp6):
            store.cross_sell.insert(p1=hp1, p2=p2)
        # Up-sell the paperback to the hardcover edition.
        hp1_hard = store.product.insert(name="Harry Potter and the Sorcerer's Stone [hardcover]", category=fantasy_id, price=15.91, small_image='testdata/hp1.jpg')
        store.up_sell.insert(product=hp1, better=hp1_hard)
| Python |
# Sentinel for integer fields whose value is unknown (hidden in the UI).
UNDEFINED = -1

if request.env.web2py_runtime_gae: # if running on Google App Engine
    store = DAL('gae') # connect to Google BigTable
    session.connect(request, response, db=store) # and store sessions and tickets there
else:
    # Local/standalone deployment: file-backed SQLite database.
    store = DAL("sqlite://store.db")
# Product catalogue schema -------------------------------------------------
store.define_table('category',
    Field('name'),
    Field('description', 'text'),
    Field('small_image', 'upload'),
    )
store.define_table('product',
    Field('name'),
    Field('category', store.category),
    Field('description', 'text', default=''),
    Field('small_image', 'upload'),
    Field('large_image', 'upload', default=''),
    Field('quantity_in_stock', 'integer', default=UNDEFINED), # if UNDEFINED, don't show
    Field('max_quantity', 'integer', default=0), # maximum quantity that can be purchased in an order. If 0, no limit. If UNDEFINED, don't show.
    Field('price', 'double', default=1.0),
    Field('old_price', 'double', default=0.0),
    Field('weight_in_pounds', 'double', default=1),
    Field('tax_rate_in_your_state', 'double', default=10.0),
    Field('tax_rate_outside_your_state', 'double', default=0.0),
    Field('featured', 'boolean', default=False),
    Field('allow_rating', 'boolean', default=False),
    # NOTE(review): the three counters below are integer fields with
    # string defaults ('0') -- confirm the DAL coerces these as intended.
    Field('rating', 'integer', default='0'),
    Field('viewed', 'integer', default='0'),
    Field('clicked', 'integer', default='0'))
# each product can have optional addons
store.define_table('option',
    Field('product', store.product),
    Field('description'),
    Field('price', 'double', default=1.0),
    )
# support for merchandising
# for p1 show p2, and for p2 show p1
store.define_table('cross_sell',
    Field('p1', store.product),
    Field('p2', store.product),
    )
# for product, show better, but not the reverse
store.define_table('up_sell',
    Field('product', store.product),
    Field('better', store.product),
    )
# Customer reviews attached to a product.
store.define_table('comment',
    Field('product', store.product),
    Field('author'),
    Field('email'),
    Field('body', 'text'),
    Field('rate', 'integer')
    )
# Singleton store configuration: identity, styling, and per-carrier
# shipping toggles with fixed (fc), variable (vc) and base (bc) costs.
store.define_table('info',
    Field('google_merchant_id', default='[google checkout id]', length=256),
    Field('name', default='[store name]'),
    Field('headline', default='[store headline]'),
    Field('address', default='[store address]'),
    Field('city', default='[store city]'),
    Field('state', default='[store state]'),
    Field('zip_code', default='[store zip]'),
    Field('phone', default='[store phone number]'),
    Field('fax', default='[store fax number]'),
    Field('email', requires=IS_EMAIL(), default='yourname@yourdomain.com'),
    Field('description', 'text', default='[about your store]'),
    Field('why_buy', 'text', default='[why buy at your store]'),
    Field('return_policy', 'text', default='[what is your return policy]'),
    Field('logo', 'upload', default=''),
    Field('color_background', length=10, default='white'),
    Field('color_foreground', length=10, default='black'),
    Field('color_header', length=10, default='#F6F6F6'),
    Field('color_link', length=10, default='#385ea2'),
    Field('font_family', length=32, default='arial, helvetica'),
    Field('ship_usps_express_mail', 'boolean', default=True),
    Field('ship_usps_express_mail_fc', 'double', default=0),
    Field('ship_usps_express_mail_vc', 'double', default=0),
    Field('ship_usps_express_mail_bc', 'double', default=0),
    Field('ship_usps_priority_mail', 'boolean', default=True),
    Field('ship_usps_priority_mail_fc', 'double', default=0),
    Field('ship_usps_priority_mail_vc', 'double', default=0),
    Field('ship_usps_priority_mail_bc', 'double', default=0),
    Field('ship_ups_next_day_air', 'boolean', default=True),
    Field('ship_ups_next_day_air_fc', 'double', default=0),
    Field('ship_ups_next_day_air_vc', 'double', default=0),
    Field('ship_ups_next_day_air_bc', 'double', default=0),
    Field('ship_ups_second_day_air', 'boolean', default=True),
    Field('ship_ups_second_day_air_fc', 'double', default=0),
    Field('ship_ups_second_day_air_vc', 'double', default=0),
    Field('ship_ups_second_day_air_bc', 'double', default=0),
    Field('ship_ups_ground', 'boolean', default=True),
    Field('ship_ups_ground_fc', 'double', default=0),
    Field('ship_ups_ground_vc', 'double', default=0),
    Field('ship_ups_ground_bc', 'double', default=0),
    Field('ship_fedex_priority_overnight', 'boolean', default=True),
    Field('ship_fedex_priority_overnight_fc', 'double', default=0),
    Field('ship_fedex_priority_overnight_vc', 'double', default=0),
    Field('ship_fedex_priority_overnight_bc', 'double', default=0),
    Field('ship_fedex_second_day', 'boolean', default=True),
    Field('ship_fedex_second_day_fc', 'double', default=0),
    Field('ship_fedex_second_day_vc', 'double', default=0),
    Field('ship_fedex_second_day_bc', 'double', default=0),
    Field('ship_fedex_ground', 'boolean', default=True),
    Field('ship_fedex_ground_fc', 'double', default=0),
    Field('ship_fedex_ground_vc', 'double', default=0),
    Field('ship_fedex_ground_bc', 'double', default=0)
    )
# Field validators.
# BUG FIX: product.name previously had IS_NOT_IN_DB assigned and then
# immediately overwritten by IS_NOT_EMPTY; both validators now apply.
store.category.name.requires = IS_NOT_IN_DB(store, 'category.name')
store.product.name.requires = [IS_NOT_EMPTY(), IS_NOT_IN_DB(store, 'product.name')]
store.product.category.requires = IS_IN_DB(store, 'category.id', 'category.name')
store.product.description.requires = IS_NOT_EMPTY()
store.product.quantity_in_stock.requires = IS_INT_IN_RANGE(0, 1000)
store.product.price.requires = IS_FLOAT_IN_RANGE(0, 10000)
store.product.rating.requires = IS_INT_IN_RANGE(-10000, 10000)
store.product.viewed.requires = IS_INT_IN_RANGE(0, 1000000)
store.product.clicked.requires = IS_INT_IN_RANGE(0, 1000000)
store.option.product.requires = IS_IN_DB(store, 'product.id', 'product.name')
store.cross_sell.p1.requires = IS_IN_DB(store, 'product.id', 'product.name')
store.cross_sell.p2.requires = IS_IN_DB(store, 'product.id', 'product.name')
store.up_sell.product.requires = IS_IN_DB(store, 'product.id', 'product.name')
store.up_sell.better.requires = IS_IN_DB(store, 'product.id', 'product.name')
store.comment.product.requires = IS_IN_DB(store, 'product.id', 'product.name')
store.comment.author.requires = IS_NOT_EMPTY()
store.comment.email.requires = IS_EMAIL()
store.comment.body.requires = IS_NOT_EMPTY()
store.comment.rate.requires = IS_IN_SET(range(5, 0, -1))
# BUG FIX: the suffix test used field[:-2] (everything *except* the last
# two characters), so it never matched; field[-2:] picks out the trailing
# 'fc'/'vc' of the ship_*_fc / ship_*_vc shipping-cost fields.
for field in store.info.fields:
    if field[-2:] in ['fc', 'vc']:
        store.info[field].requires = IS_FLOAT_IN_RANGE(0, 100)

# Ensure a single info record exists and keep a module-level handle to it.
if len(store(store.info.id > 0).select()) == 0:
    store.info.insert(name='[store name]')
mystore = store(store.info.id > 0).select()[0]
#
import re
# delimiter to use between words in URL
URL_DELIMITER = '-'

def pretty_url(id, name):
    """Create a pretty URL slug from a record *name* and *id*.

    Non-word characters are stripped, runs of whitespace collapse to a
    single delimiter, and the numeric id is appended last so pretty_id()
    can recover it.
    """
    # raw string so the regex escape is not interpreted by Python
    slug = ' '.join(re.sub(r'[^\w ]+', '', name).split()).replace(' ', URL_DELIMITER)
    return '%s%s%d' % (slug, URL_DELIMITER, id)
def pretty_id(url):
    """Extract the trailing numeric record id from a pretty URL."""
    # the id is everything after the last delimiter (or the whole string
    # when no delimiter is present)
    return int(url.rsplit(URL_DELIMITER, 1)[-1])
def pretty_text(s):
    """Humanize an internal name: 'default' becomes 'home', underscores
    become spaces, and the first letter is capitalized."""
    for old, new in (('default', 'home'), ('_', ' ')):
        s = s.replace(old, new)
    return s.capitalize()
def title():
    """Page title: the explicit response.title when set, otherwise a
    prettified controller name (for index pages) or function name."""
    if response.title:
        return response.title
    if request.function == 'index':
        source = request.controller
    else:
        source = request.function
    return pretty_text(source)
| Python |
###########################################################
### make sure administrator is on localhost
############################################################
import os
from gluon.contenttype import contenttype
from gluon.fileutils import check_credentials, listdir
# Everything in this controller except the login page requires an
# authorized admin session.
if not session.authorized and not request.function=='login':
    redirect(URL(r=request,f='login'))
response.view='manage.html'
# Shared navigation menu for all manage pages.
response.menu=[['manage',True,'/%s/manage/index' % (request.application)],
               ['logout',False,'/%s/manage/logout' % (request.application)],
               ['back to store',False,'/%s/default/index' % (request.application)]]
###########################################################
### list all tables in database
############################################################
def login():
    """Render the login form; on valid admin credentials mark the session
    authorized and redirect to the index page."""
    response.view='manage/login.html'
    from gluon.fileutils import check_credentials
    if check_credentials(request,'admin'):
        session.authorized=True
        redirect(URL(r=request,f='index'))
    return dict()
def logout():
    """Drop authorization and return to the public store front."""
    session.authorized=False
    redirect(URL(r=request,c='default',f='index'))
def index():
    """List every SQLDB instance found in the global environment, keyed by
    variable name, together with its tables."""
    import types as _types
    _dbs={}
    for _key,_value in globals().items():
        try:
            if _value.__class__==SQLDB:
                tables=_dbs[_key]=[]
                for _tablename in _value.tables():
                    tables.append((_key,_tablename))
        # NOTE(review): bare except hides real errors; it exists to skip
        # globals that blow up on attribute access.
        except: pass
    return dict(dbs=_dbs)
###########################################################
### insert a new record
############################################################
def insert():
    """Render and process an insert form for the db/table named in
    request.args; redirects to select() on success."""
    try:
        dbname=request.args[0]
        # SECURITY NOTE: eval of a request argument -- tolerated because
        # this controller is admin-only, but a whitelist would be safer.
        db=eval(dbname)
        table=request.args[1]
        form=SQLFORM(db[table])
    except: redirect(URL(r=request,f='index'))
    if form.accepts(request.vars,session):
        response.flash='new record inserted'
        redirect(URL(r=request,f='select',args=request.args))
    elif len(request.vars):
        # BUG FIX: user-facing grammar ("are error" -> "are errors")
        response.flash='There are errors in your submission form'
    return dict(form=form)
###########################################################
### list all records in table and insert new record
############################################################
def download():
    """Return the raw bytes of an uploaded file, with a MIME type guessed
    from the filename.

    SECURITY NOTE(review): request.args[0] is joined into the uploads path
    without sanitisation -- check for directory traversal.
    """
    filename=request.args[0]
    response.headers['Content-Type']=contenttype(filename)
    return open(os.path.join(request.folder,'uploads/','%s' % filename),'rb').read()
def csv():
    """Export the records matching request.vars.query as CSV text."""
    import gluon.contenttype, csv, cStringIO
    response.headers['Content-Type']=gluon.contenttype.contenttype('.csv')
    try:
        dbname=request.vars.dbname
        # SECURITY NOTE(review): eval of a request variable; admin-only.
        db=eval(dbname)
        records=db(request.vars.query).select()
    except: redirect(URL(r=request,f='index'))
    s=cStringIO.StringIO()
    writer = csv.writer(s)
    writer.writerow(records.colnames)
    c=range(len(records.colnames))
    for i in range(len(records)):
        # NOTE(review): 'records.response[i][j]' -- presumably the raw row
        # tuples behind the Rows object; confirm against the DAL version.
        writer.writerow([records.response[i][j] for j in c])
    ### FILL HERE
    return s.getvalue()
def import_csv(table,file):
    """Bulk-insert rows from a web2py CSV export into *table*.

    The first line holds 'table.column' header names; the id column is
    skipped so the database can assign fresh ids.
    """
    import csv
    colnames=None
    keep=[]
    for record in csv.reader(file):
        if not colnames:
            # strip the leading 'table.' prefix from each header name
            colnames=[name[name.find('.')+1:] for name in record]
            keep=[idx for idx,name in enumerate(colnames) if name!='id']
        else:
            table.insert(**dict([(colnames[idx],record[idx]) for idx in keep]))
def select():
    """Browse, CSV-import, bulk-delete or bulk-update records.

    Handles an optional csv upload, a delete-all confirmation, an update
    string applied via exec, and a paginated listing (100 rows per page).
    """
    try:
        dbname=request.args[0]
        # SECURITY NOTE: eval/exec of request data; admin-only controller.
        db=eval(dbname)
        if not request.vars.query:
            table=request.args[1]
            query='%s.id>0' % table
        else: query=request.vars.query
    except: redirect(URL(r=request,f='index'))
    if request.vars.csvfile!=None:
        try:
            # NOTE(review): ``table`` is unbound here when an explicit
            # query was supplied; the except below masks that case.
            import_csv(db[table],request.vars.csvfile.file)
            response.flash='data uploaded'
        # BUG FIX: was 'reponse.flash' -- a NameError that replaced the
        # intended error message with a ticket.
        except: response.flash='unable to parse csv file'
    if request.vars.delete_all and request.vars.delete_all_sure=='yes':
        try:
            db(query).delete()
            response.flash='records deleted'
        except: response.flash='invalid SQL FILTER'
    elif request.vars.update_string:
        try:
            env=dict(db=db,query=query)
            exec('db(query).update('+request.vars.update_string+')') in env
            response.flash='records updated'
        except: response.flash='invalid SQL FILTER or UPDATE STRING'
    if request.vars.start: start=int(request.vars.start)
    else: start=0
    limitby=(start,start+100)
    try:
        records=db(query).select(limitby=limitby)
    except:
        response.flash='invalid SQL FILTER'
        return dict(records='no records',nrecords=0,query=query,start=0)
    linkto=URL(r=request,f='update/%s'% (dbname))
    upload=URL(r=request,f='download')
    return dict(start=start,query=query,\
                nrecords=len(records),\
                records=SQLTABLE(records,linkto,upload,_class='sortable'))
###########################################################
### edit delete one record
############################################################
def update():
    """Edit or delete a single record identified by db/table/id in
    request.args; falls back to the listing on any lookup failure."""
    try:
        dbname=request.args[0]
        # SECURITY NOTE(review): eval of a request argument; admin-only.
        db=eval(dbname)
        table=request.args[1]
    except: redirect(URL(r=request,f='index'))
    try:
        id=int(request.args[2])
        record=db(db[table].id==id).select()[0]
    except: redirect(URL(r=request,f='select/%s/%s'%(dbname,table)))
    form=SQLFORM(db[table],record,deletable=True,
                 linkto=URL(r=request,f='select/'+dbname),
                 upload=URL(r=request,f='download/'))
    if form.accepts(request.vars,session):
        response.flash='done!'
        redirect(URL(r=request,f='select/%s/%s'%(dbname,table)))
    return dict(form=form)
def cleanup():
    """Delete every cache, error-ticket and session file for this app,
    flash a confirmation, and return to the index page."""
    app=request.application
    for subdir in ('cache', 'errors', 'sessions'):
        for path in listdir('applications/%s/%s/' % (app, subdir), '', 0):
            os.unlink(path)
    session.flash="cache, errors and sessions cleaned"
    redirect(URL(r=request,f='index'))
def setup():
    """Render the store-configuration form bound to the singleton info
    record; flash a confirmation on success."""
    response.view='manage/setup.html'
    form=SQLFORM(store.info,mystore)
    if form.accepts(request.vars,session):
        # BUG FIX: user-facing typo ("vist" -> "visit")
        response.flash='that was easy! now go visit your store.'
    else:
        response.flash='welcome to the store-in-a-stick setup'
    return dict(form=form)
###########################################################
### make sure administrator is on localhost
############################################################
import os, socket, datetime,copy
import gluon.contenttype
import gluon.fileutils
### critical --- make a copy of the environment before controller locals
### pollute it; queries are later evaluated against this snapshot.
global_env=copy.copy(globals())
global_env['datetime']=datetime
http_host = request.env.http_host.split(':')[0]
remote_addr = request.env.remote_addr
# Resolve the caller; on failure fall back to the host name only.
try: hosts=(http_host, socket.gethostbyname(remote_addr))
except: hosts=(http_host,)
# Localhost-only guard is currently disabled (pass instead of HTTP 400).
if remote_addr not in hosts:
    pass #raise HTTP(400)
if not gluon.fileutils.check_credentials(request):
    redirect('/admin')
response.view='appadmin.html'
response.menu=[[T('design'),False,URL('admin','default','design',
                args=[request.application])],
               [T('db'),False,URL(r=request,f='index')],
               [T('state'),False,URL(r=request,f='state')]]
###########################################################
### auxiliary functions
############################################################
def get_databases(request):
    """Collect every GQLDB/SQLDB instance from the captured global
    environment, keyed by variable name.  *request* is unused."""
    dbs={}
    for key,value in global_env.items():
        cond=False
        # GQLDB only exists on GAE; elsewhere the isinstance raises and we
        # fall back to checking for SQLDB.
        try: cond=isinstance(value,GQLDB)
        except: cond=isinstance(value,SQLDB)
        if cond: dbs[key]=value
    return dbs

# Module-level registry of known databases, built once per request.
databases=get_databases(None)
def eval_in_global_env(text):
    """Evaluate *text* in the captured global environment and return the
    result.  SECURITY NOTE: executes arbitrary admin-supplied code."""
    exec('_ret=%s'%text,{},global_env)
    return global_env['_ret']
def get_database(request):
    """Resolve request.args[0] to a known database object, or flash an
    error and bounce to the index page."""
    if request.args and request.args[0] in databases:
        return eval_in_global_env(request.args[0])
    else:
        session.flash=T('invalid request')
        redirect(URL(r=request,f='index'))
def get_table(request):
    """Return (db, tablename) for request.args[0:2], or flash an error and
    bounce to the index page when the table is unknown."""
    db=get_database(request)
    if len(request.args)>1 and request.args[1] in db.tables:
        return db,request.args[1]
    else:
        session.flash=T('invalid request')
        redirect(URL(r=request,f='index'))
def get_query(request):
    """Evaluate request.vars.query into a DAL query object; None when the
    expression is missing or invalid."""
    try:
        return eval_in_global_env(request.vars.query)
    except Exception:
        return None
###########################################################
### list all databases and tables
############################################################
def index():
    """Landing page: expose the registry of known databases to the view."""
    return dict(databases=databases)
###########################################################
### insert a new record
############################################################
def insert():
    """Render and process an insert form for the db/table in request.args."""
    db,table=get_table(request)
    form=SQLFORM(db[table])
    if form.accepts(request.vars,session):
        response.flash=T('new record inserted')
    return dict(form=form)
###########################################################
### list all records in table and insert new record
############################################################
def download():
    """Serve an uploaded file.

    On GAE the blob lives in the table row itself (uploadfield), otherwise
    the file is streamed from the application's uploads folder.
    SECURITY NOTE(review): filename comes from request.args and is joined
    into the uploads path -- check for directory traversal.
    """
    import os
    db=get_database(request)
    filename=request.args[1]
    # BUG FIX: removed stray debug statement ``print filename``.
    ### for GAE only ###
    table,field=filename.split('.')[:2]
    if table in db.tables and field in db[table].fields:
        uploadfield=db[table][field].uploadfield
        if isinstance(uploadfield,str):
            from gluon.contenttype import contenttype
            response.headers['Content-Type']=contenttype(filename)
            rows=db(db[table][field]==filename).select()
            return rows[0][uploadfield]
    ### end for GAE ###
    path=os.path.join(request.folder,'uploads/',filename)
    return response.stream(open(path,'rb'))
def csv():
    """Download the records matching request.vars.query as a CSV
    attachment named after the db and table."""
    import gluon.contenttype
    response.headers['Content-Type']=gluon.contenttype.contenttype('.csv')
    # BUG FIX: ``db`` was never bound in this function (NameError at the
    # final select); resolve it from the request like the other actions.
    db=get_database(request)
    query=get_query(request)
    if not query: return None
    response.headers['Content-disposition']="attachment; filename=%s_%s.csv"%\
        tuple(request.vars.query.split('.')[:2])
    return str(db(query).select())
def import_csv(table,file):
    """Insert rows from a web2py CSV export into *table*; the first line
    holds 'table.column' headers and the id column is ignored."""
    import csv
    header=None
    wanted=[]
    for row in csv.reader(file):
        if not header:
            # drop everything up to and including the first '.' so only
            # the bare column name remains
            header=[col.split('.',1)[-1] for col in row]
            wanted=[i for i,name in enumerate(header) if name!='id']
        else:
            values=dict((header[i],row[i]) for i in wanted)
            table.insert(**values)
def select():
    """Query browser: list, CSV-import, bulk-update or bulk-delete records.

    Shorthand queries like 'table.field=123' are rewritten into full DAL
    expressions; results are paginated 100 at a time and the last query /
    ordering is remembered in the session (re-selecting the same column
    toggles between ascending and descending).
    """
    import re
    db=get_database(request)
    dbname=request.args[0]
    # shorthand 'table.field=value' -> 'db.table.field==value'
    regex=re.compile('(?P<table>\w+)\.(?P<field>\w+)=(?P<value>\d+)')
    if request.vars.query:
        match=regex.match(request.vars.query)
        if match: request.vars.query='%s.%s.%s==%s' % (request.args[0],match.group('table'),match.group('field'),match.group('value'))
    else:
        request.vars.query=session.last_query
    query=get_query(request)
    if request.vars.start: start=int(request.vars.start)
    else: start=0
    nrows=0
    stop=start+100
    table=None
    rows=[]
    orderby=request.vars.orderby
    if orderby:
        orderby=dbname+'.'+orderby
        # clicking the same column twice flips the sort direction
        if orderby==session.last_orderby:
            if orderby[0]=='~': orderby=orderby[1:]
            else: orderby='~'+orderby
    session.last_orderby=orderby
    session.last_query=request.vars.query
    form=FORM(TABLE(TR('Query:','',INPUT(_style='width:400px',_name='query',_value=request.vars.query or '',requires=IS_NOT_EMPTY())),
                    TR('Update:',INPUT(_name='update_check',_type='checkbox',value=False),
                       INPUT(_style='width:400px',_name='update_fields',_value=request.vars.update_fields or '')),
                    TR('Delete:',INPUT(_name='delete_check',_class='delete',_type='checkbox',value=False),''),
                    TR('','',INPUT(_type='submit',_value='submit'))))
    if request.vars.csvfile!=None:
        try:
            import_csv(db[request.vars.table],request.vars.csvfile.file)
            response.flash=T('data uploaded')
        except:
            response.flash=T('unable to parse csv file')
    if form.accepts(request.vars,formname=None):
        # detect the plain 'db.table.id>0' form so the view can offer
        # table-specific actions
        regex=re.compile(request.args[0]+'\.(?P<table>\w+)\.id\>0')
        match=regex.match(form.vars.query.strip())
        if match: table=match.group('table')
        try:
            nrows=db(query).count()
            if form.vars.update_check and form.vars.update_fields:
                # SECURITY NOTE(review): update fields are evaluated in the
                # captured global env; admin-only by design.
                db(query).update(**eval_in_global_env('dict(%s)'%form.vars.update_fields))
                response.flash=T('%s rows updated',nrows)
            elif form.vars.delete_check:
                db(query).delete()
                response.flash=T('%s rows deleted',nrows)
            nrows=db(query).count()
            if orderby: rows=db(query).select(limitby=(start,stop), orderby=eval_in_global_env(orderby))
            else: rows=db(query).select(limitby=(start,stop))
        except:
            rows,nrows=[],0
            response.flash=T('Invalid Query')
    return dict(form=form,table=table,start=start,stop=stop,nrows=nrows,rows=rows,query=request.vars.query)
###########################################################
### edit delete one record
############################################################
def update():
    """Edit or delete one record (db/table/id from request.args); on any
    failure or completion return to the record listing."""
    db,table=get_table(request)
    try:
        id=int(request.args[2])
        record=db(db[table].id==id).select()[0]
    except:
        session.flash=T('record does not exist')
        redirect(URL(r=request,f='select',args=request.args[:1],vars=dict(query='%s.%s.id>0'%tuple(request.args[:2]))))
    form=SQLFORM(db[table],record,deletable=True,
                 linkto=URL(r=request,f='select',args=request.args[:1]),
                 upload=URL(r=request,f='download',args=request.args[:1]))
    if form.accepts(request.vars,session):
        response.flash=T('done!')
        redirect(URL(r=request,f='select',args=request.args[:1],vars=dict(query='%s.%s.id>0'%tuple(request.args[:2]))))
    return dict(form=form)
###########################################################
### get global variables
############################################################
def state():
    """Empty action; the view renders the request/session/response state."""
    return dict()
# First visit in this session: create an empty cart and zero the balance.
# NOTE(review): structure reconstructed from a whitespace-mangled source;
# the merchant id is assumed to be initialised together with the cart.
if not session.cart:
    # instantiate new cart
    session.cart, session.balance = [], 0
    session.google_merchant_id = mystore.google_merchant_id

# Top navigation; the second element of each entry marks the active page.
response.menu = [
    ['Store Front', request.function == 'index', URL(r=request, f='index')],
    ['About Us', request.function == 'aboutus', URL(r=request, f='aboutus')],
    ['Contact Us', request.function == 'contactus', URL(r=request, f='contactus')],
    ['Shopping Cart $%.2f' % float(session.balance), request.function == 'checkout', URL(r=request, f='checkout')]
]
def index():
    """Store front: all categories (alphabetical) plus featured products."""
    categories = store().select(store.category.ALL, orderby=store.category.name)
    featured = store(store.product.featured == True).select()
    return dict(categories=categories,featured=featured)
def category():
    """Category listing with optional pagination args (start, stop).

    Featured products are shown only on the first page and are excluded
    from the regular (favourites) listing to avoid duplicates.
    """
    if not request.args: redirect(URL(r=request, f='index'))
    category_id = pretty_id(request.args[0])
    if len(request.args) == 3:
        # pagination
        start, stop = int(request.args[1]), int(request.args[2])
    else:
        start, stop = 0, 20
    categories = store().select(store.category.ALL, orderby=store.category.name)
    # Resolve the category name (and page title) from the id; an unknown
    # id falls back to the store front.
    category_name = None
    for category in categories:
        if category.id == category_id:
            response.title = category_name = category.name
    if not category_name: redirect(URL(r=request, f='index'))
    if start == 0:
        featured = store(store.product.featured == True)(store.product.category == category_id).select()
    else:
        featured = []
    ids = [p.id for p in featured]
    favourites = store(store.product.category == category_id).select(limitby=(start, stop))
    favourites = [f for f in favourites if f.id not in ids]
    return dict(category_name=category_name, categories=categories, featured=featured, favourites=favourites)
def product():
    """Product detail page: add-to-cart form, comment form, and up-sell /
    cross-sell suggestions.  Also bumps the product view counter."""
    if not request.args: redirect(URL(r=request, f='index'))
    product_id = pretty_id(request.args[0])
    products = store(store.product.id == product_id).select()
    if not products: redirect(URL(r=request, f='index'))
    product = products[0]
    response.title = product.name
    product.update_record(viewed=product.viewed+1)
    options = store(store.option.product == product.id).select()
    # Add-to-cart form: one checkbox per option plus a quantity field; the
    # onchange hook updates the displayed total client-side.
    product_form = FORM(
        TABLE(
            [TR(TD(INPUT(_name='option', _value=option.id, _type='checkbox', _onchange="update_price(this, %.2f)" % option.price), option.description), H3('$%.2f' % option.price)) for option in options],
            TR(
                'Price:',
                H2('$%.2f' % float(product.price), _id='total_price')
            ),
            BR(),
            TH('Qty:', INPUT(_name='quantity', _class='integer', _value=1, _size=1)), INPUT(_type='submit', _value='Add to cart'),
        )
    )
    if product_form.accepts(request.vars, session):
        quantity = int(product_form.vars.quantity)
        # normalise the checkbox value(s) to a list of option ids
        option_ids = product_form.vars.option
        if not isinstance(option_ids, list):
            option_ids = [option_ids] if option_ids else []
        option_ids = [int(o) for o in option_ids]
        product.update_record(clicked=product.clicked+1)
        session.cart.append((product_id, quantity, option_ids))
        redirect(URL(r=request, f='checkout'))
    # post a comment about a product
    comment_form = SQLFORM(store.comment, fields=['author', 'email', 'body', 'rate'])
    comment_form.vars.product = product.id
    if comment_form.accepts(request.vars, session):
        # fold the new rate into the running average rating
        # NOTE(review): t/(nc+1) is integer division under Python 2, so the
        # stored rating is truncated -- confirm intent.
        nc = store(store.comment.product == product.id).count()
        t = products[0].rating*nc + int(comment_form.vars.rate)
        products[0].update_record(rating=t/(nc+1))
        response.flash = 'comment posted'
    if comment_form.errors: response.flash = 'invalid comment'
    comments = store(store.comment.product == product.id).select()
    # merchandising: up-sells plus cross-sells in either direction
    better_ids = [row.better for row in store(store.up_sell.product == product.id).select(store.up_sell.better)]
    related_ids = [row.p2 for row in store(store.cross_sell.p1 == product.id).select()] + [row.p1 for row in store(store.cross_sell.p2 == product.id).select()]
    suggested = [store.product[id] for id in better_ids + related_ids] # XXXstore(store.product.id.belongs(better_ids + related_ids)).select()
    return dict(product=product, comments=comments, options=options, suggested=suggested, product_form=product_form, comment_form=comment_form)
# NOTE(review): leftover view-template fragment kept as a no-op string
# literal; it is never rendered from here and belongs in the view file.
"""
{{ if product.old_price: }}
<b>was ${{= '%.2f' % float(product.old_price) }}</b>
{{ pass }}
</form>
"""
def remove_from_cart():
    """Remove the cart entry at index request.args[0], then return to the
    checkout page (which recomputes the balance)."""
    # remove product from cart
    del session.cart[int(request.args[0])]
    redirect(URL(r=request, f='checkout'))
def empty_cart():
    """Empty the cart of all products, reset the running balance, and
    return to the checkout page."""
    # BUG FIX: session.cart is a plain list and list.clear() does not
    # exist in Python 2 (AttributeError).  Rebind to a fresh list instead.
    session.cart = []
    session.balance = 0
    redirect(URL(r=request, f='checkout'))
def checkout():
    """Assemble the order summary for the checkout page.

    Walks the session cart, prices each line item (product plus selected
    options, times quantity), accumulates the grand total into
    session.balance and hands the order rows to the view.
    """
    order = []
    balance = 0
    for product_id, qty, option_ids in session.cart:
        matches = store(store.product.id == product_id).select()
        if not matches:
            # product no longer exists in the catalog; skip this entry
            continue
        product = matches[0]
        # XXX could use store(store.option.id.belongs(option_ids)).select()
        options = [store.option[oid] for oid in option_ids]
        line_total = qty * (product.price + sum([o.price for o in options]))
        order.append((product_id, qty, line_total, product, options))
        balance += line_total
    session.balance = balance # XXX is updating in time?
    return dict(order=order, merchant_id=session.google_merchant_id)
def popup():
    """Render the popup view; no template variables are needed."""
    return {}
def show():
    """Stream an uploaded file back to the client.

    Rebuilds the file path from the request args, sets the Content-Type
    header from the filename extension, and returns the raw bytes.
    Disabling the session id keeps the response cacheable.
    """
    response.session_id = None
    import gluon.contenttype, os
    filename = '/'.join(request.args)
    response.headers['Content-Type'] = gluon.contenttype.contenttype(filename)
    # XXX is this path going to be a problem on Windows?
    # use a context manager so the file handle is always released
    # (the original leaked the handle via open(...).read())
    with open(os.path.join(request.folder, 'uploads', filename), 'rb') as f:
        return f.read()
def aboutus():
    """Static "about us" page; the view needs no variables."""
    return {}
def contactus():
    """Static "contact us" page; the view needs no variables."""
    return {}
| Python |
#!/usr/bin/python
# Copyright 2011 Google, Inc. All Rights Reserved.
# simple script to walk source tree looking for third-party licenses
# dumps resulting html page to stdout
import os, re, mimetypes, sys
# read source directories to scan from command line
# directories to scan, taken from the command-line arguments
SOURCE = sys.argv[1:]
# regex to find /* */ style comment blocks; non-greedy so adjacent blocks
# stay separate, DOTALL so a block may span multiple lines
COMMENT_BLOCK = re.compile(r"(/\*.+?\*/)", re.MULTILINE | re.DOTALL)
# regexes used to detect whether a comment block is a license notice
COMMENT_LICENSE = re.compile(r"(license)", re.IGNORECASE)
COMMENT_COPYRIGHT = re.compile(r"(copyright)", re.IGNORECASE)
# mime types that are never scanned for embedded license comments
EXCLUDE_TYPES = [
    "application/xml",
    "image/png",
]
# list of known licenses; keys are derived by stripping all whitespace and
# forcing to lowercase to help combine multiple files that have same license.
KNOWN_LICENSES = {}
class License:
    """A distinct license text together with the files it covers."""
    def __init__(self, license_text):
        self.license_text = license_text
        self.filenames = []
    def add_file(self, filename):
        """Record *filename* as covered by this license (no duplicates)."""
        if filename in self.filenames:
            return
        self.filenames.append(filename)
# strips every non-word character so whitespace/punctuation differences
# between otherwise-identical license texts collapse to the same key
LICENSE_KEY = re.compile(r"[^\w]")
def find_license(license_text):
    """Return the canonical License object for *license_text*.

    The lookup key is the text lowercased with all non-word characters
    stripped, so trivially reformatted copies of the same license map to
    a single entry in KNOWN_LICENSES.
    """
    # TODO(alice): a lot these licenses are almost identical Apache licenses.
    # Most of them differ in origin/modifications. Consider combining similar
    # licenses.
    key = LICENSE_KEY.sub("", license_text).lower()
    return KNOWN_LICENSES.setdefault(key, License(license_text))
def discover_license(exact_path, filename):
    """Scan one file for license text and register it in KNOWN_LICENSES.

    Two strategies:
    - a file named ``*LICENSE`` is assumed to hold the license for the
      similarly named file next to it (the prefix before "LICENSE"),
    - otherwise the raw file content is searched for /* ... */ comment
      blocks that mention both "license" and "copyright".
    """
    # when filename ends with LICENSE, assume applies to filename prefixed
    if filename.endswith("LICENSE"):
        with open(exact_path) as file:
            license_text = file.read()
        target_filename = filename[:-len("LICENSE")]
        if target_filename.endswith("."): target_filename = target_filename[:-1]
        find_license(license_text).add_file(target_filename)
        return None
    # guess_type() returns a (type, encoding) tuple; the original compared
    # the whole tuple against the string list, so EXCLUDE_TYPES never matched
    mimetype = mimetypes.guess_type(filename)[0]
    if mimetype in EXCLUDE_TYPES: return None
    # try searching for license blocks in raw file
    with open(exact_path) as file:
        raw_file = file.read()
    # include comments that have both "license" and "copyright" in the text
    for comment in COMMENT_BLOCK.finditer(raw_file):
        comment = comment.group(1)
        if COMMENT_LICENSE.search(comment) is None: continue
        if COMMENT_COPYRIGHT.search(comment) is None: continue
        find_license(comment).add_file(filename)
# walk each requested source tree and scan every file for license text
for source in SOURCE:
    for root, dirs, files in os.walk(source):
        for name in files:
            discover_license(os.path.join(root, name), name)
# emit the HTML report: one section per distinct license, listing the files
# it covers followed by the license text itself
# NOTE(review): filenames and license text are interpolated without HTML
# escaping -- confirm inputs cannot contain markup
print "<html><head><style> body { font-family: sans-serif; } pre { background-color: #eeeeee; padding: 1em; white-space: pre-wrap; } </style></head><body>"
for license in KNOWN_LICENSES.values():
    print "<h3>Notices for files:</h3><ul>"
    # sort for deterministic, readable output
    filenames = license.filenames
    filenames.sort()
    for filename in filenames:
        print "<li>%s</li>" % (filename)
    print "</ul>"
    print "<pre>%s</pre>" % license.license_text
print "</body></html>"
| Python |
from distutils.core import setup
# minimal distutils manifest for publishing the webscraping package
setup(
    name='webscraping',
    version='1',
    packages=['webscraping'],
    package_dir={'webscraping':'.'}, # look for package contents in current directory
    author='Richard Penman',
    author_email='richard@sitescraper.net',
    description='Pure python library aimed to make web scraping easier',
    url='http://code.google.com/p/webscraping',
    license='lgpl',
)
| Python |
__doc__ = """
pdict has a dictionary like interface and a sqlite backend
It uses pickle to store Python objects and strings, which are then compressed
Multithreading is supported
"""
from datetime import datetime
import sqlite3
import zlib
import threading
try:
import cPickle as pickle
except ImportError:
import pickle
class PersistentDict(object):
    """stores and retrieves persistent data through a dict-like interface
    data is stored compressed on disk using sqlite3
    """
    def __init__(self, filename=':memory:', compress_level=6, cache_timeout=None, sqlite_timeout=1000):
        """initialize a new PersistentDict with the specified database file.
        filename: where to store sqlite database. Uses in memory by default.
        compress_level: between 1-9 (in my test levels 1-3 produced a 1300kb file in ~7 seconds while 4-9 a 288kb file in ~9 seconds)
        cache_timeout: a timedelta object of how old data can be. By default is set to None to disable.
        sqlite_timeout: how long should a thread wait for sqlite database to be ready
        """
        # isolation_level=None runs sqlite in autocommit mode so each write
        # is persisted immediately; PARSE_DECLTYPES converts the timestamp
        # columns back to datetime objects when read
        self._conn = sqlite3.connect(filename, timeout=sqlite_timeout, isolation_level=None, detect_types=sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES)
        # decode stored text as UTF-8, replacing any undecodable bytes
        self._conn.text_factory = lambda x: unicode(x, 'utf-8', 'replace')
        # single-table schema: pickled+compressed value and meta blobs with
        # creation/update timestamps used for cache-expiry checks
        sql = """
        CREATE TABLE IF NOT EXISTS config (
            key TEXT NOT NULL PRIMARY KEY UNIQUE,
            value BLOB,
            meta BLOB,
            created timestamp DEFAULT (datetime('now', 'localtime')),
            updated timestamp DEFAULT (datetime('now', 'localtime'))
        );
        """
        self._conn.execute(sql)
        # NOTE(review): this index duplicates the PRIMARY KEY lookup on key
        self._conn.execute("CREATE INDEX IF NOT EXISTS keys ON config (key);")
        self.compress_level = compress_level
        self.timeout = cache_timeout
    def __contains__(self, key):
        """check the database to see if a key exists (and is still fresh)
        """
        row = self._conn.execute("SELECT updated FROM config WHERE key=?;", (key,)).fetchone()
        # may return a falsy non-bool (None); the `in` operator coerces it
        return row and self.is_fresh(row[0])
    def __getitem__(self, key):
        """return the value of the specified key or raise KeyError if not found
        """
        row = self._conn.execute("SELECT value, updated FROM config WHERE key=?;", (key,)).fetchone()
        if row:
            if self.is_fresh(row[1]):
                return self.deserialize(row[0])
            else:
                # entry exists but is older than the cache timeout
                raise KeyError("Key `%s' is stale" % key)
        else:
            raise KeyError("Key `%s' does not exist" % key)
    def __setitem__(self, key, value):
        """set the value of the specified key
        """
        try:
            # optimistic insert (EAFP): fails if the key already exists
            self._conn.execute("INSERT INTO config (key, value, meta) VALUES(?, ?, ?);", (key, self.serialize(value), self.serialize({})))
        except sqlite3.IntegrityError:
            # already exists, so update the value and refresh the timestamp
            self._conn.execute("UPDATE config SET value=?, updated=? WHERE key=?;", (self.serialize(value), datetime.now(), key))
    def __delitem__(self, key):
        """remove the specifed value from the database
        """
        self._conn.execute("DELETE FROM config WHERE key=?;", (key,))
    def serialize(self, value):
        """convert object to a compressed pickled string to save in the db
        """
        return sqlite3.Binary(zlib.compress(pickle.dumps(value, protocol=pickle.HIGHEST_PROTOCOL), self.compress_level))
    def deserialize(self, value):
        """convert compressed pickled string from database back into an object
        (returns None for an empty/NULL blob)
        """
        if value:
            return pickle.loads(zlib.decompress(value))
    def keys(self):
        """returns a generator of each key in the database
        """
        c = self._conn.cursor()
        c.execute("SELECT key FROM config;")
        for row in c:
            yield row[0]
    def is_fresh(self, t):
        """returns whether this datetime has expired
        (always fresh when no cache timeout is configured)
        """
        return self.timeout is None or datetime.now() - t < self.timeout
    def get(self, key, default=None):
        """Get data at key and return default if not defined

        Unlike __getitem__ this returns the full record as a dict with
        'value', 'meta', 'created' and 'updated' entries, and ignores
        freshness.
        """
        data = default
        if key:
            row = self._conn.execute("SELECT value, meta, created, updated FROM config WHERE key=?;", (key,)).fetchone()
            if row:
                data = dict(
                    value=self.deserialize(row[0]),
                    meta=self.deserialize(row[1]),
                    created=row[2],
                    updated=row[3]
                )
        return data
    def set(self, key, new_data):
        """set the data for the specified key
        data is a dict {'value': ..., 'meta': ..., 'created': ..., 'updated': ...}

        NOTE(review): assumes the key already exists -- get() returns None
        for a missing key, making current_data.update() raise
        AttributeError, and the UPDATE below would affect no rows anyway.
        Confirm callers only use this on existing keys.
        """
        current_data = self.get(key)
        current_data.update(new_data)
        value = self.serialize(current_data.get('value'))
        meta = self.serialize(current_data.get('meta'))
        created = current_data.get('created')
        updated = current_data.get('updated')
        # already exists, so update
        self._conn.execute("UPDATE config SET value=?, meta=?, created=?, updated=? WHERE key=?;", (value, meta, created, updated, key))
    def meta(self, key, value=None):
        """Set or get the meta attribute

        With value=None the stored meta object is returned (KeyError when
        the key is missing); otherwise meta is replaced and the `updated`
        timestamp refreshed.
        """
        if value is None:
            # want to get meta
            row = self._conn.execute("SELECT meta FROM config WHERE key=?;", (key,)).fetchone()
            if row:
                return self.deserialize(row[0])
            else:
                raise KeyError("Key `%s' does not exist" % key)
        else:
            # want to set meta
            self._conn.execute("UPDATE config SET meta=?, updated=? WHERE key=?;", (self.serialize(value), datetime.now(), key))
    def clear(self):
        """Clear all cached data
        """
        self._conn.execute("DELETE FROM config;")
    def merge(self, db, override=False):
        """Merge another database's content into this one
        override determines whether to override existing keys
        """
        for key in db.keys():
            if override or key not in self:
                self[key] = db[key]
if __name__ == '__main__':
    # test performance of compression and verify stored data is correct
    import os
    import time
    key = 'key'
    input = 'abc' * 100000
    for compress_level in range(1, 10):
        print 'Compression:', compress_level
        start = time.time()
        file = 'persistent%d.db' % compress_level
        # remove any database left over from a previous run
        try:
            os.remove(file)
        except OSError:
            pass
        p = PersistentDict(file, compress_level)
        p[key] = input
        print 'Time: %.2f seconds' % (time.time() - start)
        print 'Size: %d bytes' % os.path.getsize(file)
        print
        # round-trip check: key visible and value identical after storage
        assert key in p
        assert input == p[key]
        del p[key]
| Python |
__doc__ = 'High level functions for extracting and storing data'
import os
import re
import csv
import math
import logging
from collections import defaultdict
import common
import settings
import xpath
def get_excerpt(html, try_meta=False, max_chars=255):
    """Extract excerpt from this HTML by finding largest text block

    try_meta indicates whether to try extracting from meta description tag
    max_chars is the maximum number of characters for the excerpt
    """
    excerpt = ''
    # the meta description tag, when present, is usually a good summary
    if try_meta:
        excerpt = xpath.get(html, '/html/head/meta[@name="description"]/@content')
    if not excerpt:
        # strip structural tags and then take the longest line of body text
        bad_tags = 'hr', 'br', 'script', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6'
        content = common.remove_tags(xpath.get(html, '/html/body', remove=bad_tags))
        if content:
            best_len, best_line = max((len(line.strip()), line) for line in content.splitlines())
            excerpt = best_line
    return common.unescape(excerpt.strip())[:max_chars]
def extract_emails(html):
    """Extract emails and look for common obfuscations

    >>> extract_emails('')
    []
    >>> extract_emails('hello richard@sitescraper.net world')
    ['richard@sitescraper.net']
    >>> extract_emails('hello richard@<!-- trick comment -->sitescraper.net world')
    ['richard@sitescraper.net']
    >>> extract_emails('hello richard AT sitescraper DOT net world')
    ['richard@sitescraper.net']
    """
    email_re = re.compile(r'([\w\.-]{1,64})@(\w[\w\.-]{1,255})\.(\w+)')
    # obfuscated form such as "user AT domain DOT com"
    obfuscated_re = re.compile(r'([\w\.-]{1,64})\s?.?AT.?\s?([\w\.-]{1,255})\s?.?DOT.?\s?(\w+)', re.IGNORECASE)
    digit_re = re.compile(r'\d')
    # remove comments, which can obfuscate emails
    html = re.compile('<!--.*?-->', re.DOTALL).sub('', html).replace('mailto:', '')
    emails = []
    def _add_candidates(matches):
        # shared filter (previously duplicated inline for both scans):
        # skip media-file extensions, too-short or numeric TLDs, and
        # domains with implausibly many dots; de-duplicate while keeping
        # first-seen order
        for user, domain, ext in matches:
            if ext.lower() in common.MEDIA_EXTENSIONS: continue
            if len(ext) < 2 or digit_re.search(ext): continue
            if domain.count('.') > 3: continue
            email = '%s@%s.%s' % (user, domain, ext)
            if email not in emails:
                emails.append(email)
    _add_candidates(email_re.findall(html))
    # look for obfuscated emails
    _add_candidates(obfuscated_re.findall(html))
    return emails
def parse_us_address(address):
    """Parse usa address into (street, city, state, zipcode)

    >>> parse_us_address('6200 20th Street, Vero Beach, FL 32966')
    ('6200 20th Street', 'Vero Beach', 'FL', '32966')
    """
    city = state = zipcode = ''
    # materialize explicitly: under Python 3 map() returns an iterator,
    # which the negative indexing/slicing below would break on
    addrs = [part.strip() for part in address.split(',')]
    if addrs:
        # last component should look like "FL 32966" (state + zip)
        m = re.compile(r'([A-Z]{2,})\s*(\d[\d\-\s]+\d)').search(addrs[-1])
        if m:
            state = m.groups()[0].strip()
            zipcode = m.groups()[1].strip()
            if len(addrs)>=3:
                # second-to-last component is the city
                city = addrs[-2].strip()
                address = ','.join(addrs[:-2])
            else:
                address = ','.join(addrs[:-1])
    return address, city, state, zipcode
def distance(p1, p2):
    """Calculate distance between 2 (latitude, longitude) points
    Multiply result by radius of earth (6373 km, 3960 miles)
    """
    lat1, long1 = p1
    lat2, long2 = p2
    deg2rad = math.pi / 180.0
    # colatitude (angle measured down from the pole) in radians
    phi1 = (90.0 - lat1) * deg2rad
    phi2 = (90.0 - lat2) * deg2rad
    # longitude in radians
    theta1 = long1 * deg2rad
    theta2 = long2 * deg2rad
    # spherical law of cosines on the unit sphere:
    # cos(arc) = sin(phi)sin(phi')cos(theta-theta') + cos(phi)cos(phi')
    cos_angle = (math.sin(phi1) * math.sin(phi2) * math.cos(theta1 - theta2)
                 + math.cos(phi1) * math.cos(phi2))
    # the result is the arc length on a unit sphere; multiply by the
    # Earth's radius in your favourite units for a physical distance
    return math.acos(cos_angle)
| Python |
__doc__ = 'Framework for crawling and scraping webpages with JQuery'
import sys
import os
import re
import urllib2
import random
from time import time, sleep
from datetime import datetime
from PyQt4.QtGui import QApplication, QDesktopServices
from PyQt4.QtCore import QByteArray, QString, QUrl, QTimer, QEventLoop, QIODevice, QObject, QVariant
from PyQt4.QtWebKit import QWebFrame, QWebView, QWebPage, QWebSettings
from PyQt4.QtNetwork import QNetworkAccessManager, QNetworkProxy, QNetworkRequest, QNetworkReply, QNetworkDiskCache
import common
import settings
import xpath
"""
TODO
right click find xpath:
http://doc.qt.nokia.com/4.6/webkit-domtraversal.html
http://doc.qt.nokia.com/4.6/webkit-simpleselector.html
textbox for jquery input
http://www.rkblog.rk.edu.pl/w/p/webkit-pyqt-rendering-web-pages/
threaded multiple URLs
exit on close window signal
add progress bar for loading page
implement watir API?
"""
def qstring_to_unicode(qstr):
    """Convert QString to unicode
    """
    # already decoded - nothing to do
    if isinstance(qstr, unicode):
        return qstr
    # serialize the QString to UTF-8 bytes and decode via the common helper
    raw = qstr.toUtf8().data()
    return common.to_unicode(raw, 'utf-8')
class NetworkAccessManager(QNetworkAccessManager):
    """Subclass QNetworkAccessManager for finer control network operations
    """
    def __init__(self, proxy, allowed_media, allowed_regex, cache_size=100, cache_dir='.webkit_cache'):
        """
        See JQueryBrowser for details of arguments
        cache_size is the maximum size of the webkit cache (MB)
        """
        QNetworkAccessManager.__init__(self)
        # initialize the manager cache
        #QDesktopServices.storageLocation(QDesktopServices.CacheLocation)
        cache = QNetworkDiskCache()
        cache.setCacheDirectory(cache_dir)
        cache.setMaximumCacheSize(cache_size * 1024 * 1024) # need to convert cache value to bytes
        self.setCache(cache)
        self.allowed_regex = allowed_regex
        # allowed content extensions
        # NOTE(review): binds the module-level list and then mutates it via
        # remove() below -- confirm common.MEDIA_EXTENSIONS is safe to
        # modify (a copy may be intended)
        self.banned_extensions = common.MEDIA_EXTENSIONS
        for ext in allowed_media:
            if ext in self.banned_extensions:
                self.banned_extensions.remove(ext)
        # and proxy
        self.setProxy(proxy)
    def setProxy(self, proxy):
        """Allow setting string as proxy
        """
        if isinstance(proxy, basestring):
            # expected form: [username:password@]host[:port]
            match = re.match('((?P<username>\w+):(?P<password>\w+)@)?(?P<host>\d{1,3}.\d{1,3}.\d{1,3}.\d{1,3})(:(?P<port>\d+))?', proxy)
            if match:
                groups = match.groupdict()
                username = groups.get('username') or ''
                password = groups.get('password') or ''
                host = groups.get('host')
                port = groups.get('port')
                #print host, port, username, password
                # NOTE(review): the port group is optional in the regex, so
                # int(port) raises TypeError when no port was supplied --
                # confirm callers always include one
                proxy = QNetworkProxy(QNetworkProxy.HttpProxy, host, int(port), username, password)
            else:
                common.logger.info('Invalid proxy:' + proxy)
                proxy = None
        if proxy:
            QNetworkAccessManager.setProxy(self, proxy)
    def createRequest(self, operation, request, data):
        # Qt callback invoked for every outgoing request
        if operation == self.GetOperation:
            if self.is_forbidden(request):
                # deny GET request for banned media type by setting dummy URL
                # XXX abort properly
                request.setUrl(QUrl(QString('forbidden://localhost/')))
            else:
                common.logger.debug(common.to_unicode(request.url().toString().toUtf8().data()).encode('utf-8'))
        #print request.url().toString(), operation
        # serve from the disk cache when possible
        request.setAttribute(QNetworkRequest.CacheLoadControlAttribute, QNetworkRequest.PreferCache)
        reply = QNetworkAccessManager.createRequest(self, operation, request, data)
        reply.error.connect(self.catch_error)
        #add Base-Url header, then we can get it from QWebView
        if isinstance(request.originatingObject(), QWebFrame):
            try:
                reply.setRawHeader(QByteArray('Base-Url'), QByteArray('').append(request.originatingObject().page().mainFrame().baseUrl().toString()))
            except Exception, e:
                common.logger.debug(e)
        return reply
    def is_forbidden(self, request):
        """Returns whether this request is permitted by checking URL extension and regex
        XXX head request for mime?
        """
        forbidden = False
        url = common.to_unicode(request.url().toString().toUtf8().data()).encode('utf-8')
        if common.get_extension(url) in self.banned_extensions:
            # media extension not in the allowed list
            forbidden = True
        elif re.match(self.allowed_regex, url) is None:
            # URL does not match the whitelist pattern
            forbidden = True
        return forbidden
    def catch_error(self, eid):
        # log any network error except "operation canceled" (5, raised for
        # the deliberately-aborted forbidden requests) and "unknown
        # protocol" (301)
        if eid not in (5, 301):
            errors = {
                0 : 'no error condition. Note: When the HTTP protocol returns a redirect no error will be reported. You can check if there is a redirect with the QNetworkRequest::RedirectionTargetAttribute attribute.',
                1 : 'the remote server refused the connection (the server is not accepting requests)',
                2 : 'the remote server closed the connection prematurely, before the entire reply was received and processed',
                3 : 'the remote host name was not found (invalid hostname)',
                4 : 'the connection to the remote server timed out',
                5 : 'the operation was canceled via calls to abort() or close() before it was finished.',
                6 : 'the SSL/TLS handshake failed and the encrypted channel could not be established. The sslErrors() signal should have been emitted.',
                7 : 'the connection was broken due to disconnection from the network, however the system has initiated roaming to another access point. The request should be resubmitted and will be processed as soon as the connection is re-established.',
                101 : 'the connection to the proxy server was refused (the proxy server is not accepting requests)',
                102 : 'the proxy server closed the connection prematurely, before the entire reply was received and processed',
                103 : 'the proxy host name was not found (invalid proxy hostname)',
                104 : 'the connection to the proxy timed out or the proxy did not reply in time to the request sent',
                105 : 'the proxy requires authentication in order to honour the request but did not accept any credentials offered (if any)',
                201 : 'the access to the remote content was denied (similar to HTTP error 401)',
                202 : 'the operation requested on the remote content is not permitted',
                203 : 'the remote content was not found at the server (similar to HTTP error 404)',
                204 : 'the remote server requires authentication to serve the content but the credentials provided were not accepted (if any)',
                205 : 'the request needed to be sent again, but this failed for example because the upload data could not be read a second time.',
                301 : 'the Network Access API cannot honor the request because the protocol is not known',
                302 : 'the requested operation is invalid for this protocol',
                99 : 'an unknown network-related error was detected',
                199 : 'an unknown proxy-related error was detected',
                299 : 'an unknown error related to the remote content was detected',
                399 : 'a breakdown in protocol was detected (parsing error, invalid or unexpected responses, etc.)',
            }
            common.logger.debug('Error %d: %s (%s)' % (eid, errors.get(eid, 'unknown error'), self.sender().url().toString()))
class NetworkReply(QNetworkReply):
    """Wrapper that proxies another QNetworkReply while recording the
    downloaded bytes in self.data
    """
    def __init__(self, parent, reply):
        QNetworkReply.__init__(self, parent)
        self.reply = reply # reply to proxy
        self.data = '' # contains downloaded data
        self.buffer = '' # contains buffer of data to read
        self.setOpenMode(QNetworkReply.ReadOnly | QNetworkReply.Unbuffered)
        #print dir(reply)
        # connect signal from proxy reply
        reply.metaDataChanged.connect(self.applyMetaData)
        reply.readyRead.connect(self.readInternal)
        reply.finished.connect(self.finished)
        reply.uploadProgress.connect(self.uploadProgress)
        reply.downloadProgress.connect(self.downloadProgress)
    def __getattribute__(self, attr):
        """Send undefined methods straight through to proxied reply
        """
        # send these attributes through to proxy reply
        # (note: the self.reply access below takes the else branch, since
        # 'reply' is not in this tuple, so there is no infinite recursion)
        if attr in ('operation', 'request', 'url', 'abort', 'close'):#, 'isSequential'):
            value = self.reply.__getattribute__(attr)
        else:
            value = QNetworkReply.__getattribute__(self, attr)
        #print attr, value
        return value
    def abort(self):
        pass # qt requires that this be defined
    def isSequential(self):
        # sequential device: data must be read in order, no seeking
        return True
    def applyMetaData(self):
        # copy headers and attributes from the proxied reply onto this one,
        # then re-emit metaDataChanged so downstream consumers see them
        for header in self.reply.rawHeaderList():
            self.setRawHeader(header, self.reply.rawHeader(header))
        self.setHeader(QNetworkRequest.ContentTypeHeader, self.reply.header(QNetworkRequest.ContentTypeHeader))
        self.setHeader(QNetworkRequest.ContentLengthHeader, self.reply.header(QNetworkRequest.ContentLengthHeader))
        self.setHeader(QNetworkRequest.LocationHeader, self.reply.header(QNetworkRequest.LocationHeader))
        self.setHeader(QNetworkRequest.LastModifiedHeader, self.reply.header(QNetworkRequest.LastModifiedHeader))
        self.setHeader(QNetworkRequest.SetCookieHeader, self.reply.header(QNetworkRequest.SetCookieHeader))
        self.setAttribute(QNetworkRequest.HttpStatusCodeAttribute, self.reply.attribute(QNetworkRequest.HttpStatusCodeAttribute))
        self.setAttribute(QNetworkRequest.HttpReasonPhraseAttribute, self.reply.attribute(QNetworkRequest.HttpReasonPhraseAttribute))
        self.setAttribute(QNetworkRequest.RedirectionTargetAttribute, self.reply.attribute(QNetworkRequest.RedirectionTargetAttribute))
        self.setAttribute(QNetworkRequest.ConnectionEncryptedAttribute, self.reply.attribute(QNetworkRequest.ConnectionEncryptedAttribute))
        self.setAttribute(QNetworkRequest.CacheLoadControlAttribute, self.reply.attribute(QNetworkRequest.CacheLoadControlAttribute))
        self.setAttribute(QNetworkRequest.CacheSaveControlAttribute, self.reply.attribute(QNetworkRequest.CacheSaveControlAttribute))
        self.setAttribute(QNetworkRequest.SourceIsFromCacheAttribute, self.reply.attribute(QNetworkRequest.SourceIsFromCacheAttribute))
        # attribute is undefined
        #self.setAttribute(QNetworkRequest.DoNotBufferUploadDataAttribute, self.reply.attribute(QNetworkRequest.DoNotBufferUploadDataAttribute))
        self.metaDataChanged.emit()
    def bytesAvailable(self):
        """How many bytes in the buffer are available to be read
        """
        return len(self.buffer) + QNetworkReply.bytesAvailable(self)
    def readInternal(self):
        """New data available to read
        """
        # append to both the permanent record and the read buffer
        s = self.reply.readAll()
        self.data += s
        self.buffer += s
        self.readyRead.emit()
    def readData(self, size):
        """Return up to size bytes from buffer
        """
        size = min(size, len(self.buffer))
        data, self.buffer = self.buffer[:size], self.buffer[size:]
        return str(data)
class WebPage(QWebPage):
    """Override QWebPage to set User-Agent and JavaScript messages
    """
    def __init__(self, user_agent, confirm=True):
        # user_agent: string reported for every request
        # confirm: canned answer returned for JavaScript confirm() dialogs
        QWebPage.__init__(self)
        self.user_agent = user_agent
        self.confirm = confirm
    def userAgentForUrl(self, url):
        # Qt callback: use the configured user agent for all URLs
        return self.user_agent
    def javaScriptAlert(self, frame, message):
        """Override default JavaScript alert popup and print results
        """
        common.logger.debug('Alert:' + message)
    def javaScriptConfirm(self, frame, message):
        """Override default JavaScript confirm popup and print results
        """
        common.logger.debug('Confirm:' + message)
        return self.confirm
    def javaScriptPrompt(self, frame, message, default):
        """Override default JavaScript prompt popup and print results
        """
        common.logger.debug('Prompt:%s%s' % (message, default))
    def javaScriptConsoleMessage(self, message, line_number, source_id):
        """Print JavaScript console messages
        """
        common.logger.debug('Console:%s%s%s' % (message, line_number, source_id))
    def shouldInterruptJavaScript(self):
        """Disable javascript interruption dialog box
        """
        return True
class JQueryBrowser(QWebView):
    """Render webpages using webkit
    """
    def __init__(self, base_url=None, gui=False, user_agent=None, proxy=None, allowed_media=None, allowed_regex='.*?', timeout=20, delay=5, enable_plugins=True):#, cache_file=None):
        """
        base_url is the domain that will be crawled
        gui is whether to show webkit window or run headless
        user_agent is used to set the user-agent when downloading content
        proxy is a QNetworkProxy to download through
        allowed_media are the media extensions to allow
        allowed_regex is a regular expressions of URLS to allow
        timeout is the maximum amount of seconds to wait for a request
        delay is the minimum amount of seconds to wait between requests
        enable_plugins is whether to enable browser plugins such as flash
        """
        self.app = QApplication(sys.argv) # must instantiate first
        QWebView.__init__(self)
        webpage = WebPage(user_agent or settings.user_agent)
        allowed_media = allowed_media or ['css', 'js']
        manager = NetworkAccessManager(proxy, allowed_media, allowed_regex)
        manager.finished.connect(self.finished)
        webpage.setNetworkAccessManager(manager)
        self.setPage(webpage)
        # placeholder document until the first real page is loaded
        self.setHtml('<html><head></head><body>No content loaded</body></html>', QUrl('http://localhost'))
        self.timeout = timeout
        self.delay = delay
        #self.cache = pdict.PersistentDict(cache_file or settings.cache_file) # cache to store webpages
        self.base_url = base_url
        self.jquery_lib = None # downloaded lazily by inject_jquery()
        #enable flash plugin etc.
        self.settings().setAttribute(QWebSettings.PluginsEnabled, enable_plugins)
        QTimer.singleShot(0, self.run) # start crawling when all events processed
        if gui: self.show()
        self.app.exec_() # start GUI thread
    def set_proxy(self, proxy):
        # delegate to the network manager, which also accepts string proxies
        self.page().networkAccessManager().setProxy(proxy)
    def current_url(self):
        """Return current URL
        """
        return str(self.url().toString())
    def current_html(self):
        """Return current rendered HTML
        """
        return unicode(self.page().mainFrame().toHtml())
    def get(self, url=None, script=None, retries=1, inject=True):
        """Load given url in webkit and return html when loaded
        script is some javascript to execute that will change the loaded page (eg form submission)
        retries is how many times to try downloading this URL or executing this script
        inject is whether to inject JQuery into the document
        """
        t1 = time()
        self.base_url = self.base_url or url # set base URL if not set
        #html = self.cache.get(key, {}).get('value')
        #if html:
        #    self.debug('Load cache ' + key)
        #    self.setHtml(html, QUrl(self.base_url))
        #else:
        if 1:
            # a local event loop lets this method block until either the
            # page finishes loading or the timeout timer fires
            loop = QEventLoop()
            timer = QTimer()
            timer.setSingleShot(True)
            timer.timeout.connect(loop.quit)
            self.loadFinished.connect(loop.quit)
            if url:
                self.load(QUrl(url))
            elif script:
                self.js(script)
            timer.start(self.timeout * 1000)
            loop.exec_() # delay here until download finished or timeout
            if timer.isActive():
                # downloaded successfully (timer still running => no timeout)
                timer.stop()
                parsed_html = self.current_html()
                #if key:
                #    self.cache[key] = html
                # honour the minimum delay between requests
                self.wait(self.delay - (time() - t1))
            else:
                # didn't download in time
                if retries > 0:
                    common.logger.debug('Timeout - retrying')
                    parsed_html = self.get(url, script=script, retries=retries-1, inject=inject)
                else:
                    common.logger.debug('Timed out')
                    parsed_html = ''
        return parsed_html
    def wait(self, secs=1):
        """Wait for delay time
        (keeps pumping Qt events so the GUI stays responsive)
        """
        deadline = time() + secs
        while time() < deadline:
            sleep(0.1)
            self.app.processEvents()
        #print 'wait', wait_secs
        # randomize the delay so less suspicious
        #wait_secs += 0.5 * self.delay * (random.random() - 0.5)
        #time.sleep(max(0, wait_secs))
    def jsget(self, script, retries=1, inject=True):
        """Execute JavaScript that will cause page submission, and wait for page to load
        """
        return self.get(script=script, retries=retries, inject=inject)
    def js(self, script):
        """Shortcut to execute javascript on current document and return result
        """
        self.app.processEvents()
        return qstring_to_unicode(self.page().mainFrame().evaluateJavaScript(script).toString())
    def inject_jquery(self):
        """Inject jquery library into this webpage for easier manipulation
        """
        # download the library only once per browser instance
        if self.jquery_lib is None:
            url = 'http://ajax.googleapis.com/ajax/libs/jquery/1/jquery.min.js'
            self.jquery_lib = urllib2.urlopen(url).read()
        self.js(self.jquery_lib)
    def click(self, pattern):
        """Click all elements that match the pattern
        (dispatches a synthetic mouse click event on each element)
        """
        for e in self.page().mainFrame().findAllElements(pattern):
            e.evaluateJavaScript("var evObj = document.createEvent('MouseEvents'); evObj.initEvent('click', true, true); this.dispatchEvent(evObj);")
    def attr(self, pattern, name, value=None):
        """Set attribute if value is defined, else get
        """
        if value is None:
            # want to get attribute
            return str(self.page().mainFrame().findFirstElement(pattern).attribute(name))
        else:
            for e in self.page().mainFrame().findAllElements(pattern):
                e.setAttribute(name, value)
    def fill(self, pattern, value):
        """Set text of these elements to value
        """
        for e in self.page().mainFrame().findAllElements(pattern):
            tag = str(e.tagName()).lower()
            if tag == 'input':
                # inputs take their text via the value property
                #e.setAttribute('value', value)
                e.evaluateJavaScript('this.value = "%s"' % value)
            else:
                e.setPlainText(value)
    def find(self, pattern):
        """Returns whether element matching xpath pattern exists
        """
        return self.page().mainFrame().findAllElements(pattern)
    def data(self, url):
        """Get data for this downloaded resource, if exists
        (reads from the network manager's disk cache)
        """
        record = self.page().networkAccessManager().cache().data(QUrl(url))
        if record:
            data = record.readAll()
            record.reset()
        else:
            data = None
        return data
    def run(self):
        """Override this method in subclass to automate interaction with website
        """
        self.app.processEvents()
        self.get('http://code.google.com/p/webscraping/')
        #print self.data('http://www.google-analytics.com/ga.js')
        QTimer.singleShot(5000, self.app.quit)
    def finished(self, reply):
        """Override this method in subclasses to process downloaded urls
        """
        pass
        #print reply.url().toString(), ':', len(reply.data)
    def closeEvent(self, event):
        """Catch the close window event and stop the script
        """
        sys.exit(self.app.quit())
if __name__ == '__main__':
    # quick manual test: open the demo browser with the GUI visible
    JQueryBrowser(gui=True)
| Python |
__doc__ = 'default application wide settings'
import logging
cache_file = '.cache.db' # file to use for pdict cache
user_agent = 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:5.0) Gecko/20100101 Firefox/5.0' # user agent for HTTP requests
log_level = logging.INFO # logging level
log_file = 'webscraping.log' # default logging file
default_encoding = 'utf-8' # default character encoding (consumers not visible in this module)
| Python |
__doc__ = """
This module implements a subset of the XPath standard:
- tags
- indices
- attributes
- descendants
Plus a few extensions useful to my work:
- attributes can contain regular expressions
- indices can be negative
Generally XPath solutions will normalize the HTML into XHTML before selecting nodes.
However this module tries to navigate the HTML structure directly without normalizing.
In some cases I have found this faster/more accurate than using lxml.html and in other cases less so.
"""
#TODO:
# - convert to class to more efficiently handle html
# - and buffer tree selections
# - parent
# - search by text: text() == '...'
# - return xpath for most similar to text
# - change to breadth first search for faster finish with single element
import re
import urllib2
from urlparse import urljoin, urlsplit
from optparse import OptionParser
import adt
import common
import settings
# flag reserved for buffering tree selections (see the TODO list above);
# its usage is not visible in this part of the module
USE_BUFFER = False
def search(html, xpath, remove=None):
    """Query HTML document using XPath
    remove is a list of tags to ignore
    >>> search('<span>1</span><div>abc<a>LINK 1</a><div><a>LINK 2</a>def</div>abc</div>ghi<div><a>LINK 3</a>jkl</div>', '/div/a')
    ['LINK 1', 'LINK 3']
    >>> search('<div>abc<a class="link">LINK 1</a><div><a>LINK 2</a>def</div>abc</div>ghi<div><a class="link">LINK 3</a>jkl</div>', '/div[1]/a[@class="link"]')
    ['LINK 1']
    >>> search('<div>abc<a class="link">LINK 1</a><div><a>LINK 2</a>def</div>abc</div>ghi<div><a class="link">LINK 3</a>jkl</div>', '/div[1]//a')
    ['LINK 1', 'LINK 2']
    >>> search('<div>abc<a class="link">LINK 1</a></div>', '/div/a/@class')
    ['link']

    # test searching unicode
    >>> search(u'<a href="http://www.google.com" class="flink">google</a>', '//a[@class="flink"]')
    [u'google']

    # test scraping a large amount of content
    len(search('<div><span>!</span></div>' * 10000, '//span'))
    10000
    """
    orig_html = html  # NOTE(review): unused - kept as-is
    html = clean_html(html, remove)
    contexts = [html] # initial context is entire webpage
    # attributes of the nodes matched by the previous step, used by @attr selectors
    parent_attributes = []
    for tag_i, (separator, tag, index, attributes) in enumerate(xpath_iter(xpath)):
        children = []
        if tag == '..':
            # parent
            raise common.WebScrapingError('.. not yet supported')
        elif tag == 'text()':
            # extract child text
            for context in contexts:
                children.append(common.remove_tags(context, keep_children=False))
        elif tag.startswith('@'):
            # selecting attribute of the nodes found in the previous step
            name = tag[1:].lower()
            for a in parent_attributes:
                children.append(a.get(name, ''))
        else:
            # have tag
            parent_attributes = []
            for context in contexts:
                # search direct children if / and all descendants if //
                matches = (separator == '' and find_children or find_descendants)(context, tag)
                # XXX change to iterator
                abs_index = index
                if abs_index is not None and abs_index < 0:
                    # support negative indices by counting from the end
                    abs_index += len(matches) + 1
                for child_i, child in enumerate(matches):
                    if index is None or abs_index == child_i + 1:
                        # matches index if defined (XPath indices are 1-based)
                        child_attributes = get_attributes(child)
                        if match_attributes(attributes, child_attributes):
                            # child matches tag and any defined indices or attributes
                            children.append(get_content(child))
                            parent_attributes.append(child_attributes)
        if not children and tag == 'tbody':
            pass # skip tbody, which firefox includes in xpath when does not exist
        else:
            contexts = children
        if not contexts:
            # nothing matched this step - log which step failed and give up
            attributes_s = attributes and ''.join('[@%s="%s"]' % a for a in attributes) or ''
            common.logger.debug('No matches for <%s%s%s> (tag %d)' % (tag, index and '[%d]' % index or '', attributes_s, tag_i + 1))
            break
    return contexts
def get(*args, **kwargs):
    """Return the first result from search(), or '' when nothing matches."""
    matches = search(*args, **kwargs)
    return common.first(matches)
def clean_html(html, tags):
    """Strip HTML comments and every occurrence of the given unhelpful tags.

    `tags' is an iterable of tag names (or a falsy value for none); for each
    name the self-closing form, the full element span and any dangling open
    tag are all removed.
    """
    html = re.sub('<!--.*?-->', '', html, flags=re.DOTALL) # remove comments
    for tag in (tags or []):
        # XXX combine tag list into single regex, if can match same at start and end
        patterns = (
            '<' + tag + '[^>]*?/>',                  # self-closing, e.g. <br/>
            '<' + tag + '[^>]*?>.*?</' + tag + '>',  # complete element with content
            '<' + tag + '[^>]*?>',                   # unmatched opening tag
        )
        for pattern in patterns:
            html = re.compile(pattern, re.DOTALL | re.IGNORECASE).sub('', html)
    return html
def xpath_iter(xpath):
    """Parse an xpath expression, yielding one (separator, tag, index,
    attributes) tuple per path step.

    >>> list(xpath_iter('/div[1]//span[@class="text"]'))
    [('', 'div', 1, []), ('/', 'span', None, [('class', 'text')])]
    >>> list(xpath_iter('//li[-2]'))
    [('/', 'li', -2, [])]
    >>> list(xpath_iter('/div[@id="content"]//span[1][@class="text"][@title=""]/a'))
    [('', 'div', None, [('id', 'content')]), ('/', 'span', 1, [('class', 'text'), ('title', '')]), ('', 'a', None, [])]
    """
    step_re = re.compile('(|/|\.\.)/([^/]+)')
    predicate_re = re.compile('\[(.*?)\]')
    attr_re = re.compile('@(.*?)=["\']?(.*?)["\']?$')
    for separator, token in step_re.findall(xpath):
        index = None
        attributes = []
        if '[' not in token:
            # bare tag with no predicates
            yield separator, token, index, attributes
            continue
        tag = token[:token.find('[')]
        for predicate in predicate_re.findall(token):
            try:
                index = int(predicate)
            except ValueError:
                # not a numeric index, so must be an attribute selector
                match = attr_re.search(predicate)
                if not match:
                    raise common.WebScrapingError('Unknown format: ' + predicate)
                key, value = match.groups()
                attributes.append((key.lower(), value.lower()))
        yield separator, tag, index, attributes
attributes_regex = re.compile('([\w-]+)\s*=\s*(".*?"|\'.*?\'|\w+)', re.DOTALL)
def get_attributes(html):
    """Extract the attributes of the passed HTML tag

    Only the opening tag (up to the first '>') is parsed, so attributes of
    nested tags are not included.
    >>> get_attributes('<div id="ID" name="MY NAME" max-width="20" class=abc>content <span class="inner name">SPAN</span></div>')
    {'max-width': '20', 'class': 'abc', 'id': 'ID', 'name': 'MY NAME'}
    """
    end = html.find('>')
    if end != -1:
        # restrict parsing to the opening tag
        if USE_BUFFER:
            html = buffer(html, 0, end)
        else:
            html = html[:end]
    pairs = attributes_regex.findall(html)
    return dict((name.lower().strip(), value.strip('\'" ')) for name, value in pairs)
def match_attributes(desired_attributes, available_attributes):
    """Returns True if all of desired attributes are in available attributes
    Supports regex, which is not part of the XPath standard but is so useful!
    Values are matched case-insensitively and anchored at the end.
    >>> match_attributes([], {})
    True
    >>> match_attributes([('class', 'test')], {})
    False
    >>> match_attributes([], {'id':'test', 'class':'test2'})
    True
    >>> match_attributes([('class', 'test')], {'id':'test', 'class':'test2'})
    False
    >>> match_attributes([('class', 'test')], {'id':'test2', 'class':'test'})
    True
    >>> match_attributes([('class', 'test'), ('id', 'content')], {'id':'test', 'class':'content'})
    False
    >>> match_attributes([('class', 'test'), ('id', 'content')], {'id':'content', 'class':'test'})
    True
    >>> match_attributes([('class', 'test\d')], {'id':'test', 'class':'test2'})
    True
    >>> match_attributes([('class', 'test\d')], {'id':'test2', 'class':'test'})
    False
    """
    for name, value in desired_attributes:
        if name not in available_attributes:
            return False
        # value is treated as a regex anchored at the end of the attribute
        if not re.match(value + '$', available_attributes[name], re.IGNORECASE):
            return False
    return True
content_regex = re.compile('<.*?>(.*)</.*?>$', re.DOTALL)
def get_content(html, default=''):
    """Extract the child HTML of the passed HTML tag, or `default' when the
    markup does not look like a complete element.

    >>> get_content('<div id="ID" name="NAME">content <span>SPAN</span></div>')
    'content <span>SPAN</span>'
    """
    match = content_regex.match(html)
    return match.group(1) if match else default
def find_children(html, tag):
    """Find direct children with this tag type ('*' matches any tag).

    >>> [str(b) for b in find_children('<span>1</span><div>abc<div>def</div>abc</div>ghi<div>jkl</div>', 'div')]
    ['<div>abc<div>def</div>abc</div>', '<div>jkl</div>']
    """
    results = []
    wanted = tag.lower()
    while True:
        html = jump_next_tag(html)
        if not html:
            break # no further tags
        tag_html, html = split_tag(html)
        if not tag_html:
            break # could not split off a complete element
        if wanted in ('*', get_tag(tag_html).lower()):
            results.append(tag_html)
    return results
def find_descendants(html, tag):
    """Find all descendants with this tag type.

    Fix: the previous pattern '<%s' also matched any longer tag name sharing
    the prefix (searching 'b' matched '<body>', 'a' matched '<area>'); the
    lookahead now requires the tag name to end at whitespace, '/' or '>'.

    >>> [str(b) for b in find_descendants('<span>1</span><div>abc<div>def</div>abc</div>ghi<div>jkl</div>', 'div')]
    ['<div>abc<div>def</div>abc</div>', '<div>def</div>', '<div>jkl</div>']
    >>> [str(b) for b in find_descendants('<b>1</b><body>2</body>', 'b')]
    ['<b>1</b>']
    """
    if tag == '*':
        raise common.WebScrapingError("`*' not currently supported for // because too inefficient")
    results = []
    # (?=[\s/>]) ensures an exact tag-name match without consuming the
    # following character, so match.start() still points at '<'
    for match in re.compile('<%s(?=[\s/>])' % tag, re.DOTALL | re.IGNORECASE).finditer(html):
        if USE_BUFFER:
            tag_html = buffer(html, match.start())
        else:
            tag_html = html[match.start():]
        tag_html, _ = split_tag(tag_html)
        results.append(tag_html)
    return results
tag_regex = re.compile('<([\w\:]+)')
def jump_next_tag(html):
    """Return html from the start of the next non-empty tag, or None when
    there is no further tag.

    >>> str(jump_next_tag('<div>abc</div>'))
    '<div>abc</div>'
    >>> str(jump_next_tag(' <div>abc</div>'))
    '<div>abc</div>'
    >>> str(jump_next_tag('</span> <div>abc</div>'))
    '<div>abc</div>'
    >>> str(jump_next_tag('<br> <div>abc</div>'))
    '<div>abc</div>'
    """
    while 1:
        match = tag_regex.search(html)
        if not match:
            return None
        if match.groups()[0].lower() in common.EMPTY_TAGS:
            # content-less tag such as <br>: skip past it and keep scanning
            html = buffer(html, match.end()) if USE_BUFFER else html[match.end():]
        else:
            return buffer(html, match.start()) if USE_BUFFER else html[match.start():]
def get_tag(html):
    """Return the tag name at the very start of html, or None when html does
    not begin with a tag.

    >>> get_tag('<div>abc</div>')
    'div'
    >>> get_tag(' <div>')
    >>> get_tag('div')
    """
    match = tag_regex.match(html)
    return match.groups()[0] if match else None
splits = adt.HashDict() # cache of html -> (split index, tag)
def split_tag(html):
    """Extract starting tag and contents from HTML

    Returns (element_html, remaining_html); when no matching close tag is
    found the element is closed artificially and the remainder is ''.
    >>> [str(s) for s in split_tag('<div>abc<div>def</div>abc</div>ghi<div>jkl</div>')]
    ['<div>abc<div>def</div>abc</div>', 'ghi<div>jkl</div>']
    >>> [str(s) for s in split_tag('<br /><div>abc</div>')]
    ['<br />', '<div>abc</div>']
    >>> [str(s) for s in split_tag('<div>abc<div>def</div>abc</span>')]
    ['<div>abc<div>def</div>abc</span></div>', '']
    """
    if html in splits:
        # NOTE(review): dead branch - the cache write below is commented out,
        # so `splits' is never populated
        i, tag = splits[html]
    else:
        i = None
        tag = get_tag(html)
        depth = 0 # how far nested
        # scan every open/close occurrence of this tag, tracking nesting depth
        for match in re.compile('</?%s.*?>' % tag, re.DOTALL | re.IGNORECASE).finditer(html):
            if html[match.start() + 1] == '/':
                depth -= 1 # closing tag
            elif html[match.end() - 2] == '/':
                pass # tag starts and ends (eg <br />)
            else:
                depth += 1 # opening tag
            if depth == 0:
                # found top level match
                i = match.end()
                break
        #splits[html] = i, tag
    if i is None:
        # no matching close tag - close the element ourselves
        return html + '</%s>' % tag, ''
    else:
        if USE_BUFFER:
            return html[:i], buffer(html, i)
        else:
            return html[:i], html[i:]
# Fix: a_re previously held the literal XPath text '//a/@href' compiled as a
# regex; a_re.search(html) then returned a match object (or None) rather than
# a list, so `a_links + js_links' below raised a TypeError. It is now a real
# regex that extracts href attribute values from anchor tags.
a_re = re.compile('<a[^>]+href\s*=\s*["\']?([^"\'> ]+)', re.DOTALL | re.IGNORECASE)
js_re = re.compile('location.href ?= ?[\'"](.*?)[\'"]')
def get_links(html, url=None, local=True, external=True):
    """Return all links from html and convert relative to absolute if source url is provided

    `local' determines whether to include links from same domain
    `external' determines whether to include links from other domains
    """
    def normalize_link(link):
        """Strip fragments, resolve against `url' and apply the local/external
        filters; returns None for links that should be dropped.
        """
        if urlsplit(link).scheme in ('http', 'https', ''):
            if '#' in link:
                # remove internal fragment to avoid duplicates
                link = link[:link.index('#')]
            if url:
                link = urljoin(url, link) # support relative links
                if not local and common.same_domain(url, link):
                    # local links not included
                    link = None
                if link and not external and not common.same_domain(url, link):
                    # external links not included
                    link = None
        else:
            link = None # ignore mailto, etc
        return link
    a_links = a_re.findall(html)
    js_links = js_re.findall(html)
    links = []
    for link in a_links + js_links:
        try:
            link = normalize_link(link)
        except UnicodeError:
            pass
        else:
            if link and link not in links:
                links.append(link)
    return links
| Python |
__doc__ = """
Description: Helper methods to download and crawl web content using threads
Website: http://code.google.com/p/webscraping/
License: LGPL
"""
import os
import gzip
import re
import time
import random
import urllib
import urllib2
from urlparse import urljoin
from StringIO import StringIO
import subprocess
from datetime import datetime, timedelta
from collections import defaultdict, deque
import socket
from threading import Thread, Event
try:
import hashlib
except ImportError:
import md5 as hashlib
import adt
import alg
import common
import settings
try:
import pdict
except ImportError:
# sqlite not installed
pdict = None
SLEEP_TIME = 0.1 # how long to sleep when waiting for network activity
class Download(object):
    """Download webpages with caching, per-domain throttling, proxy rotation,
    retries and redirect handling.

    Constructor keyword arguments are stored as per-instance defaults and can
    be overridden per call in get().
    """
    def __init__(self, cache=None, cache_file=None, read_cache=True, write_cache=True, use_network=True,
            user_agent=None, timeout=30, delay=5, proxies=None, proxy_file=None, opener=None,
            headers=None, data=None, num_retries=0, num_redirects=1,
            force_html=False, force_ascii=False, max_size=None, default='', pattern=None):
        """
        `cache' is a pdict object to use for the cache
        `cache_file' sets filename to store cached data
        `read_cache' sets whether to read from the cache
        `write_cache' sets whether to write to the cache
        `use_network' sets whether to download content not in the cache
        `user_agent' sets the User Agent to download content with
        `timeout' is the maximum amount of time to wait for http response
        `delay' is the minimum amount of time (in seconds) to wait after downloading content from a domain per proxy
        `proxies' is a list of proxies to cycle through when downloading content
        `proxy_file' is a filename to read the proxy list from, polled for changes
        `opener' sets an optional opener to use instead of using urllib2 directly
        `headers' are the headers to include in the request
        `data' is what to post at the URL
        `num_retries' sets how many times to try downloading a URL when get an error
        `num_redirects' sets how many times the URL is allowed to be redirected, to avoid infinite loop
        `force_html' sets whether to download non-text data
        `force_ascii' sets whether to only return ascii characters
        `max_size' determines maximum number of bytes that will be downloaded, or None to disable
        `default' is what to return when no content can be downloaded
        `pattern' is a regular expression that the downloaded HTML has to match to be considered a valid download
        """
        # NOTE: changes the socket timeout process-wide, not just for this instance
        socket.setdefaulttimeout(timeout)
        cache_file = cache_file or settings.cache_file
        if pdict:
            self.cache = cache or pdict.PersistentDict(cache_file)
        else:
            self.cache = None
            if read_cache or write_cache:
                common.logger.info('Cache disabled because could not import pdict')
        # per-instance defaults, overridable per call to get()
        self.settings = adt.Bag(
            read_cache = read_cache,
            write_cache = write_cache,
            use_network = use_network,
            delay = delay,
            proxies = (common.read_list(proxy_file) if proxy_file else []) or proxies or [],
            proxy_file = proxy_file,
            user_agent = user_agent or settings.user_agent,
            opener = opener,
            headers = headers,
            data = data,
            num_retries = num_retries,
            num_redirects = num_redirects,
            force_html = force_html,
            force_ascii = force_ascii,
            max_size = max_size,
            default = default,
            pattern = pattern
        )
        # timestamps used by reload_proxies() to poll the proxy file
        self.last_load_time = self.last_mtime = time.time()

    def get(self, url, **kwargs):
        """Download this URL and return the HTML. Data is cached so only have to download once.

        `url' is what to download
        `kwargs' can override any of the arguments passed to constructor
        """
        self.reload_proxies()
        self.final_url = None # for tracking redirects
        self.response_code = '' # keep response code
        self.response_headers = {} # keep response headers
        # update settings with any local overrides
        # (local name shadows the module-level `settings' import from here on)
        settings = adt.Bag(self.settings)
        settings.update(kwargs)
        # check cache for whether this content is already downloaded
        key = self.get_key(url, settings.data)
        if self.cache and settings.read_cache:
            try:
                html = self.cache[key]
                if html and settings.pattern and not re.compile(settings.pattern, re.DOTALL | re.IGNORECASE).search(html):
                    # invalid result from download
                    html = None
            except KeyError:
                pass # have not downloaded yet
            else:
                if not html and settings.num_retries > 0:
                    # try downloading again
                    common.logger.debug('Redownloading')
                else:
                    # return previously downloaded content
                    return html or settings.default
        if not settings.use_network:
            # only want previously cached content
            return settings.default
        html = None
        # attempt downloading content at URL
        while html is None:
            # crawl slowly for each domain to reduce risk of being blocked
            settings.proxy = random.choice(settings.proxies) if settings.proxies else None
            self.throttle(url, delay=settings.delay, proxy=settings.proxy)
            html = self.fetch(url, headers=settings.headers, data=settings.data, proxy=settings.proxy, user_agent=settings.user_agent, opener=settings.opener, pattern=settings.pattern)
            if settings.num_retries == 0:
                break # don't try downloading again
            else:
                settings.num_retries -= 1
        if html:
            if settings.num_redirects > 0:
                # allowed to redirect
                redirect_url = self.get_redirect(url=url, html=html)
                if redirect_url:
                    # found a redirection
                    common.logger.info('%s redirecting to %s' % (url, redirect_url))
                    settings.num_redirects -= 1
                    html = self.get(redirect_url, **settings) or ''
                    # make relative links absolute so will still work after redirect
                    relative_re = re.compile('(<\s*a[^>]+href\s*=\s*["\']?)(?!http)([^"\'>]+)', re.IGNORECASE)
                    html = relative_re.sub(lambda m: m.group(1) + urljoin(url, m.group(2)), html)
            html = self.clean_content(html=html, max_size=settings.max_size, force_html=settings.force_html, force_ascii=settings.force_ascii)
        if self.cache and settings.write_cache:
            # cache results
            self.cache[key] = html
            if url != self.final_url:
                # cache what URL was redirected to
                self.cache.meta(key, dict(url=self.final_url))
        # return default if no content
        return html or settings.default

    def get_key(self, url, data=None):
        """Create key for storing in database

        The POST data (when present) is appended so the same URL with
        different payloads caches separately.
        """
        key = url
        if data:
            key += ' ' + str(data)
        return key

    def clean_content(self, html, max_size, force_html, force_ascii):
        """Clean up downloaded content

        Returns '' when the content is too large or (with force_html) is not HTML.
        """
        if max_size is not None and len(html) > max_size:
            common.logger.info('Too big: %s' % len(html))
            html = '' # too big to store
        elif force_html and not common.is_html(html):
            common.logger.info('Not html')
            html = '' # non-html content
        elif force_ascii:
            html = common.to_ascii(html) # remove non-ascii characters
        return html

    def get_redirect(self, url, html):
        """Check for meta redirects and return redirect URL if found
        (returns None implicitly when there is no meta redirect)
        """
        match = re.compile('<meta[^>]*?url=(.*?)["\']', re.IGNORECASE).search(html)
        if match:
            return urljoin(url, common.unescape(match.groups()[0].strip()))

    def fetch(self, url, headers=None, data=None, proxy=None, user_agent=None, opener=None, pattern=None):
        """Simply download the url and return the content

        Returns None on error or when `pattern' does not match; side effects:
        sets self.final_url, self.response_code and self.response_headers.
        """
        common.logger.info('Downloading %s' % url)
        # create opener with headers
        opener = opener or urllib2.build_opener()
        if proxy:
            if url.lower().startswith('https://'):
                opener.add_handler(urllib2.ProxyHandler({'https' : proxy}))
            else:
                opener.add_handler(urllib2.ProxyHandler({'http' : proxy}))
        default_headers = {'User-agent': user_agent or settings.user_agent, 'Accept-encoding': 'gzip', 'Referer': url, 'Accept-Language': 'en-us,en;q=0.5'}
        # dict.update() returns None, so this expression evaluates to
        # default_headers (mutated in place with the caller's overrides)
        headers = headers and default_headers.update(headers) or default_headers
        if isinstance(data, dict):
            data = urllib.urlencode(data)
        try:
            response = opener.open(urllib2.Request(url, data, headers))
            content = response.read()
            if response.headers.get('content-encoding') == 'gzip':
                # data came back gzip-compressed so decompress it
                content = gzip.GzipFile(fileobj=StringIO(content)).read()
            self.final_url = response.url # store where redirected to
            if pattern and not re.compile(pattern, re.DOTALL | re.IGNORECASE).search(content):
                # invalid result from download
                content = None
                common.logger.info('Content did not match expected pattern - %s' % url)
            self.response_code = '200'
            self.response_headers = dict(response.headers)
        except Exception, e:
            if hasattr(e, 'code'):
                self.response_code = e.code
            # so many kinds of errors are possible here so just catch them all
            common.logger.info('Error: %s %s' % (url, e))
            content, self.final_url = None, url
        return content

    # maps 'proxy:domain' -> earliest next-allowed access time; class-level,
    # so the throttle is shared by all Download instances
    domains = adt.HashDict()

    def throttle(self, url, delay, proxy=None, variance=0.5):
        """Delay a minimum time for each domain per proxy by storing last access time

        `url' is what intend to download
        `delay' is the minimum amount of time (in seconds) to wait after downloading content from this domain
        `proxy' is the proxy to download through
        `variance' is the amount of randomness in delay, 0-1
        """
        key = str(proxy) + ':' + common.get_domain(url)
        start = datetime.now()
        # busy-wait (in SLEEP_TIME increments) until this domain may be queried
        while datetime.now() < Download.domains.get(key, start):
            time.sleep(SLEEP_TIME)
        # update domain timestamp to when can query next
        Download.domains[key] = datetime.now() + timedelta(seconds=delay * (1 + variance * (random.random() - 0.5)))

    def reload_proxies(self):
        """Check every 10 minutes for updated proxy file
        """
        if self.settings.proxy_file and time.time() - self.last_load_time > 10 * 60:
            self.last_load_time = time.time()
            if os.path.exists(self.settings.proxy_file):
                if os.stat(self.settings.proxy_file).st_mtime != self.last_mtime:
                    # file changed on disk - reload the proxy list
                    self.last_mtime = os.stat(self.settings.proxy_file).st_mtime
                    self.settings.proxies = common.read_list(self.settings.proxy_file)
                    common.logger.debug('Reloaded proxies.')

    def geocode(self, address, delay=5):
        """Geocode address using Google's API and return dictionary of useful fields

        Returns an empty dict on parse failure; on an empty geocode result the
        cached response is deleted so a later call retries.
        """
        try:
            import simplejson as json
        except ImportError:
            import json
        url = 'http://maps.google.com/maps/api/geocode/json?address=%s&sensor=false' % urllib.quote_plus(address)
        html = self.get(url, delay=delay)
        results = defaultdict(str)
        if html:
            try:
                geo_data = json.loads(html)
            except Exception, e:
                common.logger.debug(str(e))
                return {}
            for result in geo_data.get('results', []):
                # map Google's address component types onto flat result keys
                for e in result['address_components']:
                    types, value = e['types'], e['long_name']
                    if 'street_number' in types:
                        results['number'] = value
                    elif 'route' in types:
                        results['street'] = value
                    elif 'postal_code' in types:
                        results['postcode'] = value
                    elif 'locality' in types:
                        results['suburb'] = value
                    elif 'administrative_area_level_1' in types:
                        results['state'] = value
                    elif 'administrative_area_level_2' in types:
                        results['county'] = value
                    elif 'administrative_area_level_3' in types:
                        results['district'] = value
                    elif 'country' in types:
                        results['country'] = value
                results['full_address'] = result['formatted_address']
                results['lat'] = result['geometry']['location']['lat']
                results['lng'] = result['geometry']['location']['lng']
            if 'street' in results:
                results['address'] = (results['number'] + ' ' + results['street']).strip()
        if not results:
            # error geocoding - try again later
            common.logger.debug('delete invalid geocode')
            if self.cache:
                del self.cache[url]
        return results

    def get_emails(self, website, max_depth=1, max_urls=None, max_emails=None):
        """Crawl this website and return all emails found

        Stops when the queue is exhausted or max_urls / max_emails is reached.
        """
        scraped = adt.HashDict() # urls already visited
        c = CrawlerCallback(max_depth=max_depth)
        outstanding = deque([website]) # urls queued for crawling
        emails = []
        while outstanding and (max_urls is None or len(scraped) < max_urls) \
                and (max_emails is None or len(emails) < max_emails):
            url = outstanding.popleft()
            scraped[url] = True
            html = self.get(url, delay=1)
            if html:
                for email in alg.extract_emails(html):
                    if email not in emails:
                        emails.append(email)
                        if len(emails) == max_emails:
                            break
                outstanding.extend(c.crawl(self, url, html))
        return list(emails)

    def gcache_get(self, url, **kwargs):
        """Get page from google cache
        """
        return self.get('http://www.google.com/search?&q=cache%3A' + urllib.quote(url), **kwargs)

    def gtrans_get(self, url, **kwargs):
        """Get page via Google Translation
        (returns None implicitly when the translation frame cannot be found)
        """
        url = 'http://translate.google.com/translate?sl=nl&anno=2&u=%s' % urllib.quote(url)
        html = self.get(url, **kwargs)
        if html:
            m = re.compile(r'<frame src="([^"]+)" name=c>', re.DOTALL|re.IGNORECASE).search(html)
            if m:
                frame_src = urljoin(url, common.unescape(m.groups()[0].strip()))
                # force to check redirect here
                if kwargs.has_key('num_redirects'): kwargs['num_redirects'] = 1
                html = self.get(frame_src, **kwargs)
                if html:
                    # remove google translations content
                    return re.compile(r'<span class="google-src-text".+?</span>', re.DOTALL|re.IGNORECASE).sub('', html)

    def whois(self, url, timeout=10):
        """Query whois info

        Tries the cache, then an online whois service, then the local `whois'
        command (killed after `timeout' seconds). Results containing '@' are
        considered valid and cached.
        """
        domain = common.get_domain(url)
        if domain:
            text = ''
            key = 'whois_%s' % domain
            try:
                if self.cache:
                    text = self.cache[key]
                else:
                    raise KeyError()
            except KeyError:
                # try online whois app
                query_url = 'http://whois.chinaz.com/%s' % domain
                html = self.get(query_url)
                match = re.compile("<script src='(request.aspx\?domain=.*?)'></script>").search(html)
                if match:
                    script_url = urljoin(query_url, match.groups()[0])
                    text = self.get(script_url, read_cache=False)
                if '@' not in text:
                    # online lookup gave no usable result - discard it
                    if self.cache:
                        del self.cache[query_url]
                    # failed, so try local whois command
                    r = subprocess.Popen(['whois', domain], stdout=subprocess.PIPE)
                    start = time.time()
                    while r.poll() is None:
                        time.sleep(0.5)
                        if time.time() - start > timeout:
                            try:
                                r.kill()
                            except Exception, e:
                                pass
                            break
                    if r.poll() != 1:
                        text = r.communicate()[0]
                if '@' in text:
                    if self.cache:
                        self.cache[key] = text
            return text

    def save_as(self, url, filename=None, save_dir='images'):
        """Download url and save into disk.

        Returns the saved path, or None when nothing was downloaded. When no
        filename is given one is derived from the url hash and extension.
        """
        if url:
            _bytes = self.get(url, num_redirects=0)
            if _bytes:
                if not os.path.exists(save_dir):
                    os.makedirs(save_dir)
                save_path = os.path.join(save_dir, filename or '%s.%s' % (hashlib.md5(url).hexdigest(), common.get_extension(url)))
                open(save_path, 'wb').write(_bytes)
                return save_path
def threaded_get(url=None, urls=None, num_threads=10, cb=None, post=False, depth=False, **kwargs):
    """Download these urls in parallel

    `url[s]' are the webpages to download
    `num_threads' determines the number of threads to download urls with
    `cb' is called after each download with the HTML of the download
        the arguments are the url and downloaded html
        whatever URLs are returned are added to the crawl queue
    `post' is whether to use POST instead of default GET
    `depth' sets to traverse depth first rather than the default breadth first
    Remaining kwargs are passed through to the Download constructor and get().
    """
    class DownloadThread(Thread):
        """Download data
        """
        # shared counter of urls currently being processed, so idle threads
        # keep waiting while any peer might still enqueue new urls
        processing = deque()
        def __init__(self):
            Thread.__init__(self)
        def run(self):
            D = Download(**kwargs)
            while urls or DownloadThread.processing:
                # keep track that are processing url
                DownloadThread.processing.append(1)
                try:
                    # NOTE(review): popleft() (FIFO) when depth=True looks
                    # inverted relative to the docstring, which says depth
                    # first (LIFO) - confirm intended order
                    if depth:
                        url = urls.popleft()
                    else:
                        url = urls.pop()
                except IndexError:
                    # currently no urls to process
                    DownloadThread.processing.popleft()
                    # so check again later
                    time.sleep(SLEEP_TIME)
                else:
                    # download this url
                    try:
                        # NOTE(review): Download defines no post() method, so
                        # post=True raises AttributeError - confirm intended API
                        html = (D.post if post else D.get)(url, **kwargs)
                        if cb:
                            # use callback to process downloaded HTML
                            urls.extend(cb(D, url, html) or [])
                    finally:
                        # have finished processing
                        # make sure this is called even on exception
                        DownloadThread.processing.popleft()
    # put urls into thread safe queue
    urls = urls or []
    if url: urls.append(url)
    urls = deque(urls)
    threads = [DownloadThread() for i in range(num_threads)]
    for thread in threads:
        thread.start()
    # wait for threads to finish
    for thread in threads:
        thread.join()
class CrawlerCallback:
    """Example callback to crawl the website

    Instances are callable: calling with (D, url, html) scrapes the page then
    returns the list of new links to crawl.
    """
    # track depth of found URLs; class-level, so shared by all instances
    found = adt.HashDict(int)

    def __init__(self, output_file=None, max_links=100, max_depth=1, allowed_urls='', banned_urls='^$', robots=None, crawl_existing=True):
        """
        `output_file' is where to save scraped data
        `max_links' is the maximum number of links to follow per page
        `max_depth' is the maximum depth to follow links into website (use None for no limit)
        `allowed_urls' is a regex for allowed urls, defaults to all urls
        `banned_urls' is a regex for banned urls, defaults to no urls
        `robots': RobotFileParser object to determine which urls allowed to crawl
        `crawl_existing' sets whether to crawl content already downloaded previously in the cache
        """
        if output_file:
            self.writer = common.UnicodeWriter(output_file)
        else:
            self.writer = None
        self.max_links = max_links
        self.max_depth = max_depth
        self.allowed_urls = re.compile(allowed_urls)
        self.banned_urls = re.compile(banned_urls)
        self.robots = robots
        self.crawl_existing = crawl_existing

    def __call__(self, D, url, html):
        """Scrape HTML and return the new links to crawl
        """
        self.scrape(D, url, html)
        return self.crawl(D, url, html)

    def scrape(self, D, url, html):
        """Reimplement this in subclass to scrape data
        """
        pass

    def crawl(self, D, url, html):
        """Crawl website html and return list of URLs crawled
        """
        def normalize(link):
            """Normalize the link to avoid duplicates
            """
            if '#' in link:
                # remove internal links to avoid duplicates
                link = link[:link.index('#')]
            link = common.unescape(link) # remove &amp; from link
            return urljoin(url, link) # support relative links

        def valid(link):
            """Check if should crawl this link
            """
            # check if a media file
            if common.get_extension(link) not in common.MEDIA_EXTENSIONS:
                # check if a proper HTTP link
                if link.lower().startswith('http'):
                    # only crawl within website
                    if common.same_domain(domain, link):
                        # passes regex
                        if self.allowed_urls.match(link) and not self.banned_urls.match(link):
                            # not blocked by robots.txt
                            if not self.robots or self.robots.can_fetch(settings.user_agent, link):
                                # allowed to recrawl
                                if self.crawl_existing or (D.cache and link not in D.cache):
                                    return True
            return False

        domain = common.get_domain(url)
        depth = CrawlerCallback.found[url] # depth at which this page was discovered
        outstanding = []
        if depth != self.max_depth:
            # extract links to continue crawling
            links_re = re.compile('<a[^>]+href=["\'](.*?)["\']', re.IGNORECASE)
            for link in links_re.findall(html):
                link = normalize(link)
                if link not in CrawlerCallback.found:
                    CrawlerCallback.found[link] = depth + 1
                    if valid(link):
                        # is a new link
                        outstanding.append(link)
                        if len(outstanding) == self.max_links:
                            break
        return outstanding
| Python |
# -*- coding: utf-8 -*-
__doc__ = 'Common web scraping related functions'
import os
import re
import sys
import csv
csv.field_size_limit(sys.maxint)
import time
import glob
import string
import urllib
import string
import urllib2
import urlparse
import cookielib
import itertools
import htmlentitydefs
import logging
import threading
from datetime import datetime, timedelta
import adt
import settings
class WebScrapingError(Exception):
    """Base exception raised for errors within the webscraping package."""
    pass
# known media file extensions - used by crawlers to skip non-HTML resources
MEDIA_EXTENSIONS = ['ai', 'aif', 'aifc', 'aiff', 'asc', 'au', 'avi', 'bcpio', 'bin', 'c', 'cc', 'ccad', 'cdf', 'class', 'cpio', 'cpt', 'csh', 'css', 'csv', 'dcr', 'dir', 'dms', 'doc', 'drw', 'dvi', 'dwg', 'dxf', 'dxr', 'eps', 'etx', 'exe', 'ez', 'f', 'f90', 'fli', 'flv', 'gif', 'gtar', 'gz', 'h', 'hdf', 'hh', 'hqx', 'ice', 'ico', 'ief', 'iges', 'igs', 'ips', 'ipx', 'jpe', 'jpeg', 'jpg', 'js', 'kar', 'latex', 'lha', 'lsp', 'lzh', 'm', 'man', 'me', 'mesh', 'mid', 'midi', 'mif', 'mime', 'mov', 'movie', 'mp2', 'mp3', 'mpe', 'mpeg', 'mpg', 'mpga', 'ms', 'msh', 'nc', 'oda', 'pbm', 'pdb', 'pdf', 'pgm', 'pgn', 'png', 'pnm', 'pot', 'ppm', 'pps', 'ppt', 'ppz', 'pre', 'prt', 'ps', 'qt', 'ra', 'ram', 'ras', 'rgb', 'rm', 'roff', 'rpm', 'rtf', 'rtx', 'scm', 'set', 'sgm', 'sgml', 'sh', 'shar', 'silo', 'sit', 'skd', 'skm', 'skp', 'skt', 'smi', 'smil', 'snd', 'sol', 'spl', 'src', 'step', 'stl', 'stp', 'sv4cpio', 'sv4crc', 'swf', 't', 'tar', 'tcl', 'tex', 'texi', 'tif', 'tiff', 'tr', 'tsi', 'tsp', 'tsv', 'txt', 'unv', 'ustar', 'vcd', 'vda', 'viv', 'vivo', 'vrml', 'w2p', 'wav', 'wmv', 'wrl', 'xbm', 'xlc', 'xll', 'xlm', 'xls', 'xlw', 'xml', 'xpm', 'xsl', 'xwd', 'xyz', 'zip']
# tags that do not contain content (void elements), so are never closed
EMPTY_TAGS = 'br', 'hr', 'meta', 'link', 'base', 'img', 'embed', 'param', 'area', 'col', 'input'
def to_ascii(html):
    """Return only the 7-bit ascii characters of html."""
    ascii_chars = [c for c in html if ord(c) < 128]
    return ''.join(ascii_chars)
def to_int(s):
    """Return the integer part of the number embedded in this string
    >>> to_int('90')
    90
    >>> to_int('-90.2432')
    -90
    >>> to_int('a90a')
    90
    >>> to_int('a')
    0
    """
    value = to_float(s)
    return int(value)
def to_float(s):
    """Return the float formed from the digit, '.' and '-' characters of this
    string (0.0 when none are present)."""
    wanted = set(string.digits + '.-')
    filtered = ''.join(c for c in s if c in wanted)
    return float(filtered or 0)
def to_unicode(obj, encoding=settings.default_encoding):
    """Decode obj to unicode using encoding; non-byte-string objects pass
    through unchanged."""
    if isinstance(obj, basestring) and not isinstance(obj, unicode):
        obj = obj.decode(encoding, 'ignore')
    return obj
def html_to_unicode(html, charset=settings.default_encoding):
    """Convert html to unicode, using the charset declared in its meta
    Content-Type header when present, else the passed default."""
    declared = re.search(r'''<meta\s+http-equiv=["']Content-Type["']\s+content=["'][^"']*?charset=([a-zA-z\d\-]+)["']''', html, re.IGNORECASE)
    if declared:
        charset = declared.group(1).strip().lower()
    return to_unicode(html, charset)
def is_html(html):
    """Return whether the content looks like HTML (mentions html/head/body)."""
    try:
        return re.search('html|head|body', html) is not None
    except TypeError:
        # not string-like (e.g. None or bytes mix)
        return False
def unique(l):
    """Remove duplicates from list, while maintaining order
    (a list scan is used rather than a set so unhashable elements work too)
    >>> unique([3,6,4,4,6])
    [3, 6, 4]
    >>> unique([])
    []
    >>> unique([3,6,4])
    [3, 6, 4]
    """
    seen = []
    for e in l:
        if e in seen:
            continue
        seen.append(e)
    return seen
def nth(l, i, default=''):
    """Return item i from sequence l, or default when i is out of range.
    Negative indices count from the end as usual."""
    try:
        return l[i]
    except IndexError:
        pass
    return default
def first(l, default=''):
    """Return first element from list or default value if out of range
    >>> first([1,2,3])
    1
    >>> first([], None)
    """
    return nth(l, 0, default)
def last(l, default=''):
    """Return the final element of the list, or default when it is empty."""
    return nth(l, -1, default)
def pad(l, size, default=None, end=True):
    """Adjust the list in place to exactly the given size and return it.

    Too-short lists are padded with `default'; too-long lists are truncated.
    When `end' is True the tail is manipulated, otherwise the head.
    >>> pad(range(5), 5)
    [0, 1, 2, 3, 4]
    >>> pad(range(5), 3)
    [0, 1, 2]
    >>> pad(range(5), 7, -1)
    [0, 1, 2, 3, 4, -1, -1]
    >>> pad(range(5), 7, end=False)
    [None, None, 0, 1, 2, 3, 4]
    """
    shortfall = size - len(l)
    if shortfall > 0:
        padding = [default] * shortfall
        if end:
            l.extend(padding)
        else:
            l[:0] = padding
    elif shortfall < 0:
        if end:
            del l[size:]
        else:
            del l[:len(l) - size]
    return l
def remove_tags(html, keep_children=True):
    """Strip HTML markup, returning only text.

    When *keep_children* is true, text inside child tags survives;
    otherwise the content of one level of tag pairs is dropped too.

    >>> remove_tags('hello <b>world</b>!')
    'hello world!'
    >>> remove_tags('hello <b>world</b>!', False)
    'hello !'
    """
    # Drop void elements first so they never look like open/close pairs.
    void_pattern = '<(%s)[^>]*>' % '|'.join(EMPTY_TAGS)
    html = re.sub(void_pattern, '', html)
    if not keep_children:
        # XXX does not work for multiple nested tags
        pair_re = re.compile('<.*?>(.*?)</.*?>', re.DOTALL)
        html = pair_re.sub('', html)
    tag_re = re.compile('<[^<]*?>')
    return tag_re.sub('', html)
def unescape(text, encoding=settings.default_encoding, keep_unicode=False):
    """Interpret escape characters

    Decodes HTML entities (named and numeric) and percent-escapes, then
    optionally re-encodes to bytes; for utf-8 output, a table of common
    cp1252-style byte pairs is replaced with plain ASCII equivalents.

    >>> unescape('<hello &%20world>\xc2\x85')
    '<hello & world>...'
    """
    def fixup(m):
        # Resolve a single entity match to its character, if possible.
        text = m.group(0)
        if text[:2] == '&#':
            # character reference
            try:
                if text[:3] == '&#x':
                    return unichr(int(text[3:-1], 16))
                else:
                    return unichr(int(text[2:-1]))
            except ValueError:
                pass
        else:
            # named entity
            try:
                text = unichr(htmlentitydefs.name2codepoint[text[1:-1]])
            except KeyError:
                pass
        return text # leave as is
    # Work in unicode so entity replacement produces sane results.
    try:
        text = to_unicode(text, encoding)
    except UnicodeError:
        pass
    #text = text.replace(' ', ' ').replace('&', '&').replace('<', '<').replace('>', '>')
    text = re.sub('&#?\w+;', fixup, text)
    text = urllib.unquote(text)
    if keep_unicode:
        return text
    # Caller wants bytes: encode, dropping unrepresentable characters.
    try:
        text = text.encode(encoding, 'ignore')
    except UnicodeError:
        pass
    if encoding != 'utf-8':
        return text
    # remove annoying characters
    chars = {
        '\xc2\x82' : ',', # High code comma
        '\xc2\x84' : ',,', # High code double comma
        '\xc2\x85' : '...', # Tripple dot
        '\xc2\x88' : '^', # High carat
        '\xc2\x91' : '\x27', # Forward single quote
        '\xc2\x92' : '\x27', # Reverse single quote
        '\xc2\x93' : '\x22', # Forward double quote
        '\xc2\x94' : '\x22', # Reverse double quote
        '\xc2\x95' : ' ',
        '\xc2\x96' : '-', # High hyphen
        '\xc2\x97' : '--', # Double hyphen
        '\xc2\x99' : ' ',
        '\xc2\xa0' : ' ',
        '\xc2\xa6' : '|', # Split vertical bar
        '\xc2\xab' : '<<', # Double less than
        '\xc2\xae' : '®',
        '\xc2\xbb' : '>>', # Double greater than
        '\xc2\xbc' : '1/4', # one quarter
        '\xc2\xbd' : '1/2', # one half
        '\xc2\xbe' : '3/4', # three quarters
        '\xca\xbf' : '\x27', # c-single quote
        '\xcc\xa8' : '', # modifier - under curve
        '\xcc\xb1' : '' # modifier - under line
        }
    def replace_chars(match):
        char = match.group(0)
        return chars[char]
    return re.sub('(' + '|'.join(chars.keys()) + ')', replace_chars, text)
def normalize(s, encoding=settings.default_encoding):
    """Return normalized string

    Strips tags, decodes entities (unicode input stays unicode), collapses
    runs of whitespace to single spaces, and trims the ends.

    >>> normalize('''<span>Tel.: 029 - 12345678 </span>''')
    'Tel.: 029 - 12345678'
    """
    return re.sub('\s+', ' ', unescape(remove_tags(s), encoding=encoding, keep_unicode=isinstance(s, unicode))).strip()
def safe(s):
    """Return safe version of string for URLs

    Keeps only ASCII letters, digits, '-', '_' and spaces (spaces become
    hyphens); everything else is dropped.

    >>> safe('U@#$_#^&*-2')
    'U_-2'
    """
    # string.ascii_letters, not the old string.letters: the latter is
    # locale-dependent (and gone in Python 3), so the set of "safe" URL
    # characters could silently vary with the active locale.
    safe_chars = string.ascii_letters + string.digits + '-_ '
    return ''.join(c for c in s if c in safe_chars).replace(' ', '-')
def pretty(s):
    """Return pretty version of string for display

    Title-cases the input and turns '-'/'_' separators into spaces.

    >>> pretty('hello_world')
    'Hello World'
    """
    # Title-case first (so separators still delimit words), then swap
    # both separator characters for spaces.
    return s.title().replace('-', ' ').replace('_', ' ')
def pretty_paragraph(s):
    """Return pretty version of paragraph for display

    Line-break tags become newlines, all other markup and entities are
    stripped, and whitespace runs collapse (newlines win over spaces).
    """
    # flags must be passed by keyword: re.sub's 4th positional parameter is
    # 'count', so the original call passed re.IGNORECASE (== 2) as a
    # replacement limit and never actually enabled case-insensitivity.
    s = re.sub('<(br|hr|/li)[^>]*>', '\n', s, flags=re.IGNORECASE)
    s = unescape(remove_tags(s))
    def fixup(m):
        # A whitespace run containing any newline collapses to '\n',
        # otherwise to a single space.
        text = m.group(0)
        if '\r' in text or '\n' in text: return '\n'
        return ' '
    return re.sub('\s+', fixup, s).strip()
def get_extension(url):
    """Return extension from given URL

    >>> get_extension('hello_world.JPG')
    'jpg'
    >>> get_extension('http://www.google-analytics.com/__utm.gif?utmwv=1.3&utmn=420639071')
    'gif'
    """
    # Use only the path component so query strings never pollute the result.
    path = urlparse.urlsplit(url).path
    ext = os.path.splitext(path)[-1]
    return ext.lower().replace('.', '')
def get_domain(url):
    """Extract the domain from the given URL

    >>> get_domain('http://www.google.com.au/tos.html')
    'google.com.au'
    >>> get_domain('www.google.com')
    'google.com'
    """
    ip_match = re.compile(r"^.*://(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})").search(url)
    if ip_match:
        # Bare IP address: return it unchanged.
        return ip_match.groups()[0]
    suffixes = 'ac', 'ad', 'ae', 'aero', 'af', 'ag', 'ai', 'al', 'am', 'an', 'ao', 'aq', 'ar', 'arpa', 'as', 'asia', 'at', 'au', 'aw', 'ax', 'az', 'ba', 'bb', 'bd', 'be', 'bf', 'bg', 'bh', 'bi', 'biz', 'bj', 'bm', 'bn', 'bo', 'br', 'bs', 'bt', 'bv', 'bw', 'by', 'bz', 'ca', 'cat', 'cc', 'cd', 'cf', 'cg', 'ch', 'ci', 'ck', 'cl', 'cm', 'cn', 'co', 'com', 'coop', 'cr', 'cu', 'cv', 'cx', 'cy', 'cz', 'de', 'dj', 'dk', 'dm', 'do', 'dz', 'ec', 'edu', 'ee', 'eg', 'er', 'es', 'et', 'eu', 'fi', 'fj', 'fk', 'fm', 'fo', 'fr', 'ga', 'gb', 'gd', 'ge', 'gf', 'gg', 'gh', 'gi', 'gl', 'gm', 'gn', 'gov', 'gp', 'gq', 'gr', 'gs', 'gt', 'gu', 'gw', 'gy', 'hk', 'hm', 'hn', 'hr', 'ht', 'hu', 'id', 'ie', 'il', 'im', 'in', 'info', 'int', 'io', 'iq', 'ir', 'is', 'it', 'je', 'jm', 'jo', 'jobs', 'jp', 'ke', 'kg', 'kh', 'ki', 'km', 'kn', 'kp', 'kr', 'kw', 'ky', 'kz', 'la', 'lb', 'lc', 'li', 'lk', 'lr', 'ls', 'lt', 'lu', 'lv', 'ly', 'ma', 'mc', 'md', 'me', 'mg', 'mh', 'mil', 'mk', 'ml', 'mm', 'mn', 'mo', 'mobi', 'mp', 'mq', 'mr', 'ms', 'mt', 'mu', 'mv', 'mw', 'mx', 'my', 'mz', 'na', 'name', 'nc', 'ne', 'net', 'nf', 'ng', 'ni', 'nl', 'no', 'np', 'nr', 'nu', 'nz', 'om', 'org', 'pa', 'pe', 'pf', 'pg', 'ph', 'pk', 'pl', 'pm', 'pn', 'pr', 'pro', 'ps', 'pt', 'pw', 'py', 'qa', 're', 'ro', 'rs', 'ru', 'rw', 'sa', 'sb', 'sc', 'sd', 'se', 'sg', 'sh', 'si', 'sj', 'sk', 'sl', 'sm', 'sn', 'so', 'sr', 'st', 'su', 'sv', 'sy', 'sz', 'tc', 'td', 'tel', 'tf', 'tg', 'th', 'tj', 'tk', 'tl', 'tm', 'tn', 'to', 'tp', 'tr', 'tt', 'tv', 'tw', 'tz', 'ua', 'ug', 'uk', 'us', 'uy', 'uz', 'va', 'vc', 've', 'vg', 'vi', 'vn', 'vu', 'wf', 'ws', 'xn', 'ye', 'yt', 'za', 'zm', 'zw'
    # Strip the scheme and anything after the first slash, lowercase the host.
    host = re.sub('^.*://', '', url).partition('/')[0].lower()
    # Walk the labels: known public suffixes accumulate, any other label
    # restarts the collected domain at itself.
    labels = []
    for label in host.split('.'):
        if label in suffixes:
            labels.append(label)
        else:
            labels = [label]
    return '.'.join(labels)
def same_domain(url1, url2):
    """Return whether URLs belong to same domain

    >>> same_domain('http://www.google.com.au', 'code.google.com')
    True
    >>> same_domain('http://www.facebook.com', 'http://www.myspace.com')
    False
    """
    domain1 = get_domain(url1)
    domain2 = get_domain(url2)
    if domain1 and domain2:
        return domain1 in domain2 or domain2 in domain1
    # Preserve the original short-circuit result (a falsy string) when
    # either domain could not be extracted.
    return domain1 and domain2
def pretty_duration(dt):
    """Return english description of this time difference

    Accepts a timedelta, or a datetime (measured against now); anything
    else yields ''.

    >>> from datetime import timedelta
    >>> pretty_duration(timedelta(seconds=1))
    '1 second'
    >>> pretty_duration(timedelta(hours=1))
    '1 hour'
    >>> pretty_duration(timedelta(days=2))
    '2 days'
    """
    if isinstance(dt, datetime):
        # convert datetime to timedelta relative to now
        dt = datetime.now() - dt
    if not isinstance(dt, timedelta):
        return ''
    days = dt.days
    secs = dt.seconds
    # Day-scale buckets, coarsest first.
    if days >= 2*365:
        return '%d years' % (days // 365)
    if days >= 365:
        return '1 year'
    if days >= 60:
        return '%d months' % (days // 30)
    if days > 21:
        return '1 month'
    if days >= 14:
        return '%d weeks' % (days // 7)
    if days >= 7:
        return '1 week'
    if days > 1:
        return '%d days' % days
    if days == 1:
        return '1 day'
    # Sub-day buckets use the seconds component.
    if secs >= 2*60*60:
        return '%d hours' % (secs // 3600)
    if secs >= 60*60:
        return '1 hour'
    if secs >= 2*60:
        return '%d minutes' % (secs // 60)
    if secs >= 60:
        return '1 minute'
    if secs > 1:
        return '%d seconds' % secs
    if secs == 1:
        return '1 second'
    return ''
def read_list(file):
    """Return file as list if exists

    Missing files are logged at debug level and yield an empty list.
    """
    if not os.path.exists(file):
        logger.debug('%s not found' % file)
        return []
    return open(file).read().splitlines()
class UnicodeWriter(object):
    """A CSV writer that produces Excel-compatibly CSV files from unicode data.
    file can either be a filename or a file object
    >>> from StringIO import StringIO
    >>> fp = StringIO()
    >>> writer = UnicodeWriter(fp, quoting=csv.QUOTE_MINIMAL)
    >>> writer.writerow(['a', '1'])
    >>> writer.flush()
    >>> fp.seek(0)
    >>> fp.read().strip()
    'a,1'
    """
    def __init__(self, file, encoding=settings.default_encoding, mode='wb', unique=False, quoting=csv.QUOTE_ALL, **argv):
        # encoding: byte encoding applied to every written cell
        # unique: when True, rows already present in the file are skipped
        self.encoding = encoding
        self.unique = unique
        if hasattr(file, 'write'):
            self.fp = file
        else:
            self.fp = open(file, mode)
        if self.unique:
            self.rows = adt.HashDict() # cache the rows that have already been written
            # NOTE(review): re-reading via self.fp.name assumes a named,
            # already-existing file - this breaks for StringIO or a fresh
            # 'wb' file; confirm intended usage.
            for row in csv.reader(open(self.fp.name)):
                self.rows[str(row)] = True
        self.writer = csv.writer(self.fp, quoting=quoting, **argv)
    def cell(self, s):
        # Coerce one value into an encoded byte string for csv.writer:
        # unicode is encoded, None becomes '', other types go through str().
        if isinstance(s, basestring):
            if isinstance(s, unicode):
                s = s.encode(self.encoding, 'ignore')
            s = unescape(s, self.encoding)
        elif s is None:
            s = ''
        else:
            s = str(s)
        return s
    def writerow(self, row):
        # Write one row; in unique mode, silently drop duplicates.
        row = [self.cell(col) for col in row]
        if self.unique:
            if str(row) not in self.rows:
                self.writer.writerow(row)
                self.rows[str(row)] = True
        else:
            self.writer.writerow(row)
    def writerows(self, rows):
        # Write many rows through writerow so unique filtering still applies.
        for row in rows:
            self.writerow(row)
    def flush(self):
        # Flush the underlying file object.
        self.fp.flush()
    def close(self):
        # Close the underlying file object.
        self.fp.close()
def firefox_cookie(file=None, tmp_sqlite_file='cookies.sqlite', tmp_cookie_file='cookies.txt'):
    """Create a cookie jar from this FireFox 3 sqlite cookie database

    Copies the sqlite database to tmp_sqlite_file (sidestepping Firefox's
    file lock), dumps the cookies to a Netscape-format tmp_cookie_file,
    and loads that into a MozillaCookieJar.

    >>> cj = firefox_cookie()
    >>> opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
    >>> url = 'http://code.google.com/p/webscraping'
    >>> html = opener.open(url).read()
    """
    # XXX remove temporary files
    if file is None:
        # Default profile location on Linux; other platforms must pass file.
        try:
            file = glob.glob(os.path.expanduser('~/.mozilla/firefox/*.default/cookies.sqlite'))[0]
        except IndexError:
            raise WebScrapingError('Can not find filefox cookie file')
    import sqlite3
    # copy firefox cookie file locally to avoid locking problems
    open(tmp_sqlite_file, 'w').write(open(file).read())
    con = sqlite3.connect(tmp_sqlite_file)
    cur = con.cursor()
    cur.execute('select host, path, isSecure, expiry, name, value from moz_cookies')
    # create standard cookies file that can be interpreted by cookie jar
    fp = open(tmp_cookie_file, 'w')
    fp.write('# Netscape HTTP Cookie File\n')
    fp.write('# http://www.netscape.com/newsref/std/cookie_spec.html\n')
    fp.write('# This is a generated file! Do not edit.\n')
    ftstr = ['FALSE', 'TRUE']
    for item in cur.fetchall():
        # Netscape fields: domain, initial-dot flag, path, secure, expiry, name, value
        row = '%s\t%s\t%s\t%s\t%s\t%s\t%s\n' % (item[0], ftstr[item[0].startswith('.')], item[1], ftstr[item[2]], item[3], item[4], item[5])
        fp.write(row)
        #print row
    fp.close()
    cookie_jar = cookielib.MozillaCookieJar()
    cookie_jar.load(tmp_cookie_file)
    return cookie_jar
def start_threads(fun, num_threads=20, args=(), wait=True):
    """Start up threads

    Creates *num_threads* threads all running fun(*args); when *wait* is
    true, blocks until every thread has finished.
    """
    # Build all threads first, then start them, so creation cost does not
    # skew the launch of earlier workers.
    threads = []
    for _ in range(num_threads):
        threads.append(threading.Thread(target=fun, args=args))
    for worker in threads:
        worker.start()
    if wait:
        for worker in threads:
            worker.join()
def get_logger(output_file=settings.log_file, stdout=True, level=settings.log_level):
    """Create a logger instance

    @param output_file: log file path (also used as the logger's name)
    @param stdout: whether to echo records to the console as well
    @param level: the logging level to set on the logger
    """
    logger = logging.getLogger(output_file)
    # avoid duplicate handlers: repeated calls with the same output_file
    # return the same logger instance, already configured
    if not logger.handlers:
        try:
            file_handler = logging.FileHandler(output_file)
        except IOError:
            pass # can not write file
        else:
            file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
            logger.addHandler(file_handler)
        if stdout:
            logger.addHandler(logging.StreamHandler())
        logger.setLevel(level)
    return logger
# Module-level logger shared by the rest of this library, built with defaults.
logger = get_logger()
| Python |
__doc__ = """
Description: webscraping library
Website: http://code.google.com/p/webscraping/
License: LGPL
"""
if __name__ == '__main__':
    import doctest
    # Run the doctests of every submodule and print per-module results.
    for name in ['adt', 'alg', 'common', 'download', 'pdict', 'settings', 'webkit', 'xpath']:
        module = __import__(name)
        print name
        print doctest.testmod(module)
| Python |
__doc__ = """
Description: High level abstract datatypes
Website: http://code.google.com/p/webscraping/
License: LGPL
"""
from datetime import datetime, timedelta
from collections import defaultdict, deque
try:
    import hashlib
except ImportError:
    # Python < 2.5: expose the legacy md5 module under the same name.
    import md5 as hashlib
class Bag(dict):
    """Dictionary object with attribute like access

    Reads of missing attributes yield None rather than AttributeError;
    attribute writes store into the dict.
    """
    def __init__(self, *args, **kwargs):
        super(Bag, self).__init__(*args, **kwargs)
    def __getattr__(self, name):
        # Missing keys fall back to None (dict.get semantics).
        return dict.get(self, name)
    def __setattr__(self, name, value):
        dict.__setitem__(self, name, value)
class HashDict:
    """Mapping that stores only a hash of each key, for keys carrying large
    amounts of data where the original key value is never needed back.
    """
    def __init__(self, default_factory=str):
        self.d = defaultdict(default_factory)
    def __len__(self):
        return len(self.d)
    def __contains__(self, name):
        return self.get_hash(name) in self.d
    def __getitem__(self, name):
        return self.d[self.get_hash(name)]
    def __setitem__(self, name, value):
        self.d[self.get_hash(name)] = value
    def get(self, name, default=None):
        return self.d.get(self.get_hash(name), default)
    def get_hash(self, value):
        # Plain hash() keeps lookups cheap; a digest could be swapped in.
        return hash(value)
        #return hashlib.md5(value).hexdigest()
| Python |
'''
Module which brings history information about files from Mercurial.
@author: Rodrigo Damazio
'''
import re
import subprocess
REVISION_REGEX = re.compile(r'(?P<hash>[0-9a-f]{12}):.*')
def _GetOutputLines(args):
'''
Runs an external process and returns its output as a list of lines.
@param args: the arguments to run
'''
process = subprocess.Popen(args,
stdout=subprocess.PIPE,
universal_newlines = True,
shell = False)
output = process.communicate()[0]
return output.splitlines()
def FillMercurialRevisions(filename, parsed_file):
  '''
  Fills the revs attribute of all strings in the given parsed file with
  a list of revisions that touched the lines corresponding to that string.

  @param filename: the name of the file to get history for
  @param parsed_file: the parsed file to modify
  @raise ValueError: if hg annotate produces an unparseable line
  '''
  # Take output of hg annotate to get revision of each line
  output_lines = _GetOutputLines(['hg', 'annotate', '-c', filename])
  # Create a map of line -> revision (key is list index, line 0 doesn't exist)
  line_revs = ['dummy']
  for line in output_lines:
    rev_match = REVISION_REGEX.match(line)
    if not rev_match:
      # Raising a bare string is a TypeError on Python >= 2.6; raise a real
      # exception so callers see the message instead of a crash.
      raise ValueError('Unexpected line of output from hg: %s' % line)
    rev_hash = rev_match.group('hash')
    line_revs.append(rev_hash)
  # 'string_info' instead of 'str': don't shadow the builtin.
  for string_info in parsed_file.itervalues():
    # Get the lines that correspond to each string
    start_line = string_info['startLine']
    end_line = string_info['endLine']
    # Get the revisions that touched those lines
    revs = []
    for line_number in range(start_line, end_line + 1):
      revs.append(line_revs[line_number])
    # Merge with any revisions that were already there
    # (for explict revision specification)
    if 'revs' in string_info:
      revs += string_info['revs']
    # Assign the revisions to the string
    string_info['revs'] = frozenset(revs)
def DoesRevisionSuperceed(filename, rev1, rev2):
  '''
  Tells whether a revision superceeds another.

  This essentially means that the older revision is an ancestor of the newer
  one.
  This also returns True if the two revisions are the same.

  @param filename: the file whose history is consulted
  @param rev1: the revision that may be superceeding the other
  @param rev2: the revision that may be superceeded
  @return: True if rev1 superceeds rev2 or they're the same
  '''
  if rev1 == rev2:
    return True
  # Ask hg for every ancestor of rev1 that touched filename (one short
  # hash per line); rev2 is superceeded iff it appears among them.
  args = ['hg', 'log', '-r', 'ancestors(%s)' % rev1, '--template', '{node|short}\n', filename]
  output_lines = _GetOutputLines(args)
  return rev2 in output_lines
def NewestRevision(filename, rev1, rev2):
  '''
  Returns which of two revisions is closest to the head of the repository.

  If none of them is the ancestor of the other, then we return either one.

  @param filename: the file whose history is consulted
  @param rev1: the first revision
  @param rev2: the second revision
  '''
  # rev1 wins when it supersedes (or equals) rev2; otherwise rev2.
  return rev1 if DoesRevisionSuperceed(filename, rev1, rev2) else rev2
#!/usr/bin/python
'''
Entry point for My Tracks i18n tool.
@author: Rodrigo Damazio
'''
import mytracks.files
import mytracks.translate
import mytracks.validate
import sys
def Usage():
  '''
  Prints command-line usage information and exits with status 1.
  '''
  print 'Usage: %s <command> [<language> ...]\n' % sys.argv[0]
  print 'Commands are:'
  print ' cleanup'
  print ' translate'
  print ' validate'
  sys.exit(1)
def Translate(languages):
  '''
  Asks the user to interactively translate any missing or outdated strings from
  the files for the given languages.

  @param languages: the languages to translate
  '''
  validator = mytracks.validate.Validator(languages)
  validator.Validate()
  missing = validator.missing_in_lang()
  outdated = validator.outdated_in_lang()
  for lang in languages:
    # The validator maps only contain languages that actually have problems
    # (and never 'en'), and missing entries are frozensets - so use .get()
    # with a default and build proper lists. The original
    # 'missing[lang] + outdated[lang]' raised KeyError for clean languages
    # and TypeError for frozenset + list.
    untranslated = list(missing.get(lang, [])) + list(outdated.get(lang, []))
    if len(untranslated) == 0:
      continue
    translator = mytracks.translate.Translator(lang)
    translator.Translate(untranslated)
def Validate(languages):
  '''
  Computes and displays errors in the string files for the given languages.

  @param languages: the languages to compute for
  @return: the total number of errors found (0 when all files are OK)
  '''
  validator = mytracks.validate.Validator(languages)
  validator.Validate()
  error_count = 0
  if (validator.valid()):
    print 'All files OK'
  else:
    # Each category contributes one error per affected string name.
    for lang, missing in validator.missing_in_master().iteritems():
      print 'Missing in master, present in %s: %s:' % (lang, str(missing))
      error_count = error_count + len(missing)
    for lang, missing in validator.missing_in_lang().iteritems():
      print 'Missing in %s, present in master: %s:' % (lang, str(missing))
      error_count = error_count + len(missing)
    for lang, outdated in validator.outdated_in_lang().iteritems():
      print 'Outdated in %s: %s:' % (lang, str(outdated))
      error_count = error_count + len(outdated)
  return error_count
if __name__ == '__main__':
argv = sys.argv
argc = len(argv)
if argc < 2:
Usage()
languages = mytracks.files.GetAllLanguageFiles()
if argc == 3:
langs = set(argv[2:])
if not langs.issubset(languages):
raise 'Language(s) not found'
# Filter just to the languages specified
languages = dict((lang, lang_file)
for lang, lang_file in languages.iteritems()
if lang in langs or lang == 'en' )
cmd = argv[1]
if cmd == 'translate':
Translate(languages)
elif cmd == 'validate':
error_count = Validate(languages)
else:
Usage()
error_count = 0
print '%d errors found.' % error_count
| Python |
'''
Module which prompts the user for translations and saves them.
TODO: implement
@author: Rodrigo Damazio
'''
class Translator(object):
    '''
    Interactive translator for a single language.

    TODO: implement - Translate currently only echoes the names it is given.
    '''
    def __init__(self, language):
        '''
        Constructor

        @param language: the language code this translator handles
        '''
        self._language = language
    def Translate(self, string_names):
        # Placeholder: a real implementation should prompt the user for each
        # string and save the result; for now just print the names.
        print string_names
'''
Module which compares languague files to the master file and detects
issues.
@author: Rodrigo Damazio
'''
import os
from mytracks.parser import StringsParser
import mytracks.history
class Validator(object):
  '''Compares each language's strings.xml against the master ('en') file and
  records keys that are missing on either side or outdated in a language.
  '''
  def __init__(self, languages):
    '''
    Builds a strings file validator.

    Params:
    @param languages: a dictionary mapping each language to its corresponding directory
    '''
    self._langs = {}
    self._master = None
    self._language_paths = languages
    parser = StringsParser()
    for lang, lang_dir in languages.iteritems():
      filename = os.path.join(lang_dir, 'strings.xml')
      parsed_file = parser.Parse(filename)
      # Attach the set of hg revisions that touched each string's lines.
      mytracks.history.FillMercurialRevisions(filename, parsed_file)
      if lang == 'en':
        self._master = parsed_file
      else:
        self._langs[lang] = parsed_file
    self._Reset()
  def Validate(self):
    '''
    Computes whether all the data in the files for the given languages is valid.
    '''
    self._Reset()
    self._ValidateMissingKeys()
    self._ValidateOutdatedKeys()
  def valid(self):
    # True when the last Validate() run found no problems of any kind.
    return (len(self._missing_in_master) == 0 and
            len(self._missing_in_lang) == 0 and
            len(self._outdated_in_lang) == 0)
  def missing_in_master(self):
    # Map of language -> keys present in that language but absent in 'en'.
    return self._missing_in_master
  def missing_in_lang(self):
    # Map of language -> keys present in 'en' but absent in that language.
    return self._missing_in_lang
  def outdated_in_lang(self):
    # Map of language -> keys whose master entry is newer than the
    # language's translation.
    return self._outdated_in_lang
  def _Reset(self):
    # These are maps from language to string name list
    self._missing_in_master = {}
    self._missing_in_lang = {}
    self._outdated_in_lang = {}
  def _ValidateMissingKeys(self):
    '''
    Computes whether there are missing keys on either side.
    '''
    master_keys = frozenset(self._master.iterkeys())
    for lang, file in self._langs.iteritems():
      keys = frozenset(file.iterkeys())
      missing_in_master = keys - master_keys
      missing_in_lang = master_keys - keys
      # Only languages with actual problems get an entry in the maps.
      if len(missing_in_master) > 0:
        self._missing_in_master[lang] = missing_in_master
      if len(missing_in_lang) > 0:
        self._missing_in_lang[lang] = missing_in_lang
  def _ValidateOutdatedKeys(self):
    '''
    Computes whether any of the language keys are outdated with relation to the
    master keys.
    '''
    for lang, file in self._langs.iteritems():
      outdated = []
      for key, str in file.iteritems():
        # Get all revisions that touched master and language files for this
        # string.
        # NOTE(review): assumes every language key also exists in master; a
        # key flagged by _ValidateMissingKeys would raise KeyError here.
        master_str = self._master[key]
        master_revs = master_str['revs']
        lang_revs = str['revs']
        if not master_revs or not lang_revs:
          print 'WARNING: No revision for %s in %s' % (key, lang)
          continue
        master_file = os.path.join(self._language_paths['en'], 'strings.xml')
        lang_file = os.path.join(self._language_paths[lang], 'strings.xml')
        # Assume that the repository has a single head (TODO: check that),
        # and as such there is always one revision which superceeds all others.
        master_rev = reduce(
            lambda r1, r2: mytracks.history.NewestRevision(master_file, r1, r2),
            master_revs)
        lang_rev = reduce(
            lambda r1, r2: mytracks.history.NewestRevision(lang_file, r1, r2),
            lang_revs)
        # If the master version is newer than the lang version
        if mytracks.history.DoesRevisionSuperceed(lang_file, master_rev, lang_rev):
          outdated.append(key)
      if len(outdated) > 0:
        self._outdated_in_lang[lang] = outdated
| Python |
'''
Module for dealing with resource files (but not their contents).
@author: Rodrigo Damazio
'''
import os.path
from glob import glob
import re
MYTRACKS_RES_DIR = 'MyTracks/res'
ANDROID_MASTER_VALUES = 'values'
ANDROID_VALUES_MASK = 'values-*'
def GetMyTracksDir():
  '''
  Returns the directory in which the MyTracks directory is located.

  Walks upward from the current working directory until a directory
  containing MYTRACKS_RES_DIR is found.

  @raise RuntimeError: if the filesystem root is reached without a match
  '''
  path = os.getcwd()
  while not os.path.isdir(os.path.join(path, MYTRACKS_RES_DIR)):
    # Go up one level; when the parent equals the path we have hit the
    # filesystem root. (The original compared against '/', which never
    # matches on Windows drive roots and also raised a bare string -
    # a TypeError on Python >= 2.6.)
    parent = os.path.split(path)[0]
    if parent == path:
      raise RuntimeError('Not in My Tracks project')
    path = parent
  return path
def GetAllLanguageFiles():
  '''
  Returns a mapping from all found languages to their respective directories.

  The master 'values' directory is included under the 'en' key.

  @raise RuntimeError: if no language directories or no master directory exist
  '''
  mytracks_path = GetMyTracksDir()
  res_dir = os.path.join(mytracks_path, MYTRACKS_RES_DIR, ANDROID_VALUES_MASK)
  language_dirs = glob(res_dir)
  master_dir = os.path.join(mytracks_path, MYTRACKS_RES_DIR, ANDROID_MASTER_VALUES)
  if len(language_dirs) == 0:
    # Raising a bare string is a TypeError on Python >= 2.6.
    raise RuntimeError('No languages found!')
  if not os.path.isdir(master_dir):
    raise RuntimeError('Couldn\'t find master file')
  # 'lang_dir' rather than 'dir': don't shadow the builtin.
  language_tuples = [(re.findall(r'.*values-([A-Za-z-]+)', lang_dir)[0], lang_dir)
                     for lang_dir in language_dirs]
  language_tuples.append(('en', master_dir))
  return dict(language_tuples)
| Python |
'''
Module which parses a string XML file.
@author: Rodrigo Damazio
'''
from xml.parsers.expat import ParserCreate
import re
#import xml.etree.ElementTree as ET
class StringsParser(object):
  '''
  Parser for string XML files.

  This object is not thread-safe and should be used for parsing a single file at
  a time, only.
  '''
  def Parse(self, file):
    '''
    Parses the given file and returns a dictionary mapping keys to an object
    with attributes for that key, such as the value, start/end line and explicit
    revisions.

    In addition to the standard XML format of the strings file, this parser
    supports an annotation inside comments, in one of these formats:
      <!-- KEEP_PARENT name="bla" -->
      <!-- KEEP_PARENT name="bla" rev="123456789012" -->
    Such an annotation indicates that we're explicitly inheriting form the
    master file (and the optional revision says that this decision is compatible
    with the master file up to that revision).

    @param file: the name of the file to parse
    '''
    self._Reset()
    # Unfortunately expat is the only parser that will give us line numbers
    self._xml_parser = ParserCreate()
    self._xml_parser.StartElementHandler = self._StartElementHandler
    self._xml_parser.EndElementHandler = self._EndElementHandler
    self._xml_parser.CharacterDataHandler = self._CharacterDataHandler
    self._xml_parser.CommentHandler = self._CommentHandler
    # NOTE(review): file_obj leaks if ParseFile raises; consider try/finally.
    file_obj = open(file)
    self._xml_parser.ParseFile(file_obj)
    file_obj.close()
    return self._all_strings
  def _Reset(self):
    # Per-parse state: the <string> element currently being accumulated.
    self._currentString = None
    self._currentStringName = None
    self._currentStringValue = None
    self._all_strings = {}
  def _StartElementHandler(self, name, attrs):
    # Begin collecting a <string name="..."> element; everything else and
    # nameless <string> tags are ignored.
    if name != 'string':
      return
    if 'name' not in attrs:
      return
    # Strings must not nest: a new one may only start when none is open.
    assert not self._currentString
    assert not self._currentStringName
    self._currentString = {
        'startLine' : self._xml_parser.CurrentLineNumber,
    }
    # An explicit rev attribute seeds the revision list before hg history
    # is merged in later.
    if 'rev' in attrs:
      self._currentString['revs'] = [attrs['rev']]
    self._currentStringName = attrs['name']
    self._currentStringValue = ''
  def _EndElementHandler(self, name):
    # Finish the open <string> element and file it under its name.
    if name != 'string':
      return
    assert self._currentString
    assert self._currentStringName
    self._currentString['value'] = self._currentStringValue
    self._currentString['endLine'] = self._xml_parser.CurrentLineNumber
    self._all_strings[self._currentStringName] = self._currentString
    self._currentString = None
    self._currentStringName = None
    self._currentStringValue = None
  def _CharacterDataHandler(self, data):
    # Character data may arrive in several chunks; append them all.
    if not self._currentString:
      return
    self._currentStringValue += data
  # Matches the KEEP_PARENT comment annotation described in Parse().
  _KEEP_PARENT_REGEX = re.compile(r'\s*KEEP_PARENT\s+'
                                  r'name\s*=\s*[\'"]?(?P<name>[a-z0-9_]+)[\'"]?'
                                  r'(?:\s+rev=[\'"]?(?P<rev>[0-9a-f]{12})[\'"]?)?\s*',
                                  re.MULTILINE | re.DOTALL)
  def _CommentHandler(self, data):
    # KEEP_PARENT comments create a synthetic entry marking explicit
    # inheritance from the master file.
    keep_parent_match = self._KEEP_PARENT_REGEX.match(data)
    if not keep_parent_match:
      return
    name = keep_parent_match.group('name')
    self._all_strings[name] = {
        'keepParent' : True,
        'startLine' : self._xml_parser.CurrentLineNumber,
        'endLine' : self._xml_parser.CurrentLineNumber
    }
    rev = keep_parent_match.group('rev')
    if rev:
      self._all_strings[name]['revs'] = [rev]
#!/usr/bin/python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from distutils.core import setup
# ElementTree was merged into the stdlib (xml.etree) in Python 2.5;
# older interpreters need the standalone 'elementtree' package.
required = []
if sys.version_info[:3] < (2, 5, 0):
  required.append('elementtree')
# Package metadata and layout for the gdata client library; the actual
# sources live under src/ (see package_dir below).
setup(
  name='gdata',
  version='2.0.16',
  description='Python client library for Google data APIs',
  long_description = """\
The Google data Python client library makes it easy to interact with
Google services through the Google Data APIs. This library provides data
models and service modules for the the following Google data services:
- Google Calendar data API
- Google Contacts data API
- Google Spreadsheets data API
- Google Document List data APIs
- Google Base data API
- Google Apps Provisioning API
- Google Apps Email Migration API
- Google Apps Email Settings API
- Picasa Web Albums Data API
- Google Code Search Data API
- YouTube Data API
- Google Webmaster Tools Data API
- Blogger Data API
- Google Health API
- Google Book Search API
- Google Analytics API
- Google Finance API
- Google Sites Data API
- Google Content API For Shopping
- Google App Marketplace API
- Google Content API for Shopping
- core Google data API functionality
The core Google data code provides sufficient functionality to use this
library with any Google data API (even if a module hasn't been written for
it yet). For example, this client can be used with the Notebook API. This
library may also be used with any Atom Publishing Protocol service (AtomPub).
""",
  author='Jeffrey Scudder',
  author_email='j.s@google.com',
  license='Apache 2.0',
  url='http://code.google.com/p/gdata-python-client/',
  packages=[
    'atom',
    'gdata',
    'gdata.Crypto',
    'gdata.Crypto.Cipher',
    'gdata.Crypto.Hash',
    'gdata.Crypto.Protocol',
    'gdata.Crypto.PublicKey',
    'gdata.Crypto.Util',
    'gdata.acl',
    'gdata.alt',
    'gdata.analytics',
    'gdata.apps',
    'gdata.apps.adminsettings',
    'gdata.apps.audit',
    'gdata.apps.emailsettings',
    'gdata.apps.groups',
    'gdata.apps.migration',
    'gdata.apps.multidomain',
    'gdata.apps.organization',
    'gdata.blogger',
    'gdata.books',
    'gdata.calendar',
    'gdata.calendar_resource',
    'gdata.codesearch',
    'gdata.contacts',
    'gdata.contentforshopping',
    'gdata.docs',
    'gdata.dublincore',
    'gdata.exif',
    'gdata.finance',
    'gdata.geo',
    'gdata.health',
    'gdata.media',
    'gdata.notebook',
    'gdata.oauth',
    'gdata.opensearch',
    'gdata.photos',
    'gdata.projecthosting',
    'gdata.sites',
    'gdata.spreadsheet',
    'gdata.spreadsheets',
    'gdata.tlslite',
    'gdata.tlslite.integration',
    'gdata.tlslite.utils',
    'gdata.webmastertools',
    'gdata.youtube',
  ],
  # Sources live under src/; map both top-level packages there.
  package_dir = {'gdata':'src/gdata', 'atom':'src/atom'},
  install_requires=required
)
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tool for uploading diffs from a version control system to the codereview app.
Usage summary: upload.py [options] [-- diff_options]
Diff options are passed to the diff command of the underlying system.
Supported version control systems:
Git
Mercurial
Subversion
It is important for Git/Mercurial users to specify a tree/node/branch to diff
against by using the '--rev' option.
"""
# This code is derived from appcfg.py in the App Engine SDK (open source),
# and from ASPN recipe #146306.
import ConfigParser
import cookielib
import fnmatch
import getpass
import logging
import mimetypes
import optparse
import os
import re
import socket
import subprocess
import sys
import urllib
import urllib2
import urlparse
# The md5 module was deprecated in Python 2.5.
try:
from hashlib import md5
except ImportError:
from md5 import md5
try:
import readline
except ImportError:
pass
# The logging verbosity:
# 0: Errors only.
# 1: Status messages.
# 2: Info logs.
# 3: Debug logs.
verbosity = 1
# Max size of patch or base file.
MAX_UPLOAD_SIZE = 900 * 1024
# Constants for version control names. Used by GuessVCSName.
VCS_GIT = "Git"
VCS_MERCURIAL = "Mercurial"
VCS_SUBVERSION = "Subversion"
VCS_UNKNOWN = "Unknown"
# whitelist for non-binary filetypes which do not start with "text/"
# .mm (Objective-C) shows up as application/x-freemind on my Linux box.
TEXT_MIMETYPES = ['application/javascript', 'application/x-javascript',
'application/x-freemind']
VCS_ABBREVIATIONS = {
VCS_MERCURIAL.lower(): VCS_MERCURIAL,
"hg": VCS_MERCURIAL,
VCS_SUBVERSION.lower(): VCS_SUBVERSION,
"svn": VCS_SUBVERSION,
VCS_GIT.lower(): VCS_GIT,
}
# The result of parsing Subversion's [auto-props] setting.
svn_auto_props_map = None
def GetEmail(prompt):
  """Prompts the user for their email address and returns it.

  The last used email address is saved to a file and offered up as a suggestion
  to the user. If the user presses enter without typing in anything the last
  used email address is used. If the user enters a new address, it is saved
  for next time we prompt.
  """
  last_email_file_name = os.path.expanduser("~/.last_codereview_email_address")
  last_email = ""
  if os.path.exists(last_email_file_name):
    try:
      last_email_file = open(last_email_file_name, "r")
      last_email = last_email_file.readline().strip("\n")
      last_email_file.close()
      prompt += " [%s]" % last_email
    except IOError, e:
      # best effort: an unreadable cache simply means no suggestion
      pass
  email = raw_input(prompt + ": ").strip()
  if email:
    try:
      last_email_file = open(last_email_file_name, "w")
      last_email_file.write(email)
      last_email_file.close()
    except IOError, e:
      # best effort: failing to save the address is not fatal
      pass
  else:
    email = last_email
  return email
def StatusUpdate(msg):
  """Print a status message to stdout.

  If 'verbosity' is greater than 0, print the message.

  Args:
    msg: The string to print.
  """
  # verbosity is the module-level setting declared near the top of the file.
  if verbosity > 0:
    print msg
def ErrorExit(msg):
  """Print an error message to stderr and exit with status 1."""
  print >>sys.stderr, msg
  sys.exit(1)
class ClientLoginError(urllib2.HTTPError):
  """Raised to indicate there was an error authenticating with ClientLogin."""
  def __init__(self, url, code, msg, headers, args):
    urllib2.HTTPError.__init__(self, url, code, msg, headers, None)
    # args is the parsed ClientLogin response; its 'Error' field (e.g.
    # 'BadAuthentication') is exposed as the human-readable reason.
    self.args = args
    self.reason = args["Error"]
class AbstractRpcServer(object):
  """Provides a common interface for a simple RPC server."""

  def __init__(self, host, auth_function, host_override=None, extra_headers={},
               save_cookies=False):
    """Creates a new HttpRpcServer.

    Args:
      host: The host to send requests to.
      auth_function: A function that takes no arguments and returns an
        (email, password) tuple when called. Will be called if authentication
        is required.
      host_override: The host header to send to the server (defaults to host).
      extra_headers: A dict of extra headers to append to every request.
      save_cookies: If True, save the authentication cookies to local disk.
        If False, use an in-memory cookiejar instead. Subclasses must
        implement this functionality. Defaults to False.
    """
    self.host = host
    self.host_override = host_override
    self.auth_function = auth_function
    # Flipped to True by _GetAuthCookie(); Send() authenticates lazily.
    self.authenticated = False
    # NOTE(review): mutable default argument on extra_headers; appears safe
    # here because the dict is only iterated (in _CreateRequest), never
    # mutated — confirm before relying on it.
    self.extra_headers = extra_headers
    self.save_cookies = save_cookies
    # Opener construction is delegated to the subclass.
    self.opener = self._GetOpener()
    if self.host_override:
      logging.info("Server: %s; Host: %s", self.host, self.host_override)
    else:
      logging.info("Server: %s", self.host)

  def _GetOpener(self):
    """Returns an OpenerDirector for making HTTP requests.

    Returns:
      A urllib2.OpenerDirector object.
    """
    raise NotImplementedError()

  def _CreateRequest(self, url, data=None):
    """Creates a new urllib request with host override and extra headers."""
    logging.debug("Creating request for: '%s' with payload:\n%s", url, data)
    req = urllib2.Request(url, data=data)
    if self.host_override:
      req.add_header("Host", self.host_override)
    for key, value in self.extra_headers.iteritems():
      req.add_header(key, value)
    return req

  def _GetAuthToken(self, email, password):
    """Uses ClientLogin to authenticate the user, returning an auth token.

    Args:
      email: The user's email address
      password: The user's password

    Raises:
      ClientLoginError: If there was an error authenticating with ClientLogin.
      HTTPError: If there was some other form of HTTP error.

    Returns:
      The authentication token returned by ClientLogin.
    """
    account_type = "GOOGLE"
    if self.host.endswith(".google.com"):
      # Needed for use inside Google.
      account_type = "HOSTED"
    req = self._CreateRequest(
        url="https://www.google.com/accounts/ClientLogin",
        data=urllib.urlencode({
            "Email": email,
            "Passwd": password,
            "service": "ah",
            "source": "rietveld-codereview-upload",
            "accountType": account_type,
        }),
    )
    try:
      response = self.opener.open(req)
      response_body = response.read()
      # ClientLogin replies with newline-separated "key=value" pairs.
      response_dict = dict(x.split("=")
                           for x in response_body.split("\n") if x)
      return response_dict["Auth"]
    except urllib2.HTTPError, e:
      if e.code == 403:
        # A 403 body is still "key=value" pairs and carries an "Error" field.
        body = e.read()
        response_dict = dict(x.split("=", 1) for x in body.split("\n") if x)
        raise ClientLoginError(req.get_full_url(), e.code, e.msg,
                               e.headers, response_dict)
      else:
        raise

  def _GetAuthCookie(self, auth_token):
    """Fetches authentication cookies for an authentication token.

    Args:
      auth_token: The authentication token returned by ClientLogin.

    Raises:
      HTTPError: If there was an error fetching the authentication cookies.
    """
    # This is a dummy value to allow us to identify when we're successful.
    continue_location = "http://localhost/"
    args = {"continue": continue_location, "auth": auth_token}
    req = self._CreateRequest("http://%s/_ah/login?%s" %
                              (self.host, urllib.urlencode(args)))
    try:
      response = self.opener.open(req)
    except urllib2.HTTPError, e:
      # The opener has no redirect handler, so a successful login surfaces
      # as a 302 HTTPError; keep it and inspect it below.
      response = e
    if (response.code != 302 or
        response.info()["location"] != continue_location):
      raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg,
                              response.headers, response.fp)
    self.authenticated = True

  def _Authenticate(self):
    """Authenticates the user.

    The authentication process works as follows:
     1) We get a username and password from the user
     2) We use ClientLogin to obtain an AUTH token for the user
        (see http://code.google.com/apis/accounts/AuthForInstalledApps.html).
     3) We pass the auth token to /_ah/login on the server to obtain an
        authentication cookie. If login was successful, it tries to redirect
        us to the URL we provided.

    If we attempt to access the upload API without first obtaining an
    authentication cookie, it returns a 401 response (or a 302) and
    directs us to authenticate ourselves with ClientLogin.
    """
    # Up to 3 attempts; only "BadAuthentication" re-prompts and retries —
    # every other ClientLogin error prints a message and gives up.
    for i in range(3):
      credentials = self.auth_function()
      try:
        auth_token = self._GetAuthToken(credentials[0], credentials[1])
      except ClientLoginError, e:
        if e.reason == "BadAuthentication":
          print >>sys.stderr, "Invalid username or password."
          continue
        if e.reason == "CaptchaRequired":
          print >>sys.stderr, (
              "Please go to\n"
              "https://www.google.com/accounts/DisplayUnlockCaptcha\n"
              "and verify you are a human. Then try again.")
          break
        if e.reason == "NotVerified":
          print >>sys.stderr, "Account not verified."
          break
        if e.reason == "TermsNotAgreed":
          print >>sys.stderr, "User has not agreed to TOS."
          break
        if e.reason == "AccountDeleted":
          print >>sys.stderr, "The user account has been deleted."
          break
        if e.reason == "AccountDisabled":
          print >>sys.stderr, "The user account has been disabled."
          break
        if e.reason == "ServiceDisabled":
          print >>sys.stderr, ("The user's access to the service has been "
                               "disabled.")
          break
        if e.reason == "ServiceUnavailable":
          print >>sys.stderr, "The service is not available; try again later."
          break
        raise
      self._GetAuthCookie(auth_token)
      return

  def Send(self, request_path, payload=None,
           content_type="application/octet-stream",
           timeout=None,
           **kwargs):
    """Sends an RPC and returns the response.

    Args:
      request_path: The path to send the request to, eg /api/appversion/create.
      payload: The body of the request, or None to send an empty request.
      content_type: The Content-Type header to use.
      timeout: timeout in seconds; default None i.e. no timeout.
        (Note: for large requests on OS X, the timeout doesn't work right.)
      kwargs: Any keyword arguments are converted into query string parameters.

    Returns:
      The response body, as a string.
    """
    # TODO: Don't require authentication.  Let the server say
    # whether it is necessary.
    if not self.authenticated:
      self._Authenticate()
    old_timeout = socket.getdefaulttimeout()
    socket.setdefaulttimeout(timeout)
    try:
      tries = 0
      while True:
        tries += 1
        args = dict(kwargs)
        url = "http://%s%s" % (self.host, request_path)
        if args:
          url += "?" + urllib.urlencode(args)
        req = self._CreateRequest(url=url, data=payload)
        req.add_header("Content-Type", content_type)
        try:
          f = self.opener.open(req)
          response = f.read()
          f.close()
          return response
        except urllib2.HTTPError, e:
          if tries > 3:
            raise
          elif e.code == 401 or e.code == 302:
            # Auth cookie missing or expired: re-authenticate, then retry.
            self._Authenticate()
##          elif e.code >= 500 and e.code < 600:
##            # Server Error - try again.
##            continue
          else:
            raise
    finally:
      # Always restore the process-wide default socket timeout.
      socket.setdefaulttimeout(old_timeout)
class HttpRpcServer(AbstractRpcServer):
  """Provides a simplified RPC-style interface for HTTP requests."""

  def _Authenticate(self):
    """Save the cookie jar after authentication."""
    super(HttpRpcServer, self)._Authenticate()
    if self.save_cookies:
      StatusUpdate("Saving authentication cookies to %s" % self.cookie_file)
      self.cookie_jar.save()

  def _GetOpener(self):
    """Returns an OpenerDirector that supports cookies and ignores redirects.

    No HTTPRedirectHandler is installed, so redirects surface as HTTPError;
    AbstractRpcServer._GetAuthCookie relies on seeing the 302 itself.

    Returns:
      A urllib2.OpenerDirector object.
    """
    opener = urllib2.OpenerDirector()
    opener.add_handler(urllib2.ProxyHandler())
    opener.add_handler(urllib2.UnknownHandler())
    opener.add_handler(urllib2.HTTPHandler())
    opener.add_handler(urllib2.HTTPDefaultErrorHandler())
    opener.add_handler(urllib2.HTTPSHandler())
    opener.add_handler(urllib2.HTTPErrorProcessor())
    if self.save_cookies:
      self.cookie_file = os.path.expanduser("~/.codereview_upload_cookies")
      self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file)
      if os.path.exists(self.cookie_file):
        try:
          self.cookie_jar.load()
          # Loaded cookies mean a previous run authenticated successfully;
          # skip ClientLogin unless the server rejects them.
          self.authenticated = True
          StatusUpdate("Loaded authentication cookies from %s" %
                       self.cookie_file)
        except (cookielib.LoadError, IOError):
          # Failed to load cookies - just ignore them.
          pass
      else:
        # Create an empty cookie file with mode 600
        fd = os.open(self.cookie_file, os.O_CREAT, 0600)
        os.close(fd)
      # Always chmod the cookie file (it holds credentials).
      os.chmod(self.cookie_file, 0600)
    else:
      # Don't save cookies across runs of update.py.
      self.cookie_jar = cookielib.CookieJar()
    opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))
    return opener
# Command-line interface for upload.py, grouped by topic.  The resulting
# 'parser' is consulted by the script's entry point.
parser = optparse.OptionParser(usage="%prog [options] [-- diff_options]")
parser.add_option("-y", "--assume_yes", action="store_true",
                  dest="assume_yes", default=False,
                  help="Assume that the answer to yes/no questions is 'yes'.")
# Logging
group = parser.add_option_group("Logging options")
group.add_option("-q", "--quiet", action="store_const", const=0,
                 dest="verbose", help="Print errors only.")
group.add_option("-v", "--verbose", action="store_const", const=2,
                 dest="verbose", default=1,
                 help="Print info level logs (default).")
group.add_option("--noisy", action="store_const", const=3,
                 dest="verbose", help="Print all logs.")
# Review server
group = parser.add_option_group("Review server options")
group.add_option("-s", "--server", action="store", dest="server",
                 default="codereview.appspot.com",
                 metavar="SERVER",
                 help=("The server to upload to. The format is host[:port]. "
                       "Defaults to '%default'."))
group.add_option("-e", "--email", action="store", dest="email",
                 metavar="EMAIL", default=None,
                 help="The username to use. Will prompt if omitted.")
group.add_option("-H", "--host", action="store", dest="host",
                 metavar="HOST", default=None,
                 help="Overrides the Host header sent with all RPCs.")
group.add_option("--no_cookies", action="store_false",
                 dest="save_cookies", default=True,
                 help="Do not save authentication cookies to local disk.")
# Issue
group = parser.add_option_group("Issue options")
group.add_option("-d", "--description", action="store", dest="description",
                 metavar="DESCRIPTION", default=None,
                 help="Optional description when creating an issue.")
group.add_option("-f", "--description_file", action="store",
                 dest="description_file", metavar="DESCRIPTION_FILE",
                 default=None,
                 help="Optional path of a file that contains "
                      "the description when creating an issue.")
# NOTE(review): the default reviewer list starts with a comma, which would
# yield an empty first entry when split on "," — confirm this is intended.
group.add_option("-r", "--reviewers", action="store", dest="reviewers",
                 metavar="REVIEWERS", default=",afshar@google.com",
                 help="Add reviewers (comma separated email addresses).")
group.add_option("--cc", action="store", dest="cc",
                 metavar="CC", default="gdata-python-client-library-contributors@googlegroups.com",
                 help="Add CC (comma separated email addresses).")
group.add_option("--private", action="store_true", dest="private",
                 default=False,
                 help="Make the issue restricted to reviewers and those CCed")
# Upload options
group = parser.add_option_group("Patch options")
group.add_option("-m", "--message", action="store", dest="message",
                 metavar="MESSAGE", default=None,
                 help="A message to identify the patch. "
                      "Will prompt if omitted.")
group.add_option("-i", "--issue", type="int", action="store",
                 metavar="ISSUE", default=None,
                 help="Issue number to which to add. Defaults to new issue.")
group.add_option("--base_url", action="store", dest="base_url", default=None,
                 help="Base repository URL (listed as \"Base URL\" when "
                      "viewing issue). If omitted, will be guessed automatically "
                      "for SVN repos and left blank for others.")
group.add_option("--download_base", action="store_true",
                 dest="download_base", default=False,
                 help="Base files will be downloaded by the server "
                      "(side-by-side diffs may not work on files with CRs).")
group.add_option("--rev", action="store", dest="revision",
                 metavar="REV", default=None,
                 help="Base revision/branch/tree to diff against. Use "
                      "rev1:rev2 range to review already committed changeset.")
group.add_option("--send_mail", action="store_true",
                 dest="send_mail", default=True,
                 help="Send notification email to reviewers.")
group.add_option("--vcs", action="store", dest="vcs",
                 metavar="VCS", default=None,
                 help=("Version control system (optional, usually upload.py "
                       "already guesses the right VCS)."))
group.add_option("--emulate_svn_auto_props", action="store_true",
                 dest="emulate_svn_auto_props", default=False,
                 help=("Emulate Subversion's auto properties feature."))
def GetRpcServer(options):
  """Returns an instance of an AbstractRpcServer.

  Args:
    options: Parsed command-line options; server, host, email and
      save_cookies are consulted.

  Returns:
    A new AbstractRpcServer, on which RPC calls can be made.
  """
  rpc_server_class = HttpRpcServer

  def GetUserCredentials():
    """Prompts the user for a username and password."""
    email = options.email
    if email is None:
      email = GetEmail("Email (login for uploading to %s)" % options.server)
    password = getpass.getpass("Password for %s: " % email)
    return (email, password)

  # If this is the dev_appserver, use fake authentication.
  host = (options.host or options.server).lower()
  if host == "localhost" or host.startswith("localhost:"):
    email = options.email
    if email is None:
      email = "test@example.com"
    # Use lazy %-args so logging interpolates only when the record is
    # actually emitted (and so a '%' in the email can't break formatting).
    logging.info("Using debug user %s. Override with --email", email)
    server = rpc_server_class(
        options.server,
        lambda: (email, "password"),
        host_override=options.host,
        extra_headers={"Cookie":
                       'dev_appserver_login="%s:False"' % email},
        save_cookies=options.save_cookies)
    # Don't try to talk to ClientLogin.
    server.authenticated = True
    return server

  return rpc_server_class(options.server, GetUserCredentials,
                          host_override=options.host,
                          save_cookies=options.save_cookies)
def EncodeMultipartFormData(fields, files):
  """Encode form fields for multipart/form-data.

  Args:
    fields: A sequence of (name, value) elements for regular form fields.
    files: A sequence of (name, filename, value) elements for data to be
      uploaded as files.

  Returns:
    (content_type, body) ready for httplib.HTTP instance.

  Source:
    http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/146306
  """
  BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-'
  CRLF = '\r\n'
  parts = []
  # Plain form fields: boundary, disposition header, blank line, value.
  for (name, value) in fields:
    parts.extend([
        '--' + BOUNDARY,
        'Content-Disposition: form-data; name="%s"' % name,
        '',
        value,
    ])
  # File fields additionally carry a filename and a guessed Content-Type.
  for (name, filename, value) in files:
    parts.extend([
        '--' + BOUNDARY,
        'Content-Disposition: form-data; name="%s"; filename="%s"' %
            (name, filename),
        'Content-Type: %s' % GetContentType(filename),
        '',
        value,
    ])
  # Closing boundary, then a trailing CRLF.
  parts.append('--' + BOUNDARY + '--')
  parts.append('')
  content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
  return content_type, CRLF.join(parts)
def GetContentType(filename):
  """Guess the content-type from the filename, defaulting to octet-stream."""
  guessed, _ = mimetypes.guess_type(filename)
  return guessed or 'application/octet-stream'
# Use a shell for subcommands on Windows to get a PATH search.
# Consumed by RunShellWithReturnCode below as subprocess.Popen's shell arg.
use_shell = sys.platform.startswith("win")
def RunShellWithReturnCode(command, print_output=False,
                           universal_newlines=True,
                           env=os.environ):
  """Executes a command and returns the output from stdout and the return code.

  Args:
    command: Command to execute.
    print_output: If True, the output is printed to stdout.
      If False, both stdout and stderr are ignored.
    universal_newlines: Use universal_newlines flag (default: True).

  Returns:
    Tuple (output, return code)
  """
  logging.info("Running %s", command)
  p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                       shell=use_shell, universal_newlines=universal_newlines,
                       env=env)
  if print_output:
    # Echo stdout line by line as it arrives, while also capturing it.
    output_array = []
    while True:
      line = p.stdout.readline()
      if not line:
        break
      print line.strip("\n")
      output_array.append(line)
    output = "".join(output_array)
  else:
    output = p.stdout.read()
  p.wait()
  # NOTE(review): stderr is drained only after stdout is exhausted and the
  # process has been waited on; a command producing very large stderr could
  # fill the pipe buffer and stall — consider p.communicate() if observed.
  errout = p.stderr.read()
  if print_output and errout:
    print >>sys.stderr, errout
  p.stdout.close()
  p.stderr.close()
  return output, p.returncode
def RunShell(command, silent_ok=False, universal_newlines=True,
             print_output=False, env=os.environ):
  """Runs a command and returns its stdout, aborting the program on failure.

  Thin wrapper over RunShellWithReturnCode: a non-zero exit status — or,
  unless silent_ok is set, empty output — terminates via ErrorExit.
  """
  output, exit_code = RunShellWithReturnCode(command, print_output,
                                             universal_newlines, env)
  if exit_code:
    ErrorExit("Got error status from %s:\n%s" % (command, output))
  if not (silent_ok or output):
    ErrorExit("No output from %s" % command)
  return output
class VersionControlSystem(object):
  """Abstract base class providing an interface to the VCS."""

  def __init__(self, options):
    """Constructor.

    Args:
      options: Command line options.
    """
    self.options = options

  def GenerateDiff(self, args):
    """Return the current diff as a string.

    Args:
      args: Extra arguments to pass to the diff command.
    """
    raise NotImplementedError(
        "abstract method -- subclass %s must override" % self.__class__)

  def GetUnknownFiles(self):
    """Return a list of files unknown to the VCS."""
    raise NotImplementedError(
        "abstract method -- subclass %s must override" % self.__class__)

  def CheckForUnknownFiles(self):
    """Show an "are you sure?" prompt if there are unknown files."""
    unknown_files = self.GetUnknownFiles()
    if unknown_files:
      print "The following files are not added to version control:"
      for line in unknown_files:
        print line
      prompt = "Are you sure to continue?(y/N) "
      answer = raw_input(prompt).strip()
      if answer != "y":
        ErrorExit("User aborted")

  def GetBaseFile(self, filename):
    """Get the content of the upstream version of a file.

    Returns:
      A tuple (base_content, new_content, is_binary, status)
        base_content: The contents of the base file.
        new_content: For text files, this is empty. For binary files, this is
          the contents of the new file, since the diff output won't contain
          information to reconstruct the current file.
        is_binary: True iff the file is binary.
        status: The status of the file.
    """
    raise NotImplementedError(
        "abstract method -- subclass %s must override" % self.__class__)

  def GetBaseFiles(self, diff):
    """Helper that calls GetBase file for each file in the patch.

    Returns:
      A dictionary that maps from filename to GetBaseFile's tuple. Filenames
      are retrieved based on lines that start with "Index:" or
      "Property changes on:".
    """
    files = {}
    for line in diff.splitlines(True):
      if line.startswith('Index:') or line.startswith('Property changes on:'):
        unused, filename = line.split(':', 1)
        # On Windows if a file has property changes its filename uses '\'
        # instead of '/'.
        filename = filename.strip().replace('\\', '/')
        files[filename] = self.GetBaseFile(filename)
    return files

  def UploadBaseFiles(self, issue, rpc_server, patch_list, patchset, options,
                      files):
    """Uploads the base files (and if necessary, the current ones as well)."""

    def UploadFile(filename, file_id, content, is_binary, status, is_base):
      """Uploads a single file (base or current version) to the server."""
      file_too_large = False
      if is_base:
        type = "base"
      else:
        type = "current"
      if len(content) > MAX_UPLOAD_SIZE:
        # Oversized content is replaced with an empty body and flagged so
        # the server knows it was deliberately omitted.
        print ("Not uploading the %s file for %s because it's too large." %
               (type, filename))
        file_too_large = True
        content = ""
      checksum = md5(content).hexdigest()
      if options.verbose > 0 and not file_too_large:
        print "Uploading %s file for %s" % (type, filename)
      url = "/%d/upload_content/%d/%d" % (int(issue), int(patchset), file_id)
      form_fields = [("filename", filename),
                     ("status", status),
                     ("checksum", checksum),
                     ("is_binary", str(is_binary)),
                     ("is_current", str(not is_base)),
                    ]
      if file_too_large:
        form_fields.append(("file_too_large", "1"))
      if options.email:
        form_fields.append(("user", options.email))
      ctype, body = EncodeMultipartFormData(form_fields,
                                            [("data", filename, content)])
      response_body = rpc_server.Send(url, body,
                                      content_type=ctype)
      if not response_body.startswith("OK"):
        StatusUpdate(" --> %s" % response_body)
        sys.exit(1)

    # Invert patch_list into {filename: file_id_str} (list comp used purely
    # for its setdefault side effect; the resulting list is discarded).
    patches = dict()
    [patches.setdefault(v, k) for k, v in patch_list]
    for filename in patches.keys():
      base_content, new_content, is_binary, status = files[filename]
      file_id_str = patches.get(filename)
      if file_id_str.find("nobase") != -1:
        # A "nobase" marker means the server doesn't need the base file;
        # the numeric id follows the last underscore.
        base_content = None
        file_id_str = file_id_str[file_id_str.rfind("_") + 1:]
      file_id = int(file_id_str)
      if base_content != None:
        UploadFile(filename, file_id, base_content, is_binary, status, True)
      if new_content != None:
        UploadFile(filename, file_id, new_content, is_binary, status, False)

  def IsImage(self, filename):
    """Returns true if the filename has an image extension."""
    mimetype = mimetypes.guess_type(filename)[0]
    if not mimetype:
      return False
    return mimetype.startswith("image/")

  def IsBinary(self, filename):
    """Returns true if the guessed mimetype isn't in the text group."""
    mimetype = mimetypes.guess_type(filename)[0]
    if not mimetype:
      return False  # e.g. README, "real" binaries usually have an extension
    # special case for text files which don't start with text/
    if mimetype in TEXT_MIMETYPES:
      return False
    return not mimetype.startswith("text/")
class SubversionVCS(VersionControlSystem):
  """Implementation of the VersionControlSystem interface for Subversion."""

  def __init__(self, options):
    super(SubversionVCS, self).__init__(options)
    if self.options.revision:
      match = re.match(r"(\d+)(:(\d+))?", self.options.revision)
      if not match:
        ErrorExit("Invalid Subversion revision %s." % self.options.revision)
      # rev_end stays None for a single-revision argument ("REV" vs "REV:REV").
      self.rev_start = match.group(1)
      self.rev_end = match.group(3)
    else:
      self.rev_start = self.rev_end = None
    # Cache output from "svn list -r REVNO dirname".
    # Keys: dirname, Values: 2-tuple (ouput for start rev and end rev).
    self.svnls_cache = {}
    # Base URL is required to fetch files deleted in an older revision.
    # Result is cached to not guess it over and over again in GetBaseFile().
    required = self.options.download_base or self.options.revision is not None
    self.svn_base = self._GuessBase(required)

  def GuessBase(self, required):
    """Wrapper for _GuessBase (returns the value cached by __init__)."""
    return self.svn_base

  def _GuessBase(self, required):
    """Returns the SVN base URL.

    Args:
      required: If true, exits if the url can't be guessed, otherwise None is
        returned.
    """
    info = RunShell(["svn", "info"])
    for line in info.splitlines():
      words = line.split()
      if len(words) == 2 and words[0] == "URL:":
        url = words[1]
        scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
        username, netloc = urllib.splituser(netloc)
        if username:
          logging.info("Removed username from base URL")
        if netloc.endswith("svn.python.org"):
          if netloc == "svn.python.org":
            if path.startswith("/projects/"):
              # Drop "projects" but keep the leading slash.
              path = path[9:]
          elif netloc != "pythondev@svn.python.org":
            ErrorExit("Unrecognized Python URL: %s" % url)
          base = "http://svn.python.org/view/*checkout*%s/" % path
          logging.info("Guessed Python base = %s", base)
        elif netloc.endswith("svn.collab.net"):
          if path.startswith("/repos/"):
            # Drop "repos" but keep the leading slash.
            path = path[6:]
          base = "http://svn.collab.net/viewvc/*checkout*%s/" % path
          logging.info("Guessed CollabNet base = %s", base)
        elif netloc.endswith(".googlecode.com"):
          path = path + "/"
          base = urlparse.urlunparse(("http", netloc, path, params,
                                      query, fragment))
          logging.info("Guessed Google Code base = %s", base)
        else:
          path = path + "/"
          base = urlparse.urlunparse((scheme, netloc, path, params,
                                      query, fragment))
          logging.info("Guessed base = %s", base)
        return base
    if required:
      ErrorExit("Can't find URL in output from svn info")
    return None

  def GenerateDiff(self, args):
    """Runs "svn diff" and returns its output, exiting if it has no patches."""
    cmd = ["svn", "diff"]
    if self.options.revision:
      cmd += ["-r", self.options.revision]
    cmd.extend(args)
    data = RunShell(cmd)
    count = 0
    for line in data.splitlines():
      if line.startswith("Index:") or line.startswith("Property changes on:"):
        count += 1
        logging.info(line)
    if not count:
      ErrorExit("No valid patches found in output from svn diff")
    return data

  def _CollapseKeywords(self, content, keyword_str):
    """Collapses SVN keywords."""
    # svn cat translates keywords but svn diff doesn't. As a result of this
    # behavior patching.PatchChunks() fails with a chunk mismatch error.
    # This part was originally written by the Review Board development team
    # who had the same problem (http://reviews.review-board.org/r/276/).
    # Mapping of keywords to known aliases
    svn_keywords = {
      # Standard keywords
      'Date':                ['Date', 'LastChangedDate'],
      'Revision':            ['Revision', 'LastChangedRevision', 'Rev'],
      'Author':              ['Author', 'LastChangedBy'],
      'HeadURL':             ['HeadURL', 'URL'],
      'Id':                  ['Id'],
      # Aliases
      'LastChangedDate':     ['LastChangedDate', 'Date'],
      'LastChangedRevision': ['LastChangedRevision', 'Rev', 'Revision'],
      'LastChangedBy':       ['LastChangedBy', 'Author'],
      'URL':                 ['URL', 'HeadURL'],
    }

    def repl(m):
      if m.group(2):
        # Expanded form "$Kw:: value $" -> fixed-width collapsed form.
        return "$%s::%s$" % (m.group(1), " " * len(m.group(3)))
      return "$%s$" % m.group(1)
    keywords = [keyword
                for name in keyword_str.split(" ")
                for keyword in svn_keywords.get(name, [])]
    return re.sub(r"\$(%s):(:?)([^\$]+)\$" % '|'.join(keywords), repl, content)

  def GetUnknownFiles(self):
    """Returns the raw "svn status" lines for unversioned ('?') files."""
    status = RunShell(["svn", "status", "--ignore-externals"], silent_ok=True)
    unknown_files = []
    for line in status.split("\n"):
      if line and line[0] == "?":
        unknown_files.append(line)
    return unknown_files

  def ReadFile(self, filename):
    """Returns the contents of a file."""
    file = open(filename, 'rb')
    result = ""
    try:
      result = file.read()
    finally:
      file.close()
    return result

  def GetStatus(self, filename):
    """Returns the status of a file."""
    if not self.options.revision:
      status = RunShell(["svn", "status", "--ignore-externals", filename])
      if not status:
        ErrorExit("svn status returned no output for %s" % filename)
      status_lines = status.splitlines()
      # If file is in a cl, the output will begin with
      # "\n--- Changelist 'cl_name':\n". See
      # http://svn.collab.net/repos/svn/trunk/notes/changelist-design.txt
      if (len(status_lines) == 3 and
          not status_lines[0] and
          status_lines[1].startswith("--- Changelist")):
        status = status_lines[2]
      else:
        status = status_lines[0]
    # If we have a revision to diff against we need to run "svn list"
    # for the old and the new revision and compare the results to get
    # the correct status for a file.
    else:
      dirname, relfilename = os.path.split(filename)
      if dirname not in self.svnls_cache:
        cmd = ["svn", "list", "-r", self.rev_start, dirname or "."]
        out, returncode = RunShellWithReturnCode(cmd)
        if returncode:
          ErrorExit("Failed to get status for %s." % filename)
        old_files = out.splitlines()
        args = ["svn", "list"]
        if self.rev_end:
          args += ["-r", self.rev_end]
        cmd = args + [dirname or "."]
        out, returncode = RunShellWithReturnCode(cmd)
        if returncode:
          ErrorExit("Failed to run command %s" % cmd)
        self.svnls_cache[dirname] = (old_files, out.splitlines())
      old_files, new_files = self.svnls_cache[dirname]
      # Synthesize a padded svn-status-style string so callers can index
      # status[0] / status[3] uniformly with real "svn status" output.
      if relfilename in old_files and relfilename not in new_files:
        status = "D      "
      elif relfilename in old_files and relfilename in new_files:
        status = "M      "
      else:
        status = "A      "
    return status

  def GetBaseFile(self, filename):
    """See VersionControlSystem.GetBaseFile for the return contract."""
    status = self.GetStatus(filename)
    base_content = None
    new_content = None
    # If a file is copied its status will be "A  +", which signifies
    # "addition-with-history". See "svn st" for more information. We need to
    # upload the original file or else diff parsing will fail if the file was
    # edited.
    if status[0] == "A" and status[3] != "+":
      # We'll need to upload the new content if we're adding a binary file
      # since diff's output won't contain it.
      mimetype = RunShell(["svn", "propget", "svn:mime-type", filename],
                          silent_ok=True)
      base_content = ""
      is_binary = bool(mimetype) and not mimetype.startswith("text/")
      if is_binary and self.IsImage(filename):
        new_content = self.ReadFile(filename)
    elif (status[0] in ("M", "D", "R") or
          (status[0] == "A" and status[3] == "+") or  # Copied file.
          (status[0] == " " and status[1] == "M")):  # Property change.
      args = []
      if self.options.revision:
        url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
      else:
        # Don't change filename, it's needed later.
        url = filename
        args += ["-r", "BASE"]
      cmd = ["svn"] + args + ["propget", "svn:mime-type", url]
      mimetype, returncode = RunShellWithReturnCode(cmd)
      if returncode:
        # File does not exist in the requested revision.
        # Reset mimetype, it contains an error message.
        mimetype = ""
      get_base = False
      is_binary = bool(mimetype) and not mimetype.startswith("text/")
      if status[0] == " ":
        # Empty base content just to force an upload.
        base_content = ""
      elif is_binary:
        if self.IsImage(filename):
          get_base = True
          if status[0] == "M":
            if not self.rev_end:
              new_content = self.ReadFile(filename)
            else:
              url = "%s/%s@%s" % (self.svn_base, filename, self.rev_end)
              new_content = RunShell(["svn", "cat", url],
                                     universal_newlines=True, silent_ok=True)
        else:
          base_content = ""
      else:
        get_base = True
      if get_base:
        if is_binary:
          universal_newlines = False
        else:
          universal_newlines = True
        if self.rev_start:
          # "svn cat -r REV delete_file.txt" doesn't work. cat requires
          # the full URL with "@REV" appended instead of using "-r" option.
          url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
          base_content = RunShell(["svn", "cat", url],
                                  universal_newlines=universal_newlines,
                                  silent_ok=True)
        else:
          base_content = RunShell(["svn", "cat", filename],
                                  universal_newlines=universal_newlines,
                                  silent_ok=True)
        if not is_binary:
          args = []
          if self.rev_start:
            url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
          else:
            url = filename
            args += ["-r", "BASE"]
          cmd = ["svn"] + args + ["propget", "svn:keywords", url]
          keywords, returncode = RunShellWithReturnCode(cmd)
          if keywords and not returncode:
            base_content = self._CollapseKeywords(base_content, keywords)
    else:
      StatusUpdate("svn status returned unexpected output: %s" % status)
      sys.exit(1)
    return base_content, new_content, is_binary, status[0:5]
class GitVCS(VersionControlSystem):
"""Implementation of the VersionControlSystem interface for Git."""
def __init__(self, options):
super(GitVCS, self).__init__(options)
# Map of filename -> (hash before, hash after) of base file.
# Hashes for "no such file" are represented as None.
self.hashes = {}
# Map of new filename -> old filename for renames.
self.renames = {}
def GenerateDiff(self, extra_args):
# This is more complicated than svn's GenerateDiff because we must convert
# the diff output to include an svn-style "Index:" line as well as record
# the hashes of the files, so we can upload them along with our diff.
# Special used by git to indicate "no such content".
NULL_HASH = "0"*40
extra_args = extra_args[:]
if self.options.revision:
extra_args = [self.options.revision] + extra_args
# --no-ext-diff is broken in some versions of Git, so try to work around
# this by overriding the environment (but there is still a problem if the
# git config key "diff.external" is used).
env = os.environ.copy()
if 'GIT_EXTERNAL_DIFF' in env: del env['GIT_EXTERNAL_DIFF']
gitdiff = RunShell(["git", "diff", "--no-ext-diff", "--full-index", "-M"]
+ extra_args, env=env)
def IsFileNew(filename):
return filename in self.hashes and self.hashes[filename][0] is None
def AddSubversionPropertyChange(filename):
"""Add svn's property change information into the patch if given file is
new file.
We use Subversion's auto-props setting to retrieve its property.
See http://svnbook.red-bean.com/en/1.1/ch07.html#svn-ch-7-sect-1.3.2 for
Subversion's [auto-props] setting.
"""
if self.options.emulate_svn_auto_props and IsFileNew(filename):
svnprops = GetSubversionPropertyChanges(filename)
if svnprops:
svndiff.append("\n" + svnprops + "\n")
svndiff = []
filecount = 0
filename = None
for line in gitdiff.splitlines():
match = re.match(r"diff --git a/(.*) b/(.*)$", line)
if match:
# Add auto property here for previously seen file.
if filename is not None:
AddSubversionPropertyChange(filename)
filecount += 1
# Intentionally use the "after" filename so we can show renames.
filename = match.group(2)
svndiff.append("Index: %s\n" % filename)
if match.group(1) != match.group(2):
self.renames[match.group(2)] = match.group(1)
else:
# The "index" line in a git diff looks like this (long hashes elided):
# index 82c0d44..b2cee3f 100755
# We want to save the left hash, as that identifies the base file.
match = re.match(r"index (\w+)\.\.(\w+)", line)
if match:
before, after = (match.group(1), match.group(2))
if before == NULL_HASH:
before = None
if after == NULL_HASH:
after = None
self.hashes[filename] = (before, after)
svndiff.append(line + "\n")
if not filecount:
ErrorExit("No valid patches found in output from git diff")
# Add auto property for the last seen file.
assert filename is not None
AddSubversionPropertyChange(filename)
return "".join(svndiff)
def GetUnknownFiles(self):
status = RunShell(["git", "ls-files", "--exclude-standard", "--others"],
silent_ok=True)
return status.splitlines()
def GetFileContent(self, file_hash, is_binary):
"""Returns the content of a file identified by its git hash."""
data, retcode = RunShellWithReturnCode(["git", "show", file_hash],
universal_newlines=not is_binary)
if retcode:
ErrorExit("Got error status from 'git show %s'" % file_hash)
return data
  def GetBaseFile(self, filename):
    """Return (base_content, new_content, is_binary, status) for filename.

    Uses the before/after blob hashes recorded in self.hashes (filled in by
    the diff-conversion pass) to classify the change and fetch contents.
    """
    hash_before, hash_after = self.hashes.get(filename, (None,None))
    base_content = None
    new_content = None
    is_binary = self.IsBinary(filename)
    status = None
    if filename in self.renames:
      status = "A +"  # Match svn attribute name for renames.
      if filename not in self.hashes:
        # If a rename doesn't change the content, we never get a hash.
        base_content = RunShell(["git", "show", "HEAD:" + filename])
    elif not hash_before:
      # No "before" hash: the file is newly added.
      status = "A"
      base_content = ""
    elif not hash_after:
      # No "after" hash: the file was deleted.
      status = "D"
    else:
      status = "M"
    is_image = self.IsImage(filename)
    # Grab the before/after content if we need it.
    # We should include file contents if it's text or it's an image.
    if not is_binary or is_image:
      # Grab the base content if we don't have it already.
      if base_content is None and hash_before:
        base_content = self.GetFileContent(hash_before, is_binary)
      # Only include the "after" file if it's an image; otherwise it
      # is reconstructed from the diff.
      if is_image and hash_after:
        new_content = self.GetFileContent(hash_after, is_binary)
    return (base_content, new_content, is_binary, status)
class MercurialVCS(VersionControlSystem):
  """Implementation of the VersionControlSystem interface for Mercurial."""

  def __init__(self, options, repo_dir):
    super(MercurialVCS, self).__init__(options)
    # Absolute path to repository (we can be in a subdir)
    self.repo_dir = os.path.normpath(repo_dir)
    # Compute the subdir
    cwd = os.path.normpath(os.getcwd())
    assert cwd.startswith(self.repo_dir)
    self.subdir = cwd[len(self.repo_dir):].lstrip(r"\/")
    if self.options.revision:
      self.base_rev = self.options.revision
    else:
      # assumes `hg parent -q` prints "rev:node"; we keep the node part.
      # TODO confirm for all hg versions in use.
      self.base_rev = RunShell(["hg", "parent", "-q"]).split(':')[1].strip()

  def _GetRelPath(self, filename):
    """Get relative path of a file according to the current directory,
    given its logical path in the repo."""
    assert filename.startswith(self.subdir), (filename, self.subdir)
    return filename[len(self.subdir):].lstrip(r"\/")

  def GenerateDiff(self, extra_args):
    """Run `hg diff --git` against base_rev and reshape it as an svn diff."""
    # If no file specified, restrict to the current subdir
    extra_args = extra_args or ["."]
    cmd = ["hg", "diff", "--git", "-r", self.base_rev] + extra_args
    data = RunShell(cmd, silent_ok=True)
    svndiff = []
    filecount = 0
    for line in data.splitlines():
      m = re.match("diff --git a/(\S+) b/(\S+)", line)
      if m:
        # Modify line to make it look like as it comes from svn diff.
        # With this modification no changes on the server side are required
        # to make upload.py work with Mercurial repos.
        # NOTE: for proper handling of moved/copied files, we have to use
        # the second filename.
        filename = m.group(2)
        svndiff.append("Index: %s" % filename)
        svndiff.append("=" * 67)
        filecount += 1
        logging.info(line)
      else:
        svndiff.append(line)
    if not filecount:
      ErrorExit("No valid patches found in output from hg diff")
    return "\n".join(svndiff) + "\n"

  def GetUnknownFiles(self):
    """Return a list of files unknown to the VCS."""
    args = []  # NOTE(review): unused local; harmless leftover.
    status = RunShell(["hg", "status", "--rev", self.base_rev, "-u", "."],
        silent_ok=True)
    unknown_files = []
    for line in status.splitlines():
      st, fn = line.split(" ", 1)
      if st == "?":
        unknown_files.append(fn)
    return unknown_files

  def GetBaseFile(self, filename):
    """Return (base_content, new_content, is_binary, status) for filename."""
    # "hg status" and "hg cat" both take a path relative to the current subdir
    # rather than to the repo root, but "hg diff" has given us the full path
    # to the repo root.
    base_content = ""
    new_content = None
    is_binary = False
    oldrelpath = relpath = self._GetRelPath(filename)
    # "hg status -C" returns two lines for moved/copied files, one otherwise
    out = RunShell(["hg", "status", "-C", "--rev", self.base_rev, relpath])
    out = out.splitlines()
    # HACK: strip error message about missing file/directory if it isn't in
    # the working copy
    if out[0].startswith('%s: ' % relpath):
      out = out[1:]
    if len(out) > 1:
      # Moved/copied => considered as modified, use old filename to
      # retrieve base contents
      oldrelpath = out[1].strip()
      status = "M"
    else:
      status, _ = out[0].split(' ', 1)
    # base_rev may be "rev:node"; hg cat wants just the rev part.
    if ":" in self.base_rev:
      base_rev = self.base_rev.split(":", 1)[0]
    else:
      base_rev = self.base_rev
    if status != "A":
      base_content = RunShell(["hg", "cat", "-r", base_rev, oldrelpath],
          silent_ok=True)
      is_binary = "\0" in base_content  # Mercurial's heuristic
    if status != "R":
      # NOTE(review): file handle is never closed explicitly; relies on
      # refcounting. A `with` block would be safer -- confirm before changing.
      new_content = open(relpath, "rb").read()
      is_binary = is_binary or "\0" in new_content
    if is_binary and base_content:
      # Fetch again without converting newlines
      base_content = RunShell(["hg", "cat", "-r", base_rev, oldrelpath],
          silent_ok=True, universal_newlines=False)
    # Text (non-binary) "after" content is reconstructed from the diff, and
    # binary non-image content is not uploaded, so drop it in those cases.
    if not is_binary or not self.IsImage(relpath):
      new_content = None
    return base_content, new_content, is_binary, status
# NOTE: The SplitPatch function is duplicated in engine.py, keep them in sync.
def SplitPatch(data):
  """Splits a patch into separate pieces for each file.

  Args:
    data: A string containing the output of svn diff.

  Returns:
    A list of 2-tuple (filename, text) where text is the svn diff output
    pertaining to filename.
  """
  pieces = []
  current_name = None
  current_lines = []
  for line in data.splitlines(True):
    started_name = None
    if line.startswith('Index:'):
      started_name = line.split(':', 1)[1].strip()
    elif line.startswith('Property changes on:'):
      # When a file is modified, paths use '/' between directories, however
      # when a property is modified '\' is used on Windows. Make them the same
      # otherwise the file shows up twice.
      candidate = line.split(':', 1)[1].strip().replace('\\', '/')
      if candidate != current_name:
        # File has property changes but no modifications, create a new diff.
        started_name = candidate
    if started_name:
      if current_name and current_lines:
        pieces.append((current_name, ''.join(current_lines)))
      current_name = started_name
      current_lines = [line]
    else:
      current_lines.append(line)
  if current_name and current_lines:
    pieces.append((current_name, ''.join(current_lines)))
  return pieces
def UploadSeparatePatches(issue, rpc_server, patchset, data, options):
  """Uploads a separate patch for each file in the diff output.

  Args:
    issue: Issue id (numeric or numeric string).
    rpc_server: RPC server used to talk to the code review service.
    patchset: Patchset id the per-file patches belong to.
    data: Full diff text; split per file with SplitPatch().
    options: Parsed command-line options (download_base is consulted).

  Returns a list of [patch_key, filename] for each file.
  """
  patches = SplitPatch(data)
  rv = []
  for patch in patches:
    # Skip (with a warning) files whose diff exceeds the server limit
    # instead of failing the whole upload.
    if len(patch[1]) > MAX_UPLOAD_SIZE:
      print ("Not uploading the patch for " + patch[0] +
             " because the file is too large.")
      continue
    form_fields = [("filename", patch[0])]
    if not options.download_base:
      # Base content will be uploaded separately; tell the server so.
      form_fields.append(("content_upload", "1"))
    files = [("data", "data.diff", patch[1])]
    ctype, body = EncodeMultipartFormData(form_fields, files)
    url = "/%d/upload_patch/%d" % (int(issue), int(patchset))
    print "Uploading patch for " + patch[0]
    response_body = rpc_server.Send(url, body, content_type=ctype)
    # Success response is "OK" followed by the new patch key on line 2.
    lines = response_body.splitlines()
    if not lines or lines[0] != "OK":
      StatusUpdate(" --> %s" % response_body)
      sys.exit(1)
    rv.append([lines[1], patch[0]])
  return rv
def GuessVCSName():
  """Helper to guess the version control system.

  This examines the current directory, guesses which VersionControlSystem
  we're using, and returns a string indicating which VCS is detected.

  Returns:
    A pair (vcs, output).  vcs is a string indicating which VCS was detected
    and is one of VCS_GIT, VCS_MERCURIAL, VCS_SUBVERSION, or VCS_UNKNOWN.
    output is a string containing any interesting output from the vcs
    detection routine, or None if there is nothing interesting.
  """
  # Mercurial has a command to get the base directory of a repository
  # Try running it, but don't die if we don't have hg installed.
  # NOTE: we try Mercurial first as it can sit on top of an SVN working copy.
  # NOTE(review): "except OSError, (errno, message)" is Python 2-only syntax.
  try:
    out, returncode = RunShellWithReturnCode(["hg", "root"])
    if returncode == 0:
      return (VCS_MERCURIAL, out.strip())
  except OSError, (errno, message):
    if errno != 2:  # ENOENT -- they don't have hg installed.
      raise
  # Subversion has a .svn in all working directories.
  if os.path.isdir('.svn'):
    logging.info("Guessed VCS = Subversion")
    return (VCS_SUBVERSION, None)
  # Git has a command to test if you're in a git tree.
  # Try running it, but don't die if we don't have git installed.
  try:
    out, returncode = RunShellWithReturnCode(["git", "rev-parse",
                                              "--is-inside-work-tree"])
    if returncode == 0:
      return (VCS_GIT, None)
  except OSError, (errno, message):
    if errno != 2:  # ENOENT -- they don't have git installed.
      raise
  return (VCS_UNKNOWN, None)
def GuessVCS(options):
  """Helper to guess the version control system.

  This verifies any user-specified VersionControlSystem (by command line
  or environment variable). If the user didn't specify one, this examines
  the current directory, guesses which VersionControlSystem we're using,
  and returns an instance of the appropriate class. Exit with an error
  if we can't figure it out.

  Returns:
    A VersionControlSystem instance. Exits if the VCS can't be guessed.
  """
  requested = options.vcs or os.environ.get("CODEREVIEW_VCS")
  if requested:
    # The user named a VCS explicitly; validate the abbreviation.
    vcs = VCS_ABBREVIATIONS.get(requested.lower())
    if vcs is None:
      ErrorExit("Unknown version control system %r specified." % requested)
    extra_output = None
  else:
    vcs, extra_output = GuessVCSName()

  if vcs == VCS_MERCURIAL:
    if extra_output is None:
      extra_output = RunShell(["hg", "root"]).strip()
    return MercurialVCS(options, extra_output)
  if vcs == VCS_SUBVERSION:
    return SubversionVCS(options)
  if vcs == VCS_GIT:
    return GitVCS(options)

  ErrorExit(("Could not guess version control system. "
             "Are you in a working copy directory?"))
def CheckReviewer(reviewer):
  """Validate a reviewer -- either a nickname or an email addres.

  Args:
    reviewer: A nickname or an email address.

  Calls ErrorExit() if it is an invalid email address.
  """
  if "@" not in reviewer:
    return  # Assume nickname
  pieces = reviewer.split("@")
  # A valid address has exactly one '@' and a dotted domain part.
  if len(pieces) > 2 or "." not in pieces[-1]:
    ErrorExit("Invalid email address: %r" % reviewer)
def LoadSubversionAutoProperties():
  """Returns the content of [auto-props] section of Subversion's config file as
  a dictionary.

  Returns:
    A dictionary whose key-value pair corresponds the [auto-props] section's
    key-value pair.
    In following cases, returns empty dictionary:
      - config file doesn't exist, or
      - 'enable-auto-props' is not set to 'true-like-value' in [miscellany].
  """
  # Todo(hayato): Windows users might use different path for configuration file.
  subversion_config = os.path.expanduser("~/.subversion/config")
  if not os.path.exists(subversion_config):
    return {}
  # NOTE(review): ConfigParser is the Python 2 module name (configparser in
  # Python 3); this file is Python 2 throughout.
  config = ConfigParser.ConfigParser()
  config.read(subversion_config)
  # Auto-props only apply when explicitly enabled in [miscellany].
  if (config.has_section("miscellany") and
      config.has_option("miscellany", "enable-auto-props") and
      config.getboolean("miscellany", "enable-auto-props") and
      config.has_section("auto-props")):
    props = {}
    # Each option maps a file glob to a raw property string such as
    # "svn:eol-style=LF;svn:executable"; parse it into pairs.
    for file_pattern in config.options("auto-props"):
      props[file_pattern] = ParseSubversionPropertyValues(
          config.get("auto-props", file_pattern))
    return props
  else:
    return {}
def ParseSubversionPropertyValues(props):
  """Parse the given property value which comes from [auto-props] section and
  returns a list whose element is a (svn_prop_key, svn_prop_value) pair.

  Args:
    props: Raw value string, e.g. 'svn:eol-style=LF;svn:executable'.

  Returns:
    A list of (svn_prop_key, svn_prop_value) pairs.

  See the following doctest for example.

  >>> ParseSubversionPropertyValues('svn:eol-style=LF')
  [('svn:eol-style', 'LF')]
  >>> ParseSubversionPropertyValues('svn:mime-type=image/jpeg')
  [('svn:mime-type', 'image/jpeg')]
  >>> ParseSubversionPropertyValues('svn:eol-style=LF;svn:executable')
  [('svn:eol-style', 'LF'), ('svn:executable', '*')]
  """
  key_value_pairs = []
  for prop in props.split(";"):
    # Split at the first '=' only, so property values that themselves
    # contain '=' are kept intact (previously this tripped an assertion).
    key, sep, value = prop.partition("=")
    if not sep:
      # If value is not given, use '*' as a Subversion's convention.
      key_value_pairs.append((key, "*"))
    else:
      key_value_pairs.append((key, value))
  return key_value_pairs
def GetSubversionPropertyChanges(filename):
  """Return a Subversion's 'Property changes on ...' string, which is used in
  the patch file.

  Args:
    filename: filename whose property might be set by [auto-props] config.

  Returns:
    A string like 'Property changes on |filename| ...' if given |filename|
    matches any entries in [auto-props] section. None, otherwise.
  """
  global svn_auto_props_map
  # Lazily load and cache the [auto-props] config the first time we need it.
  if svn_auto_props_map is None:
    svn_auto_props_map = LoadSubversionAutoProperties()

  all_props = []
  # Collect properties from every glob that matches this filename.
  for file_pattern, props in svn_auto_props_map.items():
    if fnmatch.fnmatch(filename, file_pattern):
      all_props.extend(props)
  if all_props:
    return FormatSubversionPropertyChanges(filename, all_props)
  return None
def FormatSubversionPropertyChanges(filename, props):
  """Returns Subversion's 'Property changes on ...' strings using given filename
  and properties.

  Args:
    filename: filename
    props: A list whose element is a (svn_prop_key, svn_prop_value) pair.

  Returns:
    A newline-terminated string which can be used in the patch file for
    Subversion, mirroring the section svn itself emits.
  """
  out_lines = [
      "Property changes on: %s" % filename,
      "___________________________________________________________________"]
  for prop_key, prop_value in props:
    out_lines.extend(("Added: " + prop_key, " + " + prop_value))
  return "\n".join(out_lines) + "\n"
def RealMain(argv, data=None):
  """The real main function.

  Args:
    argv: Command line arguments.
    data: Diff contents. If None (default) the diff is generated by
      the VersionControlSystem implementation returned by GuessVCS().

  Returns:
    A 2-tuple (issue id, patchset id).  The patchset id is None if the base
    files are not uploaded by this script (applies only to SVN checkouts).
  """
  logging.basicConfig(format=("%(asctime).19s %(levelname)s %(filename)s:"
                              "%(lineno)s %(message)s "))
  # Force the C locale so tool output parses predictably.
  os.environ['LC_ALL'] = 'C'
  options, args = parser.parse_args(argv[1:])
  global verbosity
  verbosity = options.verbose
  if verbosity >= 3:
    logging.getLogger().setLevel(logging.DEBUG)
  elif verbosity >= 2:
    logging.getLogger().setLevel(logging.INFO)

  vcs = GuessVCS(options)

  base = options.base_url
  if isinstance(vcs, SubversionVCS):
    # Guessing the base field is only supported for Subversion.
    # Note: Fetching base files may become deprecated in future releases.
    guessed_base = vcs.GuessBase(options.download_base)
    if base:
      if guessed_base and base != guessed_base:
        print "Using base URL \"%s\" from --base_url instead of \"%s\"" % \
            (base, guessed_base)
    else:
      base = guessed_base

  # NOTE(review): this assignment is redundant (download_base is already
  # true inside this branch); kept as-is, only the log line has effect.
  if not base and options.download_base:
    options.download_base = True
    logging.info("Enabled upload of base file")
  if not options.assume_yes:
    vcs.CheckForUnknownFiles()
  if data is None:
    data = vcs.GenerateDiff(args)
  files = vcs.GetBaseFiles(data)
  if verbosity >= 1:
    print "Upload server:", options.server, "(change with -s/--server)"
  if options.issue:
    prompt = "Message describing this patch set: "
  else:
    prompt = "New issue subject: "
  # raw_input is Python 2; prompts interactively when -m is not given.
  message = options.message or raw_input(prompt).strip()
  if not message:
    ErrorExit("A non-empty message is required")
  rpc_server = GetRpcServer(options)

  # Assemble the multipart form describing the issue/patchset.
  form_fields = [("subject", message)]
  if base:
    form_fields.append(("base", base))
  if options.issue:
    form_fields.append(("issue", str(options.issue)))
  if options.email:
    form_fields.append(("user", options.email))
  if options.reviewers:
    for reviewer in options.reviewers.split(','):
      CheckReviewer(reviewer)
    form_fields.append(("reviewers", options.reviewers))
  if options.cc:
    for cc in options.cc.split(','):
      CheckReviewer(cc)
    form_fields.append(("cc", options.cc))
  description = options.description
  if options.description_file:
    if options.description:
      ErrorExit("Can't specify description and description_file")
    # NOTE(review): 'file' shadows the Python 2 builtin; kept unchanged here.
    file = open(options.description_file, 'r')
    description = file.read()
    file.close()
  if description:
    form_fields.append(("description", description))

  # Send a hash of all the base file so the server can determine if a copy
  # already exists in an earlier patchset.
  base_hashes = ""
  for file, info in files.iteritems():
    if not info[0] is None:
      checksum = md5(info[0]).hexdigest()
      if base_hashes:
        base_hashes += "|"
      base_hashes += checksum + ":" + file
  form_fields.append(("base_hashes", base_hashes))
  if options.private:
    if options.issue:
      print "Warning: Private flag ignored when updating an existing issue."
    else:
      form_fields.append(("private", "1"))

  # If we're uploading base files, don't send the email before the uploads, so
  # that it contains the file status.
  if options.send_mail and options.download_base:
    form_fields.append(("send_mail", "1"))
  if not options.download_base:
    form_fields.append(("content_upload", "1"))
  # Large diffs are uploaded per-file later via UploadSeparatePatches.
  if len(data) > MAX_UPLOAD_SIZE:
    print "Patch is large, so uploading file patches separately."
    uploaded_diff_file = []
    form_fields.append(("separate_patches", "1"))
  else:
    uploaded_diff_file = [("data", "data.diff", data)]
  ctype, body = EncodeMultipartFormData(form_fields, uploaded_diff_file)
  response_body = rpc_server.Send("/upload", body, content_type=ctype)

  # With content uploads (or separate patches) the server responds with a
  # status line, a patchset id, and one "<key> <filename>" line per patch.
  patchset = None
  if not options.download_base or not uploaded_diff_file:
    lines = response_body.splitlines()
    if len(lines) >= 2:
      msg = lines[0]
      patchset = lines[1].strip()
      patches = [x.split(" ", 1) for x in lines[2:]]
    else:
      msg = response_body
  else:
    msg = response_body
  StatusUpdate(msg)
  if not response_body.startswith("Issue created.") and \
     not response_body.startswith("Issue updated."):
    sys.exit(0)
  # The issue id is the last path component of the reported issue URL.
  issue = msg[msg.rfind("/")+1:]

  if not uploaded_diff_file:
    result = UploadSeparatePatches(issue, rpc_server, patchset, data, options)
    if not options.download_base:
      patches = result
  if not options.download_base:
    vcs.UploadBaseFiles(issue, rpc_server, patches, patchset, options, files)
  if options.send_mail:
    rpc_server.Send("/" + issue + "/mail", payload="")
  return issue, patchset
def main():
  """Script entry point: run RealMain and exit cleanly on Ctrl-C."""
  try:
    RealMain(sys.argv)
  except KeyboardInterrupt:
    # Python 2 bare print statement: emit a newline after the ^C echo.
    print
    StatusUpdate("Interrupted.")
    sys.exit(1)


if __name__ == "__main__":
  main()
| Python |
#!/usr/bin/env python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module is used for version 2 of the Google Data APIs.
__author__ = 'j.s@google.com (Jeff Scudder)'
import unittest
import coverage
import all_tests
import atom.core
import atom.http_core
import atom.mock_http_core
import atom.auth
import atom.client
import gdata.gauth
import gdata.client
import gdata.data
import gdata.blogger.data
import gdata.blogger.client
import gdata.spreadsheets.data
from gdata.test_config import settings
# Ensure that coverage tests execute the live requests to the servers, but
# allow use of cached server responses to speed up repeated runs.
# NOTE(review): RUN_LIVE_TESTS = True means the suite performs real network
# requests when no cached response exists.
settings.RUN_LIVE_TESTS = True
settings.CLEAR_CACHE = False
def suite():
  """Return the aggregate test suite run under coverage.

  The previous implementation referenced atom_tests.core_test, a name that
  is never imported in this module and therefore always raised NameError.
  Delegate to all_tests.suite(), the same aggregate suite the __main__
  block below runs.
  """
  return unittest.TestSuite((all_tests.suite(),))
if __name__ == '__main__':
  # Record coverage while running the full aggregate suite.
  coverage.erase()
  coverage.start()
  unittest.TextTestRunner().run(all_tests.suite())
  coverage.stop()
  # NOTE(review): atom.data and gdata.core are passed to coverage.report but
  # are not imported at the top of this file -- they resolve only if some
  # other import pulled them in. Confirm before relying on this report.
  coverage.report([atom.core, atom.http_core, atom.auth, atom.data,
                   atom.mock_http_core, atom.client, gdata.gauth, gdata.client,
                   gdata.core, gdata.data, gdata.blogger.data, gdata.blogger.client,
                   gdata.spreadsheets.data])
| Python |
#!/usr/bin/python
#
# Copyright (C) 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'j.s@google.com (Jeff Scudder)'
import unittest
import atom.url
import gdata.test_config as conf
class UrlTest(unittest.TestCase):
  """Tests for atom.url parsing, serialization, and equality comparisons."""

  def testParseUrl(self):
    """parse_url should split protocol, host, port, path, and params."""
    url = atom.url.parse_url('http://www.google.com/calendar/feeds')
    self.assert_(url.protocol == 'http')
    self.assert_(url.port is None)
    self.assert_(url.host == 'www.google.com')
    self.assert_(url.path == '/calendar/feeds')
    self.assert_(url.params == {})
    url = atom.url.parse_url('http://example.com:6091/calendar/feeds')
    self.assert_(url.protocol == 'http')
    self.assert_(url.host == 'example.com')
    # Note the port is kept as a string, not converted to an int.
    self.assert_(url.port == '6091')
    self.assert_(url.path == '/calendar/feeds')
    self.assert_(url.params == {})
    # Relative URLs have no protocol or host; query params are decoded.
    url = atom.url.parse_url('/calendar/feeds?foo=bar')
    self.assert_(url.protocol is None)
    self.assert_(url.host is None)
    self.assert_(url.path == '/calendar/feeds')
    self.assert_(len(url.params.keys()) == 1)
    self.assert_('foo' in url.params)
    self.assert_(url.params['foo'] == 'bar')
    # '+' and %XX escapes in the query string are unescaped.
    url = atom.url.parse_url('/calendar/feeds?my+foo=bar%3Dx')
    self.assert_(len(url.params.keys()) == 1)
    self.assert_('my foo' in url.params)
    self.assert_(url.params['my foo'] == 'bar=x')

  def testUrlToString(self):
    url = atom.url.Url(port=80)
    url.host = 'example.com'
    # NOTE(review): assert_ takes (expr, msg); the second argument here is a
    # message, so this check always passes regardless of str(url). Likely
    # intended assertEqual(str(url), '//example.com:80') -- confirm intent.
    self.assert_(str(url), '//example.com:80')
    url = atom.url.Url(protocol='http', host='example.com', path='/feed')
    url.params['has spaces'] = 'sneaky=values?&!'
    self.assert_(url.to_string() == (
        'http://example.com/feed?has+spaces=sneaky%3Dvalues%3F%26%21'))

  def testGetRequestUri(self):
    """Request URIs omit scheme/host and keep escaped query parameters."""
    url = atom.url.Url(protocol='http', host='example.com', path='/feed')
    url.params['has spaces'] = 'sneaky=values?&!'
    self.assert_(url.get_request_uri() == (
        '/feed?has+spaces=sneaky%3Dvalues%3F%26%21'))
    self.assert_(url.get_param_string() == (
        'has+spaces=sneaky%3Dvalues%3F%26%21'))

  def testComparistons(self):
    # NOTE: method name has a typo ("Comparistons"); renaming would change
    # the public test id, so it is left as-is.
    url1 = atom.url.Url(protocol='http', host='example.com', path='/feed',
                        params={'x':'1', 'y':'2'})
    # Equal even though param order differs and http implies port 80.
    url2 = atom.url.Url(host='example.com', port=80, path='/feed',
                        params={'y':'2', 'x':'1'})
    self.assertEquals(url1, url2)
    url3 = atom.url.Url(host='example.com', port=81, path='/feed',
                        params={'x':'1', 'y':'2'})
    self.assert_(url1 != url3)
    self.assert_(url2 != url3)
    url4 = atom.url.Url(protocol='ftp', host='example.com', path='/feed',
                        params={'x':'1', 'y':'2'})
    self.assert_(url1 != url4)
    self.assert_(url2 != url4)
    self.assert_(url3 != url4)
def suite():
  """Build this module's test suite via the shared gdata test config."""
  return conf.build_suite([UrlTest])


if __name__ == '__main__':
  unittest.main()  # Discover and run the tests when executed directly.
| Python |
#!/usr/bin/python
#
# Copyright (C) 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'j.s@google.com (Jeff Scudder)'
import unittest
import atom.mock_http
import atom.http
class MockHttpClientUnitTest(unittest.TestCase):
  """Tests for atom.mock_http.MockHttpClient record/replay behavior."""

  def setUp(self):
    self.client = atom.mock_http.MockHttpClient()

  def testRepondToGet(self):
    # NOTE: method name has a typo ("Repond"); renaming would change the
    # public test id, so it is left as-is.
    # NOTE(review): atom.http_interface is not imported directly in this
    # module; it resolves only if `import atom.http` pulls it in -- confirm.
    mock_response = atom.http_interface.HttpResponse(body='Hooray!',
        status=200, reason='OK')
    self.client.add_response(mock_response, 'GET',
        'http://example.com/hooray')
    response = self.client.request('GET', 'http://example.com/hooray')
    # The request should be recorded and the canned response returned.
    self.assertEquals(len(self.client.recordings), 1)
    self.assertEquals(response.status, 200)
    self.assertEquals(response.read(), 'Hooray!')

  def testRecordResponse(self):
    """Pass-through record mode stores a live response for later replay."""
    # NOTE(review): performs a live HTTP request to www.google.com when no
    # recording exists; requires network access.
    # Turn on pass-through record mode.
    self.client.real_client = atom.http.ProxiedHttpClient()
    live_response = self.client.request('GET',
        'http://www.google.com/base/feeds/snippets?max-results=1')
    live_response_body = live_response.read()
    self.assertEquals(live_response.status, 200)
    self.assertEquals(live_response_body.startswith('<?xml'), True)
    # Requery for the now canned data.
    self.client.real_client = None
    canned_response = self.client.request('GET',
        'http://www.google.com/base/feeds/snippets?max-results=1')
    # The canned response should be the stored response.
    canned_response_body = canned_response.read()
    self.assertEquals(canned_response.status, 200)
    self.assertEquals(canned_response_body, live_response_body)

  def testUnrecordedRequest(self):
    """A request with no recording and no real client raises."""
    try:
      self.client.request('POST', 'http://example.org')
      self.fail()
    except atom.mock_http.NoRecordingFound:
      pass
def suite():
  """Collect this module's tests into a suite (test* methods)."""
  return unittest.TestSuite(
      (unittest.makeSuite(MockHttpClientUnitTest,'test'),))


if __name__ == '__main__':
  unittest.main()  # Discover and run the tests when executed directly.
| Python |
#!/usr/bin/python
#
# Copyright (C) 2006 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'j.s@google.com (Jeff Scudder)'
import os
import unittest
import atom.service
import atom.mock_http_core
import gdata.test_config as conf
class AtomServiceUnitTest(unittest.TestCase):
  """Tests for atom.service URI building, URL parsing, and basic auth."""

  def testBuildUriWithNoParams(self):
    x = atom.service.BuildUri('/base/feeds/snippets')
    self.assert_(x == '/base/feeds/snippets')

  def testBuildUriWithParams(self):
    """BuildUri should append escaped url_params joined with '&'."""
    # Add parameters to a URI
    x = atom.service.BuildUri('/base/feeds/snippets', url_params={'foo': 'bar',
                                                                  'bq': 'digital camera'})
    self.assert_(x == '/base/feeds/snippets?foo=bar&bq=digital+camera')
    self.assert_(x.startswith('/base/feeds/snippets'))
    self.assert_(x.count('?') == 1)
    self.assert_(x.count('&') == 1)
    self.assert_(x.index('?') < x.index('&'))
    self.assert_(x.index('bq=digital+camera') != -1)
    # Add parameters to a URI that already has parameters
    x = atom.service.BuildUri('/base/feeds/snippets?bq=digital+camera',
                              url_params={'foo': 'bar', 'max-results': '250'})
    self.assert_(x.startswith('/base/feeds/snippets?bq=digital+camera'))
    self.assert_(x.count('?') == 1)
    self.assert_(x.count('&') == 2)
    self.assert_(x.index('?') < x.index('&'))
    self.assert_(x.index('max-results=250') != -1)
    self.assert_(x.index('foo=bar') != -1)

  def testBuildUriWithoutParameterEscaping(self):
    """escape_params=False leaves spaces and other characters untouched."""
    x = atom.service.BuildUri('/base/feeds/snippets',
                              url_params={'foo': ' bar', 'bq': 'digital camera'},
                              escape_params=False)
    self.assert_(x.index('foo= bar') != -1)
    self.assert_(x.index('bq=digital camera') != -1)

  def testParseHttpUrl(self):
    """ProcessUrl should derive host/port/ssl/path; http defaults to port 80."""
    atom_service = atom.service.AtomService('code.google.com')
    self.assertEquals(atom_service.server, 'code.google.com')
    (host, port, ssl, path) = atom.service.ProcessUrl(atom_service,
        'http://www.google.com/service/subservice?name=value')
    self.assertEquals(ssl, False)
    self.assertEquals(host, 'www.google.com')
    self.assertEquals(port, 80)
    self.assertEquals(path, '/service/subservice?name=value')

  def testParseHttpUrlWithPort(self):
    """An explicit :port in the URL overrides the http default."""
    atom_service = atom.service.AtomService('code.google.com')
    self.assertEquals(atom_service.server, 'code.google.com')
    (host, port, ssl, path) = atom.service.ProcessUrl(atom_service,
        'http://www.google.com:12/service/subservice?name=value&newname=newvalue')
    self.assertEquals(ssl, False)
    self.assertEquals(host, 'www.google.com')
    self.assertEquals(port, 12)
    self.assert_(path.startswith('/service/subservice?'))
    self.assert_(path.find('name=value') >= len('/service/subservice?'))
    self.assert_(path.find('newname=newvalue') >= len('/service/subservice?'))

  def testParseHttpsUrl(self):
    """https URLs set ssl=True and default to port 443."""
    atom_service = atom.service.AtomService('code.google.com')
    self.assertEquals(atom_service.server, 'code.google.com')
    (host, port, ssl, path) = atom.service.ProcessUrl(atom_service,
        'https://www.google.com/service/subservice?name=value&newname=newvalue')
    self.assertEquals(ssl, True)
    self.assertEquals(host, 'www.google.com')
    self.assertEquals(port, 443)
    self.assert_(path.startswith('/service/subservice?'))
    self.assert_(path.find('name=value') >= len('/service/subservice?'))
    self.assert_(path.find('newname=newvalue') >= len('/service/subservice?'))

  def testParseHttpsUrlWithPort(self):
    """An explicit :port in an https URL overrides the 443 default."""
    atom_service = atom.service.AtomService('code.google.com')
    self.assertEquals(atom_service.server, 'code.google.com')
    (host, port, ssl, path) = atom.service.ProcessUrl(atom_service,
        'https://www.google.com:13981/service/subservice?name=value&newname=newvalue')
    self.assertEquals(ssl, True)
    self.assertEquals(host, 'www.google.com')
    self.assertEquals(port, 13981)
    self.assert_(path.startswith('/service/subservice?'))
    self.assert_(path.find('name=value') >= len('/service/subservice?'))
    self.assert_(path.find('newname=newvalue') >= len('/service/subservice?'))

  def testSetBasicAuth(self):
    """UseBasicAuth should store a base64-encoded Authorization token."""
    client = atom.service.AtomService()
    client.UseBasicAuth('foo', 'bar')
    token = client.token_store.find_token('http://')
    self.assert_(isinstance(token, atom.service.BasicAuthToken))
    # base64('foo:bar') == 'Zm9vOmJhcg=='
    self.assertEquals(token.auth_header, 'Basic Zm9vOmJhcg==')
    client.UseBasicAuth('','')
    token = client.token_store.find_token('http://')
    self.assert_(isinstance(token, atom.service.BasicAuthToken))
    # base64(':') == 'Og=='
    self.assertEquals(token.auth_header, 'Basic Og==')

  def testProcessUrlWithStringForService(self):
    """ProcessUrl accepts a hostname string, an AtomService, or None."""
    (server, port, ssl, uri) = atom.service.ProcessUrl(
        service='www.google.com', url='/base/feeds/items')
    self.assertEquals(server, 'www.google.com')
    self.assertEquals(port, 80)
    self.assertEquals(ssl, False)
    self.assert_(uri.startswith('/base/feeds/items'))
    client = atom.service.AtomService()
    client.server = 'www.google.com'
    client.ssl = True
    (server, port, ssl, uri) = atom.service.ProcessUrl(
        service=client, url='/base/feeds/items')
    self.assertEquals(server, 'www.google.com')
    self.assertEquals(ssl, True)
    self.assert_(uri.startswith('/base/feeds/items'))
    (server, port, ssl, uri) = atom.service.ProcessUrl(service=None,
        url='https://www.google.com/base/feeds/items')
    self.assertEquals(server, 'www.google.com')
    self.assertEquals(port, 443)
    self.assertEquals(ssl, True)
    self.assert_(uri.startswith('/base/feeds/items'))

  def testHostHeaderContainsNonDefaultPort(self):
    """The echoed Host header should include a port only when non-default."""
    client = atom.service.AtomService()
    client.http_client.v2_http_client = atom.mock_http_core.EchoHttpClient()
    response = client.Get('http://example.com')
    self.assertEqual(response.getheader('Echo-Host'), 'example.com:None')
    response = client.Get('https://example.com')
    self.assertEqual(response.getheader('Echo-Host'), 'example.com:None')
    response = client.Get('https://example.com:8080')
    self.assertEqual(response.getheader('Echo-Host'), 'example.com:8080')
    response = client.Get('http://example.com:1234')
    self.assertEqual(response.getheader('Echo-Host'), 'example.com:1234')

  def testBadHttpsProxyRaisesRealException(self):
    """Test that real exceptions are raised when there is an error connecting to
    a host with an https proxy
    """
    # NOTE(review): mutates os.environ['https_proxy'] without restoring it,
    # which can leak into later tests -- consider try/finally cleanup.
    client = atom.service.AtomService(server='example.com')
    client.server = 'example.com'
    os.environ['https_proxy'] = 'http://example.com'
    self.assertRaises(atom.http.ProxyError,
        atom.service.PrepareConnection, client, 'https://example.com')
def suite():
  """Build this module's test suite via the shared gdata test config."""
  return conf.build_suite([AtomServiceUnitTest])


if __name__ == '__main__':
  unittest.main()  # Discover and run the tests when executed directly.
| Python |
#!/usr/bin/env python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module is used for version 2 of the Google Data APIs.
__author__ = 'j.s@google.com (Jeff Scudder)'
import unittest
import atom.http_core
import StringIO
class UriTest(unittest.TestCase):
  """Tests for parsing, modifying, and serializing atom.http_core URIs."""

  def test_parse_uri(self):
    uri = atom.http_core.parse_uri('http://www.google.com/test?q=foo&z=bar')
    self.assertEqual(uri.scheme, 'http')
    self.assertEqual(uri.host, 'www.google.com')
    self.assertTrue(uri.port is None)
    self.assertEqual(uri.path, '/test')
    self.assertEqual(uri.query, {'z': 'bar', 'q': 'foo'})

  def test_static_parse_uri(self):
    uri = atom.http_core.Uri.parse_uri('http://test.com/?token=foo&x=1')
    self.assertEqual(uri.scheme, 'http')
    self.assertEqual(uri.host, 'test.com')
    self.assertTrue(uri.port is None)
    self.assertEqual(uri.query, {'token': 'foo', 'x': '1'})

  def test_modify_request_no_request(self):
    uri = atom.http_core.parse_uri('http://www.google.com/test?q=foo&z=bar')
    request = uri.modify_request()
    self.assertEqual(request.uri.scheme, 'http')
    self.assertEqual(request.uri.host, 'www.google.com')
    # If no port was provided, the HttpClient is responsible for determining
    # the default.
    self.assertTrue(request.uri.port is None)
    self.assertTrue(request.uri.path.startswith('/test'))
    self.assertEqual(request.uri.query, {'z': 'bar', 'q': 'foo'})
    self.assertTrue(request.method is None)
    self.assertEqual(request.headers, {})
    self.assertEqual(request._body_parts, [])

  def test_modify_request_http_with_set_port(self):
    request = atom.http_core.HttpRequest(uri=atom.http_core.Uri(port=8080),
                                         method='POST')
    request.add_body_part('hello', 'text/plain')
    uri = atom.http_core.parse_uri('//example.com/greet')
    self.assertEqual(uri.query, {})
    self.assertEqual(uri._get_relative_path(), '/greet')
    self.assertEqual(uri.host, 'example.com')
    self.assertTrue(uri.port is None)
    uri.ModifyRequest(request)
    self.assertEqual(request.uri.host, 'example.com')
    # If no scheme was provided, the URI will not add one, but the HttpClient
    # should assume the request is HTTP.
    self.assertTrue(request.uri.scheme is None)
    self.assertEqual(request.uri.port, 8080)
    self.assertEqual(request.uri.path, '/greet')
    self.assertEqual(request.method, 'POST')
    self.assertEqual(request.headers['Content-Type'], 'text/plain')

  def test_modify_request_use_default_ssl_port(self):
    request = atom.http_core.HttpRequest(
        uri=atom.http_core.Uri(scheme='https'), method='PUT')
    request.add_body_part('hello', 'text/plain')
    uri = atom.http_core.parse_uri('/greet')
    uri.modify_request(request)
    self.assertTrue(request.uri.host is None)
    self.assertEqual(request.uri.scheme, 'https')
    # If no port was provided, leave the port as None, it is up to the
    # HttpClient to set the correct default port.
    self.assertTrue(request.uri.port is None)
    self.assertEqual(request.uri.path, '/greet')
    self.assertEqual(request.method, 'PUT')
    self.assertEqual(request.headers['Content-Type'], 'text/plain')
    self.assertEqual(len(request._body_parts), 1)
    self.assertEqual(request._body_parts[0], 'hello')

  def test_to_string(self):
    uri = atom.http_core.Uri(host='www.google.com', query={'q': 'sippycode'})
    # With no scheme set, serialization defaults to http.
    self.assertEqual(uri._to_string(), 'http://www.google.com/?q=sippycode')
class HttpRequestTest(unittest.TestCase):
  """Tests for body-part management and copying of HttpRequest objects."""

  def test_request_with_one_body_part(self):
    request = atom.http_core.HttpRequest()
    self.assertEqual(len(request._body_parts), 0)
    self.assertTrue('Content-Type' not in request.headers)
    # (The original had this assertion duplicated; once is enough.)
    self.assertTrue('Content-Length' not in request.headers)
    request.add_body_part('this is a test', 'text/plain')
    self.assertEqual(len(request._body_parts), 1)
    self.assertEqual(request.headers['Content-Type'], 'text/plain')
    self.assertEqual(request._body_parts[0], 'this is a test')
    self.assertEqual(request.headers['Content-Length'],
                     str(len('this is a test')))

  def test_add_file_without_size(self):
    virtual_file = StringIO.StringIO('this is a test')
    request = atom.http_core.HttpRequest()
    # A file-like body with no declared size cannot be sent; the request must
    # refuse it rather than guess.
    try:
      request.add_body_part(virtual_file, 'text/plain')
      self.fail('We should have gotten an UnknownSize error.')
    except atom.http_core.UnknownSize:
      pass
    request.add_body_part(virtual_file, 'text/plain', len('this is a test'))
    self.assertEqual(len(request._body_parts), 1)
    self.assertEqual(request.headers['Content-Type'], 'text/plain')
    self.assertEqual(request._body_parts[0].read(), 'this is a test')
    self.assertEqual(request.headers['Content-Length'],
                     str(len('this is a test')))

  def test_copy(self):
    request = atom.http_core.HttpRequest(
        uri=atom.http_core.Uri(scheme='https', host='www.google.com'),
        method='POST', headers={'test': '1', 'ok': 'yes'})
    request.add_body_part('body1', 'text/plain')
    request.add_body_part('<html>body2</html>', 'text/html')
    copied = request._copy()
    self.assertEqual(request.uri.scheme, copied.uri.scheme)
    self.assertEqual(request.uri.host, copied.uri.host)
    self.assertEqual(request.method, copied.method)
    self.assertEqual(request.uri.path, copied.uri.path)
    self.assertEqual(request.headers, copied.headers)
    self.assertEqual(request._body_parts, copied._body_parts)
    # Mutating the copy must not affect the original (verifies deep copy).
    copied.headers['test'] = '2'
    copied._body_parts[1] = '<html>body3</html>'
    self.assertNotEqual(request.headers, copied.headers)
    self.assertNotEqual(request._body_parts, copied._body_parts)
def suite():
  # Collect every method prefixed with 'test' from both TestCase classes.
  return unittest.TestSuite((unittest.makeSuite(UriTest,'test'),
                             unittest.makeSuite(HttpRequestTest,'test')))
# Allow running this test module directly from the command line.
if __name__ == '__main__':
  unittest.main()
| Python |
#!/usr/bin/python
# -*-*- encoding: utf-8 -*-*-
#
# Copyright (C) 2006 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'api.jscudder@gmail.com (Jeff Scudder)'
import sys
import unittest
try:
from xml.etree import ElementTree
except ImportError:
from elementtree import ElementTree
import atom.data
import atom.core
import gdata.test_config as conf
XML_ENTRY_1 = """<?xml version='1.0'?>
<entry xmlns='http://www.w3.org/2005/Atom'
xmlns:g='http://base.google.com/ns/1.0'>
<category scheme="http://base.google.com/categories/itemtypes"
term="products"/>
<id> http://www.google.com/test/id/url </id>
<title type='text'>Testing 2000 series laptop</title>
<content type='xhtml'>
<div xmlns='http://www.w3.org/1999/xhtml'>A Testing Laptop</div>
</content>
<link rel='alternate' type='text/html'
href='http://www.provider-host.com/123456789'/>
<link rel='license'
href='http://creativecommons.org/licenses/by-nc/2.5/rdf'/>
<g:label>Computer</g:label>
<g:label>Laptop</g:label>
<g:label>testing laptop</g:label>
<g:item_type>products</g:item_type>
</entry>"""
class AuthorTest(unittest.TestCase):
  """Tests for serializing and parsing atom.data.Author."""

  def setUp(self):
    self.author = atom.data.Author()

  def testEmptyAuthorShouldHaveEmptyExtensionLists(self):
    self.assertTrue(isinstance(self.author._other_elements, list))
    self.assertEqual(len(self.author._other_elements), 0)
    self.assertTrue(isinstance(self.author._other_attributes, dict))
    self.assertEqual(len(self.author._other_attributes), 0)

  def testNormalAuthorShouldHaveNoExtensionElements(self):
    self.author.name = atom.data.Name(text='Jeff Scudder')
    self.assertEqual(self.author.name.text, 'Jeff Scudder')
    self.assertEqual(len(self.author._other_elements), 0)
    new_author = atom.core.XmlElementFromString(self.author.ToString(),
        atom.data.Author)
    self.assertEqual(len(new_author._other_elements), 0)
    self.assertEqual(new_author.name.text, 'Jeff Scudder')
    # An explicitly added extension element must survive a round trip.
    self.author.extension_elements.append(atom.data.ExtensionElement(
        'foo', text='bar'))
    self.assertEqual(len(self.author.extension_elements), 1)
    self.assertEqual(self.author.name.text, 'Jeff Scudder')
    new_author = atom.core.parse(self.author.ToString(), atom.data.Author)
    self.assertEqual(len(self.author.extension_elements), 1)
    self.assertEqual(new_author.name.text, 'Jeff Scudder')

  def testEmptyAuthorToAndFromStringShouldMatch(self):
    string_from_author = self.author.ToString()
    new_author = atom.core.XmlElementFromString(string_from_author,
        atom.data.Author)
    self.assertEqual(string_from_author, new_author.ToString())

  def testAuthorWithNameToAndFromStringShouldMatch(self):
    self.author.name = atom.data.Name()
    self.author.name.text = 'Jeff Scudder'
    string_from_author = self.author.ToString()
    new_author = atom.core.XmlElementFromString(string_from_author,
        atom.data.Author)
    self.assertEqual(string_from_author, new_author.ToString())
    self.assertEqual(self.author.name.text, new_author.name.text)

  def testExtensionElements(self):
    self.author.extension_attributes['foo1'] = 'bar'
    self.author.extension_attributes['foo2'] = 'rab'
    self.assertEqual(self.author.extension_attributes['foo1'], 'bar')
    self.assertEqual(self.author.extension_attributes['foo2'], 'rab')
    new_author = atom.core.parse(str(self.author), atom.data.Author)
    self.assertEqual(new_author.extension_attributes['foo1'], 'bar')
    self.assertEqual(new_author.extension_attributes['foo2'], 'rab')

  def testConvertFullAuthorToAndFromString(self):
    TEST_AUTHOR = """<?xml version="1.0" encoding="utf-8"?>
<author xmlns="http://www.w3.org/2005/Atom">
<name xmlns="http://www.w3.org/2005/Atom">John Doe</name>
<email xmlns="http://www.w3.org/2005/Atom">john@example.com</email>
<uri>http://www.google.com</uri>
</author>"""
    author = atom.core.parse(TEST_AUTHOR, atom.data.Author)
    self.assertEqual(author.name.text, 'John Doe')
    self.assertEqual(author.email.text, 'john@example.com')
    self.assertEqual(author.uri.text, 'http://www.google.com')
class EmailTest(unittest.TestCase):
  """Round-trip checks for atom.data.Email serialization."""

  def setUp(self):
    self.email = atom.data.Email()

  def testEmailToAndFromString(self):
    self.email.text = 'This is a test'
    serialized = self.email.to_string()
    round_tripped = atom.core.parse(serialized, atom.data.Email)
    self.assertEqual(self.email.text, round_tripped.text)
    self.assertEqual(self.email.extension_elements,
                     round_tripped.extension_elements)
class NameTest(unittest.TestCase):
  """Tests for atom.data.Name text content and extension attributes."""

  def setUp(self):
    self.name = atom.data.Name()

  def testEmptyNameToAndFromStringShouldMatch(self):
    string_from_name = self.name.ToString()
    new_name = atom.core.XmlElementFromString(string_from_name,
        atom.data.Name)
    self.assertEqual(string_from_name, new_name.ToString())

  def testText(self):
    self.assertTrue(self.name.text is None)
    self.name.text = 'Jeff Scudder'
    self.assertEqual(self.name.text, 'Jeff Scudder')
    new_name = atom.core.parse(self.name.to_string(), atom.data.Name)
    self.assertEqual(new_name.text, self.name.text)

  def testExtensionElements(self):
    self.name.extension_attributes['foo'] = 'bar'
    self.assertEqual(self.name.extension_attributes['foo'], 'bar')
    new_name = atom.core.parse(self.name.ToString(), atom.data.Name)
    self.assertEqual(new_name.extension_attributes['foo'], 'bar')
class ExtensionElementTest(unittest.TestCase):
  """Tests for generic ExtensionElement tree parsing and round-tripping."""

  def setUp(self):
    self.ee = atom.data.ExtensionElement('foo')
    self.EXTENSION_TREE = """<?xml version="1.0" encoding="utf-8"?>
<feed xmlns="http://www.w3.org/2005/Atom">
<g:author xmlns:g="http://www.google.com">
<g:name>John Doe
<g:foo yes="no" up="down">Bar</g:foo>
</g:name>
</g:author>
</feed>"""

  def testEmptyEEShouldProduceEmptyString(self):
    # TODO: this placeholder was never implemented in the original; it
    # currently asserts nothing.
    pass

  def testEEParsesTreeCorrectly(self):
    deep_tree = atom.core.xml_element_from_string(self.EXTENSION_TREE,
        atom.data.ExtensionElement)
    self.assertEqual(deep_tree.tag, 'feed')
    self.assertEqual(deep_tree.namespace, 'http://www.w3.org/2005/Atom')
    self.assertEqual(deep_tree.children[0].tag, 'author')
    self.assertEqual(deep_tree.children[0].namespace,
                     'http://www.google.com')
    self.assertEqual(deep_tree.children[0].children[0].tag, 'name')
    self.assertEqual(deep_tree.children[0].children[0].namespace,
                     'http://www.google.com')
    # strip() because the mixed-content text node includes layout whitespace.
    self.assertEqual(deep_tree.children[0].children[0].text.strip(),
                     'John Doe')
    self.assertEqual(
        deep_tree.children[0].children[0].children[0].text.strip(), 'Bar')
    foo = deep_tree.children[0].children[0].children[0]
    self.assertEqual(foo.tag, 'foo')
    self.assertEqual(foo.namespace, 'http://www.google.com')
    self.assertEqual(foo.attributes['up'], 'down')
    self.assertEqual(foo.attributes['yes'], 'no')
    self.assertEqual(foo.children, [])

  def testEEToAndFromStringShouldMatch(self):
    string_from_ee = self.ee.ToString()
    new_ee = atom.core.xml_element_from_string(string_from_ee,
        atom.data.ExtensionElement)
    self.assertEqual(string_from_ee, new_ee.ToString())
    deep_tree = atom.core.xml_element_from_string(self.EXTENSION_TREE,
        atom.data.ExtensionElement)
    string_from_deep_tree = deep_tree.ToString()
    new_deep_tree = atom.core.xml_element_from_string(string_from_deep_tree,
        atom.data.ExtensionElement)
    self.assertEqual(string_from_deep_tree, new_deep_tree.ToString())
class LinkTest(unittest.TestCase):
  """Tests for atom.data.Link attributes and round-tripping."""

  def setUp(self):
    self.link = atom.data.Link()

  def testLinkToAndFromString(self):
    self.link.href = 'test href'
    self.link.hreflang = 'english'
    self.link.type = 'text/html'
    self.link.extension_attributes['foo'] = 'bar'
    self.assertEqual(self.link.href, 'test href')
    self.assertEqual(self.link.hreflang, 'english')
    self.assertEqual(self.link.type, 'text/html')
    self.assertEqual(self.link.extension_attributes['foo'], 'bar')
    new_link = atom.core.parse(self.link.ToString(), atom.data.Link)
    self.assertEqual(self.link.href, new_link.href)
    self.assertEqual(self.link.type, new_link.type)
    self.assertEqual(self.link.hreflang, new_link.hreflang)
    self.assertEqual(self.link.extension_attributes['foo'],
                     new_link.extension_attributes['foo'])

  def testLinkType(self):
    test_link = atom.data.Link(type='text/html')
    self.assertEqual(test_link.type, 'text/html')
class GeneratorTest(unittest.TestCase):
  """Tests for atom.data.Generator round-tripping."""

  def setUp(self):
    self.generator = atom.data.Generator()

  def testGeneratorToAndFromString(self):
    self.generator.uri = 'www.google.com'
    self.generator.version = '1.0'
    self.generator.extension_attributes['foo'] = 'bar'
    self.assertEqual(self.generator.uri, 'www.google.com')
    self.assertEqual(self.generator.version, '1.0')
    self.assertEqual(self.generator.extension_attributes['foo'], 'bar')
    new_generator = atom.core.parse(self.generator.ToString(),
                                    atom.data.Generator)
    self.assertEqual(self.generator.uri, new_generator.uri)
    self.assertEqual(self.generator.version, new_generator.version)
    self.assertEqual(self.generator.extension_attributes['foo'],
                     new_generator.extension_attributes['foo'])
class TitleTest(unittest.TestCase):
  """Tests that Title text (including XML-special characters) round-trips."""

  def setUp(self):
    self.title = atom.data.Title()

  def testTitleToAndFromString(self):
    self.title.type = 'text'
    self.title.text = 'Less: <'
    self.assertEqual(self.title.type, 'text')
    self.assertEqual(self.title.text, 'Less: <')
    new_title = atom.core.parse(str(self.title), atom.data.Title)
    self.assertEqual(self.title.type, new_title.type)
    self.assertEqual(self.title.text, new_title.text)
class SubtitleTest(unittest.TestCase):
  """Tests that Subtitle text (including '&') round-trips."""

  def setUp(self):
    self.subtitle = atom.data.Subtitle()

  # Renamed from the copy-pasted 'testTitleToAndFromString'.
  def testSubtitleToAndFromString(self):
    self.subtitle.type = 'text'
    self.subtitle.text = 'sub & title'
    self.assertEqual(self.subtitle.type, 'text')
    self.assertEqual(self.subtitle.text, 'sub & title')
    new_subtitle = atom.core.parse(self.subtitle.ToString(),
                                   atom.data.Subtitle)
    self.assertEqual(self.subtitle.type, new_subtitle.type)
    self.assertEqual(self.subtitle.text, new_subtitle.text)
class SummaryTest(unittest.TestCase):
  """Tests that Summary text (including '<') round-trips."""

  def setUp(self):
    self.summary = atom.data.Summary()

  # Renamed from the copy-pasted 'testTitleToAndFromString'.
  def testSummaryToAndFromString(self):
    self.summary.type = 'text'
    self.summary.text = 'Less: <'
    self.assertEqual(self.summary.type, 'text')
    self.assertEqual(self.summary.text, 'Less: <')
    new_summary = atom.core.parse(self.summary.ToString(), atom.data.Summary)
    self.assertEqual(self.summary.type, new_summary.type)
    self.assertEqual(self.summary.text, new_summary.text)
class CategoryTest(unittest.TestCase):
  """Tests for Category term, scheme, and label round-tripping."""

  def setUp(self):
    self.category = atom.data.Category()

  def testCategoryToAndFromString(self):
    self.category.term = 'x'
    self.category.scheme = 'y'
    self.category.label = 'z'
    self.assertEqual(self.category.term, 'x')
    self.assertEqual(self.category.scheme, 'y')
    self.assertEqual(self.category.label, 'z')
    new_category = atom.core.parse(self.category.to_string(),
                                   atom.data.Category)
    self.assertEqual(self.category.term, new_category.term)
    self.assertEqual(self.category.scheme, new_category.scheme)
    self.assertEqual(self.category.label, new_category.label)
class ContributorTest(unittest.TestCase):
  """Tests for Contributor name, email, and uri round-tripping."""

  def setUp(self):
    self.contributor = atom.data.Contributor()

  def testContributorToAndFromString(self):
    self.contributor.name = atom.data.Name(text='J Scud')
    self.contributor.email = atom.data.Email(text='nobody@nowhere')
    self.contributor.uri = atom.data.Uri(text='http://www.google.com')
    self.assertEqual(self.contributor.name.text, 'J Scud')
    self.assertEqual(self.contributor.email.text, 'nobody@nowhere')
    self.assertEqual(self.contributor.uri.text, 'http://www.google.com')
    new_contributor = atom.core.parse(self.contributor.ToString(),
                                      atom.data.Contributor)
    self.assertEqual(self.contributor.name.text, new_contributor.name.text)
    self.assertEqual(self.contributor.email.text, new_contributor.email.text)
    self.assertEqual(self.contributor.uri.text, new_contributor.uri.text)
class IdTest(unittest.TestCase):
  """Tests for Id text round-tripping."""

  def setUp(self):
    self.my_id = atom.data.Id()

  def testIdToAndFromString(self):
    self.my_id.text = 'my nifty id'
    self.assertEqual(self.my_id.text, 'my nifty id')
    new_id = atom.core.parse(self.my_id.ToString(), atom.data.Id)
    self.assertEqual(self.my_id.text, new_id.text)
class IconTest(unittest.TestCase):
  """Tests for Icon text round-tripping."""

  def setUp(self):
    self.icon = atom.data.Icon()

  def testIconToAndFromString(self):
    self.icon.text = 'my picture'
    self.assertEqual(self.icon.text, 'my picture')
    new_icon = atom.core.parse(str(self.icon), atom.data.Icon)
    self.assertEqual(self.icon.text, new_icon.text)
class LogoTest(unittest.TestCase):
  """Tests for Logo text round-tripping."""

  def setUp(self):
    self.logo = atom.data.Logo()

  def testLogoToAndFromString(self):
    self.logo.text = 'my logo'
    self.assertEqual(self.logo.text, 'my logo')
    new_logo = atom.core.parse(self.logo.ToString(), atom.data.Logo)
    self.assertEqual(self.logo.text, new_logo.text)
class RightsTest(unittest.TestCase):
  """Tests for Rights text and type round-tripping."""

  def setUp(self):
    self.rights = atom.data.Rights()

  # Renamed from the copy-pasted 'testContributorToAndFromString'.
  def testRightsToAndFromString(self):
    self.rights.text = 'you have the right to remain silent'
    self.rights.type = 'text'
    self.assertEqual(self.rights.text, 'you have the right to remain silent')
    self.assertEqual(self.rights.type, 'text')
    new_rights = atom.core.parse(self.rights.ToString(), atom.data.Rights)
    self.assertEqual(self.rights.text, new_rights.text)
    self.assertEqual(self.rights.type, new_rights.type)
class UpdatedTest(unittest.TestCase):
  """Tests for Updated text round-tripping."""

  def setUp(self):
    self.updated = atom.data.Updated()

  def testUpdatedToAndFromString(self):
    self.updated.text = 'my time'
    self.assertEqual(self.updated.text, 'my time')
    new_updated = atom.core.parse(self.updated.ToString(), atom.data.Updated)
    self.assertEqual(self.updated.text, new_updated.text)
class PublishedTest(unittest.TestCase):
  """Tests for Published text round-tripping."""

  def setUp(self):
    self.published = atom.data.Published()

  def testPublishedToAndFromString(self):
    self.published.text = 'pub time'
    self.assertEqual(self.published.text, 'pub time')
    new_published = atom.core.parse(self.published.ToString(),
                                    atom.data.Published)
    self.assertEqual(self.published.text, new_published.text)
class FeedEntryParentTest(unittest.TestCase):
  """The test accesses hidden methods in atom.FeedEntryParent."""

  def testConvertToAndFromElementTree(self):
    # Use entry because FeedEntryParent doesn't have a tag or namespace.
    original = atom.data.Entry()
    # 'duplicate' instead of 'copy' to avoid shadowing the stdlib module name.
    duplicate = atom.data.FeedEntryParent()
    original.author.append(atom.data.Author(name=atom.data.Name(
        text='J Scud')))
    self.assertEqual(original.author[0].name.text, 'J Scud')
    self.assertEqual(duplicate.author, [])
    original.id = atom.data.Id(text='test id')
    self.assertEqual(original.id.text, 'test id')
    self.assertTrue(duplicate.id is None)
    # Harvesting the original's element tree must reproduce its members.
    duplicate._harvest_tree(original._to_tree())
    self.assertEqual(original.author[0].name.text,
                     duplicate.author[0].name.text)
    self.assertEqual(original.id.text, duplicate.id.text)
class EntryTest(unittest.TestCase):
  """Tests for parsing and serializing atom.data.Entry."""

  def testConvertToAndFromString(self):
    entry = atom.data.Entry()
    entry.author.append(atom.data.Author(name=atom.data.Name(text='js')))
    entry.title = atom.data.Title(text='my test entry')
    self.assertEqual(entry.author[0].name.text, 'js')
    self.assertEqual(entry.title.text, 'my test entry')
    new_entry = atom.core.parse(entry.ToString(), atom.data.Entry)
    self.assertEqual(new_entry.author[0].name.text, 'js')
    self.assertEqual(new_entry.title.text, 'my test entry')

  def testEntryCorrectlyConvertsActualData(self):
    entry = atom.core.parse(XML_ENTRY_1, atom.data.Entry)
    self.assertEqual(entry.category[0].scheme,
                     'http://base.google.com/categories/itemtypes')
    self.assertEqual(entry.category[0].term, 'products')
    # The id element's padded whitespace must be preserved by the parser.
    self.assertEqual(entry.id.text, '    http://www.google.com/test/id/url   ')
    self.assertEqual(entry.title.text, 'Testing 2000 series laptop')
    self.assertEqual(entry.title.type, 'text')
    self.assertEqual(entry.content.type, 'xhtml')
    #TODO check all other values for the test entry

  def testEntryWithFindElementAndFindAttribute(self):
    entry = atom.data.Entry()
    entry.link.append(atom.data.Link(rel='self', href='x'))
    entry.link.append(atom.data.Link(rel='foo', href='y'))
    entry.link.append(atom.data.Link(rel='edit', href='z'))
    self_link = None
    edit_link = None
    for link in entry.get_elements('link', 'http://www.w3.org/2005/Atom'):
      # (Removed an unused tuple-unpack of link.__class__._get_rules(2) that
      # was left over from debugging.)
      if link.get_attributes('rel')[0].value == 'self':
        self_link = link.get_attributes('href')[0].value
      elif link.get_attributes('rel')[0].value == 'edit':
        edit_link = link.get_attributes('href')[0].value
    self.assertEqual(self_link, 'x')
    self.assertEqual(edit_link, 'z')

  def testAppControl(self):
    TEST_BASE_ENTRY = """<?xml version='1.0'?>
<entry xmlns='http://www.w3.org/2005/Atom'
xmlns:g='http://base.google.com/ns/1.0'>
<category scheme="http://base.google.com/categories/itemtypes"
term="products"/>
<title type='text'>Testing 2000 series laptop</title>
<content type='xhtml'>
<div xmlns='http://www.w3.org/1999/xhtml'>A Testing Laptop</div>
</content>
<app:control xmlns:app='http://purl.org/atom/app#'>
<app:draft>yes</app:draft>
<gm:disapproved
xmlns:gm='http://base.google.com/ns-metadata/1.0'/>
</app:control>
<link rel='alternate' type='text/html'
href='http://www.provider-host.com/123456789'/>
<g:label>Computer</g:label>
<g:label>Laptop</g:label>
<g:label>testing laptop</g:label>
<g:item_type>products</g:item_type>
</entry>"""
    entry = atom.core.parse(TEST_BASE_ENTRY, atom.data.Entry)
    self.assertEqual(entry.control.draft.text, 'yes')
    # The non-app extension inside app:control is kept as an extension.
    self.assertEqual(len(entry.control.extension_elements), 1)
    self.assertEqual(entry.control.extension_elements[0].tag, 'disapproved')
class ControlTest(unittest.TestCase):
  """Tests for versioned parsing of app:control elements."""

  def testVersionRuleGeneration(self):
    self.assertEqual(atom.core._get_qname(atom.data.Control, 1),
                     '{http://purl.org/atom/app#}control')
    self.assertEqual(atom.data.Control._get_rules(1)[0],
                     '{http://purl.org/atom/app#}control')

  def testVersionedControlFromString(self):
    # v1 uses the pre-standard app namespace; v2 uses the RFC 5023 namespace.
    xml_v1 = """<control xmlns="http://purl.org/atom/app#">
<draft>no</draft></control>"""
    xml_v2 = """<control xmlns="http://www.w3.org/2007/app">
<draft>no</draft></control>"""
    control_v1 = atom.core.parse(xml_v1, atom.data.Control, 1)
    control_v2 = atom.core.parse(xml_v2, atom.data.Control, 2)
    self.assertTrue(control_v1 is not None)
    self.assertTrue(control_v2 is not None)
    # Parsing with mismatched version numbers should return None.
    self.assertTrue(atom.core.parse(xml_v1, atom.data.Control, 2) is None)
    self.assertTrue(atom.core.parse(xml_v2, atom.data.Control, 1) is None)

  def testConvertToAndFromString(self):
    control = atom.data.Control()
    control.text = 'some text'
    control.draft = atom.data.Draft(text='yes')
    self.assertEqual(control.draft.text, 'yes')
    self.assertEqual(control.text, 'some text')
    self.assertTrue(isinstance(control.draft, atom.data.Draft))
    new_control = atom.core.parse(str(control), atom.data.Control)
    self.assertEqual(control.draft.text, new_control.draft.text)
    self.assertEqual(control.text, new_control.text)
    self.assertTrue(isinstance(new_control.draft, atom.data.Draft))
class DraftTest(unittest.TestCase):
  """Tests for Draft text and extension attributes round-tripping."""

  def testConvertToAndFromString(self):
    draft = atom.data.Draft()
    draft.text = 'maybe'
    draft.extension_attributes['foo'] = 'bar'
    self.assertEqual(draft.text, 'maybe')
    self.assertEqual(draft.extension_attributes['foo'], 'bar')
    new_draft = atom.core.parse(str(draft), atom.data.Draft)
    self.assertEqual(draft.text, new_draft.text)
    self.assertEqual(draft.extension_attributes['foo'],
                     new_draft.extension_attributes['foo'])
class SourceTest(unittest.TestCase):
  """Tests for atom.data.Source serialization."""

  def testConvertToAndFromString(self):
    source = atom.data.Source()
    source.author.append(atom.data.Author(name=atom.data.Name(text='js')))
    source.title = atom.data.Title(text='my test source')
    source.generator = atom.data.Generator(text='gen')
    self.assertEqual(source.author[0].name.text, 'js')
    self.assertEqual(source.title.text, 'my test source')
    self.assertEqual(source.generator.text, 'gen')
    new_source = atom.core.parse(source.ToString(), atom.data.Source)
    self.assertEqual(new_source.author[0].name.text, 'js')
    self.assertEqual(new_source.title.text, 'my test source')
    self.assertEqual(new_source.generator.text, 'gen')
class FeedTest(unittest.TestCase):
  """Tests for atom.data.Feed serialization and entry ordering."""

  def testConvertToAndFromString(self):
    feed = atom.data.Feed()
    feed.author.append(atom.data.Author(name=atom.data.Name(text='js')))
    feed.title = atom.data.Title(text='my test source')
    feed.generator = atom.data.Generator(text='gen')
    feed.entry.append(atom.data.Entry(author=[atom.data.Author(
        name=atom.data.Name(text='entry author'))]))
    self.assertEqual(feed.author[0].name.text, 'js')
    self.assertEqual(feed.title.text, 'my test source')
    self.assertEqual(feed.generator.text, 'gen')
    self.assertEqual(feed.entry[0].author[0].name.text, 'entry author')
    new_feed = atom.core.parse(feed.ToString(), atom.data.Feed)
    self.assertEqual(new_feed.author[0].name.text, 'js')
    self.assertEqual(new_feed.title.text, 'my test source')
    self.assertEqual(new_feed.generator.text, 'gen')
    self.assertEqual(new_feed.entry[0].author[0].name.text, 'entry author')

  def testPreserveEntryOrder(self):
    # Entries are interleaved with other feed children to check that document
    # order of <entry> elements is preserved regardless of siblings.
    test_xml = (
        '<feed xmlns="http://www.w3.org/2005/Atom">'
        '<entry><id>0</id></entry>'
        '<entry><id>1</id></entry>'
        '<title>Testing Order</title>'
        '<entry><id>2</id></entry>'
        '<entry><id>3</id></entry>'
        '<entry><id>4</id></entry>'
        '<entry><id>5</id></entry>'
        '<entry><id>6</id></entry>'
        '<entry><id>7</id></entry>'
        '<author/>'
        '<entry><id>8</id></entry>'
        '<id>feed_id</id>'
        '<entry><id>9</id></entry>'
        '</feed>')
    feed = atom.core.parse(test_xml, atom.data.Feed)
    for i in xrange(10):
      self.assertEqual(feed.entry[i].id.text, str(i))
    # Order must survive a serialize/parse round trip.
    feed = atom.core.parse(feed.ToString(), atom.data.Feed)
    for i in xrange(10):
      self.assertEqual(feed.entry[i].id.text, str(i))
    # A swap of entries 3 and 4 must also survive a round trip.
    feed.entry[3], feed.entry[4] = feed.entry[4], feed.entry[3]
    self.assertEqual(feed.entry[2].id.text, '2')
    self.assertEqual(feed.entry[3].id.text, '4')
    self.assertEqual(feed.entry[4].id.text, '3')
    self.assertEqual(feed.entry[5].id.text, '5')
    feed = atom.core.parse(feed.to_string(), atom.data.Feed)
    self.assertEqual(feed.entry[2].id.text, '2')
    self.assertEqual(feed.entry[3].id.text, '4')
    self.assertEqual(feed.entry[4].id.text, '3')
    self.assertEqual(feed.entry[5].id.text, '5')
class ContentEntryParentTest(unittest.TestCase):
  """Tests for atom.data.Content (text, type, and src attributes)."""

  def setUp(self):
    self.content = atom.data.Content()

  def testConvertToAndFromElementTree(self):
    self.content.text = 'my content'
    self.content.type = 'text'
    self.content.src = 'my source'
    self.assertEqual(self.content.text, 'my content')
    self.assertEqual(self.content.type, 'text')
    self.assertEqual(self.content.src, 'my source')
    new_content = atom.core.parse(self.content.ToString(), atom.data.Content)
    self.assertEqual(self.content.text, new_content.text)
    self.assertEqual(self.content.type, new_content.type)
    self.assertEqual(self.content.src, new_content.src)

  def testContentConstructorSetsSrc(self):
    new_content = atom.data.Content(src='abcd')
    self.assertEqual(new_content.src, 'abcd')

  def testContentFromString(self):
    content_xml = '<content xmlns="http://www.w3.org/2005/Atom" type="test"/>'
    content = atom.core.parse(content_xml, atom.data.Content)
    self.assertTrue(isinstance(content, atom.data.Content))
    self.assertEqual(content.type, 'test')
class PreserveUnkownElementTest(unittest.TestCase):
  """Tests correct preservation of XML elements which are non Atom."""
  # NOTE(review): the class name's 'Unkown' typo is kept — renaming would
  # change the test's discoverable identity for external runners.

  def setUp(self):
    GBASE_ATTRIBUTE_FEED = """<?xml version='1.0' encoding='UTF-8'?>
<feed xmlns='http://www.w3.org/2005/Atom'
xmlns:openSearch='http://a9.com/-/spec/opensearchrss/1.0/'
xmlns:gm='http://base.google.com/ns-metadata/1.0'>
<id>http://www.google.com/base/feeds/attributes</id>
<updated>2006-11-01T20:35:59.578Z</updated>
<category scheme='http://base.google.com/categories/itemtypes'
term='online jobs'></category>
<category scheme='http://base.google.com/categories/itemtypes'
term='jobs'></category>
<title type='text'>histogram for query: [item type:jobs]</title>
<link rel='alternate' type='text/html'
href='http://base.google.com'></link>
<link rel='self' type='application/atom+xml'
href='http://www.google.com/base/attributes/jobs'></link>
<generator version='1.0'
uri='http://base.google.com'>GoogleBase</generator>
<openSearch:totalResults>16</openSearch:totalResults>
<openSearch:startIndex>1</openSearch:startIndex>
<openSearch:itemsPerPage>16</openSearch:itemsPerPage>
<entry>
<id>http://www.google.com/base/feeds/attributes/job+industy</id>
<updated>2006-11-01T20:36:00.100Z</updated>
<title type='text'>job industry(text)</title>
<content type='text'>Attribute"job industry" of type text.
</content>
<gm:attribute name='job industry' type='text' count='4416629'>
<gm:value count='380772'>it internet</gm:value>
<gm:value count='261565'>healthcare</gm:value>
<gm:value count='142018'>information technology</gm:value>
<gm:value count='124622'>accounting</gm:value>
<gm:value count='111311'>clerical and administrative</gm:value>
<gm:value count='82928'>other</gm:value>
<gm:value count='77620'>sales and sales management</gm:value>
<gm:value count='68764'>information systems</gm:value>
<gm:value count='65859'>engineering and architecture</gm:value>
<gm:value count='64757'>sales</gm:value>
</gm:attribute>
</entry>
</feed>"""
    self.feed = atom.core.parse(GBASE_ATTRIBUTE_FEED,
                                atom.data.Feed)

  def testCaptureOpenSearchElements(self):
    self.assertEqual(self.feed.FindExtensions('totalResults')[0].tag,
                     'totalResults')
    self.assertEqual(self.feed.FindExtensions('totalResults')[0].namespace,
                     'http://a9.com/-/spec/opensearchrss/1.0/')
    open_search_extensions = self.feed.FindExtensions(
        namespace='http://a9.com/-/spec/opensearchrss/1.0/')
    # totalResults, startIndex, and itemsPerPage.
    self.assertEqual(len(open_search_extensions), 3)
    for element in open_search_extensions:
      self.assertEqual(element.namespace,
                       'http://a9.com/-/spec/opensearchrss/1.0/')

  def testCaptureMetaElements(self):
    meta_elements = self.feed.entry[0].FindExtensions(
        namespace='http://base.google.com/ns-metadata/1.0')
    self.assertEqual(len(meta_elements), 1)
    self.assertEqual(meta_elements[0].attributes['count'], '4416629')
    self.assertEqual(len(meta_elements[0].children), 10)

  def testCaptureMetaChildElements(self):
    meta_elements = self.feed.entry[0].FindExtensions(
        namespace='http://base.google.com/ns-metadata/1.0')
    meta_children = meta_elements[0].FindChildren(
        namespace='http://base.google.com/ns-metadata/1.0')
    self.assertEqual(len(meta_children), 10)
    for child in meta_children:
      self.assertEqual(child.tag, 'value')
class LinkFinderTest(unittest.TestCase):
  """Tests for the link-finding helpers on a parsed entry."""

  def setUp(self):
    self.entry = atom.core.parse(XML_ENTRY_1, atom.data.Entry)

  def testLinkFinderGetsLicenseLink(self):
    self.assertTrue(isinstance(self.entry.GetLink('license'),
                               atom.data.Link))
    self.assertTrue(isinstance(self.entry.GetLicenseLink(), atom.data.Link))
    self.assertEqual(self.entry.GetLink('license').href,
                     'http://creativecommons.org/licenses/by-nc/2.5/rdf')
    self.assertEqual(self.entry.get_license_link().href,
                     'http://creativecommons.org/licenses/by-nc/2.5/rdf')
    self.assertEqual(self.entry.GetLink('license').rel, 'license')
    self.assertEqual(self.entry.FindLicenseLink(),
                     'http://creativecommons.org/licenses/by-nc/2.5/rdf')

  def testLinkFinderGetsAlternateLink(self):
    self.assertTrue(isinstance(self.entry.GetLink('alternate'),
                               atom.data.Link))
    self.assertEqual(self.entry.GetLink('alternate').href,
                     'http://www.provider-host.com/123456789')
    self.assertEqual(self.entry.FindAlternateLink(),
                     'http://www.provider-host.com/123456789')
    self.assertEqual(self.entry.GetLink('alternate').rel, 'alternate')
class AtomBaseTest(unittest.TestCase):
  """Checks that nested extension elements survive conversion to a tree."""

  def testAtomBaseConvertsExtensions(self):
    # Using Id because it adds no additional members.
    base_element = atom.data.Id()
    grandchild = atom.data.ExtensionElement('bar', namespace='http://ns0.com')
    child = atom.data.ExtensionElement('foo', namespace='http://ns0.com')
    child.children.append(grandchild)
    base_element.extension_elements.append(child)
    self.assertEquals(len(base_element.extension_elements), 1)
    first_extension = base_element.extension_elements[0]
    self.assertEquals(len(first_extension.children), 1)
    self.assertEquals(first_extension.tag, 'foo')
    self.assertEquals(first_extension.children[0].tag, 'bar')
    # The extension hierarchy must appear in the generated element tree.
    element_tree = base_element._to_tree()
    foo_node = element_tree.find('{http://ns0.com}foo')
    self.assert_(foo_node is not None)
    self.assert_(foo_node.find('{http://ns0.com}bar') is not None)
class UtfParsingTest(unittest.TestCase):
  """Round-trips non-ASCII (Greek) text through parsing and serialization.

  NOTE(review): relies on the Python 2 `unicode` builtin and mutates the
  module-level atom.core.STRING_ENCODING; not Python 3 compatible.
  """

  def setUp(self):
    # Entry whose <title> type attribute and text are both Greek letters.
    self.test_xml = u"""<?xml version="1.0" encoding="utf-8"?>
<entry xmlns='http://www.w3.org/2005/Atom'>
<id>http://www.google.com/test/id/url</id>
<title type='αλφα'>αλφα</title>
</entry>"""

  def testMemberStringEncoding(self):
    """Parsed members are unicode objects and survive re-serialization."""
    atom_entry = atom.core.parse(self.test_xml, atom.data.Entry)
    self.assert_(isinstance(atom_entry.title.type, unicode))
    self.assertEqual(atom_entry.title.type, u'\u03B1\u03BB\u03C6\u03B1')
    self.assertEqual(atom_entry.title.text, u'\u03B1\u03BB\u03C6\u03B1')
    # Setting object members to unicode strings is supported.
    atom_entry.title.type = u'\u03B1\u03BB\u03C6\u03B1'
    xml = atom_entry.ToString()
    # The unicode code points should be converted to XML escaped sequences.
    self.assert_('αλφα' in xml)
    # Make sure that we can use plain text when MEMBER_STRING_ENCODING is utf8
    atom_entry.title.type = "plain text"
    atom_entry.title.text = "more text"
    xml = atom_entry.ToString()
    self.assert_("plain text" in xml)
    self.assert_("more text" in xml)
    # Test something else than utf-8
    atom.core.STRING_ENCODING = 'iso8859_7'
    atom_entry = atom.core.parse(self.test_xml, atom.data.Entry)
    self.assert_(atom_entry.title.type == u'\u03B1\u03BB\u03C6\u03B1')
    self.assert_(atom_entry.title.text == u'\u03B1\u03BB\u03C6\u03B1')
    # Test using unicode strings directly for object members
    atom_entry = atom.core.parse(self.test_xml, atom.data.Entry)
    self.assert_(atom_entry.title.type == u'\u03B1\u03BB\u03C6\u03B1')
    self.assert_(atom_entry.title.text == u'\u03B1\u03BB\u03C6\u03B1')
    # Make sure that we can use plain text when MEMBER_STRING_ENCODING is
    # unicode
    atom_entry.title.type = "plain text"
    atom_entry.title.text = "more text"
    xml = atom_entry.ToString()
    self.assert_("plain text" in xml)
    self.assert_("more text" in xml)

  def testConvertExampleXML(self):
    """A UTF-8 byte string containing multi-byte characters must parse.

    The content element contains the raw bytes \\xC2\\xB7 (MIDDLE DOT).
    """
    GBASE_STRING_ENCODING_ENTRY = """<?xml version='1.0' encoding='UTF-8'?>
<entry xmlns='http://www.w3.org/2005/Atom'
xmlns:gm='http://base.google.com/ns-metadata/1.0'
xmlns:g='http://base.google.com/ns/1.0'
xmlns:batch='http://schemas.google.com/gdata/batch'>
<id>http://www.google.com/base/feeds/snippets/1749</id>
<published>2007-12-09T03:13:07.000Z</published>
<updated>2008-01-07T03:26:46.000Z</updated>
<category scheme='http://base.google.com/categories/itemtypes'
term='Products'/>
<title type='text'>Digital Camera Cord Fits DSC-R1 S40</title>
<content type='html'>SONY \xC2\xB7 Cybershot Digital Camera Usb
Cable DESCRIPTION This is a 2.5 USB 2.0 A to Mini B (5 Pin)
high quality digital camera cable used for connecting your
Sony Digital Cameras and Camcoders. Backward
Compatible with USB 2.0, 1.0 and 1.1. Fully ...</content>
<link rel='alternate' type='text/html'
href='http://adfarm.mediaplex.com/ad/ck/711-5256-8196-2mm'/>
<link rel='self' type='application/atom+xml'
href='http://www.google.com/base/feeds/snippets/1749'/>
<author>
<name>eBay</name>
</author>
<g:item_type type='text'>Products</g:item_type>
<g:item_language type='text'>EN</g:item_language>
<g:target_country type='text'>US</g:target_country>
<g:price type='floatUnit'>0.99 usd</g:price>
<g:image_link
type='url'>http://www.example.com/pict/27_1.jpg</g:image_link>
<g:category type='text'>Cameras & Photo>Digital Camera
Accessories>Cables</g:category>
<g:category type='text'>Cords & USB Cables</g:category>
<g:customer_id type='int'>11729</g:customer_id>
<g:id type='text'>270195049057</g:id>
<g:expiration_date
type='dateTime'>2008-02-06T03:26:46Z</g:expiration_date>
</entry>"""
    try:
      entry = atom.core.parse(GBASE_STRING_ENCODING_ENTRY,
                              atom.data.Entry)
    except UnicodeDecodeError:
      self.fail('Error when converting XML')
class VersionedXmlTest(unittest.TestCase):
  """Version-aware parsing: app:control is only recognized by the v2 rules."""

  def test_monoversioned_parent_with_multiversioned_child(self):
    entry_xml = """<entry xmlns='http://www.w3.org/2005/Atom'>
<app:control xmlns:app='http://www.w3.org/2007/app'>
<app:draft>yes</app:draft>
</app:control>
</entry>"""
    rules_for_v2 = atom.data.Entry._get_rules(2)
    self.assert_('{http://www.w3.org/2007/app}control' in rules_for_v2[1])
    # Parsing with version=2 populates the control member.
    parsed = atom.core.parse(entry_xml, atom.data.Entry, version=2)
    self.assert_(parsed is not None)
    self.assert_(parsed.control is not None)
    self.assert_(parsed.control.draft is not None)
    self.assertEqual(parsed.control.draft.text, 'yes')
    # v1 rules should not parse v2 XML.
    parsed = atom.core.parse(entry_xml, atom.data.Entry, version=1)
    self.assert_(parsed is not None)
    self.assert_(parsed.control is None)
    # The default version should be v1.
    parsed = atom.core.parse(entry_xml, atom.data.Entry)
    self.assert_(parsed is not None)
    self.assert_(parsed.control is None)
class DataModelSanityTest(unittest.TestCase):
  """Runs the shared data-class sanity checks over the atom.data elements."""

  def test_xml_elements(self):
    data_classes = [
        atom.data.Feed,
        atom.data.Source,
        atom.data.Logo,
        atom.data.Control,
        atom.data.Draft,
        atom.data.Generator,
    ]
    conf.check_data_classes(self, data_classes)
def suite():
  """Builds a suite containing every test case defined in this module."""
  test_cases = [
      AuthorTest, EmailTest, NameTest, ExtensionElementTest, LinkTest,
      GeneratorTest, TitleTest, SubtitleTest, SummaryTest, IdTest, IconTest,
      LogoTest, RightsTest, UpdatedTest, PublishedTest, FeedEntryParentTest,
      EntryTest, ContentEntryParentTest, PreserveUnkownElementTest, FeedTest,
      LinkFinderTest, AtomBaseTest, UtfParsingTest, VersionedXmlTest,
      DataModelSanityTest,
  ]
  return conf.build_suite(test_cases)


if __name__ == '__main__':
  unittest.main()
| Python |
#!/usr/bin/env python
#
# Copyright (C) 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module is used for version 2 of the Google Data APIs.
__author__ = 'j.s@google.com (Jeff Scudder)'
import unittest
try:
from xml.etree import cElementTree as ElementTree
except ImportError:
try:
import cElementTree as ElementTree
except ImportError:
try:
from xml.etree import ElementTree
except ImportError:
from elementtree import ElementTree
import atom.core
import gdata.test_config as conf
# Sample XML documents used by the tests below.
# SAMPLE_XML mixes a default namespace with a 'two:' prefixed one and
# repeats <inner> children with varying attributes.
SAMPLE_XML = ('<outer xmlns="http://example.com/xml/1" '
              'xmlns:two="http://example.com/xml/2">'
              '<inner x="123"/>'
              '<inner x="234" y="abc"/>'
              '<inner>'
              '<two:nested>Some Test</two:nested>'
              '<nested>Different Namespace</nested>'
              '</inner>'
              '<other two:z="true"></other>'
              '</outer>')
NO_NAMESPACE_XML = ('<foo bar="123"><baz>Baz Text!</baz></foo>')
# NOTE(review): in both V1_XML and V2_XML the <same ...> element is opened
# but never closed, so these strings are not well-formed XML — confirm
# whether any test depends on that before fixing.
V1_XML = ('<able xmlns="http://example.com/1" '
          'xmlns:ex="http://example.com/ex/1">'
          '<baker foo="42"/>'
          '<ex:charlie>Greetings!</ex:charlie>'
          '<same xmlns="http://example.com/s" x="true">'
          '</able>')
V2_XML = ('<alpha xmlns="http://example.com/2" '
          'xmlns:ex="http://example.com/ex/2">'
          '<bravo bar="42"/>'
          '<ex:charlie>Greetings!</ex:charlie>'
          '<same xmlns="http://example.com/s" x="true">'
          '</alpha>')
class Child(atom.core.XmlElement):
  """Test element whose qualified name differs between API versions 1 and 2."""
  _qname = ('{http://example.com/1}child', '{http://example.com/2}child')


class Foo(atom.core.XmlElement):
  """Test element with no namespace; collected as a repeated member below."""
  _qname = 'foo'


class Example(atom.core.XmlElement):
  """Test element exercising single/repeated children and versioned attrs."""
  _qname = '{http://example.com}foo'
  child = Child          # single child element
  foos = [Foo]           # repeated child elements
  tag = 'tag'            # plain attribute
  versioned_attr = ('attr', '{http://new_ns}attr')  # attr qname per version


# Example XmlElement subclass declarations.
class Inner(atom.core.XmlElement):
  """Namespaced test element exposing its 'x' attribute as my_x."""
  _qname = '{http://example.com/xml/1}inner'
  my_x = 'x'


class Outer(atom.core.XmlElement):
  """Container element collecting repeated Inner children."""
  _qname = '{http://example.com/xml/1}outer'
  innards = [Inner]
class XmlElementTest(unittest.TestCase):
  """Covers XmlElement construction, rule generation, parsing, and lookup."""

  def testGetQName(self):
    """_get_qname selects the qname matching the requested version."""
    class Unversioned(atom.core.XmlElement):
      _qname = '{http://example.com}foo'

    class Versioned(atom.core.XmlElement):
      _qname = ('{http://example.com/1}foo', '{http://example.com/2}foo')

    self.assert_(
        atom.core._get_qname(Unversioned, 1) == '{http://example.com}foo')
    self.assert_(
        atom.core._get_qname(Unversioned, 2) == '{http://example.com}foo')
    self.assert_(
        atom.core._get_qname(Versioned, 1) == '{http://example.com/1}foo')
    self.assert_(
        atom.core._get_qname(Versioned, 2) == '{http://example.com/2}foo')

  def testConstructor(self):
    """A freshly constructed Example has empty/None members."""
    e = Example()
    self.assert_(e.child is None)
    self.assert_(e.tag is None)
    self.assert_(e.versioned_attr is None)
    self.assert_(e.foos == [])
    self.assert_(e.text is None)

  def testGetRules(self):
    """_get_rules yields (qname, element rules, attribute rules) per version."""
    rules1 = Example._get_rules(1)
    self.assert_(rules1[0] == '{http://example.com}foo')
    # Element rule values are (member name, class, is_repeated).
    self.assert_(rules1[1]['{http://example.com/1}child'] == ('child', Child,
                                                              False))
    self.assert_(rules1[1]['foo'] == ('foos', Foo, True))
    self.assert_(rules1[2]['tag'] == 'tag')
    self.assert_(rules1[2]['attr'] == 'versioned_attr')
    # Check to make sure we don't recalculate the rules.
    self.assert_(rules1 == Example._get_rules(1))
    rules2 = Example._get_rules(2)
    self.assert_(rules2[0] == '{http://example.com}foo')
    self.assert_(rules2[1]['{http://example.com/2}child'] == ('child', Child,
                                                              False))
    self.assert_(rules2[1]['foo'] == ('foos', Foo, True))
    self.assert_(rules2[2]['tag'] == 'tag')
    self.assert_(rules2[2]['{http://new_ns}attr'] == 'versioned_attr')

  def testGetElements(self):
    """get_elements filters declared and undeclared children by tag/ns/version."""
    e = Example()
    e.child = Child()
    e.child.text = 'child text'
    e.foos.append(Foo())
    e.foos[0].text = 'foo1'
    e.foos.append(Foo())
    e.foos[1].text = 'foo2'
    # Undeclared children live in _other_elements.
    e._other_elements.append(atom.core.XmlElement())
    e._other_elements[0]._qname = 'bar'
    e._other_elements[0].text = 'other1'
    e._other_elements.append(atom.core.XmlElement())
    e._other_elements[1]._qname = 'child'
    e._other_elements[1].text = 'other2'
    self.contains_expected_elements(e.get_elements(),
        ['foo1', 'foo2', 'child text', 'other1', 'other2'])
    self.contains_expected_elements(e.get_elements('child'),
        ['child text', 'other2'])
    self.contains_expected_elements(
        e.get_elements('child', 'http://example.com/1'), ['child text'])
    self.contains_expected_elements(
        e.get_elements('child', 'http://example.com/2'), [])
    self.contains_expected_elements(
        e.get_elements('child', 'http://example.com/2', 2), ['child text'])
    self.contains_expected_elements(
        e.get_elements('child', 'http://example.com/1', 2), [])
    # Version 3 behaves like version 2 here — presumably an out-of-range
    # version falls back to the highest declared one; TODO confirm.
    self.contains_expected_elements(
        e.get_elements('child', 'http://example.com/2', 3), ['child text'])
    self.contains_expected_elements(e.get_elements('bar'), ['other1'])
    self.contains_expected_elements(e.get_elements('bar', version=2),
                                    ['other1'])
    self.contains_expected_elements(e.get_elements('bar', version=3),
                                    ['other1'])

  def contains_expected_elements(self, elements, expected_texts):
    """Asserts *elements* carries exactly the texts in *expected_texts*
    (order-insensitive)."""
    self.assert_(len(elements) == len(expected_texts))
    for element in elements:
      self.assert_(element.text in expected_texts)

  def testConstructorKwargs(self):
    """Keyword args set declared members; unknown kwargs are discarded."""
    e = Example('hello', child=Child('world'), versioned_attr='1')
    self.assert_(e.text == 'hello')
    self.assert_(e.child.text == 'world')
    self.assert_(e.versioned_attr == '1')
    self.assert_(e.foos == [])
    self.assert_(e.tag is None)
    e = Example(foos=[Foo('1', ignored=1), Foo(text='2')], tag='ok')
    self.assert_(e.text is None)
    self.assert_(e.child is None)
    self.assert_(e.versioned_attr is None)
    self.assert_(len(e.foos) == 2)
    self.assert_(e.foos[0].text == '1')
    self.assert_(e.foos[1].text == '2')
    self.assert_('ignored' not in e.foos[0].__dict__)
    self.assert_(e.tag == 'ok')

  def testParseBasicXmlElement(self):
    """A generic XmlElement exposes children/attributes of arbitrary XML."""
    element = atom.core.xml_element_from_string(SAMPLE_XML,
                                                atom.core.XmlElement)
    inners = element.get_elements('inner')
    self.assert_(len(inners) == 3)
    self.assert_(inners[0].get_attributes('x')[0].value == '123')
    self.assert_(inners[0].get_attributes('y') == [])
    self.assert_(inners[1].get_attributes('x')[0].value == '234')
    self.assert_(inners[1].get_attributes('y')[0].value == 'abc')
    self.assert_(inners[2].get_attributes('x') == [])
    inners = element.get_elements('inner', 'http://example.com/xml/1')
    self.assert_(len(inners) == 3)
    inners = element.get_elements(None, 'http://example.com/xml/1')
    self.assert_(len(inners) == 4)
    inners = element.get_elements()
    self.assert_(len(inners) == 4)
    inners = element.get_elements('other')
    self.assert_(len(inners) == 1)
    self.assert_(inners[0].get_attributes(
        'z', 'http://example.com/xml/2')[0].value == 'true')
    inners = element.get_elements('missing')
    self.assert_(len(inners) == 0)

  def testBasicXmlElementPreservesMarkup(self):
    """Parsing then serializing arbitrary XML keeps the tree structure."""
    element = atom.core.xml_element_from_string(SAMPLE_XML,
                                                atom.core.XmlElement)
    tree1 = ElementTree.fromstring(SAMPLE_XML)
    tree2 = ElementTree.fromstring(element.to_string())
    self.assert_trees_similar(tree1, tree2)

  def testSchemaParse(self):
    """Declared repeated members ([Inner]) collect all matching children."""
    outer = atom.core.xml_element_from_string(SAMPLE_XML, Outer)
    self.assert_(isinstance(outer.innards, list))
    self.assert_(len(outer.innards) == 3)
    self.assert_(outer.innards[0].my_x == '123')

  def testSchemaParsePreservesMarkup(self):
    """Schema-based parsing keeps undeclared attributes through round-trip."""
    outer = atom.core.xml_element_from_string(SAMPLE_XML, Outer)
    tree1 = ElementTree.fromstring(SAMPLE_XML)
    tree2 = ElementTree.fromstring(outer.to_string())
    self.assert_trees_similar(tree1, tree2)
    found_x_and_y = False
    found_x_123 = False
    child = tree1.find('{http://example.com/xml/1}inner')
    matching_children = tree2.findall(child.tag)
    for match in matching_children:
      # The 'y' attribute is not declared on Inner, so it must have been
      # preserved as an extra attribute.
      if 'y' in match.attrib and match.attrib['y'] == 'abc':
        if match.attrib['x'] == '234':
          found_x_and_y = True
        self.assert_(match.attrib['x'] == '234')
      if 'x' in match.attrib and match.attrib['x'] == '123':
        self.assert_('y' not in match.attrib)
        found_x_123 = True
    self.assert_(found_x_and_y)
    self.assert_(found_x_123)

  def testGenericTagAndNamespace(self):
    """tag and namespace setters update _qname regardless of order."""
    element = atom.core.XmlElement(text='content')
    # Try setting tag then namespace.
    element.tag = 'foo'
    self.assert_(element._qname == 'foo')
    element.namespace = 'http://example.com/ns'
    self.assert_(element._qname == '{http://example.com/ns}foo')
    element = atom.core.XmlElement()
    # Try setting namespace then tag.
    element.namespace = 'http://example.com/ns'
    self.assert_(element._qname == '{http://example.com/ns}')
    element.tag = 'foo'
    self.assert_(element._qname == '{http://example.com/ns}foo')

  def assert_trees_similar(self, a, b):
    """Compares two XML trees for approximate matching."""
    for child in a:
      self.assert_(len(a.findall(child.tag)) == len(b.findall(child.tag)))
    for child in b:
      self.assert_(len(a.findall(child.tag)) == len(b.findall(child.tag)))
    self.assert_(len(a) == len(b))
    self.assert_(a.text == b.text)
    self.assert_(a.attrib == b.attrib)
class UtilityFunctionTest(unittest.TestCase):
  """Covers the module-level qname matching helper in atom.core."""

  def testMatchQnames(self):
    # (tag, namespace, qname) triples that should match; None acts as a
    # wildcard, '' as the empty namespace.
    matching_cases = [
        ('foo', 'http://example.com', '{http://example.com}foo'),
        (None, None, '{http://example.com}foo'),
        (None, None, 'foo'),
        (None, None, None),
        (None, None, '{http://example.com}'),
        ('foo', None, '{http://example.com}foo'),
        (None, 'http://example.com', '{http://example.com}foo'),
        (None, '', 'foo'),
        ('foo', '', 'foo'),
        ('foo', '', 'foo'),
    ]
    for tag, namespace, qname in matching_cases:
      self.assert_(atom.core._qname_matches(tag, namespace, qname))
    # Triples where either the tag or the namespace disagrees.
    non_matching_cases = [
        ('foo', 'http://google.com', '{http://example.com}foo'),
        ('foo', 'http://example.com', '{http://example.com}bar'),
        ('foo', 'http://example.com', '{http://google.com}foo'),
        ('bar', 'http://example.com', '{http://google.com}foo'),
        ('foo', None, '{http://example.com}bar'),
        (None, 'http://google.com', '{http://example.com}foo'),
        (None, '', '{http://example.com}foo'),
        ('foo', '', 'bar'),
    ]
    for tag, namespace, qname in non_matching_cases:
      self.assert_(atom.core._qname_matches(tag, namespace, qname) == False)
class Chars(atom.core.XmlElement):
  """Test element with a unicode qname and two attribute members."""
  _qname = u'{http://example.com/}chars'
  y = 'y'        # attribute 'y'
  alpha = 'a'    # attribute 'a'


class Strs(atom.core.XmlElement):
  """Test element collecting repeated Chars children and a 'd' attribute."""
  _qname = '{http://example.com/}strs'
  chars = [Chars]
  delta = u'd'


def parse(string):
  """Shorthand: parses *string* into a generic XmlElement."""
  return atom.core.xml_element_from_string(string, atom.core.XmlElement)


def create(tag, string):
  """Builds a generic XmlElement with qname *tag* and text *string*."""
  element = atom.core.XmlElement(text=string)
  element._qname = tag
  return element
class CharacterEncodingTest(unittest.TestCase):
  """Parsing/serializing with unicode, UTF-8, and UTF-16 inputs.

  NOTE(review): depends on Python 2 str/unicode semantics (byte-string
  `.decode`, byte-string text members); not Python 3 compatible.
  """

  def testUnicodeInputString(self):
    # Test parsing the inner text.
    self.assertEqual(parse(u'<x>δ</x>').text, u'\u03b4')
    self.assertEqual(parse(u'<x>\u03b4</x>').text, u'\u03b4')
    # Test output valid XML.
    self.assertEqual(parse(u'<x>δ</x>').to_string(), '<x>δ</x>')
    self.assertEqual(parse(u'<x>\u03b4</x>').to_string(), '<x>δ</x>')
    # Test setting the inner text and output valid XML.
    e = create(u'x', u'\u03b4')
    self.assertEqual(e.to_string(), '<x>δ</x>')
    self.assertEqual(e.text, u'\u03b4')
    self.assert_(isinstance(e.text, unicode))
    self.assertEqual(create(u'x', '\xce\xb4'.decode('utf-8')).to_string(),
                     '<x>δ</x>')

  def testUnicodeTagsAndAttributes(self):
    # Begin with test to show underlying ElementTree behavior.
    t = ElementTree.fromstring(u'<del\u03b4ta>test</del\u03b4ta>'.encode('utf-8'))
    self.assertEqual(t.tag, u'del\u03b4ta')
    self.assertEqual(parse(u'<\u03b4elta>test</\u03b4elta>')._qname,
                     u'\u03b4elta')
    # Test unicode attribute names and values.
    t = ElementTree.fromstring(u'<x \u03b4a="\u03b4b" />'.encode('utf-8'))
    self.assertEqual(t.attrib, {u'\u03b4a': u'\u03b4b'})
    self.assertEqual(parse(u'<x \u03b4a="\u03b4b" />').get_attributes(
        u'\u03b4a')[0].value, u'\u03b4b')
    x = create('x', None)
    x._other_attributes[u'a'] = u'\u03b4elta'
    self.assert_(x.to_string().startswith('<x a="δelta"'))

  def testUtf8InputString(self):
    # Test parsing inner text.
    self.assertEqual(parse('<x>δ</x>').text, u'\u03b4')
    self.assertEqual(parse(u'<x>\u03b4</x>'.encode('utf-8')).text, u'\u03b4')
    self.assertEqual(parse('<x>\xce\xb4</x>').text, u'\u03b4')
    # Test output valid XML.
    self.assertEqual(parse('<x>δ</x>').to_string(), '<x>δ</x>')
    self.assertEqual(parse(u'<x>\u03b4</x>'.encode('utf-8')).to_string(),
                     '<x>δ</x>')
    self.assertEqual(parse('<x>\xce\xb4</x>').to_string(), '<x>δ</x>')
    # Test setting the inner text and output valid XML.
    e = create('x', '\xce\xb4')
    self.assertEqual(e.to_string(), '<x>δ</x>')
    # Don't change the encoding until the we convert to an XML string.
    self.assertEqual(e.text, '\xce\xb4')
    self.assert_(isinstance(e.text, str))
    self.assert_(isinstance(e.to_string(), str))
    self.assertEqual(create('x', u'\u03b4'.encode('utf-8')).to_string(),
                     '<x>δ</x>')
    # Test attributes and values with UTF-8 inputs.
    self.assertEqual(parse('<x \xce\xb4a="\xce\xb4b" />').get_attributes(
        u'\u03b4a')[0].value, u'\u03b4b')

  def testUtf8TagsAndAttributes(self):
    self.assertEqual(
        parse(u'<\u03b4elta>test</\u03b4elta>'.encode('utf-8'))._qname,
        u'\u03b4elta')
    self.assertEqual(parse('<\xce\xb4elta>test</\xce\xb4elta>')._qname,
                     u'\u03b4elta')
    # Test an element with UTF-8 in the attribute value.
    x = create('x', None)
    x._other_attributes[u'a'] = '\xce\xb4'
    self.assert_(x.to_string(encoding='UTF-8').startswith('<x a="δ"'))
    self.assert_(x.to_string().startswith('<x a="δ"'))

  def testOtherEncodingOnInputString(self):
    BIG_ENDIAN = 0
    LITTLE_ENDIAN = 1
    # Test parsing inner text.
    self.assertEqual(parse(u'<x>\u03b4</x>'.encode('utf-16')).text, u'\u03b4')
    # Test output valid XML.
    self.assertEqual(parse(u'<x>\u03b4</x>'.encode('utf-16')).to_string(),
                     '<x>δ</x>')
    # Test setting the inner text and output valid XML.
    e = create('x', u'\u03b4'.encode('utf-16'))
    self.assertEqual(e.to_string(encoding='utf-16'), '<x>δ</x>')
    # Don't change the encoding until the we convert to an XML string.
    # Allow either little-endian or big-endian byte orderings.
    self.assert_(e.text in ['\xff\xfe\xb4\x03', '\xfe\xff\x03\xb4'])
    endianness = LITTLE_ENDIAN
    if e.text == '\xfe\xff\x03\xb4':
      endianness = BIG_ENDIAN
    self.assert_(isinstance(e.text, str))
    self.assert_(isinstance(e.to_string(encoding='utf-16'), str))
    if endianness == LITTLE_ENDIAN:
      self.assertEqual(
          create('x', '\xff\xfe\xb4\x03').to_string(encoding='utf-16'),
          '<x>δ</x>')
    else:
      self.assertEqual(
          create('x', '\xfe\xff\x03\xb4').to_string(encoding='utf-16'),
          '<x>δ</x>')

  def testOtherEncodingInTagsAndAttributes(self):
    self.assertEqual(
        parse(u'<\u03b4elta>test</\u03b4elta>'.encode('utf-16'))._qname,
        u'\u03b4elta')
    # Test an element with UTF-16 in the attribute value.
    x = create('x', None)
    x._other_attributes[u'a'] = u'\u03b4'.encode('utf-16')
    self.assert_(x.to_string(encoding='UTF-16').startswith('<x a="δ"'))
def suite():
  """Assembles every test case defined in this module into one suite."""
  test_cases = [XmlElementTest, UtilityFunctionTest, CharacterEncodingTest]
  return conf.build_suite(test_cases)


if __name__ == '__main__':
  unittest.main()
| Python |
#!/usr/bin/env python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module is used for version 2 of the Google Data APIs.
# This test may make an actual HTTP request.
__author__ = 'j.s@google.com (Jeff Scudder)'
import unittest
import StringIO
import os.path
import atom.mock_http_core
import atom.http_core
class EchoClientTest(unittest.TestCase):
  """Checks that EchoHttpClient reflects request details back as headers."""

  def test_echo_response(self):
    client = atom.mock_http_core.EchoHttpClient()
    # Send a bare-bones POST request.
    request = atom.http_core.HttpRequest(method='POST',
        uri=atom.http_core.Uri(host='www.jeffscudder.com', path='/'))
    request.add_body_part('hello world!', 'text/plain')
    response = client.request(request)
    # Request details are echoed back in Echo-* response headers.
    self.assert_(response.getheader('Echo-Host') == 'www.jeffscudder.com:None')
    self.assert_(response.getheader('Echo-Uri') == '/')
    self.assert_(response.getheader('Echo-Scheme') is None)
    self.assert_(response.getheader('Echo-Method') == 'POST')
    self.assert_(response.getheader('Content-Length') == str(len(
        'hello world!')))
    self.assert_(response.getheader('Content-Type') == 'text/plain')
    self.assert_(response.read() == 'hello world!')
    # Test a path of None should default to /
    request = atom.http_core.HttpRequest(method='POST',
        uri=atom.http_core.Uri(host='www.jeffscudder.com', path=None))
    response = client.request(request)
    self.assert_(response.getheader('Echo-Host') == 'www.jeffscudder.com:None')
    self.assert_(response.getheader('Echo-Method') == 'POST')
    self.assert_(response.getheader('Echo-Uri') == '/')
    # Send a multipart request.
    request = atom.http_core.HttpRequest(method='POST',
        uri=atom.http_core.Uri(scheme='https', host='www.jeffscudder.com',
            port=8080, path='/multipart',
            query={'test': 'true', 'happy': 'yes'}),
        headers={'Authorization':'Test xyzzy', 'Testing':'True'})
    request.add_body_part('start', 'text/plain')
    request.add_body_part(StringIO.StringIO('<html><body>hi</body></html>'),
        'text/html', len('<html><body>hi</body></html>'))
    request.add_body_part('alert("Greetings!")', 'text/javascript')
    response = client.request(request)
    self.assert_(response.getheader('Echo-Host') == 'www.jeffscudder.com:8080')
    self.assert_(
        response.getheader('Echo-Uri') == '/multipart?test=true&happy=yes')
    self.assert_(response.getheader('Echo-Scheme') == 'https')
    self.assert_(response.getheader('Echo-Method') == 'POST')
    self.assert_(response.getheader('Content-Type') == (
        'multipart/related; boundary="%s"' % (atom.http_core.MIME_BOUNDARY,)))
    # The echoed body must be the exact MIME multipart message, with each
    # part separated by the module's boundary string.
    expected_body = ('Media multipart posting'
        '\r\n--%s\r\n'
        'Content-Type: text/plain\r\n\r\n'
        'start'
        '\r\n--%s\r\n'
        'Content-Type: text/html\r\n\r\n'
        '<html><body>hi</body></html>'
        '\r\n--%s\r\n'
        'Content-Type: text/javascript\r\n\r\n'
        'alert("Greetings!")'
        '\r\n--%s--') % (atom.http_core.MIME_BOUNDARY,
            atom.http_core.MIME_BOUNDARY, atom.http_core.MIME_BOUNDARY,
            atom.http_core.MIME_BOUNDARY,)
    self.assert_(response.read() == expected_body)
    self.assert_(response.getheader('Content-Length') == str(
        len(expected_body)))
class MockHttpClientTest(unittest.TestCase):
  """Recording and replay behavior of MockHttpClient.

  NOTE(review): test_use_recordings may perform a live HTTP request to
  www.google.com the first time it runs.
  """

  def setUp(self):
    self.client = atom.mock_http_core.MockHttpClient()

  def test_respond_with_recording(self):
    """A stored response is replayed for a matching request."""
    request = atom.http_core.HttpRequest(method='GET')
    atom.http_core.parse_uri('http://www.google.com/').modify_request(request)
    self.client.add_response(request, 200, 'OK', body='Testing')
    response = self.client.request(request)
    self.assert_(response.status == 200)
    self.assert_(response.reason == 'OK')
    self.assert_(response.read() == 'Testing')

  def test_save_and_load_recordings(self):
    """Recordings survive a save / clear / reload cycle."""
    request = atom.http_core.HttpRequest(method='GET')
    atom.http_core.parse_uri('http://www.google.com/').modify_request(request)
    self.client.add_response(request, 200, 'OK', body='Testing')
    response = self.client.request(request)
    self.client._save_recordings('test_save_and_load_recordings')
    # With recordings cleared, the same request must raise.
    self.client._recordings = []
    try:
      response = self.client.request(request)
      self.fail('There should be no recording for this request.')
    except atom.mock_http_core.NoRecordingFound:
      pass
    self.client._load_recordings('test_save_and_load_recordings')
    response = self.client.request(request)
    self.assert_(response.status == 200)
    self.assert_(response.reason == 'OK')
    self.assert_(response.read() == 'Testing')

  def test_use_recordings(self):
    """Falls back to the real client when no recording exists, then records.

    NOTE(review): asserts on live google.com response details (for example
    server == 'gws'), so this can break if the live service changes.
    """
    request = atom.http_core.HttpRequest(method='GET')
    atom.http_core.parse_uri('http://www.google.com/').modify_request(request)
    self.client._load_or_use_client('test_use_recordings',
        atom.http_core.HttpClient())
    response = self.client.request(request)
    if self.client.real_client:
      self.client._save_recordings('test_use_recordings')
    self.assert_(response.status in (200, 302))
    self.assert_(response.reason in ('OK', 'Found'))
    self.assert_(response.getheader('server') == 'gws')
    body = response.read()
    self.assert_(body.startswith('<!doctype html>') or
        body.startswith('<HTML>'))

  def test_match_request(self):
    """Requests match on method and URI; gsessionid is significant,
    start_index is not."""
    x = atom.http_core.HttpRequest('http://example.com/', 'GET')
    y = atom.http_core.HttpRequest('http://example.com/', 'GET')
    self.assert_(atom.mock_http_core._match_request(x, y))
    y = atom.http_core.HttpRequest('http://example.com/', 'POST')
    self.assert_(not atom.mock_http_core._match_request(x, y))
    y = atom.http_core.HttpRequest('http://example.com/1', 'GET')
    self.assert_(not atom.mock_http_core._match_request(x, y))
    y = atom.http_core.HttpRequest('http://example.com/?gsessionid=1', 'GET')
    self.assert_(not atom.mock_http_core._match_request(x, y))
    y = atom.http_core.HttpRequest('http://example.com/?start_index=1', 'GET')
    self.assert_(atom.mock_http_core._match_request(x, y))
    x = atom.http_core.HttpRequest('http://example.com/?gsessionid=1', 'GET')
    y = atom.http_core.HttpRequest('http://example.com/?gsessionid=1', 'GET')
    self.assert_(atom.mock_http_core._match_request(x, y))
    y = atom.http_core.HttpRequest('http://example.com/?gsessionid=2', 'GET')
    self.assert_(not atom.mock_http_core._match_request(x, y))
    y = atom.http_core.HttpRequest('http://example.com/', 'GET')
    self.assert_(not atom.mock_http_core._match_request(x, y))

  def test_use_named_sessions(self):
    """A named cached session replays previously recorded responses."""
    self.client._delete_recordings('mock_http_test.test_use_named_sessions')
    self.client.use_cached_session('mock_http_test.test_use_named_sessions',
        atom.mock_http_core.EchoHttpClient())
    request = atom.http_core.HttpRequest('http://example.com', 'GET')
    response = self.client.request(request)
    self.assertEqual(response.getheader('Echo-Method'), 'GET')
    self.assertEqual(response.getheader('Echo-Host'), 'example.com:None')
    # We will insert a Cache-Marker header to indicate that this is a
    # recorded response, but initially it should not be present.
    self.assertEqual(response.getheader('Cache-Marker'), None)
    # Modify the recorded response to allow us to identify a cached result
    # from an echoed result. We need to be able to check to see if this
    # came from a recording.
    self.assert_('Cache-Marker' not in self.client._recordings[0][1]._headers)
    self.client._recordings[0][1]._headers['Cache-Marker'] = '1'
    self.assert_('Cache-Marker' in self.client._recordings[0][1]._headers)
    # Save the recorded responses.
    self.client.close_session()
    # Create a new client, and have it use the recorded session.
    client = atom.mock_http_core.MockHttpClient()
    client.use_cached_session('mock_http_test.test_use_named_sessions',
        atom.mock_http_core.EchoHttpClient())
    # Make the same request, which should use the recorded result.
    response = client.request(request)
    self.assertEqual(response.getheader('Echo-Method'), 'GET')
    self.assertEqual(response.getheader('Echo-Host'), 'example.com:None')
    # We should now see the cache marker since the response is replayed.
    self.assertEqual(response.getheader('Cache-Marker'), '1')
def suite():
  """Assembles the mock HTTP client test cases into a single suite."""
  all_tests = (unittest.makeSuite(MockHttpClientTest, 'test'),
               unittest.makeSuite(EchoClientTest, 'test'))
  return unittest.TestSuite(all_tests)


if __name__ == '__main__':
  unittest.main()
| Python |
#!/usr/bin/env python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module is used for version 2 of the Google Data APIs.
__author__ = 'j.s@google.com (Jeff Scudder)'
import unittest
import atom.auth
import atom.http_core
class BasicAuthTest(unittest.TestCase):
  """Verifies Basic credentials produce the RFC 2617 example header."""

  def test_modify_request(self):
    # 'Aladdin'/'open sesame' is the canonical Basic auth example pair;
    # its base64 form is the constant below.
    expected_cookie = 'QWxhZGRpbjpvcGVuIHNlc2FtZQ=='
    credentials = atom.auth.BasicAuth('Aladdin', 'open sesame')
    self.assert_(credentials.basic_cookie == expected_cookie)
    http_request = atom.http_core.HttpRequest()
    credentials.modify_request(http_request)
    self.assert_(
        http_request.headers['Authorization'] == 'Basic ' + expected_cookie)
def suite():
  """Builds a suite containing the Basic auth tests."""
  basic_auth_tests = unittest.makeSuite(BasicAuthTest, 'test')
  return unittest.TestSuite((basic_auth_tests,))


if __name__ == '__main__':
  unittest.main()
| Python |
#!/usr/bin/python
#
# Copyright (C) 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'j.s@google.com (Jeff Scudder)'
import unittest
import atom.token_store
import atom.http_interface
import atom.service
import atom.url
class TokenStoreTest(unittest.TestCase):
  """Tests scope-based token lookup in atom.token_store.TokenStore.

  A token registered for a set of scope URLs should be returned for any
  request URL that falls under one of those scopes; other URLs should get
  a fallback atom.http_interface.GenericToken instead.
  """
  def setUp(self):
    self.token = atom.service.BasicAuthToken('aaa1', scopes=[
        'http://example.com/', 'http://example.org'])
    self.tokens = atom.token_store.TokenStore()
    self.tokens.add_token(self.token)
  def testAddAndFindTokens(self):
    self.assert_(self.tokens.find_token('http://example.com/') == self.token)
    self.assert_(self.tokens.find_token('http://example.org/') == self.token)
    # Query strings and deeper paths still match the scope.
    self.assert_(self.tokens.find_token('http://example.org/foo?ok=1') == (
        self.token))
    # Out-of-scope and scheme-less URLs fall back to a GenericToken.
    self.assert_(isinstance(self.tokens.find_token('http://example.net/'),
        atom.http_interface.GenericToken))
    self.assert_(isinstance(self.tokens.find_token('example.com/'),
        atom.http_interface.GenericToken))
  def testFindTokenUsingMultipleUrls(self):
    self.assert_(self.tokens.find_token(
        'http://example.com/') == self.token)
    self.assert_(self.tokens.find_token(
        'http://example.org/bar') == self.token)
    self.assert_(isinstance(self.tokens.find_token(''),
        atom.http_interface.GenericToken))
    self.assert_(isinstance(self.tokens.find_token(
            'http://example.net/'),
        atom.http_interface.GenericToken))
  def testFindTokenWithPartialScopes(self):
    # Scopes given as Url objects: host plus optional path prefix.
    token = atom.service.BasicAuthToken('aaa1',
        scopes=[atom.url.Url(host='www.example.com', path='/foo'),
                atom.url.Url(host='www.example.net')])
    token_store = atom.token_store.TokenStore()
    token_store.add_token(token)
    self.assert_(token_store.find_token(
        'http://www.example.com/foobar') == token)
    # NOTE(review): https on the default port 443 appears to match the
    # http scope here -- confirm against atom.url matching rules.
    self.assert_(token_store.find_token(
        'https://www.example.com:443/foobar') == token)
    self.assert_(token_store.find_token(
        'http://www.example.net/xyz') == token)
    self.assert_(token_store.find_token('http://www.example.org/') != token)
    self.assert_(isinstance(token_store.find_token('http://example.org/'),
        atom.http_interface.GenericToken))
def suite():
  """Return a TestSuite containing all TokenStore tests."""
  return unittest.TestSuite((unittest.makeSuite(TokenStoreTest,'test'),))
if __name__ == '__main__':
  unittest.main()
| Python |
#!/usr/bin/python
#
# Copyright (C) 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'api.jscudder (Jeff Scudder)'
import unittest
import gdata.service
import atom.mock_service
# Route all gdata.service HTTP traffic through the mock handler so the tests
# below replay recorded responses instead of opening real connections.
gdata.service.http_request_handler = atom.mock_service
class MockRequestTest(unittest.TestCase):
  """Tests MockRequest.IsMatch: matching is by verb and URL, not headers."""
  def setUp(self):
    # The recorded "thumbprint" that candidate requests are compared against.
    self.request_thumbprint = atom.mock_service.MockRequest('GET',
        'http://www.google.com',
        extra_headers={'Header1':'a', 'Header2':'b'})
  def testIsMatch(self):
    matching_request = atom.mock_service.MockRequest('GET',
        'http://www.google.com', extra_headers={'Header1':'a',
            'Header2':'b', 'Header3':'c'})
    bad_url = atom.mock_service.MockRequest('GET', 'http://example.com',
        extra_headers={'Header1':'a', 'Header2':'b', 'Header3':'c'})
    # Should match because we don't check headers at the moment.
    bad_header = atom.mock_service.MockRequest('GET',
        'http://www.google.com', extra_headers={'Header1':'a',
            'Header2':'1', 'Header3':'c'})
    bad_verb = atom.mock_service.MockRequest('POST', 'http://www.google.com',
        data='post data', extra_headers={'Header1':'a', 'Header2':'b'})
    self.assertEquals(self.request_thumbprint.IsMatch(matching_request), True)
    self.assertEquals(self.request_thumbprint.IsMatch(bad_url), False)
    self.assertEquals(self.request_thumbprint.IsMatch(bad_header), True)
    self.assertEquals(self.request_thumbprint.IsMatch(bad_verb), False)
class HttpRequestTest(unittest.TestCase):
  """Tests that GDataService requests are served from mock recordings."""
  def setUp(self):
    # Reset shared module-level recordings so tests don't leak into each other.
    atom.mock_service.recordings = []
    self.client = gdata.service.GDataService()
  def testSimpleRecordedGet(self):
    recorded_request = atom.mock_service.MockRequest('GET', 'http://example.com/')
    recorded_response = atom.mock_service.MockHttpResponse('Got it', 200,
        'OK')
    # Add a tuple mapping the mock request to the mock response
    atom.mock_service.recordings.append((recorded_request, recorded_response))
    # Try a couple of GET requests which should match the recorded request.
    response = self.client.Get('http://example.com/', converter=str)
    self.assertEquals(response, 'Got it')
    self.client.server = 'example.com'
    # Lower-level request path: returns the raw file-like response object.
    raw_response = self.client.handler.HttpRequest(self.client, 'GET', None,
        '/')
    self.assertEquals(raw_response.read(), 'Got it')
    self.assertEquals(raw_response.status, 200)
    self.assertEquals(raw_response.reason, 'OK')
class RecordRealHttpRequestsTest(unittest.TestCase):
  """Records a live HTTP response, then replays it from the recording.

  NOTE: this test performs a real network request to www.google.com.
  """
  def testRecordAndReuseResponse(self):
    client = gdata.service.GDataService()
    client.server = 'www.google.com'
    atom.mock_service.recordings = []
    # NOTE(review): atom.service is not imported at the top of this file;
    # presumably it is reachable as a package attribute because gdata.service
    # imports it -- confirm before relying on import order.
    atom.mock_service.real_request_handler = atom.service
    # Record a response
    real_response = atom.mock_service.HttpRequest(client, 'GET', None, 'http://www.google.com/')
    # Enter 'replay' mode
    atom.mock_service.real_request_handler = None
    mock_response = atom.mock_service.HttpRequest(client, 'GET', None, 'http://www.google.com/')
    self.assertEquals(real_response.reason, mock_response.reason)
    self.assertEquals(real_response.status, mock_response.status)
    self.assertEquals(real_response.read(), mock_response.read())
# Run this module's tests when executed as a script.
if __name__ == '__main__':
  unittest.main()
| Python |
#!/usr/bin/python
#
# Copyright (C) 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'j.s@google.com (Jeff Scudder)'
import unittest
import atom.mock_http
import atom.http
class MockHttpClientUnitTest(unittest.TestCase):
  """Tests record and replay behavior of atom.mock_http.MockHttpClient."""

  def setUp(self):
    self.client = atom.mock_http.MockHttpClient()

  def testRepondToGet(self):
    # NOTE: method name keeps its historical typo ('Repond'); unittest
    # discovery only requires the 'test' prefix, so renaming is unnecessary.
    # Imported locally: this module never imports atom.http_interface at the
    # top level, so the original code depended on another module (atom.http)
    # having already imported it as a side effect.
    import atom.http_interface
    mock_response = atom.http_interface.HttpResponse(body='Hooray!',
        status=200, reason='OK')
    self.client.add_response(mock_response, 'GET',
        'http://example.com/hooray')
    response = self.client.request('GET', 'http://example.com/hooray')
    # The request itself is recorded even in replay mode.
    self.assertEquals(len(self.client.recordings), 1)
    self.assertEquals(response.status, 200)
    self.assertEquals(response.read(), 'Hooray!')

  def testRecordResponse(self):
    # Turn on pass-through record mode.
    # NOTE: this test performs a live HTTP request to www.google.com.
    self.client.real_client = atom.http.ProxiedHttpClient()
    live_response = self.client.request('GET',
        'http://www.google.com/base/feeds/snippets?max-results=1')
    live_response_body = live_response.read()
    self.assertEquals(live_response.status, 200)
    self.assertEquals(live_response_body.startswith('<?xml'), True)
    # Requery for the now canned data.
    self.client.real_client = None
    canned_response = self.client.request('GET',
        'http://www.google.com/base/feeds/snippets?max-results=1')
    # The canned response should be the stored response.
    canned_response_body = canned_response.read()
    self.assertEquals(canned_response.status, 200)
    self.assertEquals(canned_response_body, live_response_body)

  def testUnrecordedRequest(self):
    # A request with no recording (and no real client) must raise.
    try:
      self.client.request('POST', 'http://example.org')
      self.fail()
    except atom.mock_http.NoRecordingFound:
      pass
def suite():
  """Return a TestSuite containing all MockHttpClient tests."""
  return unittest.TestSuite(
      (unittest.makeSuite(MockHttpClientUnitTest,'test'),))
if __name__ == '__main__':
  unittest.main()
| Python |
#!/usr/bin/python
#
# Copyright (C) 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'api.jscudder (Jeff Scudder)'
import unittest
import atom.http_interface
import StringIO
class HttpResponseTest(unittest.TestCase):
  """Exercises HttpResponse constructed from plain string arguments."""

  def testConstructorWithStrings(self):
    response = atom.http_interface.HttpResponse(
        body='Hi there!', status=200, reason='OK',
        headers={'Content-Length':'9'})
    # Metadata is stored exactly as passed in.
    self.assertEqual(response.status, 200)
    self.assertEqual(response.reason, 'OK')
    self.assertEqual(response.getheader('Content-Length'), '9')
    self.assert_(response.getheader('Missing') is None)
    self.assertEqual(response.getheader('Missing', default='yes'), 'yes')
    # The body is consumed incrementally, like a file object.
    self.assertEqual(response.read(amt=1), 'H')
    self.assertEqual(response.read(amt=2), 'i ')
    self.assertEqual(response.read(), 'there!')
    self.assertEqual(response.read(), '')
def suite():
  """Return a TestSuite containing all HttpResponse tests."""
  return unittest.TestSuite((unittest.makeSuite(HttpResponseTest,'test'),))
if __name__ == '__main__':
  unittest.main()
| Python |
#!/usr/bin/env python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module is used for version 2 of the Google Data APIs.
# This test may make an actual HTTP request.
__author__ = 'j.s@google.com (Jeff Scudder)'
import unittest
import atom.http_core
import atom.auth
import atom.client
import atom.mock_http_core
class AtomPubClientEchoTest(unittest.TestCase):
  """Tests AtomPubClient against EchoHttpClient, which reflects each request
  back in Echo-* response headers instead of performing real HTTP."""
  def test_simple_request_with_no_client_defaults(self):
    client = atom.client.AtomPubClient(atom.mock_http_core.EchoHttpClient())
    self.assert_(client.host is None)
    self.assert_(client.auth_token is None)
    # Make several equivalent requests.
    responses = [client.request('GET', 'http://example.org/'),
        client.request(http_request=atom.http_core.HttpRequest(
            uri=atom.http_core.Uri('http', 'example.org', path='/'),
            method='GET')),
        client.request('GET',
            http_request=atom.http_core.HttpRequest(
                uri=atom.http_core.Uri('http', 'example.org',
                    path='/')))]
    for response in responses:
      # ':None' means no explicit port was set on the request Uri.
      self.assert_(response.getheader('Echo-Host') == 'example.org:None')
      self.assert_(response.getheader('Echo-Uri') == '/')
      self.assert_(response.getheader('Echo-Scheme') == 'http')
      self.assert_(response.getheader('Echo-Method') == 'GET')
      self.assert_(response.getheader('User-Agent').startswith('gdata-py/'))
  def test_auth_request_with_no_client_defaults(self):
    client = atom.client.AtomPubClient(atom.mock_http_core.EchoHttpClient())
    token = atom.auth.BasicAuth('Jeff', '123')
    response = client.request('POST', 'https://example.net:8080/',
        auth_token=token)
    self.assert_(response.getheader('Echo-Host') == 'example.net:8080')
    self.assert_(response.getheader('Echo-Uri') == '/')
    self.assert_(response.getheader('Echo-Scheme') == 'https')
    # base64('Jeff:123')
    self.assert_(response.getheader('Authorization') == 'Basic SmVmZjoxMjM=')
    self.assert_(response.getheader('Echo-Method') == 'POST')
  def test_request_with_client_defaults(self):
    client = atom.client.AtomPubClient(atom.mock_http_core.EchoHttpClient(),
        'example.com', atom.auth.BasicAuth('Jeff', '123'))
    self.assert_(client.host == 'example.com')
    self.assert_(client.auth_token is not None)
    self.assert_(client.auth_token.basic_cookie == 'SmVmZjoxMjM=')
    # An absolute URL overrides the client's default host.
    response = client.request('GET', 'http://example.org/')
    self.assert_(response.getheader('Echo-Host') == 'example.org:None')
    self.assert_(response.getheader('Echo-Uri') == '/')
    self.assert_(response.getheader('Echo-Scheme') == 'http')
    self.assert_(response.getheader('Echo-Method') == 'GET')
    self.assert_(response.getheader('Authorization') == 'Basic SmVmZjoxMjM=')
    # A relative path falls back to the client's default host.
    response = client.request('GET', '/')
    self.assert_(response.getheader('Echo-Host') == 'example.com:None')
    self.assert_(response.getheader('Echo-Uri') == '/')
    self.assert_(response.getheader('Echo-Scheme') == 'http')
    self.assert_(response.getheader('Authorization') == 'Basic SmVmZjoxMjM=')
    # An explicit http_request can supply extra Uri parts, e.g. the port.
    response = client.request('GET', '/',
        http_request=atom.http_core.HttpRequest(
            uri=atom.http_core.Uri(port=99)))
    self.assert_(response.getheader('Echo-Host') == 'example.com:99')
    self.assert_(response.getheader('Echo-Uri') == '/')
  def test_get(self):
    client = atom.client.AtomPubClient(atom.mock_http_core.EchoHttpClient())
    response = client.get('http://example.com/simple')
    self.assert_(response.getheader('Echo-Host') == 'example.com:None')
    self.assert_(response.getheader('Echo-Uri') == '/simple')
    self.assert_(response.getheader('Echo-Method') == 'GET')
    # Both the lowercase and CamelCase method aliases should work.
    response = client.Get(uri='http://example.com/simple2')
    self.assert_(response.getheader('Echo-Uri') == '/simple2')
    self.assert_(response.getheader('Echo-Method') == 'GET')
  def test_modify_request_using_args(self):
    # Any extra kwarg with a modify_request method gets applied to the
    # outgoing request.
    client = atom.client.AtomPubClient(atom.mock_http_core.EchoHttpClient())
    class RequestModifier(object):
      def modify_request(self, http_request):
        http_request.headers['Special'] = 'Set'
    response = client.get('http://example.com/modified',
        extra=RequestModifier())
    self.assert_(response.getheader('Echo-Host') == 'example.com:None')
    self.assert_(response.getheader('Echo-Uri') == '/modified')
    self.assert_(response.getheader('Echo-Method') == 'GET')
    self.assert_(response.getheader('Special') == 'Set')
  def test_post(self):
    client = atom.client.AtomPubClient(atom.mock_http_core.EchoHttpClient())
    class TestData(object):
      def modify_request(self, http_request):
        http_request.add_body_part('test body', 'text/testdata')
    response = client.Post(uri='http://example.com/', data=TestData())
    self.assert_(response.getheader('Echo-Host') == 'example.com:None')
    self.assert_(response.getheader('Echo-Uri') == '/')
    self.assert_(response.getheader('Echo-Method') == 'POST')
    self.assert_(response.getheader('Content-Length') == str(len('test body')))
    self.assert_(response.getheader('Content-Type') == 'text/testdata')
    self.assert_(response.read(2) == 'te')
    self.assert_(response.read() == 'st body')
    response = client.post(data=TestData(), uri='http://example.com/')
    self.assert_(response.read() == 'test body')
    self.assert_(response.getheader('Content-Type') == 'text/testdata')
    # Don't pass in a body, but use an extra kwarg to add the body to the
    # http_request.
    response = client.post(x=TestData(), uri='http://example.com/')
    self.assert_(response.read() == 'test body')
  def test_put(self):
    body_text = '<put>test</put>'
    client = atom.client.AtomPubClient(atom.mock_http_core.EchoHttpClient())
    class TestData(object):
      def modify_request(self, http_request):
        http_request.add_body_part(body_text, 'application/xml')
    response = client.put('http://example.org', TestData())
    self.assert_(response.getheader('Echo-Host') == 'example.org:None')
    self.assert_(response.getheader('Echo-Uri') == '/')
    self.assert_(response.getheader('Echo-Method') == 'PUT')
    self.assert_(response.getheader('Content-Length') == str(len(body_text)))
    self.assert_(response.getheader('Content-Type') == 'application/xml')
    response = client.put(uri='http://example.org', data=TestData())
    self.assert_(response.getheader('Content-Length') == str(len(body_text)))
    self.assert_(response.getheader('Content-Type') == 'application/xml')
  def test_delete(self):
    # A client 'source' string is prepended to the User-Agent header.
    client = atom.client.AtomPubClient(atom.mock_http_core.EchoHttpClient(),
        source='my new app')
    response = client.Delete('http://example.com/simple')
    self.assertEqual(response.getheader('Echo-Host'), 'example.com:None')
    self.assertEqual(response.getheader('Echo-Uri'), '/simple')
    self.assertEqual(response.getheader('Echo-Method'), 'DELETE')
    response = client.delete(uri='http://example.com/d')
    self.assertEqual(response.getheader('Echo-Uri'), '/d')
    self.assertEqual(response.getheader('Echo-Method'), 'DELETE')
    self.assert_(
        response.getheader('User-Agent').startswith('my new app gdata-py/'))
def suite():
  """Return a TestSuite containing all AtomPubClient echo tests."""
  return unittest.TestSuite((unittest.makeSuite(AtomPubClientEchoTest, 'test'),
                             ))
if __name__ == '__main__':
  unittest.main()
| Python |
#!/usr/bin/python
#
# Copyright (C) 2006 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'api.jscudder@gmail.com (Jeff Scudder)'
import sys
import unittest
import getopt
import getpass
import module_test_runner
import run_data_tests
import run_service_tests
# Entry point: run the offline data (XML parsing) tests first, then prompt
# for credentials and run the live service tests.
if __name__ == '__main__':
  run_data_tests.RunAllTests()
  run_service_tests.GetValuesForTestSettingsAndRunAllTests()
| Python |
#!/usr/bin/python
import sys
import unittest
import module_test_runner
import getopt
import getpass
# Modules whose tests we will run.
import atom_tests.service_test
import gdata_tests.service_test
import gdata_tests.apps.service_test
import gdata_tests.books.service_test
import gdata_tests.calendar.service_test
import gdata_tests.docs.service_test
import gdata_tests.health.service_test
import gdata_tests.spreadsheet.service_test
import gdata_tests.spreadsheet.text_db_test
import gdata_tests.photos.service_test
import gdata_tests.contacts.service_test
import gdata_tests.blogger.service_test
import gdata_tests.youtube.service_test
import gdata_tests.health.service_test
import gdata_tests.contacts.profiles.service_test
def RunAllTests(username, password, spreadsheet_key, worksheet_key,
                apps_username, apps_password, apps_domain):
  """Runs every registered service test module with the given settings.

  Args:
    username: str Google account used by most of the live service tests.
    password: str Password for username.
    spreadsheet_key: str Key of the spreadsheet used by spreadsheet tests.
    worksheet_key: str Id of the worksheet which may be edited.
    apps_username: str Google Apps admin account name.
    apps_password: str Password for apps_username.
    apps_domain: str Google Apps domain used by the provisioning tests.
  """
  # Imported here because the module-level import list omits this module even
  # though it appears in the modules list below; without an import, the
  # attribute lookup gdata_tests.base.service_test raises AttributeError.
  import gdata_tests.base.service_test
  test_runner = module_test_runner.ModuleTestRunner()
  test_runner.modules = [atom_tests.service_test,
                         gdata_tests.service_test,
                         gdata_tests.apps.service_test,
                         gdata_tests.base.service_test,
                         gdata_tests.books.service_test,
                         gdata_tests.calendar.service_test,
                         gdata_tests.docs.service_test,
                         gdata_tests.health.service_test,
                         gdata_tests.spreadsheet.service_test,
                         gdata_tests.spreadsheet.text_db_test,
                         gdata_tests.contacts.service_test,
                         gdata_tests.youtube.service_test,
                         gdata_tests.photos.service_test,
                         gdata_tests.contacts.profiles.service_test,]
  # The runner injects these settings into each test module before running it.
  test_runner.settings = {'username':username, 'password':password,
                          'test_image_location':'testimage.jpg',
                          'ss_key':spreadsheet_key,
                          'ws_key':worksheet_key,
                          'apps_username':apps_username,
                          'apps_password':apps_password,
                          'apps_domain':apps_domain}
  test_runner.RunAllTests()
def GetValuesForTestSettingsAndRunAllTests():
  """Collects test settings from argv (or interactive prompts) and runs tests.

  Recognized flags: --username, --password, --ss_key, --ws_key,
  --apps_username, --apps_password, --apps_domain. Any value not supplied on
  the command line is requested interactively; passwords are read with
  getpass so they are not echoed.
  """
  username = ''
  password = ''
  spreadsheet_key = ''
  worksheet_key = ''
  apps_domain = ''
  apps_username = ''
  apps_password = ''
  print ('NOTE: Please run these tests only with a test account. '
         'The tests may delete or update your data.')
  try:
    opts, args = getopt.getopt(sys.argv[1:], '', ['username=', 'password=',
                                                  'ss_key=', 'ws_key=',
                                                  'apps_username=',
                                                  'apps_password=',
                                                  'apps_domain='])
    for o, a in opts:
      if o == '--username':
        username = a
      elif o == '--password':
        password = a
      elif o == '--ss_key':
        spreadsheet_key = a
      elif o == '--ws_key':
        worksheet_key = a
      elif o == '--apps_username':
        apps_username = a
      elif o == '--apps_password':
        apps_password = a
      elif o == '--apps_domain':
        apps_domain = a
  except getopt.GetoptError:
    # Unrecognized flags are ignored; missing values are prompted for below.
    pass
  if username == '' and password == '':
    # Fixed message: the accepted flags are --username/--password, not the
    # nonexistent --user/--pw named by the original message.
    print ('Missing --username and --password command line arguments, '
           'prompting for credentials.')
  if username == '':
    username = raw_input('Please enter your username: ')
  if password == '':
    password = getpass.getpass()
  if spreadsheet_key == '':
    spreadsheet_key = raw_input(
        'Please enter the key for the test spreadsheet: ')
  if worksheet_key == '':
    worksheet_key = raw_input(
        'Please enter the id for the worksheet to be edited: ')
  if apps_username == '':
    apps_username = raw_input('Please enter your Google Apps admin username: ')
  if apps_password == '':
    apps_password = getpass.getpass()
  if apps_domain == '':
    apps_domain = raw_input('Please enter your Google Apps domain: ')
  RunAllTests(username, password, spreadsheet_key, worksheet_key,
              apps_username, apps_password, apps_domain)
if __name__ == '__main__':
  GetValuesForTestSettingsAndRunAllTests()
| Python |
#!/usr/bin/python
# -*-*- encoding: utf-8 -*-*-
#
# Copyright (C) 2006 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'j.s@google.com (Jeff Scudder)'
import sys
import unittest
try:
from xml.etree import ElementTree
except ImportError:
from elementtree import ElementTree
import atom
from gdata import test_data
import gdata.test_config as conf
class AuthorTest(unittest.TestCase):
  """Round-trip and extension-element tests for atom.Author."""
  def setUp(self):
    self.author = atom.Author()
  def testEmptyAuthorShouldHaveEmptyExtensionsList(self):
    self.assert_(isinstance(self.author.extension_elements, list))
    self.assert_(len(self.author.extension_elements) == 0)
  def testNormalAuthorShouldHaveNoExtensionElements(self):
    self.author.name = atom.Name(text='Jeff Scudder')
    self.assert_(self.author.name.text == 'Jeff Scudder')
    self.assert_(len(self.author.extension_elements) == 0)
    new_author = atom.AuthorFromString(self.author.ToString())
    self.assert_(len(self.author.extension_elements) == 0)
    # Unknown child elements should survive serialization as extensions.
    self.author.extension_elements.append(atom.ExtensionElement(
        'foo', text='bar'))
    self.assert_(len(self.author.extension_elements) == 1)
    self.assert_(self.author.name.text == 'Jeff Scudder')
    new_author = atom.AuthorFromString(self.author.ToString())
    self.assert_(len(self.author.extension_elements) == 1)
    self.assert_(new_author.name.text == 'Jeff Scudder')
  def testEmptyAuthorToAndFromStringShouldMatch(self):
    string_from_author = self.author.ToString()
    new_author = atom.AuthorFromString(string_from_author)
    string_from_new_author = new_author.ToString()
    self.assert_(string_from_author == string_from_new_author)
  def testAuthorWithNameToAndFromStringShouldMatch(self):
    self.author.name = atom.Name()
    self.author.name.text = 'Jeff Scudder'
    string_from_author = self.author.ToString()
    new_author = atom.AuthorFromString(string_from_author)
    string_from_new_author = new_author.ToString()
    self.assert_(string_from_author == string_from_new_author)
    self.assert_(self.author.name.text == new_author.name.text)
  def testExtensionElements(self):
    # Despite the name, this exercises extension *attributes*.
    self.author.extension_attributes['foo1'] = 'bar'
    self.author.extension_attributes['foo2'] = 'rab'
    self.assert_(self.author.extension_attributes['foo1'] == 'bar')
    self.assert_(self.author.extension_attributes['foo2'] == 'rab')
    new_author = atom.AuthorFromString(self.author.ToString())
    self.assert_(new_author.extension_attributes['foo1'] == 'bar')
    self.assert_(new_author.extension_attributes['foo2'] == 'rab')
  def testConvertFullAuthorToAndFromString(self):
    author = atom.AuthorFromString(test_data.TEST_AUTHOR)
    self.assert_(author.name.text == 'John Doe')
    self.assert_(author.email.text == 'johndoes@someemailadress.com')
    self.assert_(author.uri.text == 'http://www.google.com')
class EmailTest(unittest.TestCase):
  """Round-trips atom.Email through its XML string form."""

  def setUp(self):
    self.email = atom.Email()

  def testEmailToAndFromString(self):
    self.email.text = 'This is a test'
    parsed = atom.EmailFromString(self.email.ToString())
    self.assertEqual(parsed.text, self.email.text)
    self.assertEqual(parsed.extension_elements,
                     self.email.extension_elements)
class NameTest(unittest.TestCase):
  """Round-trip and extension-attribute tests for atom.Name."""
  def setUp(self):
    self.name = atom.Name()
  def testEmptyNameToAndFromStringShouldMatch(self):
    string_from_name = self.name.ToString()
    new_name = atom.NameFromString(string_from_name)
    string_from_new_name = new_name.ToString()
    self.assert_(string_from_name == string_from_new_name)
  def testText(self):
    self.assert_(self.name.text is None)
    self.name.text = 'Jeff Scudder'
    self.assert_(self.name.text == 'Jeff Scudder')
    new_name = atom.NameFromString(self.name.ToString())
    self.assert_(new_name.text == self.name.text)
  def testExtensionElements(self):
    # Despite the name, this exercises extension *attributes*.
    self.name.extension_attributes['foo'] = 'bar'
    self.assert_(self.name.extension_attributes['foo'] == 'bar')
    new_name = atom.NameFromString(self.name.ToString())
    self.assert_(new_name.extension_attributes['foo'] == 'bar')
class ExtensionElementTest(unittest.TestCase):
  """Tests parsing and round-tripping of atom.ExtensionElement trees."""
  def setUp(self):
    self.ee = atom.ExtensionElement('foo')
  def testEmptyEEShouldProduceEmptyString(self):
    # TODO(review): empty test body -- the assertion implied by the name
    # was never written.
    pass
  def testEEParsesTreeCorrectly(self):
    deep_tree = atom.ExtensionElementFromString(test_data.EXTENSION_TREE)
    self.assert_(deep_tree.tag == 'feed')
    self.assert_(deep_tree.namespace == 'http://www.w3.org/2005/Atom')
    self.assert_(deep_tree.children[0].tag == 'author')
    self.assert_(deep_tree.children[0].namespace == 'http://www.google.com')
    self.assert_(deep_tree.children[0].children[0].tag == 'name')
    self.assert_(deep_tree.children[0].children[0].namespace ==
        'http://www.google.com')
    self.assert_(deep_tree.children[0].children[0].text.strip() == 'John Doe')
    self.assert_(deep_tree.children[0].children[0].children[0].text.strip() ==
        'Bar')
    foo = deep_tree.children[0].children[0].children[0]
    self.assert_(foo.tag == 'foo')
    self.assert_(foo.namespace == 'http://www.google.com')
    self.assert_(foo.attributes['up'] == 'down')
    self.assert_(foo.attributes['yes'] == 'no')
    self.assert_(foo.children == [])
  def testEEToAndFromStringShouldMatch(self):
    string_from_ee = self.ee.ToString()
    new_ee = atom.ExtensionElementFromString(string_from_ee)
    string_from_new_ee = new_ee.ToString()
    self.assert_(string_from_ee == string_from_new_ee)
    # A deep tree must also serialize and re-parse to identical XML.
    deep_tree = atom.ExtensionElementFromString(test_data.EXTENSION_TREE)
    string_from_deep_tree = deep_tree.ToString()
    new_deep_tree = atom.ExtensionElementFromString(string_from_deep_tree)
    string_from_new_deep_tree = new_deep_tree.ToString()
    self.assert_(string_from_deep_tree == string_from_new_deep_tree)
class LinkTest(unittest.TestCase):
  """Round-trip serialization tests for atom.Link."""
  def setUp(self):
    self.link = atom.Link()
  def testLinkToAndFromString(self):
    self.link.href = 'test href'
    self.link.hreflang = 'english'
    self.link.type = 'text/html'
    self.link.extension_attributes['foo'] = 'bar'
    self.assert_(self.link.href == 'test href')
    self.assert_(self.link.hreflang == 'english')
    self.assert_(self.link.type == 'text/html')
    self.assert_(self.link.extension_attributes['foo'] == 'bar')
    new_link = atom.LinkFromString(self.link.ToString())
    self.assert_(self.link.href == new_link.href)
    self.assert_(self.link.type == new_link.type)
    self.assert_(self.link.hreflang == new_link.hreflang)
    self.assert_(self.link.extension_attributes['foo'] ==
        new_link.extension_attributes['foo'])
  def testLinkType(self):
    # The constructor takes link_type to avoid shadowing the builtin 'type'.
    test_link = atom.Link(link_type='text/html')
    self.assert_(test_link.type == 'text/html')
class GeneratorTest(unittest.TestCase):
  """Round-trip serialization test for atom.Generator."""
  def setUp(self):
    self.generator = atom.Generator()
  def testGeneratorToAndFromString(self):
    self.generator.uri = 'www.google.com'
    self.generator.version = '1.0'
    self.generator.extension_attributes['foo'] = 'bar'
    self.assert_(self.generator.uri == 'www.google.com')
    self.assert_(self.generator.version == '1.0')
    self.assert_(self.generator.extension_attributes['foo'] == 'bar')
    new_generator = atom.GeneratorFromString(self.generator.ToString())
    self.assert_(self.generator.uri == new_generator.uri)
    self.assert_(self.generator.version == new_generator.version)
    self.assert_(self.generator.extension_attributes['foo'] ==
        new_generator.extension_attributes['foo'])
class TitleTest(unittest.TestCase):
  """Round-trips atom.Title, including a '<' that must be XML-escaped."""

  def setUp(self):
    self.title = atom.Title()

  def testTitleToAndFromString(self):
    self.title.type = 'text'
    self.title.text = 'Less: <'
    self.assertEqual(self.title.type, 'text')
    self.assertEqual(self.title.text, 'Less: <')
    round_tripped = atom.TitleFromString(self.title.ToString())
    self.assertEqual(round_tripped.type, self.title.type)
    self.assertEqual(round_tripped.text, self.title.text)
class SubtitleTest(unittest.TestCase):
  """Round-trip serialization test for atom.Subtitle."""
  def setUp(self):
    self.subtitle = atom.Subtitle()
  # Renamed from testTitleToAndFromString, a copy-paste from TitleTest;
  # unittest discovery only depends on the 'test' prefix, so nothing breaks.
  def testSubtitleToAndFromString(self):
    self.subtitle.type = 'text'
    # The '&' must survive XML escaping on the round trip.
    self.subtitle.text = 'sub & title'
    self.assert_(self.subtitle.type == 'text')
    self.assert_(self.subtitle.text == 'sub & title')
    new_subtitle = atom.SubtitleFromString(self.subtitle.ToString())
    self.assert_(self.subtitle.type == new_subtitle.type)
    self.assert_(self.subtitle.text == new_subtitle.text)
class SummaryTest(unittest.TestCase):
  """Round-trip serialization test for atom.Summary."""
  def setUp(self):
    self.summary = atom.Summary()
  # Renamed from testTitleToAndFromString, a copy-paste from TitleTest;
  # unittest discovery only depends on the 'test' prefix, so nothing breaks.
  def testSummaryToAndFromString(self):
    self.summary.type = 'text'
    # The '<' must survive XML escaping on the round trip.
    self.summary.text = 'Less: <'
    self.assert_(self.summary.type == 'text')
    self.assert_(self.summary.text == 'Less: <')
    new_summary = atom.SummaryFromString(self.summary.ToString())
    self.assert_(self.summary.type == new_summary.type)
    self.assert_(self.summary.text == new_summary.text)
class CategoryTest(unittest.TestCase):
  """Round-trip serialization test for atom.Category (term/scheme/label)."""
  def setUp(self):
    self.category = atom.Category()
  def testCategoryToAndFromString(self):
    self.category.term = 'x'
    self.category.scheme = 'y'
    self.category.label = 'z'
    self.assert_(self.category.term == 'x')
    self.assert_(self.category.scheme == 'y')
    self.assert_(self.category.label == 'z')
    new_category = atom.CategoryFromString(self.category.ToString())
    self.assert_(self.category.term == new_category.term)
    self.assert_(self.category.scheme == new_category.scheme)
    self.assert_(self.category.label == new_category.label)
class ContributorTest(unittest.TestCase):
  """Round-trip serialization test for atom.Contributor (name/email/uri)."""
  def setUp(self):
    self.contributor = atom.Contributor()
  def testContributorToAndFromString(self):
    self.contributor.name = atom.Name(text='J Scud')
    self.contributor.email = atom.Email(text='nobody@nowhere')
    self.contributor.uri = atom.Uri(text='http://www.google.com')
    self.assert_(self.contributor.name.text == 'J Scud')
    self.assert_(self.contributor.email.text == 'nobody@nowhere')
    self.assert_(self.contributor.uri.text == 'http://www.google.com')
    new_contributor = atom.ContributorFromString(self.contributor.ToString())
    self.assert_(self.contributor.name.text == new_contributor.name.text)
    self.assert_(self.contributor.email.text == new_contributor.email.text)
    self.assert_(self.contributor.uri.text == new_contributor.uri.text)
class IdTest(unittest.TestCase):
  """Round-trips atom.Id through its XML string form."""

  def setUp(self):
    self.my_id = atom.Id()

  def testIdToAndFromString(self):
    self.my_id.text = 'my nifty id'
    self.assertEqual(self.my_id.text, 'my nifty id')
    reparsed = atom.IdFromString(self.my_id.ToString())
    self.assertEqual(reparsed.text, self.my_id.text)
class IconTest(unittest.TestCase):
  """Round-trip serialization test for atom.Icon."""
  def setUp(self):
    self.icon = atom.Icon()
  def testIconToAndFromString(self):
    self.icon.text = 'my picture'
    self.assert_(self.icon.text == 'my picture')
    # Use ToString() like every sibling test instead of str() for
    # consistency; atom elements route __str__ through ToString, so the
    # serialized form is unchanged.
    new_icon = atom.IconFromString(self.icon.ToString())
    self.assert_(self.icon.text == new_icon.text)
class LogoTest(unittest.TestCase):
  """Round-trips atom.Logo through its XML string form."""

  def setUp(self):
    self.logo = atom.Logo()

  def testLogoToAndFromString(self):
    self.logo.text = 'my logo'
    self.assertEqual(self.logo.text, 'my logo')
    reparsed = atom.LogoFromString(self.logo.ToString())
    self.assertEqual(reparsed.text, self.logo.text)
class RightsTest(unittest.TestCase):
  """Round-trip serialization test for atom.Rights."""
  def setUp(self):
    self.rights = atom.Rights()
  # Renamed from testContributorToAndFromString, a copy-paste from
  # ContributorTest; unittest discovery only depends on the 'test' prefix.
  def testRightsToAndFromString(self):
    self.rights.text = 'you have the right to remain silent'
    self.rights.type = 'text'
    self.assert_(self.rights.text == 'you have the right to remain silent')
    self.assert_(self.rights.type == 'text')
    new_rights = atom.RightsFromString(self.rights.ToString())
    self.assert_(self.rights.text == new_rights.text)
    self.assert_(self.rights.type == new_rights.type)
class UpdatedTest(unittest.TestCase):
  """Round-trip tests for atom.Updated."""

  def setUp(self):
    self.updated = atom.Updated()

  def testUpdatedToAndFromString(self):
    """Text content survives serialization and reparsing."""
    self.updated.text = 'my time'
    self.assert_(self.updated.text == 'my time')
    reparsed = atom.UpdatedFromString(self.updated.ToString())
    self.assert_(reparsed.text == self.updated.text)
class PublishedTest(unittest.TestCase):
  """Round-trip tests for atom.Published."""

  def setUp(self):
    self.published = atom.Published()

  def testPublishedToAndFromString(self):
    """Text content survives serialization and reparsing."""
    self.published.text = 'pub time'
    self.assert_(self.published.text == 'pub time')
    reparsed = atom.PublishedFromString(self.published.ToString())
    self.assert_(reparsed.text == self.published.text)
class FeedEntryParentTest(unittest.TestCase):
  """The test accesses hidden methods in atom.FeedEntryParent"""
  def testConvertToAndFromElementTree(self):
    # Use entry because FeedEntryParent doesn't have a tag or namespace.
    original = atom.Entry()
    copy = atom.FeedEntryParent()
    original.author.append(atom.Author(name=atom.Name(text='J Scud')))
    self.assert_(original.author[0].name.text == 'J Scud')
    self.assert_(copy.author == [])
    original.id = atom.Id(text='test id')
    self.assert_(original.id.text == 'test id')
    self.assert_(copy.id is None)
    # _HarvestElementTree pulls recognized members out of the ElementTree
    # representation; afterwards the copy should mirror the original's
    # author and id members.
    copy._HarvestElementTree(original._ToElementTree())
    self.assert_(original.author[0].name.text == copy.author[0].name.text)
    self.assert_(original.id.text == copy.id.text)
class EntryTest(unittest.TestCase):
  """Serialization, parsing, and app:control handling for atom.Entry."""

  def testConvertToAndFromString(self):
    entry = atom.Entry()
    entry.author.append(atom.Author(name=atom.Name(text='js')))
    entry.title = atom.Title(text='my test entry')
    self.assert_(entry.author[0].name.text == 'js')
    self.assert_(entry.title.text == 'my test entry')
    reparsed = atom.EntryFromString(entry.ToString())
    self.assert_(reparsed.author[0].name.text == 'js')
    self.assert_(reparsed.title.text == 'my test entry')

  def testEntryCorrectlyConvertsActualData(self):
    entry = atom.EntryFromString(test_data.XML_ENTRY_1)
    category = entry.category[0]
    self.assert_(category.scheme ==
                 'http://base.google.com/categories/itemtypes')
    self.assert_(category.term == 'products')
    self.assert_(entry.id.text == ' http://www.google.com/test/id/url ')
    self.assert_(entry.title.text == 'Testing 2000 series laptop')
    self.assert_(entry.title.type == 'text')
    self.assert_(entry.content.type == 'xhtml')
    #TODO check all other values for the test entry

  def testAppControl(self):
    entry = atom.EntryFromString(test_data.TEST_BASE_ENTRY)
    control = entry.control
    self.assertEquals(control.draft.text, 'yes')
    self.assertEquals(len(control.extension_elements), 1)
    self.assertEquals(control.extension_elements[0].tag, 'disapproved')
class ControlTest(unittest.TestCase):
  """Round-trip tests for the app:control element."""

  def testConvertToAndFromString(self):
    control = atom.Control()
    control.text = 'some text'
    control.draft = atom.Draft(text='yes')
    self.assertEquals(control.draft.text, 'yes')
    self.assertEquals(control.text, 'some text')
    self.assertEquals(isinstance(control.draft, atom.Draft), True)
    reparsed = atom.ControlFromString(str(control))
    self.assertEquals(reparsed.draft.text, control.draft.text)
    self.assertEquals(reparsed.text, control.text)
    self.assertEquals(isinstance(reparsed.draft, atom.Draft), True)
class DraftTest(unittest.TestCase):
  """Round-trip tests for app:draft, including extension attributes."""

  def testConvertToAndFromString(self):
    draft = atom.Draft()
    draft.text = 'maybe'
    draft.extension_attributes['foo'] = 'bar'
    self.assertEquals(draft.text, 'maybe')
    self.assertEquals(draft.extension_attributes['foo'], 'bar')
    reparsed = atom.DraftFromString(str(draft))
    self.assertEquals(reparsed.text, draft.text)
    self.assertEquals(reparsed.extension_attributes['foo'],
                      draft.extension_attributes['foo'])
class SourceTest(unittest.TestCase):
  """Round-trip tests for atom:source."""

  def testConvertToAndFromString(self):
    source = atom.Source()
    source.author.append(atom.Author(name=atom.Name(text='js')))
    source.title = atom.Title(text='my test source')
    source.generator = atom.Generator(text='gen')
    # The same members must be present before and after a round trip.
    for candidate in (source, atom.SourceFromString(source.ToString())):
      self.assert_(candidate.author[0].name.text == 'js')
      self.assert_(candidate.title.text == 'my test source')
      self.assert_(candidate.generator.text == 'gen')
class FeedTest(unittest.TestCase):
  """Round-trip and entry-ordering tests for atom.Feed."""

  def testConvertToAndFromString(self):
    feed = atom.Feed()
    feed.author.append(atom.Author(name=atom.Name(text='js')))
    feed.title = atom.Title(text='my test source')
    feed.generator = atom.Generator(text='gen')
    feed.entry.append(atom.Entry(author=[atom.Author(name=atom.Name(
        text='entry author'))]))
    # The same members must be present before and after a round trip.
    for candidate in (feed, atom.FeedFromString(feed.ToString())):
      self.assert_(candidate.author[0].name.text == 'js')
      self.assert_(candidate.title.text == 'my test source')
      self.assert_(candidate.generator.text == 'gen')
      self.assert_(candidate.entry[0].author[0].name.text == 'entry author')

  def testPreserveEntryOrder(self):
    # Entries are interleaved with other feed children; their relative
    # order must be preserved by parsing and serialization.
    test_xml = (
        '<feed xmlns="http://www.w3.org/2005/Atom">'
        '<entry><id>0</id></entry>'
        '<entry><id>1</id></entry>'
        '<title>Testing Order</title>'
        '<entry><id>2</id></entry>'
        '<entry><id>3</id></entry>'
        '<entry><id>4</id></entry>'
        '<entry><id>5</id></entry>'
        '<entry><id>6</id></entry>'
        '<entry><id>7</id></entry>'
        '<author/>'
        '<entry><id>8</id></entry>'
        '<id>feed_id</id>'
        '<entry><id>9</id></entry>'
        '</feed>')
    feed = atom.FeedFromString(test_xml)
    for position in xrange(10):
      self.assert_(feed.entry[position].id.text == str(position))
    # Order must also survive a serialization round trip.
    feed = atom.FeedFromString(feed.ToString())
    for position in xrange(10):
      self.assert_(feed.entry[position].id.text == str(position))
    # Swapping two entries should be visible in memory...
    feed.entry[3], feed.entry[4] = feed.entry[4], feed.entry[3]
    self.assert_(feed.entry[2].id.text == '2')
    self.assert_(feed.entry[3].id.text == '4')
    self.assert_(feed.entry[4].id.text == '3')
    self.assert_(feed.entry[5].id.text == '5')
    # ...and after yet another round trip.
    feed = atom.FeedFromString(feed.ToString())
    self.assert_(feed.entry[2].id.text == '2')
    self.assert_(feed.entry[3].id.text == '4')
    self.assert_(feed.entry[4].id.text == '3')
    self.assert_(feed.entry[5].id.text == '5')
class ContentEntryParentTest(unittest.TestCase):
  """Round-trip tests for atom.Content (text, type, and src members)."""
  def setUp(self):
    self.content = atom.Content()
  def testConvertToAndFromElementTree(self):
    # Populate all three members and verify they survive serialization
    # and reparsing.
    self.content.text = 'my content'
    self.content.type = 'text'
    self.content.src = 'my source'
    self.assert_(self.content.text == 'my content')
    self.assert_(self.content.type == 'text')
    self.assert_(self.content.src == 'my source')
    new_content = atom.ContentFromString(self.content.ToString())
    self.assert_(self.content.text == new_content.text)
    self.assert_(self.content.type == new_content.type)
    self.assert_(self.content.src == new_content.src)
  def testContentConstructorSetsSrc(self):
    # src may also be supplied directly to the constructor.
    new_content = atom.Content(src='abcd')
    self.assertEquals(new_content.src, 'abcd')
class PreserveUnkownElementTest(unittest.TestCase):
  """Tests correct preservation of XML elements which are non Atom"""
  # NOTE(review): the class name has a typo ('Unkown'); it is referenced
  # by suite() below, so renaming requires a coordinated change.
  def setUp(self):
    self.feed = atom.FeedFromString(test_data.GBASE_ATTRIBUTE_FEED)
  def testCaptureOpenSearchElements(self):
    # OpenSearch elements are not Atom members, so they must be kept as
    # extension elements with their original tag and namespace.
    self.assertEquals(self.feed.FindExtensions('totalResults')[0].tag,
        'totalResults')
    self.assertEquals(self.feed.FindExtensions('totalResults')[0].namespace,
        'http://a9.com/-/spec/opensearchrss/1.0/')
    open_search_extensions = self.feed.FindExtensions(
        namespace='http://a9.com/-/spec/opensearchrss/1.0/')
    self.assertEquals(len(open_search_extensions), 3)
    for element in open_search_extensions:
      self.assertEquals(element.namespace,
          'http://a9.com/-/spec/opensearchrss/1.0/')
  def testCaptureMetaElements(self):
    # Google Base metadata on the first entry lives in its own namespace
    # and keeps its attributes and children.
    meta_elements = self.feed.entry[0].FindExtensions(
        namespace='http://base.google.com/ns-metadata/1.0')
    self.assertEquals(len(meta_elements), 1)
    self.assertEquals(meta_elements[0].attributes['count'], '4416629')
    self.assertEquals(len(meta_elements[0].children), 10)
  def testCaptureMetaChildElements(self):
    # Children of a preserved extension element are preserved recursively.
    meta_elements = self.feed.entry[0].FindExtensions(
        namespace='http://base.google.com/ns-metadata/1.0')
    meta_children = meta_elements[0].FindChildren(
        namespace='http://base.google.com/ns-metadata/1.0')
    self.assertEquals(len(meta_children), 10)
    for child in meta_children:
      self.assertEquals(child.tag, 'value')
class LinkFinderTest(unittest.TestCase):
  """Tests the link lookup helpers on a parsed entry."""

  def setUp(self):
    self.entry = atom.EntryFromString(test_data.XML_ENTRY_1)

  def testLinkFinderGetsLicenseLink(self):
    license_link = self.entry.GetLicenseLink()
    self.assertEquals(isinstance(license_link, atom.Link), True)
    self.assertEquals(license_link.href,
                      'http://creativecommons.org/licenses/by-nc/2.5/rdf')
    self.assertEquals(license_link.rel, 'license')

  def testLinkFinderGetsAlternateLink(self):
    alternate_link = self.entry.GetAlternateLink()
    self.assertEquals(isinstance(alternate_link, atom.Link), True)
    self.assertEquals(alternate_link.href,
                      'http://www.provider-host.com/123456789')
    self.assertEquals(alternate_link.rel, 'alternate')
class AtomBaseTest(unittest.TestCase):
  """Verifies nested extension elements survive ElementTree conversion."""

  def testAtomBaseConvertsExtensions(self):
    # Using Id because it adds no additional members.
    atom_base = atom.Id()
    child = atom.ExtensionElement('foo', namespace='http://ns0.com')
    grandchild = atom.ExtensionElement('bar', namespace='http://ns0.com')
    child.children.append(grandchild)
    atom_base.extension_elements.append(child)
    self.assertEquals(len(atom_base.extension_elements), 1)
    self.assertEquals(len(atom_base.extension_elements[0].children), 1)
    self.assertEquals(atom_base.extension_elements[0].tag, 'foo')
    self.assertEquals(atom_base.extension_elements[0].children[0].tag, 'bar')
    element_tree = atom_base._ToElementTree()
    foo_node = element_tree.find('{http://ns0.com}foo')
    self.assert_(foo_node is not None)
    self.assert_(foo_node.find('{http://ns0.com}bar') is not None)
class UtfParsingTest(unittest.TestCase):
  """Exercises parsing and serialization of non-ASCII atom XML.

  These tests mutate the module-level atom.MEMBER_STRING_ENCODING
  setting; setUp records the original value and tearDown restores it so
  the change cannot leak into other test cases (previously it did).
  """

  def setUp(self):
    # Preserve the global encoding configuration (restored in tearDown).
    self._original_encoding = atom.MEMBER_STRING_ENCODING
    self.test_xml = u"""<?xml version="1.0" encoding="utf-8"?>
<entry xmlns='http://www.w3.org/2005/Atom'>
  <id>http://www.google.com/test/id/url</id>
  <title type='\u03B1\u03BB\u03C6\u03B1'>\u03B1\u03BB\u03C6\u03B1</title>
</entry>"""

  def tearDown(self):
    # Undo any change the tests made to the module-level setting.
    atom.MEMBER_STRING_ENCODING = self._original_encoding

  def testMemberStringEncoding(self):
    atom_entry = atom.EntryFromString(self.test_xml)
    #self.assertEqual(atom_entry.title.type.encode('utf-8'),
    #    u'\u03B1\u03BB\u03C6\u03B1'.encode('utf-8'))
    #self.assertEqual(atom_entry.title.text.encode('utf-8'),
    #    u'\u03B1\u03BB\u03C6\u03B1'.encode('utf-8'))
    # Setting object members to unicode strings is supported even if
    # MEMBER_STRING_ENCODING is set 'utf-8' (should it be?)
    atom_entry.title.type = u'\u03B1\u03BB\u03C6\u03B1'
    xml = atom_entry.ToString()
    self.assert_(u'\u03B1\u03BB\u03C6\u03B1'.encode('utf-8') in xml)
    # Make sure that we can use plain text when MEMBER_STRING_ENCODING is utf8
    atom_entry.title.type = "plain text"
    atom_entry.title.text = "more text"
    xml = atom_entry.ToString()
    self.assert_("plain text" in xml)
    self.assert_("more text" in xml)
    # Test something else than utf-8
    atom.MEMBER_STRING_ENCODING = 'iso8859_7'
    atom_entry = atom.EntryFromString(self.test_xml)
    self.assert_(atom_entry.title.type == u'\u03B1\u03BB\u03C6\u03B1'.encode(
        'iso8859_7'))
    self.assert_(atom_entry.title.text == u'\u03B1\u03BB\u03C6\u03B1'.encode(
        'iso8859_7'))
    # Test using unicode strings directly for object members
    atom.MEMBER_STRING_ENCODING = unicode
    atom_entry = atom.EntryFromString(self.test_xml)
    self.assert_(atom_entry.title.type == u'\u03B1\u03BB\u03C6\u03B1')
    self.assert_(atom_entry.title.text == u'\u03B1\u03BB\u03C6\u03B1')
    # Make sure that we can use plain text when MEMBER_STRING_ENCODING is
    # unicode
    atom_entry.title.type = "plain text"
    atom_entry.title.text = "more text"
    xml = atom_entry.ToString()
    self.assert_("plain text" in xml)
    self.assert_("more text" in xml)

  def testConvertExampleXML(self):
    # Parsing this sample previously exposed string-encoding bugs; the
    # test only asserts that no UnicodeDecodeError escapes.
    try:
      entry = atom.CreateClassFromXMLString(atom.Entry,
          test_data.GBASE_STRING_ENCODING_ENTRY)
    except UnicodeDecodeError:
      self.fail('Error when converting XML')
class DeprecationDecoratorTest(unittest.TestCase):
  """Checks that atom.deprecated wraps functions without renaming them."""
  def testDeprecationWarning(self):
    def to_deprecate():
      return 5
    self.assertEqual(to_deprecate.func_name, 'to_deprecate')
    deprecated = atom.deprecated('test')(to_deprecate)
    # The decorator must return a new callable, not the original.
    self.assertNotEqual(to_deprecate, deprecated)
    # After decorating a function as deprecated, the function name should
    # still be the name of the original function.
    self.assertEqual(deprecated.func_name, 'to_deprecate')
    #@atom.deprecated()
    def also_deprecated():
      return 6
    also_deprecated = atom.deprecated()(also_deprecated)
    self.assertEqual(also_deprecated.func_name, 'also_deprecated')
def suite():
  """Builds the suite of all test cases defined in this module.

  ControlTest, DraftTest, and SourceTest were previously defined but
  missing from this list, so they never ran; they are now included.
  """
  return conf.build_suite([AuthorTest, EmailTest, NameTest,
      ExtensionElementTest, LinkTest, GeneratorTest, TitleTest, SubtitleTest,
      SummaryTest, IdTest, IconTest, LogoTest, RightsTest, UpdatedTest,
      PublishedTest, FeedEntryParentTest, EntryTest, ContentEntryParentTest,
      ControlTest, DraftTest, SourceTest,
      PreserveUnkownElementTest, FeedTest, LinkFinderTest, AtomBaseTest,
      UtfParsingTest, DeprecationDecoratorTest])
# Allow running this test module directly.
if __name__ == '__main__':
  unittest.main()
| Python |
#!/usr/bin/env python
#
# Copyright (C) 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module is used for version 2 of the Google Data APIs.
__author__ = 'j.s@google.com (Jeff Scudder)'
import unittest
import gdata.test_config as conf
import gdata.analytics.client
import gdata.apps.emailsettings.client
import gdata.blogger.client
import gdata.spreadsheets.client
import gdata.calendar_resource.client
import gdata.contacts.client
import gdata.docs.client
import gdata.projecthosting.client
import gdata.sites.client
class ClientSmokeTest(unittest.TestCase):
  """Smoke test: every v2 client class passes the shared auth checks."""

  def test_check_auth_client_classes(self):
    client_classes = (
        gdata.analytics.client.AnalyticsClient,
        gdata.apps.emailsettings.client.EmailSettingsClient,
        gdata.blogger.client.BloggerClient,
        gdata.spreadsheets.client.SpreadsheetsClient,
        gdata.calendar_resource.client.CalendarResourceClient,
        gdata.contacts.client.ContactsClient,
        gdata.docs.client.DocsClient,
        gdata.projecthosting.client.ProjectHostingClient,
        gdata.sites.client.SitesClient
    )
    conf.check_clients_with_auth(self, client_classes)
def suite():
  # Build the suite containing the single smoke-test case above.
  return conf.build_suite([ClientSmokeTest])
if __name__ == '__main__':
  unittest.main()
| Python |
#!/usr/bin/env python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module is used for version 2 of the Google Data APIs.
# These tests attempt to connect to Google servers.
__author__ = 'j.s@google.com (Jeff Scudder)'
import unittest
import gdata.spreadsheets.client
import gdata.gauth
import gdata.client
import atom.http_core
import atom.mock_http_core
import atom.core
import gdata.data
import gdata.test_config as conf
# Register the option that names the spreadsheet used by the live tests.
conf.options.register_option(conf.SPREADSHEET_ID_OPTION)
class SpreadsheetsClientTest(unittest.TestCase):
  """Live-service tests for gdata.spreadsheets.client.SpreadsheetsClient.

  Every test is a no-op unless the 'runlive' option is 'true'; conf
  either replays a recorded session or performs live requests against
  the spreadsheet named by the 'spreadsheetid' option.
  """

  def setUp(self):
    self.client = None
    if conf.options.get_value('runlive') == 'true':
      self.client = gdata.spreadsheets.client.SpreadsheetsClient()
      conf.configure_client(self.client, 'SpreadsheetsClientTest', 'wise')

  def tearDown(self):
    conf.close_client(self.client)

  def test_create_update_delete_worksheet(self):
    """Adds a worksheet, verifies the count changed, then deletes it."""
    if not conf.options.get_value('runlive') == 'true':
      return
    # Either load the recording or prepare to make a live request.
    conf.configure_cache(self.client, 'test_create_update_delete_worksheet')
    spreadsheet_id = conf.options.get_value('spreadsheetid')
    original_worksheets = self.client.get_worksheets(spreadsheet_id)
    self.assert_(isinstance(original_worksheets,
                            gdata.spreadsheets.data.WorksheetsFeed))
    worksheet_count = int(original_worksheets.total_results.text)
    # Add a new worksheet to the spreadsheet.
    created = self.client.add_worksheet(
        spreadsheet_id, 'a test worksheet', 4, 8)
    self.assert_(isinstance(created,
                            gdata.spreadsheets.data.WorksheetEntry))
    self.assertEqual(created.title.text, 'a test worksheet')
    self.assertEqual(created.row_count.text, '4')
    self.assertEqual(created.col_count.text, '8')
    # There should now be one more worksheet in this spreadsheet.
    updated_worksheets = self.client.get_worksheets(spreadsheet_id)
    new_worksheet_count = int(updated_worksheets.total_results.text)
    self.assertEqual(worksheet_count + 1, new_worksheet_count)
    # Delete our test worksheet.
    self.client.delete(created)
    # We should be back to the original number of worksheets.
    updated_worksheets = self.client.get_worksheets(spreadsheet_id)
    new_worksheet_count = int(updated_worksheets.total_results.text)
    self.assertEqual(worksheet_count, new_worksheet_count)

  def test_create_update_delete_table_and_records(self):
    """Creates a table with records in a scratch worksheet, then cleans up."""
    if not conf.options.get_value('runlive') == 'true':
      return
    # Either load the recording or prepare to make a live request.
    conf.configure_cache(
        self.client, 'test_create_update_delete_table_and_records')
    spreadsheet_id = conf.options.get_value('spreadsheetid')
    tables = self.client.get_tables(spreadsheet_id)
    test_worksheet = self.client.add_worksheet(
        spreadsheet_id, 'worksheet x', rows=30, cols=3)
    self.assert_(isinstance(tables, gdata.spreadsheets.data.TablesFeed))
    initial_count = tables.total_results.text
    created_table = self.client.add_table(
        spreadsheet_id, 'Test Table', 'This table is for testing',
        'worksheet x', header_row=5, num_rows=10, start_row=8,
        insertion_mode=None,
        column_headers={'B': 'Food', 'C': 'Drink', 'A': 'Price'})
    # Re-get the list of tables and make sure there are more now.
    updated_tables = self.client.get_tables(spreadsheet_id)
    self.assertEqual(int(initial_count) + 1,
                     int(updated_tables.total_results.text))
    # Get the records in our new table to make sure it has the correct
    # number of records.
    table_num = int(created_table.get_table_id())
    starting_records = self.client.get_records(spreadsheet_id, table_num)
    self.assertEqual(starting_records.total_results.text, '10')
    self.assert_(starting_records.entry[0].field[0].text is None)
    self.assert_(starting_records.entry[0].field[1].text is None)
    self.assert_(starting_records.entry[1].field[0].text is None)
    self.assert_(starting_records.entry[1].field[1].text is None)
    record1 = self.client.add_record(
        spreadsheet_id, table_num,
        {'Food': 'Cheese', 'Drink': 'Soda', 'Price': '2.99'}, 'icky')
    self.client.add_record(spreadsheet_id, table_num,
                           {'Food': 'Eggs', 'Drink': 'Milk'})
    self.client.add_record(spreadsheet_id, table_num,
                           {'Food': 'Spinach', 'Drink': 'Water'})
    updated_records = self.client.get_records(spreadsheet_id, table_num)
    self.assertEqual(updated_records.entry[10].value_for_name('Price'), '2.99')
    self.assertEqual(updated_records.entry[10].value_for_index('A'), '2.99')
    self.assertEqual(updated_records.entry[10].value_for_name('Drink'),
                     'Soda')
    self.assert_(updated_records.entry[11].value_for_name('Price') is None)
    self.assertEqual(updated_records.entry[11].value_for_name('Drink'),
                     'Milk')
    self.assertEqual(updated_records.entry[12].value_for_name('Drink'),
                     'Water')
    self.assert_(updated_records.entry[1].value_for_index('A') is None)
    self.assert_(updated_records.entry[2].value_for_index('B') is None)
    self.assert_(updated_records.entry[3].value_for_index('C') is None)
    # Cleanup the table.
    self.client.delete(created_table)
    # Delete the test worksheet in which the table was placed.
    self.client.delete(test_worksheet)
    # Make sure we are back to the original count.
    updated_tables = self.client.get_tables(spreadsheet_id)
    self.assertEqual(int(initial_count),
                     int(updated_tables.total_results.text))

  def test_get_and_update_cell(self):
    """Sets a single cell's value and reads it back."""
    if not conf.options.get_value('runlive') == 'true':
      return
    # Either load the recording or prepare to make a live request.
    conf.configure_cache(self.client, 'test_get_and_update_cell')
    spreadsheet_id = conf.options.get_value('spreadsheetid')
    test_worksheet = self.client.add_worksheet(
        spreadsheet_id, 'worksheet x', rows=30, cols=3)
    # Get a cell and set its value.
    cell_entry = self.client.get_cell(
        spreadsheet_id, test_worksheet.get_worksheet_id(), 1, 1)
    cell_entry.cell.input_value = 'a test'
    result = self.client.update(cell_entry)
    self.assertEquals(cell_entry.cell.input_value, result.cell.input_value)
    # Verify that the value was set.
    cells = self.client.get_cells(
        spreadsheet_id, test_worksheet.get_worksheet_id())
    self.assertEquals(len(cells.entry), 1)
    self.assertEquals(cells.entry[0].cell.input_value, 'a test')
    # Delete the test worksheet.
    self.client.delete(test_worksheet, force=True)

  def set_cell(self, spreadsheet_id, worksheet_id, row, column, value):
    """Helper: fetches the cell at (row, column) and writes value into it."""
    cell_entry = self.client.get_cell(
        spreadsheet_id, worksheet_id, row, column)
    self.assert_(cell_entry is not None)
    cell_entry.cell.input_value = value
    self.assert_(self.client.update(cell_entry) is not None)

  def test_batch_set_cells(self):
    """Sets two cells (one a formula) in a single batch request."""
    if not conf.options.get_value('runlive') == 'true':
      return
    # Either load the recording or prepare to make a live request.
    # Fixed: this previously reused the 'test_get_and_update_cell' cache
    # name (copy-paste error), making two different tests share one
    # recording.
    conf.configure_cache(self.client, 'test_batch_set_cells')
    spreadsheet_id = conf.options.get_value('spreadsheetid')
    test_worksheet = self.client.add_worksheet(
        spreadsheet_id, 'worksheet x', rows=30, cols=3)
    # Set a couple of cells in a batch request.
    feed = gdata.spreadsheets.data.build_batch_cells_update(
        spreadsheet_id, test_worksheet.get_worksheet_id())
    feed.add_set_cell(1, 1, '5')
    feed.add_set_cell(1, 2, '=A1+2')
    result = self.client.batch(feed, force=True)
    self.assertEqual(result.entry[0].cell.text, '5')
    self.assertEqual(result.entry[1].cell.text, '7')
    # Delete the test worksheet.
    self.client.delete(test_worksheet, force=True)

  def test_crud_on_list_feed(self):
    """Creates, updates, and deletes a row through the list feed."""
    if not conf.options.get_value('runlive') == 'true':
      return
    # Either load the recording or prepare to make a live request.
    conf.configure_cache(self.client, 'test_crud_on_list_feed')
    spreadsheet_id = conf.options.get_value('spreadsheetid')
    test_worksheet = self.client.add_worksheet(
        spreadsheet_id, 'worksheet x', rows=30, cols=3)
    worksheet_id = test_worksheet.get_worksheet_id()
    # Create the first column to provide row headings.
    self.set_cell(spreadsheet_id, worksheet_id, 1, 1, 'cola')
    self.set_cell(spreadsheet_id, worksheet_id, 1, 2, 'colb')
    self.set_cell(spreadsheet_id, worksheet_id, 1, 3, 'colc')
    # Add a row to the spreadsheet.
    entry = gdata.spreadsheets.data.ListEntry()
    entry.from_dict({'cola': 'alpha', 'colb': 'beta', 'colc': 'gamma'})
    added = self.client.add_list_entry(entry, spreadsheet_id, worksheet_id)
    self.assert_(isinstance(added, gdata.spreadsheets.data.ListEntry))
    self.assertEquals(added.get_value('cola'), 'alpha')
    # Update the row.
    added.from_dict({'cola': '1', 'colb': '2', 'colc': '3'})
    updated = self.client.update(added)
    self.assert_(isinstance(updated, gdata.spreadsheets.data.ListEntry))
    self.assertEquals(updated.get_value('cola'), '1')
    # Check the number of rows.
    rows = self.client.get_list_feed(spreadsheet_id, worksheet_id)
    self.assertEquals(len(rows.entry), 1)
    # Remove the row.
    self.client.delete(updated)
    # Check that it was removed.
    rows = self.client.get_list_feed(spreadsheet_id, worksheet_id)
    self.assertEquals(len(rows.entry), 0)
    # Delete the test worksheet.
    self.client.delete(test_worksheet, force=True)
def suite():
  # Build the suite containing the live spreadsheets-client tests.
  return conf.build_suite([SpreadsheetsClientTest])
if __name__ == '__main__':
  # Run via TextTestRunner so the suite() built above is what executes.
  unittest.TextTestRunner().run(suite())
| Python |
#!/usr/bin/env python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module is used for version 2 of the Google Data APIs.
__author__ = 'j.s@google.com (Jeff Scudder)'
import unittest
import gdata.spreadsheets.data
import gdata.test_config as conf
import atom.core
SPREADSHEET = """<entry xmlns="http://www.w3.org/2005/Atom"
xmlns:gd="http://schemas.google.com/g/2005"
gd:etag='"BxAUSQUJRCp7ImBq"'>
<id>http://spreadsheets.google.com/feeds/spreadsheets/private/full/key</id>
<updated>2006-11-17T18:24:18.231Z</updated>
<title type="text">Groceries R Us</title>
<content type="text">Groceries R Us</content>
<link rel="http://schemas.google.com/spreadsheets/2006#worksheetsfeed"
type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/worksheets/key/private/full"/>
<link rel="alternate" type="text/html"
href="http://spreadsheets.google.com/ccc?key=key"/>
<link rel="self" type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/spreadsheets/private/full/key"/>
<author>
<name>Fitzwilliam Darcy</name>
<email>fitz@gmail.com</email>
</author>
</entry>"""
WORKSHEETS_FEED = """<feed xmlns="http://www.w3.org/2005/Atom"
xmlns:openSearch="http://a9.com/-/spec/opensearch/1.1/"
xmlns:gs="http://schemas.google.com/spreadsheets/2006"
xmlns:gd="http://schemas.google.com/g/2005"
gd:etag='W/"D0cERnk-eip7ImA9WBBXGEg."'>
<id>http://spreadsheets.google.com/feeds/worksheets/key/private/full</id>
<updated>2006-11-17T18:23:45.173Z</updated>
<title type="text">Groceries R Us</title>
<link rel="alternate" type="text/html"
href="http://spreadsheets.google.com/ccc?key=key"/>
<link rel="http://schemas.google.com/g/2005#feed"
type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/worksheets/key/private/full"/>
<link rel="self" type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/worksheets/key/private/full"/>
<link
rel="http://schemas.google.com/g/2005#post" type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/worksheets/key/private/full"/>
<author>
<name>Fitzwilliam Darcy</name>
<email>fitz@gmail.com</email>
</author>
<openSearch:totalResults>1</openSearch:totalResults>
<openSearch:startIndex>1</openSearch:startIndex>
<openSearch:itemsPerPage>1</openSearch:itemsPerPage>
<entry gd:etag='"YDwqeyI."'>
<id>http://spreadsheets.google.com/feeds/worksheets/0/private/full/1</id>
<updated>2006-11-17T18:23:45.173Z</updated>
<title type="text">Sheet1</title>
<content type="text">Sheet1</content>
<link rel="http://schemas.google.com/spreadsheets/2006#listfeed"
type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/list/0/1/private/full"/>
<link rel="http://schemas.google.com/spreadsheets/2006#cellsfeed"
type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/cells/0/1/private/full"/>
<link rel="self" type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/worksheets/0/private/full/1"/>
<link rel="edit" type="application/atom+xml"
href="http://spreadsheets.google.com/.../0/.../1/version"/>
<gs:rowCount>100</gs:rowCount>
<gs:colCount>20</gs:colCount>
</entry>
</feed>"""
NEW_WORKSHEET = """<entry xmlns="http://www.w3.org/2005/Atom"
xmlns:gs="http://schemas.google.com/spreadsheets/2006">
<title>Expenses</title>
<gs:rowCount>50</gs:rowCount>
<gs:colCount>10</gs:colCount>
</entry>"""
EDIT_WORKSHEET = """<entry>
<id>
http://spreadsheets.google.com/feeds/worksheets/k/private/full/w
</id>
<updated>2007-07-30T18:51:30.666Z</updated>
<category scheme="http://schemas.google.com/spreadsheets/2006"
term="http://schemas.google.com/spreadsheets/2006#worksheet"/>
<title type="text">Income</title>
<content type="text">Expenses</content>
<link rel="http://schemas.google.com/spreadsheets/2006#listfeed"
type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/list/k/w/private/full"/>
<link rel="http://schemas.google.com/spreadsheets/2006#cellsfeed"
type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/cells/k/w/private/full"/>
<link rel="self" type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/worksheets/k/private/full/w"/>
<link rel="edit" type="application/atom+xml"
href="http://spreadsheets.google.com/.../k/private/full/w/v"/>
<gs:rowCount>45</gs:rowCount>
<gs:colCount>15</gs:colCount>
</entry>"""
NEW_TABLE = """<entry xmlns="http://www.w3.org/2005/Atom"
xmlns:gs="http://schemas.google.com/spreadsheets/2006">
<title type='text'>Table 1</title>
<summary type='text'>This is a list of all who have registered to vote and
whether or not they qualify to vote.</summary>
<gs:worksheet name='Sheet1' />
<gs:header row='1' />
<gs:data numRows='0' startRow='2'>
<gs:column index='B' name='Birthday' />
<gs:column index='C' name='Age' />
<gs:column index='A' name='Name' />
<gs:column index='D' name='CanVote' />
</gs:data>
</entry>"""
TABLES_FEED = """<?xml version='1.0' encoding='utf-8'?>
<feed xmlns="http://www.w3.org/2005/Atom"
xmlns:openSearch="http://a9.com/-/spec/opensearch/1.1/"
xmlns:gs="http://schemas.google.com/spreadsheets/2006"
xmlns:gd="http://schemas.google.com/g/2005"
gd:etag='W/"DEQHQn84fCt7ImA9WxJTGEU."'>
<id>
http://spreadsheets.google.com/feeds/key/tables</id>
<updated>2009-04-28T02:38:53.134Z</updated>
<category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/spreadsheets/2006#table' />
<title>Sample table and record feed</title>
<link rel='alternate' type='text/html'
href='http://spreadsheets.google.com/ccc?key=key' />
<link rel='http://schemas.google.com/g/2005#feed'
type='application/atom+xml'
href='http://spreadsheets.google.com/feeds/key/tables' />
<link rel='http://schemas.google.com/g/2005#post'
type='application/atom+xml'
href='http://spreadsheets.google.com/feeds/key/tables' />
<link rel='self' type='application/atom+xml'
href='http://spreadsheets.google.com/feeds/key/tables' />
<author>
<name>Liz</name>
<email>liz@gmail.com</email>
</author>
<openSearch:totalResults>2</openSearch:totalResults>
<openSearch:startIndex>1</openSearch:startIndex>
<entry gd:etag='"HBcUVgtWASt7ImBq"'>
<id>
http://spreadsheets.google.com/feeds/key/tables/0</id>
<updated>2009-04-28T01:20:32.707Z</updated>
<app:edited xmlns:app="http://www.w3.org/2007/app">
2009-04-28T01:20:32.707Z</app:edited>
<category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/spreadsheets/2006#table' />
<title>Table 1</title>
<summary>This is a list of all who have registered to vote and
whether or not they qualify to vote.</summary>
<content type='application/atom+xml;type=feed'
src='http://spreadsheets.google.com/feeds/key/records/0' />
<link rel='self' type='application/atom+xml'
href='http://spreadsheets.google.com/feeds/key/tables/0' />
<link rel='edit' type='application/atom+xml'
href='http://spreadsheets.google.com/feeds/key/tables/0' />
<gs:worksheet name='Sheet1' />
<gs:header row='1' />
<gs:data insertionMode='overwrite' numRows='2' startRow='2'>
<gs:column index='B' name='Birthday' />
<gs:column index='C' name='Age' />
<gs:column index='A' name='Name' />
<gs:column index='D' name='CanVote' />
</gs:data>
</entry>
<entry gd:etag='"HBcUVgdCGyt7ImBq"'>
<id>
http://spreadsheets.google.com/feeds/key/tables/1</id>
<updated>2009-04-28T01:20:38.313Z</updated>
<app:edited xmlns:app="http://www.w3.org/2007/app">
2009-04-28T01:20:38.313Z</app:edited>
<category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/spreadsheets/2006#table' />
<title>Table 2</title>
<summary>List of detailed information about each voter.</summary>
<content type='application/atom+xml;type=feed'
src='http://spreadsheets.google.com/feeds/key/records/1' />
<link rel='self' type='application/atom+xml'
href='http://spreadsheets.google.com/feeds/key/tables/1' />
<link rel='edit' type='application/atom+xml'
href='http://spreadsheets.google.com/feeds/key/tables/1' />
<gs:worksheet name='Sheet1' />
<gs:header row='30' />
<gs:data insertionMode='overwrite' numRows='10' startRow='34'>
<gs:column index='C' name='Last' />
<gs:column index='B' name='First' />
<gs:column index='D' name='DOB' />
<gs:column index='E' name='Driver License?' />
</gs:data>
</entry>
</feed>"""
# ---------------------------------------------------------------------------
# Canned XML fixtures for the spreadsheets data-model tests below.
# Two fixtures were ill-formed XML and are repaired here:
#   * CELLS_FEED: the '#post' <link> was truncated (missing href and '/>'),
#     which made the whole document unparseable.
#   * BATCH_CELLS: stray quotes in '<batch:id">' tags.
# ---------------------------------------------------------------------------

# A single new record entry, as POSTed to a table's records feed.
NEW_RECORD = """<entry xmlns="http://www.w3.org/2005/Atom"
xmlns:gs="http://schemas.google.com/spreadsheets/2006">
<title>Darcy</title>
<gs:field name='Birthday'>2/10/1785</gs:field>
<gs:field name='Age'>28</gs:field>
<gs:field name='Name'>Darcy</gs:field>
<gs:field name='CanVote'>No</gs:field>
</entry>"""

# A records feed containing two record entries (Darcy and Jane).
RECORDS_FEED = """<?xml version='1.0' encoding='utf-8'?>
<feed xmlns="http://www.w3.org/2005/Atom"
xmlns:openSearch="http://a9.com/-/spec/opensearch/1.1/"
xmlns:gs="http://schemas.google.com/spreadsheets/2006"
xmlns:gd="http://schemas.google.com/g/2005"
gd:etag='W/"DEQHQn84fCt7ImA9WxJTGEU."'>
<id>http://spreadsheets.google.com/feeds/key/records/0</id>
<updated>2009-04-28T02:38:53.134Z</updated>
<category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/spreadsheets/2006#record' />
<title>Table 1</title>
<link rel='alternate' type='text/html'
href='http://spreadsheets.google.com/pub?key=key' />
<link rel='http://schemas.google.com/g/2005#feed'
type='application/atom+xml'
href='http://spreadsheets.google.com/feeds/key/records/0' />
<link rel='http://schemas.google.com/g/2005#post'
type='application/atom+xml'
href='http://spreadsheets.google.com/feeds/key/records/0' />
<link rel='self' type='application/atom+xml'
href='http://spreadsheets.google.com/feeds/key/records/0' />
<author>
<name>Liz</name>
<email>liz@gmail.com</email>
</author>
<openSearch:totalResults>2</openSearch:totalResults>
<openSearch:startIndex>1</openSearch:startIndex>
<entry gd:etag='"UB8DTlJAKSt7ImA-WkUT"'>
<id>
http://spreadsheets.google.com/feeds/key/records/0/cn6ca</id>
<updated>2009-04-28T02:38:53.134Z</updated>
<app:edited xmlns:app="http://www.w3.org/2007/app">
2009-04-28T02:38:53.134Z</app:edited>
<category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/spreadsheets/2006#record' />
<title>Darcy</title>
<content>Birthday: 2/10/1785, Age: 28, Name: Darcy,
CanVote: No</content>
<link rel='self' type='application/atom+xml'
href='http://spreadsheets.google.com/feeds/key/records/0/cn6ca' />
<link rel='edit' type='application/atom+xml'
href='http://spreadsheets.google.com/feeds/key/records/0/cn6ca' />
<gs:field index='B' name='Birthday'>2/10/1785</gs:field>
<gs:field index='C' name='Age'>28</gs:field>
<gs:field index='A' name='Name'>Darcy</gs:field>
<gs:field index='D' name='CanVote'>No</gs:field>
</entry>
<entry gd:etag='"UVBFUEcNRCt7ImA9DU8."'>
<id>
http://spreadsheets.google.com/feeds/key/records/0/cokwr</id>
<updated>2009-04-28T02:38:53.134Z</updated>
<app:edited xmlns:app="http://www.w3.org/2007/app">
2009-04-28T02:38:53.134Z</app:edited>
<category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/spreadsheets/2006#record' />
<title>Jane</title>
<content>Birthday: 1/6/1791, Age: 22, Name: Jane,
CanVote: Yes</content>
<link rel='self' type='application/atom+xml'
href='http://spreadsheets.google.com/feeds/key/records/0/cokwr' />
<link rel='edit' type='application/atom+xml'
href='http://spreadsheets.google.com/feeds/key/records/0/cokwr' />
<gs:field index='B' name='Birthday'>1/6/1791</gs:field>
<gs:field index='C' name='Age'>22</gs:field>
<gs:field index='A' name='Name'>Jane</gs:field>
<gs:field index='D' name='CanVote'>Yes</gs:field>
</entry>
</feed>"""

# A list feed with two gsx-extended rows (Bingley and Charlotte).
LIST_FEED = """<feed xmlns="http://www.w3.org/2005/Atom"
xmlns:openSearch="http://a9.com/-/spec/opensearch/1.1/"
xmlns:gsx="http://schemas.google.com/spreadsheets/2006/extended"
xmlns:gd="http://schemas.google.com/g/2005"
gd:etag='W/"D0cERnk-eip7ImA9WBBXGEg."'>
<id>
http://spreadsheets.google.com/feeds/list/key/worksheetId/private/full
</id>
<updated>2006-11-17T18:23:45.173Z</updated>
<title type="text">Sheet1</title>
<link rel="alternate" type="text/html"
href="http://spreadsheets.google.com/ccc?key=key"/>
<link rel="http://schemas.google.com/g/2005#feed"
type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/list/k/w/private/full"/>
<link rel="http://schemas.google.com/g/2005#post"
type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/list/k/w/private/full"/>
<link rel="self" type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/list/k/w/private/full"/>
<author>
<name>Fitzwilliam Darcy</name>
<email>fitz@gmail.com</email>
</author>
<openSearch:totalResults>8</openSearch:totalResults>
<openSearch:startIndex>1</openSearch:startIndex>
<openSearch:itemsPerPage>8</openSearch:itemsPerPage>
<entry gd:etag='"S0wCTlpIIip7ImA0X0QI"'>
<id>http://spreadsheets.google.com/feeds/list/k/w/private/full/r</id>
<updated>2006-11-17T18:23:45.173Z</updated>
<category scheme="http://schemas.google.com/spreadsheets/2006"
term="http://schemas.google.com/spreadsheets/2006#list"/>
<title type="text">Bingley</title>
<content type="text">Hours: 10, Items: 2, IPM: 0.0033</content>
<link rel="self" type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/list/k/w/private/full/r"/>
<link rel="edit" type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/list/k/w/private/full/r/v"/>
<gsx:name>Bingley</gsx:name>
<gsx:hours>10</gsx:hours>
<gsx:items>2</gsx:items>
<gsx:ipm>0.0033</gsx:ipm>
</entry>
<entry gd:etag='"AxQDSXxjfyp7ImA0ChJVSBI."'>
<id>
http://spreadsheets.google.com/feeds/list/k/w/private/full/rowId
</id>
<updated>2006-11-17T18:23:45.173Z</updated>
<category scheme="http://schemas.google.com/spreadsheets/2006"
term="http://schemas.google.com/spreadsheets/2006#list"/>
<title type="text">Charlotte</title>
<content type="text">Hours: 60, Items: 18000, IPM: 5</content>
<link rel="self" type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/list/k/w/private/full/r"/>
<link rel="edit" type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/list/k/w/private/full/r/v"/>
<gsx:name>Charlotte</gsx:name>
<gsx:hours>60</gsx:hours>
<gsx:items>18000</gsx:items>
<gsx:ipm>5</gsx:ipm>
</entry>
</feed>"""

# A brand-new list row, as POSTed to a list feed.
NEW_ROW = """<entry xmlns="http://www.w3.org/2005/Atom"
xmlns:gsx="http://schemas.google.com/spreadsheets/2006/extended">
<gsx:hours>1</gsx:hours>
<gsx:ipm>1</gsx:ipm>
<gsx:items>60</gsx:items>
<gsx:name>Elizabeth Bennet</gsx:name>
</entry>"""

# An existing row after an edit (hours 10->20, items 2->4).
UPDATED_ROW = """<entry gd:etag='"S0wCTlpIIip7ImA0X0QI"'
xmlns="http://www.w3.org/2005/Atom"
xmlns:gd="http://schemas.google.com/g/2005"
xmlns:gsx="http://schemas.google.com/spreadsheets/2006/extended">
<id>http://spreadsheets.google.com/feeds/list/k/w/private/full/rowId</id>
<updated>2006-11-17T18:23:45.173Z</updated>
<category scheme="http://schemas.google.com/spreadsheets/2006"
term="http://schemas.google.com/spreadsheets/2006#list"/>
<title type="text">Bingley</title>
<content type="text">Hours: 10, Items: 2, IPM: 0.0033</content>
<link rel="self" type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/list/k/w/private/full/r"/>
<link rel="edit" type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/list/k/w/private/full/r/v"/>
<gsx:name>Bingley</gsx:name>
<gsx:hours>20</gsx:hours>
<gsx:items>4</gsx:items>
<gsx:ipm>0.0033</gsx:ipm>
</entry>"""

# A cells feed with three cell entries (A1, B1, D9 with a formula).
# Fix: the '#post' link element below was truncated in the original
# (no href, no '/>'), making the whole fixture unparseable XML.
CELLS_FEED = """<feed xmlns="http://www.w3.org/2005/Atom"
xmlns:openSearch="http://a9.com/-/spec/opensearch/1.1/"
xmlns:gs="http://schemas.google.com/spreadsheets/2006"
xmlns:gd="http://schemas.google.com/g/2005"
gd:etag='W/"D0cERnk-eip7ImA9WBBXGEg."'>
<id>
http://spreadsheets.google.com/feeds/cells/key/worksheetId/private/full
</id>
<updated>2006-11-17T18:27:32.543Z</updated>
<title type="text">Sheet1</title>
<link rel="alternate" type="text/html"
href="http://spreadsheets.google.com/ccc?key=key"/>
<link rel="http://schemas.google.com/g/2005#feed" type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/cells/k/w/private/full"/>
<link rel="http://schemas.google.com/g/2005#post"
type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/cells/k/w/private/full"/>
<link rel="http://schemas.google.com/g/2005#batch"
type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/cells/k/w/private/full/batch"/>
<link rel="self" type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/cells/k/w/private/full"/>
<author>
<name>Fitzwilliam Darcy</name>
<email>fitz@gmail.com</email>
</author>
<openSearch:startIndex>1</openSearch:startIndex>
<openSearch:itemsPerPage>36</openSearch:itemsPerPage>
<gs:rowCount>100</gs:rowCount>
<gs:colCount>20</gs:colCount>
<entry gd:etag='"ImA9D1APFyp7"'>
<id>
http://spreadsheets.google.com/feeds/cells/k/w/private/full/R1C1
</id>
<updated>2006-11-17T18:27:32.543Z</updated>
<category scheme="http://schemas.google.com/spreadsheets/2006"
term="http://schemas.google.com/spreadsheets/2006#cell"/>
<title type="text">A1</title>
<content type="text">Name</content>
<link rel="self" type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/cells/k/w/pr/full/R1C1"/>
<link rel="edit" type="application/atom+xml"
href="http://spreadsheets.google.com/./cells/k/w/pr/full/R1C1/bgvjf"/>
<gs:cell row="1" col="1" inputValue="Name">Name</gs:cell>
</entry>
<entry gd:etag='"YD0PS1YXByp7Ig.."'>
<id>
http://spreadsheets.google.com/feeds/cells/k/w/private/full/R1C2
</id>
<updated>2006-11-17T18:27:32.543Z</updated>
<category scheme="http://schemas.google.com/spreadsheets/2006"
term="http://schemas.google.com/spreadsheets/2006#cell"/>
<title type="text">B1</title>
<content type="text">Hours</content>
<link rel="self" type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/cells/k/w/pr/full/R1C2"/>
<link rel="edit" type="application/atom+xml"
href="http://spreadsheets.google.com/./cells/k/w/pr/full/R1C2/1pn567"/>
<gs:cell row="1" col="2" inputValue="Hours">Hours</gs:cell>
</entry>
<entry gd:etag='"ImB5CBYSRCp7"'>
<id>
http://spreadsheets.google.com/feeds/cells/k/w/private/full/R9C4
</id>
<updated>2006-11-17T18:27:32.543Z</updated>
<category scheme="http://schemas.google.com/spreadsheets/2006"
term="http://schemas.google.com/spreadsheets/2006#cell"/>
<title type="text">D9</title>
<content type="text">5</content>
<link rel="self" type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/cells/k/w/pr/full/R9C4"/>
<link rel="edit" type="application/atom+xml"
href="http://spreadsheets.google.com/./cells/k/w/pr/full/R9C4/srevc"/>
<gs:cell row="9" col="4"
inputValue="=FLOOR(R[0]C[-1]/(R[0]C[-2]*60),.0001)"
numericValue="5.0">5</gs:cell>
</entry>
</feed>"""

# A batch cell-update request with two 'update' operations.
# Fix: the original had malformed '<batch:id">' / '</batch:id">' tags
# (stray quote), which is not well-formed XML.
BATCH_CELLS = """<feed xmlns="http://www.w3.org/2005/Atom"
xmlns:batch="http://schemas.google.com/gdata/batch"
xmlns:gs="http://schemas.google.com/spreadsheets/2006">
<id>
http://spreadsheets.google.com/feeds/cells/key/worksheetId/private/full
</id>
<entry>
<batch:id>A1</batch:id>
<batch:operation type="update"/>
<id>
http://spreadsheets.google.com/feeds/cells/k/w/private/full/cellId
</id>
<link rel="edit" type="application/atom+xml"
href="http://spreadsheets/google.com/./cells/k/w/pr/full/R2C4/v"/>
<gs:cell row="2" col="4" inputValue="newData"/>
</entry>
<entry>
<batch:id>A2</batch:id>
<batch:operation type="update"/>
<title type="text">A2</title>
<id>
http://spreadsheets.google.com/feeds/cells/k/w/private/full/cellId
</id>
<link rel="edit" type="application/atom+xml"
href="http://spreadsheets/google.com/feeds/cells/k/w/pr/full/R2C5/v"/>
<gs:cell row="2" col="5" inputValue="moreInfo"/>
</entry>
</feed>"""
class SpreadsheetEntryTest(unittest.TestCase):
    """Verifies parsing of a spreadsheet entry and spreadsheet-key lookup."""

    SELF_URL = ('http://spreadsheets.google.com/feeds/spreadsheets'
                '/private/full/key')

    def setUp(self):
        self.spreadsheet = atom.core.parse(
            SPREADSHEET, gdata.spreadsheets.data.Spreadsheet)

    def test_check_parsing(self):
        sheet = self.spreadsheet
        self.assertEqual('"BxAUSQUJRCp7ImBq"', sheet.etag)
        self.assertEqual(self.SELF_URL, sheet.id.text)
        self.assertEqual('2006-11-17T18:24:18.231Z', sheet.updated.text)
        self.assertEqual(
            'http://spreadsheets.google.com/feeds/worksheets'
            '/key/private/full',
            sheet.find_worksheets_feed())
        self.assertEqual(self.SELF_URL, sheet.find_self_link())

    def test_get_spreadsheet_key(self):
        self.assertEqual('key', self.spreadsheet.get_spreadsheet_key())
        # The key is derived from the entry id, so rewriting the id
        # changes what the (CamelCase alias) accessor reports.
        self.spreadsheet.id.text = '42'
        self.assertEqual('42', self.spreadsheet.GetSpreadsheetKey())
class WorksheetEntryTest(unittest.TestCase):
    """Verifies worksheet-feed parsing and worksheet-id extraction."""

    def setUp(self):
        self.worksheets = atom.core.parse(
            WORKSHEETS_FEED, gdata.spreadsheets.data.WorksheetsFeed)

    def test_check_parsing(self):
        entries = self.worksheets.entry
        self.assertEqual(1, len(entries))
        self.assertEqual(
            'http://spreadsheets.google.com/feeds/worksheets/0/private/full/1',
            entries[0].get_id())

    def test_get_worksheet_id(self):
        entry = self.worksheets.entry[0]
        self.assertEqual('1', entry.get_worksheet_id())
        # The id is the last path segment; extra slashes are tolerated.
        entry.id.text = '////spam'
        self.assertEqual('spam', entry.GetWorksheetId())
class ListEntryTest(unittest.TestCase):
    """Tests reading and writing gsx column values on list-feed rows."""

    def test_get_and_set_column_value(self):
        row = atom.core.parse(NEW_ROW, gdata.spreadsheets.data.ListEntry)
        row.set_value('hours', '3')
        row.set_value('name', 'Lizzy')
        expected = {'hours': '3', 'ipm': '1', 'items': '60', 'name': 'Lizzy'}
        for column, value in expected.items():
            self.assertEqual(row.get_value(column), value)
        self.assertEqual(row.get_value('x'), None)
        # Setting an unknown column creates it on the fly.
        row.set_value('x', 'Test')
        self.assertEqual(row.get_value('x'), 'Test')
        # The serialized entry must contain every column and value we set...
        row_xml = str(row)
        present = (':x', '>Test</', ':hours', '>3</', ':ipm', '>1</',
                   ':items', '>60</', ':name', '>Lizzy</')
        for fragment in present:
            self.assert_(row_xml.find(fragment) > -1)
        # ...and nothing we never touched.
        for fragment in (':zzz', '>foo</'):
            self.assertEqual(row_xml.find(fragment), -1)

    def test_check_parsing(self):
        row = atom.core.parse(NEW_ROW, gdata.spreadsheets.data.ListEntry)
        self.assertEqual(row.get_value('hours'), '1')
        self.assertEqual(row.get_value('ipm'), '1')
        self.assertEqual(row.get_value('items'), '60')
        self.assertEqual(row.get_value('name'), 'Elizabeth Bennet')
        self.assertEqual(row.get_value('none'), None)
        row = atom.core.parse(UPDATED_ROW, gdata.spreadsheets.data.ListEntry)
        self.assertEqual(row.get_value('hours'), '20')
        self.assertEqual(row.get_value('ipm'), '0.0033')
        self.assertEqual(row.get_value('items'), '4')
        self.assertEqual(row.get_value('name'), 'Bingley')
        self.assertEqual(row.get_value('x'), None)
        self.assertEqual(
            row.id.text, 'http://spreadsheets.google.com/feeds/list'
            '/k/w/private/full/rowId')
        self.assertEqual(row.updated.text, '2006-11-17T18:23:45.173Z')
        self.assertEqual(row.content.text, 'Hours: 10, Items: 2, IPM: 0.0033')
class RecordEntryTest(unittest.TestCase):
    """Checks field lookup on record entries by column index and by name."""

    def setUp(self):
        self.records = atom.core.parse(
            RECORDS_FEED, gdata.spreadsheets.data.RecordsFeed)

    def test_get_by_index(self):
        darcy, jane = self.records.entry[0], self.records.entry[1]
        birthday = darcy.field[0]
        self.assertEqual('B', birthday.index)
        self.assertEqual('Birthday', birthday.name)
        self.assertEqual('2/10/1785', birthday.text)
        self.assertEqual('2/10/1785', darcy.value_for_index('B'))
        # An index with no field raises FieldMissing.
        self.assertRaises(gdata.spreadsheets.data.FieldMissing,
                          darcy.ValueForIndex, 'E')
        self.assertEqual('Yes', jane.value_for_index('D'))

    def test_get_by_name(self):
        darcy, jane = self.records.entry[0], self.records.entry[1]
        self.assertEqual('2/10/1785', darcy.ValueForName('Birthday'))
        self.assertRaises(gdata.spreadsheets.data.FieldMissing,
                          darcy.value_for_name, 'Foo')
        self.assertEqual('22', jane.value_for_name('Age'))
class BatchRequestTest(unittest.TestCase):
    """Covers building batch cell-update request feeds."""

    CELLS_URL = ('https://spreadsheets.google.com/feeds/cells/skey/wid/'
                 'private/full')

    def setUp(self):
        self.feed = gdata.spreadsheets.data.build_batch_cells_update(
            'skey', 'wid')

    def test_builder(self):
        self.assertEqual(1, len(self.feed.link))
        self.assertEqual('edit', self.feed.link[0].rel)
        self.assertEqual(self.CELLS_URL + '/batch', self.feed.link[0].href)
        self.assertEqual(self.CELLS_URL, self.feed.id.text)
        self.assertEqual(0, len(self.feed.entry))

    def _check_entry(self, entry, row, col, value, batch_id):
        # Every generated entry addresses cell R<row>C<col> and carries an
        # 'update' batch operation with a sequentially assigned batch id.
        self.assertEqual('%s/R%sC%s' % (self.CELLS_URL, row, col),
                         entry.id.text)
        self.assertEqual(row, entry.cell.row)
        self.assertEqual(col, entry.cell.col)
        self.assertEqual(value, entry.cell.input_value)
        self.assertEqual('update', entry.batch_operation.type)
        self.assertEqual(batch_id, entry.batch_id.text)

    def test_set_cell(self):
        self.feed.add_set_cell(1, 2, 'value')
        self.assertEqual(1, len(self.feed.entry))
        self._check_entry(self.feed.entry[0], '1', '2', 'value', '0')
        self.feed.add_set_cell(3, 1, 'spam')
        self.assertEqual(2, len(self.feed.entry))
        self._check_entry(self.feed.entry[1], '3', '1', 'spam', '1')
class DataClassSanityTest(unittest.TestCase):
    """Runs the shared structural sanity checks over every data class."""

    def test_basic_element_structure(self):
        d = gdata.spreadsheets.data
        conf.check_data_classes(self, [
            d.Cell, d.ColCount,
            d.Field, d.Column,
            d.Data, d.Header,
            d.RowCount, d.Worksheet,
            d.Spreadsheet,
            d.SpreadsheetsFeed,
            d.WorksheetEntry,
            d.WorksheetsFeed,
            d.Table,
            d.TablesFeed,
            d.Record,
            d.RecordsFeed,
            d.ListRow,
            d.ListEntry,
            d.ListsFeed,
            d.CellEntry,
            d.CellsFeed])
def suite():
    """Build the suite of every test case defined in this module.

    Fix: WorksheetEntryTest and BatchRequestTest were missing from the
    original list, so they silently never ran under the suite runner.
    """
    return conf.build_suite([SpreadsheetEntryTest, WorksheetEntryTest,
                             ListEntryTest, RecordEntryTest,
                             BatchRequestTest, DataClassSanityTest])
# Running the module directly uses unittest's own discovery/entry point
# rather than the suite() helper above.
if __name__ == '__main__':
    unittest.main()
| Python |
#!/usr/bin/env python
#
# Copyright (C) 2008, 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module is used for version 2 of the Google Data APIs.
__author__ = 'e.bidelman@google.com (Eric Bidelman)'
import os
import unittest
import atom.data
import gdata.client
import gdata.data
import gdata.gauth
import gdata.docs.client
import gdata.docs.data
import gdata.test_config as conf
# Config options consumed by ResumableUploadTestCase; values are supplied
# interactively or via the test-config mechanism at run time.
TEST_FILE_LOCATION_OPTION = conf.Option(
    'file',
    'Please enter the full path to a test file to upload',
    description=('This test file will be uploaded to DocList which. An example '
                 'file can be found in tests/gdata_tests/docs/test.doc'))
CONTENT_TYPE_OPTION = conf.Option(
    'contenttype',
    'Please enter the mimetype of the file',
    description='The content type should match that of the upload file.')
# Register both with the shared registry so conf.options.get_value('file')
# and get_value('contenttype') resolve during the tests.
conf.options.register_option(TEST_FILE_LOCATION_OPTION)
conf.options.register_option(CONTENT_TYPE_OPTION)
class ResumableUploadTestCase(unittest.TestCase):
    """Exercises gdata.client.ResumableUploader against the live DocList API.

    Test bodies are skipped unless the 'runlive' option is 'true'; setUp
    likewise only builds a client and opens the upload file in live mode.
    """

    def setUp(self):
        self.client = None
        if conf.options.get_value('runlive') == 'true':
            self.client = gdata.docs.client.DocsClient(
                source='ResumableUploadTest')
            if conf.options.get_value('ssl') == 'true':
                self.client.ssl = True
            # Fix: open in binary mode. The uploader streams raw bytes and
            # sizes the session with os.path.getsize(); text mode would
            # mangle binary test files and mis-count bytes on platforms
            # with \r\n newline translation.
            self.f = open(conf.options.get_value('file'), 'rb')
            self.content_type = conf.options.get_value('contenttype')
            conf.configure_client(
                self.client, 'ResumableUploadTest', self.client.auth_service)

    def tearDown(self):
        conf.close_client(self.client)
        # Fix: self.f only exists when the live setup ran; the original
        # unconditional close raised AttributeError in non-live runs.
        if getattr(self, 'f', None) is not None:
            self.f.close()

    def testUploadEntireDocumentAndUpdate(self):
        """Uploads the whole file via UploadFile and verifies the entry."""
        if not conf.options.get_value('runlive') == 'true':
            return
        # Either load the recording or prepare to make a live request.
        conf.configure_cache(self.client, 'testUploadDocument')
        uploader = gdata.client.ResumableUploader(
            self.client, self.f, self.content_type,
            os.path.getsize(self.f.name),
            chunk_size=20000,  # 20000 bytes.
            desired_class=gdata.docs.data.DocsEntry)
        e = gdata.docs.data.DocsEntry(
            title=atom.data.Title(text='MyResumableTitleEntireFile'))
        e.category.append(gdata.docs.data.make_kind_category('document'))
        e.writers_can_invite = gdata.docs.data.WritersCanInvite(value='false')
        entry = uploader.UploadFile(
            '/feeds/upload/create-session/default/private/full', entry=e)
        # Verify upload has really completed.
        self.assertEqual(uploader.QueryUploadStatus(), True)
        self.assert_(isinstance(entry, gdata.docs.data.DocsEntry))
        self.assertEqual(entry.title.text, 'MyResumableTitleEntireFile')
        self.assertEqual(entry.GetDocumentType(), 'document')
        self.assertEqual(entry.writers_can_invite.value, 'false')
        self.assertEqual(int(entry.quota_bytes_used.text), 0)
        self.client.Delete(entry, force=True)

    def testUploadDocumentInChunks(self):
        """Drives the chunk protocol manually via _InitSession/UploadChunk."""
        if not conf.options.get_value('runlive') == 'true':
            return
        # Either load the recording or prepare to make a live request.
        conf.configure_cache(self.client, 'testUploadDocumentInChunks')
        uploader = gdata.client.ResumableUploader(
            self.client, self.f, self.content_type,
            os.path.getsize(self.f.name),
            desired_class=gdata.docs.data.DocsEntry)
        uploader._InitSession(
            '/feeds/upload/create-session/default/private/full',
            headers={'Slug': 'MyManualChunksNoAtomTitle'})
        start_byte = 0
        entry = None
        # UploadChunk returns None until the final chunk completes the doc.
        while not entry:
            entry = uploader.UploadChunk(
                start_byte, uploader.file_handle.read(uploader.chunk_size))
            start_byte += uploader.chunk_size
        # Verify upload has really completed.
        self.assertEqual(uploader.QueryUploadStatus(), True)
        self.assert_(isinstance(entry, gdata.docs.data.DocsEntry))
        self.assertEqual(entry.title.text, 'MyManualChunksNoAtomTitle')
        self.assertEqual(entry.GetDocumentType(), 'document')
        self.client.Delete(entry, force=True)
def suite():
    """Assemble this module's test suite for the shared conf runner."""
    cases = [ResumableUploadTestCase]
    return conf.build_suite(cases)
# Use TextTestRunner with the conf-built suite (not unittest discovery)
# so only the registered cases run.
if __name__ == '__main__':
    unittest.TextTestRunner().run(suite())
| Python |
#!/usr/bin/python
__author__ = "James Sams <sams.james@gmail.com>"
import unittest
from gdata import test_data
import gdata.books
import atom
class BookEntryTest(unittest.TestCase):
    """Parses Book Search entry/feed fixtures and checks field extraction.

    Modernized to assertEqual/assertTrue: the assertEquals and assert_
    aliases are deprecated and were removed in Python 3.12.
    """

    def testBookEntryFromString(self):
        entry = gdata.books.Book.FromString(test_data.BOOK_ENTRY)
        self.assertTrue(isinstance(entry, gdata.books.Book))
        self.assertEqual([x.text for x in entry.creator], ['John Rawls'])
        self.assertEqual(entry.date.text, '1999')
        self.assertEqual(entry.format.text, '538 pages')
        self.assertEqual([x.text for x in entry.identifier],
                         ['b7GZr5Btp30C', 'ISBN:0198250541',
                          'ISBN:9780198250548'])
        self.assertEqual([x.text for x in entry.publisher],
                         ['Oxford University Press'])
        self.assertEqual(entry.subject, None)
        self.assertEqual([x.text for x in entry.dc_title],
                         ['A theory of justice'])
        self.assertEqual(entry.viewability.value,
                         'http://schemas.google.com/books/2008#view_partial')
        self.assertEqual(entry.embeddability.value,
                         'http://schemas.google.com/books/2008#embeddable')
        self.assertEqual(entry.review, None)
        # The fixture carries min/max/average but no explicit value.
        self.assertEqual([getattr(entry.rating, x) for x in
                          ("min", "max", "average", "value")],
                         ['1', '5', '4.00', None])
        self.assertEqual(entry.GetThumbnailLink().href,
                         'http://bks0.books.google.com/books?id=b7GZr5Btp30C&printsec=frontcover&img=1&zoom=5&sig=ACfU3U121bWZsbjBfVwVRSK2o982jJTd1w&source=gbs_gdata')
        self.assertEqual(entry.GetInfoLink().href,
                         'http://books.google.com/books?id=b7GZr5Btp30C&ie=ISO-8859-1&source=gbs_gdata')
        self.assertEqual(entry.GetPreviewLink(), None)
        self.assertEqual(entry.GetAnnotationLink().href,
                         'http://www.google.com/books/feeds/users/me/volumes')
        self.assertEqual(entry.get_google_id(), 'b7GZr5Btp30C')

    def testBookFeedFromString(self):
        feed = gdata.books.BookFeed.FromString(test_data.BOOK_FEED)
        self.assertTrue(isinstance(feed, gdata.books.BookFeed))
        self.assertEqual(len(feed.entry), 1)
        self.assertTrue(isinstance(feed.entry[0], gdata.books.Book))

    def testBookEntryToDict(self):
        book = gdata.books.Book()
        book.dc_title.append(gdata.books.Title(text='a'))
        book.dc_title.append(gdata.books.Title(text='b'))
        book.dc_title.append(gdata.books.Title(text='c'))
        # Multiple dc:title elements collapse into one space-joined string.
        self.assertEqual(book.to_dict()['title'], 'a b c')
# Standard unittest entry point for direct execution.
if __name__ == "__main__":
    unittest.main()
| Python |
#!/usr/bin/python
#
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'api.eric@google.com (Eric Bidelman)'
import getpass
import unittest
from gdata import test_data
import gdata.health
import gdata.health.service
# Credentials are filled in interactively by the __main__ block at the
# bottom of this module before unittest.main() runs; the live tests below
# log in with these and may modify the account's Health data.
username = ''
password = ''
class HealthQueryProfileListTest(unittest.TestCase):
    """Fetches the profile list feed from the live Health service."""

    def setUp(self):
        self.health = gdata.health.service.HealthService()
        self.health.ClientLogin(username, password,
                                source='Health Client Unit Tests')
        self.profile_list_feed = self.health.GetProfileListFeed()

    def testGetProfileListFeed(self):
        feed = self.profile_list_feed
        list_url = 'https://www.google.com/health/feeds/profile/list'
        self.assert_(isinstance(feed, gdata.health.ProfileListFeed))
        self.assertEqual(feed.id.text, list_url)
        first = feed.entry[0]
        self.assert_(isinstance(first, gdata.health.ProfileListEntry))
        self.assert_(first.GetProfileId() is not None)
        self.assert_(first.GetProfileName() is not None)
        # Re-fetch with an explicit query object; results must agree.
        query = gdata.health.service.HealthProfileListQuery()
        requeried = self.health.GetProfileListFeed(query)
        self.assertEqual(first.GetProfileId(),
                         requeried.entry[0].GetProfileId())
        self.assertEqual(requeried.id.text, list_url)
class H9QueryProfileListTest(unittest.TestCase):
    """Fetches the profile list feed from the H9 sandbox service."""

    def setUp(self):
        self.h9 = gdata.health.service.HealthService(use_h9_sandbox=True)
        self.h9.ClientLogin(username, password, source='H9 Client Unit Tests')
        self.profile_list_feed = self.h9.GetProfileListFeed()

    def testGetProfileListFeed(self):
        feed = self.profile_list_feed
        list_url = 'https://www.google.com/h9/feeds/profile/list'
        self.assert_(isinstance(feed, gdata.health.ProfileListFeed))
        self.assertEqual(feed.id.text, list_url)
        first = feed.entry[0]
        self.assert_(isinstance(first, gdata.health.ProfileListEntry))
        self.assert_(first.GetProfileId() is not None)
        self.assert_(first.GetProfileName() is not None)
        # Re-fetch with an explicit query object; results must agree.
        query = gdata.health.service.HealthProfileListQuery()
        requeried = self.h9.GetProfileListFeed(query)
        self.assertEqual(first.GetProfileId(),
                         requeried.entry[0].GetProfileId())
        self.assertEqual(requeried.id.text, list_url)
class HealthQueryProfileTest(unittest.TestCase):
    """Reads profile feeds from the live Health service with varied queries."""

    def setUp(self):
        self.health = gdata.health.service.HealthService()
        self.health.ClientLogin(username, password,
                                source='Health Client Unit Tests')
        self.profile_list_feed = self.health.GetProfileListFeed()
        self.profile_id = self.profile_list_feed.entry[0].GetProfileId()

    def testGetProfileFeed(self):
        feed = self.health.GetProfileFeed(profile_id=self.profile_id)
        self.assert_(isinstance(feed, gdata.health.ProfileFeed))
        self.assert_(isinstance(feed.entry[0].ccr, gdata.health.Ccr))

    def testGetProfileFeedByQuery(self):
        query = gdata.health.service.HealthProfileQuery(
            projection='ui', profile_id=self.profile_id)
        feed = self.health.GetProfileFeed(query=query)
        self.assert_(isinstance(feed, gdata.health.ProfileFeed))
        self.assert_(feed.entry[0].ccr is not None)

    def testGetProfileDigestFeed(self):
        # digest=true collapses the whole profile into a single entry.
        query = gdata.health.service.HealthProfileQuery(
            projection='ui', profile_id=self.profile_id,
            params={'digest': 'true'})
        feed = self.health.GetProfileFeed(query=query)
        self.assertEqual(len(feed.entry), 1)

    def testGetMedicationsAndConditions(self):
        query = gdata.health.service.HealthProfileQuery(
            projection='ui', profile_id=self.profile_id,
            params={'digest': 'true'}, categories=['medication|condition'])
        feed = self.health.GetProfileFeed(query=query)
        self.assertEqual(len(feed.entry), 1)
        ccr = feed.entry[0].ccr
        if ccr.GetMedications() is not None:
            self.assert_(ccr.GetMedications()[0] is not None)
        # Fix: guard conditions the same way as medications -- the original
        # indexed GetConditions()[0] without a None check, raising TypeError
        # instead of failing cleanly when the profile has no conditions.
        if ccr.GetConditions() is not None:
            self.assert_(ccr.GetConditions()[0] is not None)
        self.assert_(ccr.GetAllergies() is None)
        self.assert_(ccr.GetAlerts() is None)
        self.assert_(ccr.GetResults() is None)
class H9QueryProfileTest(unittest.TestCase):
    """Reads a profile feed from the H9 sandbox directly and via a query."""

    def setUp(self):
        self.h9 = gdata.health.service.HealthService(use_h9_sandbox=True)
        self.h9.ClientLogin(username, password, source='H9 Client Unit Tests')
        self.profile_list_feed = self.h9.GetProfileListFeed()
        self.profile_id = self.profile_list_feed.entry[0].GetProfileId()

    def testGetProfileFeed(self):
        feed = self.h9.GetProfileFeed(profile_id=self.profile_id)
        self.assert_(isinstance(feed, gdata.health.ProfileFeed))
        self.assert_(feed.entry[0].ccr is not None)

    def testGetProfileFeedByQuery(self):
        profile_query = gdata.health.service.HealthProfileQuery(
            service='h9', projection='ui', profile_id=self.profile_id)
        feed = self.h9.GetProfileFeed(query=profile_query)
        self.assert_(isinstance(feed, gdata.health.ProfileFeed))
        self.assert_(feed.entry[0].ccr is not None)
class HealthNoticeTest(unittest.TestCase):
    """Sends a CCR notice to a Health profile and checks the created entry."""

    def setUp(self):
        self.health = gdata.health.service.HealthService()
        self.health.ClientLogin(username, password,
                                source='Health Client Unit Tests')
        self.profile_list_feed = self.health.GetProfileListFeed()
        self.profile_id = self.profile_list_feed.entry[0].GetProfileId()

    def testSendNotice(self):
        subject_line = 'subject line'
        body = 'Notice <b>body</b>.'
        ccr_xml = test_data.HEALTH_CCR_NOTICE_PAYLOAD
        created_entry = self.health.SendNotice(subject_line,
                                               body,
                                               ccr=ccr_xml,
                                               profile_id=self.profile_id)
        self.assertEqual(subject_line, created_entry.title.text)
        self.assertEqual(body, created_entry.content.text)
        self.assertEqual('html', created_entry.content.type)
        # Walk the CCR payload: first problem -> description -> text node.
        first_problem = created_entry.ccr.GetProblems()[0]
        description = first_problem.FindChildren('Description')[0]
        text_node = description.FindChildren('Text')[0]
        self.assertEqual('Aortic valve disorders', text_node.text)
class H9NoticeTest(unittest.TestCase):
    """Sends a CCR notice to an H9 sandbox profile and checks the result."""

    def setUp(self):
        self.h9 = gdata.health.service.HealthService(use_h9_sandbox=True)
        self.h9.ClientLogin(username, password, source='H9 Client Unit Tests')
        self.profile_list_feed = self.h9.GetProfileListFeed()
        self.profile_id = self.profile_list_feed.entry[0].GetProfileId()

    def testSendNotice(self):
        subject_line = 'subject line'
        body = 'Notice <b>body</b>.'
        ccr_xml = test_data.HEALTH_CCR_NOTICE_PAYLOAD
        created_entry = self.h9.SendNotice(subject_line, body, ccr=ccr_xml,
                                           profile_id=self.profile_id)
        self.assertEqual(subject_line, created_entry.title.text)
        self.assertEqual(body, created_entry.content.text)
        self.assertEqual('html', created_entry.content.type)
        # Walk the CCR payload: first problem -> description -> text node.
        first_problem = created_entry.ccr.GetProblems()[0]
        description = first_problem.FindChildren('Description')[0]
        text_node = description.FindChildren('Text')[0]
        self.assertEqual('Aortic valve disorders', text_node.text)
if __name__ == '__main__':
    # Warn before running: these tests hit live Google Health servers and can
    # modify or delete account data.
    print ('Health API Tests\nNOTE: Please run these tests only with a test '
           'account. The tests may delete or update your data.')
    # Credentials are read interactively and consumed as module globals by the
    # test classes' setUp methods above.
    username = raw_input('Please enter your username: ')
    password = getpass.getpass()
    unittest.main()
| Python |
#!/usr/bin/env python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module is used for version 2 of the Google Data APIs.
# These tests attempt to connect to Google servers.
__author__ = 'j.s@google.com (Jeff Scudder)'
import os
import unittest
import gdata.client
import atom.http_core
import atom.mock_http_core
import atom.core
import gdata.data
import gdata.core
# TODO: switch to using v2 atom data once it is available.
import atom
import gdata.test_config as conf
# The Blogger tests need a target blog; register the blog-id command-line
# option so conf.options.get_value('blogid') can resolve it below.
conf.options.register_option(conf.BLOG_ID_OPTION)
class BloggerTest(unittest.TestCase):
    """Live create/update/delete tests against the Blogger GData feeds.

    Every network-touching test is gated on the --runlive option and uses
    conf.configure_cache so HTTP interactions can be recorded and replayed.
    """

    def setUp(self):
        # Only construct and authenticate a client when running live.
        self.client = None
        if conf.options.get_value('runlive') == 'true':
            self.client = gdata.client.GDClient()
            conf.configure_client(self.client, 'BloggerTest', 'blogger')

    def tearDown(self):
        conf.close_client(self.client)

    def test_create_update_delete(self):
        """Posts a v1 Atom entry, edits its title, then deletes it."""
        if not conf.options.get_value('runlive') == 'true':
            return
        # Either load the recording or prepare to make a live request.
        conf.configure_cache(self.client, 'test_create_update_delete')
        # Build a v1 atom entry and send it as the raw request body.
        blog_post = atom.Entry(
            title=atom.Title(text='test from python BloggerTest'),
            content=atom.Content(text='This is only a test.'))
        http_request = atom.http_core.HttpRequest()
        http_request.add_body_part(str(blog_post), 'application/atom+xml')

        def entry_from_string_wrapper(response):
            # Check response headers before parsing the entry body.
            self.assert_(response.getheader('content-type') is not None)
            self.assert_(response.getheader('gdata-version') is not None)
            return atom.EntryFromString(response.read())

        entry = self.client.request('POST',
            'http://www.blogger.com/feeds/%s/posts/default' % (
                conf.options.get_value('blogid')),
            converter=entry_from_string_wrapper, http_request=http_request)
        self.assertEqual(entry.title.text, 'test from python BloggerTest')
        self.assertEqual(entry.content.text, 'This is only a test.')
        # Edit the test entry.
        edit_link = None
        for link in entry.link:
            # Find the edit link for this entry.
            if link.rel == 'edit':
                edit_link = link.href
        entry.title.text = 'Edited'
        http_request = atom.http_core.HttpRequest()
        http_request.add_body_part(str(entry), 'application/atom+xml')
        edited_entry = self.client.request('PUT', edit_link,
            converter=entry_from_string_wrapper, http_request=http_request)
        self.assertEqual(edited_entry.title.text, 'Edited')
        self.assertEqual(edited_entry.content.text, entry.content.text)
        # Delete the test entry from the blog.
        edit_link = None
        for link in edited_entry.link:
            if link.rel == 'edit':
                edit_link = link.href
        response = self.client.request('DELETE', edit_link)
        self.assertEqual(response.status, 200)

    def test_use_version_two(self):
        """Posts a hand-built v2 entry and verifies tags, links, and ETags."""
        if not conf.options.get_value('runlive') == 'true':
            return
        conf.configure_cache(self.client, 'test_use_version_two')
        # Use version 2 of the Blogger API.
        self.client.api_version = '2'
        # Create a v2 blog post entry to post on the blog.
        # create_element, ATOM, and TAG are module-level helpers defined
        # later in this file.
        entry = create_element('entry')
        entry._other_elements.append(
            create_element('title', text='Marriage!',
                           attributes={'type': 'text'}))
        entry._other_elements.append(
            create_element('content', attributes={'type': 'text'},
                           text='Mr. Darcy has proposed marriage to me!'))
        entry._other_elements.append(
            create_element('category',
                           attributes={'scheme': TAG, 'term': 'marriage'}))
        entry._other_elements.append(
            create_element('category',
                           attributes={'scheme': TAG, 'term': 'Mr. Darcy'}))
        http_request = atom.http_core.HttpRequest()
        http_request.add_body_part(entry.to_string(), 'application/atom+xml')
        posted = self.client.request('POST',
            'http://www.blogger.com/feeds/%s/posts/default' % (
                conf.options.get_value('blogid')),
            converter=element_from_string, http_request=http_request)
        # Verify that the blog post content is correct.
        self.assertEqual(posted.get_elements('title', ATOM)[0].text, 'Marriage!')
        # TODO: uncomment once server bug is fixed.
        #self.assertEqual(posted.get_elements('content', ATOM)[0].text,
        #                 'Mr. Darcy has proposed marriage to me!')
        # Both category terms should have round-tripped on the posted entry.
        found_tags = [False, False]
        categories = posted.get_elements('category', ATOM)
        self.assertEqual(len(categories), 2)
        for category in categories:
            if category.get_attributes('term')[0].value == 'marriage':
                found_tags[0] = True
            elif category.get_attributes('term')[0].value == 'Mr. Darcy':
                found_tags[1] = True
        self.assert_(found_tags[0])
        self.assert_(found_tags[1])
        # Find the blog post on the blog via its self/edit links.
        self_link = None
        edit_link = None
        for link in posted.get_elements('link', ATOM):
            if link.get_attributes('rel')[0].value == 'self':
                self_link = link.get_attributes('href')[0].value
            elif link.get_attributes('rel')[0].value == 'edit':
                edit_link = link.get_attributes('href')[0].value
        self.assert_(self_link is not None)
        self.assert_(edit_link is not None)
        # NOTE(review): 'queried' is fetched but never inspected; the asserts
        # on its content and etag are still TODO below.
        queried = self.client.request('GET', self_link,
            converter=element_from_string)
        # TODO: add additional asserts to check content and etags.
        # Test queries using ETags.
        entry = self.client.get_entry(self_link)
        self.assert_(entry.etag is not None)
        self.assertRaises(gdata.client.NotModified, self.client.get_entry,
                          self_link, etag=entry.etag)
        # Delete the test blog post.
        self.client.request('DELETE', edit_link)
class ContactsTest(unittest.TestCase):
    """Low-level CRUD test against the Contacts GData feed using raw XML."""

    def setUp(self):
        # Only construct and authenticate a client when running live.
        self.client = None
        if conf.options.get_value('runlive') == 'true':
            self.client = gdata.client.GDClient()
            conf.configure_client(self.client, 'ContactsTest', 'cp')

    def tearDown(self):
        conf.close_client(self.client)

    # NOTE(review): original comment read "Run this test and profiles fails";
    # meaning unclear -- possibly this test interferes with the profiles
    # tests. Confirm before relying on it.
    def test_crud_version_two(self):
        """Posts a hand-built v2 contact entry, then deletes it with If-Match."""
        if not conf.options.get_value('runlive') == 'true':
            return
        conf.configure_cache(self.client, 'test_crud_version_two')
        self.client.api_version = '2'
        # Hand-build a contact entry: an atom title plus a gd:email element.
        entry = create_element('entry')
        entry._other_elements.append(
            create_element('title', ATOM, 'Jeff', {'type': 'text'}))
        entry._other_elements.append(
            create_element('email', GD,
                attributes={'address': 'j.s@google.com', 'rel': WORK_REL}))
        http_request = atom.http_core.HttpRequest()
        http_request.add_body_part(entry.to_string(), 'application/atom+xml')
        posted = self.client.request('POST',
            'http://www.google.com/m8/feeds/contacts/default/full',
            converter=element_from_string, http_request=http_request)
        # Locate the self and edit links in the server's response entry.
        self_link = None
        edit_link = None
        for link in posted.get_elements('link', ATOM):
            if link.get_attributes('rel')[0].value == 'self':
                self_link = link.get_attributes('href')[0].value
            elif link.get_attributes('rel')[0].value == 'edit':
                edit_link = link.get_attributes('href')[0].value
        self.assert_(self_link is not None)
        self.assert_(edit_link is not None)
        # v2 responses carry a gd:etag attribute on the entry.
        etag = posted.get_attributes('etag')[0].value
        self.assert_(etag is not None)
        self.assert_(len(etag) > 0)
        # Delete the test contact, guarding against concurrent edits.
        http_request = atom.http_core.HttpRequest()
        http_request.headers['If-Match'] = etag
        self.client.request('DELETE', edit_link, http_request=http_request)
class VersionTwoClientContactsTest(unittest.TestCase):
    """CRUD tests using the higher-level GDClient post/update/delete helpers."""

    def setUp(self):
        self.client = None
        if conf.options.get_value('runlive') == 'true':
            self.client = gdata.client.GDClient()
            self.client.api_version = '2'
            conf.configure_client(self.client, 'VersionTwoClientContactsTest',
                                  'cp')
        # Save the current proxy setting so tearDown can restore it after the
        # proxy test (below) has modified the environment.
        self.old_proxy = os.environ.get('https_proxy')

    def tearDown(self):
        # Restore https_proxy to exactly its prior state (set or unset).
        if self.old_proxy:
            os.environ['https_proxy'] = self.old_proxy
        elif 'https_proxy' in os.environ:
            del os.environ['https_proxy']
        conf.close_client(self.client)

    def test_version_two_client(self):
        """Creates, renames, and deletes a contact via post/update/delete."""
        if not conf.options.get_value('runlive') == 'true':
            return
        conf.configure_cache(self.client, 'test_version_two_client')
        entry = gdata.data.GDEntry()
        entry._other_elements.append(
            create_element('title', ATOM, 'Test', {'type': 'text'}))
        entry._other_elements.append(
            create_element('email', GD,
                attributes={'address': 'test@example.com', 'rel': WORK_REL}))
        # Create the test contact.
        posted = self.client.post(entry,
            'https://www.google.com/m8/feeds/contacts/default/full')
        self.assert_(isinstance(posted, gdata.data.GDEntry))
        self.assertEqual(posted.get_elements('title')[0].text, 'Test')
        self.assertEqual(posted.get_elements('email')[0].get_attributes(
            'address')[0].value, 'test@example.com')
        # Rename the contact and push the update.
        posted.get_elements('title')[0].text = 'Doug'
        edited = self.client.update(posted)
        self.assert_(isinstance(edited, gdata.data.GDEntry))
        self.assertEqual(edited.get_elements('title')[0].text, 'Doug')
        self.assertEqual(edited.get_elements('email')[0].get_attributes(
            'address')[0].value, 'test@example.com')
        # Delete the test contact.
        self.client.delete(edited)

    def notest_crud_over_https_proxy(self):
        # Disabled by its 'notest' prefix, so unittest will not collect it.
        import urllib
        # Hard-coded address of a once-available open HTTP proxy.
        PROXY_ADDR = '98.192.125.23'
        try:
            response = urllib.urlopen('http://' + PROXY_ADDR)
        except IOError:
            return
        # Only bother running the test if the proxy is up
        if response.getcode() == 200:
            os.environ['https_proxy'] = PROXY_ADDR
            # Perform the CRUD test above, this time over a proxy.
            self.test_version_two_client()
class JsoncRequestTest(unittest.TestCase):
    """Fetches a public YouTube feed in JSON-C format and parses it."""

    def setUp(self):
        self.client = gdata.client.GDClient()

    def test_get_jsonc(self):
        """A JSON-C feed parses via parse_json_file and contains items."""
        # Fix: this test previously made a live, unauthenticated HTTP request
        # unconditionally. Honor the suite-wide --runlive flag like every
        # other test in this module.
        if not conf.options.get_value('runlive') == 'true':
            return
        jsonc = self.client.get_feed(
            'http://gdata.youtube.com/feeds/api/videos?q=surfing&v=2&alt=jsonc',
            converter=gdata.core.parse_json_file)
        self.assertTrue(len(jsonc.data.items) > 0)
# Utility methods and shared constants.
# The Atom XML namespace.
ATOM = 'http://www.w3.org/2005/Atom'
# URL used as the scheme for a blog post tag.
TAG = 'http://www.blogger.com/atom/ns#'
# Namespace for Google Data API elements.
GD = 'http://schemas.google.com/g/2005'
# gd rel value identifying a work email address.
WORK_REL = 'http://schemas.google.com/g/2005#work'
def create_element(tag, namespace=ATOM, text=None, attributes=None):
    """Build a generic XmlElement with the given namespace-qualified tag.

    Optionally sets its text content and copies in a dict of attributes.
    The caller's attributes dict is copied so later mutation is isolated.
    """
    qualified_name = '{%s}%s' % (namespace, tag)
    element = atom.core.XmlElement()
    element._qname = qualified_name
    if text is not None:
        element.text = text
    if attributes is not None:
        element._other_attributes = attributes.copy()
    return element
def element_from_string(response):
    """Converter: parse an HTTP response body into a generic XmlElement."""
    xml_text = response.read()
    return atom.core.xml_element_from_string(xml_text, atom.core.XmlElement)
def suite():
    """Assemble this module's test cases into a single suite."""
    cases = [BloggerTest, ContactsTest, VersionTwoClientContactsTest,
             JsoncRequestTest]
    return conf.build_suite(cases)


if __name__ == '__main__':
    runner = unittest.TextTestRunner()
    runner.run(suite())
| Python |
#!/usr/bin/python
#
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module is used for version 2 of the Google Data APIs.
# These tests attempt to connect to Google servers.
__author__ = 'Vic Fryzel <vf@google.com>'
import unittest
import gdata.client
import gdata.data
import gdata.gauth
import gdata.calendar_resource.client
import gdata.calendar_resource.data
import gdata.test_config as conf
# Resource feeds are scoped to a Google Apps domain; register the apps-domain
# command-line option so conf.options.get_value('appsdomain') can resolve it.
conf.options.register_option(conf.APPS_DOMAIN_OPTION)
class CalendarResourceClientTest(unittest.TestCase):
    """Tests for CalendarResourceClient configuration, URI building, and CRUD."""

    def setUp(self):
        # Default to a dummy domain; when running live, replace the client
        # with one bound to the real Apps domain and fully configured.
        self.client = gdata.calendar_resource.client.CalendarResourceClient(
            domain='example.com')
        if conf.options.get_value('runlive') == 'true':
            self.client = gdata.calendar_resource.client.CalendarResourceClient(
                domain=conf.options.get_value('appsdomain'))
            if conf.options.get_value('ssl') == 'true':
                self.client.ssl = True
            conf.configure_client(self.client, 'CalendarResourceClientTest',
                self.client.auth_service, True)

    def tearDown(self):
        conf.close_client(self.client)

    def testClientConfiguration(self):
        """Verifies host, API version, auth service, scopes, and domain."""
        self.assertEqual('apps-apis.google.com', self.client.host)
        self.assertEqual('2.0', self.client.api_version)
        self.assertEqual('apps', self.client.auth_service)
        self.assertEqual(
            ('http://www.google.com/a/feeds/',
             'https://www.google.com/a/feeds/',
             'http://apps-apis.google.com/a/feeds/',
             'https://apps-apis.google.com/a/feeds/'), self.client.auth_scopes)
        if conf.options.get_value('runlive') == 'true':
            self.assertEqual(self.client.domain,
                             conf.options.get_value('appsdomain'))
        else:
            self.assertEqual(self.client.domain, 'example.com')

    def testMakeResourceFeedUri(self):
        """URI builder: base feed, by id, with params, and both combined."""
        self.assertEqual('/a/feeds/calendar/resource/2.0/%s/' % self.client.domain,
            self.client.MakeResourceFeedUri())
        self.assertEqual('/a/feeds/calendar/resource/2.0/%s/CR-NYC-14-12-BR'
            % self.client.domain,
            self.client.MakeResourceFeedUri(resource_id='CR-NYC-14-12-BR'))
        self.assertEqual('/a/feeds/calendar/resource/2.0/%s/?test=1'
            % self.client.domain,
            self.client.MakeResourceFeedUri(params={'test': 1}))
        self.assertEqual('/a/feeds/calendar/resource/2.0/%s/CR-NYC-14-12-BR?test=1'
            % self.client.domain,
            self.client.MakeResourceFeedUri(resource_id='CR-NYC-14-12-BR',
                params={'test': 1}))

    def testCreateRetrieveUpdateDelete(self):
        """Live CRUD cycle for a calendar resource."""
        if not conf.options.get_value('runlive') == 'true':
            return
        # Either load the recording or prepare to make a live request.
        # NOTE(review): the cache name below does not match this method's
        # name ('testCreateUpdateDelete' vs 'testCreateRetrieveUpdateDelete');
        # confirm whether an older recording name is being reused on purpose.
        conf.configure_cache(self.client, 'testCreateUpdateDelete')
        try:
            new_entry = self.client.CreateResource(
                'CR-NYC-14-12-BR', 'Boardroom',
                ('This conference room is in New York City, building 14, floor 12, '
                 'Boardroom'), 'CR')
        except Exception, e:
            print e
            self.client.delete_resource('CR-NYC-14-12-BR')
            # If the test failed to run to completion
            # the resource may already exist; delete it and retry the create.
            new_entry = self.client.CreateResource(
                'CR-NYC-14-12-BR', 'Boardroom',
                ('This conference room is in New York City, building 14, floor 12, '
                 'Boardroom'), 'CR')
        self.assert_(isinstance(new_entry,
            gdata.calendar_resource.data.CalendarResourceEntry))
        self.assertEqual(new_entry.resource_id, 'CR-NYC-14-12-BR')
        self.assertEqual(new_entry.resource_common_name, 'Boardroom')
        self.assertEqual(new_entry.resource_description,
            ('This conference room is in New York City, building 14, floor 12, '
             'Boardroom'))
        self.assertEqual(new_entry.resource_type, 'CR')
        # Retrieve the resource just created and spot-check its fields.
        fetched_entry = self.client.get_resource(resource_id='CR-NYC-14-12-BR')
        self.assert_(isinstance(fetched_entry,
            gdata.calendar_resource.data.CalendarResourceEntry))
        self.assertEqual(fetched_entry.resource_id, 'CR-NYC-14-12-BR')
        self.assertEqual(fetched_entry.resource_common_name, 'Boardroom')
        self.assertEqual(fetched_entry.resource_description,
            ('This conference room is in New York City, building 14, floor 12, '
             'Boardroom'))
        self.assertEqual(fetched_entry.resource_type, 'CR')
        # Mutate every field and push the update.
        new_entry.resource_id = 'CR-MTV-14-12-BR'
        new_entry.resource_common_name = 'Executive Boardroom'
        new_entry.resource_description = 'This conference room is in Mountain View'
        new_entry.resource_type = 'BR'
        updated_entry = self.client.update(new_entry)
        self.assert_(isinstance(updated_entry,
            gdata.calendar_resource.data.CalendarResourceEntry))
        self.assertEqual(updated_entry.resource_id, 'CR-MTV-14-12-BR')
        self.assertEqual(updated_entry.resource_common_name, 'Executive Boardroom')
        self.assertEqual(updated_entry.resource_description,
            'This conference room is in Mountain View')
        self.assertEqual(updated_entry.resource_type, 'BR')
        # NOTE(review): cleanup deletes the ORIGINAL id even though
        # resource_id was just changed to 'CR-MTV-14-12-BR' -- verify which
        # id the server keys the resource on after the update.
        self.client.delete_resource('CR-NYC-14-12-BR')
def suite():
    """Assemble this module's test cases into one suite."""
    cases = [CalendarResourceClientTest]
    return conf.build_suite(cases)


if __name__ == '__main__':
    runner = unittest.TextTestRunner()
    runner.run(suite())
| Python |
#!/usr/bin/python
#
# Copyright (C) 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'Vic Fryzel <vf@google.com>'
import unittest
import atom.core
from gdata import test_data
import gdata.calendar_resource.data
import gdata.test_config as conf
class CalendarResourceEntryTest(unittest.TestCase):
    """Parsing tests for calendar resource entry and feed XML fixtures."""

    def setUp(self):
        # Parse the canned XML fixtures once per test.
        self.entry = atom.core.parse(
            test_data.CALENDAR_RESOURCE_ENTRY,
            gdata.calendar_resource.data.CalendarResourceEntry)
        self.feed = atom.core.parse(
            test_data.CALENDAR_RESOURCES_FEED,
            gdata.calendar_resource.data.CalendarResourceFeed)

    def testCalendarResourceEntryFromString(self):
        """A single entry exposes its id, name, description, and type."""
        entry = self.entry
        self.assertTrue(isinstance(
            entry, gdata.calendar_resource.data.CalendarResourceEntry))
        self.assertEqual(entry.resource_id, 'CR-NYC-14-12-BR')
        self.assertEqual(entry.resource_common_name, 'Boardroom')
        self.assertEqual(
            entry.resource_description,
            ('This conference room is in New York City, building 14, floor 12, '
             'Boardroom'))
        self.assertEqual(entry.resource_type, 'CR')

    def testCalendarResourceFeedFromString(self):
        """A feed parses into typed entries with their edit links intact."""
        feed = self.feed
        self.assertEqual(len(feed.entry), 2)
        self.assertTrue(isinstance(
            feed, gdata.calendar_resource.data.CalendarResourceFeed))
        boardroom, bike = feed.entry
        self.assertTrue(isinstance(
            boardroom, gdata.calendar_resource.data.CalendarResourceEntry))
        self.assertTrue(isinstance(
            bike, gdata.calendar_resource.data.CalendarResourceEntry))
        self.assertEqual(
            boardroom.find_edit_link(),
            'https://apps-apis.google.com/feeds/calendar/resource/2.0/yourdomain.com/CR-NYC-14-12-BR')
        self.assertEqual(boardroom.resource_id, 'CR-NYC-14-12-BR')
        self.assertEqual(boardroom.resource_common_name, 'Boardroom')
        self.assertEqual(
            boardroom.resource_description,
            ('This conference room is in New York City, building 14, floor 12, '
             'Boardroom'))
        self.assertEqual(boardroom.resource_type, 'CR')
        self.assertEqual(bike.resource_id, '(Bike)-London-43-Lobby-Bike-1')
        self.assertEqual(bike.resource_common_name, 'London bike-1')
        self.assertEqual(bike.resource_description,
                         'Bike is in London at building 43\'s lobby.')
        self.assertEqual(bike.resource_type, '(Bike)')
        self.assertEqual(
            bike.find_edit_link(),
            'https://apps-apis.google.com/a/feeds/calendar/resource/2.0/yourdomain.com/(Bike)-London-43-Lobby-Bike-1')
def suite():
    """Assemble this module's test cases into one suite."""
    cases = [CalendarResourceEntryTest]
    return conf.build_suite(cases)


if __name__ == '__main__':
    unittest.main()
| Python |
#!/usr/bin/env python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module is used for version 2 of the Google Data APIs.
# These tests attempt to connect to Google servers.
__author__ = 'j.s@google.com (Jeff Scudder)'
import unittest
import gdata.test_config as conf
import gdata.contacts.client
import atom.core
import atom.data
import gdata.data
class ContactsTest(unittest.TestCase):
    """Live tests for the typed ContactsClient data classes and raw requests."""

    def setUp(self):
        # Only construct and authenticate a client when running live.
        self.client = None
        if conf.options.get_value('runlive') == 'true':
            self.client = gdata.contacts.client.ContactsClient()
            conf.configure_client(self.client, 'ContactsTest', 'cp')

    def tearDown(self):
        conf.close_client(self.client)

    def test_create_update_delete_contact(self):
        """Creates a contact and a group, links them, updates, then deletes."""
        if not conf.options.get_value('runlive') == 'true':
            return
        # Either load the recording or prepare to make a live request.
        conf.configure_cache(self.client, 'test_create_update_delete_contact')
        new_contact = gdata.contacts.data.ContactEntry(
            nickname=gdata.contacts.data.NickName(text='Joe'),
            name=gdata.data.Name(
                given_name=gdata.data.GivenName(text='Joseph'),
                family_name=gdata.data.FamilyName(text='Testerson')))
        new_contact.birthday = gdata.contacts.data.Birthday(when='2009-11-11')
        new_contact.language.append(gdata.contacts.data.Language(
            label='German'))
        created = self.client.create_contact(new_contact)
        # Add another language.
        created.language.append(gdata.contacts.data.Language(
            label='French'))
        # Create a new membership group for our test contact.
        new_group = gdata.contacts.data.GroupEntry(
            title=atom.data.Title(text='a test group'))
        created_group = self.client.create_group(new_group)
        self.assert_(created_group.id.text)
        # Add the contact to the new group.
        created.group_membership_info.append(
            gdata.contacts.data.GroupMembershipInfo(href=created_group.id.text))
        # Upload the changes to the language and group membership.
        edited = self.client.update(created)
        # Delete the group and the test contact.
        self.client.delete(created_group)
        self.client.delete(edited)

    def test_low_level_create_update_delete(self):
        """Posts a hand-built Atom entry and deletes it via its edit link."""
        if not conf.options.get_value('runlive') == 'true':
            return
        # Either load the recording or prepare to make a live request.
        conf.configure_cache(self.client, 'test_low_level_create_update_delete')
        entry = atom.data.Entry()
        entry.title = atom.data.Title(text='Jeff')
        entry._other_elements.append(
            gdata.data.Email(rel=gdata.data.WORK_REL, address='j.s@google.com'))
        http_request = atom.http_core.HttpRequest()
        http_request.add_body_part(entry.to_string(), 'application/atom+xml')
        posted = self.client.request('POST',
            'http://www.google.com/m8/feeds/contacts/default/full',
            desired_class=atom.data.Entry, http_request=http_request)
        # Pull the self and edit links out of the response entry.
        self_link = None
        edit_link = None
        for link in posted.get_elements('link', 'http://www.w3.org/2005/Atom'):
            if link.get_attributes('rel')[0].value == 'self':
                self_link = link.get_attributes('href')[0].value
            elif link.get_attributes('rel')[0].value == 'edit':
                edit_link = link.get_attributes('href')[0].value
        self.assert_(self_link is not None)
        self.assert_(edit_link is not None)
        # v2 responses carry a gd:etag attribute on the entry.
        etag = posted.get_attributes('etag')[0].value
        self.assert_(etag is not None)
        self.assert_(len(etag) > 0)
        # Delete the test contact, guarding against concurrent edits.
        http_request = atom.http_core.HttpRequest()
        http_request.headers['If-Match'] = etag
        self.client.request('DELETE', edit_link, http_request=http_request)
def suite():
    """Assemble this module's test cases into one suite."""
    cases = [ContactsTest]
    return conf.build_suite(cases)


if __name__ == '__main__':
    runner = unittest.TextTestRunner()
    runner.run(suite())
| Python |
#!/usr/bin/python
#
# Copyright (C) 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'api.jscudder (Jeff Scudder)'
import getpass
import re
import unittest
import urllib
import atom
import gdata.contacts.service
import gdata.test_config as conf
# The photo-update test needs a local image file; register the image-path
# command-line option so conf.options.get_value('imgpath') can resolve it.
conf.options.register_option(conf.TEST_IMAGE_LOCATION_OPTION)
class ContactsServiceTest(unittest.TestCase):
    """Live tests for the v1 ContactsService: feeds, CRUD, photos, and batch.

    Network-touching tests are gated on the --runlive option and use the
    service cache so recorded sessions can be replayed.
    """

    def setUp(self):
        self.gd_client = gdata.contacts.service.ContactsService()
        conf.configure_service(self.gd_client, 'ContactsServiceTest', 'cp')
        # The batch test substitutes this address into the expected per-user
        # batch URL below.
        self.gd_client.email = conf.options.get_value('username')

    def tearDown(self):
        conf.close_service(self.gd_client)

    def testGetContactsFeed(self):
        """Fetching the default feed returns a ContactsFeed."""
        if not conf.options.get_value('runlive') == 'true':
            return
        conf.configure_service_cache(self.gd_client, 'testGetContactsFeed')
        feed = self.gd_client.GetContactsFeed()
        self.assert_(isinstance(feed, gdata.contacts.ContactsFeed))

    def testDefaultContactList(self):
        """A fresh service points at the 'default' contact list."""
        self.assertEquals('default', self.gd_client.contact_list)

    def testCustomContactList(self):
        """The contact list can be switched to an explicit username."""
        if not conf.options.get_value('runlive') == 'true':
            return
        conf.configure_service_cache(self.gd_client, 'testCustomContactList')
        self.gd_client.contact_list = conf.options.get_value('username')
        feed = self.gd_client.GetContactsFeed()
        self.assert_(isinstance(feed, gdata.contacts.ContactsFeed))

    def testGetFeedUriDefault(self):
        """GetFeedUri embeds the current contact_list in the path."""
        self.gd_client.contact_list = 'domain.com'
        self.assertEquals('/m8/feeds/contacts/domain.com/full',
                          self.gd_client.GetFeedUri())

    def testGetFeedUriCustom(self):
        """GetFeedUri honors kind, contact list, projection, and scheme."""
        uri = self.gd_client.GetFeedUri(kind='groups',
                                        contact_list='example.com',
                                        projection='base/batch',
                                        scheme='https')
        self.assertEquals(
            'https://www.google.com/m8/feeds/groups/example.com/base/batch', uri)

    def testCreateUpdateDeleteContactAndUpdatePhoto(self):
        """Full lifecycle: create, edit, change photo, fetch photo, delete."""
        if not conf.options.get_value('runlive') == 'true':
            return
        conf.configure_service_cache(
            self.gd_client, 'testCreateUpdateDeleteContactAndUpdatePhoto')
        # Clear out any leftover fixture from a previously aborted run.
        DeleteTestContact(self.gd_client)
        # Create a new entry
        new_entry = gdata.contacts.ContactEntry()
        new_entry.title = atom.Title(text='Elizabeth Bennet')
        new_entry.content = atom.Content(text='Test Notes')
        new_entry.email.append(gdata.contacts.Email(
            rel='http://schemas.google.com/g/2005#work',
            primary='true',
            address='liz@gmail.com'))
        new_entry.phone_number.append(gdata.contacts.PhoneNumber(
            rel='http://schemas.google.com/g/2005#work', text='(206)555-1212'))
        new_entry.organization = gdata.contacts.Organization(
            org_name=gdata.contacts.OrgName(text='TestCo.'),
            rel='http://schemas.google.com/g/2005#work')
        entry = self.gd_client.CreateContact(new_entry)
        # Generate and parse the XML for the new entry.
        self.assertEquals(entry.title.text, new_entry.title.text)
        self.assertEquals(entry.content.text, 'Test Notes')
        self.assertEquals(len(entry.email), 1)
        self.assertEquals(entry.email[0].rel, new_entry.email[0].rel)
        self.assertEquals(entry.email[0].address, 'liz@gmail.com')
        self.assertEquals(len(entry.phone_number), 1)
        self.assertEquals(entry.phone_number[0].rel,
                          new_entry.phone_number[0].rel)
        self.assertEquals(entry.phone_number[0].text, '(206)555-1212')
        self.assertEquals(entry.organization.org_name.text, 'TestCo.')
        # Edit the entry.
        entry.phone_number[0].text = '(555)555-1212'
        updated = self.gd_client.UpdateContact(entry.GetEditLink().href, entry)
        self.assertEquals(updated.content.text, 'Test Notes')
        self.assertEquals(len(updated.phone_number), 1)
        self.assertEquals(updated.phone_number[0].rel,
                          entry.phone_number[0].rel)
        self.assertEquals(updated.phone_number[0].text, '(555)555-1212')
        # Change the contact's photo.
        updated_photo = self.gd_client.ChangePhoto(
            conf.options.get_value('imgpath'), updated,
            content_type='image/jpeg')
        # Refetch the contact so that it has the new photo link
        updated = self.gd_client.GetContact(updated.GetSelfLink().href)
        self.assert_(updated.GetPhotoLink() is not None)
        # Fetch the photo data.
        hosted_image = self.gd_client.GetPhoto(updated)
        self.assert_(hosted_image is not None)
        # Delete the entry.
        self.gd_client.DeleteContact(updated.GetEditLink().href)

    def testCreateAndDeleteContactUsingBatch(self):
        """Inserts then deletes a contact via batch feeds, checking statuses."""
        if not conf.options.get_value('runlive') == 'true':
            return
        conf.configure_service_cache(
            self.gd_client, 'testCreateAndDeleteContactUsingBatch')
        # Get random data for creating contact
        random_contact_number = 'notRandom12'
        random_contact_title = 'Random Contact %s' % (
            random_contact_number)
        # Set contact data
        contact = gdata.contacts.ContactEntry()
        contact.title = atom.Title(text=random_contact_title)
        contact.email = gdata.contacts.Email(
            address='user%s@example.com' % random_contact_number,
            primary='true',
            rel=gdata.contacts.REL_WORK)
        contact.content = atom.Content(text='Contact created by '
                                       'gdata-python-client automated test '
                                       'suite.')
        # Form a batch request
        batch_request = gdata.contacts.ContactsFeed()
        batch_request.AddInsert(entry=contact)
        # Execute the batch request to insert the contact.
        default_batch_url = gdata.contacts.service.DEFAULT_BATCH_URL
        batch_result = self.gd_client.ExecuteBatch(batch_request,
                                                   default_batch_url)
        self.assertEquals(len(batch_result.entry), 1)
        self.assertEquals(batch_result.entry[0].title.text,
                          random_contact_title)
        self.assertEquals(batch_result.entry[0].batch_operation.type,
                          gdata.BATCH_INSERT)
        self.assertEquals(batch_result.entry[0].batch_status.code,
                          '201')
        expected_batch_url = re.compile('default').sub(
            urllib.quote(self.gd_client.email),
            gdata.contacts.service.DEFAULT_BATCH_URL)
        # NOTE(review): failUnless(expr, msg) treats the second argument as a
        # failure message, so this only asserts that the batch link href is
        # truthy; it does NOT compare it against expected_batch_url. Left
        # unchanged pending confirmation of the intended comparison.
        self.failUnless(batch_result.GetBatchLink().href,
                        expected_batch_url)
        # Create a batch request to delete the newly created entry.
        batch_delete_request = gdata.contacts.ContactsFeed()
        batch_delete_request.AddDelete(entry=batch_result.entry[0])
        batch_delete_result = self.gd_client.ExecuteBatch(
            batch_delete_request,
            batch_result.GetBatchLink().href)
        self.assertEquals(len(batch_delete_result.entry), 1)
        self.assertEquals(batch_delete_result.entry[0].batch_operation.type,
                          gdata.BATCH_DELETE)
        # Fixed: this previously re-asserted the INSERT result's status code.
        # Check the delete operation's own status instead; a successful batch
        # delete reports '200' (Success), not '201' (Created).
        self.assertEquals(batch_delete_result.entry[0].batch_status.code,
                          '200')

    def testCleanUriNeedsCleaning(self):
        """_CleanUri strips the scheme and host from an absolute URI."""
        self.assertEquals('/relative/uri', self.gd_client._CleanUri(
            'http://www.google.com/relative/uri'))

    def testCleanUriDoesNotNeedCleaning(self):
        """_CleanUri leaves an already-relative URI untouched."""
        self.assertEquals('/relative/uri', self.gd_client._CleanUri(
            '/relative/uri'))
class ContactsQueryTest(unittest.TestCase):
    """URI-generation tests for ContactsQuery; no network access involved."""

    def testConvertToStringDefaultFeed(self):
        """Default query targets the default full feed; max-results appends."""
        default_query = gdata.contacts.service.ContactsQuery()
        self.assertEqual(str(default_query), '/m8/feeds/contacts/default/full')
        default_query.max_results = 10
        self.assertEqual(default_query.ToUri(),
                         '/m8/feeds/contacts/default/full?max-results=10')

    def testConvertToStringCustomFeed(self):
        """An explicit feed URI is preserved; max-results appends."""
        custom_query = gdata.contacts.service.ContactsQuery('/custom/feed/uri')
        self.assertEqual(str(custom_query), '/custom/feed/uri')
        custom_query.max_results = '10'
        self.assertEqual(custom_query.ToUri(), '/custom/feed/uri?max-results=10')

    def testGroupQueryParameter(self):
        """The group property is URL-encoded into the query string."""
        group_query = gdata.contacts.service.ContactsQuery()
        group_query.group = (
            'http://google.com/m8/feeds/groups/liz%40gmail.com/full/270f')
        expected_uri = ('/m8/feeds/contacts/default/full'
                        '?group=http%3A%2F%2Fgoogle.com%2Fm8%2Ffeeds%2Fgroups'
                        '%2Fliz%2540gmail.com%2Ffull%2F270f')
        self.assertEqual(group_query.ToUri(), expected_uri)
class ContactsGroupsTest(unittest.TestCase):
    """Live create/update/delete tests for contact groups."""

    def setUp(self):
        self.gd_client = gdata.contacts.service.ContactsService()
        # NOTE(review): reuses the 'ContactsServiceTest' configuration name
        # rather than 'ContactsGroupsTest' -- presumably intentional so both
        # classes share credentials/recordings; confirm.
        conf.configure_service(self.gd_client, 'ContactsServiceTest', 'cp')

    def tearDown(self):
        conf.close_service(self.gd_client)

    def testCreateUpdateDeleteGroup(self):
        """Creates a group, renames it, then deletes it."""
        if not conf.options.get_value('runlive') == 'true':
            return
        conf.configure_service_cache(self.gd_client,
                                     'testCreateUpdateDeleteGroup')
        test_group = gdata.contacts.GroupEntry(title=atom.Title(
            text='test group py'))
        new_group = self.gd_client.CreateGroup(test_group)
        self.assert_(isinstance(new_group, gdata.contacts.GroupEntry))
        self.assertEquals(new_group.title.text, 'test group py')
        # Change the group's title
        new_group.title.text = 'new group name py'
        updated_group = self.gd_client.UpdateGroup(new_group.GetEditLink().href,
                                                   new_group)
        self.assertEquals(updated_group.title.text, new_group.title.text)
        # Remove the group
        self.gd_client.DeleteGroup(updated_group.GetEditLink().href)
# Utility methods.
def DeleteTestContact(client):
    """Delete every copy of the canned 'Elizabeth Bennet' test contact.

    Scans the client's contact feed and removes each entry whose title,
    notes, and first email address all match the fixture created by
    testCreateUpdateDeleteContactAndUpdatePhoto.
    """
    contacts_feed = client.GetContactsFeed()
    for contact in contacts_feed.entry:
        is_test_contact = (
            contact.title.text == 'Elizabeth Bennet'
            and contact.content.text == 'Test Notes'
            and contact.email[0].address == 'liz@gmail.com')
        if is_test_contact:
            client.DeleteContact(contact.GetEditLink().href)
def suite():
  """Collects every contacts test case in this module into one suite."""
  all_cases = (unittest.makeSuite(ContactsServiceTest, 'test'),
               unittest.makeSuite(ContactsQueryTest, 'test'),
               unittest.makeSuite(ContactsGroupsTest, 'test'))
  return unittest.TestSuite(all_cases)
if __name__ == '__main__':
  # Warn first: these tests can mutate (delete/update) live contact data.
  print ('Contacts Tests\nNOTE: Please run these tests only with a test '
         'account. The tests may delete or update your data.')
  unittest.TextTestRunner().run(suite())
| Python |
#!/usr/bin/env python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module is used for version 2 of the Google Data APIs.
# These tests attempt to connect to Google servers.
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
import atom.core
import atom.data
import atom.http_core
import gdata.contacts.client
import gdata.data
import gdata.test_config as conf
import unittest
conf.options.register_option(conf.APPS_DOMAIN_OPTION)
conf.options.register_option(conf.TARGET_USERNAME_OPTION)
class ProfileTest(unittest.TestCase):
  """Live tests for the v2 Profiles feed using gdata.contacts.client."""

  def setUp(self):
    # Placeholder client; replaced with a domain-configured one when the
    # 'runlive' option is set.
    self.client = gdata.contacts.client.ContactsClient(domain='example.com')
    if conf.options.get_value('runlive') == 'true':
      self.client = gdata.contacts.client.ContactsClient(
          domain=conf.options.get_value('appsdomain'))
      if conf.options.get_value('ssl') == 'true':
        self.client.ssl = True
      conf.configure_client(self.client, 'ProfileTest',
                            self.client.auth_service, True)
      # Keep only the local part of the configured username.
      self.client.username = conf.options.get_value('appsusername').split('@')[0]

  def tearDown(self):
    conf.close_client(self.client)

  def test_profiles_feed(self):
    """The profiles feed parses into a ProfilesFeed object."""
    if not conf.options.get_value('runlive') == 'true':
      return
    # Either load the recording or prepare to make a live request.
    conf.configure_cache(self.client, 'test_profiles_feed')
    feed = self.client.get_profiles_feed()
    self.assert_(isinstance(feed, gdata.contacts.data.ProfilesFeed))

  def test_profiles_query(self):
    """Pages through the feed one entry at a time using start-key."""
    if not conf.options.get_value('runlive') == 'true':
      return
    # Either load the recording or prepare to make a live request.
    conf.configure_cache(self.client, 'test_profiles_feed')
    query = gdata.contacts.client.ProfilesQuery(max_results=1)
    feed = self.client.get_profiles_feed(q=query)
    self.assert_(isinstance(feed, gdata.contacts.data.ProfilesFeed))
    self.assert_(len(feed.entry) == 1)
    # Needs at least 2 profiles in the feed to test the start-key
    # query param.
    next = feed.GetNextLink()
    feed = None
    if next:
      # Retrieve the start-key query param from the next link.
      uri = atom.http_core.Uri.parse_uri(next.href)
      if 'start-key' in uri.query:
        query.start_key = uri.query['start-key']
        feed = self.client.get_profiles_feed(q=query)
        self.assert_(isinstance(feed, gdata.contacts.data.ProfilesFeed))
        self.assert_(len(feed.entry) == 1)
        self.assert_(feed.GetSelfLink().href == next.href)
        # Compare with a feed retrieved with the next link.
        next_feed = self.client.get_profiles_feed(uri=next.href)
        self.assert_(len(next_feed.entry) == 1)
        self.assert_(next_feed.entry[0].id.text == feed.entry[0].id.text)
def suite():
  """Builds the test suite via the shared gdata test-config helper."""
  return conf.build_suite([ProfileTest])


if __name__ == '__main__':
  unittest.TextTestRunner().run(suite())
| Python |
#!/usr/bin/env python
#
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains Unit Tests for Google Profiles API.
ProfilesServiceTest: Provides methods to test feeds and manipulate items.
ProfilesQueryTest: Constructs a query object for the profiles feed.
Extends Query.
"""
__author__ = 'jtoledo (Julian Toledo)'
import getopt
import getpass
import sys
import unittest
import gdata.contacts
import gdata.contacts.service
email = ''
password = ''
domain = ''
server = 'www.google.com'
GDATA_VER_HEADER = 'GData-Version'
class ProfilesServiceTest(unittest.TestCase):
  """Live tests for the Profiles feed via the v1 ContactsService."""

  def setUp(self):
    # Request a newer GData version so the profiles feed is available.
    # NOTE(review): the header value is an int (3); HTTP header values are
    # normally strings ('3') -- confirm the transport accepts an int here.
    additional_headers = {GDATA_VER_HEADER: 3}
    self.gd_client = gdata.contacts.service.ContactsService(
        contact_list=domain, additional_headers=additional_headers )
    self.gd_client.email = email
    self.gd_client.password = password
    self.gd_client.source = 'GoogleInc-ProfilesPythonTest-1'
    self.gd_client.ProgrammaticLogin()

  def testGetFeedUriCustom(self):
    """GetFeedUri honors the kind and scheme arguments."""
    uri = self.gd_client.GetFeedUri(kind='profiles', scheme='https')
    self.assertEquals(
        'https://%s/m8/feeds/profiles/domain/%s/full' % (server, domain), uri)

  def testGetProfileFeedUriDefault(self):
    """The default profiles feed URI is relative and domain-scoped."""
    self.gd_client.contact_list = 'domain.com'
    self.assertEquals('/m8/feeds/profiles/domain/domain.com/full',
                      self.gd_client.GetFeedUri('profiles'))

  def testCleanUriNeedsCleaning(self):
    """_CleanUri strips scheme and host from an absolute URI."""
    self.assertEquals('/relative/uri', self.gd_client._CleanUri(
        'http://www.google.com/relative/uri'))

  def testCleanUriDoesNotNeedCleaning(self):
    """_CleanUri leaves an already-relative URI untouched."""
    self.assertEquals('/relative/uri', self.gd_client._CleanUri(
        '/relative/uri'))

  def testGetProfilesFeed(self):
    """The profiles feed parses into a ProfilesFeed object."""
    feed = self.gd_client.GetProfilesFeed()
    self.assert_(isinstance(feed, gdata.contacts.ProfilesFeed))

  def testGetProfile(self):
    """Fetching an entry by id returns the same data as the feed entry."""
    # Gets an existing entry
    feed = self.gd_client.GetProfilesFeed()
    entry = feed.entry[0]
    self.assert_(isinstance(entry, gdata.contacts.ProfileEntry))
    self.assertEquals(entry.title.text,
                      self.gd_client.GetProfile(entry.id.text).title.text)
    self.assertEquals(entry._children,
                      self.gd_client.GetProfile(entry.id.text)._children)

  def testUpdateProfile(self):
    """Updates a profile's occupation, then restores the original value."""
    feed = self.gd_client.GetProfilesFeed()
    entry = feed.entry[1]
    original_occupation = entry.occupation
    entry.occupation = gdata.contacts.Occupation(text='TEST')
    updated = self.gd_client.UpdateProfile(entry.GetEditLink().href, entry)
    self.assertEquals('TEST', updated.occupation.text)
    # Restore so repeated runs start from the same server state.
    updated.occupation = original_occupation
    self.gd_client.UpdateProfile(updated.GetEditLink().href, updated)
if __name__ == '__main__':
try:
opts, args = getopt.getopt(sys.argv[1:], '', ['user=', 'pw=', 'domain='])
except getopt.error, msg:
print ('Profiles Tests\nNOTE: Please run these tests only with a test '
'account. The tests may delete or update your data.\n'
'\nUsage: service_test.py --email=EMAIL '
'--password=PASSWORD --domain=DOMAIN\n')
sys.exit(2)
# Process options
for option, arg in opts:
if option == '--email':
email = arg
elif option == '--pw':
password = arg
elif option == '--domain':
domain = arg
while not email:
print 'NOTE: Please run these tests only with a test account.'
email = raw_input('Please enter your email: ')
while not password:
password = getpass.getpass('Please enter password: ')
if not password:
print 'Password cannot be blank.'
while not domain:
print 'NOTE: Please run these tests only with a test account.'
domain = raw_input('Please enter your Apps domain: ')
suite = unittest.makeSuite(ProfilesServiceTest)
unittest.TextTestRunner().run(suite)
| Python |
#!/usr/bin/python
#
# Copyright (C) 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'api.jscudder (Jeffrey Scudder)'
import unittest
from gdata import test_data
import atom
import gdata.contacts
class ContactEntryTest(unittest.TestCase):
  """Parsing, serialization, and construction tests for ContactEntry."""

  def setUp(self):
    # Parse the canned NEW_CONTACT XML fixture once per test.
    self.entry = gdata.contacts.ContactEntryFromString(test_data.NEW_CONTACT)

  def testParsingTestEntry(self):
    """Checks every field of the parsed fixture entry."""
    self.assertEquals(self.entry.title.text, 'Fitzgerald')
    self.assertEquals(len(self.entry.email), 2)
    for email in self.entry.email:
      if email.rel == 'http://schemas.google.com/g/2005#work':
        self.assertEquals(email.address, 'liz@gmail.com')
      elif email.rel == 'http://schemas.google.com/g/2005#home':
        self.assertEquals(email.address, 'liz@example.org')
    self.assertEquals(len(self.entry.phone_number), 3)
    self.assertEquals(len(self.entry.postal_address), 1)
    self.assertEquals(self.entry.postal_address[0].primary, 'true')
    self.assertEquals(self.entry.postal_address[0].text,
                      '1600 Amphitheatre Pkwy Mountain View')
    self.assertEquals(len(self.entry.im), 1)
    self.assertEquals(len(self.entry.group_membership_info), 1)
    self.assertEquals(self.entry.group_membership_info[0].href,
        'http://google.com/m8/feeds/groups/liz%40gmail.com/base/270f')
    self.assertEquals(self.entry.group_membership_info[0].deleted, 'false')
    # Extended properties: one plain name/value pair plus one XML blob.
    self.assertEquals(len(self.entry.extended_property), 2)
    self.assertEquals(self.entry.extended_property[0].name, 'pet')
    self.assertEquals(self.entry.extended_property[0].value, 'hamster')
    self.assertEquals(self.entry.extended_property[1].name, 'cousine')
    self.assertEquals(
        self.entry.extended_property[1].GetXmlBlobExtensionElement().tag,
        'italian')

  def testToAndFromString(self):
    """The fixture entry survives a str() round trip with fields intact."""
    copied_entry = gdata.contacts.ContactEntryFromString(str(self.entry))
    self.assertEquals(copied_entry.title.text, 'Fitzgerald')
    self.assertEquals(len(copied_entry.email), 2)
    for email in copied_entry.email:
      if email.rel == 'http://schemas.google.com/g/2005#work':
        self.assertEquals(email.address, 'liz@gmail.com')
      elif email.rel == 'http://schemas.google.com/g/2005#home':
        self.assertEquals(email.address, 'liz@example.org')
    self.assertEquals(len(copied_entry.phone_number), 3)
    self.assertEquals(len(copied_entry.postal_address), 1)
    self.assertEquals(copied_entry.postal_address[0].primary, 'true')
    self.assertEquals(copied_entry.postal_address[0].text,
                      '1600 Amphitheatre Pkwy Mountain View')
    self.assertEquals(len(copied_entry.im), 1)
    self.assertEquals(len(copied_entry.group_membership_info), 1)
    self.assertEquals(copied_entry.group_membership_info[0].href,
        'http://google.com/m8/feeds/groups/liz%40gmail.com/base/270f')
    self.assertEquals(copied_entry.group_membership_info[0].deleted, 'false')
    self.assertEquals(len(copied_entry.extended_property), 2)
    self.assertEquals(copied_entry.extended_property[0].name, 'pet')
    self.assertEquals(copied_entry.extended_property[0].value, 'hamster')
    self.assertEquals(copied_entry.extended_property[1].name, 'cousine')
    self.assertEquals(
        copied_entry.extended_property[1].GetXmlBlobExtensionElement().tag,
        'italian')

  def testCreateContactFromScratch(self):
    """Builds an entry programmatically and verifies its XML round trip."""
    # Create a new entry
    new_entry = gdata.contacts.ContactEntry()
    new_entry.title = atom.Title(text='Elizabeth Bennet')
    new_entry.content = atom.Content(text='Test Notes')
    new_entry.email.append(gdata.contacts.Email(
        rel='http://schemas.google.com/g/2005#work',
        address='liz@gmail.com'))
    new_entry.phone_number.append(gdata.contacts.PhoneNumber(
        rel='http://schemas.google.com/g/2005#work', text='(206)555-1212'))
    new_entry.organization = gdata.contacts.Organization(
        org_name=gdata.contacts.OrgName(text='TestCo.'))
    new_entry.extended_property.append(gdata.ExtendedProperty(name='test',
        value='1234'))
    new_entry.birthday = gdata.contacts.Birthday(when='2009-7-23')
    # An extended property can also carry an arbitrary XML blob.
    sports_property = gdata.ExtendedProperty(name='sports')
    sports_property.SetXmlBlob('<dance><salsa/><ballroom_dancing/></dance>')
    new_entry.extended_property.append(sports_property)
    # Generate and parse the XML for the new entry.
    entry_copy = gdata.contacts.ContactEntryFromString(str(new_entry))
    self.assertEquals(entry_copy.title.text, new_entry.title.text)
    self.assertEquals(entry_copy.content.text, 'Test Notes')
    self.assertEquals(len(entry_copy.email), 1)
    self.assertEquals(entry_copy.email[0].rel, new_entry.email[0].rel)
    self.assertEquals(entry_copy.email[0].address, 'liz@gmail.com')
    self.assertEquals(len(entry_copy.phone_number), 1)
    self.assertEquals(entry_copy.phone_number[0].rel,
                      new_entry.phone_number[0].rel)
    self.assertEquals(entry_copy.birthday.when, '2009-7-23')
    self.assertEquals(entry_copy.phone_number[0].text, '(206)555-1212')
    self.assertEquals(entry_copy.organization.org_name.text, 'TestCo.')
    self.assertEquals(len(entry_copy.extended_property), 2)
    self.assertEquals(entry_copy.extended_property[0].name, 'test')
    self.assertEquals(entry_copy.extended_property[0].value, '1234')
class ContactsFeedTest(unittest.TestCase):
  """Parsing and serialization checks for the contacts feed fixture."""

  def setUp(self):
    self.feed = gdata.contacts.ContactsFeedFromString(test_data.CONTACTS_FEED)

  def testParsingTestFeed(self):
    """The canned feed parses into one ContactEntry with photo links."""
    feed = self.feed
    self.assertEquals(
        'http://www.google.com/m8/feeds/contacts/liz%40gmail.com/base',
        feed.id.text)
    self.assertEquals('Contacts', feed.title.text)
    self.assertEquals('1', feed.total_results.text)
    self.assertEquals(1, len(feed.entry))
    self.assert_(isinstance(feed.entry[0], gdata.contacts.ContactEntry))
    self.assertEquals(
        'http://google.com/m8/feeds/photos/media/liz%40gmail.com/c9012de',
        feed.entry[0].GetPhotoLink().href)
    self.assertEquals(
        'http://www.google.com/m8/feeds/photos/media/liz%40gmail.com/'
        'c9012de/photo4524',
        feed.entry[0].GetPhotoEditLink().href)

  def testToAndFromString(self):
    """The feed survives a str() round trip with its fields intact."""
    reparsed = gdata.contacts.ContactsFeedFromString(str(self.feed))
    self.assertEquals(
        'http://www.google.com/m8/feeds/contacts/liz%40gmail.com/base',
        reparsed.id.text)
    self.assertEquals('Contacts', reparsed.title.text)
    self.assertEquals('1', reparsed.total_results.text)
    self.assertEquals(1, len(reparsed.entry))
    self.assert_(isinstance(reparsed.entry[0], gdata.contacts.ContactEntry))
class GroupsFeedTest(unittest.TestCase):
  """Parsing checks for the contact-groups feed fixture."""

  def setUp(self):
    self.feed = gdata.contacts.GroupsFeedFromString(
        test_data.CONTACT_GROUPS_FEED)

  def testParsingGroupsFeed(self):
    """The canned groups feed exposes id, title, totals, and one entry."""
    feed = self.feed
    self.assertEquals('jo@gmail.com', feed.id.text)
    self.assertEquals('Jo\'s Contact Groups', feed.title.text)
    self.assertEquals('3', feed.total_results.text)
    self.assertEquals(1, len(feed.entry))
    self.assert_(isinstance(feed.entry[0], gdata.contacts.GroupEntry))
class GroupEntryTest(unittest.TestCase):
  """Parsing checks for a single contact-group entry fixture."""

  def setUp(self):
    self.entry = gdata.contacts.GroupEntryFromString(
        test_data.CONTACT_GROUP_ENTRY)

  def testParsingTestEntry(self):
    """The entry carries one extended property holding an atom XML blob."""
    self.assertEquals('Salsa group', self.entry.title.text)
    self.assertEquals(1, len(self.entry.extended_property))
    prop = self.entry.extended_property[0]
    self.assertEquals('more info about the group', prop.name)
    blob = prop.GetXmlBlobExtensionElement()
    self.assertEquals(atom.ATOM_NAMESPACE, blob.namespace)
    self.assertEquals('info', blob.tag)
    self.assertEquals('Very nice people.', blob.text)
if __name__ == '__main__':
  # Discover and run every TestCase defined in this module.
  unittest.main()
| Python |
#!/usr/bin/env python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module is used for version 2 of the Google Data APIs.
__author__ = 'j.s@google.com (Jeff Scudder)'
import unittest
import gdata.gauth
import atom.http_core
import gdata.test_config as conf
# RSA private key in PEM (PKCS#8) form, used only to exercise the RSA
# signing code paths in the tests below.  It is not a real credential.
PRIVATE_TEST_KEY = """
-----BEGIN PRIVATE KEY-----
MIICdgIBADANBgkqhkiG9w0BAQEFAASCAmAwggJcAgEAAoGBALRiMLAh9iimur8V
A7qVvdqxevEuUkW4K+2KdMXmnQbG9Aa7k7eBjK1S+0LYmVjPKlJGNXHDGuy5Fw/d
7rjVJ0BLB+ubPK8iA/Tw3hLQgXMRRGRXXCn8ikfuQfjUS1uZSatdLB81mydBETlJ
hI6GH4twrbDJCR2Bwy/XWXgqgGRzAgMBAAECgYBYWVtleUzavkbrPjy0T5FMou8H
X9u2AC2ry8vD/l7cqedtwMPp9k7TubgNFo+NGvKsl2ynyprOZR1xjQ7WgrgVB+mm
uScOM/5HVceFuGRDhYTCObE+y1kxRloNYXnx3ei1zbeYLPCHdhxRYW7T0qcynNmw
rn05/KO2RLjgQNalsQJBANeA3Q4Nugqy4QBUCEC09SqylT2K9FrrItqL2QKc9v0Z
zO2uwllCbg0dwpVuYPYXYvikNHHg+aCWF+VXsb9rpPsCQQDWR9TT4ORdzoj+Nccn
qkMsDmzt0EfNaAOwHOmVJ2RVBspPcxt5iN4HI7HNeG6U5YsFBb+/GZbgfBT3kpNG
WPTpAkBI+gFhjfJvRw38n3g/+UeAkwMI2TJQS4n8+hid0uus3/zOjDySH3XHCUno
cn1xOJAyZODBo47E+67R4jV1/gzbAkEAklJaspRPXP877NssM5nAZMU0/O/NGCZ+
3jPgDUno6WbJn5cqm8MqWhW1xGkImgRk+fkDBquiq4gPiT898jusgQJAd5Zrr6Q8
AO/0isr/3aa6O6NLQxISLKcPDk2NOccAfS/xOtfOz4sJYM3+Bs4Io9+dZGSDCA54
Lw03eHTNQghS0A==
-----END PRIVATE KEY-----"""
class AuthSubTest(unittest.TestCase):
  """Tests AuthSub URL generation and token-string parsing."""

  def test_generate_request_url(self):
    """The generated URL carries secure/session/scope and a 'next' URL."""
    url = gdata.gauth.generate_auth_sub_url('http://example.com',
                                            ['http://example.net/scope1'])
    self.assert_(isinstance(url, atom.http_core.Uri))
    self.assertEqual('0', url.query['secure'])
    self.assertEqual('1', url.query['session'])
    self.assertEqual('http://example.net/scope1', url.query['scope'])
    # The 'next' parameter embeds the original URL plus the scope list.
    next_uri = atom.http_core.Uri.parse_uri(url.query['next'])
    self.assertEqual('http://example.net/scope1',
                     next_uri.query['auth_sub_scopes'])
    self.assertEqual('/', next_uri.path)
    self.assertEqual('example.com', next_uri.host)

  def test_from_url(self):
    """The token value is extracted from a redirect URL."""
    extracted = gdata.gauth.auth_sub_string_from_url(
        'http://example.com/?token=123abc')[0]
    self.assertEqual('123abc', extracted)

  def test_from_http_body(self):
    """The Token= line is extracted from a response body."""
    body = ('Something\n'
            'Token=DQAA...7DCTN\n'
            'Expiration=20061004T123456Z\n')
    self.assertEqual('DQAA...7DCTN',
                     gdata.gauth.auth_sub_string_from_body(body))

  def test_modify_request(self):
    """modify_request sets the AuthSub Authorization header."""
    request = atom.http_core.HttpRequest()
    gdata.gauth.AuthSubToken('tval').modify_request(request)
    self.assertEqual('AuthSub token=tval', request.headers['Authorization'])

  def test_create_and_upgrade_tokens(self):
    """A token built from a URL can be upgraded to a session token."""
    token = gdata.gauth.AuthSubToken.from_url(
        'http://example.com/?token=123abc')
    self.assert_(isinstance(token, gdata.gauth.AuthSubToken))
    self.assertEqual('123abc', token.token_string)
    self.assertEqual([], token.scopes)
    token._upgrade_token('Token=456def')
    self.assertEqual('456def', token.token_string)
    self.assertEqual([], token.scopes)
class SecureAuthSubTest(unittest.TestCase):
  """Tests secure-AuthSub data strings and RSA signature generation."""

  def test_build_data(self):
    """The data string is 'METHOD URL TIMESTAMP NONCE'."""
    request = atom.http_core.HttpRequest(method='PUT')
    request.uri = atom.http_core.Uri.parse_uri('http://example.com/foo?a=1')
    self.assertEqual(
        'PUT http://example.com/foo?a=1 1234567890 mynonce',
        gdata.gauth.build_auth_sub_data(request, 1234567890, 'mynonce'))

  def test_generate_signature(self):
    """Signing the data string with the test key yields a known value."""
    uri = atom.http_core.Uri(host='example.com', path='/foo',
                             query={'a': '1'})
    request = atom.http_core.HttpRequest(method='GET', uri=uri)
    data = gdata.gauth.build_auth_sub_data(request, 1134567890, 'p234908')
    self.assertEqual('GET http://example.com/foo?a=1 1134567890 p234908',
                     data)
    expected_sig = (
        'GeBfeIDnT41dvLquPgDB4U5D4hfxqaHk/5LX1kccNBnL4BjsHWU1djbEp7xp3BL9ab'
        'QtLrK7oa/aHEHtGRUZGg87O+ND8iDPR76WFXAruuN8O8GCMqCDdPduNPY++LYO4MdJ'
        'BZNY974Nn0m6Hc0/T4M1ElqvPhl61fkXMm+ElSM=')
    self.assertEqual(expected_sig,
                     gdata.gauth.generate_signature(data, PRIVATE_TEST_KEY))
class TokensToAndFromBlobsTest(unittest.TestCase):
  """Round-trips each token type through token_to_blob/token_from_blob.

  Blob format: a type prefix ('1c', '1s', '1r', '1h', '2o', ...) followed
  by '|'-separated fields; '|' and '%' inside fields are percent-escaped.
  """

  def test_client_login_conversion(self):
    """ClientLogin tokens keep their token string across the round trip."""
    token = gdata.gauth.ClientLoginToken('test|key')
    copy = gdata.gauth.token_from_blob(gdata.gauth.token_to_blob(token))
    self.assertEqual(token.token_string, copy.token_string)
    self.assert_(isinstance(copy, gdata.gauth.ClientLoginToken))

  def test_authsub_conversion(self):
    """AuthSub tokens preserve token string and scopes across round trip."""
    token = gdata.gauth.AuthSubToken('test|key')
    copy = gdata.gauth.token_from_blob(gdata.gauth.token_to_blob(token))
    self.assertEqual(token.token_string, copy.token_string)
    self.assert_(isinstance(copy, gdata.gauth.AuthSubToken))
    # Scopes containing the '|' delimiter must survive escaping.
    scopes = ['http://example.com', 'http://other||test', 'thir|d']
    token = gdata.gauth.AuthSubToken('key-=', scopes)
    copy = gdata.gauth.token_from_blob(gdata.gauth.token_to_blob(token))
    self.assertEqual(token.token_string, copy.token_string)
    self.assert_(isinstance(copy, gdata.gauth.AuthSubToken))
    self.assertEqual(token.scopes, scopes)

  def test_join_and_split(self):
    """'|' joins parts; literal '|' and '%' are percent-escaped."""
    token_string = gdata.gauth._join_token_parts('1x', 'test|string', '%x%',
                                                 '', None)
    self.assertEqual(token_string, '1x|test%7Cstring|%25x%25||')
    token_type, a, b, c, d = gdata.gauth._split_token_parts(token_string)
    self.assertEqual(token_type, '1x')
    self.assertEqual(a, 'test|string')
    self.assertEqual(b, '%x%')
    # Both empty-string and None parts come back as None.
    self.assert_(c is None)
    self.assert_(d is None)

  def test_secure_authsub_conversion(self):
    """SecureAuthSub blobs ('1s|...') carry token, RSA key, and scopes."""
    token = gdata.gauth.SecureAuthSubToken(
        '%^%', 'myRsaKey', ['http://example.com', 'http://example.org'])
    copy = gdata.gauth.token_from_blob(gdata.gauth.token_to_blob(token))
    self.assertEqual(copy.token_string, '%^%')
    self.assertEqual(copy.rsa_private_key, 'myRsaKey')
    self.assertEqual(copy.scopes,
                     ['http://example.com', 'http://example.org'])
    token = gdata.gauth.SecureAuthSubToken(rsa_private_key='f',
                                           token_string='b')
    blob = gdata.gauth.token_to_blob(token)
    self.assertEqual(blob, '1s|b|f')
    copy = gdata.gauth.token_from_blob(blob)
    self.assertEqual(copy.token_string, 'b')
    self.assertEqual(copy.rsa_private_key, 'f')
    self.assertEqual(copy.scopes, [])
    # Empty strings and None both serialize to empty fields and parse
    # back as None.
    token = gdata.gauth.SecureAuthSubToken(None, '')
    blob = gdata.gauth.token_to_blob(token)
    self.assertEqual(blob, '1s||')
    copy = gdata.gauth.token_from_blob(blob)
    self.assertEqual(copy.token_string, None)
    self.assertEqual(copy.rsa_private_key, None)
    self.assertEqual(copy.scopes, [])
    token = gdata.gauth.SecureAuthSubToken('', None)
    blob = gdata.gauth.token_to_blob(token)
    self.assertEqual(blob, '1s||')
    copy = gdata.gauth.token_from_blob(blob)
    self.assertEqual(copy.token_string, None)
    self.assertEqual(copy.rsa_private_key, None)
    self.assertEqual(copy.scopes, [])
    token = gdata.gauth.SecureAuthSubToken(
        None, None, ['http://example.net', 'http://google.com'])
    blob = gdata.gauth.token_to_blob(token)
    self.assertEqual(
        blob, '1s|||http%3A%2F%2Fexample.net|http%3A%2F%2Fgoogle.com')
    copy = gdata.gauth.token_from_blob(blob)
    self.assert_(copy.token_string is None)
    self.assert_(copy.rsa_private_key is None)
    self.assertEqual(copy.scopes, ['http://example.net', 'http://google.com'])

  def test_oauth_rsa_conversion(self):
    """OAuth RSA tokens ('1r|...') round-trip all seven fields."""
    token = gdata.gauth.OAuthRsaToken(
        'consumerKey', 'myRsa', 't', 'secret',
        gdata.gauth.AUTHORIZED_REQUEST_TOKEN, 'http://example.com/next',
        'verifier')
    blob = gdata.gauth.token_to_blob(token)
    self.assertEqual(
        blob, '1r|consumerKey|myRsa|t|secret|2|http%3A%2F%2Fexample.com'
        '%2Fnext|verifier')
    copy = gdata.gauth.token_from_blob(blob)
    self.assert_(isinstance(copy, gdata.gauth.OAuthRsaToken))
    self.assertEqual(copy.consumer_key, token.consumer_key)
    self.assertEqual(copy.rsa_private_key, token.rsa_private_key)
    self.assertEqual(copy.token, token.token)
    self.assertEqual(copy.token_secret, token.token_secret)
    self.assertEqual(copy.auth_state, token.auth_state)
    self.assertEqual(copy.next, token.next)
    self.assertEqual(copy.verifier, token.verifier)
    # An empty consumer key serializes to an empty field and parses to None.
    token = gdata.gauth.OAuthRsaToken(
        '', 'myRsa', 't', 'secret', 0)
    blob = gdata.gauth.token_to_blob(token)
    self.assertEqual(blob, '1r||myRsa|t|secret|0||')
    copy = gdata.gauth.token_from_blob(blob)
    self.assert_(isinstance(copy, gdata.gauth.OAuthRsaToken))
    self.assert_(copy.consumer_key != token.consumer_key)
    self.assert_(copy.consumer_key is None)
    self.assertEqual(copy.rsa_private_key, token.rsa_private_key)
    self.assertEqual(copy.token, token.token)
    self.assertEqual(copy.token_secret, token.token_secret)
    self.assertEqual(copy.auth_state, token.auth_state)
    self.assertEqual(copy.next, token.next)
    self.assert_(copy.next is None)
    self.assertEqual(copy.verifier, token.verifier)
    self.assert_(copy.verifier is None)
    token = gdata.gauth.OAuthRsaToken(
        rsa_private_key='myRsa', token='t', token_secret='secret',
        auth_state=gdata.gauth.ACCESS_TOKEN, verifier='v', consumer_key=None)
    blob = gdata.gauth.token_to_blob(token)
    self.assertEqual(blob, '1r||myRsa|t|secret|3||v')
    copy = gdata.gauth.token_from_blob(blob)
    self.assertEqual(copy.consumer_key, token.consumer_key)
    self.assert_(copy.consumer_key is None)
    self.assertEqual(copy.rsa_private_key, token.rsa_private_key)
    self.assertEqual(copy.token, token.token)
    self.assertEqual(copy.token_secret, token.token_secret)
    self.assertEqual(copy.auth_state, token.auth_state)
    self.assertEqual(copy.next, token.next)
    self.assert_(copy.next is None)
    self.assertEqual(copy.verifier, token.verifier)

  def test_oauth_hmac_conversion(self):
    """OAuth HMAC tokens ('1h|...') round-trip; ',' in secrets is escaped."""
    token = gdata.gauth.OAuthHmacToken(
        'consumerKey', 'consumerSecret', 't', 'secret',
        gdata.gauth.REQUEST_TOKEN, 'http://example.com/next', 'verifier')
    blob = gdata.gauth.token_to_blob(token)
    self.assertEqual(
        blob, '1h|consumerKey|consumerSecret|t|secret|1|http%3A%2F%2F'
        'example.com%2Fnext|verifier')
    copy = gdata.gauth.token_from_blob(blob)
    self.assert_(isinstance(copy, gdata.gauth.OAuthHmacToken))
    self.assertEqual(copy.consumer_key, token.consumer_key)
    self.assertEqual(copy.consumer_secret, token.consumer_secret)
    self.assertEqual(copy.token, token.token)
    self.assertEqual(copy.token_secret, token.token_secret)
    self.assertEqual(copy.auth_state, token.auth_state)
    self.assertEqual(copy.next, token.next)
    self.assertEqual(copy.verifier, token.verifier)
    token = gdata.gauth.OAuthHmacToken(
        consumer_secret='c,s', token='t', token_secret='secret',
        auth_state=7, verifier='v', consumer_key=None)
    blob = gdata.gauth.token_to_blob(token)
    self.assertEqual(blob, '1h||c%2Cs|t|secret|7||v')
    copy = gdata.gauth.token_from_blob(blob)
    self.assert_(isinstance(copy, gdata.gauth.OAuthHmacToken))
    self.assertEqual(copy.consumer_key, token.consumer_key)
    self.assert_(copy.consumer_key is None)
    self.assertEqual(copy.consumer_secret, token.consumer_secret)
    self.assertEqual(copy.token, token.token)
    self.assertEqual(copy.token_secret, token.token_secret)
    self.assertEqual(copy.auth_state, token.auth_state)
    self.assertEqual(copy.next, token.next)
    self.assert_(copy.next is None)
    self.assertEqual(copy.verifier, token.verifier)

  def test_oauth2_conversion(self):
    """OAuth2 tokens ('2o|...') round-trip; empty fields parse as None."""
    token = gdata.gauth.OAuth2Token(
        'clientId', 'clientSecret', 'https://www.google.com/calendar/feeds',
        'userAgent', 'https://accounts.google.com/o/oauth2/auth',
        'https://accounts.google.com/o/oauth2/token',
        'accessToken', 'refreshToken')
    blob = gdata.gauth.token_to_blob(token)
    self.assertEqual(
        blob, '2o|clientId|clientSecret|https%3A%2F%2Fwww.google.com%2F'
        'calendar%2Ffeeds|userAgent|https%3A%2F%2Faccounts.google.com%2F'
        'o%2Foauth2%2Fauth|https%3A%2F%2Faccounts.google.com%2Fo%2Foauth2'
        '%2Ftoken|accessToken|refreshToken')
    copy = gdata.gauth.token_from_blob(blob)
    self.assert_(isinstance(copy, gdata.gauth.OAuth2Token))
    self.assertEqual(copy.client_id, token.client_id)
    self.assertEqual(copy.client_secret, token.client_secret)
    self.assertEqual(copy.scope, token.scope)
    self.assertEqual(copy.user_agent, token.user_agent)
    self.assertEqual(copy.auth_uri, token.auth_uri)
    self.assertEqual(copy.token_uri, token.token_uri)
    self.assertEqual(copy.access_token, token.access_token)
    self.assertEqual(copy.refresh_token, token.refresh_token)
    token = gdata.gauth.OAuth2Token(
        'clientId', 'clientSecret', 'https://www.google.com/calendar/feeds',
        '', 'https://accounts.google.com/o/oauth2/auth',
        'https://accounts.google.com/o/oauth2/token',
        '', '')
    blob = gdata.gauth.token_to_blob(token)
    self.assertEqual(
        blob, '2o|clientId|clientSecret|https%3A%2F%2Fwww.google.com%2F'
        'calendar%2Ffeeds||https%3A%2F%2Faccounts.google.com%2F'
        'o%2Foauth2%2Fauth|https%3A%2F%2Faccounts.google.com%2Fo%2Foauth2'
        '%2Ftoken||')
    copy = gdata.gauth.token_from_blob(blob)
    self.assert_(isinstance(copy, gdata.gauth.OAuth2Token))
    self.assertEqual(copy.client_id, token.client_id)
    self.assertEqual(copy.client_secret, token.client_secret)
    self.assertEqual(copy.scope, token.scope)
    self.assert_(copy.user_agent is None)
    self.assertEqual(copy.auth_uri, token.auth_uri)
    self.assertEqual(copy.token_uri, token.token_uri)
    self.assert_(copy.access_token is None)
    self.assert_(copy.refresh_token is None)
    token = gdata.gauth.OAuth2Token(
        'clientId', 'clientSecret', 'https://www.google.com/calendar/feeds',
        None, 'https://accounts.google.com/o/oauth2/auth',
        'https://accounts.google.com/o/oauth2/token')
    blob = gdata.gauth.token_to_blob(token)
    self.assertEqual(
        blob, '2o|clientId|clientSecret|https%3A%2F%2Fwww.google.com%2F'
        'calendar%2Ffeeds||https%3A%2F%2Faccounts.google.com%2F'
        'o%2Foauth2%2Fauth|https%3A%2F%2Faccounts.google.com%2Fo%2Foauth2'
        '%2Ftoken||')
    copy = gdata.gauth.token_from_blob(blob)
    self.assert_(isinstance(copy, gdata.gauth.OAuth2Token))
    self.assertEqual(copy.client_id, token.client_id)
    self.assertEqual(copy.client_secret, token.client_secret)
    self.assertEqual(copy.scope, token.scope)
    self.assert_(copy.user_agent is None)
    self.assertEqual(copy.auth_uri, token.auth_uri)
    self.assertEqual(copy.token_uri, token.token_uri)
    self.assert_(copy.access_token is None)
    self.assert_(copy.refresh_token is None)

  def test_illegal_token_types(self):
    """Unknown token classes and unknown blob prefixes raise."""
    class MyToken(object):
      pass
    token = MyToken()
    self.assertRaises(gdata.gauth.UnsupportedTokenType,
                      gdata.gauth.token_to_blob, token)
    blob = '~~z'
    self.assertRaises(gdata.gauth.UnsupportedTokenType,
                      gdata.gauth.token_from_blob, blob)
class OAuthHmacTokenTests(unittest.TestCase):
def test_build_base_string(self):
request = atom.http_core.HttpRequest('http://example.com/', 'GET')
base_string = gdata.gauth.build_oauth_base_string(
request, 'example.org', '12345', gdata.gauth.HMAC_SHA1, 1246301653,
'1.0')
self.assertEqual(
base_string, 'GET&http%3A%2F%2Fexample.com%2F&oauth_callback%3Doob%2'
'6oauth_consumer_key%3Dexample.org%26oauth_nonce%3D12345%26oauth_sig'
'nature_method%3DHMAC-SHA1%26oauth_timestamp%3D1246301653%26oauth_ve'
'rsion%3D1.0')
# Test using example from documentation.
request = atom.http_core.HttpRequest(
'http://www.google.com/calendar/feeds/default/allcalendars/full'
'?orderby=starttime', 'GET')
base_string = gdata.gauth.build_oauth_base_string(
request, 'example.com', '4572616e48616d6d65724c61686176',
gdata.gauth.RSA_SHA1, 137131200, '1.0', token='1%2Fab3cd9j4ks73hf7g',
next='http://googlecodesamples.com/oauth_playground/index.php')
self.assertEqual(
base_string, 'GET&http%3A%2F%2Fwww.google.com%2Fcalendar%2Ffeeds%2Fd'
'efault%2Fallcalendars%2Ffull&oauth_callback%3Dhttp%253A%252F%252Fgo'
'oglecodesamples.com%252Foauth_playground%252Findex.php%26oauth_cons'
'umer_key%3Dexample.com%26oauth_nonce%3D4572616e48616d6d65724c616861'
'76%26oauth_signature_method%3DRSA-SHA1%26oauth_timestamp%3D13713120'
'0%26oauth_token%3D1%25252Fab3cd9j4ks73hf7g%26oauth_version%3D1.0%26'
'orderby%3Dstarttime')
# Test various defaults.
request = atom.http_core.HttpRequest('http://eXample.COM', 'get')
base_string = gdata.gauth.build_oauth_base_string(
request, 'example.org', '12345', gdata.gauth.HMAC_SHA1, 1246301653,
'1.0')
self.assertEqual(
base_string, 'GET&http%3A%2F%2Fexample.com%2F&oauth_callback%3Doob%2'
'6oauth_consumer_key%3Dexample.org%26oauth_nonce%3D12345%26oauth_sig'
'nature_method%3DHMAC-SHA1%26oauth_timestamp%3D1246301653%26oauth_ve'
'rsion%3D1.0')
request = atom.http_core.HttpRequest('https://eXample.COM:443', 'get')
base_string = gdata.gauth.build_oauth_base_string(
request, 'example.org', '12345', gdata.gauth.HMAC_SHA1, 1246301653,
'1.0', 'http://googlecodesamples.com/oauth_playground/index.php')
self.assertEqual(
base_string, 'GET&https%3A%2F%2Fexample.com%2F&oauth_callback%3Dhttp'
'%253A%252F%252Fgooglecodesamples.com%252Foauth_playground%252Findex'
'.php%26oauth_consumer_key%3Dexample.org%26oauth_nonce%3D12345%26oau'
'th_signature_method%3DHMAC-SHA1%26oauth_timestamp%3D1246301653%26oa'
'uth_version%3D1.0')
request = atom.http_core.HttpRequest('http://eXample.COM:443', 'get')
base_string = gdata.gauth.build_oauth_base_string(
request, 'example.org', '12345', gdata.gauth.HMAC_SHA1, 1246301653,
'1.0')
self.assertEqual(
base_string, 'GET&http%3A%2F%2Fexample.com%3A443%2F&oauth_callback%3'
'Doob%26oauth_consumer_key%3De'
'xample.org%26oauth_nonce%3D12345%26oauth_signature_method%3DHMAC-SH'
'A1%26oauth_timestamp%3D1246301653%26oauth_version%3D1.0')
request = atom.http_core.HttpRequest(
atom.http_core.Uri(host='eXample.COM'), 'GET')
base_string = gdata.gauth.build_oauth_base_string(
request, 'example.org', '12345', gdata.gauth.HMAC_SHA1, 1246301653,
'1.0', next='oob')
self.assertEqual(
base_string, 'GET&http%3A%2F%2Fexample.com%2F&oauth_callback%3Doob%2'
'6oauth_consumer_key%3Dexample.org%26oauth_nonce%3D12345%26oauth_sig'
'nature_method%3DHMAC-SHA1%26oauth_timestamp%3D1246301653%26oauth_ve'
'rsion%3D1.0')
request = atom.http_core.HttpRequest(
'https://www.google.com/accounts/OAuthGetRequestToken', 'GET')
request.uri.query['scope'] = ('https://docs.google.com/feeds/'
' http://docs.google.com/feeds/')
base_string = gdata.gauth.build_oauth_base_string(
request, 'anonymous', '48522759', gdata.gauth.HMAC_SHA1, 1246489532,
'1.0', 'http://googlecodesamples.com/oauth_playground/index.php')
self.assertEqual(
base_string, 'GET&https%3A%2F%2Fwww.google.com%2Faccounts%2FOAuthGet'
'RequestToken&oauth_callback%3Dhttp%253A%252F%252Fgooglecodesamples.'
'com%252Foauth_playground%252Findex.php%26oauth_consumer_key%3Danony'
'mous%26oauth_nonce%3D4852275'
'9%26oauth_signature_method%3DHMAC-SHA1%26oauth_timestamp%3D12464895'
'32%26oauth_version%3D1.0%26scope%3Dhttps%253A%252F%252Fdocs.google.'
'com%252Ffeeds%252F%2520http%253A%252F%252Fdocs.google.com%252Ffeeds'
'%252F')
  def test_generate_hmac_signature(self):
    """HMAC-SHA1 signatures must match the OAuth playground reference values."""
    # Use the example from the OAuth playground:
    # http://googlecodesamples.com/oauth_playground/
    request = atom.http_core.HttpRequest(
        'https://www.google.com/accounts/OAuthGetRequestToken?'
        'scope=http%3A%2F%2Fwww.blogger.com%2Ffeeds%2F', 'GET')
    signature = gdata.gauth.generate_hmac_signature(
        request, 'anonymous', 'anonymous', '1246491360',
        'c0155b3f28697c029e7a62efff44bd46', '1.0',
        next='http://googlecodesamples.com/oauth_playground/index.php')
    self.assertEqual(signature, '5a2GPdtAY3LWYv8IdiT3wp1Coeg=')
    # Try the same request but with a non escaped Uri object.
    # The scope set via the query dict (unescaped) must produce the same
    # signature as the pre-escaped URI above.
    request = atom.http_core.HttpRequest(
        'https://www.google.com/accounts/OAuthGetRequestToken', 'GET')
    request.uri.query['scope'] = 'http://www.blogger.com/feeds/'
    signature = gdata.gauth.generate_hmac_signature(
        request, 'anonymous', 'anonymous', '1246491360',
        'c0155b3f28697c029e7a62efff44bd46', '1.0',
        'http://googlecodesamples.com/oauth_playground/index.php')
    self.assertEqual(signature, '5a2GPdtAY3LWYv8IdiT3wp1Coeg=')
    # A different request also checked against the OAuth playground.
    # Note the timestamp is passed as an int here (a str above); both
    # forms must be accepted.
    request = atom.http_core.HttpRequest(
        'https://www.google.com/accounts/OAuthGetRequestToken', 'GET')
    request.uri.query['scope'] = ('https://www.google.com/analytics/feeds/ '
                                  'http://www.google.com/base/feeds/ '
                                  'http://www.google.com/calendar/feeds/')
    signature = gdata.gauth.generate_hmac_signature(
        request, 'anonymous', 'anonymous', 1246491797,
        '33209c4d7a09be4eb1d6ff18e00f8548', '1.0',
        next='http://googlecodesamples.com/oauth_playground/index.php')
    self.assertEqual(signature, 'kFAgTTFDIWz4/xAabIlrcZZMTq8=')
class OAuthRsaTokenTests(unittest.TestCase):
  """Tests RSA-SHA1 signing of OAuth requests."""

  def test_generate_rsa_signature(self):
    """An RSA-SHA1 signature over a fixed request matches the known value."""
    request = atom.http_core.HttpRequest(
        'https://www.google.com/accounts/OAuthGetRequestToken?'
        'scope=http%3A%2F%2Fwww.blogger.com%2Ffeeds%2F', 'GET')
    # PRIVATE_TEST_KEY is a module-level fixture key; RSA signatures are
    # deterministic for a fixed key, timestamp and nonce.
    signature = gdata.gauth.generate_rsa_signature(
        request, 'anonymous', PRIVATE_TEST_KEY, '1246491360',
        'c0155b3f28697c029e7a62efff44bd46', '1.0',
        next='http://googlecodesamples.com/oauth_playground/index.php')
    self.assertEqual(
        signature,
        'bfMantdttKaTrwoxU87JiXmMeXhAiXPiq79a5XmLlOYwwlX06Pu7CafMp7hW1fPeZtL'
        '4o9Sz3NvPI8GECCaZk7n5vi1EJ5/wfIQbddrC8j45joBG6gFSf4tRJct82dSyn6bd71'
        'knwPZH1sKK46Y0ePJvEIDI3JDd7pRZuMM2sN8=')
class OAuth2TokenTests(unittest.TestCase):
  """Tests for the OAuth 2.0 token helper."""

  def test_generate_authorize_url(self):
    """Authorize URLs reflect redirect_uri, response_type and access_type."""
    token = gdata.gauth.OAuth2Token('clientId', 'clientSecret',
                                    'https://www.google.com/calendar/feeds',
                                    'userAgent')
    # Defaults: out-of-band redirect, 'code' response, offline access.
    url = token.generate_authorize_url()
    self.assertEqual(url,
        'https://accounts.google.com/o/oauth2/auth?scope=https%3A%2F%2Fwww.google'
        '.com%2Fcalendar%2Ffeeds&redirect_uri=oob&response_type=code&client_id='
        'clientId&access_type=offline')
    # Explicit redirect URI (must be percent-encoded) and 'token' response.
    url = token.generate_authorize_url('https://www.example.com/redirect', 'token')
    self.assertEqual(url,
        'https://accounts.google.com/o/oauth2/auth?scope=https%3A%2F%2Fwww.google'
        '.com%2Fcalendar%2Ffeeds&redirect_uri=https%3A%2F%2Fwww.example.com%2F'
        'redirect&response_type=token&client_id=clientId&access_type=offline')
    # Overriding only access_type keeps the other defaults.
    url = token.generate_authorize_url(access_type='online')
    self.assertEqual(url,
        'https://accounts.google.com/o/oauth2/auth?scope=https%3A%2F%2Fwww.google'
        '.com%2Fcalendar%2Ffeeds&redirect_uri=oob&response_type=code&client_id='
        'clientId&access_type=online')

  def test_modify_request(self):
    """modify_request attaches the access token as an Authorization header."""
    token = gdata.gauth.OAuth2Token('clientId', 'clientSecret',
                                    'https://www.google.com/calendar/feeds',
                                    'userAgent', access_token='accessToken')
    request = atom.http_core.HttpRequest()
    token.modify_request(request)
    self.assertEqual(request.headers['Authorization'], 'OAuth accessToken')
class OAuthHeaderTest(unittest.TestCase):
  """Checks the OAuth Authorization header built by generate_auth_header."""

  def _assert_contains(self, header, fragment):
    # str.find returns -1 when the fragment is absent.
    self.assert_(header.find(fragment) > -1)

  def test_generate_auth_header(self):
    """The header carries every OAuth parameter, percent-encoded as needed."""
    header = gdata.gauth.generate_auth_header(
        'consumerkey', 1234567890, 'mynonce', 'unknown_sig_type', 'sig')
    self.assert_(header.startswith('OAuth'))
    for fragment in ('oauth_nonce="mynonce"',
                     'oauth_timestamp="1234567890"',
                     'oauth_consumer_key="consumerkey"',
                     'oauth_signature_method="unknown_sig_type"',
                     'oauth_version="1.0"',
                     'oauth_signature="sig"'):
      self._assert_contains(header, fragment)
    # Reserved characters in parameter values must be percent-encoded.
    header = gdata.gauth.generate_auth_header(
        'consumer/key', 1234567890, 'ab%&33', '', 'ab/+-_=')
    for fragment in ('oauth_nonce="ab%25%2633"',
                     'oauth_consumer_key="consumer%2Fkey"',
                     'oauth_signature_method=""',
                     'oauth_signature="ab%2F%2B-_%3D"'):
      self._assert_contains(header, fragment)
class OAuthGetRequestToken(unittest.TestCase):
  """Tests building request-token requests and parsing token responses."""

  # A canonical request-token response body and the values it decodes to.
  TOKEN_BODY = ('oauth_token=4%2F5bNFM_efIu3yN-E9RrF1KfZzOAZG'
                '&oauth_token_secret=%2B4O49V9WUOkjXgpOobAtgYzy'
                '&oauth_callback_confirmed=true')
  EXPECTED_TOKEN = '4/5bNFM_efIu3yN-E9RrF1KfZzOAZG'
  EXPECTED_SECRET = '+4O49V9WUOkjXgpOobAtgYzy'

  def _verify_request_token_request(self, request, method_name):
    """Shared assertions on a generated request-token HTTP request."""
    request_uri = str(request.uri)
    self.assert_('http%3A%2F%2Fwww.blogger.com%2Ffeeds%2F' in request_uri)
    self.assert_(
        'http%3A%2F%2Fwww.google.com%2Fcalendar%2Ffeeds%2F' in request_uri)
    auth_header = request.headers['Authorization']
    self.assert_('oauth_consumer_key="anonymous"' in auth_header)
    self.assert_('oauth_signature_method="%s"' % method_name in auth_header)
    self.assert_('oauth_version="1.0"' in auth_header)
    self.assert_('oauth_signature="' in auth_header)
    self.assert_('oauth_nonce="' in auth_header)
    self.assert_('oauth_timestamp="' in auth_header)

  def test_request_hmac_request_token(self):
    """HMAC-SHA1 request-token requests carry scopes and OAuth params."""
    request = gdata.gauth.generate_request_for_request_token(
        'anonymous', gdata.gauth.HMAC_SHA1,
        ['http://www.blogger.com/feeds/',
         'http://www.google.com/calendar/feeds/'],
        consumer_secret='anonymous')
    self._verify_request_token_request(request, 'HMAC-SHA1')

  def test_request_rsa_request_token(self):
    """RSA-SHA1 request-token requests carry scopes and OAuth params."""
    request = gdata.gauth.generate_request_for_request_token(
        'anonymous', gdata.gauth.RSA_SHA1,
        ['http://www.blogger.com/feeds/',
         'http://www.google.com/calendar/feeds/'],
        rsa_key=PRIVATE_TEST_KEY)
    self._verify_request_token_request(request, 'RSA-SHA1')

  def test_extract_token_from_body(self):
    """Token and secret are URL-decoded out of a response body."""
    token, secret = gdata.gauth.oauth_token_info_from_body(self.TOKEN_BODY)
    self.assertEqual(token, self.EXPECTED_TOKEN)
    self.assertEqual(secret, self.EXPECTED_SECRET)

  def test_hmac_request_token_from_body(self):
    """An HMAC request token is populated from the response body."""
    request_token = gdata.gauth.hmac_token_from_body(
        self.TOKEN_BODY, 'myKey', 'mySecret', True)
    self.assertEqual(request_token.consumer_key, 'myKey')
    self.assertEqual(request_token.consumer_secret, 'mySecret')
    self.assertEqual(request_token.token, self.EXPECTED_TOKEN)
    self.assertEqual(request_token.token_secret, self.EXPECTED_SECRET)
    self.assertEqual(request_token.auth_state, gdata.gauth.REQUEST_TOKEN)

  def test_rsa_request_token_from_body(self):
    """An RSA request token is populated from the response body."""
    request_token = gdata.gauth.rsa_token_from_body(
        self.TOKEN_BODY, 'myKey', 'rsaKey', True)
    self.assertEqual(request_token.consumer_key, 'myKey')
    self.assertEqual(request_token.rsa_private_key, 'rsaKey')
    self.assertEqual(request_token.token, self.EXPECTED_TOKEN)
    self.assertEqual(request_token.token_secret, self.EXPECTED_SECRET)
    self.assertEqual(request_token.auth_state, gdata.gauth.REQUEST_TOKEN)
class OAuthAuthorizeToken(unittest.TestCase):
  """Tests authorization-URL generation and token extraction from URLs."""

  def test_generate_authorization_url(self):
    """The authorize URL targets the Google endpoint with an escaped token."""
    auth_url = str(gdata.gauth.generate_oauth_authorization_url('/+=aosdpikk'))
    self.assert_(auth_url.startswith(
        'https://www.google.com/accounts/OAuthAuthorizeToken'))
    # Reserved characters in the token must be percent-encoded.
    self.assert_('oauth_token=%2F%2B%3Daosdpikk' in auth_url)

  def test_extract_auth_token(self):
    """Token and verifier are parsed out of the callback URL."""
    url = ('http://www.example.com/test?oauth_token='
           'CKF50YzIHxCT85KMAg&oauth_verifier=123zzz')
    info = gdata.gauth.oauth_token_info_from_url(url)
    self.assertEqual(info[0], 'CKF50YzIHxCT85KMAg')
    self.assertEqual(info[1], '123zzz')
class FindScopesForService(unittest.TestCase):
  """Tests scope lookup for named Google Data services."""

  def test_find_all_scopes(self):
    """With no filter, every registered scope is returned."""
    expected_total = sum(
        len(scopes) for scopes in gdata.gauth.AUTH_SCOPES.itervalues())
    self.assertEqual(expected_total,
                     len(gdata.gauth.find_scopes_for_services()))

  def test_single_service(self):
    """A single service name maps to its one scope."""
    self.assertEqual(
        gdata.gauth.FindScopesForServices(('codesearch',)),
        ['http://www.google.com/codesearch/feeds/'])

  def test_multiple_services(self):
    """Multiple services yield the union of their scopes."""
    found = gdata.gauth.find_scopes_for_services(('jotspot', 'wise'))
    self.assertEqual(
        set(found),
        set(['http://sites.google.com/feeds/',
             'https://sites.google.com/feeds/',
             'https://spreadsheets.google.com/feeds/']))
def suite():
  """Bundle every auth test case in this module into one suite."""
  cases = [AuthSubTest, TokensToAndFromBlobsTest, OAuthHmacTokenTests,
           OAuthRsaTokenTests, OAuthHeaderTest, OAuthGetRequestToken,
           OAuthAuthorizeToken, FindScopesForService]
  return conf.build_suite(cases)


if __name__ == '__main__':
  unittest.main()
| Python |
#!/usr/bin/python
#
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module is used for version 2 of the Google Data APIs.
# These tests attempt to connect to Google servers.
"""Live integration tests of the Google Documents List API.
RESOURCES: Dict of test resource data, keyed on resource type.
"""
__author__ = 'vicfryzel@google.com (Vic Fryzel)'
import os
import os.path
import tempfile
import time
import unittest
import gdata.client
import gdata.data
import gdata.gauth
import gdata.docs.client
import gdata.docs.data
import gdata.test_config as conf
# Test fixture data, keyed on resource type. Each value is a 6-tuple whose
# fields are copied by suite() onto every parameterized test case as:
#   resource_label     - gdata kind label for the entry
#   resource_title     - title used when creating the resource
#   resource_path      - file to upload at creation, or None
#   resource_alt_path  - file to upload on update, or None
#   resource_mime      - MIME type of the uploaded media, or None
#   resource_export    - export/download format for downloads, or None
RESOURCES = {
    'document': (gdata.docs.data.DOCUMENT_LABEL,
                 'Text Document',
                 'data/test.0.doc',
                 'data/test.1.doc',
                 'application/msword',
                 'doc'),
    'empty_document': (gdata.docs.data.DOCUMENT_LABEL,
                       'Empty Text Document',
                       None,
                       'data/test.1.doc',
                       'application/msword',
                       'txt'),
    'spreadsheet': (gdata.docs.data.SPREADSHEET_LABEL,
                    'Spreadsheet',
                    'data/test.0.csv',
                    'data/test.1.csv',
                    'text/csv',
                    'csv'),
    'presentation': (gdata.docs.data.PRESENTATION_LABEL,
                     'Presentation',
                     'data/test.0.ppt',
                     'data/test.1.ppt',
                     'application/vnd.ms-powerpoint',
                     'ppt'),
    'drawing': (gdata.docs.data.DRAWING_LABEL,
                'Drawing',
                'data/test.0.wmf',
                'data/test.1.wmf',
                'application/x-msmetafile',
                'png'),
    'pdf': (gdata.docs.data.PDF_LABEL,
            'PDF',
            'data/test.0.pdf',
            'data/test.1.pdf',
            'application/pdf',
            None),
    'file': (gdata.docs.data.FILE_LABEL,
             'File',
             'data/test.0.bin',
             'data/test.1.bin',
             'application/octet-stream',
             None),
    'collection': (gdata.docs.data.COLLECTION_LABEL,
                   'Collection A',
                   None,
                   None,
                   None,
                   None)
}
class DocsTestCase(unittest.TestCase):
  """Base class for live Documents List API tests.

  Instances are parameterized by suite(), which sets resource_type,
  resource_label, resource_title, resource_path, resource_alt_path,
  resource_mime and resource_export attributes on each test.
  """

  def shortDescription(self):
    """Include the resource type in the test description when present."""
    if hasattr(self, 'resource_type'):
      return '%s for %s' % (self.__class__.__name__, self.resource_type)
    else:
      return self.__class__.__name__

  def _delete(self, resource):
    """Permanently delete a resource, best-effort (errors are ignored)."""
    try:
      self.client.DeleteResource(resource, permanent=True, force=True)
    except:
      pass

  def _delete_all(self):
    """Permanently delete every resource in the account, trash included."""
    resources = self.client.GetAllResources(
        '/feeds/default/private/full?showfolders=true&showdeleted=true')
    for resource in resources:
      self._delete(resource)

  def _create(self):
    """Create the test resource described by the resource_* attributes.

    Uploads media when both a path and a MIME type are configured;
    otherwise creates an empty entry (e.g. a collection).
    """
    ms = None
    if self.resource_path is not None and self.resource_mime is not None:
      ms = gdata.data.MediaSource(
          file_path=os.path.join(os.path.dirname(__file__),
                                 self.resource_path),
          content_type=self.resource_mime)
    entry = gdata.docs.data.Resource(type=self.resource_label,
                                     title=self.resource_title)
    self.resource = self.client.CreateResource(entry, media=ms)

  def _update(self):
    """Update the test resource's title (and media when configured).

    Returns the updated Resource entry; requests a new revision.
    """
    ms = None
    if self.resource_alt_path is not None and self.resource_mime is not None:
      ms = gdata.data.MediaSource(
          file_path=os.path.join(os.path.dirname(__file__),
                                 self.resource_alt_path),
          content_type=self.resource_mime)
    self.resource.title.text = '%s Updated' % self.resource_title
    return self.client.UpdateResource(self.resource, media=ms, force=True,
                                      new_revision=True)

  def setUp(self):
    """Configure an authorized client and create the test resource.

    Creation is attempted up to three times since the live service can
    fail transiently; the final failure tears down and re-raises.
    """
    if conf.options.get_value('runlive') != 'true':
      raise RuntimeError('Live tests require --runlive true')
    self.client = gdata.docs.client.DocsClient()
    if conf.options.get_value('ssl') == 'false':
      self.client.ssl = False
    conf.configure_client(self.client, 'DocsTest', self.client.auth_service)
    conf.configure_cache(self.client, str(self.__class__))
    if conf.options.get_value('clean') == 'true':
      self._delete_all()
    tries = 0
    while tries < 3:
      try:
        tries += 1
        self._create()
        break
      except gdata.client.RequestError:
        if tries >= 2:
          self.tearDown()
          raise

  def tearDown(self):
    """Remove test resources (or everything, when cleaning) and close."""
    if conf.options.get_value('runlive') == 'true':
      if conf.options.get_value('clean') == 'true':
        self._delete_all()
      else:
        try:
          self._delete(self.resource)
        except:
          pass
    conf.close_client(self.client)
class ResourcesTest(DocsTestCase):
  """CRUD tests against the single resource created in DocsTestCase.setUp."""

  def testGetAllResources(self):
    """GetAllResources returns Resource entries; only setUp's resource exists."""
    results = self.client.GetAllResources(
        '/feeds/default/private/full?showfolders=true')
    self.assert_(all(isinstance(item, gdata.docs.data.Resource) \
        for item in results))
    self.assertEqual(len(results), 1)

  def testGetResources(self):
    """GetResources returns a ResourceFeed honoring the limit argument."""
    feed = self.client.GetResources(
        '/feeds/default/private/full?showfolders=true', limit=1)
    self.assert_(isinstance(feed, gdata.docs.data.ResourceFeed))
    self.assertEqual(len(feed.entry), 1)

  def testGetResource(self):
    """The resource is fetchable by entry, full id, bare id, and self link."""
    # By entry object.
    entry = self.client.GetResource(self.resource)
    self.assert_(isinstance(entry, gdata.docs.data.Resource))
    self.assert_(entry.id.text is not None)
    self.assert_(entry.title.text is not None)
    self.assert_(entry.resource_id.text is not None)
    self.assert_(entry.title.text is not None)
    # By the full typed resource id (e.g. 'document:abc123').
    entry = self.client.GetResourceById(self.resource.resource_id.text)
    self.assert_(isinstance(entry, gdata.docs.data.Resource))
    self.assert_(entry.id.text is not None)
    self.assert_(entry.title.text is not None)
    self.assert_(entry.resource_id.text is not None)
    self.assert_(entry.title.text is not None)
    # By the id portion alone (text after the ':').
    entry = self.client.GetResourceById(
        self.resource.resource_id.text.split(':')[1])
    self.assert_(isinstance(entry, gdata.docs.data.Resource))
    self.assert_(entry.id.text is not None)
    self.assert_(entry.title.text is not None)
    self.assert_(entry.resource_id.text is not None)
    self.assert_(entry.title.text is not None)
    # By the entry's self link.
    entry = self.client.GetResourceBySelfLink(
        self.resource.GetSelfLink().href)
    self.assert_(isinstance(entry, gdata.docs.data.Resource))
    self.assert_(entry.id.text is not None)
    self.assert_(entry.title.text is not None)
    self.assert_(entry.resource_id.text is not None)
    self.assert_(entry.title.text is not None)

  def testMoveResource(self):
    """Moving a resource into a collection adds it to that collection."""
    entry = gdata.docs.data.Resource(
        type=gdata.docs.data.COLLECTION_LABEL,
        title='Collection B')
    collection = self.client.CreateResource(entry)
    # Start off in 0 collections
    self.assertEqual(len(self.resource.InCollections()), 0)
    # Move resource into collection
    entry = self.client.MoveResource(self.resource, collection)
    self.assertEqual(len(entry.InCollections()), 1)
    self.assertEqual(entry.InCollections()[0].title, collection.title.text)
    self.client.DeleteResource(collection, permanent=True, force=True)

  def testCopyResource(self):
    """Copy works for doc/spreadsheet/presentation; others raise NotImplemented."""
    copy_title = '%s Copy' % self.resource_title
    # Copy only supported for document, spreadsheet, presentation types
    if self.resource_type in ['document', 'empty_document', 'spreadsheet',
                              'presentation']:
      copy = self.client.CopyResource(self.resource, copy_title)
      self.assertEqual(copy.title.text, copy_title)
      self.client.DeleteResource(copy, permanent=True, force=True)
    # TODO(vicfryzel): Expect appropriate error for drawings.
    elif self.resource_type != 'drawing':
      self.assertRaises(gdata.client.NotImplemented, self.client.CopyResource,
                        self.resource, copy_title)

  def testDownloadResource(self):
    """Resources download to a file; collections and bogus entries fail."""
    tmp = tempfile.mkstemp()
    if self.resource_type != 'collection':
      if self.resource_export is not None:
        # Request the configured export format explicitly.
        extra_params = {'exportFormat': self.resource_export,
                        'format': self.resource_export}
        self.client.DownloadResource(self.resource, tmp[1],
                                     extra_params=extra_params)
      else:
        self.client.DownloadResource(self.resource, tmp[1])
    else:
      # Cannot download collections
      self.assertRaises(ValueError, self.client.DownloadResource,
                        self.resource, tmp[1])
    # Should get a 404
    # NOTE(review): the entry below was never created server-side, so the
    # client appears to fail locally with AttributeError (no download link)
    # rather than an HTTP 404 — confirm intent.
    entry = gdata.docs.data.Resource(type=gdata.docs.data.DOCUMENT_LABEL,
                                     title='Does Not Exist')
    self.assertRaises(AttributeError, self.client.DownloadResource, entry,
                      tmp[1])
    os.close(tmp[0])
    os.remove(tmp[1])

  def testDownloadResourceToMemory(self):
    """Resources download into memory; collections raise ValueError."""
    if self.resource_type != 'collection':
      data = None
      if self.resource_export is not None:
        extra_params = {'exportFormat': self.resource_export,
                        'format': self.resource_export}
        data = self.client.DownloadResourceToMemory(
            self.resource, extra_params=extra_params)
      else:
        data = self.client.DownloadResourceToMemory(self.resource)
      if self.resource_type == 'empty_document':
        # An exported empty document is expected to be exactly 3 bytes —
        # presumably a UTF-8 BOM; TODO confirm.
        self.assertEqual(len(data), 3)
      else:
        self.assertNotEqual(len(data), 0)
    else:
      # Cannot download collections
      self.assertRaises(ValueError, self.client.DownloadResourceToMemory,
                        self.resource)

  def testDelete(self):
    """Non-permanent delete trashes the resource; permanent delete removes it."""
    self.assertEqual(self.resource.deleted, None)
    self.client.DeleteResource(self.resource, force=True)
    self.resource = self.client.GetResource(self.resource)
    self.assertNotEqual(self.resource.deleted, None)
    self.client.DeleteResource(self.resource, permanent=True, force=True)
    self.assertRaises(gdata.client.RequestError, self.client.GetResource,
                      self.resource)
class AclTest(DocsTestCase):
  """Tests the ACL feed of the pre-created resource."""

  def testGetAcl(self):
    """A new resource has exactly one ACL entry (presumably its owner)."""
    acl_feed = self.client.GetResourceAcl(self.resource)
    self.assert_(isinstance(acl_feed, gdata.docs.data.AclFeed))
    self.assertEqual(len(acl_feed.entry), 1)
    self.assert_(isinstance(acl_feed.entry[0], gdata.docs.data.AclEntry))
    self.assert_(acl_feed.entry[0].scope is not None)
    self.assert_(acl_feed.entry[0].role is not None)

  def testGetAclEntry(self):
    """An ACL entry re-fetched via GetAclEntry matches the feed entry."""
    acl_feed = self.client.GetResourceAcl(self.resource)
    acl_entry = acl_feed.entry[0]
    same_acl_entry = self.client.GetAclEntry(acl_entry)
    self.assert_(isinstance(same_acl_entry, gdata.docs.data.AclEntry))
    self.assertEqual(acl_entry.GetSelfLink().href,
                     same_acl_entry.GetSelfLink().href)
    self.assertEqual(acl_entry.title.text, same_acl_entry.title.text)

  def testAddAclEntry(self):
    """Adding a keyed ACL entry preserves scope type and keyed role."""
    acl_entry_to_add = gdata.docs.data.AclEntry.GetInstance(
        role='writer', scope_type='default', key=True)
    new_acl_entry = self.client.AddAclEntry(self.resource, acl_entry_to_add)
    self.assertEqual(acl_entry_to_add.scope.type, new_acl_entry.scope.type)
    self.assertEqual(new_acl_entry.scope.value, None)
    # Key will always be overridden on add
    self.assertEqual(acl_entry_to_add.with_key.role.value,
                     new_acl_entry.with_key.role.value)
    acl_feed = self.client.GetResourceAcl(self.resource)
    self.assert_(isinstance(acl_feed, gdata.docs.data.AclFeed))
    self.assert_(isinstance(acl_feed.entry[0], gdata.docs.data.AclEntry))
    self.assert_(isinstance(acl_feed.entry[1], gdata.docs.data.AclEntry))

  def testUpdateAclEntry(self):
    """An added keyed entry can be rewritten to a plain user/role entry."""
    acl_entry_to_add = gdata.docs.data.AclEntry.GetInstance(
        role='reader', scope_type='user', scope_value='jeff@example.com',
        key=True)
    other_acl_entry = gdata.docs.data.AclEntry.GetInstance(
        role='writer', scope_type='user', scope_value='jeff@example.com')
    new_acl_entry = self.client.AddAclEntry(self.resource, acl_entry_to_add)
    # Strip the key and swap in the other entry's scope/role before updating.
    new_acl_entry.with_key = None
    new_acl_entry.scope = other_acl_entry.scope
    new_acl_entry.role = other_acl_entry.role
    updated_acl_entry = self.client.UpdateAclEntry(new_acl_entry)
    self.assertEqual(updated_acl_entry.GetSelfLink().href,
                     new_acl_entry.GetSelfLink().href)
    self.assertEqual(updated_acl_entry.title.text, new_acl_entry.title.text)
    self.assertEqual(updated_acl_entry.scope.type, other_acl_entry.scope.type)
    self.assertEqual(updated_acl_entry.scope.value, other_acl_entry.scope.value)
    self.assertEqual(updated_acl_entry.role.value, other_acl_entry.role.value)
    self.assertEqual(updated_acl_entry.with_key, None)

  def testDeleteAclEntry(self):
    """Deleting an added ACL entry returns the feed to a single entry."""
    acl_entry_to_add = gdata.docs.data.AclEntry.GetInstance(
        role='writer', scope_type='user', scope_value='joe@example.com',
        key=True)
    acl_feed = self.client.GetResourceAcl(self.resource)
    new_acl_entry = self.client.AddAclEntry(self.resource, acl_entry_to_add)
    acl_feed = self.client.GetResourceAcl(self.resource)
    self.assert_(isinstance(acl_feed, gdata.docs.data.AclFeed))
    self.assertEqual(len(acl_feed.entry), 2)
    self.assert_(isinstance(acl_feed.entry[0], gdata.docs.data.AclEntry))
    self.assert_(isinstance(acl_feed.entry[1], gdata.docs.data.AclEntry))
    self.client.DeleteAclEntry(new_acl_entry)
    acl_feed = self.client.GetResourceAcl(self.resource)
    self.assert_(isinstance(acl_feed, gdata.docs.data.AclFeed))
    self.assertEqual(len(acl_feed.entry), 1)
    self.assert_(isinstance(acl_feed.entry[0], gdata.docs.data.AclEntry))
class RevisionsTest(DocsTestCase):
  """Tests the revision history of the pre-created resource.

  Several branches special-case 'presentation' because newly created
  presentations currently start with two revisions instead of one, and
  'drawing' because drawings do not yet support update.
  """

  def testGetRevisions(self):
    """A new resource exposes a RevisionFeed (collections have none)."""
    # There are no revisions of collections
    if self.resource_type != 'collection':
      revisions = self.client.GetRevisions(self.resource)
      self.assert_(isinstance(revisions, gdata.docs.data.RevisionFeed))
      self.assert_(isinstance(revisions.entry[0], gdata.docs.data.Revision))
      # Currently, there is a bug where new presentations have 2 revisions.
      if self.resource_type != 'presentation':
        self.assertEqual(len(revisions.entry), 1)

  def testGetRevision(self):
    """A revision re-fetched via GetRevision matches the feed entry."""
    # There are no revisions of collections
    if self.resource_type != 'collection':
      revisions = self.client.GetRevisions(self.resource)
      entry = revisions.entry[0]
      new_entry = self.client.GetRevision(entry)
      self.assertEqual(entry.GetSelfLink().href, new_entry.GetSelfLink().href)
      self.assertEqual(entry.title.text, new_entry.title.text)

  def testGetRevisionBySelfLink(self):
    """A revision can also be fetched by its self link."""
    # There are no revisions of collections
    if self.resource_type != 'collection':
      revisions = self.client.GetRevisions(self.resource)
      entry = revisions.entry[0]
      new_entry = self.client.GetRevisionBySelfLink(entry.GetSelfLink().href)
      self.assertEqual(entry.GetSelfLink().href, new_entry.GetSelfLink().href)
      self.assertEqual(entry.title.text, new_entry.title.text)

  def testMultipleRevisionsAndUpdateResource(self):
    """Updating a resource adds exactly one revision."""
    if self.resource_type not in ['collection', 'presentation']:
      revisions = self.client.GetRevisions(self.resource)
      self.assertEqual(len(revisions.entry), 1)
    # Currently, there is a bug where uploaded presentations have 2 revisions.
    elif self.resource_type == 'presentation':
      revisions = self.client.GetRevisions(self.resource)
      self.assertEqual(len(revisions.entry), 2)
    # Drawings do not currently support update, thus the rest of these
    # tests do not yet work as expected.
    if self.resource_type == 'drawing':
      return
    entry = self._update()
    self.assertEqual(entry.title.text, '%s Updated' % self.resource_title)
    if self.resource_type != 'collection':
      revisions = self.client.GetRevisions(entry)
      self.assert_(isinstance(revisions, gdata.docs.data.RevisionFeed))
      if self.resource_type == 'presentation':
        self.assertEqual(len(revisions.entry), 3)
        self.assert_(isinstance(revisions.entry[2], gdata.docs.data.Revision))
      else:
        self.assertEqual(len(revisions.entry), 2)
      self.assert_(isinstance(revisions.entry[0], gdata.docs.data.Revision))
      self.assert_(isinstance(revisions.entry[1], gdata.docs.data.Revision))

  def testPublishRevision(self):
    """Publishing a revision sets publish flags; older revisions stay unset."""
    if self.resource_type in ['file', 'pdf', 'collection']:
      return
    # Drawings do not currently support update, thus this test would fail.
    if self.resource_type == 'drawing':
      return
    entry = self._update()
    revisions = self.client.GetRevisions(entry)
    revision = self.client.PublishRevision(revisions.entry[1])
    # Currently, there is a bug where uploaded presentations have 2 revisions.
    if self.resource_type == 'presentation':
      revisions = self.client.GetRevisions(entry)
      revision = revisions.entry[2]
    self.assert_(isinstance(revision, gdata.docs.data.Revision))
    self.assertEqual(revision.publish.value, 'true')
    self.assertEqual(revision.publish_auto.value, 'false')
    # The rest of the tests require an Apps domain
    if 'gmail' in conf.options.get_value('username'):
      return
    self.assertEqual(revision.publish_outside_domain.value, 'false')
    # Empty documents won't have further revisions b/c content didn't change
    if self.resource_type == 'empty_document':
      return
    revisions = self.client.GetRevisions(entry)
    if self.resource_type == 'presentation':
      revision = self.client.PublishRevision(
          revisions.entry[2], publish_auto=True, publish_outside_domain=True)
    else:
      revision = self.client.PublishRevision(
          revisions.entry[1], publish_auto=True, publish_outside_domain=True)
    if self.resource_type == 'spreadsheet':
      # BUG FIX: this previously called the bare name 'client', which is not
      # defined anywhere in this module and raised NameError at runtime.
      revision = self.client.GetRevisions(entry).entry[1]
    self.assert_(isinstance(revision, gdata.docs.data.Revision))
    self.assertEqual(revision.publish.value, 'true')
    self.assertEqual(revision.publish_auto.value, 'true')
    self.assertEqual(revision.publish_outside_domain.value, 'true')
    # The first (oldest) revision must remain unpublished.
    revision = self.client.GetRevision(revisions.entry[0])
    self.assertEqual(revision.publish, None)
    self.assertEqual(revision.publish_auto, None)
    self.assertEqual(revision.publish_outside_domain, None)

  def testDownloadRevision(self):
    """The newest revision downloads to a temp file without error."""
    if self.resource_type == 'collection':
      return
    revisions = self.client.GetRevisions(self.resource)
    tmp = tempfile.mkstemp()
    self.client.DownloadRevision(revisions.entry[0], tmp[1])
    os.close(tmp[0])
    os.remove(tmp[1])

  def testDeleteRevision(self):
    """Deleting a file revision shrinks the revision feed back to one entry."""
    # API can only delete file revisions
    if self.resource_type != 'file':
      return
    entry = self._update()
    revisions = self.client.GetRevisions(entry)
    self.assertEqual(len(revisions.entry), 2)
    self.client.DeleteRevision(revisions.entry[1])
    self.assert_(isinstance(revisions, gdata.docs.data.RevisionFeed))
    self.assert_(isinstance(revisions.entry[0], gdata.docs.data.Revision))
    revisions = self.client.GetRevisions(entry)
    self.assertEqual(len(revisions.entry), 1)
class ChangesTest(DocsTestCase):
  """Tests the changes feed; relies on pre-existing account activity."""

  def testGetChanges(self):
    """GetChanges returns a bounded ChangeFeed before and after an update."""
    # This test assumes that by the time this test is run, the account
    # being used already has a number of changes
    changes = self.client.GetChanges(max_results=5)
    self.assert_(isinstance(changes, gdata.docs.data.ChangeFeed))
    self.assert_(len(changes.entry) <= 5)
    self.assert_(isinstance(changes.entry[0], gdata.docs.data.Change))
    # After an update, paging from the latest changestamp must still
    # yield a well-formed feed.
    self._update()
    changes = self.client.GetChanges(changes.entry[0].changestamp.value, 5)
    self.assert_(isinstance(changes, gdata.docs.data.ChangeFeed))
    self.assert_(len(changes.entry) <= 5)
    self.assert_(isinstance(changes.entry[0], gdata.docs.data.Change))

  def testDeleteResourceCreatesNewChange(self):
    """Ensure that deleting a resource causes a new change entry."""
    self._update()
    changes = self.client.GetChanges(max_results=1)
    latest = changes.entry[0].changestamp.value
    self._delete(self.resource)
    # Give the live service time to record the deletion change.
    time.sleep(10)
    changes = self.client.GetChanges(max_results=1)
    self.assert_(latest < changes.entry[0].changestamp.value)
class MetadataTest(DocsTestCase):
  """Tests the user metadata feed; overrides setUp since no resource is needed."""

  def setUp(self):
    if conf.options.get_value('runlive') != 'true':
      raise RuntimeError('Live tests require --runlive true')
    else:
      self.client = gdata.docs.client.DocsClient()
      # NOTE(review): DocsTestCase.setUp disables ssl when the option is
      # 'false', whereas this enables it when 'true' — confirm which
      # default is intended.
      if conf.options.get_value('ssl') == 'true':
        self.client.ssl = True
      conf.configure_client(self.client, 'DocsTest', self.client.auth_service)
      conf.configure_cache(self.client, str(self.__class__))
      if conf.options.get_value('clean') == 'true':
        self._delete_all()

  def tearDown(self):
    conf.close_client(self.client)

  def testMetadata(self):
    """Quota totals, formats, features and upload sizes must be populated."""
    metadata = self.client.GetMetadata()
    self.assert_(isinstance(metadata, gdata.docs.data.Metadata))
    self.assertNotEqual(int(metadata.quota_bytes_total.text), 0)
    # Assumes the test account starts with nothing stored or trashed.
    self.assertEqual(int(metadata.quota_bytes_used.text), 0)
    self.assertEqual(int(metadata.quota_bytes_used_in_trash.text), 0)
    self.assertNotEqual(len(metadata.import_formats), 0)
    self.assertNotEqual(len(metadata.export_formats), 0)
    self.assertNotEqual(len(metadata.features), 0)
    self.assertNotEqual(len(metadata.max_upload_sizes), 0)
def suite():
  """Build the live docs test suite, parameterized per RESOURCES entry."""
  all_tests = unittest.TestSuite()
  loader = unittest.TestLoader()
  for resource_type, info in RESOURCES.iteritems():
    label, title, path, alt_path, mime, export = info
    for case in (ResourcesTest, AclTest, RevisionsTest, ChangesTest):
      for test in loader.loadTestsFromTestCase(case):
        # Copy the fixture fields onto each test instance.
        test.resource_type = resource_type
        test.resource_label = label
        test.resource_title = title
        test.resource_path = path
        test.resource_alt_path = alt_path
        test.resource_mime = mime
        test.resource_export = export
        all_tests.addTest(test)
  # MetadataTest is not parameterized by resource type.
  all_tests.addTests(loader.loadTestsFromTestCase(MetadataTest))
  return all_tests


if __name__ == '__main__':
  unittest.TextTestRunner().run(suite())
| Python |
#!/usr/bin/python
#
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = ('api.jfisher (Jeff Fisher), '
'api.eric@google.com (Eric Bidelman)')
import getpass
import os
import re
import StringIO
import time
import unittest
import gdata.docs.service
import gdata.spreadsheet.service
# Login credentials; empty here — presumably populated at runtime before the
# suite executes (getpass is imported above). TODO confirm against the runner.
username = ''
password = ''
# Shared service clients reused by every test case in this module.
client = gdata.docs.service.DocsService()
# Secondary client for edits; setUp gives it an 'If-Match: *' header so its
# Put/Delete calls are unconditional.
editClient = gdata.docs.service.DocsService()
spreadsheets = gdata.spreadsheet.service.SpreadsheetsService()
class DocumentsListServiceTest(unittest.TestCase):
  """Base class wiring the shared module-level clients onto each test."""

  def setUp(self):
    # Reuse the module-level authenticated services.
    self.client = client
    self.editClient = editClient
    # Share the login token; 'If-Match: *' makes edits unconditional.
    self.editClient.SetClientLoginToken(client.GetClientLoginToken())
    self.editClient.additional_headers = {'If-Match': '*'}
    self.spreadsheets = spreadsheets
    # Kind categories used when creating entries of each document type.
    self.DOCUMENT_CATEGORY = client._MakeKindCategory(gdata.docs.service.DOCUMENT_LABEL)
    self.SPREADSHEET_CATEGORY = client._MakeKindCategory(gdata.docs.service.SPREADSHEET_LABEL)
    self.PRESENTATION_CATEGORY = client._MakeKindCategory(gdata.docs.service.PRESENTATION_LABEL)
class DocumentListQueryTest(DocumentsListServiceTest):
  """Read-only queries against the documents list feed."""

  def setUp(self):
    DocumentsListServiceTest.setUp(self)
    # Baseline feed used by each test for comparison.
    self.feed = self.client.GetDocumentListFeed()

  def testGetDocumentsListFeed(self):
    """Both feed-query entry points return the same first document."""
    self.assert_(isinstance(self.feed, gdata.docs.DocumentListFeed))
    uri = 'http://docs.google.com/feeds/documents/private/full/?max-results=1'
    # Query using GetDocumentListFeed()
    feed = self.client.GetDocumentListFeed(uri)
    self.assert_(isinstance(feed, gdata.docs.DocumentListFeed))
    self.assertEqual(len(feed.entry), 1)
    self.assertEqual(self.feed.entry[0].id.text, feed.entry[0].id.text)
    self.assertEqual(self.feed.entry[0].title.text, feed.entry[0].title.text)
    # Query using QueryDocumentListFeed()
    feed2 = self.client.QueryDocumentListFeed(uri)
    self.assertEqual(len(feed2.entry), 1)
    self.assertEqual(self.feed.entry[0].id.text, feed2.entry[0].id.text)
    self.assertEqual(self.feed.entry[0].title.text, feed2.entry[0].title.text)

  def testGetDocumentsListEntry(self):
    """A single entry fetched by self link matches the feed's first entry."""
    self_link = self.feed.entry[0].GetSelfLink().href
    entry = self.client.GetDocumentListEntry(self_link)
    self.assert_(isinstance(entry, gdata.docs.DocumentListEntry))
    self.assertEqual(self.feed.entry[0].id.text, entry.id.text)
    self.assertEqual(self.feed.entry[0].title.text, entry.title.text)
    # Metadata fields should be populated on list entries.
    self.assert_(self.feed.entry[0].resourceId.text is not None)
    self.assert_(self.feed.entry[0].lastModifiedBy is not None)
    self.assert_(self.feed.entry[0].lastViewed is not None)

  def testGetDocumentsListAclFeed(self):
    """An owned document exposes an ACL feed with scope and role set."""
    uri = ('http://docs.google.com/feeds/documents/private/full/'
           '-/mine?max-results=1')
    feed = self.client.GetDocumentListFeed(uri)
    feed_link = feed.entry[0].GetAclLink().href
    acl_feed = self.client.GetDocumentListAclFeed(feed_link)
    self.assert_(isinstance(acl_feed, gdata.docs.DocumentListAclFeed))
    self.assert_(isinstance(acl_feed.entry[0], gdata.docs.DocumentListAclEntry))
    self.assert_(acl_feed.entry[0].scope is not None)
    self.assert_(acl_feed.entry[0].role is not None)
class DocumentListAclTest(DocumentsListServiceTest):
  """Live-service test for the full ACL lifecycle (create/update/delete)."""

  def setUp(self):
    DocumentsListServiceTest.setUp(self)
    # Grab one document owned by the test account to share.
    uri = ('http://docs.google.com/feeds/documents/private/full'
           '/-/mine?max-results=1')
    self.feed = self.client.GetDocumentListFeed(uri)
    # Fixture values for the ACL entry created below.
    self.EMAIL = 'x@example.com'
    self.SCOPE_TYPE = 'user'
    self.ROLE_VALUE = 'reader'

  def testCreateAndUpdateAndDeleteAcl(self):
    """Shares a doc with a user, promotes them to writer, then unshares."""
    # Add new ACL
    scope = gdata.docs.Scope(value=self.EMAIL, type=self.SCOPE_TYPE)
    role = gdata.docs.Role(value=self.ROLE_VALUE)
    acl_entry = self.client.Post(
        gdata.docs.DocumentListAclEntry(scope=scope, role=role),
        self.feed.entry[0].GetAclLink().href,
        converter=gdata.docs.DocumentListAclEntryFromString)
    self.assert_(isinstance(acl_entry, gdata.docs.DocumentListAclEntry))
    self.assertEqual(acl_entry.scope.value, self.EMAIL)
    self.assertEqual(acl_entry.scope.type, self.SCOPE_TYPE)
    self.assertEqual(acl_entry.role.value, self.ROLE_VALUE)
    # Update the user's role
    ROLE_VALUE = 'writer'
    acl_entry.role.value = ROLE_VALUE
    updated_acl_entry = self.editClient.Put(
        acl_entry, acl_entry.GetEditLink().href,
        converter=gdata.docs.DocumentListAclEntryFromString)
    self.assertEqual(updated_acl_entry.scope.value, self.EMAIL)
    self.assertEqual(updated_acl_entry.scope.type, self.SCOPE_TYPE)
    self.assertEqual(updated_acl_entry.role.value, ROLE_VALUE)
    # Delete the ACL
    self.editClient.Delete(updated_acl_entry.GetEditLink().href)
    # Make sure entry was actually deleted: the test user's email must no
    # longer appear anywhere in the document's ACL feed.
    acl_feed = self.client.GetDocumentListAclFeed(
        self.feed.entry[0].GetAclLink().href)
    for acl_entry in acl_feed.entry:
      self.assert_(acl_entry.scope.value != self.EMAIL)
class DocumentListCreateAndDeleteTest(DocumentsListServiceTest):
  """Live-service tests for creating and deleting empty docs and folders."""

  def setUp(self):
    DocumentsListServiceTest.setUp(self)
    self.BLANK_TITLE = "blank.txt"
    self.TITLE = 'Test title'
    # A bare entry categorized as a word-processor document; individual
    # tests retitle it or swap in a different kind category.
    self.new_entry = gdata.docs.DocumentListEntry()
    self.new_entry.category.append(self.DOCUMENT_CATEGORY)

  def testCreateAndDeleteEmptyDocumentSlugHeaderTitle(self):
    """Creates an empty doc titled via the HTTP Slug header."""
    created_entry = self.client.Post(self.new_entry,
                                     '/feeds/documents/private/full',
                                     extra_headers={'Slug': self.BLANK_TITLE})
    self.editClient.Delete(created_entry.GetEditLink().href)
    self.assertEqual(created_entry.title.text, self.BLANK_TITLE)
    self.assertEqual(created_entry.category[0].label, 'document')

  def testCreateAndDeleteEmptyDocumentAtomTitle(self):
    """Creates an empty doc titled via atom:title."""
    self.new_entry.title = gdata.atom.Title(text=self.TITLE)
    created_entry = self.client.Post(self.new_entry,
                                     '/feeds/documents/private/full')
    self.editClient.Delete(created_entry.GetEditLink().href)
    self.assertEqual(created_entry.title.text, self.TITLE)
    self.assertEqual(created_entry.category[0].label, 'document')

  def testCreateAndDeleteEmptySpreadsheet(self):
    """Creates an empty spreadsheet; expects labels ['viewed', 'spreadsheet']."""
    self.new_entry.title = gdata.atom.Title(text=self.TITLE)
    self.new_entry.category[0] = self.SPREADSHEET_CATEGORY
    created_entry = self.client.Post(self.new_entry,
                                     '/feeds/documents/private/full')
    self.editClient.Delete(created_entry.GetEditLink().href)
    self.assertEqual(created_entry.title.text, self.TITLE)
    self.assertEqual(created_entry.category[0].label, 'viewed')
    self.assertEqual(created_entry.category[1].label, 'spreadsheet')

  def testCreateAndDeleteEmptyPresentation(self):
    """Creates an empty presentation; expects labels ['viewed', 'presentation']."""
    self.new_entry.title = gdata.atom.Title(text=self.TITLE)
    self.new_entry.category[0] = self.PRESENTATION_CATEGORY
    created_entry = self.client.Post(self.new_entry,
                                     '/feeds/documents/private/full')
    self.editClient.Delete(created_entry.GetEditLink().href)
    self.assertEqual(created_entry.title.text, self.TITLE)
    self.assertEqual(created_entry.category[0].label, 'viewed')
    self.assertEqual(created_entry.category[1].label, 'presentation')

  def testCreateAndDeleteFolder(self):
    folder_name = 'TestFolder'
    folder = self.client.CreateFolder(folder_name)
    self.assertEqual(folder.title.text, folder_name)
    self.editClient.Delete(folder.GetEditLink().href)

  def testCreateAndDeleteFolderInFolder(self):
    """Creates a folder inside another and verifies the parent category."""
    DEST_FOLDER_NAME = 'TestFolder'
    dest_folder = self.client.CreateFolder(DEST_FOLDER_NAME)
    CREATED_FOLDER_NAME = 'TestFolder2'
    new_folder = self.client.CreateFolder(CREATED_FOLDER_NAME, dest_folder)
    for category in new_folder.category:
      if category.scheme.startswith(gdata.docs.service.FOLDERS_SCHEME_PREFIX):
        # BUG FIX: the original asserted on new_folder.category[0].label,
        # but index 0 is not necessarily the folder category this loop just
        # found; assert on the matched category itself, as the sibling
        # DocumentListMoveInAndOutOfFolderTest tests do.
        self.assertEqual(category.label, DEST_FOLDER_NAME)
        break
    # delete the folders we created, this will also delete the child folder
    dest_folder = self.client.Get(dest_folder.GetSelfLink().href)
    self.editClient.Delete(dest_folder.GetEditLink().href)
class DocumentListMoveInAndOutOfFolderTest(DocumentsListServiceTest):
  """Live-service tests for moving documents and folders between folders."""

  def setUp(self):
    DocumentsListServiceTest.setUp(self)
    self.folder_name = 'TestFolder'
    self.folder = self.client.CreateFolder(self.folder_name)
    self.doc_title = 'TestDoc'
    # Local .doc fixture uploaded by the tests below; assumes test.doc
    # exists in the working directory.
    self.ms = gdata.MediaSource(file_path='test.doc',
                                content_type='application/msword')

  def tearDown(self):
    # Re-fetch for a current edit link before deleting the test folder.
    folder = self.client.Get(self.folder.GetSelfLink().href)
    self.editClient.Delete(folder.GetEditLink().href)

  def testUploadDocumentToFolder(self):
    """Uploads a doc directly into the test folder and checks its category."""
    created_entry = self.client.Upload(self.ms, self.doc_title,
                                       self.folder)
    # The uploaded doc should carry a folder category naming its parent.
    for category in created_entry.category:
      if category.scheme.startswith(gdata.docs.service.FOLDERS_SCHEME_PREFIX):
        self.assertEqual(category.label, self.folder_name)
        break
    # delete the doc we created
    created_entry = self.client.Get(created_entry.GetSelfLink().href)
    # NOTE(review): rebuilds a top-level edit URI from the folder-scoped
    # edit link — presumably so the document itself is deleted rather than
    # just its folder membership; confirm against the DocList API docs.
    match = re.search('\/(document%3A[^\/]*)\/?.*?\/(.*)$',
                      created_entry.GetEditLink().href)
    edit_uri = 'http://docs.google.com/feeds/documents/private/full/'
    edit_uri += '%s/%s' % (match.group(1), match.group(2))
    self.editClient.Delete(edit_uri)

  def testMoveDocumentInAndOutOfFolder(self):
    """Moves an uploaded doc into the folder, back out, then deletes it."""
    created_entry = self.client.Upload(self.ms, self.doc_title)
    moved_entry = self.client.MoveIntoFolder(created_entry,
                                             self.folder)
    for category in moved_entry.category:
      if category.scheme.startswith(gdata.docs.service.FOLDERS_SCHEME_PREFIX):
        self.assertEqual(category.label, self.folder_name)
        break
    self.editClient.MoveOutOfFolder(moved_entry)
    moved_entry = self.client.Get(moved_entry.GetSelfLink().href)
    # After moving out, no folder category should remain on the entry.
    for category in moved_entry.category:
      starts_with_folder__prefix = category.scheme.startswith(
          gdata.docs.service.FOLDERS_SCHEME_PREFIX)
      self.assert_(not starts_with_folder__prefix)
    created_entry = self.client.Get(created_entry.GetSelfLink().href)
    self.editClient.Delete(created_entry.GetEditLink().href)

  def testMoveFolderIntoFolder(self):
    """Moves the test folder into another folder and checks its categories."""
    dest_folder_name = 'DestFolderName'
    dest_folder = self.client.CreateFolder(dest_folder_name)
    self.client.MoveIntoFolder(self.folder, dest_folder)
    self.folder = self.client.Get(self.folder.GetSelfLink().href)
    folder_was_moved = False
    # The parent folder's name is expected in a category term.
    for category in self.folder.category:
      if category.term == dest_folder_name:
        folder_was_moved = True
        break
    self.assert_(folder_was_moved)
    #cleanup
    dest_folder = self.client.Get(dest_folder.GetSelfLink().href)
    self.editClient.Delete(dest_folder.GetEditLink().href)
class DocumentListUploadTest(DocumentsListServiceTest):
  """Live-service tests uploading local fixture files of each kind.

  Assumes test.doc, test.ppt, and test.csv exist in the working directory.
  """

  def testUploadAndDeleteDocument(self):
    ms = gdata.MediaSource(file_path='test.doc',
                           content_type='application/msword')
    entry = self.client.Upload(ms, 'test doc')
    self.assertEqual(entry.title.text, 'test doc')
    self.assertEqual(entry.category[0].label, 'document')
    self.assert_(isinstance(entry, gdata.docs.DocumentListEntry))
    self.editClient.Delete(entry.GetEditLink().href)

  def testUploadAndDeletePresentation(self):
    ms = gdata.MediaSource(file_path='test.ppt',
                           content_type='application/vnd.ms-powerpoint')
    entry = self.client.Upload(ms, 'test preso')
    self.assertEqual(entry.title.text, 'test preso')
    # Presentations come back with a 'viewed' label before the kind label.
    self.assertEqual(entry.category[0].label, 'viewed')
    self.assertEqual(entry.category[1].label, 'presentation')
    self.assert_(isinstance(entry, gdata.docs.DocumentListEntry))
    self.editClient.Delete(entry.GetEditLink().href)

  def testUploadAndDeleteSpreadsheet(self):
    ms = gdata.MediaSource(file_path='test.csv',
                           content_type='text/csv')
    entry = self.client.Upload(ms, 'test spreadsheet')
    self.assert_(entry.title.text == 'test spreadsheet')
    self.assertEqual(entry.category[0].label, 'viewed')
    self.assertEqual(entry.category[1].label, 'spreadsheet')
    self.assert_(isinstance(entry, gdata.docs.DocumentListEntry))
    self.editClient.Delete(entry.GetEditLink().href)
class DocumentListUpdateTest(DocumentsListServiceTest):
  """Live-service tests updating a document's metadata and media content."""

  def setUp(self):
    DocumentsListServiceTest.setUp(self)
    self.TITLE = 'CreatedTestDoc'
    # Create an empty doc for the test to update.
    new_entry = gdata.docs.DocumentListEntry()
    new_entry.title = gdata.atom.Title(text=self.TITLE)
    new_entry.category.append(self.DOCUMENT_CATEGORY)
    self.created_entry = self.client.Post(new_entry,
                                          '/feeds/documents/private/full')

  def tearDown(self):
    # Delete the test doc we created
    self_link = self.created_entry.GetSelfLink().href
    entry = self.client.GetDocumentListEntry(self_link)
    self.editClient.Delete(entry.GetEditLink().href)

  def testUpdateDocumentMetadataAndContent(self):
    """Retitles the doc, replaces its content, then appends to it."""
    title = 'UpdatedTestDoc'
    # Update metadata
    self.created_entry.title.text = title
    updated_entry = self.editClient.Put(self.created_entry,
                                        self.created_entry.GetEditLink().href)
    self.assertEqual(updated_entry.title.text, title)
    # Update document's content (replace via the edit-media link)
    ms = gdata.MediaSource(file_path='test.doc',
                           content_type='application/msword')
    uri = updated_entry.GetEditMediaLink().href
    updated_entry = self.editClient.Put(ms, uri)
    self.assertEqual(updated_entry.title.text, title)
    # Append content to document via the ?append=true query parameter.
    data = 'data to append'
    ms = gdata.MediaSource(file_handle=StringIO.StringIO(data),
                           content_type='text/plain',
                           content_length=len(data))
    uri = updated_entry.GetEditMediaLink().href + '?append=true'
    updated_entry = self.editClient.Put(ms, uri)
class DocumentListExportTest(DocumentsListServiceTest):
  """Live-service tests exporting docs to every supported download format."""

  def testExportDocument(self):
    query = ('https://docs.google.com/feeds/documents/private/full'
             '/-/document?max-results=1')
    feed = self.client.QueryDocumentListFeed(query)
    # One path per supported document export format; the extension selects
    # the format.
    file_paths = ['./downloadedTest.doc', './downloadedTest.html',
                  './downloadedTest.odt', './downloadedTest.pdf',
                  './downloadedTest.png', './downloadedTest.rtf',
                  './downloadedTest.txt', './downloadedTest.zip']
    for path in file_paths:
      self.client.Export(feed.entry[0], path)
      self.assert_(os.path.exists(path))
      self.assert_(os.path.getsize(path))
      os.remove(path)

  def testExportPresentation(self):
    query = ('https://docs.google.com/feeds/documents/private/full'
             '/-/presentation?max-results=1')
    feed = self.client.QueryDocumentListFeed(query)
    file_paths = ['./downloadedTest.pdf', './downloadedTest.ppt',
                  './downloadedTest.swf', './downloadedTest.txt']
    for path in file_paths:
      # Exercises Export() with a resource-id string instead of an entry.
      self.client.Export(feed.entry[0].resourceId.text, path)
      self.assert_(os.path.exists(path))
      self.assert_(os.path.getsize(path))
      os.remove(path)

  def testExportSpreadsheet(self):
    query = ('https://docs.google.com/feeds/documents/private/full'
             '/-/spreadsheet?max-results=1')
    feed = self.client.QueryDocumentListFeed(query)
    file_paths = ['./downloadedTest.xls', './downloadedTest.csv',
                  './downloadedTest.pdf', './downloadedTest.ods',
                  './downloadedTest.tsv', './downloadedTest.html']
    # Spreadsheet downloads require the spreadsheets-service auth token,
    # so swap it in for the duration of the export and restore afterwards.
    docs_token = self.client.GetClientLoginToken()
    self.client.SetClientLoginToken(self.spreadsheets.GetClientLoginToken())
    for path in file_paths:
      self.client.Export(feed.entry[0], path)
      self.assert_(os.path.exists(path))
      self.assert_(os.path.getsize(path) > 0)
      os.remove(path)
    self.client.SetClientLoginToken(docs_token)

  def testExportNonExistentDocument(self):
    """Exporting a bogus id must raise and must not create the output file."""
    path = './ned.txt'
    exception_raised = False
    try:
      self.client.Export('non_existent_doc', path)
    except Exception, e:  # expected
      exception_raised = True
    self.assert_(exception_raised)
    self.assert_(not os.path.exists(path))
if __name__ == '__main__':
  # Interactive entry point: warn the user, collect credentials once, and
  # log both service clients in before handing control to unittest.
  print ('DocList API Tests\nNOTE: Please run these tests only with a test '
         'account. The tests may delete or update your data.')
  username = raw_input('Please enter your username: ')
  password = getpass.getpass()
  # Each client only needs a fresh login when it has no cached token.
  for service in (client, spreadsheets):
    if service.GetClientLoginToken() is None:
      service.ClientLogin(username, password,
                          source='Document List Client Unit Tests')
  unittest.main()
| Python |
#!/usr/bin/python
#
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'e.bidelman (Eric Bidelman)'
import unittest
import atom
from gdata import test_data
import gdata.acl.data
import gdata.data
import gdata.docs.data
import gdata.test_config as conf
class DocsEntryTest(unittest.TestCase):
  """Offline parsing tests for gdata.docs.data.Resource from canned XML."""

  def setUp(self):
    # Parse the canned v3 entry fixture into a Resource.
    self.entry = atom.core.parse(test_data.DOCUMENT_LIST_ENTRY_V3,
                                 gdata.docs.data.Resource)

  def testToAndFromStringDocsEntry(self):
    """Checks every parsed field of the fixture entry."""
    self.assert_(isinstance(self.entry, gdata.docs.data.Resource))
    self.assertEqual(self.entry.GetResourceType(), 'spreadsheet')
    self.assert_(isinstance(self.entry.last_viewed, gdata.docs.data.LastViewed))
    self.assertEqual(self.entry.last_viewed.text, '2009-03-05T07:48:21.493Z')
    self.assert_(
        isinstance(self.entry.last_modified_by, gdata.docs.data.LastModifiedBy))
    self.assertEqual(
        self.entry.last_modified_by.email.text, 'test.user@gmail.com')
    self.assertEqual(self.entry.last_modified_by.name.text, 'test.user')
    self.assert_(isinstance(self.entry.resource_id, gdata.docs.data.ResourceId))
    self.assertEqual(self.entry.resource_id.text,
                     'spreadsheet:supercalifragilisticexpealidocious')
    self.assert_(isinstance(self.entry.writers_can_invite,
                            gdata.docs.data.WritersCanInvite))
    self.assertEqual(self.entry.writers_can_invite.value, 'true')
    self.assert_(isinstance(self.entry.quota_bytes_used,
                            gdata.docs.data.QuotaBytesUsed))
    self.assertEqual(self.entry.quota_bytes_used.text, '1000')
    # The fixture carries two feed links: ACL and revisions.
    self.assertEqual(len(self.entry.feed_link), 2)
    self.assert_(isinstance(self.entry.feed_link[0], gdata.data.FeedLink))
    self.assertEqual(
        self.entry.GetAclFeedLink().href,
        ('https://docs.google.com/feeds/default/private/full/'
         'spreadsheet%3Asupercalifragilisticexpealidocious/acl'))
    self.assertEqual(
        self.entry.GetRevisionsFeedLink().href,
        ('https://docs.google.com/feeds/default/private/full/'
         'spreadsheet%3Asupercalifragilisticexpealidocious/revisions'))
    self.assertEqual(len(self.entry.InCollections()), 1)
    self.assertEqual(self.entry.InCollections()[0].title, 'AFolderName')
class AclTest(unittest.TestCase):
  """Offline tests for gdata.docs.data.AclEntry parsing and construction."""

  def setUp(self):
    self.acl_entry = atom.core.parse(test_data.DOCUMENT_LIST_ACL_ENTRY,
                                     gdata.docs.data.AclEntry)
    self.acl_entry_withkey = atom.core.parse(
        test_data.DOCUMENT_LIST_ACL_WITHKEY_ENTRY, gdata.docs.data.AclEntry)
    self.acl_entry_additional_role = atom.core.parse(
        test_data.DOCUMENT_LIST_ACL_ADDITIONAL_ROLE_ENTRY,
        gdata.docs.data.AclEntry)

  def testToAndFromString(self):
    """Round-trips a plain user/writer ACL entry through its string form."""
    self.assert_(isinstance(self.acl_entry, gdata.docs.data.AclEntry))
    self.assert_(isinstance(self.acl_entry.role, gdata.acl.data.AclRole))
    self.assert_(isinstance(self.acl_entry.scope, gdata.acl.data.AclScope))
    self.assertEqual(self.acl_entry.scope.value, 'user@gmail.com')
    self.assertEqual(self.acl_entry.scope.type, 'user')
    self.assertEqual(self.acl_entry.role.value, 'writer')
    acl_entry_str = str(self.acl_entry)
    new_acl_entry = atom.core.parse(acl_entry_str, gdata.docs.data.AclEntry)
    self.assert_(isinstance(new_acl_entry, gdata.docs.data.AclEntry))
    self.assert_(isinstance(new_acl_entry.role, gdata.acl.data.AclRole))
    self.assert_(isinstance(new_acl_entry.scope, gdata.acl.data.AclScope))
    self.assertEqual(new_acl_entry.scope.value, self.acl_entry.scope.value)
    self.assertEqual(new_acl_entry.scope.type, self.acl_entry.scope.type)
    self.assertEqual(new_acl_entry.role.value, self.acl_entry.role.value)

  def testToAndFromStringWithKey(self):
    """Round-trips a with-key ACL entry; the role lives under gAcl:withKey."""
    self.assert_(isinstance(self.acl_entry_withkey, gdata.docs.data.AclEntry))
    self.assert_(self.acl_entry_withkey.role is None)
    self.assert_(isinstance(self.acl_entry_withkey.with_key,
                            gdata.acl.data.AclWithKey))
    self.assert_(isinstance(self.acl_entry_withkey.with_key.role,
                            gdata.acl.data.AclRole))
    self.assert_(isinstance(self.acl_entry_withkey.scope,
                            gdata.acl.data.AclScope))
    self.assertEqual(self.acl_entry_withkey.with_key.key, 'somekey')
    self.assertEqual(self.acl_entry_withkey.with_key.role.value, 'writer')
    self.assertEqual(self.acl_entry_withkey.scope.value, 'example.com')
    self.assertEqual(self.acl_entry_withkey.scope.type, 'domain')
    acl_entry_withkey_str = str(self.acl_entry_withkey)
    new_acl_entry_withkey = atom.core.parse(acl_entry_withkey_str,
                                            gdata.docs.data.AclEntry)
    self.assert_(isinstance(new_acl_entry_withkey, gdata.docs.data.AclEntry))
    self.assert_(new_acl_entry_withkey.role is None)
    self.assert_(isinstance(new_acl_entry_withkey.with_key,
                            gdata.acl.data.AclWithKey))
    self.assert_(isinstance(new_acl_entry_withkey.with_key.role,
                            gdata.acl.data.AclRole))
    self.assert_(isinstance(new_acl_entry_withkey.scope,
                            gdata.acl.data.AclScope))
    self.assertEqual(new_acl_entry_withkey.with_key.key,
                     self.acl_entry_withkey.with_key.key)
    self.assertEqual(new_acl_entry_withkey.with_key.role.value,
                     self.acl_entry_withkey.with_key.role.value)
    self.assertEqual(new_acl_entry_withkey.scope.value,
                     self.acl_entry_withkey.scope.value)
    self.assertEqual(new_acl_entry_withkey.scope.type,
                     self.acl_entry_withkey.scope.type)

  def testCreateNewAclEntry(self):
    """Builds an ACL entry from scratch and checks its fields."""
    # BUG FIX: the original built the entry from legacy v1 classes
    # (gdata.atom.Category, gdata.docs.DocumentListAclEntry, gdata.docs.Scope,
    # gdata.docs.Role) that this module never imports — it only imports
    # atom, gdata.acl.data, and gdata.docs.data — so the test depended on
    # submodule import side effects. Use the v3 data classes consistently
    # with the rest of this file.
    acl_entry = gdata.docs.data.AclEntry()
    acl_entry.scope = gdata.acl.data.AclScope(value='user@gmail.com',
                                              type='user')
    acl_entry.role = gdata.acl.data.AclRole(value='writer')
    self.assert_(isinstance(acl_entry, gdata.docs.data.AclEntry))
    self.assert_(isinstance(acl_entry.role, gdata.acl.data.AclRole))
    self.assert_(isinstance(acl_entry.scope, gdata.acl.data.AclScope))
    self.assertEqual(acl_entry.scope.value, 'user@gmail.com')
    self.assertEqual(acl_entry.scope.type, 'user')
    self.assertEqual(acl_entry.role.value, 'writer')

  def testAdditionalRole(self):
    """Checks gAcl:additionalRole both at top level and under withKey."""
    self.assertEqual(
        self.acl_entry_additional_role.additional_role.value,
        'commenter')
    self.assertEqual(
        self.acl_entry_additional_role.with_key.additional_role.value,
        'commenter')
class AclFeedTest(unittest.TestCase):
  """Offline parsing tests for gdata.docs.data.AclFeed from canned XML."""

  def setUp(self):
    self.feed = atom.core.parse(test_data.DOCUMENT_LIST_ACL_FEED,
                                gdata.docs.data.AclFeed)

  def testToAndFromString(self):
    """Entries survive a serialize/re-parse round trip with the right type."""
    for entry in self.feed.entry:
      self.assert_(isinstance(entry, gdata.docs.data.AclEntry))
    feed = atom.core.parse(str(self.feed), gdata.docs.data.AclFeed)
    for entry in feed.entry:
      self.assert_(isinstance(entry, gdata.docs.data.AclEntry))

  def testConvertActualData(self):
    """Checks both fixture entries field by field."""
    entries = self.feed.entry
    self.assert_(len(entries) == 2)
    # Entry 0: the owner (a user scope).
    self.assertEqual(entries[0].title.text,
                     'Document Permission - user@gmail.com')
    self.assertEqual(entries[0].role.value, 'owner')
    self.assertEqual(entries[0].scope.type, 'user')
    self.assertEqual(entries[0].scope.value, 'user@gmail.com')
    self.assert_(entries[0].GetSelfLink() is not None)
    self.assert_(entries[0].GetEditLink() is not None)
    # Entry 1: a domain-wide writer grant.
    self.assertEqual(entries[1].title.text,
                     'Document Permission - user2@google.com')
    self.assertEqual(entries[1].role.value, 'writer')
    self.assertEqual(entries[1].scope.type, 'domain')
    self.assertEqual(entries[1].scope.value, 'google.com')
    self.assert_(entries[1].GetSelfLink() is not None)
    self.assert_(entries[1].GetEditLink() is not None)
class RevisionFeedTest(unittest.TestCase):
  """Offline parsing tests for gdata.docs.data.RevisionFeed from canned XML."""

  def setUp(self):
    self.feed = atom.core.parse(test_data.DOCUMENT_LIST_REVISION_FEED,
                                gdata.docs.data.RevisionFeed)

  def testToAndFromString(self):
    """Entries survive a serialize/re-parse round trip with the right type."""
    for entry in self.feed.entry:
      self.assert_(isinstance(entry, gdata.docs.data.Revision))
    feed = atom.core.parse(str(self.feed), gdata.docs.data.RevisionFeed)
    for entry in feed.entry:
      self.assert_(isinstance(entry, gdata.docs.data.Revision))

  def testConvertActualData(self):
    """Checks the single fixture revision's publish settings and links."""
    entries = self.feed.entry
    self.assert_(len(entries) == 1)
    self.assertEqual(entries[0].title.text, 'Revision 2')
    self.assertEqual(entries[0].publish.value, 'true')
    self.assertEqual(entries[0].publish_auto.value, 'true')
    self.assertEqual(entries[0].publish_outside_domain.value, 'false')
    self.assertEqual(
        entries[0].GetPublishLink().href,
        'https://docs.google.com/View?docid=dfr4&pageview=1&hgd=1')
    self.assertEqual(
        entries[0].FindPublishLink(),
        'https://docs.google.com/View?docid=dfr4&pageview=1&hgd=1')
class DataClassSanityTest(unittest.TestCase):
  """Smoke-checks the basic XML element structure of each docs data class."""

  def test_basic_element_structure(self):
    data_classes = [
        gdata.docs.data.ResourceId,
        gdata.docs.data.LastModifiedBy,
        gdata.docs.data.LastViewed,
        gdata.docs.data.WritersCanInvite,
        gdata.docs.data.QuotaBytesUsed,
        gdata.docs.data.Publish,
        gdata.docs.data.PublishAuto,
        gdata.docs.data.PublishOutsideDomain,
        gdata.docs.data.Resource,
        gdata.docs.data.AclEntry,
        gdata.docs.data.AclFeed,
        gdata.docs.data.ResourceFeed,
        gdata.docs.data.Revision,
        gdata.docs.data.RevisionFeed,
    ]
    conf.check_data_classes(self, data_classes)
class CategoryTest(unittest.TestCase):
  """Tests the category and label helpers on gdata.docs.data.Resource."""

  def setUp(self):
    # Parsed fixture: a spreadsheet entry carrying the full set of label
    # categories asserted in testGetLabels below.
    self.entry = atom.core.parse(test_data.DOCUMENT_LIST_ENTRY_V3,
                                 gdata.docs.data.Resource)

  def testAddCategory(self):
    entry = gdata.docs.data.Resource()
    entry.AddCategory('test_scheme', 'test_term', 'test_label')
    self.assertEqual(entry.GetFirstCategory('test_scheme').scheme,
                     'test_scheme')
    self.assertEqual(entry.GetFirstCategory('test_scheme').term, 'test_term')
    self.assertEqual(entry.GetFirstCategory('test_scheme').label, 'test_label')

  def testGetFirstCategory(self):
    entry = gdata.docs.data.Resource()
    cat1 = entry.AddCategory('test_scheme', 'test_term1', 'test_label1')
    # A second category in the same scheme must not displace the first.
    cat2 = entry.AddCategory('test_scheme', 'test_term2', 'test_label2')
    self.assertEqual(entry.GetFirstCategory('test_scheme'), cat1)

  def testGetCategories(self):
    cat1 = self.entry.AddCategory('test_scheme', 'test_term1', 'test_label1')
    cat2 = self.entry.AddCategory('test_scheme', 'test_term2', 'test_label2')
    cats = list(self.entry.GetCategories('test_scheme'))
    self.assertTrue(cat1 in cats)
    self.assertTrue(cat2 in cats)

  def testRemoveCategories(self):
    self.entry.RemoveCategories(gdata.docs.data.LABELS_SCHEME)
    self.assertEqual(self.entry.GetLabels(), set())

  def testResourceType(self):
    # BUG FIX: the original asserted on self.entry (the parsed fixture,
    # which is also a spreadsheet), so the constructor argument was never
    # actually exercised. Assert on the freshly constructed entry.
    entry = gdata.docs.data.Resource('spreadsheet')
    self.assertEqual(entry.GetResourceType(), 'spreadsheet')

  def testGetResourceType(self):
    self.assertEqual(self.entry.GetResourceType(), 'spreadsheet')

  def testSetResourceType(self):
    self.assertEqual(self.entry.GetResourceType(), 'spreadsheet')
    self.entry.SetResourceType('drawing')
    self.assertEqual(self.entry.GetResourceType(), 'drawing')

  def testGetLabels(self):
    self.assertEqual(self.entry.GetLabels(),
                     set(['mine', 'private', 'restricted-download',
                          'shared-with-domain', 'viewed', 'starred', 'hidden',
                          'trashed']))

  def testAddLabel(self):
    entry = gdata.docs.data.Resource()
    entry.AddLabel('banana')
    self.assertTrue('banana' in entry.GetLabels())

  def testRemoveLabel(self):
    entry = gdata.docs.data.Resource()
    entry.AddLabel('banana')
    entry.AddLabel('orange')
    self.assertTrue('banana' in entry.GetLabels())
    self.assertTrue('orange' in entry.GetLabels())
    entry.RemoveLabel('orange')
    self.assertFalse('orange' in entry.GetLabels())

  def testIsHidden(self):
    self.assertTrue(self.entry.IsHidden())

  def testIsNotHidden(self):
    # NOTE(review): the negative tests below call remove_categories() while
    # testRemoveCategories uses RemoveCategories() — confirm both spellings
    # exist on Resource before normalizing.
    self.entry.remove_categories(gdata.docs.data.LABELS_SCHEME)
    self.assertFalse(self.entry.IsHidden())

  def testIsViewed(self):
    self.assertTrue(self.entry.IsViewed())

  def testIsNotViewed(self):
    self.entry.remove_categories(gdata.docs.data.LABELS_SCHEME)
    self.assertFalse(self.entry.IsViewed())

  def testIsStarred(self):
    self.assertTrue(self.entry.IsStarred())

  def testIsNotStarred(self):
    self.entry.remove_categories(gdata.docs.data.LABELS_SCHEME)
    self.assertFalse(self.entry.IsStarred())

  def testIsTrashed(self):
    self.assertTrue(self.entry.IsTrashed())

  def testIsNotTrashed(self):
    self.entry.remove_categories(gdata.docs.data.LABELS_SCHEME)
    self.assertFalse(self.entry.IsTrashed())

  def testIsPrivate(self):
    self.assertTrue(self.entry.IsPrivate())

  def testIsNotPrivate(self):
    self.entry.remove_categories(gdata.docs.data.LABELS_SCHEME)
    self.assertFalse(self.entry.IsPrivate())

  def testIsMine(self):
    self.assertTrue(self.entry.IsMine())

  def testIsNotMine(self):
    self.entry.remove_categories(gdata.docs.data.LABELS_SCHEME)
    self.assertFalse(self.entry.IsMine())

  def testIsSharedWithDomain(self):
    self.assertTrue(self.entry.IsSharedWithDomain())

  def testIsNotSharedWithDomain(self):
    self.entry.remove_categories(gdata.docs.data.LABELS_SCHEME)
    self.assertFalse(self.entry.IsSharedWithDomain())

  def testIsRestrictedDownload(self):
    self.assertTrue(self.entry.IsRestrictedDownload())

  def testIsNotRestrictedDownload(self):
    self.entry.remove_categories(gdata.docs.data.LABELS_SCHEME)
    self.assertFalse(self.entry.IsRestrictedDownload())
class MetadataTest(unittest.TestCase):
  """Offline parsing tests for the DocList metadata feed entry."""

  def setUp(self):
    self.entry = atom.core.parse(test_data.DOCUMENT_LIST_METADATA,
                                 gdata.docs.data.Metadata)

  def testAdditionalRoleInfo(self):
    self.assertEqual(self.entry.additional_role_info[0].kind, 'document')

  def testAdditionalRoleSet(self):
    self.assertEqual(
        self.entry.additional_role_info[0].additional_role_set[0].primaryRole,
        'reader')

  def testAdditionalRole(self):
    self.assertEqual(
        self.entry.additional_role_info[0].additional_role_set[0].\
        additional_role[0].value, 'commenter')
def suite():
  """Build the suite from the test classes actually defined in this module.

  BUG FIX: the original listed DocsHelperTest (never defined in this file)
  and 'AclFeed' (the data-class name, not the AclFeedTest test case), which
  raised NameError when suite() was evaluated; RevisionFeedTest was also
  omitted.
  """
  return conf.build_suite(
      [DataClassSanityTest, CategoryTest, DocsEntryTest,
       AclTest, AclFeedTest, RevisionFeedTest, MetadataTest])
# Run the tests directly when this module is executed as a script.
if __name__ == '__main__':
  unittest.main()
| Python |
#!/usr/bin/python
#
# Copyright (C) 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'api.laurabeth@gmail.com (Laura Beth Lincoln)'
import unittest
try:
from xml.etree import ElementTree
except ImportError:
from elementtree import ElementTree
import gdata
import gdata.spreadsheet
# Canned Atom feed listing one spreadsheet (key='key'); parser fixture for
# the spreadsheet feed tests below. String content must stay byte-for-byte.
SPREADSHEETS_FEED = """<feed xmlns="http://www.w3.org/2005/Atom"
xmlns:openSearch="http://a9.com/-/spec/opensearchrss/1.0/"
xmlns:gs="http://schemas.google.com/spreadsheets/2006">
<id>http://spreadsheets.google.com/feeds/spreadsheets/private/full</id>
<updated>2006-11-17T18:23:45.173Z</updated>
<title type="text">Available Spreadsheets</title>
<link rel="alternate" type="text/html"
href="http://spreadsheets.google.com/ccc?key=key"/>
<link rel="http://schemas.google.com/g/2005#feed"
type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/worksheets/key/private/full"/>
<link rel="self" type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/worksheets/key/private/full"/>
<author>
<name>Fitzwilliam Darcy</name>
<email>fitz@gmail.com</email>
</author>
<openSearch:totalResults>1</openSearch:totalResults>
<openSearch:startIndex>1</openSearch:startIndex>
<openSearch:itemsPerPage>1</openSearch:itemsPerPage>
<entry>
<id>http://spreadsheets.google.com/feeds/spreadsheets/private/full/key</id>
<updated>2006-11-17T18:24:18.231Z</updated>
<title type="text">Groceries R Us</title>
<content type="text">Groceries R Us</content>
<link rel="http://schemas.google.com/spreadsheets/2006#worksheetsfeed"
type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/worksheets/key/private/full"/>
<link rel="alternate" type="text/html"
href="http://spreadsheets.google.com/ccc?key=key"/>
<link rel="self" type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/spreadsheets/private/full/key"/>
<author>
<name>Fitzwilliam Darcy</name>
<email>fitz@gmail.com</email>
</author>
</entry>
</feed>
"""
# Canned worksheets feed with one worksheet ('Sheet1', od6) carrying list
# and cells feed links plus gs:rowCount/gs:colCount.
WORKSHEETS_FEED = """<feed xmlns="http://www.w3.org/2005/Atom"
xmlns:openSearch="http://a9.com/-/spec/opensearchrss/1.0/"
xmlns:gs="http://schemas.google.com/spreadsheets/2006">
<id>http://spreadsheets.google.com/feeds/worksheets/key/private/full</id>
<updated>2006-11-17T18:23:45.173Z</updated>
<title type="text">Groceries R Us</title>
<link rel="alternate" type="text/html"
href="http://spreadsheets.google.com/ccc?key=key"/>
<link rel="http://schemas.google.com/g/2005#feed"
type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/worksheets/key/private/full"/>
<link rel="self" type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/worksheets/key/private/full"/>
<author>
<name>Fitzwilliam Darcy</name>
<email>fitz@gmail.com</email>
</author>
<openSearch:totalResults>1</openSearch:totalResults>
<openSearch:startIndex>1</openSearch:startIndex>
<openSearch:itemsPerPage>1</openSearch:itemsPerPage>
<entry>
<id>http://spreadsheets.google.com/feeds/worksheets/key/private/full/od6</id>
<updated>2006-11-17T18:23:45.173Z</updated>
<title type="text">Sheet1</title>
<content type="text">Sheet1</content>
<link rel="http://schemas.google.com/spreadsheets/2006#listfeed"
type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/list/key/od6/private/full"/>
<link rel="http://schemas.google.com/spreadsheets/2006#cellsfeed"
type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/cells/key/od6/private/full"/>
<link rel="self" type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/worksheets/key/private/full/od6"/>
<gs:rowCount>100</gs:rowCount>
<gs:colCount>20</gs:colCount>
</entry>
</feed>
"""
# Canned cells feed with two cells (R1C1='Name', R1C2='Hours') including
# gs:cell row/col/inputValue attributes.
CELLS_FEED = """<feed xmlns="http://www.w3.org/2005/Atom"
xmlns:openSearch="http://a9.com/-/spec/opensearchrss/1.0/"
xmlns:gs="http://schemas.google.com/spreadsheets/2006">
<id>http://spreadsheets.google.com/feeds/cells/key/od6/private/full</id>
<updated>2006-11-17T18:27:32.543Z</updated>
<title type="text">Sheet1</title>
<link rel="alternate" type="text/html"
href="http://spreadsheets.google.com/ccc?key=key"/>
<link rel="http://schemas.google.com/g/2005#feed" type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/cells/key/od6/private/full"/>
<link rel="http://schemas.google.com/g/2005#post" type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/cells/key/od6/private/full"/>
<link rel="self" type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/cells/key/od6/private/full"/>
<author>
<name>Fitzwilliam Darcy</name>
<email>fitz@gmail.com</email>
</author>
<openSearch:startIndex>1</openSearch:startIndex>
<openSearch:itemsPerPage>1</openSearch:itemsPerPage>
<gs:rowCount>100</gs:rowCount>
<gs:colCount>20</gs:colCount>
<entry>
<id>http://spreadsheets.google.com/feeds/cells/key/od6/private/full/R1C1</id>
<updated>2006-11-17T18:27:32.543Z</updated>
<category scheme="http://schemas.google.com/spreadsheets/2006"
term="http://schemas.google.com/spreadsheets/2006#cell"/>
<title type="text">A1</title>
<content type="text">Name</content>
<link rel="self" type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/cells/key/od6/private/full/R1C1"/>
<link rel="edit" type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/cells/key/od6/private/full/R1C1/bgvjf"/>
<gs:cell row="1" col="1" inputValue="Name">Name</gs:cell>
</entry>
<entry>
<id>http://spreadsheets.google.com/feeds/cells/key/od6/private/full/R1C2</id>
<updated>2006-11-17T18:27:32.543Z</updated>
<category scheme="http://schemas.google.com/spreadsheets/2006"
term="http://schemas.google.com/spreadsheets/2006#cell"/>
<title type="text">B1</title>
<content type="text">Hours</content>
<link rel="self" type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/cells/key/od6/private/full/R1C2"/>
<link rel="edit" type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/cells/key/od6/private/full/R1C2/1pn567"/>
<gs:cell row="1" col="2" inputValue="Hours">Hours</gs:cell>
</entry>
</feed>
"""
# Canned list feed with two rows ('Bingley', 'Charlotte') whose columns are
# exposed as gsx: extension elements (name, hours, items, ipm).
LIST_FEED = """<feed xmlns="http://www.w3.org/2005/Atom"
xmlns:openSearch="http://a9.com/-/spec/opensearchrss/1.0/"
xmlns:gsx="http://schemas.google.com/spreadsheets/2006/extended">
<id>http://spreadsheets.google.com/feeds/list/key/od6/private/full</id>
<updated>2006-11-17T18:23:45.173Z</updated>
<title type="text">Sheet1</title>
<link rel="alternate" type="text/html"
href="http://spreadsheets.google.com/ccc?key=key"/>
<link rel="http://schemas.google.com/g/2005#feed"
type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/list/key/od6/private/full"/>
<link rel="http://schemas.google.com/g/2005#post"
type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/list/key/od6/private/full"/>
<link rel="self" type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/list/key/od6/private/full"/>
<author>
<name>Fitzwilliam Darcy</name>
<email>fitz@gmail.com</email>
</author>
<openSearch:totalResults>2</openSearch:totalResults>
<openSearch:startIndex>1</openSearch:startIndex>
<openSearch:itemsPerPage>2</openSearch:itemsPerPage>
<entry>
<id>http://spreadsheets.google.com/feeds/list/key/od6/private/full/cokwr</id>
<updated>2006-11-17T18:23:45.173Z</updated>
<category scheme="http://schemas.google.com/spreadsheets/2006"
term="http://schemas.google.com/spreadsheets/2006#list"/>
<title type="text">Bingley</title>
<content type="text">Hours: 10, Items: 2, IPM: 0.0033</content>
<link rel="self" type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/list/key/od6/private/full/cokwr"/>
<link rel="edit" type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/list/key/od6/private/full/cokwr/2ehkc2oh7d"/>
<gsx:name>Bingley</gsx:name>
<gsx:hours>10</gsx:hours>
<gsx:items>2</gsx:items>
<gsx:ipm>0.0033</gsx:ipm>
</entry>
<entry>
<id>http://spreadsheets.google.com/feeds/list/key/od6/private/full/cyevm</id>
<updated>2006-11-17T18:23:45.173Z</updated>
<category scheme="http://schemas.google.com/spreadsheets/2006"
term="http://schemas.google.com/spreadsheets/2006#list"/>
<title type="text">Charlotte</title>
<content type="text">Hours: 60, Items: 18000, IPM: 5</content>
<link rel="self" type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/list/key/od6/private/full/cyevm"/>
<link rel="edit" type="application/atom+xml"
href="http://spreadsheets.google.com/feeds/list/key/od6/private/full/cyevm/64rl27px3zyn"/>
<gsx:name>Charlotte</gsx:name>
<gsx:hours>60</gsx:hours>
<gsx:items>18000</gsx:items>
<gsx:ipm>5</gsx:ipm>
</entry>
</feed>
"""
class ColCountTest(unittest.TestCase):
  """Round-trip (serialize/parse) tests for gdata.spreadsheet.ColCount."""

  def setUp(self):
    self.col_count = gdata.spreadsheet.ColCount()

  def testToAndFromString(self):
    """A ColCount's text value survives conversion to XML and back."""
    self.col_count.text = '20'
    # assertEqual (not the deprecated assert_) reports both values on failure.
    self.assertEqual(self.col_count.text, '20')
    new_col_count = gdata.spreadsheet.ColCountFromString(self.col_count.ToString())
    self.assertEqual(self.col_count.text, new_col_count.text)
class RowCountTest(unittest.TestCase):
  """Round-trip (serialize/parse) tests for gdata.spreadsheet.RowCount."""

  def setUp(self):
    self.row_count = gdata.spreadsheet.RowCount()

  def testToAndFromString(self):
    """A RowCount's text value survives conversion to XML and back."""
    self.row_count.text = '100'
    self.assertEqual(self.row_count.text, '100')
    new_row_count = gdata.spreadsheet.RowCountFromString(self.row_count.ToString())
    self.assertEqual(self.row_count.text, new_row_count.text)
class CellTest(unittest.TestCase):
  """Round-trip tests for the attributes of gdata.spreadsheet.Cell."""

  def setUp(self):
    self.cell = gdata.spreadsheet.Cell()

  def testToAndFromString(self):
    """Every Cell attribute survives conversion to XML and back."""
    self.cell.text = 'test cell'
    self.assertEqual(self.cell.text, 'test cell')
    self.cell.row = '1'
    self.assertEqual(self.cell.row, '1')
    self.cell.col = '2'
    self.assertEqual(self.cell.col, '2')
    self.cell.inputValue = 'test input value'
    self.assertEqual(self.cell.inputValue, 'test input value')
    self.cell.numericValue = 'test numeric value'
    self.assertEqual(self.cell.numericValue, 'test numeric value')
    # Serialize, re-parse, and verify every attribute round-tripped.
    new_cell = gdata.spreadsheet.CellFromString(self.cell.ToString())
    self.assertEqual(self.cell.text, new_cell.text)
    self.assertEqual(self.cell.row, new_cell.row)
    self.assertEqual(self.cell.col, new_cell.col)
    self.assertEqual(self.cell.inputValue, new_cell.inputValue)
    self.assertEqual(self.cell.numericValue, new_cell.numericValue)
class CustomTest(unittest.TestCase):
  """Round-trip tests for gdata.spreadsheet.Custom (a named column value)."""

  def setUp(self):
    self.custom = gdata.spreadsheet.Custom()

  def testToAndFromString(self):
    """Both the text and the column name survive XML round-tripping."""
    self.custom.text = 'value'
    self.custom.column = 'column_name'
    self.assertEqual(self.custom.text, 'value')
    self.assertEqual(self.custom.column, 'column_name')
    new_custom = gdata.spreadsheet.CustomFromString(self.custom.ToString())
    self.assertEqual(self.custom.text, new_custom.text)
    self.assertEqual(self.custom.column, new_custom.column)
class SpreadsheetsWorksheetTest(unittest.TestCase):
  """Round-trip tests for gdata.spreadsheet.SpreadsheetsWorksheet."""

  def setUp(self):
    self.worksheet = gdata.spreadsheet.SpreadsheetsWorksheet()

  def testToAndFromString(self):
    """Row and column counts survive conversion to XML and back."""
    self.worksheet.row_count = gdata.spreadsheet.RowCount(text='100')
    self.assertEqual(self.worksheet.row_count.text, '100')
    self.worksheet.col_count = gdata.spreadsheet.ColCount(text='20')
    self.assertEqual(self.worksheet.col_count.text, '20')
    new_worksheet = gdata.spreadsheet.SpreadsheetsWorksheetFromString(
        self.worksheet.ToString())
    self.assertEqual(self.worksheet.row_count.text, new_worksheet.row_count.text)
    self.assertEqual(self.worksheet.col_count.text, new_worksheet.col_count.text)
class SpreadsheetsCellTest(unittest.TestCase):
  """Round-trip tests for the SpreadsheetsCell entry."""

  def setUp(self):
    self.entry = gdata.spreadsheet.SpreadsheetsCell()

  def testToAndFromString(self):
    """Every nested Cell attribute survives conversion to XML and back."""
    self.entry.cell = gdata.spreadsheet.Cell(text='my cell', row='1', col='2',
        inputValue='my input value', numericValue='my numeric value')
    self.assertEqual(self.entry.cell.text, 'my cell')
    self.assertEqual(self.entry.cell.row, '1')
    self.assertEqual(self.entry.cell.col, '2')
    self.assertEqual(self.entry.cell.inputValue, 'my input value')
    self.assertEqual(self.entry.cell.numericValue, 'my numeric value')
    new_cell = gdata.spreadsheet.SpreadsheetsCellFromString(self.entry.ToString())
    self.assertEqual(self.entry.cell.text, new_cell.cell.text)
    self.assertEqual(self.entry.cell.row, new_cell.cell.row)
    self.assertEqual(self.entry.cell.col, new_cell.cell.col)
    self.assertEqual(self.entry.cell.inputValue, new_cell.cell.inputValue)
    self.assertEqual(self.entry.cell.numericValue, new_cell.cell.numericValue)
class SpreadsheetsListTest(unittest.TestCase):
  """Round-trip tests for the SpreadsheetsList (row) entry."""

  def setUp(self):
    self.row = gdata.spreadsheet.SpreadsheetsList()

  def testToAndFromString(self):
    """Custom column names and values survive conversion to XML and back."""
    self.row.custom['column_1'] = gdata.spreadsheet.Custom(column='column_1',
        text='my first column')
    self.row.custom['column_2'] = gdata.spreadsheet.Custom(column='column_2',
        text='my second column')
    self.assertEqual(self.row.custom['column_1'].column, 'column_1')
    self.assertEqual(self.row.custom['column_1'].text, 'my first column')
    self.assertEqual(self.row.custom['column_2'].column, 'column_2')
    self.assertEqual(self.row.custom['column_2'].text, 'my second column')
    new_row = gdata.spreadsheet.SpreadsheetsListFromString(self.row.ToString())
    self.assertEqual(self.row.custom['column_1'].column,
                     new_row.custom['column_1'].column)
    self.assertEqual(self.row.custom['column_1'].text,
                     new_row.custom['column_1'].text)
    self.assertEqual(self.row.custom['column_2'].column,
                     new_row.custom['column_2'].column)
    self.assertEqual(self.row.custom['column_2'].text,
                     new_row.custom['column_2'].text)
class SpreadsheetsSpreadsheetsFeedTest(unittest.TestCase):
  """Parsing tests for the spreadsheets (meta) feed fixture."""

  def setUp(self):
    # SPREADSHEETS_FEED is a module-level XML fixture string.
    self.feed = gdata.spreadsheet.SpreadsheetsSpreadsheetsFeedFromString(
        SPREADSHEETS_FEED)

  def testToAndFromString(self):
    """The sample feed parses to one entry and round-trips through XML."""
    self.assertEqual(len(self.feed.entry), 1)
    for an_entry in self.feed.entry:
      self.assertTrue(
          isinstance(an_entry, gdata.spreadsheet.SpreadsheetsSpreadsheet))
    new_feed = gdata.spreadsheet.SpreadsheetsSpreadsheetsFeedFromString(
        str(self.feed))
    for an_entry in new_feed.entry:
      self.assertTrue(
          isinstance(an_entry, gdata.spreadsheet.SpreadsheetsSpreadsheet))
class SpreadsheetsWorksheetsFeedTest(unittest.TestCase):
  """Parsing tests for the worksheets feed fixture."""

  def setUp(self):
    # WORKSHEETS_FEED is a module-level XML fixture string.
    self.feed = gdata.spreadsheet.SpreadsheetsWorksheetsFeedFromString(
        WORKSHEETS_FEED)

  def testToAndFromString(self):
    """The sample feed parses to one worksheet entry and round-trips."""
    self.assertEqual(len(self.feed.entry), 1)
    for an_entry in self.feed.entry:
      self.assertTrue(
          isinstance(an_entry, gdata.spreadsheet.SpreadsheetsWorksheet))
    new_feed = gdata.spreadsheet.SpreadsheetsWorksheetsFeedFromString(
        str(self.feed))
    for an_entry in new_feed.entry:
      self.assertTrue(
          isinstance(an_entry, gdata.spreadsheet.SpreadsheetsWorksheet))
class SpreadsheetsCellsFeedTest(unittest.TestCase):
  """Parsing tests for the cells feed fixture."""

  def setUp(self):
    # CELLS_FEED is a module-level XML fixture string.
    self.feed = gdata.spreadsheet.SpreadsheetsCellsFeedFromString(
        CELLS_FEED)

  def testToAndFromString(self):
    """The sample feed has two cell entries and keeps its row/col counts."""
    self.assertEqual(len(self.feed.entry), 2)
    for an_entry in self.feed.entry:
      self.assertTrue(isinstance(an_entry, gdata.spreadsheet.SpreadsheetsCell))
    new_feed = gdata.spreadsheet.SpreadsheetsCellsFeedFromString(str(self.feed))
    self.assertTrue(isinstance(new_feed.row_count,
                               gdata.spreadsheet.RowCount))
    self.assertEqual(new_feed.row_count.text, '100')
    self.assertTrue(isinstance(new_feed.col_count,
                               gdata.spreadsheet.ColCount))
    self.assertEqual(new_feed.col_count.text, '20')
    for an_entry in new_feed.entry:
      self.assertTrue(isinstance(an_entry, gdata.spreadsheet.SpreadsheetsCell))
class SpreadsheetsListFeedTest(unittest.TestCase):
  """Parsing tests for the list (rows) feed fixture."""

  def setUp(self):
    # LIST_FEED is a module-level XML fixture string.
    self.feed = gdata.spreadsheet.SpreadsheetsListFeedFromString(
        LIST_FEED)

  def testToAndFromString(self):
    """The sample feed parses to two row entries and round-trips."""
    self.assertEqual(len(self.feed.entry), 2)
    for an_entry in self.feed.entry:
      self.assertTrue(isinstance(an_entry, gdata.spreadsheet.SpreadsheetsList))
    new_feed = gdata.spreadsheet.SpreadsheetsListFeedFromString(str(self.feed))
    for an_entry in new_feed.entry:
      self.assertTrue(isinstance(an_entry, gdata.spreadsheet.SpreadsheetsList))
# Run every spreadsheet-model test case in this module when executed directly.
if __name__ == '__main__':
  unittest.main()
| Python |
#!/usr/bin/env python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module is used for version 2 of the Google Data APIs.
# These tests attempt to connect to Google servers.
__author__ = 's@google.com (John Skidgel)'
# Python imports.
import unittest
import urllib
import urllib2
# Google Data APIs imports.
import gdata.youtube.client
import gdata.youtube.data
import gdata.gauth
import gdata.client
import atom.http_core
import atom.mock_http_core
import atom.core
import gdata.data
import gdata.test_config as conf
# Constants
# Example credentials kept for reference only; real values are supplied at
# run time through the gdata.test_config options registered below.
#DEVELOPER_KEY = 'AI39si4DTx4tY1ZCnIiZJrxtaxzfYuomY20SKDSfIAYrehKForeoHVgAgJZdNcYhmugD103wciae6TRI6M96nSymS8TV1kNP7g'
#CLIENT_ID = 'ytapi-Google-CaptionTube-2rj5q0oh-0'
# Register the command-line/config options these live tests require
# (developer key, client id, and the id of a video to operate on).
conf.options.register_option(conf.YT_DEVELOPER_KEY_OPTION)
conf.options.register_option(conf.YT_CLIENT_ID_OPTION)
conf.options.register_option(conf.YT_VIDEO_ID_OPTION)
# Minimal SubRip (.srt) caption document used by the caption-track tests.
TRACK_BODY_SRT = """1
00:00:04,0 --> 00:00:05,75
My other computer is a data center
"""
class YouTubeClientTest(unittest.TestCase):
  """Tests for gdata.youtube.client.YouTubeClient against live servers.

  Every test returns immediately unless the 'runlive' option is 'true';
  responses may also be replayed from a recording cache configured via
  conf.configure_cache.
  """

  def setUp(self):
    # Only build and configure a client when live tests were requested.
    self.client = None
    if conf.options.get_value('runlive') == 'true':
      self.client = gdata.youtube.client.YouTubeClient()
      conf.configure_client(self.client,
          'YouTubeTest',
          'youtube')

  def tearDown(self):
    conf.close_client(self.client)

  def test_retrieve_video_entry(self):
    """Fetching the configured video returns an entry carrying an ETag."""
    if not conf.options.get_value('runlive') == 'true':
      return
    # Either load the recording or prepare to make a live request.
    conf.configure_cache(self.client, 'test_retrieve_video_entry')
    entry = self.client.get_video_entry(video_id=conf.options.get_value('videoid'))
    self.assertTrue(entry.etag)

  def test_retrieve_video_feed(self):
    """The default videos feed contains at least one entry."""
    if not conf.options.get_value('runlive') == 'true':
      return
    # Either load the recording or prepare to make a live request.
    conf.configure_cache(self.client, 'test_retrieve_video_has_entries')
    entries = self.client.get_videos()
    self.assertTrue(len(entries.entry) > 0)

  def test_retrieve_user_feed(self):
    """A user's uploads feed contains at least one entry."""
    if not conf.options.get_value('runlive') == 'true':
      return
    # Either load the recording or prepare to make a live request.
    # NOTE(review): cache name is shared with test_retrieve_video_feed --
    # presumably deliberate reuse of the same recording; confirm.
    conf.configure_cache(self.client, 'test_retrieve_video_has_entries')
    entries = self.client.get_user_feed(username='joegregoriotest')
    self.assertTrue(len(entries.entry) > 0)

  def test_create_update_delete_captions(self):
    """Creates a caption track, updates it, verifies contents, deletes it."""
    if not conf.options.get_value('runlive') == 'true':
      return
    # Either load the recording or prepare to make a live request.
    conf.configure_cache(self.client, 'test_create_update_delete_captions')
    # Add a track.
    created = self.client.create_track(conf.options.get_value('videoid'), 'Test', 'en',
        TRACK_BODY_SRT, conf.options.get_value('clientid'),
        conf.options.get_value('developerkey'))
    self.assertEqual(created.__class__, gdata.youtube.data.TrackEntry)
    # Update the contents of a track. Language and title cannot be
    # updated due to limitations. A workaround is to delete the original
    # track and replace it with captions that have the desired contents,
    # title, and name.
    # @see 'Updating a caption track' in the protocol guide for captions:
    # http://code.google.com/intl/en/apis/youtube/2.0/
    # developers_guide_protocol_captions.html
    updated = self.client.update_track(conf.options.get_value('videoid'), created,
        TRACK_BODY_SRT, conf.options.get_value('clientid'),
        conf.options.get_value('developerkey'))
    self.assertEqual(updated.__class__, gdata.youtube.data.TrackEntry)
    # Retrieve the captions for the track for comparison testing.
    track_url = updated.content.src
    track = self.client.get_caption_track(
        track_url, conf.options.get_value('clientid'),
        conf.options.get_value('developerkey'))
    track_contents = track.read()
    self.assertEqual(track_contents, TRACK_BODY_SRT)
    # Delete a track.
    resp = self.client.delete_track(conf.options.get_value('videoid'),
                                    created,
                                    conf.options.get_value('clientid'),
                                    conf.options.get_value('developerkey'))
    self.assertEqual(200, resp.status)
def suite():
  """Assemble the test suite containing every test case in this module."""
  test_cases = [YouTubeClientTest]
  return conf.build_suite(test_cases)
# Run this module's suite (rather than unittest.main) so the shared
# gdata.test_config runner settings are used.
if __name__ == '__main__':
  unittest.TextTestRunner().run(suite())
| Python |
#!/usr/bin/python
#
# Copyright (C) 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'api.jhartmann@gmail.com (Jochen Hartmann)'
import getpass
import time
import StringIO
import random
import unittest
import atom
import gdata.youtube
import gdata.youtube.service
# Client-id string identifying this test suite to the YouTube API; used as
# both the client_id and the request source in setUp below.
YOUTUBE_TEST_CLIENT_ID = 'ytapi-pythonclientlibrary_servicetest'
class YouTubeServiceTest(unittest.TestCase):
def setUp(self):
self.client = gdata.youtube.service.YouTubeService()
self.client.email = username
self.client.password = password
self.client.source = YOUTUBE_TEST_CLIENT_ID
self.client.developer_key = developer_key
self.client.client_id = YOUTUBE_TEST_CLIENT_ID
self.client.ProgrammaticLogin()
def testRetrieveVideoFeed(self):
feed = self.client.GetYouTubeVideoFeed(
'https://gdata.youtube.com/feeds/api/standardfeeds/recently_featured');
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 0)
for entry in feed.entry:
self.assert_(entry.title.text != '')
def testRetrieveTopRatedVideoFeed(self):
feed = self.client.GetTopRatedVideoFeed()
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 10)
def testRetrieveMostViewedVideoFeed(self):
feed = self.client.GetMostViewedVideoFeed()
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 10)
def testRetrieveRecentlyFeaturedVideoFeed(self):
feed = self.client.GetRecentlyFeaturedVideoFeed()
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 10)
def testRetrieveWatchOnMobileVideoFeed(self):
feed = self.client.GetWatchOnMobileVideoFeed()
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 10)
def testRetrieveTopFavoritesVideoFeed(self):
feed = self.client.GetTopFavoritesVideoFeed()
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 10)
def testRetrieveMostRecentVideoFeed(self):
feed = self.client.GetMostRecentVideoFeed()
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 10)
def testRetrieveMostDiscussedVideoFeed(self):
feed = self.client.GetMostDiscussedVideoFeed()
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 10)
def testRetrieveMostLinkedVideoFeed(self):
feed = self.client.GetMostLinkedVideoFeed()
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 10)
def testRetrieveMostRespondedVideoFeed(self):
feed = self.client.GetMostRespondedVideoFeed()
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 10)
def testRetrieveVideoEntryByUri(self):
entry = self.client.GetYouTubeVideoEntry(
'https://gdata.youtube.com/feeds/videos/Ncakifd_16k')
self.assert_(isinstance(entry, gdata.youtube.YouTubeVideoEntry))
self.assert_(entry.title.text != '')
def testRetrieveVideoEntryByVideoId(self):
entry = self.client.GetYouTubeVideoEntry(video_id='Ncakifd_16k')
self.assert_(isinstance(entry, gdata.youtube.YouTubeVideoEntry))
self.assert_(entry.title.text != '')
def testRetrieveUserVideosbyUri(self):
feed = self.client.GetYouTubeUserFeed(
'https://gdata.youtube.com/feeds/users/gdpython/uploads')
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 0)
def testRetrieveUserVideosbyUsername(self):
feed = self.client.GetYouTubeUserFeed(username='gdpython')
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 0)
def testSearchWithVideoQuery(self):
query = gdata.youtube.service.YouTubeVideoQuery()
query.vq = 'google'
query.max_results = 8
feed = self.client.YouTubeQuery(query)
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assertEquals(len(feed.entry), 8)
def testDirectVideoUploadStatusUpdateAndDeletion(self):
self.assertEquals(self.client.developer_key, developer_key)
self.assertEquals(self.client.client_id, YOUTUBE_TEST_CLIENT_ID)
self.assertEquals(self.client.additional_headers['X-GData-Key'],
'key=' + developer_key)
self.assertEquals(self.client.additional_headers['X-Gdata-Client'],
YOUTUBE_TEST_CLIENT_ID)
test_video_title = 'my cool video ' + str(random.randint(1000,5000))
test_video_description = 'description ' + str(random.randint(1000,5000))
my_media_group = gdata.media.Group(
title = gdata.media.Title(text=test_video_title),
description = gdata.media.Description(description_type='plain',
text=test_video_description),
keywords = gdata.media.Keywords(text='video, foo'),
category = gdata.media.Category(
text='Autos',
scheme='http://gdata.youtube.com/schemas/2007/categories.cat',
label='Autos'),
player=None
)
self.assert_(isinstance(my_media_group, gdata.media.Group))
# Set Geo location to 37,-122 lat, long
where = gdata.geo.Where()
where.set_location((37.0,-122.0))
video_entry = gdata.youtube.YouTubeVideoEntry(media=my_media_group,
geo=where)
self.assert_(isinstance(video_entry, gdata.youtube.YouTubeVideoEntry))
new_entry = self.client.InsertVideoEntry(video_entry, video_file_location)
self.assert_(isinstance(new_entry, gdata.youtube.YouTubeVideoEntry))
self.assertEquals(new_entry.title.text, test_video_title)
self.assertEquals(new_entry.media.description.text, test_video_description)
self.assert_(new_entry.id.text)
# check upload status also
upload_status = self.client.CheckUploadStatus(new_entry)
self.assert_(upload_status[0] != '')
# test updating entry meta-data
new_video_description = 'description ' + str(random.randint(1000,5000))
new_entry.media.description.text = new_video_description
updated_entry = self.client.UpdateVideoEntry(new_entry)
self.assert_(isinstance(updated_entry, gdata.youtube.YouTubeVideoEntry))
self.assertEquals(updated_entry.media.description.text,
new_video_description)
# sleep for 10 seconds
time.sleep(10)
# test to delete the entry
value = self.client.DeleteVideoEntry(updated_entry)
if not value:
# sleep more and try again
time.sleep(20)
# test to delete the entry
value = self.client.DeleteVideoEntry(updated_entry)
self.assert_(value == True)
def testDirectVideoUploadWithDeveloperTags(self):
self.assertEquals(self.client.developer_key, developer_key)
self.assertEquals(self.client.client_id, YOUTUBE_TEST_CLIENT_ID)
self.assertEquals(self.client.additional_headers['X-GData-Key'],
'key=' + developer_key)
self.assertEquals(self.client.additional_headers['X-Gdata-Client'],
YOUTUBE_TEST_CLIENT_ID)
test_video_title = 'my cool video ' + str(random.randint(1000,5000))
test_video_description = 'description ' + str(random.randint(1000,5000))
test_developer_tag_01 = 'tag' + str(random.randint(1000,5000))
test_developer_tag_02 = 'tag' + str(random.randint(1000,5000))
test_developer_tag_03 = 'tag' + str(random.randint(1000,5000))
my_media_group = gdata.media.Group(
title = gdata.media.Title(text=test_video_title),
description = gdata.media.Description(description_type='plain',
text=test_video_description),
keywords = gdata.media.Keywords(text='video, foo'),
category = [gdata.media.Category(
text='Autos',
scheme='http://gdata.youtube.com/schemas/2007/categories.cat',
label='Autos')],
player=None
)
self.assert_(isinstance(my_media_group, gdata.media.Group))
video_entry = gdata.youtube.YouTubeVideoEntry(media=my_media_group)
original_developer_tags = [test_developer_tag_01, test_developer_tag_02,
test_developer_tag_03]
dev_tags = video_entry.AddDeveloperTags(original_developer_tags)
for dev_tag in dev_tags:
self.assert_(dev_tag.text in original_developer_tags)
self.assert_(isinstance(video_entry, gdata.youtube.YouTubeVideoEntry))
new_entry = self.client.InsertVideoEntry(video_entry, video_file_location)
self.assert_(isinstance(new_entry, gdata.youtube.YouTubeVideoEntry))
self.assertEquals(new_entry.title.text, test_video_title)
self.assertEquals(new_entry.media.description.text, test_video_description)
self.assert_(new_entry.id.text)
developer_tags_from_new_entry = new_entry.GetDeveloperTags()
for dev_tag in developer_tags_from_new_entry:
self.assert_(dev_tag.text in original_developer_tags)
self.assertEquals(len(developer_tags_from_new_entry),
len(original_developer_tags))
# sleep for 10 seconds
time.sleep(10)
# test to delete the entry
value = self.client.DeleteVideoEntry(new_entry)
if not value:
# sleep more and try again
time.sleep(20)
# test to delete the entry
value = self.client.DeleteVideoEntry(new_entry)
self.assert_(value == True)
def testBrowserBasedVideoUpload(self):
self.assertEquals(self.client.developer_key, developer_key)
self.assertEquals(self.client.client_id, YOUTUBE_TEST_CLIENT_ID)
self.assertEquals(self.client.additional_headers['X-GData-Key'],
'key=' + developer_key)
self.assertEquals(self.client.additional_headers['X-Gdata-Client'],
YOUTUBE_TEST_CLIENT_ID)
test_video_title = 'my cool video ' + str(random.randint(1000,5000))
test_video_description = 'description ' + str(random.randint(1000,5000))
my_media_group = gdata.media.Group(
title = gdata.media.Title(text=test_video_title),
description = gdata.media.Description(description_type='plain',
text=test_video_description),
keywords = gdata.media.Keywords(text='video, foo'),
category = gdata.media.Category(
text='Autos',
scheme='http://gdata.youtube.com/schemas/2007/categories.cat',
label='Autos'),
player=None
)
self.assert_(isinstance(my_media_group, gdata.media.Group))
video_entry = gdata.youtube.YouTubeVideoEntry(media=my_media_group)
self.assert_(isinstance(video_entry, gdata.youtube.YouTubeVideoEntry))
response = self.client.GetFormUploadToken(video_entry)
self.assert_(response[0].startswith(
'https://uploads.gdata.youtube.com/action/FormDataUpload/'))
self.assert_(len(response[0]) > 55)
self.assert_(len(response[1]) > 100)
def testRetrieveRelatedVideoFeedByUri(self):
feed = self.client.GetYouTubeRelatedVideoFeed(
'https://gdata.youtube.com/feeds/videos/Ncakifd_16k/related')
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 0)
def testRetrieveRelatedVideoFeedById(self):
feed = self.client.GetYouTubeRelatedVideoFeed(video_id = 'Ncakifd_16k')
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 0)
def testRetrieveResponseVideoFeedByUri(self):
feed = self.client.GetYouTubeVideoResponseFeed(
'https://gdata.youtube.com/feeds/videos/Ncakifd_16k/responses')
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoResponseFeed))
self.assert_(len(feed.entry) > 0)
def testRetrieveResponseVideoFeedById(self):
feed = self.client.GetYouTubeVideoResponseFeed(video_id='Ncakifd_16k')
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoResponseFeed))
self.assert_(len(feed.entry) > 0)
def testRetrieveVideoCommentFeedByUri(self):
feed = self.client.GetYouTubeVideoCommentFeed(
'https://gdata.youtube.com/feeds/api/videos/Ncakifd_16k/comments')
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoCommentFeed))
self.assert_(len(feed.entry) > 0)
def testRetrieveVideoCommentFeedByVideoId(self):
feed = self.client.GetYouTubeVideoCommentFeed(video_id='Ncakifd_16k')
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoCommentFeed))
self.assert_(len(feed.entry) > 0)
def testAddComment(self):
video_id = '9g6buYJTt_g'
video_entry = self.client.GetYouTubeVideoEntry(video_id=video_id)
random_comment_text = 'test_comment_' + str(random.randint(1000,50000))
self.client.AddComment(comment_text=random_comment_text,
video_entry=video_entry)
comment_feed = self.client.GetYouTubeVideoCommentFeed(video_id=video_id)
comment_found = False
for item in comment_feed.entry:
if (item.content.text == random_comment_text):
comment_found = True
self.assertEquals(comment_found, True)
def testAddRating(self):
video_id_to_rate = 'Ncakifd_16k'
video_entry = self.client.GetYouTubeVideoEntry(video_id=video_id_to_rate)
response = self.client.AddRating(3, video_entry)
self.assert_(isinstance(response, gdata.GDataEntry))
def testRetrievePlaylistFeedByUri(self):
feed = self.client.GetYouTubePlaylistFeed(
'https://gdata.youtube.com/feeds/users/gdpython/playlists')
self.assert_(isinstance(feed, gdata.youtube.YouTubePlaylistFeed))
self.assert_(len(feed.entry) > 0)
def testRetrievePlaylistListFeedByUsername(self):
feed = self.client.GetYouTubePlaylistFeed(username='gdpython')
self.assert_(isinstance(feed, gdata.youtube.YouTubePlaylistFeed))
self.assert_(len(feed.entry) > 0)
def testRetrievePlaylistVideoFeed(self):
feed = self.client.GetYouTubePlaylistVideoFeed(
'https://gdata.youtube.com/feeds/api/playlists/BCB3BB96DF51B505')
self.assert_(isinstance(feed, gdata.youtube.YouTubePlaylistVideoFeed))
self.assert_(len(feed.entry) > 0)
self.assert_(isinstance(feed.entry[0],
gdata.youtube.YouTubePlaylistVideoEntry))
def testAddUpdateAndDeletePlaylist(self):
test_playlist_title = 'my test playlist ' + str(random.randint(1000,3000))
test_playlist_description = 'test playlist '
response = self.client.AddPlaylist(test_playlist_title,
test_playlist_description)
self.assert_(isinstance(response, gdata.youtube.YouTubePlaylistEntry))
new_playlist_title = 'my updated playlist ' + str(random.randint(1000,4000))
new_playlist_description = 'my updated playlist '
playlist_entry_id = response.id.text.split('/')[-1]
updated_playlist = self.client.UpdatePlaylist(playlist_entry_id,
new_playlist_title,
new_playlist_description)
playlist_feed = self.client.GetYouTubePlaylistFeed()
update_successful = False
for playlist_entry in playlist_feed.entry:
if playlist_entry.title.text == new_playlist_title:
update_successful = True
break
self.assertEquals(update_successful, True)
# wait
time.sleep(10)
# delete it
playlist_uri = updated_playlist.id.text
response = self.client.DeletePlaylist(playlist_uri)
self.assertEquals(response, True)
def testAddUpdateAndDeletePrivatePlaylist(self):
test_playlist_title = 'my test playlist ' + str(random.randint(1000,3000))
test_playlist_description = 'test playlist '
response = self.client.AddPlaylist(test_playlist_title,
test_playlist_description,
playlist_private=True)
self.assert_(isinstance(response, gdata.youtube.YouTubePlaylistEntry))
new_playlist_title = 'my updated playlist ' + str(random.randint(1000,4000))
new_playlist_description = 'my updated playlist '
playlist_entry_id = response.id.text.split('/')[-1]
updated_playlist = self.client.UpdatePlaylist(playlist_entry_id,
new_playlist_title,
new_playlist_description,
playlist_private=True)
playlist_feed = self.client.GetYouTubePlaylistFeed()
update_successful = False
playlist_still_private = False
for playlist_entry in playlist_feed.entry:
if playlist_entry.title.text == new_playlist_title:
update_successful = True
if playlist_entry.private is not None:
playlist_still_private = True
self.assertEquals(update_successful, True)
self.assertEquals(playlist_still_private, True)
# wait
time.sleep(10)
# delete it
playlist_uri = updated_playlist.id.text
response = self.client.DeletePlaylist(playlist_uri)
self.assertEquals(response, True)
def testAddEditAndDeleteVideoFromPlaylist(self):
test_playlist_title = 'my test playlist ' + str(random.randint(1000,3000))
test_playlist_description = 'test playlist '
response = self.client.AddPlaylist(test_playlist_title,
test_playlist_description)
self.assert_(isinstance(response, gdata.youtube.YouTubePlaylistEntry))
custom_video_title = 'my test video on my test playlist'
custom_video_description = 'this is a test video on my test playlist'
video_id = 'Ncakifd_16k'
playlist_uri = response.feed_link[0].href
time.sleep(10)
response = self.client.AddPlaylistVideoEntryToPlaylist(
playlist_uri, video_id, custom_video_title, custom_video_description)
self.assert_(isinstance(response, gdata.youtube.YouTubePlaylistVideoEntry))
playlist_entry_id = response.id.text.split('/')[-1]
playlist_uri = response.id.text.split(playlist_entry_id)[0][:-1]
new_video_title = 'video number ' + str(random.randint(1000,3000))
new_video_description = 'test video'
time.sleep(10)
response = self.client.UpdatePlaylistVideoEntryMetaData(
playlist_uri,
playlist_entry_id,
new_video_title,
new_video_description,
1)
self.assert_(isinstance(response, gdata.youtube.YouTubePlaylistVideoEntry))
time.sleep(10)
playlist_entry_id = response.id.text.split('/')[-1]
# remove video from playlist
response = self.client.DeletePlaylistVideoEntry(playlist_uri,
playlist_entry_id)
self.assertEquals(response, True)
time.sleep(10)
# delete the playlist
response = self.client.DeletePlaylist(playlist_uri)
self.assertEquals(response, True)
def testRetrieveSubscriptionFeedByUri(self):
feed = self.client.GetYouTubeSubscriptionFeed(
'https://gdata.youtube.com/feeds/users/gdpython/subscriptions')
self.assert_(isinstance(feed, gdata.youtube.YouTubeSubscriptionFeed))
self.assert_(len(feed.entry) == 3)
subscription_to_channel_found = False
subscription_to_favorites_found = False
subscription_to_query_found = False
all_types_found = False
for entry in feed.entry:
self.assert_(isinstance(entry, gdata.youtube.YouTubeSubscriptionEntry))
subscription_type = entry.GetSubscriptionType()
if subscription_type == 'channel':
subscription_to_channel_found = True
elif subscription_type == 'favorites':
subscription_to_favorites_found = True
elif subscription_type == 'query':
subscription_to_query_found = True
if (subscription_to_channel_found and subscription_to_favorites_found and
subscription_to_query_found):
all_types_found = True
self.assertEquals(all_types_found, True)
def testRetrieveSubscriptionFeedByUsername(self):
feed = self.client.GetYouTubeSubscriptionFeed(username='gdpython')
self.assert_(isinstance(feed, gdata.youtube.YouTubeSubscriptionFeed))
self.assert_(len(feed.entry) == 3)
subscription_to_channel_found = False
subscription_to_favorites_found = False
subscription_to_query_found = False
all_types_found = False
for entry in feed.entry:
self.assert_(isinstance(entry, gdata.youtube.YouTubeSubscriptionEntry))
subscription_type = entry.GetSubscriptionType()
if subscription_type == 'channel':
subscription_to_channel_found = True
elif subscription_type == 'favorites':
subscription_to_favorites_found = True
elif subscription_type == 'query':
subscription_to_query_found = True
if (subscription_to_channel_found and subscription_to_favorites_found and
subscription_to_query_found):
all_types_found = True
self.assertEquals(all_types_found, True)
def testRetrieveUserProfileByUri(self):
user = self.client.GetYouTubeUserEntry(
'https://gdata.youtube.com/feeds/users/gdpython')
self.assert_(isinstance(user, gdata.youtube.YouTubeUserEntry))
self.assertEquals(user.location.text, 'US')
def testRetrieveUserProfileByUsername(self):
user = self.client.GetYouTubeUserEntry(username='gdpython')
self.assert_(isinstance(user, gdata.youtube.YouTubeUserEntry))
self.assertEquals(user.location.text, 'US')
def testRetrieveUserFavoritesFeed(self):
feed = self.client.GetUserFavoritesFeed(username='gdpython')
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 0)
def testRetrieveDefaultUserFavoritesFeed(self):
feed = self.client.GetUserFavoritesFeed()
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 0)
def testAddAndDeleteVideoFromFavorites(self):
video_id = 'Ncakifd_16k'
video_entry = self.client.GetYouTubeVideoEntry(video_id=video_id)
response = self.client.AddVideoEntryToFavorites(video_entry)
self.assert_(isinstance(response, gdata.GDataEntry))
time.sleep(10)
response = self.client.DeleteVideoEntryFromFavorites(video_id)
self.assertEquals(response, True)
def testRetrieveContactFeedByUri(self):
feed = self.client.GetYouTubeContactFeed(
'https://gdata.youtube.com/feeds/users/gdpython/contacts')
self.assert_(isinstance(feed, gdata.youtube.YouTubeContactFeed))
self.assertEquals(len(feed.entry), 1)
def testRetrieveContactFeedByUsername(self):
    """The contacts feed fetched by username has exactly one contact."""
    contacts = self.client.GetYouTubeContactFeed(username='gdpython')
    self.assert_(isinstance(contacts, gdata.youtube.YouTubeContactFeed))
    self.assertEqual(len(contacts.entry), 1)
if __name__ == '__main__':
    # These tests hit the live YouTube API with the account entered below,
    # so they must only be run against a disposable test account.
    print ('NOTE: Please run these tests only with a test account. '
           'The tests may delete or update your data.')
    # Credentials and the upload fixture are collected interactively; the
    # test cases read them through these module-level globals.
    username = raw_input('Please enter your username: ')
    password = getpass.getpass()
    developer_key = raw_input('Please enter your developer key: ')
    video_file_location = raw_input(
        'Please enter the absolute path to a video file: ')
    unittest.main()
| Python |
#!/usr/bin/python
#
# Copyright (C) 2006 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'api.jscudder (Jeff Scudder)'
import getpass
import os.path
import unittest
try:
    from xml.etree import ElementTree
except ImportError:
    from elementtree import ElementTree
import atom
import atom.http
import atom.mock_http
import atom.mock_http_core
import atom.service
import atom.token_store
import gdata
import gdata.auth
import gdata.base
import gdata.service
from gdata import test_data
# Credentials are filled in by the interactive prompt in the __main__ block
# below; the test cases read these module-level globals in setUp().
username = ''
password = ''
# Image fixture uploaded by the media tests; path is relative to this file.
test_image_location = '../testimage.jpg'
test_image_name = 'testimage.jpg'
class GDataServiceMediaUnitTest(unittest.TestCase):
    """Live media upload/update/delete tests against Picasa Web Albums.

    Reads the module-level username/password globals; every test operates
    on a scratch album created in setUp and removed in tearDown.
    """

    def setUp(self):
        """Logs in ('lh2' service) and creates the scratch album."""
        self.gd_client = gdata.service.GDataService()
        self.gd_client.email = username
        self.gd_client.password = password
        self.gd_client.service = 'lh2'
        self.gd_client.source = 'GDataService Media "Unit" Tests'
        try:
            self.gd_client.ProgrammaticLogin()
        except gdata.service.CaptchaRequired:
            self.fail('Required Captcha')
        except gdata.service.BadAuthentication:
            self.fail('Bad Authentication')
        except gdata.service.Error:
            self.fail('Login Error')
        # Create a test album to hold the uploaded photos.
        gd_entry = gdata.GDataEntry()
        gd_entry.title = atom.Title(text='GData Test Album')
        gd_entry.category.append(atom.Category(
            scheme='http://schemas.google.com/g/2005#kind',
            term='http://schemas.google.com/photos/2007#album'))
        self.album_entry = self.gd_client.Post(gd_entry,
            'http://picasaweb.google.com/data/feed/api/user/' + username)

    def tearDown(self):
        """Deletes the scratch album (re-fetched for a fresh edit link)."""
        album_entry = self.gd_client.Get(self.album_entry.id.text)
        self.gd_client.Delete(album_entry.GetEditLink().href)

    def testSourceGeneratesUserAgentHeader(self):
        """Setting .source must be reflected in the User-Agent header."""
        self.gd_client.source = 'GoogleInc-ServiceUnitTest-1'
        self.assert_(self.gd_client.additional_headers['User-Agent'].startswith(
            'GoogleInc-ServiceUnitTest-1 GData-Python'))

    def testMedia1(self):
        """Media-only insert, media+metadata update, binary read, delete."""
        # Create media-only.
        ms = gdata.MediaSource()
        ms.setFile(test_image_location, 'image/jpeg')
        media_entry = self.gd_client.Post(None,
            self.album_entry.GetFeedLink().href, media_source=ms)
        self.assert_(media_entry is not None)
        self.assert_(isinstance(media_entry, gdata.GDataEntry))
        self.assert_(media_entry.IsMedia())
        # Update media & metadata in a single multipart PUT.
        ms = gdata.MediaSource()
        ms.setFile(test_image_location, 'image/jpeg')
        media_entry.summary = atom.Summary(text='Test Image')
        media_entry2 = self.gd_client.Put(media_entry,
            media_entry.GetEditLink().href, media_source=ms)
        self.assert_(media_entry2 is not None)
        self.assert_(isinstance(media_entry2, gdata.GDataEntry))
        self.assert_(media_entry2.IsMedia())
        self.assert_(media_entry2.summary.text == 'Test Image')
        # Read back the media binary and check it is non-empty.
        imageSource = self.gd_client.GetMedia(media_entry2.GetMediaURL())
        self.assert_(isinstance(imageSource, gdata.MediaSource))
        self.assert_(imageSource.content_type == 'image/jpeg')
        self.assert_(imageSource.content_length)
        imageData = imageSource.file_handle.read()
        self.assert_(imageData)
        # Delete entry.
        response = self.gd_client.Delete(media_entry2.GetEditLink().href)
        self.assert_(response)

    def testMedia2(self):
        """Insert media together with metadata, then update media alone."""
        # Create media & metadata in a single multipart POST.
        ms = gdata.MediaSource()
        ms.setFile(test_image_location, 'image/jpeg')
        new_media_entry = gdata.GDataEntry()
        new_media_entry.title = atom.Title(text='testimage1.jpg')
        new_media_entry.summary = atom.Summary(text='Test Image')
        new_media_entry.category.append(atom.Category(scheme=
            'http://schemas.google.com/g/2005#kind', term=
            'http://schemas.google.com/photos/2007#photo'))
        media_entry = self.gd_client.Post(new_media_entry,
            self.album_entry.GetFeedLink().href, media_source=ms)
        self.assert_(media_entry is not None)
        self.assert_(isinstance(media_entry, gdata.GDataEntry))
        self.assert_(media_entry.IsMedia())
        self.assert_(media_entry.summary.text == 'Test Image')
        # Update media only, via the edit-media link.
        ms = gdata.MediaSource()
        ms.setFile(test_image_location, 'image/jpeg')
        media_entry = self.gd_client.Put(None,
            media_entry.GetEditMediaLink().href, media_source=ms)
        self.assert_(media_entry is not None)
        self.assert_(isinstance(media_entry, gdata.GDataEntry))
        self.assert_(media_entry.IsMedia())
        # Delete entry.
        response = self.gd_client.Delete(media_entry.GetEditLink().href)
        self.assert_(response)

    def testMediaConstructorDefaults(self):
        """setFile infers the file name from the path."""
        ms = gdata.MediaSource()
        ms.setFile(test_image_location, 'image/jpeg')
        self.assert_(ms is not None)
        self.assert_(isinstance(ms, gdata.MediaSource))
        self.assertEquals(ms.file_name, test_image_name)
        self.assertEquals(ms.content_type, 'image/jpeg')

    def testMediaConstructorWithFilePath(self):
        """Passing file_path to the constructor is equivalent to setFile."""
        ms = gdata.MediaSource(file_path=test_image_location,
                               content_type='image/jpeg')
        self.assert_(ms is not None)
        self.assert_(isinstance(ms, gdata.MediaSource))
        self.assertEquals(ms.file_name, test_image_name)
        self.assertEquals(ms.content_type, 'image/jpeg')

    def testMediaConstructorWithFileHandle(self):
        """An open handle plus explicit size and name is accepted as-is."""
        # Bug fix: open in binary mode — the fixture is a JPEG and text mode
        # ('r') corrupts it on platforms that translate line endings.
        fh = open(test_image_location, 'rb')
        # Renamed from 'len', which shadowed the built-in.
        size = os.path.getsize(test_image_location)
        ms = gdata.MediaSource(fh, 'image/jpeg', size,
                               file_name=test_image_location)
        self.assert_(ms is not None)
        self.assert_(isinstance(ms, gdata.MediaSource))
        self.assertEquals(ms.file_name, test_image_location)
        self.assertEquals(ms.content_type, 'image/jpeg')
class GDataServiceUnitTest(unittest.TestCase):
    """Live authentication and CRUD tests against the Google Base API.

    Uses the module-level username/password globals collected by the
    __main__ prompt; most tests log in first and fail fast on auth errors.
    """

    # Developer key sent as the X-Google-Key header on Google Base requests.
    _GBASE_KEY = ('ABQIAAAAoLioN3buSs9KqIIq9V' +
                  'mkFxT2yXp_ZAY8_ufC3CFXhHIE' +
                  '1NvwkxRK8C1Q8OWhsWA2AIKv-c' +
                  'VKlVrNhQ')

    def setUp(self):
        """Builds an unauthenticated client pointed at the gbase service."""
        self.gd_client = gdata.service.GDataService()
        self.gd_client.email = username
        self.gd_client.password = password
        self.gd_client.service = 'gbase'
        self.gd_client.source = 'GDataClient "Unit" Tests'

    def _Login(self):
        """Logs in, turning any authentication failure into a test failure."""
        try:
            self.gd_client.ProgrammaticLogin()
        except gdata.service.CaptchaRequired:
            self.fail('Required Captcha')
        except gdata.service.BadAuthentication:
            self.fail('Bad Authentication')
        except gdata.service.Error:
            self.fail('Login Error')

    def _TargetGoogleBase(self, send_key=True):
        """Points the client at base.google.com, optionally adding the key."""
        if send_key:
            self.gd_client.additional_headers = {
                'X-Google-Key': self._GBASE_KEY}
        self.gd_client.server = 'base.google.com'

    def testProperties(self):
        """email and password attributes are plain read/write properties."""
        email_string = 'Test Email'
        password_string = 'Passwd'
        self.gd_client.email = email_string
        self.assertEquals(self.gd_client.email, email_string)
        self.gd_client.password = password_string
        self.assertEquals(self.gd_client.password, password_string)

    def testCorrectLogin(self):
        """A successful login stores a ClientLoginToken and no captcha."""
        try:
            self.gd_client.ProgrammaticLogin()
            self.assert_(isinstance(
                self.gd_client.token_store.find_token(
                    'http://base.google.com/base/feeds/'),
                gdata.auth.ClientLoginToken))
            self.assert_(self.gd_client.captcha_token is None)
            self.assert_(self.gd_client.captcha_url is None)
        except gdata.service.CaptchaRequired:
            self.fail('Required Captcha')

    def testDefaultHttpClient(self):
        """A fresh service uses atom.http.HttpClient for transport."""
        self.assert_(isinstance(self.gd_client.http_client,
                                atom.http.HttpClient))

    def testGet(self):
        """An authenticated GET on the snippets feed parses as atom.Feed."""
        self._Login()
        self._TargetGoogleBase()
        result = self.gd_client.Get('/base/feeds/snippets?bq=digital+camera')
        self.assert_(result is not None)
        self.assert_(isinstance(result, atom.Feed))

    def testGetWithAuthentication(self):
        """An authenticated GET on the items feed parses as atom.Feed."""
        self._Login()
        self._TargetGoogleBase()
        result = self.gd_client.Get('/base/feeds/items?bq=digital+camera')
        self.assert_(result is not None)
        self.assert_(isinstance(result, atom.Feed))

    def testGetEntry(self):
        """GetEntry on a feed URL must raise UnexpectedReturnType."""
        self._Login()
        self._TargetGoogleBase(send_key=False)
        try:
            result = self.gd_client.GetEntry(
                '/base/feeds/items?bq=digital+camera')
            self.fail(
                'Result from server in GetEntry should have raised an exception')
        except gdata.service.UnexpectedReturnType:
            pass

    def testGetFeed(self):
        """GetFeed on a feed URL returns an atom.Feed."""
        self._Login()
        self._TargetGoogleBase(send_key=False)
        result = self.gd_client.GetFeed('/base/feeds/items?bq=digital+camera')
        self.assert_(result is not None)
        self.assert_(isinstance(result, atom.Feed))

    def testGetWithResponseTransformer(self):
        """A converter turns the raw response into the requested feed type."""
        # Query Google Base and interpret the results as a GBaseSnippetFeed.
        feed = self.gd_client.Get(
            'http://www.google.com/base/feeds/snippets?bq=digital+camera',
            converter=gdata.base.GBaseSnippetFeedFromString)
        self.assertEquals(isinstance(feed, gdata.base.GBaseSnippetFeed), True)

    def testPostPutAndDelete(self):
        """Round-trips an item: insert, update a label, then delete."""
        self._Login()
        self._TargetGoogleBase()
        # Insert a new item.
        response = self.gd_client.Post(test_data.TEST_BASE_ENTRY,
                                       '/base/feeds/items')
        self.assert_(response is not None)
        self.assert_(isinstance(response, atom.Entry))
        self.assert_(response.category[0].term == 'products')
        # Find the item id of the created item.  Bug fix: str.lstrip strips
        # a character *set*, not a prefix, so the previous
        # lstrip('http://www.google.com/base/feeds/items/') could also eat
        # leading characters of the id itself; take the last path segment.
        item_id = response.id.text.split('/')[-1]
        self.assert_(item_id is not None)
        updated_xml = gdata.base.GBaseItemFromString(test_data.TEST_BASE_ENTRY)
        # Change one of the labels in the item.
        updated_xml.label[2].text = 'beach ball'
        # Update the item.
        response = self.gd_client.Put(updated_xml,
                                      '/base/feeds/items/%s' % item_id)
        self.assert_(response is not None)
        new_base_item = gdata.base.GBaseItemFromString(str(response))
        self.assert_(isinstance(new_base_item, atom.Entry))
        # Delete the item the test just created.
        response = self.gd_client.Delete('/base/feeds/items/%s' % item_id)
        self.assert_(response)

    def testPostPutAndDeleteWithConverters(self):
        """Same round-trip as above, parsing responses with converters."""
        self._Login()
        self._TargetGoogleBase()
        # Insert a new item.
        response = self.gd_client.Post(test_data.TEST_BASE_ENTRY,
            '/base/feeds/items', converter=gdata.base.GBaseItemFromString)
        self.assert_(response is not None)
        self.assert_(isinstance(response, atom.Entry))
        self.assert_(isinstance(response, gdata.base.GBaseItem))
        self.assert_(response.category[0].term == 'products')
        updated_xml = gdata.base.GBaseItemFromString(test_data.TEST_BASE_ENTRY)
        # Change one of the labels in the item.
        updated_xml.label[2].text = 'beach ball'
        # Update the item.
        response = self.gd_client.Put(updated_xml,
                                      response.id.text,
                                      converter=gdata.base.GBaseItemFromString)
        self.assertEquals(response is not None, True)
        self.assertEquals(isinstance(response, gdata.base.GBaseItem), True)
        # Delete the item the test just created.
        response = self.gd_client.Delete(response.id.text)
        self.assert_(response)

    def testCaptchaUrlGeneration(self):
        """A 403 CAPTCHA challenge populates captcha_url on the client."""
        # Populate the mock server with a pairing for a ClientLogin request
        # to a CAPTCHA challenge.
        mock_client = atom.mock_http.MockHttpClient()
        captcha_response = atom.mock_http.MockResponse(
            body="""Url=http://www.google.com/login/captcha
Error=CaptchaRequired
CaptchaToken=DQAAAGgAdkI1LK9
CaptchaUrl=Captcha?ctoken=HiteT4b0Bk5Xg18_AcVoP6-yFkHPibe7O9EqxeiI7lUSN
""", status=403, reason='Access Forbidden')
        mock_client.add_response(captcha_response, 'POST',
                                 'https://www.google.com/accounts/ClientLogin')
        # Set the existing client's handler so that it will make requests to
        # the mock service instead of the real server.
        self.gd_client.http_client = mock_client
        try:
            self.gd_client.ProgrammaticLogin()
            self.fail('Login attempt should have caused a CAPTCHA challenge.')
        except gdata.service.CaptchaRequired:
            self.assertEquals(self.gd_client.captcha_url,
                ('https://www.google.com/accounts/Captcha?ctoken=HiteT4b0Bk5Xg18_'
                 'AcVoP6-yFkHPibe7O9EqxeiI7lUSN'))
class DeleteWithUrlParamsTest(unittest.TestCase):
    """Checks that Delete forwards URL parameters (and the session id)."""

    def setUp(self):
        self.gd_client = gdata.service.GDataService()
        # Replace the transport with one that records the request so the
        # tests can inspect exactly what would have been sent.
        self.gd_client.http_client.v2_http_client = (
            atom.mock_http_core.SettableHttpClient(200, 'OK', '', {}))

    def testDeleteWithUrlParams(self):
        """URL parameters passed to Delete end up in the request query."""
        self.assert_(self.gd_client.Delete('http://example.com/test',
            {'TestHeader': '123'}, {'urlParam1': 'a', 'urlParam2': 'test'}))
        sent = self.gd_client.http_client.v2_http_client.last_request
        self.assertEqual(sent.uri.host, 'example.com')
        self.assertEqual(sent.uri.path, '/test')
        self.assertEqual(sent.uri.query,
                         {'urlParam1': 'a', 'urlParam2': 'test'})

    def testDeleteWithSessionId(self):
        """An active session id is appended to the query parameters."""
        self.gd_client._SetSessionId('test_session_id')
        self.assert_(self.gd_client.Delete('http://example.com/test',
            {'TestHeader': '123'}, {'urlParam1': 'a', 'urlParam2': 'test'}))
        sent = self.gd_client.http_client.v2_http_client.last_request
        self.assertEqual(sent.uri.host, 'example.com')
        self.assertEqual(sent.uri.path, '/test')
        self.assertEqual(sent.uri.query, {'urlParam1': 'a',
            'urlParam2': 'test', 'gsessionid': 'test_session_id'})
class QueryTest(unittest.TestCase):
    """Unit tests for the dict-like gdata.service.Query URL builder."""

    def setUp(self):
        self.query = gdata.service.Query()

    def testQueryShouldBehaveLikeDict(self):
        """Missing keys raise KeyError; assigned keys read back."""
        self.assertRaises(KeyError, lambda: self.query['zap'])
        self.query['zap'] = 'x'
        self.assertEqual(self.query['zap'], 'x')

    def testContructorShouldRejectBadInputs(self):
        """A non-dict params value contributes no query parameters."""
        # (The 'Contructor' typo in the method name is kept: renaming would
        # change the test id reported by runners.)
        bad_query = gdata.service.Query(params=[1, 2, 3, 4])
        self.assertEqual(len(bad_query.keys()), 0)

    def testTextQueryProperty(self):
        """The 'q' parameter is mirrored by the text_query property."""
        self.assert_(self.query.text_query is None)
        self.query['q'] = 'test1'
        self.assertEqual(self.query.text_query, 'test1')
        self.query.text_query = 'test2'
        self.assertEqual(self.query.text_query, 'test2')

    def testOrderByQueryProperty(self):
        """The 'orderby' parameter is mirrored by the orderby property."""
        self.assert_(self.query.orderby is None)
        self.query['orderby'] = 'updated'
        self.assertEqual(self.query.orderby, 'updated')
        self.query.orderby = 'starttime'
        self.assertEqual(self.query.orderby, 'starttime')

    def testQueryShouldProduceExampleUris(self):
        """Feed plus text query serializes with URL-encoded spaces."""
        self.query.feed = '/base/feeds/snippets'
        self.query.text_query = 'This is a test'
        self.assertEqual(self.query.ToUri(),
                         '/base/feeds/snippets?q=This+is+a+test')

    def testCategoriesFormattedCorrectly(self):
        """Categories serialize as /-/cat1/cat2 without mutating the feed."""
        self.query.feed = '/x'
        self.query.categories.extend(['Fritz', 'Laurie'])
        self.assertEqual(self.query.ToUri(), '/x/-/Fritz/Laurie')
        # Serializing must not have modified the stored feed path.
        self.assertEqual(self.query.feed, '/x')
        self.assertEqual(self.query.ToUri(), '/x/-/Fritz/Laurie')

    def testCategoryQueriesShouldEscapeOrSymbols(self):
        """The '|' (OR) symbol inside a category is percent-escaped."""
        self.query.feed = '/x'
        self.query.categories.append('Fritz|Laurie')
        self.assertEqual(self.query.ToUri(), '/x/-/Fritz%7CLaurie')

    def testTypeCoercionOnIntParams(self):
        """Integer max_results/start_index values are stored as strings."""
        self.query.feed = '/x'
        self.query.max_results = 10
        self.query.start_index = 5
        self.assert_(isinstance(self.query.max_results, str))
        self.assert_(isinstance(self.query.start_index, str))
        self.assertEqual(self.query['max-results'], '10')
        self.assertEqual(self.query['start-index'], '5')

    def testPassInCategoryListToConstructor(self):
        """Categories given to the constructor appear (escaped) in the URI."""
        query = gdata.service.Query(feed='/feed/sample',
                                    categories=['foo', 'bar', 'eggs|spam'])
        uri = query.ToUri()
        for fragment in ('/foo', '/bar', '/eggs%7Cspam'):
            self.assert_(uri.find(fragment) > -1)
class GetNextPageInFeedTest(unittest.TestCase):
    """Checks GetNext pagination over a live Google Base snippet feed."""

    def setUp(self):
        self.gd_client = gdata.service.GDataService()

    def testGetNextPage(self):
        """The second page exists, differs from the first, same feed class."""
        page_one = self.gd_client.Get(
            'http://www.google.com/base/feeds/snippets?max-results=2',
            converter=gdata.base.GBaseSnippetFeedFromString)
        self.assert_(len(page_one.entry) > 0)
        page_two = self.gd_client.GetNext(page_one)
        self.assert_(len(page_two.entry) > 0)
        # Consecutive pages must start with different entries but share the
        # converter-produced feed class.
        self.assert_(page_one.entry[0].id.text != page_two.entry[0].id.text)
        self.assert_(page_two.__class__ == page_one.__class__)
class ScopeLookupTest(unittest.TestCase):
    """Unit tests for gdata.service.lookup_scopes."""

    def testLookupScopes(self):
        """Known service codes map to their scopes; unknown ones to None."""
        self.assertEqual(gdata.service.lookup_scopes('cl'),
                         gdata.service.CLIENT_LOGIN_SCOPES['cl'])
        self.assert_(gdata.service.lookup_scopes(None) is None)
        self.assert_(gdata.service.lookup_scopes('UNKNOWN_SERVICE') is None)
class TokenLookupTest(unittest.TestCase):
    """Tests ClientLogin token storage and scope-based lookup."""

    def setUp(self):
        self.client = gdata.service.GDataService()

    def testSetAndGetClientLoginTokenWithNoService(self):
        """Without a service, the token lands under the catch-all scope."""
        self.assert_(self.client.auth_token is None)
        self.client.SetClientLoginToken('foo')
        self.assert_(self.client.auth_token is None)
        self.assert_(self.client.token_store.find_token(
            atom.token_store.SCOPE_ALL) is not None)
        self.assertEqual(self.client.GetClientLoginToken(), 'foo')
        # Setting again replaces the stored token.
        self.client.SetClientLoginToken('foo2')
        self.assertEqual(self.client.GetClientLoginToken(), 'foo2')

    def testSetAndGetClientLoginTokenWithService(self):
        """A token stored for one service is invisible to another."""
        self.client.service = 'cp'
        self.client.SetClientLoginToken('bar')
        self.assertEqual(self.client.GetClientLoginToken(), 'bar')
        # Changing the service should cause the token to no longer be found.
        self.client.service = 'gbase'
        self.client.current_token = None
        self.assert_(self.client.GetClientLoginToken() is None)

    def testSetAndGetClientLoginTokenWithScopes(self):
        """A token covers exactly the scopes it was stored with."""
        scopes = gdata.service.CLIENT_LOGIN_SCOPES['cl'][:]
        scopes.extend(gdata.service.CLIENT_LOGIN_SCOPES['gbase'])
        self.client.SetClientLoginToken('baz', scopes=scopes)
        self.client.current_token = None
        self.assert_(self.client.GetClientLoginToken() is None)
        for service in ('cl', 'gbase'):
            self.client.service = service
            self.assertEqual(self.client.GetClientLoginToken(), 'baz')
        self.client.service = 'wise'
        self.assert_(self.client.GetClientLoginToken() is None)

    def testLookupUsingTokenStore(self):
        """find_token matches URLs inside stored scopes, misses outside."""
        scopes = gdata.service.CLIENT_LOGIN_SCOPES['cl'][:]
        scopes.extend(gdata.service.CLIENT_LOGIN_SCOPES['gbase'])
        self.client.SetClientLoginToken('baz', scopes=scopes)
        token = self.client.token_store.find_token(
            'http://www.google.com/calendar/feeds/foo')
        self.assertEqual(token.get_token_string(), 'baz')
        self.assertEqual(token.auth_header, '%s%s' % (
            gdata.auth.PROGRAMMATIC_AUTH_LABEL, 'baz'))
        # A URL outside the stored scopes yields no ClientLoginToken.
        token = self.client.token_store.find_token(
            'http://www.google.com/calendar/')
        self.assert_(isinstance(token, gdata.auth.ClientLoginToken) == False)
        token = self.client.token_store.find_token(
            'http://www.google.com/base/feeds/snippets')
        self.assertEqual(token.get_token_string(), 'baz')
if __name__ == '__main__':
    # Live tests: prompt for credentials up front.  The service tests log in
    # with them and create/delete real data, so use a throwaway account.
    print ('GData Service Media Unit Tests\nNOTE: Please run these tests only '
           'with a test account. The tests may delete or update your data.')
    username = raw_input('Please enter your username: ')
    password = getpass.getpass()
    unittest.main()
| Python |
#!/usr/bin/env python
#
# Copyright (C) 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Functional Tests for Google Analytics Account Feed and Data Feed.
AnalyticsClientTest: Tests making live requests to Google Analytics API.
"""
__author__ = 'api.nickm@google.com (Nick Mihailovski)'
import unittest
import gdata.client
import gdata.data
import gdata.gauth
import gdata.analytics.client
import gdata.test_config as conf
# Register the Analytics table-id flag on the shared test configuration so
# testDataFeed can read it via conf.options.get_value('table_id').
conf.options.register_option(conf.GA_TABLE_ID)
class AnalyticsClientTest(unittest.TestCase):
    """Live tests for the Analytics Account, Data and Management feeds.

    Every test is a no-op unless the shared test config has runlive=true.
    """

    def setUp(self):
        """Creates and configures an AnalyticsClient when running live."""
        self.client = None
        if conf.options.get_value('runlive') == 'true':
            self.client = gdata.analytics.client.AnalyticsClient()
            self.client.http_client.debug = True
            conf.configure_client(
                self.client,
                'AnalyticsClientTest',
                self.client.auth_service)

    def testAccountFeed(self):
        """The account feed exists and each entry has the dxp properties."""
        if not conf.options.get_value('runlive') == 'true':
            return
        conf.configure_cache(self.client, 'testAccountFeed')
        account_query = gdata.analytics.client.AccountFeedQuery({
            'max-results': '1'
        })
        feed = self.client.GetAccountFeed(account_query)
        self.assert_(feed.entry is not None)
        properties = [
            'ga:accountId',
            'ga:accountName',
            'ga:profileId',
            'ga:webPropertyId',
            'ga:currency',
            'ga:timezone'
        ]
        entry = feed.entry[0]
        for name in properties:
            # Local renamed from 'property' to avoid shadowing the built-in.
            prop = entry.GetProperty(name)
            self.assertEquals(prop.name, name)

    def testDataFeed(self):
        """The data feed echoes the requested dates and metric."""
        # Guard first so no fixture work happens when not running live.
        if not conf.options.get_value('runlive') == 'true':
            return
        conf.configure_cache(self.client, 'testDataFeed')
        start_date = '2008-10-01'
        end_date = '2008-10-02'
        metrics = 'ga:visits'
        data_query = gdata.analytics.client.DataFeedQuery({
            'ids': conf.options.get_value('table_id'),
            'start-date': start_date,
            'end-date': end_date,
            'metrics': metrics,
            'max-results': '1'
        })
        feed = self.client.GetDataFeed(data_query)
        self.assert_(feed.entry is not None)
        self.assertEquals(feed.start_date.text, start_date)
        self.assertEquals(feed.end_date.text, end_date)
        self.assertEquals(feed.entry[0].GetMetric(metrics).name, metrics)

    def testManagementFeed(self):
        """Tests if the Management Feed exists."""
        if not conf.options.get_value('runlive') == 'true':
            return
        conf.configure_cache(self.client, 'testManagementFeed')
        account_query = gdata.analytics.client.AccountQuery()
        feed = self.client.GetManagementFeed(account_query)
        self.assert_(feed.entry is not None)

    def tearDown(self):
        """Closes the client connection (no-op when not running live)."""
        conf.close_client(self.client)
def suite():
    """Builds this module's test suite via the shared config helper."""
    test_classes = [AnalyticsClientTest]
    return conf.build_suite(test_classes)
if __name__ == '__main__':
    # Run through TextTestRunner so conf.build_suite controls test selection.
    unittest.TextTestRunner().run(suite())
| Python |
#!/usr/bin/env python
#
# Copyright (C) 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit Tests for Google Analytics API query objects.
AnalyticsClientTest: Tests making live requests to Google Analytics API.
"""
__author__ = 'api.nickm@google.com (Nick Mihailovski)'
import unittest
from gdata.analytics import client
class DataExportQueryTest(unittest.TestCase):
    """Tests URL construction for Data Export API query objects."""

    def testAccountFeed(self):
        """Account feed URLs: bare, constructor params, assigned params."""
        base = 'https://www.google.com/analytics/feeds/accounts/default'
        bare = client.AccountFeedQuery()
        self.assertEqual(str(bare), base)
        from_ctor = client.AccountFeedQuery({'max-results': 50})
        self.assertEqual(str(from_ctor), base + '?max-results=50')
        assigned = client.AccountFeedQuery()
        assigned.query['max-results'] = 100
        self.assertEqual(str(assigned), base + '?max-results=100')

    def testDataFeed(self):
        """Data feed URLs; the ':' in 'ga:1234' must escape to %3A."""
        base = 'https://www.google.com/analytics/feeds/data'
        bare = client.DataFeedQuery()
        self.assertEqual(str(bare), base)
        from_ctor = client.DataFeedQuery({'ids': 'ga:1234'})
        self.assertEqual(str(from_ctor), base + '?ids=ga%3A1234')
        assigned = client.DataFeedQuery()
        assigned.query['ids'] = 'ga:1234'
        self.assertEqual(str(assigned), base + '?ids=ga%3A1234')
class ManagementQueryTest(unittest.TestCase):
    """Tests URL construction for Management API query objects."""

    def setUp(self):
        # All management feeds hang off this data-sources base URL.
        self.base_url = 'https://www.google.com/analytics/feeds/datasources/ga'

    def testAccountFeedQuery(self):
        """Tests Account Feed queries."""
        queryTest1 = client.AccountQuery()
        self.assertEquals(str(queryTest1),
            '%s/accounts' % self.base_url)
        queryTest2 = client.AccountQuery({'max-results': 50})
        self.assertEquals(str(queryTest2),
            '%s/accounts?max-results=50' % self.base_url)

    def testWebPropertyFeedQuery(self):
        """Tests Web Property Feed queries."""
        queryTest1 = client.WebPropertyQuery()
        self.assertEquals(str(queryTest1),
            '%s/accounts/~all/webproperties' % self.base_url)
        queryTest2 = client.WebPropertyQuery('123')
        self.assertEquals(str(queryTest2),
            '%s/accounts/123/webproperties' % self.base_url)
        queryTest3 = client.WebPropertyQuery('123', {'max-results': 100})
        self.assertEquals(str(queryTest3),
            '%s/accounts/123/webproperties?max-results=100' % self.base_url)

    def testProfileFeedQuery(self):
        """Tests Profile Feed queries."""
        queryTest1 = client.ProfileQuery()
        self.assertEquals(str(queryTest1),
            '%s/accounts/~all/webproperties/~all/profiles' % self.base_url)
        queryTest2 = client.ProfileQuery('123', 'UA-123-1')
        self.assertEquals(str(queryTest2),
            '%s/accounts/123/webproperties/UA-123-1/profiles' % self.base_url)
        queryTest3 = client.ProfileQuery('123', 'UA-123-1',
                                         {'max-results': 100})
        self.assertEquals(str(queryTest3),
            '%s/accounts/123/webproperties/UA-123-1/profiles?max-results=100'
            % self.base_url)
        queryTest4 = client.ProfileQuery()
        queryTest4.acct_id = '123'
        queryTest4.web_prop_id = 'UA-123-1'
        queryTest4.query['max-results'] = 100
        self.assertEquals(str(queryTest4),
            '%s/accounts/123/webproperties/UA-123-1/profiles?max-results=100'
            % self.base_url)

    def testGoalFeedQuery(self):
        """Tests Goal Feed queries."""
        queryTest1 = client.GoalQuery()
        self.assertEquals(str(queryTest1),
            '%s/accounts/~all/webproperties/~all/profiles/~all/goals'
            % self.base_url)
        queryTest2 = client.GoalQuery('123', 'UA-123-1', '555')
        self.assertEquals(str(queryTest2),
            '%s/accounts/123/webproperties/UA-123-1/profiles/555/goals'
            % self.base_url)
        queryTest3 = client.GoalQuery('123', 'UA-123-1', '555',
                                      {'max-results': 100})
        self.assertEquals(str(queryTest3),
            '%s/accounts/123/webproperties/UA-123-1/profiles/555/goals'
            '?max-results=100' % self.base_url)
        queryTest4 = client.GoalQuery()
        queryTest4.acct_id = '123'
        queryTest4.web_prop_id = 'UA-123-1'
        queryTest4.profile_id = '555'
        queryTest4.query['max-results'] = 100
        # Bug fix: this final check previously asserted on queryTest3 again,
        # so the attribute-assignment construction path was never verified.
        self.assertEquals(str(queryTest4),
            '%s/accounts/123/webproperties/UA-123-1/profiles/555/goals'
            '?max-results=100' % self.base_url)

    def testAdvSegQuery(self):
        """Tests Advanced Segment Feed queries."""
        queryTest1 = client.AdvSegQuery()
        self.assertEquals(str(queryTest1),
            '%s/segments'
            % self.base_url)
        queryTest2 = client.AdvSegQuery({'max-results': 100})
        self.assertEquals(str(queryTest2),
            '%s/segments?max-results=100'
            % self.base_url)
def suite():
    """Returns a unittest.TestSuite covering both test cases in this module.

    Bug fix: the previous body called conf.build_suite, but this module never
    imports gdata.test_config as conf, so calling suite() raised NameError.
    Build the suite with the standard unittest loader instead.
    """
    loader = unittest.TestLoader()
    return unittest.TestSuite([
        loader.loadTestsFromTestCase(DataExportQueryTest),
        loader.loadTestsFromTestCase(ManagementQueryTest)])
if __name__ == '__main__':
    # unittest.main() discovers both TestCase classes in this module.
    unittest.main()
| Python |
#!/usr/bin/env python
#
# Copyright (C) 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit Tests for Google Analytics Data Export API and Management APIs.
Although the Data Export API and Management API conceptually operate on
different parts of Google Analytics, the APIs share some code so they
are released in the same module.
AccountFeedTest: All unit tests for AccountFeed class.
DataFeedTest: All unit tests for DataFeed class.
ManagementFeedAccountTest: Unit tests for ManagementFeed class.
ManagementFeedGoalTest: Unit tests for ManagementFeed class.
ManagementFeedAdvSegTest: Unit tests for ManagementFeed class.
"""
__author__ = 'api.nickm@google.com (Nick Mihailovski)'
import unittest
from gdata import test_data
import gdata.analytics.data
import atom.core
import gdata.test_config as conf
class AccountFeedTest(unittest.TestCase):
"""Unit test for all custom elements in the Account Feed."""
def setUp(self):
"""Retrieves the test XML feed into a AccountFeed object."""
self.feed = atom.core.parse(test_data.ANALYTICS_ACCOUNT_FEED,
gdata.analytics.data.AccountFeed)
def testSegment(self):
"""Tests Segment class in Google Analytics Account Feed."""
segment = self.feed.segment[0]
self.assertEquals(segment.id, 'gaid::-11')
self.assertEquals(segment.name, 'Visits from iPhones')
def testSegmentDefinition(self):
"""Tests Definition class in Google Analytics Account Feed."""
definition = self.feed.segment[0].definition
self.assertEquals(definition.text, 'ga:operatingSystem==iPhone')
def testEntryTableId(self):
"""Tests custom classes in Google Analytics Account Feed."""
entry = self.feed.entry[0]
self.assertEquals(entry.table_id.text, 'ga:1174')
def testEntryProperty(self):
"""Tests the property classes in Google Analytics Account Feed."""
property = self.feed.entry[0].property
self.assertEquals(property[0].name, 'ga:accountId')
self.assertEquals(property[0].value, '30481')
self.assertEquals(property[1].name, 'ga:accountName')
self.assertEquals(property[1].value, 'Google Store')
self.assertEquals(property[2].name, 'ga:profileId')
self.assertEquals(property[2].value, '1174')
self.assertEquals(property[3].name, 'ga:webPropertyId')
self.assertEquals(property[3].value, 'UA-30481-1')
self.assertEquals(property[4].name, 'ga:currency')
self.assertEquals(property[4].value, 'USD')
self.assertEquals(property[5].name, 'ga:timezone')
self.assertEquals(property[5].value, 'America/Los_Angeles')
def testEntryGetProperty(self):
"""Tests GetProperty inherited class in the AccountEntry class."""
entry = self.feed.entry[0]
self.assertEquals(entry.GetProperty('ga:accountId').value, '30481')
self.assertEquals(entry.GetProperty('ga:accountName').value, 'Google Store')
self.assertEquals(entry.GetProperty('ga:profileId').value, '1174')
self.assertEquals(entry.GetProperty('ga:webPropertyId').value, 'UA-30481-1')
self.assertEquals(entry.GetProperty('ga:currency').value, 'USD')
self.assertEquals(entry.GetProperty('ga:timezone').value, 'America/Los_Angeles')
def testGoal(self):
"""Tests Goal class in Google Anlaytics Account Feed."""
goal = self.feed.entry[0].goal[0]
self.assertEquals(goal.number, '1')
self.assertEquals(goal.name, 'Completing Order')
self.assertEquals(goal.value, '10.0')
self.assertEquals(goal.active, 'true')
def testDestination(self):
    """Tests Destination class in Google Analytics Account Feed."""
    dest = self.feed.entry[0].goal[0].destination
    for attr, want in (('expression', '/purchaseComplete.html'),
                       ('case_sensitive', 'false'),
                       ('match_type', 'regex'),
                       ('step1_required', 'false')):
        self.assertEquals(want, getattr(dest, attr))
def testStep(self):
    """Tests Step class in Google Analytics Account Feed."""
    first_step = self.feed.entry[0].goal[0].destination.step[0]
    self.assertEquals('1', first_step.number)
    self.assertEquals('View Product Categories', first_step.name)
    self.assertEquals('/Apps|Accessories|Fun|Kid\+s|Office', first_step.path)
def testEngagemet(self):
    """Tests Engagement class in Google Analytics Account Feed.

    (The 'Engagemet' typo in the method name is kept so the test id
    stays stable.)
    """
    engagement = self.feed.entry[0].goal[1].engagement
    self.assertEquals('timeOnSite', engagement.type)
    self.assertEquals('>', engagement.comparison)
    self.assertEquals('300', engagement.threshold_value)
def testCustomVariable(self):
    """Tests CustomVariable class in Google Analytics Account Feed."""
    custom_var = self.feed.entry[0].custom_variable[0]
    self.assertEquals(
        ('1', 'My Custom Variable', '3'),
        (custom_var.index, custom_var.name, custom_var.scope))
class DataFeedTest(unittest.TestCase):
    """Unit test for all custom elements in the Data Feed.

    All expected values below are fixed literals from the canned XML in
    test_data.ANALYTICS_DATA_FEED; no network access is performed.
    """

    def setUp(self):
        """Retrieves the test XML feed into a DataFeed object."""
        self.feed = atom.core.parse(test_data.ANALYTICS_DATA_FEED,
                                    gdata.analytics.data.DataFeed)

    def testDataFeed(self):
        """Tests custom classes in Google Analytics Data Feed."""
        self.assertEquals(self.feed.start_date.text, '2008-10-01')
        self.assertEquals(self.feed.end_date.text, '2008-10-31')

    def testAggregates(self):
        """Tests Aggregates class in Google Analytics Data Feed."""
        self.assert_(self.feed.aggregates is not None)

    def testContainsSampledData(self):
        """Tests ContainsSampledData class in Google Analytics Data Feed."""
        contains_sampled_data = self.feed.contains_sampled_data.text
        self.assertEquals(contains_sampled_data, 'true')
        self.assertTrue(self.feed.HasSampledData())

    def testAggregatesElements(self):
        """Tests Metrics class in Aggregates class."""
        # Access by index...
        metric = self.feed.aggregates.metric[0]
        self.assertEquals(metric.confidence_interval, '0.0')
        self.assertEquals(metric.name, 'ga:visits')
        self.assertEquals(metric.type, 'integer')
        self.assertEquals(metric.value, '136540')
        # ...and lookup by name must yield the same metric values.
        metric = self.feed.aggregates.GetMetric('ga:visits')
        self.assertEquals(metric.confidence_interval, '0.0')
        self.assertEquals(metric.name, 'ga:visits')
        self.assertEquals(metric.type, 'integer')
        self.assertEquals(metric.value, '136540')

    def testDataSource(self):
        """Tests DataSources class in Google Analytics Data Feed."""
        self.assert_(self.feed.data_source[0] is not None)

    def testDataSourceTableId(self):
        """Tests TableId class in the DataSource class."""
        table_id = self.feed.data_source[0].table_id
        self.assertEquals(table_id.text, 'ga:1174')

    def testDataSourceTableName(self):
        """Tests TableName class in the DataSource class."""
        table_name = self.feed.data_source[0].table_name
        self.assertEquals(table_name.text, 'www.googlestore.com')

    def testDataSourceProperty(self):
        """Tests Property class in the DataSource class."""
        property = self.feed.data_source[0].property
        self.assertEquals(property[0].name, 'ga:profileId')
        self.assertEquals(property[0].value, '1174')
        self.assertEquals(property[1].name, 'ga:webPropertyId')
        self.assertEquals(property[1].value, 'UA-30481-1')
        self.assertEquals(property[2].name, 'ga:accountName')
        self.assertEquals(property[2].value, 'Google Store')

    def testDataSourceGetProperty(self):
        """Tests GetProperty utility method in the DataSource class."""
        ds = self.feed.data_source[0]
        self.assertEquals(ds.GetProperty('ga:profileId').value, '1174')
        self.assertEquals(ds.GetProperty('ga:webPropertyId').value, 'UA-30481-1')
        self.assertEquals(ds.GetProperty('ga:accountName').value, 'Google Store')

    def testSegment(self):
        """Tests Segment class in DataFeed class."""
        segment = self.feed.segment
        self.assertEquals(segment.id, 'gaid::-11')
        self.assertEquals(segment.name, 'Visits from iPhones')

    def testSegmentDefinition(self):
        """Tests Definition class in Segment class."""
        definition = self.feed.segment.definition
        self.assertEquals(definition.text, 'ga:operatingSystem==iPhone')

    def testEntryDimension(self):
        """Tests Dimension class in Entry class."""
        dim = self.feed.entry[0].dimension[0]
        self.assertEquals(dim.name, 'ga:source')
        self.assertEquals(dim.value, 'blogger.com')

    def testEntryGetDimension(self):
        """Tests GetDimension utility method in the Entry class."""
        dim = self.feed.entry[0].GetDimension('ga:source')
        self.assertEquals(dim.name, 'ga:source')
        self.assertEquals(dim.value, 'blogger.com')
        # An unknown dimension name returns None rather than raising.
        error = self.feed.entry[0].GetDimension('foo')
        self.assertEquals(error, None)

    def testEntryMetric(self):
        """Tests Metric class in Entry class."""
        met = self.feed.entry[0].metric[0]
        self.assertEquals(met.confidence_interval, '0.0')
        self.assertEquals(met.name, 'ga:visits')
        self.assertEquals(met.type, 'integer')
        self.assertEquals(met.value, '68140')

    def testEntryGetMetric(self):
        """Tests GetMetric utility method in the Entry class."""
        met = self.feed.entry[0].GetMetric('ga:visits')
        self.assertEquals(met.confidence_interval, '0.0')
        self.assertEquals(met.name, 'ga:visits')
        self.assertEquals(met.type, 'integer')
        self.assertEquals(met.value, '68140')
        # An unknown metric name returns None rather than raising.
        error = self.feed.entry[0].GetMetric('foo')
        self.assertEquals(error, None)

    def testEntryGetObject(self):
        """Tests GetObject utility method in Entry class.

        GetObject resolves a name to either a dimension or a metric.
        """
        entry = self.feed.entry[0]
        dimension = entry.GetObject('ga:source')
        self.assertEquals(dimension.name, 'ga:source')
        self.assertEquals(dimension.value, 'blogger.com')
        metric = entry.GetObject('ga:visits')
        self.assertEquals(metric.name, 'ga:visits')
        self.assertEquals(metric.value, '68140')
        self.assertEquals(metric.type, 'integer')
        self.assertEquals(metric.confidence_interval, '0.0')
        error = entry.GetObject('foo')
        self.assertEquals(error, None)
class ManagementFeedProfileTest(unittest.TestCase):
    """Unit test for all property elements in Google Analytics Management Feed.

    Since the Account, Web Property and Profile feed all have the same
    structure and XML elements, this single test case covers all three feeds.
    """

    def setUp(self):
        """Parses the canned profile management feed XML into a ManagementFeed."""
        self.feed = atom.core.parse(test_data.ANALYTICS_MGMT_PROFILE_FEED,
                                    gdata.analytics.data.ManagementFeed)

    def testFeedKindAttribute(self):
        """Tests the kind attribute in the feed."""
        self.assertEqual(self.feed.kind, 'analytics#profiles')

    def testEntryKindAttribute(self):
        """Tests the kind attribute in the entry."""
        entry_kind = self.feed.entry[0].kind
        self.assertEqual(entry_kind, 'analytics#profile')

    def testEntryProperty(self):
        """Tests property classes in Management Entry class."""
        property = self.feed.entry[0].property
        self.assertEquals(property[0].name, 'ga:accountId')
        self.assertEquals(property[0].value, '30481')

    def testEntryGetProperty(self):
        """Tests GetProperty helper method in Management Entry class."""
        entry = self.feed.entry[0]
        self.assertEquals(entry.GetProperty('ga:accountId').value, '30481')

    def testGetParentLinks(self):
        """Tests GetParentLinks utility method."""
        parent_links = self.feed.entry[0].GetParentLinks()
        self.assertEquals(len(parent_links), 1)
        parent_link = parent_links[0]
        self.assertEquals(parent_link.rel,
                          'http://schemas.google.com/ga/2009#parent')
        self.assertEquals(parent_link.type,
                          'application/atom+xml')
        self.assertEquals(parent_link.href,
                          'https://www.google.com/analytics/feeds/datasources'
                          '/ga/accounts/30481/webproperties/UA-30481-1')
        self.assertEquals(parent_link.target_kind,
                          'analytics#webproperty')

    def testGetChildLinks(self):
        """Tests GetChildLinks utility method."""
        child_links = self.feed.entry[0].GetChildLinks()
        self.assertEquals(len(child_links), 1)
        self.ChildLinkTestHelper(child_links[0])

    def testGetChildLink(self):
        """Tests GetChildLink utility method."""
        child_link = self.feed.entry[0].GetChildLink('analytics#goals')
        self.ChildLinkTestHelper(child_link)
        # An unknown target kind returns None rather than raising.
        child_link = self.feed.entry[0].GetChildLink('foo_bar')
        self.assertEquals(child_link, None)

    def ChildLinkTestHelper(self, child_link):
        """Common method to test a child link."""
        self.assertEquals(child_link.rel,
                          'http://schemas.google.com/ga/2009#child')
        self.assertEquals(child_link.type,
                          'application/atom+xml')
        self.assertEquals(child_link.href,
                          'https://www.google.com/analytics/feeds/datasources'
                          '/ga/accounts/30481/webproperties/UA-30481-1/profiles/1174/goals')
        self.assertEquals(child_link.target_kind,
                          'analytics#goals')
class ManagementFeedGoalTest(unittest.TestCase):
    """Unit test for all Goal elements in Management Feed."""

    def setUp(self):
        """Parses the canned goal management feed XML into a ManagementFeed."""
        self.feed = atom.core.parse(test_data.ANALYTICS_MGMT_GOAL_FEED,
                                    gdata.analytics.data.ManagementFeed)

    def testEntryGoal(self):
        """Tests Goal class in Google Analytics Management Feed."""
        goal = self.feed.entry[0].goal
        self.assertEquals(goal.number, '1')
        self.assertEquals(goal.name, 'Completing Order')
        self.assertEquals(goal.value, '10.0')
        self.assertEquals(goal.active, 'true')

    def testGoalDestination(self):
        """Tests Destination class in Google Analytics Management Feed."""
        destination = self.feed.entry[0].goal.destination
        self.assertEquals(destination.expression, '/purchaseComplete.html')
        self.assertEquals(destination.case_sensitive, 'false')
        self.assertEquals(destination.match_type, 'regex')
        self.assertEquals(destination.step1_required, 'false')

    def testGoalDestinationStep(self):
        """Tests Step class in Google Analytics Management Feed."""
        step = self.feed.entry[0].goal.destination.step[0]
        self.assertEquals(step.number, '1')
        self.assertEquals(step.name, 'View Product Categories')
        self.assertEquals(step.path, '/Apps|Accessories')

    def testGoalEngagemet(self):
        """Tests Engagement class in Google Analytics Management Feed.

        Note: the second entry in the test feed carries the engagement goal.
        """
        engagement = self.feed.entry[1].goal.engagement
        self.assertEquals(engagement.type, 'timeOnSite')
        self.assertEquals(engagement.comparison, '>')
        self.assertEquals(engagement.threshold_value, '300')
class ManagementFeedAdvSegTest(unittest.TestCase):
    """Unit test for all Advanced Segment elements in Management Feed."""

    def setUp(self):
        """Parses the canned advanced-segment management feed XML."""
        self.feed = atom.core.parse(
            test_data.ANALYTICS_MGMT_ADV_SEGMENT_FEED,
            gdata.analytics.data.ManagementFeed)

    def testEntrySegment(self):
        """Tests Segment class in ManagementEntry class."""
        segment = self.feed.entry[0].segment
        self.assertEquals(('gaid::0', 'Sources Form Google'),
                          (segment.id, segment.name))

    def testSegmentDefinition(self):
        """Tests Definition class in Segment class."""
        self.assertEquals(self.feed.entry[0].segment.definition.text,
                          'ga:source=~^\Qgoogle\E')
def suite():
    """Test Account Feed, Data Feed and Management API Feeds."""
    test_cases = [
        AccountFeedTest,
        DataFeedTest,
        ManagementFeedProfileTest,
        ManagementFeedGoalTest,
        ManagementFeedAdvSegTest,
    ]
    return conf.build_suite(test_cases)
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| Python |
#!/usr/bin/python
#
# Copyright (C) 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'api.lliabraa@google.com (Lane LiaBraaten)'
import unittest
try:
from xml.etree import ElementTree
except ImportError:
from elementtree import ElementTree
import atom
import gdata.calendar
import gdata.calendar.service
import gdata.service
import random
import getpass
from gdata import test_data
# Credentials for the live-service tests; populated interactively by the
# __main__ block at the bottom of this file before the tests run.
username = ''
password = ''
class CalendarServiceAclUnitTest(unittest.TestCase):
_aclFeedUri = "/calendar/feeds/default/acl/full"
_aclEntryUri = "%s/user:%s" % (_aclFeedUri, "user@gmail.com",)
def setUp(self):
self.cal_client = gdata.calendar.service.CalendarService()
self.cal_client.email = username
self.cal_client.password = password
self.cal_client.source = 'GCalendarClient ACL "Unit" Tests'
def tearDown(self):
# No teardown needed
pass
def _getRandomNumber(self):
"""Return a random number as a string for testing"""
r = random.Random()
r.seed()
return str(r.randint(100000,1000000))
def _generateAclEntry(self, role="owner", scope_type="user", scope_value=None):
"""Generates a ACL rule from parameters or makes a random user an owner by default"""
if (scope_type=="user" and scope_value is None):
scope_value = "user%s@gmail.com" % (self._getRandomNumber())
rule = gdata.calendar.CalendarAclEntry()
rule.title = atom.Title(text=role)
rule.scope = gdata.calendar.Scope(value=scope_value, type="user")
rule.role = gdata.calendar.Role(value="http://schemas.google.com/gCal/2005#%s" % (role))
return rule
def assertEqualAclEntry(self, expected, actual):
"""Compares the values of two ACL entries"""
self.assertEqual(expected.role.value, actual.role.value)
self.assertEqual(expected.scope.value, actual.scope.value)
self.assertEqual(expected.scope.type, actual.scope.type)
def testGetAclFeedUnauthenticated(self):
"""Fiendishly try to get an ACL feed without authenticating"""
try:
self.cal_client.GetCalendarAclFeed(self._aclFeedUri)
self.fail("Unauthenticated request should fail")
except gdata.service.RequestError, error:
self.assertEqual(error[0]['status'], 401)
self.assertEqual(error[0]['reason'], "Authorization required")
def testGetAclFeed(self):
"""Get an ACL feed"""
self.cal_client.ProgrammaticLogin()
feed = self.cal_client.GetCalendarAclFeed(self._aclFeedUri)
self.assertNotEqual(0,len(feed.entry))
def testGetAclEntryUnauthenticated(self):
"""Fiendishly try to get an ACL entry without authenticating"""
try:
self.cal_client.GetCalendarAclEntry(self._aclEntryUri)
self.fail("Unauthenticated request should fail");
except gdata.service.RequestError, error:
self.assertEqual(error[0]['status'], 401)
self.assertEqual(error[0]['reason'], "Authorization required")
def testGetAclEntry(self):
"""Get an ACL entry"""
self.cal_client.ProgrammaticLogin()
self.cal_client.GetCalendarAclEntry(self._aclEntryUri)
def testCalendarAclFeedFromString(self):
"""Create an ACL feed from a hard-coded string"""
aclFeed = gdata.calendar.CalendarAclFeedFromString(test_data.ACL_FEED)
self.assertEqual("Elizabeth Bennet's access control list", aclFeed.title.text)
self.assertEqual(2,len(aclFeed.entry))
def testCalendarAclEntryFromString(self):
"""Create an ACL entry from a hard-coded string"""
aclEntry = gdata.calendar.CalendarAclEntryFromString(test_data.ACL_ENTRY)
self.assertEqual("owner", aclEntry.title.text)
self.assertEqual("user", aclEntry.scope.type)
self.assertEqual("liz@gmail.com", aclEntry.scope.value)
self.assertEqual("http://schemas.google.com/gCal/2005#owner", aclEntry.role.value)
def testCreateAndDeleteAclEntry(self):
"""Add an ACL rule and verify that is it returned in the ACL feed. Then delete the rule and
verify that the rule is no longer included in the ACL feed."""
# Get the current number of ACL rules
self.cal_client.ProgrammaticLogin()
aclFeed = self.cal_client.GetCalendarAclFeed(self._aclFeedUri)
original_rule_count = len(aclFeed.entry)
# Insert entry
rule = self._generateAclEntry()
returned_rule = self.cal_client.InsertAclEntry(rule, self._aclFeedUri)
# Verify rule was added with correct ACL values
aclFeed = self.cal_client.GetCalendarAclFeed(self._aclFeedUri)
self.assertEqual(original_rule_count+1, len(aclFeed.entry))
self.assertEqualAclEntry(rule, returned_rule)
# Delete the event
self.cal_client.DeleteAclEntry(returned_rule.GetEditLink().href)
aclFeed = self.cal_client.GetCalendarAclFeed(self._aclFeedUri)
self.assertEquals(original_rule_count, len(aclFeed.entry))
def testUpdateAclChangeScopeValue(self):
"""Fiendishly try to insert a test ACL rule and attempt to change the scope value (i.e. username).
Verify that an exception is thrown, then delete the test rule."""
# Insert a user-scoped owner role ot random user
aclEntry = self._generateAclEntry("owner","user");
self.cal_client.ProgrammaticLogin()
rule = self._generateAclEntry()
returned_rule = self.cal_client.InsertAclEntry(rule, self._aclFeedUri)
# Change the scope value (i.e. what user is the owner) and update the entry
updated_rule = returned_rule
updated_rule.scope.value = "user_%s@gmail.com" % (self._getRandomNumber())
try:
returned_rule = self.cal_client.UpdateAclEntry(returned_rule.GetEditLink().href, updated_rule)
except gdata.service.RequestError, error:
self.assertEqual(error[0]['status'], 403)
self.assertEqual(error[0]['reason'], "Forbidden")
self.cal_client.DeleteAclEntry(updated_rule.GetEditLink().href)
def testUpdateAclChangeScopeType(self):
"""Fiendishly try to insert a test ACL rule and attempt to change the scope type (i.e. from 'user' to 'domain').
Verify that an exception is thrown, then delete the test rule."""
# Insert a user-scoped owner role ot random user
aclEntry = self._generateAclEntry("owner","user");
self.cal_client.ProgrammaticLogin()
rule = self._generateAclEntry()
returned_rule = self.cal_client.InsertAclEntry(rule, self._aclFeedUri)
# Change the scope value (i.e. what user is the owner) and update the entry
updated_rule = returned_rule
updated_rule.scope.type = "domain"
try:
returned_rule = self.cal_client.UpdateAclEntry(returned_rule.GetEditLink().href, updated_rule)
except gdata.service.RequestError, error:
self.assertEqual(error[0]['status'], 403)
self.assertEqual(error[0]['reason'], "Forbidden")
self.cal_client.DeleteAclEntry(updated_rule.GetEditLink().href)
def testUpdateAclChangeRoleValue(self):
"""Insert a test ACL rule and attempt to change the scope type (i.e. from 'owner' to 'editor').
Verify that an exception is thrown, then delete the test rule."""
# Insert a user-scoped owner role ot random user
aclEntry = self._generateAclEntry("owner","user");
self.cal_client.ProgrammaticLogin()
rule = self._generateAclEntry()
returned_rule = self.cal_client.InsertAclEntry(rule, self._aclFeedUri)
# Change the scope value (i.e. what user is the owner) and update the entry
updated_rule = returned_rule
updated_rule.role.value = "http://schemas.google.com/gCal/2005#editor"
returned_rule = self.cal_client.UpdateAclEntry(returned_rule.GetEditLink().href, updated_rule)
self.assertEqualAclEntry(updated_rule, returned_rule)
self.cal_client.DeleteAclEntry(updated_rule.GetEditLink().href)
if __name__ == '__main__':
    print ('NOTE: Please run these tests only with a test account. ' +
           'The tests may delete or update your data.')
    # Python 2: raw_input reads a line from stdin; getpass hides the echo.
    username = raw_input('Please enter your username: ')
    password = getpass.getpass()
    unittest.main()
| Python |
#!/usr/bin/python
#
# Copyright (C) 2006 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'api.rboyd@google.com (Ryan Boyd)'
import unittest
try:
from xml.etree import ElementTree
except ImportError:
from elementtree import ElementTree
import atom
import atom.mock_http
import gdata.calendar
import gdata.calendar.service
import random
import getpass
# Credentials for the live-service tests; intended to be filled in before
# running this module (the tests authenticate as this account).
username = ''
password = ''
class CalendarServiceUnitTest(unittest.TestCase):
def setUp(self):
    """Builds a CalendarService client from the module-level credentials."""
    client = gdata.calendar.service.CalendarService()
    client.email = username
    client.password = password
    client.source = 'GCalendarClient "Unit" Tests'
    self.cal_client = client
def tearDown(self):
    """Nothing to clean up; each test manages its own server-side state."""
    pass
def testUrlScrubbing(self):
    """_RemoveStandardUrlPrefix strips only the standard http prefix."""
    cases = (
        ('/test', '/test'),
        ('http://www.google.com/calendar/test', '/calendar/test'),
        ('https://www.google.com/calendar/test',
         'https://www.google.com/calendar/test'),
    )
    for url, expected in cases:
        self.assertEquals(
            self.cal_client._RemoveStandardUrlPrefix(url), expected)
def testPostUpdateAndDeleteSubscription(self):
    """Test posting a new subscription, updating it, deleting it."""
    self.cal_client.ProgrammaticLogin()
    subscription_id = 'c4o4i7m2lbamc4k26sc2vokh5g%40group.calendar.google.com'
    subscription_url = '%s%s' % (
        'http://www.google.com/calendar/feeds/default/allcalendars/full/',
        subscription_id)
    # Subscribe to Google Doodles calendar
    calendar = gdata.calendar.CalendarListEntry()
    calendar.id = atom.Id(text=subscription_id)
    returned_calendar = self.cal_client.InsertCalendarSubscription(calendar)
    self.assertEquals(subscription_url, returned_calendar.id.text)
    self.assertEquals('Google Doodles', returned_calendar.title.text)
    # Update subscription: toggle the 'selected' flag off and verify the
    # change round-trips through the server.
    calendar_to_update = self.cal_client.GetCalendarListEntry(subscription_url)
    self.assertEquals('Google Doodles', calendar_to_update.title.text)
    self.assertEquals('true', calendar_to_update.selected.value)
    calendar_to_update.selected.value = 'false'
    self.assertEquals('false', calendar_to_update.selected.value)
    updated_calendar = self.cal_client.UpdateCalendar(calendar_to_update)
    self.assertEquals('false', updated_calendar.selected.value)
    # Delete subscription
    response = self.cal_client.DeleteCalendarEntry(
        returned_calendar.GetEditLink().href)
    self.assertEquals(True, response)
def testPostUpdateAndDeleteCalendar(self):
    """Test posting a new calendar, updating it, deleting it."""
    self.cal_client.ProgrammaticLogin()
    # New calendar to create
    title = 'Little League Schedule'
    description = 'This calendar contains practice and game times'
    time_zone = 'America/Los_Angeles'
    hidden = False
    location = 'Oakland'
    color = '#2952A3'
    # Calendar object
    calendar = gdata.calendar.CalendarListEntry()
    calendar.title = atom.Title(text=title)
    calendar.summary = atom.Summary(text=description)
    calendar.where = gdata.calendar.Where(value_string=location)
    calendar.color = gdata.calendar.Color(value=color)
    calendar.timezone = gdata.calendar.Timezone(value=time_zone)
    if hidden:
        calendar.hidden = gdata.calendar.Hidden(value='true')
    else:
        calendar.hidden = gdata.calendar.Hidden(value='false')
    # Create calendar and verify every field round-trips.
    new_calendar = self.cal_client.InsertCalendar(new_calendar=calendar)
    self.assertEquals(title, new_calendar.title.text)
    self.assertEquals(description, new_calendar.summary.text)
    self.assertEquals(location, new_calendar.where.value_string)
    self.assertEquals(color, new_calendar.color.value)
    self.assertEquals(time_zone, new_calendar.timezone.value)
    if hidden:
        self.assertEquals('true', new_calendar.hidden.value)
    else:
        self.assertEquals('false', new_calendar.hidden.value)
    # Update calendar
    calendar_to_update = self.cal_client.GetCalendarListEntry(
        new_calendar.id.text)
    updated_title = 'This is the updated title'
    calendar_to_update.title.text = updated_title
    updated_calendar = self.cal_client.UpdateCalendar(calendar_to_update)
    self.assertEquals(updated_title, updated_calendar.title.text)
    # Delete calendar (re-fetch first to get a current edit link)
    calendar_to_delete = self.cal_client.GetCalendarListEntry(
        new_calendar.id.text)
    self.cal_client.Delete(calendar_to_delete.GetEditLink().href)
    # NOTE(review): returning a value from a unittest method is unusual; the
    # runner ignores it, and the entry refers to an already-deleted calendar.
    return new_calendar
def testPostAndDeleteExtendedPropertyEvent(self):
    """Test posting a new entry with an extended property, deleting it."""
    # Get random data for creating event (unique title avoids collisions
    # between concurrent test runs against the same account).
    r = random.Random()
    r.seed()
    random_event_number = str(r.randint(100000, 1000000))
    random_event_title = 'My Random Extended Property Test Event %s' % (
        random_event_number)
    # Set event data
    event = gdata.calendar.CalendarEventEntry()
    event.author.append(atom.Author(name=atom.Name(text='GData Test user')))
    event.title = atom.Title(text=random_event_title)
    event.content = atom.Content(text='Picnic with some lunch')
    event.extended_property.append(gdata.calendar.ExtendedProperty(
        name='prop test name', value='prop test value'))
    # Insert event
    self.cal_client.ProgrammaticLogin()
    new_event = self.cal_client.InsertEvent(
        event, '/calendar/feeds/default/private/full')
    # The extended property must round-trip through the server unchanged.
    self.assertEquals(event.extended_property[0].value,
                      new_event.extended_property[0].value)
    # Delete the event
    self.cal_client.DeleteEvent(new_event.GetEditLink().href)
# WARNING: Due to server-side issues, this test takes a while (~60seconds)
def testPostEntryWithCommentAndDelete(self):
    """Test posting a new entry, adding a comment to it, then deleting it."""
    # Get random data for creating event
    r = random.Random()
    r.seed()
    random_event_number = str(r.randint(100000, 1000000))
    random_event_title = 'My Random Comments Test Event %s' % (
        random_event_number)
    # Set event data
    event = gdata.calendar.CalendarEventEntry()
    event.author.append(atom.Author(name=atom.Name(text='GData Test user')))
    event.title = atom.Title(text=random_event_title)
    event.content = atom.Content(text='Picnic with some lunch')
    # Insert event
    self.cal_client.ProgrammaticLogin()
    new_event = self.cal_client.InsertEvent(
        event, '/calendar/feeds/default/private/full')
    # Get comments feed for the new event
    comments_url = new_event.comments.feed_link.href
    comments_query = gdata.calendar.service.CalendarEventCommentQuery(comments_url)
    comments_feed = self.cal_client.CalendarQuery(comments_query)
    # Add comment
    comments_entry = gdata.calendar.CalendarEventCommentEntry()
    comments_entry.content = atom.Content(text='Comments content')
    comments_entry.author.append(
        atom.Author(name=atom.Name(text='GData Test user'),
                    email=atom.Email(text=username)))
    new_comments_entry = self.cal_client.InsertEventComment(
        comments_entry, comments_feed.GetPostLink().href)
    # Delete the event (re-fetch it first to get a current edit link)
    event_to_delete = self.cal_client.GetCalendarEventEntry(new_event.id.text)
    self.cal_client.DeleteEvent(event_to_delete.GetEditLink().href)
def testPostQueryUpdateAndDeleteEvents(self):
    """Posts a new event, updates it, deletes it, and queries for it.

    Exercises the full event lifecycle against the live Calendar API and
    verifies that the deleted event shows up as CANCELED in query results.
    """
    # Build a random event title/date so concurrent runs don't collide.
    r = random.Random()
    r.seed()
    random_event_number = str(r.randint(100000, 1000000))
    random_event_title = 'My Random Test Event %s' % random_event_number
    random_start_hour = (r.randint(1, 1000000) % 23)
    random_end_hour = random_start_hour + 1
    non_random_start_minute = 0
    non_random_end_minute = 0
    random_month = (r.randint(1, 1000000) % 12 + 1)
    random_day_of_month = (r.randint(1, 1000000) % 28 + 1)
    non_random_year = 2008
    start_time = '%04d-%02d-%02dT%02d:%02d:00.000-05:00' % (
        non_random_year, random_month, random_day_of_month,
        random_start_hour, non_random_start_minute,)
    end_time = '%04d-%02d-%02dT%02d:%02d:00.000-05:00' % (
        non_random_year, random_month, random_day_of_month,
        random_end_hour, non_random_end_minute,)
    # Set event data
    event = gdata.calendar.CalendarEventEntry()
    event.author.append(atom.Author(name=atom.Name(text='GData Test user')))
    event.title = atom.Title(text=random_event_title)
    event.content = atom.Content(text='Picnic with some lunch')
    event.where.append(gdata.calendar.Where(value_string='Down by the river'))
    event.when.append(gdata.calendar.When(start_time=start_time,
                                          end_time=end_time))
    # Insert event
    self.cal_client.ProgrammaticLogin()
    new_event = self.cal_client.InsertEvent(
        event, '/calendar/feeds/default/private/full')
    # Ensure that atom data returned from calendar server equals data sent
    self.assertEquals(event.title.text, new_event.title.text)
    self.assertEquals(event.content.text, new_event.content.text)
    # Ensure that gd:where data returned from calendar equals value sent
    self.assertEquals(event.where[0].value_string,
                      new_event.where[0].value_string)
    # NOTE: round-trip comparison of the gd:when timestamps is omitted
    # because dateutil is not available in this repository.
    # Update event
    event_to_update = new_event
    updated_title_text = event_to_update.title.text + ' - UPDATED'
    event_to_update.title = atom.Title(text=updated_title_text)
    updated_event = self.cal_client.UpdateEvent(
        event_to_update.GetEditLink().href, event_to_update)
    # Ensure that updated title was set in the updated event
    self.assertEquals(event_to_update.title.text, updated_event.title.text)
    # Delete the event
    self.cal_client.DeleteEvent(updated_event.GetEditLink().href)
    # Ensure deleted event is marked as canceled in the feed
    after_delete_query = gdata.calendar.service.CalendarEventQuery()
    after_delete_query.updated_min = '2007-01-01'
    after_delete_query.text_query = str(random_event_number)
    after_delete_query.max_results = '1'
    after_delete_query_result = self.cal_client.CalendarQuery(
        after_delete_query)
    # Bug fix: max_results is stored as the string '1'; in Python 2 an int
    # always orders below a str, so the original `len(...) <= max_results`
    # assertion was vacuously true. Convert to int for a real check.
    self.assert_(len(after_delete_query_result.entry) <=
                 int(after_delete_query.max_results))
    # Ensure status of returned event is canceled
    self.assertEquals(after_delete_query_result.entry[0].event_status.value,
                      'CANCELED')
def testEventWithSyncEventAndUID(self):
    """Test posting a new entry (with syncEvent and a UID) and deleting it."""
    # Get random data for creating event
    r = random.Random()
    r.seed()
    random_event_number = str(r.randint(100000, 1000000))
    random_event_title = 'My Random Test Event %s' % random_event_number
    random_start_hour = (r.randint(1, 1000000) % 23)
    random_end_hour = random_start_hour + 1
    non_random_start_minute = 0
    non_random_end_minute = 0
    random_month = (r.randint(1, 1000000) % 12 + 1)
    random_day_of_month = (r.randint(1, 1000000) % 28 + 1)
    non_random_year = 2008
    start_time = '%04d-%02d-%02dT%02d:%02d:00.000-05:00' % (
        non_random_year, random_month, random_day_of_month,
        random_start_hour, non_random_start_minute,)
    end_time = '%04d-%02d-%02dT%02d:%02d:00.000-05:00' % (
        non_random_year, random_month, random_day_of_month,
        random_end_hour, non_random_end_minute,)
    # create a random event ID. I'm mimicing an example from outlook here,
    # the format doesn't seem to be important per the RFC except for being
    # globally unique.
    uid_string = ''
    for i in xrange(121):
        uid_string += "%X" % r.randint(0, 0xf)
    # Set event data
    event = gdata.calendar.CalendarEventEntry()
    event.author.append(atom.Author(name=atom.Name(text='GData Test user')))
    event.title = atom.Title(text=random_event_title)
    event.content = atom.Content(text='Picnic with some lunch')
    event.where.append(gdata.calendar.Where(value_string='Down by the river'))
    event.when.append(gdata.calendar.When(
        start_time=start_time, end_time=end_time))
    event.sync_event = gdata.calendar.SyncEvent('true')
    event.uid = gdata.calendar.UID(value=uid_string)
    # Insert event
    self.cal_client.ProgrammaticLogin()
    new_event = self.cal_client.InsertEvent(
        event, '/calendar/feeds/default/private/full')
    # Inserting it a second time should fail, as it'll have the same UID
    try:
        bad_event = self.cal_client.InsertEvent(
            event, '/calendar/feeds/default/private/full')
        self.fail('Was able to insert an event with a duplicate UID')
    except gdata.service.RequestError, error:
        # for the current problem with redirects, just re-raise so the
        # failure doesn't seem to be because of the duplicate UIDs.
        status = error[0]['status']
        if status == 302:
            raise
        # otherwise, make sure it was the right error
        self.assertEquals(error[0]['status'], 409)
        self.assertEquals(error[0]['reason'], 'Conflict')
    # Ensure that atom data returned from calendar server equals atom data
    # sent
    self.assertEquals(event.title.text, new_event.title.text)
    self.assertEquals(event.content.text, new_event.content.text)
    # Ensure that gd:where data returned from calendar equals value sent
    self.assertEquals(event.where[0].value_string,
                      new_event.where[0].value_string)
    # Delete the event
    self.cal_client.DeleteEvent(new_event.GetEditLink().href)
def testCreateAndDeleteEventUsingBatch(self):
# Get random data for creating event
r = random.Random()
r.seed()
random_event_number = str(r.randint(100000,1000000))
random_event_title = 'My Random Comments Test Event %s' % (
random_event_number)
# Set event data
event = gdata.calendar.CalendarEventEntry()
event.author.append(atom.Author(name=atom.Name(text='GData Test user')))
event.title = atom.Title(text=random_event_title)
event.content = atom.Content(text='Picnic with some lunch')
# Form a batch request
batch_request = gdata.calendar.CalendarEventFeed()
batch_request.AddInsert(entry=event)
# Execute the batch request to insert the event.
self.cal_client.ProgrammaticLogin()
batch_result = self.cal_client.ExecuteBatch(batch_request,
gdata.calendar.service.DEFAULT_BATCH_URL)
self.assertEquals(len(batch_result.entry), 1)
self.assertEquals(batch_result.entry[0].title.text, random_event_title)
self.assertEquals(batch_result.entry[0].batch_operation.type,
gdata.BATCH_INSERT)
self.assertEquals(batch_result.GetBatchLink().href,
gdata.calendar.service.DEFAULT_BATCH_URL)
# Create a batch request to delete the newly created entry.
batch_delete_request = gdata.calendar.CalendarEventFeed()
batch_delete_request.AddDelete(entry=batch_result.entry[0])
batch_delete_result = self.cal_client.ExecuteBatch(batch_delete_request,
batch_result.GetBatchLink().href)
self.assertEquals(len(batch_delete_result.entry), 1)
self.assertEquals(batch_delete_result.entry[0].batch_operation.type,
gdata.BATCH_DELETE)
def testCorrectReturnTypesForGetMethods(self):
self.cal_client.ProgrammaticLogin()
result = self.cal_client.GetCalendarEventFeed()
self.assertEquals(isinstance(result, gdata.calendar.CalendarEventFeed),
True)
def testValidHostName(self):
mock_http = atom.mock_http.MockHttpClient()
response = atom.mock_http.MockResponse(body='<entry/>', status=200,
reason='OK')
mock_http.add_response(response, 'GET',
'https://www.google.com/calendar/feeds/default/allcalendars/full')
self.cal_client.ssl = True
self.cal_client.http_client = mock_http
self.cal_client.SetAuthSubToken('foo')
self.assertEquals(str(self.cal_client.token_store.find_token(
'https://www.google.com/calendar/feeds/default/allcalendars/full')),
'AuthSub token=foo')
resp = self.cal_client.Get('/calendar/feeds/default/allcalendars/full')
self.assert_(resp is not None)
class CalendarEventQueryUnitTest(unittest.TestCase):
  """Checks parameter validation and URI building on CalendarEventQuery."""

  def setUp(self):
    self.query = gdata.calendar.service.CalendarEventQuery()

  def testOrderByValidatesValues(self):
    self.query.orderby = 'lastmodified'
    self.assertEquals(self.query.orderby, 'lastmodified')
    # An illegal value must raise and leave the previous value in place.
    self.assertRaises(gdata.calendar.service.Error, setattr,
                      self.query, 'orderby', 'illegal input')
    self.assertEquals(self.query.orderby, 'lastmodified')

  def testSortOrderValidatesValues(self):
    self.query.sortorder = 'a'
    self.assertEquals(self.query.sortorder, 'a')
    # Same contract as orderby: invalid assignments must not stick.
    self.assertRaises(gdata.calendar.service.Error, setattr,
                      self.query, 'sortorder', 'illegal input')
    self.assertEquals(self.query.sortorder, 'a')

  def testTimezoneParameter(self):
    # ctz is stored as a query parameter and URL-escaped in the URI.
    self.query.ctz = 'America/Los_Angeles'
    self.assertEquals(self.query['ctz'], 'America/Los_Angeles')
    self.assert_(self.query.ToUri().find('America%2FLos_Angeles') > -1)
if __name__ == '__main__':
  # Warn up front: these tests talk to a live calendar account and can
  # modify or delete its data.
  print ('Google Calendar Test\nNOTE: Please run these tests only with a '
         'test account. The tests may delete or update your data.')
  # Credentials are stored in module-level globals; presumably consumed by
  # the test fixtures when logging in -- TODO confirm against the setUp
  # methods earlier in this file.
  username = raw_input('Please enter your username: ')
  password = getpass.getpass()
  unittest.main()
| Python |
#!/usr/bin/env python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module is used for version 2 of the Google Data APIs.
__author__ = 'j.s@google.com (Jeff Scudder)'
import unittest
import gdata.data
from gdata import test_data
import gdata.test_config as conf
import atom.core
import atom.data
# Sample GData v2 contacts-style feed with gd:etag attributes, a 'next'
# link, and a decoy 'self' link; used below to exercise etag parsing and
# the edit/next link finders.  The string content is part of the test
# fixture, so it is left byte-for-byte unchanged.
SIMPLE_V2_FEED_TEST_DATA = """<feed xmlns='http://www.w3.org/2005/Atom'
xmlns:gd='http://schemas.google.com/g/2005'
gd:etag='W/"CUMBRHo_fip7ImA9WxRbGU0."'>
<title>Elizabeth Bennet's Contacts</title>
<link rel='next' type='application/atom+xml'
href='http://www.google.com/m8/feeds/contacts/.../more' />
<entry gd:etag='"Qn04eTVSLyp7ImA9WxRbGEUORAQ."'>
<id>http://www.google.com/m8/feeds/contacts/liz%40gmail.com/base/c9e</id>
<title>Fitzwilliam</title>
<link rel='http://schemas.google.com/contacts/2008/rel#photo'
type='image/*'
href='http://www.google.com/m8/feeds/photos/media/liz%40gmail.com/c9e'
gd:etag='"KTlcZWs1bCp7ImBBPV43VUV4LXEZCXERZAc."' />
<link rel='self' type='application/atom+xml'
href='Changed to ensure we are really getting the edit URL.'/>
<link rel='edit' type='application/atom+xml'
href='http://www.google.com/m8/feeds/contacts/liz%40gmail.com/full/c9e'/>
</entry>
<entry gd:etag='"123456"'>
<link rel='edit' href='http://example.com/1' />
</entry>
</feed>"""
# Sample Google Base entry.  Note the deliberate whitespace padding inside
# the <id> element -- testIdShouldBeCleaned relies on it -- so the string
# content must stay byte-for-byte unchanged.
XML_ENTRY_1 = """<?xml version='1.0'?>
<entry xmlns='http://www.w3.org/2005/Atom'
xmlns:g='http://base.google.com/ns/1.0'>
<category scheme="http://base.google.com/categories/itemtypes"
term="products"/>
<id> http://www.google.com/test/id/url </id>
<title type='text'>Testing 2000 series laptop</title>
<content type='xhtml'>
<div xmlns='http://www.w3.org/1999/xhtml'>A Testing Laptop</div>
</content>
<link rel='alternate' type='text/html'
href='http://www.provider-host.com/123456789'/>
<link rel='license'
href='http://creativecommons.org/licenses/by-nc/2.5/rdf'/>
<g:label>Computer</g:label>
<g:label>Laptop</g:label>
<g:label>testing laptop</g:label>
<g:item_type>products</g:item_type>
</entry>"""
def parse(xml_string, target_class):
  """Convenience wrapper for converting an XML string to an XmlElement."""
  element = atom.core.xml_element_from_string(xml_string, target_class)
  return element
class StartIndexTest(unittest.TestCase):
  """Round-trips the openSearch startIndex element through XML."""

  def setUp(self):
    self.start_index = gdata.data.StartIndex()

  def testToAndFromString(self):
    self.start_index.text = '1'
    self.assertEqual(self.start_index.text, '1')
    # Serialize and reparse; the text content must survive the round trip.
    reparsed = parse(self.start_index.ToString(), gdata.data.StartIndex)
    self.assertEqual(self.start_index.text, reparsed.text)
class ItemsPerPageTest(unittest.TestCase):
  """Round-trips the openSearch itemsPerPage element through XML."""

  def setUp(self):
    self.items_per_page = gdata.data.ItemsPerPage()

  def testToAndFromString(self):
    self.items_per_page.text = '10'
    self.assertEqual(self.items_per_page.text, '10')
    # Serialize and reparse; the text content must survive the round trip.
    reparsed = parse(self.items_per_page.ToString(), gdata.data.ItemsPerPage)
    self.assertEqual(self.items_per_page.text, reparsed.text)
class GDataEntryTest(unittest.TestCase):
  """Checks whitespace cleanup and empty-id handling on GDEntry/GDFeed."""

  def testIdShouldBeCleaned(self):
    entry = parse(XML_ENTRY_1, gdata.data.GDEntry)
    raw_tree = parse(XML_ENTRY_1, atom.core.XmlElement)
    raw_id = raw_tree.get_elements(
        'id', 'http://www.w3.org/2005/Atom')[0].text
    # The raw XML id is padded with whitespace; get_id() must strip it.
    self.assert_(raw_id != entry.get_id())
    self.assertEqual(entry.get_id(), 'http://www.google.com/test/id/url')

  def testGeneratorShouldBeCleaned(self):
    feed = parse(test_data.GBASE_FEED, gdata.data.GDFeed)
    raw_tree = parse(test_data.GBASE_FEED, atom.core.XmlElement)
    raw_generator = raw_tree.get_elements(
        'generator', 'http://www.w3.org/2005/Atom')[0].text
    # get_generator() must differ from the raw text and be cleaned.
    self.assert_(raw_generator != feed.get_generator())
    self.assertEqual(feed.get_generator(), 'GoogleBase')

  def testAllowsEmptyId(self):
    entry = gdata.data.GDEntry()
    try:
      entry.id = atom.data.Id()
    except AttributeError:
      self.fail('Empty id should not raise an attribute error.')
class LinkFinderTest(unittest.TestCase):
  """Exercises the link-lookup helpers on GDEntry."""

  def setUp(self):
    self.entry = parse(XML_ENTRY_1, gdata.data.GDEntry)

  def testLinkFinderGetsLicenseLink(self):
    license_href = self.entry.FindLicenseLink()
    self.assert_(isinstance(license_href, str))
    self.assertEquals(license_href,
                      'http://creativecommons.org/licenses/by-nc/2.5/rdf')

  def testLinkFinderGetsAlternateLink(self):
    alternate_href = self.entry.FindAlternateLink()
    self.assert_(isinstance(alternate_href, str))
    self.assertEquals(alternate_href, 'http://www.provider-host.com/123456789')

  def testFindAclLink(self):
    entry = gdata.data.GDEntry()
    self.assert_(entry.get_acl_link() is None)
    self.assert_(entry.find_acl_link() is None)
    # A plain atom:link with the ACL rel should be found.
    entry.link.append(atom.data.Link(
        rel=gdata.data.ACL_REL, href='http://example.com/acl'))
    self.assertEqual(entry.get_acl_link().href, 'http://example.com/acl')
    self.assertEqual(entry.find_acl_link(), 'http://example.com/acl')
    del entry.link[0]
    self.assert_(entry.get_acl_link() is None)
    self.assert_(entry.find_acl_link() is None)
    # An ACL link expressed as a gd:feedLink should be found as well.
    entry.feed_link = [gdata.data.FeedLink(
        rel=gdata.data.ACL_REL, href='http://example.com/acl2')]
    self.assertEqual(entry.get_acl_link().href, 'http://example.com/acl2')
    self.assertEqual(entry.find_acl_link(), 'http://example.com/acl2')
class GDataFeedTest(unittest.TestCase):
  """Sanity checks for GDFeed parsing and construction."""

  def testCorrectConversionToElementTree(self):
    feed = parse(test_data.GBASE_FEED, gdata.data.GDFeed)
    self.assert_(feed.total_results is not None)
    # The openSearch totalResults element must also be reachable through
    # the generic element lookup.
    matches = feed.get_elements('totalResults',
                                'http://a9.com/-/spec/opensearchrss/1.0/')
    self.assert_(matches is not None)
    self.assert_(len(matches) > 0)

  def testAllowsEmptyId(self):
    feed = gdata.data.GDFeed()
    try:
      feed.id = atom.data.Id()
    except AttributeError:
      self.fail('Empty id should not raise an attribute error.')
class BatchEntryTest(unittest.TestCase):
  """Round-trips a BatchEntry through parse -> str -> parse."""

  def testCorrectConversionFromAndToString(self):
    # Parse the canned batch entry and check its batch-specific fields.
    batch_entry = parse(test_data.BATCH_ENTRY, gdata.data.BatchEntry)
    self.assertEquals(batch_entry.batch_id.text, 'itemB')
    self.assertEquals(batch_entry.id.text,
                      'http://www.google.com/base/feeds/items/'
                      '2173859253842813008')
    self.assertEquals(batch_entry.batch_operation.type, 'insert')
    self.assertEquals(batch_entry.batch_status.code, '201')
    self.assertEquals(batch_entry.batch_status.reason, 'Created')
    # Serialize and reparse; all batch fields must survive the round trip.
    new_entry = parse(str(batch_entry), gdata.data.BatchEntry)
    self.assertEquals(batch_entry.batch_id.text, new_entry.batch_id.text)
    self.assertEquals(batch_entry.id.text, new_entry.id.text)
    self.assertEquals(batch_entry.batch_operation.type,
                      new_entry.batch_operation.type)
    self.assertEquals(batch_entry.batch_status.code,
                      new_entry.batch_status.code)
    self.assertEquals(batch_entry.batch_status.reason,
                      new_entry.batch_status.reason)
class BatchFeedTest(unittest.TestCase):
  """Covers BatchFeed construction helpers and request/result parsing."""

  def setUp(self):
    # A fresh empty feed plus a reusable example entry for the helpers.
    self.batch_feed = gdata.data.BatchFeed()
    self.example_entry = gdata.data.BatchEntry(
        id=atom.data.Id(text='http://example.com/1'), text='This is a test')

  def testConvertRequestFeed(self):
    # Parsing a request feed yields BatchEntry instances, and the feed
    # survives a serialize/reparse round trip.
    batch_feed = parse(test_data.BATCH_FEED_REQUEST, gdata.data.BatchFeed)
    self.assertEquals(len(batch_feed.entry), 4)
    for entry in batch_feed.entry:
      self.assert_(isinstance(entry, gdata.data.BatchEntry))
    self.assertEquals(batch_feed.title.text, 'My Batch Feed')
    new_feed = parse(batch_feed.to_string(), gdata.data.BatchFeed)
    self.assertEquals(len(new_feed.entry), 4)
    for entry in new_feed.entry:
      self.assert_(isinstance(entry, gdata.data.BatchEntry))
    self.assertEquals(new_feed.title.text, 'My Batch Feed')

  def testConvertResultFeed(self):
    # A result feed carries per-entry operation/status data; check one
    # known entry both before and after a round trip through str().
    batch_feed = parse(test_data.BATCH_FEED_RESULT, gdata.data.BatchFeed)
    self.assertEquals(len(batch_feed.entry), 4)
    for entry in batch_feed.entry:
      self.assert_(isinstance(entry, gdata.data.BatchEntry))
      if entry.id.text == ('http://www.google.com/base/feeds/items/'
                           '2173859253842813008'):
        self.assertEquals(entry.batch_operation.type, 'insert')
        self.assertEquals(entry.batch_id.text, 'itemB')
        self.assertEquals(entry.batch_status.code, '201')
        self.assertEquals(entry.batch_status.reason, 'Created')
    self.assertEquals(batch_feed.title.text, 'My Batch')
    new_feed = parse(str(batch_feed), gdata.data.BatchFeed)
    self.assertEquals(len(new_feed.entry), 4)
    for entry in new_feed.entry:
      self.assert_(isinstance(entry, gdata.data.BatchEntry))
      if entry.id.text == ('http://www.google.com/base/feeds/items/'
                           '2173859253842813008'):
        self.assertEquals(entry.batch_operation.type, 'insert')
        self.assertEquals(entry.batch_id.text, 'itemB')
        self.assertEquals(entry.batch_status.code, '201')
        self.assertEquals(entry.batch_status.reason, 'Created')
    self.assertEquals(new_feed.title.text, 'My Batch')

  def testAddBatchEntry(self):
    # Calling AddBatchEntry with neither an entry nor an id URL must raise.
    try:
      self.batch_feed.AddBatchEntry(batch_id_string='a')
      self.fail('AddBatchEntry with neither entry or URL should raise Error')
    except gdata.data.MissingRequiredParameters:
      pass
    # With only an id URL, an entry is created and auto-assigned batch
    # id '0' (ids count up from zero).
    new_entry = self.batch_feed.AddBatchEntry(
        id_url_string='http://example.com/1')
    self.assertEquals(len(self.batch_feed.entry), 1)
    self.assertEquals(self.batch_feed.entry[0].get_id(),
                      'http://example.com/1')
    self.assertEquals(self.batch_feed.entry[0].batch_id.text, '0')
    self.assertEquals(new_entry.id.text, 'http://example.com/1')
    self.assertEquals(new_entry.batch_id.text, '0')
    # An explicit batch_id_string is applied to an existing entry.
    to_add = gdata.data.BatchEntry(id=atom.data.Id(text='originalId'))
    new_entry = self.batch_feed.AddBatchEntry(entry=to_add,
                                              batch_id_string='foo')
    self.assertEquals(new_entry.batch_id.text, 'foo')
    self.assertEquals(new_entry.id.text, 'originalId')
    # batch_id_string overrides a batch id already on the entry, and the
    # entry's existing id wins over id_url_string.
    to_add = gdata.data.BatchEntry(id=atom.data.Id(text='originalId'),
                                   batch_id=gdata.data.BatchId(text='bar'))
    new_entry = self.batch_feed.AddBatchEntry(entry=to_add,
                                              id_url_string='newId',
                                              batch_id_string='foo')
    self.assertEquals(new_entry.batch_id.text, 'foo')
    self.assertEquals(new_entry.id.text, 'originalId')
    # Without batch_id_string the entry's own batch id is preserved.
    to_add = gdata.data.BatchEntry(id=atom.data.Id(text='originalId'),
                                   batch_id=gdata.data.BatchId(text='bar'))
    new_entry = self.batch_feed.AddBatchEntry(entry=to_add,
                                              id_url_string='newId')
    self.assertEquals(new_entry.batch_id.text, 'bar')
    self.assertEquals(new_entry.id.text, 'originalId')
    # operation_string overrides an operation already set on the entry.
    to_add = gdata.data.BatchEntry(id=atom.data.Id(text='originalId'),
                                   batch_id=gdata.data.BatchId(text='bar'),
                                   batch_operation=gdata.data.BatchOperation(
                                       type=gdata.data.BATCH_INSERT))
    self.assertEquals(to_add.batch_operation.type, gdata.data.BATCH_INSERT)
    new_entry = self.batch_feed.AddBatchEntry(entry=to_add,
        id_url_string='newId', batch_id_string='foo',
        operation_string=gdata.data.BATCH_UPDATE)
    self.assertEquals(new_entry.batch_operation.type, gdata.data.BATCH_UPDATE)

  def testAddInsert(self):
    first_entry = gdata.data.BatchEntry(
        id=atom.data.Id(text='http://example.com/1'), text='This is a test1')
    self.batch_feed.AddInsert(first_entry)
    self.assertEquals(self.batch_feed.entry[0].batch_operation.type,
                      gdata.data.BATCH_INSERT)
    self.assertEquals(self.batch_feed.entry[0].batch_id.text, '0')
    second_entry = gdata.data.BatchEntry(
        id=atom.data.Id(text='http://example.com/2'), text='This is a test2')
    self.batch_feed.AddInsert(second_entry, batch_id_string='foo')
    self.assertEquals(self.batch_feed.entry[1].batch_operation.type,
                      gdata.data.BATCH_INSERT)
    self.assertEquals(self.batch_feed.entry[1].batch_id.text, 'foo')
    third_entry = gdata.data.BatchEntry(
        id=atom.data.Id(text='http://example.com/3'), text='This is a test3')
    third_entry.batch_operation = gdata.data.BatchOperation(
        type=gdata.data.BATCH_DELETE)
    # Add an entry with a delete operation already assigned.
    self.batch_feed.AddInsert(third_entry)
    # The batch entry should not have the original operation, it should
    # have been changed to an insert.
    self.assertEquals(self.batch_feed.entry[2].batch_operation.type,
                      gdata.data.BATCH_INSERT)
    self.assertEquals(self.batch_feed.entry[2].batch_id.text, '2')

  def testAddDelete(self):
    # Try deleting an entry
    delete_entry = gdata.data.BatchEntry(
        id=atom.data.Id(text='http://example.com/1'), text='This is a test')
    self.batch_feed.AddDelete(entry=delete_entry)
    self.assertEquals(self.batch_feed.entry[0].batch_operation.type,
                      gdata.data.BATCH_DELETE)
    self.assertEquals(self.batch_feed.entry[0].get_id(),
                      'http://example.com/1')
    self.assertEquals(self.batch_feed.entry[0].text, 'This is a test')
    # Try deleting a URL
    self.batch_feed.AddDelete(url_string='http://example.com/2')
    self.assertEquals(self.batch_feed.entry[0].batch_operation.type,
                      gdata.data.BATCH_DELETE)
    self.assertEquals(self.batch_feed.entry[1].id.text,
                      'http://example.com/2')
    self.assert_(self.batch_feed.entry[1].text is None)

  def testAddQuery(self):
    # Try querying with an existing batch entry
    delete_entry = gdata.data.BatchEntry(
        id=atom.data.Id(text='http://example.com/1'))
    self.batch_feed.AddQuery(entry=delete_entry)
    self.assertEquals(self.batch_feed.entry[0].batch_operation.type,
                      gdata.data.BATCH_QUERY)
    self.assertEquals(self.batch_feed.entry[0].get_id(),
                      'http://example.com/1')
    # Try querying a URL
    self.batch_feed.AddQuery(url_string='http://example.com/2')
    self.assertEquals(self.batch_feed.entry[0].batch_operation.type,
                      gdata.data.BATCH_QUERY)
    self.assertEquals(self.batch_feed.entry[1].id.text,
                      'http://example.com/2')

  def testAddUpdate(self):
    # Try updating an entry
    delete_entry = gdata.data.BatchEntry(
        id=atom.data.Id(text='http://example.com/1'), text='This is a test')
    self.batch_feed.AddUpdate(entry=delete_entry)
    self.assertEquals(self.batch_feed.entry[0].batch_operation.type,
                      gdata.data.BATCH_UPDATE)
    self.assertEquals(self.batch_feed.entry[0].get_id(),
                      'http://example.com/1')
    self.assertEquals(self.batch_feed.entry[0].text, 'This is a test')
class ExtendedPropertyTest(unittest.TestCase):
  """Tests the XML-blob accessors on ExtendedProperty."""

  def testXmlBlobRoundTrip(self):
    prop = gdata.data.ExtendedProperty(name='blobby')
    prop.SetXmlBlob('<some_xml attr="test"/>')
    blob = prop.GetXmlBlob()
    self.assertEquals(blob.tag, 'some_xml')
    self.assert_(blob.namespace is None)
    self.assertEquals(blob.attributes['attr'], 'test')
    # The blob must survive serializing the property and reparsing it.
    reparsed = parse(prop.ToString(), gdata.data.ExtendedProperty)
    reparsed_blob = reparsed.GetXmlBlob()
    self.assertEquals(reparsed_blob.tag, 'some_xml')
    self.assert_(reparsed_blob.namespace is None)
    self.assertEquals(reparsed_blob.attributes['attr'], 'test')

  def testGettersShouldReturnNoneWithNoBlob(self):
    prop = gdata.data.ExtendedProperty(name='no blob')
    self.assert_(prop.GetXmlBlob() is None)

  def testGettersReturnCorrectTypes(self):
    prop = gdata.data.ExtendedProperty(name='has blob')
    prop.SetXmlBlob('<some_xml attr="test"/>')
    blob = prop.GetXmlBlob()
    self.assert_(isinstance(blob, atom.core.XmlElement))
    self.assert_(isinstance(blob.to_string(), str))
class FeedLinkTest(unittest.TestCase):
  """Parses a gd:feedLink and checks its countHint attribute."""

  def testCorrectFromStringType(self):
    feed_link = parse(
        '<feedLink xmlns="http://schemas.google.com/g/2005" countHint="5"/>',
        gdata.data.FeedLink)
    self.assert_(isinstance(feed_link, gdata.data.FeedLink))
    self.assertEqual(feed_link.count_hint, '5')
class SimpleV2FeedTest(unittest.TestCase):
  """Parses SIMPLE_V2_FEED_TEST_DATA and checks etags and link lookups."""

  def test_parsing_etags_and_edit_url(self):
    feed = atom.core.parse(SIMPLE_V2_FEED_TEST_DATA, gdata.data.GDFeed)
    # General parsing assertions.
    self.assertEqual(feed.get_elements('title')[0].text,
                     'Elizabeth Bennet\'s Contacts')
    self.assertEqual(len(feed.entry), 2)
    for entry in feed.entry:
      self.assert_(isinstance(entry, gdata.data.GDEntry))
    self.assertEqual(feed.entry[0].GetElements('title')[0].text,
                     'Fitzwilliam')
    self.assertEqual(feed.entry[0].get_elements('id')[0].text,
        'http://www.google.com/m8/feeds/contacts/liz%40gmail.com/base/c9e')
    # ETags checks.
    self.assertEqual(feed.etag, 'W/"CUMBRHo_fip7ImA9WxRbGU0."')
    self.assertEqual(feed.entry[0].etag, '"Qn04eTVSLyp7ImA9WxRbGEUORAQ."')
    self.assertEqual(feed.entry[1].etag, '"123456"')
    # Look for Edit URLs.  The test data's 'self' link is deliberately
    # bogus, so a match proves the 'edit' rel was actually used.
    self.assertEqual(feed.entry[0].find_edit_link(),
        'http://www.google.com/m8/feeds/contacts/liz%40gmail.com/full/c9e')
    self.assertEqual(feed.entry[1].FindEditLink(), 'http://example.com/1')
    # Look for Next URLs.
    self.assertEqual(feed.find_next_link(),
                     'http://www.google.com/m8/feeds/contacts/.../more')

  def test_constructor_defaults(self):
    """Checks default member values on freshly constructed objects.

    Renamed from test_constructor_defauls (typo); unittest still
    discovers it via the test_ prefix.
    """
    feed = gdata.data.GDFeed()
    self.assert_(feed.etag is None)
    self.assertEqual(feed.link, [])
    self.assertEqual(feed.entry, [])
    entry = gdata.data.GDEntry()
    self.assert_(entry.etag is None)
    self.assertEqual(entry.link, [])
    link = atom.data.Link()
    self.assert_(link.href is None)
    self.assert_(link.rel is None)
    link1 = atom.data.Link(href='http://example.com', rel='test')
    self.assertEqual(link1.href, 'http://example.com')
    self.assertEqual(link1.rel, 'test')
    link2 = atom.data.Link(href='http://example.org/', rel='alternate')
    # Keyword arguments populate the members directly.
    entry = gdata.data.GDEntry(etag='foo', link=[link1, link2])
    feed = gdata.data.GDFeed(etag='12345', entry=[entry])
    self.assertEqual(feed.etag, '12345')
    self.assertEqual(len(feed.entry), 1)
    self.assertEqual(feed.entry[0].etag, 'foo')
    self.assertEqual(len(feed.entry[0].link), 2)
class DataClassSanityTest(unittest.TestCase):
  """Structural sanity checks over the data classes in gdata.data."""

  def test_basic_element_structure(self):
    # conf.check_data_classes runs the shared structural checks on each
    # listed class; no sample XML documents are needed.
    conf.check_data_classes(self, [
        gdata.data.TotalResults, gdata.data.StartIndex,
        gdata.data.ItemsPerPage, gdata.data.ExtendedProperty,
        gdata.data.GDEntry, gdata.data.GDFeed, gdata.data.BatchId,
        gdata.data.BatchOperation, gdata.data.BatchStatus,
        gdata.data.BatchEntry, gdata.data.BatchInterrupted,
        gdata.data.BatchFeed, gdata.data.EntryLink, gdata.data.FeedLink,
        gdata.data.AdditionalName, gdata.data.Comments, gdata.data.Country,
        gdata.data.Email, gdata.data.FamilyName, gdata.data.Im,
        gdata.data.GivenName, gdata.data.NamePrefix, gdata.data.NameSuffix,
        gdata.data.FullName, gdata.data.Name, gdata.data.OrgDepartment,
        gdata.data.OrgName, gdata.data.OrgSymbol, gdata.data.OrgTitle,
        gdata.data.Organization, gdata.data.When, gdata.data.Who,
        gdata.data.OriginalEvent, gdata.data.PhoneNumber,
        gdata.data.PostalAddress, gdata.data.Rating, gdata.data.Recurrence,
        gdata.data.RecurrenceException, gdata.data.Reminder,
        gdata.data.Agent, gdata.data.HouseName, gdata.data.Street,
        gdata.data.PoBox, gdata.data.Neighborhood, gdata.data.City,
        gdata.data.Subregion, gdata.data.Region, gdata.data.Postcode,
        gdata.data.Country, gdata.data.FormattedAddress,
        gdata.data.StructuredPostalAddress, gdata.data.Where,
        gdata.data.AttendeeType, gdata.data.AttendeeStatus])

  def test_member_values(self):
    # TotalResults declares qnames for both the openSearch 1.0 and 1.1
    # namespaces.
    self.assertEqual(
        gdata.data.TotalResults._qname,
        ('{http://a9.com/-/spec/opensearchrss/1.0/}totalResults',
         '{http://a9.com/-/spec/opensearch/1.1/}totalResults'))
    self.assertEqual(
        gdata.data.RecurrenceException._qname,
        '{http://schemas.google.com/g/2005}recurrenceException')
    self.assertEqual(gdata.data.RecurrenceException.specialized,
                     'specialized')
def suite():
  """Builds the suite of all test cases defined in this module.

  Returns:
    A unittest suite aggregating every TestCase class above.
  """
  # Bug fix: StartIndexTest was listed twice and ItemsPerPageTest (defined
  # above) was missing from the suite entirely.
  return conf.build_suite([StartIndexTest, ItemsPerPageTest, GDataEntryTest,
      LinkFinderTest, GDataFeedTest, BatchEntryTest, BatchFeedTest,
      ExtendedPropertyTest, FeedLinkTest, SimpleV2FeedTest,
      DataClassSanityTest])
if __name__ == '__main__':
  # Allow running this module's tests directly; suite() above is
  # presumably consumed by an aggregate runner -- see gdata.test_config.
  unittest.main()
| Python |
#!/usr/bin/env python
#
# Copyright (C) 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module is used for version 2 of the Google Data APIs.
__author__ = 'j.s@google.com (Jeff Scudder)'
import unittest
import gdata.core
import gdata.test_config as conf
# Sample JSON-C playlist response from the YouTube API, used to exercise
# gdata.core.parse_json.  String content must remain unchanged.
PLAYLIST_EXAMPLE = (
    '{"apiVersion": "2.0","data": {"totalResults": 347,"startIndex": 1,"it'
    'emsPerPage": 2,"items": [{"id": "4DAEFAF23BB3CDD0","created": "2008-1'
    '2-09T20:23:06.000Z","updated": "2010-01-04T02:56:19.000Z","author": "'
    'GoogleDevelopers","title": "Google Web Toolkit Developers","descripti'
    'on": "Developers talk about using Google Web Toolkit ...","tags": ["g'
    'oogle","web","toolkit","developers","gwt"],"size": 12},{"id": "586D32'
    '2B5E2764CF","created": "2007-11-13T19:41:21.000Z","updated": "2010-01'
    '-04T17:41:16.000Z","author": "GoogleDevelopers","title": "Android","d'
    'escription": "Demos and tutorials about the new Android platform.","t'
    'ags": ["android","google","developers","mobile"],"size": 32}]}}')
# Sample JSON-C single-video response; check_video_json below asserts on
# several of these values, so the string content must remain unchanged.
VIDEO_EXAMPLE = (
    '{"apiVersion": "2.0","data": {"updated": "2010-01-07T19:58:42.949Z","'
    'totalItems": 800,"startIndex": 1,"itemsPerPage": 1, "items": [{"id": '
    '"hYB0mn5zh2c","uploaded": "2007-06-05T22:07:03.000Z","updated": "2010'
    '-01-07T13:26:50.000Z","uploader": "GoogleDeveloperDay","category": "N'
    'ews","title": "Google Developers Day US - Maps API Introduction","des'
    'cription": "Google Maps API Introduction ...","tags": ["GDD07","GDD07'
    'US","Maps"],"thumbnail": {"default": "http://i.ytimg.com/vi/hYB0mn5zh'
    '2c/default.jpg","hqDefault": "http://i.ytimg.com/vi/hYB0mn5zh2c/hqdef'
    'ault.jpg"},"player": {"default": "http://www.youtube.com/watch?v'
    '\u003dhYB0mn5zh2c"},"content": {"1": "rtsp://v5.cache3.c.youtube.com/'
    'CiILENy.../0/0/0/video.3gp","5": "http://www.youtube.com/v/hYB0mn5zh2'
    'c?f...","6": "rtsp://v1.cache1.c.youtube.com/CiILENy.../0/0/0/video.3'
    'gp"},"duration": 2840,"rating": 4.63,"ratingCount": 68,"viewCount": 2'
    '20101,"favoriteCount": 201,"commentCount": 22}]}}')
class JsoncConversionTest(unittest.TestCase):
  """Conversions between JSON text, plain objects, and Jsonc wrappers.

  See http://code.google.com/apis/youtube/2.0/developers_guide_jsonc.html
  """

  def test_from_and_to_old_json(self):
    json = ('{"media$group":{"media$credit":[{"$t":"GoogleDevelopers", '
            '"role":"uploader", "scheme":"urn:youtube"}]}}')
    jsonc_obj = gdata.core.parse_json(json)
    self.assert_(isinstance(jsonc_obj, gdata.core.Jsonc))
    # Converting back to a plain object preserves '$'-style keys.
    plain = gdata.core._convert_to_object(jsonc_obj)
    self.assertEqual(plain['media$group']['media$credit'][0]['$t'],
                     'GoogleDevelopers')

  def test_to_and_from_jsonc(self):
    original = {'a': 1}
    jsonc_obj = gdata.core._convert_to_jsonc(original)
    self.assertEqual(jsonc_obj.a, 1)
    # Converting back to a plain object yields an equal dict.
    self.assertEqual(original, gdata.core._convert_to_object(jsonc_obj))

  def test_from_and_to_new_json(self):
    playlist = gdata.core.parse_json(PLAYLIST_EXAMPLE)
    self.assertEqual(playlist._dict['apiVersion'], '2.0')
    playlist_items = playlist._dict['data']._dict['items']
    self.assertEqual(playlist_items[0]._dict['id'], '4DAEFAF23BB3CDD0')
    self.assertEqual(playlist_items[1]._dict['id'], '586D322B5E2764CF')
    video = gdata.core.parse_json(VIDEO_EXAMPLE)
    self.assertEqual(video._dict['apiVersion'], '2.0')
    self.assertEqual(video.data._dict['totalItems'], 800)
    self.assertEqual(video.data.items[0]._dict['viewCount'], 220101)

  def test_pretty_print(self):
    jsonc_obj = gdata.core.Jsonc(x=1, y=2, z=3)
    # Both the default and an explicit indent level produce a string.
    for pretty in (gdata.core.prettify_jsonc(jsonc_obj),
                   gdata.core.prettify_jsonc(jsonc_obj, 4)):
      self.assert_(isinstance(pretty, (str, unicode)))
class MemberNameConversionTest(unittest.TestCase):
  """Python snake_case member names must map to JSON camelCase names."""

  def test_member_to_jsonc(self):
    cases = [('', ''),
             ('foo', 'foo'),
             ('Foo', 'Foo'),
             ('test_x', 'testX'),
             ('test_x_y_zabc', 'testXYZabc')]
    for python_name, jsonc_name in cases:
      self.assertEqual(gdata.core._to_jsonc_name(python_name), jsonc_name)
def build_test_object():
  """Builds a Jsonc object mirroring a slice of VIDEO_EXAMPLE."""
  content = {
      '1': ('rtsp://v5.cache3.c.youtube.com'
            '/CiILENy.../0/0/0/video.3gp')}
  item = gdata.core.Jsonc(
      view_count=220101,
      comment_count=22,
      favorite_count=201,
      content=content)
  data = gdata.core.Jsonc(total_items=800, items=[item])
  return gdata.core.Jsonc(api_version='2.0', data=data)
class JsoncObjectTest(unittest.TestCase):
  """Behavioral tests for Jsonc attribute, alias, and dict-key access."""

  def check_video_json(self, x):
    """Validates a JsoncObject similar to VIDEO_EXAMPLE.

    Shared assertion helper: x may have been built by parsing JSON, by
    attribute assignment, or via the constructor, and must expose the same
    values through the _dict mapping (camelCase keys) and the snake_case
    attribute aliases.
    """
    self.assert_(isinstance(x._dict, dict))
    self.assert_(isinstance(x.data, gdata.core.Jsonc))
    self.assert_(isinstance(x._dict['data'], gdata.core.Jsonc))
    self.assert_(isinstance(x.data._dict, dict))
    self.assert_(isinstance(x._dict['data']._dict, dict))
    self.assert_(isinstance(x._dict['apiVersion'], (str, unicode)))
    self.assert_(isinstance(x.api_version, (str, unicode)))
    self.assert_(isinstance(x.data._dict['items'], list))
    self.assert_(isinstance(x.data.items[0]._dict['commentCount'],
                            (int, long)))
    self.assert_(isinstance(x.data.items[0].favorite_count, (int, long)))
    # Each value must be identical through both access styles.
    self.assertEqual(x.data.total_items, 800)
    self.assertEqual(x._dict['data']._dict['totalItems'], 800)
    self.assertEqual(x.data.items[0].view_count, 220101)
    self.assertEqual(x._dict['data']._dict['items'][0]._dict['viewCount'],
                     220101)
    self.assertEqual(x.data.items[0].comment_count, 22)
    self.assertEqual(x.data.items[0]._dict['commentCount'], 22)
    self.assertEqual(x.data.items[0].favorite_count, 201)
    self.assertEqual(x.data.items[0]._dict['favoriteCount'], 201)
    self.assertEqual(
        x.data.items[0].content._dict['1'],
        'rtsp://v5.cache3.c.youtube.com/CiILENy.../0/0/0/video.3gp')
    self.assertEqual(x.api_version, '2.0')
    self.assertEqual(x.api_version, x._dict['apiVersion'])
  def test_convert_to_jsonc(self):
    """_convert_to_jsonc passes primitives through and wraps dicts."""
    # An int is returned unchanged.
    x = gdata.core._convert_to_jsonc(1)
    self.assert_(isinstance(x, (int, long)))
    self.assertEqual(x, 1)
    # List elements are converted individually; primitives stay as-is.
    x = gdata.core._convert_to_jsonc([1, 'a'])
    self.assert_(isinstance(x, list))
    self.assertEqual(len(x), 2)
    self.assert_(isinstance(x[0], (int, long)))
    self.assertEqual(x[0], 1)
    self.assert_(isinstance(x[1], (str, unicode)))
    self.assertEqual(x[1], 'a')
    # A dict inside a list becomes a Jsonc object.
    x = gdata.core._convert_to_jsonc([{'b': 1}, 'a'])
    self.assert_(isinstance(x, list))
    self.assertEqual(len(x), 2)
    self.assert_(isinstance(x[0], gdata.core.Jsonc))
    self.assertEqual(x[0].b, 1)
  def test_non_json_members(self):
    """Underscore-prefixed members live on the object, not in the JSON."""
    x = gdata.core.Jsonc(alpha=1, _beta=2, deep={'_bbb': 3, 'aaa': 2})
    x.test = 'a'
    x._bar = 'bacon'
    # Should be able to access the _beta member.
    self.assertEqual(x._beta, 2)
    self.assertEqual(getattr(x, '_beta'), 2)
    # NOTE(review): underscore keys inside a nested dict may or may not be
    # exposed as attributes; this block tolerates either outcome.
    try:
      self.assertEqual(getattr(x.deep, '_bbb'), 3)
    except AttributeError:
      pass
    # There should not be a letter 'B' anywhere in the generated JSON.
    self.assertEqual(gdata.core.jsonc_to_string(x).find('B'), -1)
    # We should find a 'b' because we don't consider names of dict keys in
    # the constructor as aliases to camelCase names.
    self.assert_(not gdata.core.jsonc_to_string(x).find('b') == -1)
def test_constructor(self):
x = gdata.core.Jsonc(a=[{'x': 'y'}, 2])
self.assert_(isinstance(x, gdata.core.Jsonc))
self.assert_(isinstance(x.a, list))
self.assert_(isinstance(x.a[0], gdata.core.Jsonc))
self.assertEqual(x.a[0].x, 'y')
self.assertEqual(x.a[1], 2)
def test_read_json(self):
x = gdata.core.parse_json(PLAYLIST_EXAMPLE)
self.assert_(isinstance(x._dict, dict))
self.assertEqual(x._dict['apiVersion'], '2.0')
self.assertEqual(x.api_version, '2.0')
x = gdata.core.parse_json(VIDEO_EXAMPLE)
self.assert_(isinstance(x._dict, dict))
self.assertEqual(x._dict['apiVersion'], '2.0')
self.assertEqual(x.api_version, '2.0')
x = gdata.core.parse_json(VIDEO_EXAMPLE)
self.check_video_json(x)
def test_write_json(self):
x = gdata.core.Jsonc()
x._dict['apiVersion'] = '2.0'
x.data = {'totalItems': 800}
x.data.items = []
x.data.items.append(gdata.core.Jsonc(view_count=220101))
x.data.items[0]._dict['favoriteCount'] = 201
x.data.items[0].comment_count = 22
x.data.items[0].content = {
'1': 'rtsp://v5.cache3.c.youtube.com/CiILENy.../0/0/0/video.3gp'}
self.check_video_json(x)
def test_build_using_contructor(self):
x = build_test_object()
self.check_video_json(x)
  def test_to_dict(self):
    """_convert_to_object yields plain dicts with camelCase keys."""
    x = build_test_object()
    self.assertEqual(
        gdata.core._convert_to_object(x),
        {'data': {'totalItems': 800, 'items': [
            {'content': {
                '1': 'rtsp://v5.cache3.c.youtube.com/CiILENy.../0/0/0/video.3gp'},
             'viewCount': 220101, 'commentCount': 22, 'favoriteCount': 201}]},
         'apiVersion': '2.0'})
def test_try_json_syntax(self):
x = build_test_object()
self.assertEqual(x.data.items[0].commentCount, 22)
x.data.items[0].commentCount = 33
self.assertEqual(x.data.items[0].commentCount, 33)
self.assertEqual(x.data.items[0].comment_count, 33)
self.assertEqual(x.data.items[0]._dict['commentCount'], 33)
def test_to_string(self):
self.check_video_json(
gdata.core.parse_json(
gdata.core.jsonc_to_string(
gdata.core._convert_to_object(
build_test_object()))))
def test_del_attr(self):
x = build_test_object()
self.assertEqual(x.data.items[0].commentCount, 22)
del x.data.items[0].comment_count
try:
x.data.items[0].commentCount
self.fail('Should not be able to access commentCount after deletion')
except AttributeError:
pass
self.assertEqual(x.data.items[0].favorite_count, 201)
del x.data.items[0].favorite_count
try:
x.data.items[0].favorite_count
self.fail('Should not be able to access favorite_count after deletion')
except AttributeError:
pass
try:
x.data.items[0]._dict['favoriteCount']
self.fail('Should not see [\'favoriteCount\'] after deletion')
except KeyError:
pass
self.assertEqual(x.data.items[0].view_count, 220101)
del x.data.items[0]._dict['viewCount']
try:
x.data.items[0].view_count
self.fail('Should not be able to access view_count after deletion')
except AttributeError:
pass
try:
del x.data.missing
self.fail('Should not delete a missing attribute')
except AttributeError:
pass
def test_del_protected_attribute(self):
x = gdata.core.Jsonc(public='x', _private='y')
self.assertEqual(x.public, 'x')
self.assertEqual(x._private, 'y')
self.assertEqual(x['public'], 'x')
try:
x['_private']
self.fail('Should not be able to getitem with _name')
except KeyError:
pass
del x._private
try:
x._private
self.fail('Should not be able to access deleted member')
except AttributeError:
pass
  def test_get_set_del_item(self):
    """Exercises dict-style getitem/setitem/delitem access on Jsonc.

    Item access uses the raw camelCase JSON keys while attribute access
    also accepts snake_case aliases; all views must stay in sync.
    """
    x = build_test_object()
    # Check for expected members using different access patterns.
    self.assert_(isinstance(x._dict, dict))
    self.assert_(isinstance(x['data'], gdata.core.Jsonc))
    self.assert_(isinstance(x._dict['data'], gdata.core.Jsonc))
    self.assert_(isinstance(x['data']._dict, dict))
    self.assert_(isinstance(x._dict['data']._dict, dict))
    # Python 2 text type pair (str, unicode).
    self.assert_(isinstance(x['apiVersion'], (str, unicode)))
    # Item lookup takes the raw JSON key only; the snake_case alias is
    # attribute-access-only.
    try:
      x['api_version']
      self.fail('Should not find using Python style name')
    except KeyError:
      pass
    self.assert_(isinstance(x.data['items'], list))
    # Python 2 integer type pair (int, long).
    self.assert_(isinstance(x.data['items'][0]._dict['commentCount'],
                            (int, long)))
    self.assert_(isinstance(x['data'].items[0]['favoriteCount'], (int, long)))
    self.assertEqual(x['data'].total_items, 800)
    self.assertEqual(x['data']['totalItems'], 800)
    self.assertEqual(x.data['items'][0]['viewCount'], 220101)
    self.assertEqual(x._dict['data'].items[0]._dict['viewCount'],
                     220101)
    self.assertEqual(x['data'].items[0].comment_count, 22)
    self.assertEqual(x.data.items[0]['commentCount'], 22)
    self.assertEqual(x.data.items[0]['favoriteCount'], 201)
    self.assertEqual(x.data.items[0]._dict['favoriteCount'], 201)
    self.assertEqual(
        x.data.items[0].content['1'],
        'rtsp://v5.cache3.c.youtube.com/CiILENy.../0/0/0/video.3gp')
    self.assertEqual(
        x.data.items[0]['content']['1'],
        'rtsp://v5.cache3.c.youtube.com/CiILENy.../0/0/0/video.3gp')
    self.assertEqual(x.api_version, '2.0')
    self.assertEqual(x['apiVersion'], x._dict['apiVersion'])
    # Set properties using setitem
    x['apiVersion'] = '3.2'
    self.assertEqual(x.api_version, '3.2')
    x.data['totalItems'] = 500
    self.assertEqual(x['data'].total_items, 500)
    self.assertEqual(x['data'].items[0].favoriteCount, 201)
    # Deleting a key that does not exist must raise KeyError.
    try:
      del x['data']['favoriteCount']
      self.fail('Should not be able to delete missing item')
    except KeyError:
      pass
    # Deleting via delitem must also remove the attribute alias.
    del x.data['items'][0]['favoriteCount']
    try:
      x['data'].items[0].favoriteCount
      self.fail('Should not find favoriteCount removed using del item')
    except AttributeError:
      pass
# Aggregate this module's test cases for the shared test runner.
# NOTE(review): `conf` must be imported earlier in this module (not visible
# in this chunk) — confirm.
def suite():
  return conf.build_suite([JsoncConversionTest, MemberNameConversionTest,
                           JsoncObjectTest])
# Run this module's tests when executed directly.
if __name__ == '__main__':
  unittest.main()
| Python |
#!/usr/bin/env python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module is used for version 2 of the Google Data APIs.
__author__ = 'j.s@google.com (Jeff Scudder)'
import unittest
import sys
import gdata.sample_util
class SettingsUtilTest(unittest.TestCase):
  """Tests parameter lookup in gdata.sample_util.SettingsUtil."""

  def setUp(self):
    self.settings = gdata.sample_util.SettingsUtil()

  def test_get_param(self):
    """Values stored in the prefs dict are returned by get_param."""
    settings = self.settings
    self.assert_(settings.get_param('missing', ask=False) is None)
    settings.prefs['x'] = 'something'
    self.assertEqual(settings.get_param('x'), 'something')

  def test_get_param_from_command_line_arg(self):
    """Both --name=value and --name value forms are read from sys.argv.

    Only a lookup with reuse=True caches the value into prefs. Note this
    test appends to the process-wide sys.argv.
    """
    settings = self.settings
    self.assert_('x' not in settings.prefs)
    self.assert_(settings.get_param('x', ask=False) is None)
    # --name=value form; not cached without reuse=True.
    sys.argv.append('--x=something')
    self.assertEqual(settings.get_param('x'), 'something')
    self.assert_('x' not in settings.prefs)
    self.assert_('y' not in settings.prefs)
    self.assert_(settings.get_param('y', ask=False) is None)
    # --name value form; reuse=True stores the result in prefs.
    sys.argv.append('--y')
    sys.argv.append('other')
    self.assertEqual(settings.get_param('y', reuse=True), 'other')
    self.assertEqual(settings.prefs['y'], 'other')
# Aggregate this module's test cases for the shared test runner.
# NOTE(review): `conf` is not among this module's imports (unittest, sys,
# gdata.sample_util) — calling suite() as written would raise NameError;
# confirm the runner provides it or add the import.
def suite():
  return conf.build_suite([SettingsUtilTest])
# Run this module's tests when executed directly.
if __name__ == '__main__':
  unittest.main()
| Python |
#!/usr/bin/python
#
# Copyright (C) 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'api.jscudder (Jeffrey Scudder)'
import unittest
import gdata.codesearch
import gdata.test_data
class CodeSearchDataTest(unittest.TestCase):
  """XML-to-object conversion tests for the Code Search feed.

  Idiom fix: assert_(a == b) replaced with assertEqual, which reports
  both values on failure instead of a bare AssertionError.
  """

  def setUp(self):
    self.feed = gdata.codesearch.CodesearchFeedFromString(
        gdata.test_data.CODE_SEARCH_FEED)

  def testCorrectXmlConversion(self):
    self.assertEqual(self.feed.id.text,
        'http://www.google.com/codesearch/feeds/search?q=malloc')
    self.assertEqual(len(self.feed.entry), 10)
    for entry in self.feed.entry:
      # Only one known entry in the canned feed gets the detailed checks.
      if entry.id.text == ('http://www.google.com/codesearch?hl=en&q=+ma'
          'lloc+show:LDjwp-Iqc7U:84hEYaYsZk8:xDGReDhvNi0&sa=N&ct=rx&cd=1'
          '&cs_p=http://www.gnu.org&cs_f=software/autoconf/manual/autoco'
          'nf-2.60/autoconf.html-002&cs_p=http://www.gnu.org&cs_f=softwa'
          're/autoconf/manual/autoconf-2.60/autoconf.html-002#first'):
        self.assertEqual(len(entry.match), 4)
        for match in entry.match:
          if match.line_number == '4':
            self.assertEqual(match.type, 'text/html')
        self.assertEqual(entry.file.name,
            'software/autoconf/manual/autoconf-2.60/autoconf.html-002')
        self.assertEqual(entry.package.name, 'http://www.gnu.org')
        self.assertEqual(entry.package.uri, 'http://www.gnu.org')
# Run this module's tests when executed directly.
if __name__ == '__main__':
  unittest.main()
| Python |
#!/usr/bin/python
#
# Copyright (C) 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'api.jscudder (Jeff Scudder)'
import unittest
from gdata import test_data
import gdata.blogger
import atom
class BlogEntryTest(unittest.TestCase):
  """XML-to-object conversion tests for Blogger entries and feeds.

  Idiom fix: the deprecated unittest aliases assertEquals/assert_ are
  replaced with assertEqual/assertTrue.
  """

  def testBlogEntryFromString(self):
    entry = gdata.blogger.BlogEntryFromString(test_data.BLOG_ENTRY)
    self.assertEqual(entry.GetBlogName(), 'blogName')
    self.assertEqual(entry.GetBlogId(), 'blogID')
    self.assertEqual(entry.title.text, 'Lizzy\'s Diary')

  def testBlogPostFeedFromString(self):
    feed = gdata.blogger.BlogPostFeedFromString(test_data.BLOG_POSTS_FEED)
    self.assertEqual(len(feed.entry), 1)
    self.assertTrue(isinstance(feed, gdata.blogger.BlogPostFeed))
    self.assertTrue(isinstance(feed.entry[0], gdata.blogger.BlogPostEntry))
    self.assertEqual(feed.entry[0].GetPostId(), 'postID')
    self.assertEqual(feed.entry[0].GetBlogId(), 'blogID')
    self.assertEqual(feed.entry[0].title.text, 'Quite disagreeable')

  def testCommentFeedFromString(self):
    feed = gdata.blogger.CommentFeedFromString(test_data.BLOG_COMMENTS_FEED)
    self.assertEqual(len(feed.entry), 1)
    self.assertTrue(isinstance(feed, gdata.blogger.CommentFeed))
    self.assertTrue(isinstance(feed.entry[0], gdata.blogger.CommentEntry))
    self.assertEqual(feed.entry[0].GetBlogId(), 'blogID')
    self.assertEqual(feed.entry[0].GetCommentId(), 'commentID')
    self.assertEqual(feed.entry[0].title.text, 'This is my first comment')
    self.assertEqual(feed.entry[0].in_reply_to.source,
        'http://blogName.blogspot.com/feeds/posts/default/postID')
    self.assertEqual(feed.entry[0].in_reply_to.ref,
        'tag:blogger.com,1999:blog-blogID.post-postID')
    self.assertEqual(feed.entry[0].in_reply_to.href,
        'http://blogName.blogspot.com/2007/04/first-post.html')
    self.assertEqual(feed.entry[0].in_reply_to.type, 'text/html')

  def testIdParsing(self):
    # GetBlogId must cope with both user-qualified and bare blog ids.
    entry = gdata.blogger.BlogEntry()
    entry.id = atom.Id(
        text='tag:blogger.com,1999:user-146606542.blog-4023408167658848')
    self.assertEqual(entry.GetBlogId(), '4023408167658848')
    entry.id = atom.Id(text='tag:blogger.com,1999:blog-4023408167658848')
    self.assertEqual(entry.GetBlogId(), '4023408167658848')
class InReplyToTest(unittest.TestCase):
  """Round-trip (object -> XML string -> object) test for InReplyTo.

  Idiom fix: the deprecated assertEquals alias is replaced by assertEqual.
  """

  def testToAndFromString(self):
    in_reply_to = gdata.blogger.InReplyTo(href='http://example.com/href',
        ref='http://example.com/ref', source='http://example.com/my_post',
        type='text/html')
    xml_string = str(in_reply_to)
    parsed = gdata.blogger.InReplyToFromString(xml_string)
    self.assertEqual(parsed.source, in_reply_to.source)
    self.assertEqual(parsed.href, in_reply_to.href)
    self.assertEqual(parsed.ref, in_reply_to.ref)
    self.assertEqual(parsed.type, in_reply_to.type)
class CommentEntryTest(unittest.TestCase):
  """Round-trip test for CommentEntry with an in-reply-to link.

  Idiom fix: the deprecated assertEquals alias is replaced by assertEqual.
  """

  def testToAndFromString(self):
    comment = gdata.blogger.CommentEntry(content=atom.Content(text='Nifty!'),
        in_reply_to=gdata.blogger.InReplyTo(
            source='http://example.com/my_post'))
    parsed = gdata.blogger.CommentEntryFromString(str(comment))
    self.assertEqual(parsed.in_reply_to.source, comment.in_reply_to.source)
    self.assertEqual(parsed.content.text, comment.content.text)
# Run this module's tests when executed directly.
if __name__ == '__main__':
  unittest.main()
| Python |
#!/usr/bin/python
#
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Content API for Shopping tests"""
__author__ = 'afshar (Ali Afshar)'
import unittest
from gdata.contentforshopping import client, data
class CFSClientTest(unittest.TestCase):
  """URI-construction tests for ContentForShoppingClient._create_uri.

  Covers account-id defaulting and per-request overriding, projection
  defaulting/validation, and extra path-segment appending.
  """

  def test_uri_missing_account_id(self):
    # No client default and no per-request account id -> ValueError.
    c = client.ContentForShoppingClient()
    self.assertRaises(ValueError, c._create_uri,
        account_id=None, projection=None, resource='a/b')

  def test_uri_bad_projection(self):
    # Unknown projection names are rejected.
    c = client.ContentForShoppingClient()
    self.assertRaises(ValueError, c._create_uri,
        account_id='123', projection='banana', resource='a/b')

  def test_good_default_account_id(self):
    c = client.ContentForShoppingClient(account_id='123')
    uri = c._create_uri(account_id=None, projection=None, resource='a/b')
    self.assertEqual(uri,
        'https://content.googleapis.com/content/v1/123/a/b/generic')

  def test_override_request_account_id(self):
    # A per-request account id wins over the client-level default.
    c = client.ContentForShoppingClient(account_id='123')
    uri = c._create_uri(account_id='321', projection=None, resource='a/b')
    self.assertEqual(uri,
        'https://content.googleapis.com/content/v1/321/a/b/generic')

  def test_default_projection(self):
    # The built-in default projection is 'generic'.
    c = client.ContentForShoppingClient(account_id='123')
    uri = c._create_uri(account_id=None, projection=None, resource='a/b')
    self.assertEqual(c.cfs_projection, 'generic')
    self.assertEqual(uri,
        'https://content.googleapis.com/content/v1/123/a/b/generic')

  def test_default_projection_change(self):
    # A client-level projection changes the default used for requests.
    c = client.ContentForShoppingClient(account_id='123', projection='schema')
    uri = c._create_uri(account_id=None, projection=None, resource='a/b')
    self.assertEqual(c.cfs_projection, 'schema')
    self.assertEqual(uri,
        'https://content.googleapis.com/content/v1/123/a/b/schema')

  def test_request_projection(self):
    # A per-request projection applies without touching the client default.
    c = client.ContentForShoppingClient(account_id='123')
    uri = c._create_uri(account_id=None, projection='schema', resource='a/b')
    self.assertEqual(c.cfs_projection, 'generic')
    self.assertEqual(uri,
        'https://content.googleapis.com/content/v1/123/a/b/schema')

  def test_request_resource(self):
    # Multi-segment resources are inserted verbatim.
    c = client.ContentForShoppingClient(account_id='123')
    uri = c._create_uri(account_id=None, projection=None, resource='x/y/z')
    self.assertEqual(uri,
        'https://content.googleapis.com/content/v1/123/x/y/z/generic')

  def test_path_single(self):
    # Extra path segments are appended after the projection.
    c = client.ContentForShoppingClient(account_id='123')
    uri = c._create_uri(account_id=None, projection=None, resource='r',
        path=['1'])
    self.assertEqual(uri,
        'https://content.googleapis.com/content/v1/123/r/generic/1')

  def test_path_multiple(self):
    c = client.ContentForShoppingClient(account_id='123')
    uri = c._create_uri(account_id=None, projection=None, resource='r',
        path=['1', '2'])
    self.assertEqual(uri,
        'https://content.googleapis.com/content/v1/123/r/generic/1/2')
# Run this module's tests when executed directly.
if __name__ == '__main__':
  unittest.main()
| Python |
#!/usr/bin/env python
#
# Copyright (C) 2007, 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'api.jscudder (Jeff Scudder)'
import re
import unittest
import urllib
import gdata.auth
# Fake OAuth consumer credentials shared by the test cases below.
CONSUMER_KEY = 'www.yourwebapp.com'
CONSUMER_SECRET = 'qB1P2kCFDpRjF+/Iww4'
# Throwaway RSA private key (PEM) used for RSA-SHA1 signature tests; it is
# test fixture data, not a real secret.
RSA_KEY = """-----BEGIN RSA PRIVATE KEY-----
MIICXAIBAAKBgQDVbOaFW+KXecfFJn1PIzYHnNXFxhaQ36QM0K5uSb0Y8NeQUlD2
6t8aKgnm6mcb4vaopHjjdIGWgAzM5Dt0oPIiDXo+jSQbvCIXRduuAt+0cFGb2d+L
hALk4AwB8IVIkDJWwgo5Z2OLsP2r/wQlUYKm/tnvQaevK24jNYMLWVJl2QIDAQAB
AoGAU93ERBlUVEPFjaJPUX67p4gotNvfWDSZiXOjZ7FQPnG9s3e1WyH2Y5irZXMs
61dnp+NhobfRiGtvHEB/YJgyLRk/CJDnMKslo95e7o65IE9VkcyY6Yvt7YTslsRX
Eu7T0xLEA7ON46ypCwNLeWxpJ9SWisEKu2yZJnWauCXEsgUCQQD7b2ZuhGx3msoP
YEnwvucp0UxneCvb68otfERZ1J6NfNP47QJw6OwD3r1sWCJ27QZmpvtQH1f8sCk9
t22anGG7AkEA2UzXdtQ8H1uLAN/XXX2qoLuvJK5jRswHS4GeOg4pnnDSiHg3Vbva
AxmMIL93ufvIy/xdoENwDPfcI4CbYlrDewJAGWy7W+OSIEoLsqBW+bwkHetnIXNa
ZAOkzxKoyrigS8hamupEe+xhqUaFuwXyfjobkpfCA+kXeZrKoM4CjEbR7wJAHMbf
Vd4/ZAu0edYq6DenLAgO5rWtcge9A5PTx25utovMZcQ917273mM4unGAwoGEkvcF
0x57LUx5u73hVgIdFwJBAKWGuHRwGPgTWYvhpHM0qveH+8KdU9BUt/kV4ONxIVDB
ftetEmJirqOGLECbImoLcUwQrgfMW4ZCxOioJMz/gY0=
-----END RSA PRIVATE KEY-----
"""
class AuthModuleUtilitiesTest(unittest.TestCase):
  """Tests for the ClientLogin helper functions in gdata.auth.

  Idiom fix: assert_(a == b) replaced with assertEqual for informative
  failure messages.
  """

  def testGenerateClientLoginRequestBody(self):
    body = gdata.auth.GenerateClientLoginRequestBody('jo@gmail.com',
        'password', 'test service', 'gdata.auth test')
    expected_parameters = {'Email':r'jo%40gmail.com', 'Passwd':'password',
        'service':'test+service', 'source':'gdata.auth+test',
        'accountType':'HOSTED_OR_GOOGLE'}
    self.__matchBody(body, expected_parameters)
    # Optional account-type and captcha arguments must be form-encoded too.
    body = gdata.auth.GenerateClientLoginRequestBody('jo@gmail.com',
        'password', 'test service', 'gdata.auth test', account_type='A TEST',
        captcha_token='12345', captcha_response='test')
    expected_parameters['accountType'] = 'A+TEST'
    expected_parameters['logintoken'] = '12345'
    expected_parameters['logincaptcha'] = 'test'
    self.__matchBody(body, expected_parameters)

  def __matchBody(self, body, expected_name_value_pairs):
    # Assert every name=value pair in the encoded body matches the mapping.
    for param in body.split('&'):
      (name, value) = param.split('=')
      self.assertEqual(expected_name_value_pairs[name], value)

  def testGenerateClientLoginAuthToken(self):
    http_body = ('SID=DQAAAGgA7Zg8CTN\r\n'
                 'LSID=DQAAAGsAlk8BBbG\r\n'
                 'Auth=DQAAAGgAdk3fA5N')
    self.assertEqual(gdata.auth.GenerateClientLoginAuthToken(http_body),
                     'GoogleLogin auth=DQAAAGgAdk3fA5N')
class GenerateClientLoginRequestBodyTest(unittest.TestCase):
  """Checks the ClientLogin POST body against the documented examples."""

  def testPostBodyShouldMatchShortExample(self):
    body = gdata.auth.GenerateClientLoginRequestBody('johndoe@gmail.com',
        'north23AZ', 'cl', 'Gulp-CalGulp-1.05')
    # Every required parameter must appear url-encoded in the body.
    for fragment in ('Email=johndoe%40gmail.com', 'Passwd=north23AZ',
                     'service=cl', 'source=Gulp-CalGulp-1.05'):
      self.assert_(body.find(fragment) > -1)

  def testPostBodyShouldMatchLongExample(self):
    body = gdata.auth.GenerateClientLoginRequestBody('johndoe@gmail.com',
        'north23AZ', 'cl', 'Gulp-CalGulp-1.05',
        captcha_token='DQAAAGgA...dkI1', captcha_response='brinmar')
    self.assert_(body.find('logintoken=DQAAAGgA...dkI1') > -1)
    self.assert_(body.find('logincaptcha=brinmar') > -1)

  def testEquivalenceWithOldLogic(self):
    """The generated body must contain every urlencode-produced pair."""
    email = 'jo@gmail.com'
    password = 'password'
    account_type = 'HOSTED'
    service = 'test'
    source = 'auth test'
    reference_body = urllib.urlencode({'Email': email,
                                       'Passwd': password,
                                       'accountType': account_type,
                                       'service': service,
                                       'source': source})
    generated_body = gdata.auth.GenerateClientLoginRequestBody(email,
        password, service, source, account_type=account_type)
    for parameter in reference_body.split('&'):
      self.assert_(generated_body.find(parameter) > -1)
class GenerateAuthSubUrlTest(unittest.TestCase):
  """Checks the AuthSub request URL's scope/next/secure/session params."""

  def testDefaultParameters(self):
    url = gdata.auth.GenerateAuthSubUrl('http://example.com/xyz?x=5',
        'http://www.google.com/test/feeds')
    self.assert_(url.find(
        r'scope=http%3A%2F%2Fwww.google.com%2Ftest%2Ffeeds') > -1)
    self.assert_(url.find(
        r'next=http%3A%2F%2Fexample.com%2Fxyz%3Fx%3D5') > -1)
    # Defaults: insecure token, session token.
    self.assert_(url.find('secure=0') > -1)
    self.assert_(url.find('session=1') > -1)

  def testAllParameters(self):
    url = gdata.auth.GenerateAuthSubUrl('http://example.com/xyz?x=5',
        'http://www.google.com/test/feeds', secure=True, session=False,
        request_url='https://example.com/auth')
    self.assert_(url.find(
        r'scope=http%3A%2F%2Fwww.google.com%2Ftest%2Ffeeds') > -1)
    self.assert_(url.find(
        r'next=http%3A%2F%2Fexample.com%2Fxyz%3Fx%3D5') > -1)
    self.assert_(url.find('secure=1') > -1)
    self.assert_(url.find('session=0') > -1)
    # A custom endpoint replaces the default Google one.
    self.assert_(url.startswith('https://example.com/auth'))
class GenerateOAuthRequestTokenUrlTest(unittest.TestCase):
  """Tests request-token URL generation for RSA and HMAC signing."""

  def testDefaultParameters(self):
    # RSA-SHA1 signing against the default Google endpoint.
    oauth_input_params = gdata.auth.OAuthInputParams(
        gdata.auth.OAuthSignatureMethod.RSA_SHA1, CONSUMER_KEY,
        rsa_key=RSA_KEY)
    scopes = [
        'http://abcd.example.com/feeds',
        'http://www.example.com/abcd/feeds'
    ]
    url = gdata.auth.GenerateOAuthRequestTokenUrl(
        oauth_input_params, scopes=scopes)
    self.assertEquals('https', url.protocol)
    self.assertEquals('www.google.com', url.host)
    self.assertEquals('/accounts/OAuthGetRequestToken', url.path)
    self.assertEquals('1.0', url.params['oauth_version'])
    self.assertEquals('RSA-SHA1', url.params['oauth_signature_method'])
    # Nonce, timestamp and signature vary per call; only assert presence.
    self.assert_(url.params['oauth_nonce'])
    self.assert_(url.params['oauth_timestamp'])
    # Scopes are space-delimited in a single query parameter.
    actual_scopes = url.params['scope'].split(' ')
    self.assertEquals(2, len(actual_scopes))
    for scope in actual_scopes:
      self.assert_(scope in scopes)
    self.assertEquals(CONSUMER_KEY, url.params['oauth_consumer_key'])
    self.assert_(url.params['oauth_signature'])

  def testAllParameters(self):
    # HMAC-SHA1 signing with a custom endpoint; extra_parameters both
    # override a default (oauth_version) and add a new param (my_param).
    oauth_input_params = gdata.auth.OAuthInputParams(
        gdata.auth.OAuthSignatureMethod.HMAC_SHA1, CONSUMER_KEY,
        consumer_secret=CONSUMER_SECRET)
    scopes = ['http://abcd.example.com/feeds']
    url = gdata.auth.GenerateOAuthRequestTokenUrl(
        oauth_input_params, scopes=scopes,
        request_token_url='https://www.example.com/accounts/OAuthRequestToken',
        extra_parameters={'oauth_version': '2.0', 'my_param': 'my_value'})
    self.assertEquals('https', url.protocol)
    self.assertEquals('www.example.com', url.host)
    self.assertEquals('/accounts/OAuthRequestToken', url.path)
    self.assertEquals('2.0', url.params['oauth_version'])
    self.assertEquals('HMAC-SHA1', url.params['oauth_signature_method'])
    self.assert_(url.params['oauth_nonce'])
    self.assert_(url.params['oauth_timestamp'])
    actual_scopes = url.params['scope'].split(' ')
    self.assertEquals(1, len(actual_scopes))
    for scope in actual_scopes:
      self.assert_(scope in scopes)
    self.assertEquals(CONSUMER_KEY, url.params['oauth_consumer_key'])
    self.assert_(url.params['oauth_signature'])
    self.assertEquals('my_value', url.params['my_param'])
class GenerateOAuthAuthorizationUrlTest(unittest.TestCase):
  """Tests user-authorization URL generation from a request token."""

  def testDefaultParameters(self):
    token_key = 'ABCDDSFFDSG'
    token_secret = 'SDFDSGSDADADSAF'
    request_token = gdata.auth.OAuthToken(key=token_key, secret=token_secret)
    url = gdata.auth.GenerateOAuthAuthorizationUrl(request_token)
    self.assertEquals('https', url.protocol)
    self.assertEquals('www.google.com', url.host)
    self.assertEquals('/accounts/OAuthAuthorizeToken', url.path)
    self.assertEquals(token_key, url.params['oauth_token'])

  def testAllParameters(self):
    token_key = 'ABCDDSFFDSG'
    token_secret = 'SDFDSGSDADADSAF'
    scopes = [
        'http://abcd.example.com/feeds',
        'http://www.example.com/abcd/feeds'
    ]
    request_token = gdata.auth.OAuthToken(key=token_key, secret=token_secret,
        scopes=scopes)
    url = gdata.auth.GenerateOAuthAuthorizationUrl(
        request_token,
        authorization_url='https://www.example.com/accounts/OAuthAuthToken',
        callback_url='http://www.yourwebapp.com/print',
        extra_params={'permission': '1'},
        include_scopes_in_callback=True, scopes_param_prefix='token_scope')
    self.assertEquals('https', url.protocol)
    self.assertEquals('www.example.com', url.host)
    self.assertEquals('/accounts/OAuthAuthToken', url.path)
    self.assertEquals(token_key, url.params['oauth_token'])
    # Scopes are folded into the callback URL, percent-encoded and joined
    # with '+', under the requested token_scope prefix.
    expected_callback_url = ('http://www.yourwebapp.com/print?'
        'token_scope=http%3A%2F%2Fabcd.example.com%2Ffeeds'
        '+http%3A%2F%2Fwww.example.com%2Fabcd%2Ffeeds')
    self.assertEquals(expected_callback_url, url.params['oauth_callback'])
class GenerateOAuthAccessTokenUrlTest(unittest.TestCase):
  """Tests access-token URL generation from an authorized request token."""

  def testDefaultParameters(self):
    # HMAC-SHA1 signing against the default Google endpoint.
    token_key = 'ABCDDSFFDSG'
    token_secret = 'SDFDSGSDADADSAF'
    authorized_request_token = gdata.auth.OAuthToken(key=token_key,
        secret=token_secret)
    oauth_input_params = gdata.auth.OAuthInputParams(
        gdata.auth.OAuthSignatureMethod.HMAC_SHA1, CONSUMER_KEY,
        consumer_secret=CONSUMER_SECRET)
    url = gdata.auth.GenerateOAuthAccessTokenUrl(authorized_request_token,
        oauth_input_params)
    self.assertEquals('https', url.protocol)
    self.assertEquals('www.google.com', url.host)
    self.assertEquals('/accounts/OAuthGetAccessToken', url.path)
    self.assertEquals(token_key, url.params['oauth_token'])
    self.assertEquals('1.0', url.params['oauth_version'])
    self.assertEquals('HMAC-SHA1', url.params['oauth_signature_method'])
    # Nonce, timestamp and signature vary per call; only assert presence.
    self.assert_(url.params['oauth_nonce'])
    self.assert_(url.params['oauth_timestamp'])
    self.assertEquals(CONSUMER_KEY, url.params['oauth_consumer_key'])
    self.assert_(url.params['oauth_signature'])

  def testAllParameters(self):
    # RSA-SHA1 with a custom endpoint and an overridden oauth_version.
    token_key = 'ABCDDSFFDSG'
    authorized_request_token = gdata.auth.OAuthToken(key=token_key)
    oauth_input_params = gdata.auth.OAuthInputParams(
        gdata.auth.OAuthSignatureMethod.RSA_SHA1, CONSUMER_KEY,
        rsa_key=RSA_KEY)
    url = gdata.auth.GenerateOAuthAccessTokenUrl(
        authorized_request_token, oauth_input_params,
        access_token_url='https://www.example.com/accounts/OAuthGetAccessToken',
        oauth_version= '2.0')
    self.assertEquals('https', url.protocol)
    self.assertEquals('www.example.com', url.host)
    self.assertEquals('/accounts/OAuthGetAccessToken', url.path)
    self.assertEquals(token_key, url.params['oauth_token'])
    self.assertEquals('2.0', url.params['oauth_version'])
    self.assertEquals('RSA-SHA1', url.params['oauth_signature_method'])
    self.assert_(url.params['oauth_nonce'])
    self.assert_(url.params['oauth_timestamp'])
    self.assertEquals(CONSUMER_KEY, url.params['oauth_consumer_key'])
    self.assert_(url.params['oauth_signature'])
class ExtractAuthSubTokensTest(unittest.TestCase):
  """Tests AuthSub token extraction from redirect URLs and HTTP bodies.

  Idiom fix: assert_(a == b) replaced with assertEqual for informative
  failure messages.
  """

  def testGetTokenFromUrl(self):
    url = 'http://www.yourwebapp.com/showcalendar.html?token=CKF50YzIH'
    self.assertEqual(gdata.auth.AuthSubTokenFromUrl(url),
                     'AuthSub token=CKF50YzIH')
    self.assertEqual(gdata.auth.TokenFromUrl(url), 'CKF50YzIH')
    # A token value may itself begin with or contain '=' characters.
    url = 'http://www.yourwebapp.com/showcalendar.html?token==tokenCKF50YzIH='
    self.assertEqual(gdata.auth.AuthSubTokenFromUrl(url),
                     'AuthSub token==tokenCKF50YzIH=')
    self.assertEqual(gdata.auth.TokenFromUrl(url), '=tokenCKF50YzIH=')

  def testGetTokenFromHttpResponse(self):
    response_body = ('Token=DQAA...7DCTN\r\n'
                     'Expiration=20061004T123456Z')
    self.assertEqual(gdata.auth.AuthSubTokenFromHttpBody(response_body),
                     'AuthSub token=DQAA...7DCTN')
class CreateAuthSubTokenFlowTest(unittest.TestCase):
  """End-to-end AuthSub flow: request URL generation and next-URL parsing."""

  def testGenerateRequest(self):
    request_url = gdata.auth.generate_auth_sub_url(next='http://example.com',
        scopes=['http://www.blogger.com/feeds/',
                'http://www.google.com/base/feeds/'])
    self.assertEquals(request_url.protocol, 'https')
    self.assertEquals(request_url.host, 'www.google.com')
    self.assertEquals(request_url.params['scope'],
        'http://www.blogger.com/feeds/ http://www.google.com/base/feeds/')
    self.assertEquals(request_url.params['hd'], 'default')
    # The requested scopes are echoed into the 'next' URL so they can be
    # recovered after the redirect.
    self.assert_(request_url.params['next'].find('auth_sub_scopes') > -1)
    self.assert_(request_url.params['next'].startswith('http://example.com'))
    # Use a more complicated 'next' URL.
    request_url = gdata.auth.generate_auth_sub_url(
        next='http://example.com/?token_scope=http://www.blogger.com/feeds/',
        scopes=['http://www.blogger.com/feeds/',
                'http://www.google.com/base/feeds/'])
    self.assert_(request_url.params['next'].find('auth_sub_scopes') > -1)
    self.assert_(request_url.params['next'].find('token_scope') > -1)
    self.assert_(request_url.params['next'].startswith('http://example.com/'))

  def testParseNextUrl(self):
    url = ('http://example.com/?auth_sub_scopes=http%3A%2F%2Fwww.blogger.com'
           '%2Ffeeds%2F+http%3A%2F%2Fwww.google.com%2Fbase%2Ffeeds%2F&'
           'token=my_nifty_token')
    token = gdata.auth.extract_auth_sub_token_from_url(url)
    self.assertEquals(token.get_token_string(), 'my_nifty_token')
    self.assert_(isinstance(token, gdata.auth.AuthSubToken))
    self.assert_(token.valid_for_scope('http://www.blogger.com/feeds/'))
    self.assert_(token.valid_for_scope('http://www.google.com/base/feeds/'))
    self.assert_(
        not token.valid_for_scope('http://www.google.com/calendar/feeds/'))
    # Parse a more complicated response.
    url = ('http://example.com/?auth_sub_scopes=http%3A%2F%2Fwww.blogger.com'
           '%2Ffeeds%2F+http%3A%2F%2Fwww.google.com%2Fbase%2Ffeeds%2F&'
           'token_scope=http%3A%2F%2Fwww.blogger.com%2Ffeeds%2F&'
           'token=second_token')
    token = gdata.auth.extract_auth_sub_token_from_url(url)
    self.assertEquals(token.get_token_string(), 'second_token')
    self.assert_(isinstance(token, gdata.auth.AuthSubToken))
    self.assert_(token.valid_for_scope('http://www.blogger.com/feeds/'))
    self.assert_(token.valid_for_scope('http://www.google.com/base/feeds/'))
    self.assert_(
        not token.valid_for_scope('http://www.google.com/calendar/feeds/'))

  def testParseNextWithNoToken(self):
    # URLs with no token parameter yield None rather than raising.
    token = gdata.auth.extract_auth_sub_token_from_url('http://example.com/')
    self.assert_(token is None)
    token = gdata.auth.extract_auth_sub_token_from_url(
        'http://example.com/?no_token=foo&other=1')
    self.assert_(token is None)
class ExtractClientLoginTokenTest(unittest.TestCase):
  """Tests extract_client_login_token and its scope handling."""

  def testExtractFromBodyWithScopes(self):
    response = ('SID=DQAAAGgA7Zg8CTN\r\n'
                'LSID=DQAAAGsAlk8BBbG\r\n'
                'Auth=DQAAAGgAdk3fA5N')
    token = gdata.auth.extract_client_login_token(
        response, ['http://docs.google.com/feeds/'])
    # Only the Auth= line becomes the token; scopes come from the argument.
    self.assert_(isinstance(token, gdata.auth.ClientLoginToken))
    self.assertEquals(token.get_token_string(), 'DQAAAGgAdk3fA5N')
    self.assert_(token.valid_for_scope('http://docs.google.com/feeds/'))
    self.assert_(not token.valid_for_scope('http://www.blogger.com/feeds'))
class ExtractOAuthTokensTest(unittest.TestCase):
  """Tests OAuth token extraction from callback URLs and HTTP bodies."""

  def testOAuthTokenFromUrl(self):
    scope_1 = 'http://docs.google.com/feeds/'
    scope_2 = 'http://www.blogger.com/feeds/'
    # Case 1: token and scopes both are present.
    url = ('http://dummy.com/?oauth_token_scope=http%3A%2F%2Fwww.blogger.com'
           '%2Ffeeds%2F+http%3A%2F%2Fdocs.google.com%2Ffeeds%2F&'
           'oauth_token=CMns6t7MCxDz__8B')
    token = gdata.auth.OAuthTokenFromUrl(url)
    self.assertEquals('CMns6t7MCxDz__8B', token.key)
    self.assertEquals(2, len(token.scopes))
    self.assert_(scope_1 in token.scopes)
    self.assert_(scope_2 in token.scopes)
    # Case 2: token and scopes both are present but scope_param_prefix
    # passed does not match the one present in the URL, so no scopes are
    # recovered.
    url = ('http://dummy.com/?oauth_token_scope=http%3A%2F%2Fwww.blogger.com'
           '%2Ffeeds%2F+http%3A%2F%2Fdocs.google.com%2Ffeeds%2F&'
           'oauth_token=CMns6t7MCxDz__8B')
    token = gdata.auth.OAuthTokenFromUrl(url,
        scopes_param_prefix='token_scope')
    self.assertEquals('CMns6t7MCxDz__8B', token.key)
    self.assert_(not token.scopes)
    # Case 3: None present -> extraction yields None.
    url = ('http://dummy.com/?no_oauth_token_scope=http%3A%2F%2Fwww.blogger.com'
           '%2Ffeeds%2F+http%3A%2F%2Fdocs.google.com%2Ffeeds%2F&'
           'no_oauth_token=CMns6t7MCxDz__8B')
    token = gdata.auth.OAuthTokenFromUrl(url)
    self.assert_(token is None)

  def testOAuthTokenFromHttpBody(self):
    token_key = 'ABCD'
    token_secret = 'XYZ'
    # Case 1: token key and secret both present single time.
    http_body = 'oauth_token=%s&oauth_token_secret=%s' % (token_key,
        token_secret)
    token = gdata.auth.OAuthTokenFromHttpBody(http_body)
    self.assertEquals(token_key, token.key)
    self.assertEquals(token_secret, token.secret)
class OAuthInputParametersTest(unittest.TestCase):
  """Tests OAuthInputParams signature-method and consumer accessors."""

  def setUp(self):
    self.hmac_params = gdata.auth.OAuthInputParams(
        gdata.auth.OAuthSignatureMethod.HMAC_SHA1, CONSUMER_KEY,
        consumer_secret=CONSUMER_SECRET)
    self.rsa_params = gdata.auth.OAuthInputParams(
        gdata.auth.OAuthSignatureMethod.RSA_SHA1, CONSUMER_KEY,
        rsa_key=RSA_KEY)

  def testGetSignatureMethod(self):
    """Signature-method objects report their OAuth names and the RSA key."""
    hmac_method = self.hmac_params.GetSignatureMethod()
    self.assertEquals('HMAC-SHA1', hmac_method.get_name())
    rsa_method = self.rsa_params.GetSignatureMethod()
    self.assertEquals('RSA-SHA1', rsa_method.get_name())
    self.assertEquals(RSA_KEY, rsa_method._fetch_private_cert(None))

  def testGetConsumer(self):
    """Both consumers carry the key; only the HMAC one has a secret."""
    self.assertEquals(CONSUMER_KEY, self.hmac_params.GetConsumer().key)
    self.assertEquals(CONSUMER_KEY, self.rsa_params.GetConsumer().key)
    self.assertEquals(CONSUMER_SECRET,
                      self.hmac_params.GetConsumer().secret)
    self.assert_(self.rsa_params.GetConsumer().secret is None)
class TokenClassesTest(unittest.TestCase):
  """Round-trips each auth token class through its string form and checks
  the Authorization headers the tokens produce."""

  def testClientLoginToAndFromString(self):
    token = gdata.auth.ClientLoginToken()
    token.set_token_string('foo')
    self.assertEquals(token.get_token_string(), 'foo')
    # Header value is the ClientLogin label with the raw token appended.
    self.assertEquals(token.auth_header, '%s%s' % (
        gdata.auth.PROGRAMMATIC_AUTH_LABEL, 'foo'))
    # Re-setting the token from its own string form must be a no-op.
    token.set_token_string(token.get_token_string())
    self.assertEquals(token.get_token_string(), 'foo')

  def testAuthSubToAndFromString(self):
    token = gdata.auth.AuthSubToken()
    token.set_token_string('foo')
    self.assertEquals(token.get_token_string(), 'foo')
    # Header value is the AuthSub label with the raw token appended.
    self.assertEquals(token.auth_header, '%s%s' % (
        gdata.auth.AUTHSUB_AUTH_LABEL, 'foo'))
    token.set_token_string(token.get_token_string())
    self.assertEquals(token.get_token_string(), 'foo')

  def testSecureAuthSubToAndFromString(self):
    # Case 1: no token.
    token = gdata.auth.SecureAuthSubToken(RSA_KEY)
    token.set_token_string('foo')
    self.assertEquals(token.get_token_string(), 'foo')
    token.set_token_string(token.get_token_string())
    self.assertEquals(token.get_token_string(), 'foo')
    self.assertEquals(str(token), 'foo')
    # Case 2: token is a string
    token = gdata.auth.SecureAuthSubToken(RSA_KEY, token_string='foo')
    self.assertEquals(token.get_token_string(), 'foo')
    token.set_token_string(token.get_token_string())
    self.assertEquals(token.get_token_string(), 'foo')
    self.assertEquals(str(token), 'foo')

  def testOAuthToAndFromString(self):
    token_key = 'ABCD'
    token_secret = 'XYZ'
    # Case 1: token key and secret both present single time.
    token_string = 'oauth_token=%s&oauth_token_secret=%s' % (token_key,
                                                             token_secret)
    token = gdata.auth.OAuthToken()
    token.set_token_string(token_string)
    # Parameter order in the regenerated string is unspecified, so check for
    # each key=value pair individually instead of comparing whole strings.
    self.assert_(-1 < token.get_token_string().find(token_string.split('&')[0]))
    self.assert_(-1 < token.get_token_string().find(token_string.split('&')[1]))
    self.assertEquals(token_key, token.key)
    self.assertEquals(token_secret, token.secret)
    # Case 2: token key and secret both present multiple times with unwanted
    # parameters.  The duplicate value ('LMNO') and the extra parameter are
    # expected to be ignored.
    token_string = ('oauth_token=%s&oauth_token_secret=%s&'
                    'oauth_token=%s&ExtraParams=GarbageString' % (token_key,
                                                                  token_secret,
                                                                  'LMNO'))
    token = gdata.auth.OAuthToken()
    token.set_token_string(token_string)
    self.assert_(-1 < token.get_token_string().find(token_string.split('&')[0]))
    self.assert_(-1 < token.get_token_string().find(token_string.split('&')[1]))
    self.assertEquals(token_key, token.key)
    self.assertEquals(token_secret, token.secret)
    # Case 3: Only token key present.
    token_string = 'oauth_token=%s' % (token_key,)
    token = gdata.auth.OAuthToken()
    token.set_token_string(token_string)
    self.assertEquals(token_string, token.get_token_string())
    self.assertEquals(token_key, token.key)
    self.assert_(not token.secret)
    # Case 4: Only token secret present.
    token_string = 'oauth_token_secret=%s' % (token_secret,)
    token = gdata.auth.OAuthToken()
    token.set_token_string(token_string)
    self.assertEquals(token_string, token.get_token_string())
    self.assertEquals(token_secret, token.secret)
    self.assert_(not token.key)
    # Case 5: None present.
    token_string = ''
    token = gdata.auth.OAuthToken()
    token.set_token_string(token_string)
    self.assert_(token.get_token_string() is None)
    self.assert_(not token.key)
    self.assert_(not token.secret)

  def testSecureAuthSubGetAuthHeader(self):
    # Case 1: Presence of OAuth token (in case of 3-legged OAuth)
    url = 'http://dummy.com/?q=notebook&s=true'
    token = gdata.auth.SecureAuthSubToken(RSA_KEY, token_string='foo')
    auth_header = token.GetAuthHeader('GET', url)
    self.assert_('Authorization' in auth_header)
    header_value = auth_header['Authorization']
    self.assert_(header_value.startswith(r'AuthSub token="foo"'))
    self.assert_(-1 < header_value.find(r'sigalg="rsa-sha1"'))
    self.assert_(-1 < header_value.find(r'data="'))
    self.assert_(-1 < header_value.find(r'sig="'))
    # The signed data blob must cover both the HTTP method and the URL.
    m = re.search(r'data="(.*?)"', header_value)
    self.assert_(m is not None)
    data = m.group(1)
    self.assert_(data.startswith('GET'))
    self.assert_(-1 < data.find(url))

  def testOAuthGetAuthHeader(self):
    # Case 1: Presence of OAuth token (in case of 3-legged OAuth)
    oauth_input_params = gdata.auth.OAuthInputParams(
        gdata.auth.OAuthSignatureMethod.RSA_SHA1, CONSUMER_KEY,
        rsa_key=RSA_KEY)
    token = gdata.auth.OAuthToken(key='ABCDDSFFDSG',
                                  oauth_input_params=oauth_input_params)
    auth_header = token.GetAuthHeader('GET',
                                      'http://dummy.com/?q=notebook&s=true',
                                      realm='http://dummy.com')
    self.assert_('Authorization' in auth_header)
    header_value = auth_header['Authorization']
    self.assert_(-1 < header_value.find(r'OAuth realm="http://dummy.com"'))
    self.assert_(-1 < header_value.find(r'oauth_version="1.0"'))
    self.assert_(-1 < header_value.find(r'oauth_token="ABCDDSFFDSG"'))
    self.assert_(-1 < header_value.find(r'oauth_nonce="'))
    self.assert_(-1 < header_value.find(r'oauth_timestamp="'))
    self.assert_(-1 < header_value.find(r'oauth_signature="'))
    self.assert_(-1 < header_value.find(
        r'oauth_consumer_key="%s"' % CONSUMER_KEY))
    self.assert_(-1 < header_value.find(r'oauth_signature_method="RSA-SHA1"'))
    # Case 2: Absence of OAuth token (in case of 2-legged OAuth)
    oauth_input_params = gdata.auth.OAuthInputParams(
        gdata.auth.OAuthSignatureMethod.HMAC_SHA1, CONSUMER_KEY,
        consumer_secret=CONSUMER_SECRET)
    token = gdata.auth.OAuthToken(oauth_input_params=oauth_input_params)
    auth_header = token.GetAuthHeader(
        'GET', 'http://dummy.com/?xoauth_requestor_id=user@gmail.com&q=book')
    self.assert_('Authorization' in auth_header)
    header_value = auth_header['Authorization']
    self.assert_(-1 < header_value.find(r'OAuth realm=""'))
    self.assert_(-1 < header_value.find(r'oauth_version="1.0"'))
    # With no token configured, no oauth_token parameter may appear at all.
    self.assertEquals(-1, header_value.find(r'oauth_token='))
    self.assert_(-1 < header_value.find(r'oauth_nonce="'))
    self.assert_(-1 < header_value.find(r'oauth_timestamp="'))
    self.assert_(-1 < header_value.find(r'oauth_signature="'))
    self.assert_(-1 < header_value.find(
        r'oauth_consumer_key="%s"' % CONSUMER_KEY))
    self.assert_(-1 < header_value.find(r'oauth_signature_method="HMAC-SHA1"'))
if __name__ == '__main__':
  # Run every TestCase defined in this module.
  unittest.main()
| Python |
#!/usr/bin/python
#
# Copyright Google 2007-2008, all rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
import unittest
import getpass
import gdata.spreadsheet.text_db
import gdata.spreadsheet.service
__author__ = 'api.jscudder (Jeffrey Scudder)'
# Live-account credentials; prompted for under __main__ if left empty.
username = ''
password = ''
class FactoryTest(unittest.TestCase):
def setUp(self):
self.client = gdata.spreadsheet.text_db.DatabaseClient()
def testBadCredentials(self):
try:
self.client.SetCredentials('foo', 'bar')
self.fail()
except gdata.spreadsheet.text_db.Error, e:
pass
def testCreateGetAndDeleteDatabase(self):
db_title = 'google_spreadsheets_db unit test 1'
self.client.SetCredentials(username, password)
db = self.client.CreateDatabase(db_title)
# Test finding the database using the name
time.sleep(5)
db_list = self.client.GetDatabases(name=db_title)
self.assert_(len(db_list) >= 1)
if len(db_list) >= 1:
self.assert_(db_list[0].entry.title.text == db_title)
# Test finding the database using the spreadsheet key
db_list = self.client.GetDatabases(spreadsheet_key=db.spreadsheet_key)
self.assert_(len(db_list) == 1)
self.assert_(db_list[0].entry.title.text == db_title)
# Delete the test spreadsheet
time.sleep(10)
db.Delete()
class DatabaseTest(unittest.TestCase):
  """Exercises table creation, lookup by id and by name, and deletion."""

  def setUp(self):
    client = gdata.spreadsheet.text_db.DatabaseClient(username, password)
    self.db = client.CreateDatabase('google_spreadsheets_db unit test 2')

  def tearDown(self):
    # Let pending server-side writes settle before deleting the spreadsheet.
    time.sleep(10)
    self.db.Delete()

  def testCreateGetAndDeleteTable(self):
    table = self.db.CreateTable('test1', ['1','2','3'])
    # Try to get the new table using the worksheet id.
    table_list = self.db.GetTables(worksheet_id=table.worksheet_id)
    self.assert_(len(table_list) == 1)
    # BUG FIX: assert_(value, msg) treats the second argument as the failure
    # message, so the old check passed for any truthy title.  The intent was
    # an equality assertion on the table title.
    self.assertEquals(table_list[0].entry.title.text, 'test1')
    # Try to get the table using the name
    table_list = self.db.GetTables(name='test1')
    self.assert_(len(table_list) == 1)
    self.assertEquals(table_list[0].entry.title.text, 'test1')
    # Delete the table
    table.Delete()
class TableTest(unittest.TestCase):
def setUp(self):
client = gdata.spreadsheet.text_db.DatabaseClient(username, password)
self.db = client.CreateDatabase('google_spreadsheets_db unit test 3')
self.table = self.db.CreateTable('test1', ['a','b','c_d','a', 'd:e'])
def tearDown(self):
time.sleep(10)
self.db.Delete()
def testCreateGetAndDeleteRecord(self):
new_record = self.table.AddRecord({'a':'test1', 'b':'test2', 'cd':'test3', 'a_2':'test4', 'de':'test5'})
# Test getting record by line number.
record = self.table.GetRecord(row_number=1)
self.assert_(record is not None)
self.assert_(record.content['a'] == 'test1')
self.assert_(record.content['b'] == 'test2')
self.assert_(record.content['cd'] == 'test3')
self.assert_(record.content['a_2'] == 'test4')
# Test getting record using the id.
record_list = self.table.GetRecord(row_id=new_record.row_id)
self.assert_(record is not None)
# Delete the record.
time.sleep(10)
new_record.Delete()
def testPushPullSyncing(self):
# Get two copies of the same row.
first_copy = self.table.AddRecord({'a':'1', 'b':'2', 'cd':'3', 'a_2':'4', 'de':'5'})
second_copy = self.table.GetRecord(first_copy.row_id)
# Make changes in the first copy
first_copy.content['a'] = '7'
first_copy.content['b'] = '9'
# Try to get the changes before they've been committed
second_copy.Pull()
self.assert_(second_copy.content['a'] == '1')
self.assert_(second_copy.content['b'] == '2')
# Commit the changes, the content should now be different
first_copy.Push()
second_copy.Pull()
self.assert_(second_copy.content['a'] == '7')
self.assert_(second_copy.content['b'] == '9')
# Make changes to the second copy, push, then try to push changes from
# the first copy.
first_copy.content['a'] = '10'
second_copy.content['a'] = '15'
first_copy.Push()
try:
second_copy.Push()
# The second update should raise and exception due to a 409 conflict.
self.fail()
except gdata.spreadsheet.service.RequestError:
pass
except Exception, error:
#TODO: Why won't the except RequestError catch this?
pass
def testFindRecords(self):
# Add lots of test records:
self.table.AddRecord({'a':'1', 'b':'2', 'cd':'3', 'a_2':'4', 'de':'5'})
self.table.AddRecord({'a':'hi', 'b':'2', 'cd':'20', 'a_2':'4', 'de':'5'})
self.table.AddRecord({'a':'2', 'b':'2', 'cd':'3'})
self.table.AddRecord({'a':'2', 'b':'2', 'cd':'15', 'de':'7'})
self.table.AddRecord({'a':'hi hi hi', 'b':'2', 'cd':'15', 'de':'7'})
self.table.AddRecord({'a':'"5"', 'b':'5', 'cd':'15', 'de':'7'})
self.table.AddRecord({'a':'5', 'b':'5', 'cd':'15', 'de':'7'})
time.sleep(10)
matches = self.table.FindRecords('a == 1')
self.assert_(len(matches) == 1)
self.assert_(matches[0].content['a'] == '1')
self.assert_(matches[0].content['b'] == '2')
matches = self.table.FindRecords('a > 1 && cd < 20')
self.assert_(len(matches) == 4)
matches = self.table.FindRecords('cd < de')
self.assert_(len(matches) == 7)
matches = self.table.FindRecords('a == b')
self.assert_(len(matches) == 0)
matches = self.table.FindRecords('a == 5')
self.assert_(len(matches) == 1)
def testIterateResultSet(self):
# Populate the table with test data.
self.table.AddRecord({'a':'1', 'b':'2', 'cd':'3', 'a_2':'4', 'de':'5'})
self.table.AddRecord({'a':'hi', 'b':'2', 'cd':'20', 'a_2':'4', 'de':'5'})
self.table.AddRecord({'a':'2', 'b':'2', 'cd':'3'})
self.table.AddRecord({'a':'2', 'b':'2', 'cd':'15', 'de':'7'})
self.table.AddRecord({'a':'hi hi hi', 'b':'2', 'cd':'15', 'de':'7'})
self.table.AddRecord({'a':'"5"', 'b':'5', 'cd':'15', 'de':'7'})
self.table.AddRecord({'a':'5', 'b':'5', 'cd':'15', 'de':'7'})
# Get the first two rows.
records = self.table.GetRecords(1, 2)
self.assert_(len(records) == 2)
self.assert_(records[0].content['a'] == '1')
self.assert_(records[1].content['a'] == 'hi')
# Then get the next two rows.
next_records = records.GetNext()
self.assert_(len(next_records) == 2)
self.assert_(next_records[0].content['a'] == '2')
self.assert_(next_records[0].content['cd'] == '3')
self.assert_(next_records[1].content['cd'] == '15')
self.assert_(next_records[1].content['de'] == '7')
def testLookupFieldsOnPreexistingTable(self):
existing_table = self.db.GetTables(name='test1')[0]
existing_table.LookupFields()
self.assertEquals(existing_table.fields, ['a', 'b', 'cd', 'a_2', 'de'])
if __name__ == '__main__':
  # Prompt for live-account credentials unless they were hard-coded above.
  if not username:
    username = raw_input('Spreadsheets API | Text DB Tests\n'
                         'Please enter your username: ')
  if not password:
    password = getpass.getpass()
  unittest.main()
| Python |
#!/usr/bin/python
#
# Copyright (C) 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'api.laurabeth@gmail.com (Laura Beth Lincoln)'
import unittest
try:
from xml.etree import ElementTree
except ImportError:
from elementtree import ElementTree
import gdata.spreadsheet.service
import gdata.service
import atom.service
import gdata.spreadsheet
import atom
import getpass
# Credentials and sheet identifiers; populated interactively under __main__.
username = ''
password = ''
ss_key = ''
ws_key = ''
class DocumentQueryTest(unittest.TestCase):
  """Checks URI generation for spreadsheet document query parameters."""

  def setUp(self):
    self.query = gdata.spreadsheet.service.DocumentQuery()

  def _check_param(self, name, value, expected_uri):
    # A set parameter must read back unchanged and render URL-encoded.
    self.query[name] = value
    self.assert_(self.query[name] == value)
    self.assert_(self.query.ToUri() == expected_uri)

  def testTitle(self):
    self._check_param('title', 'my title', '?title=my+title')

  def testTitleExact(self):
    self._check_param('title-exact', 'true', '?title-exact=true')
class CellQueryTest(unittest.TestCase):
  """Checks URI generation for each cells-feed query parameter."""

  def setUp(self):
    self.query = gdata.spreadsheet.service.CellQuery()

  def _check_param(self, name, value, expected_uri):
    # A set parameter must read back unchanged and render URL-encoded.
    self.query[name] = value
    self.assert_(self.query[name] == value)
    self.assert_(self.query.ToUri() == expected_uri)

  def testMinRow(self):
    self._check_param('min-row', '1', '?min-row=1')

  def testMaxRow(self):
    self._check_param('max-row', '100', '?max-row=100')

  def testMinCol(self):
    self._check_param('min-col', '2', '?min-col=2')

  def testMaxCol(self):
    self._check_param('max-col', '20', '?max-col=20')

  def testRange(self):
    # The ':' separator must be percent-encoded in the URI.
    self._check_param('range', 'A1:B4', '?range=A1%3AB4')

  def testReturnEmpty(self):
    self._check_param('return-empty', 'false', '?return-empty=false')
class ListQueryTest(unittest.TestCase):
  """Checks URI generation for list-feed query parameters."""

  def setUp(self):
    self.query = gdata.spreadsheet.service.ListQuery()

  def _check_param(self, name, value, expected_uri):
    # A set parameter must read back unchanged and render URL-encoded.
    self.query[name] = value
    self.assert_(self.query[name] == value)
    self.assert_(self.query.ToUri() == expected_uri)

  def testSpreadsheetQuery(self):
    self._check_param('sq', 'first=john&last=smith',
                      '?sq=first%3Djohn%26last%3Dsmith')

  def testOrderByQuery(self):
    self._check_param('orderby', 'column:first', '?orderby=column%3Afirst')

  def testReverseQuery(self):
    self._check_param('reverse', 'true', '?reverse=true')
class SpreadsheetsServiceTest(unittest.TestCase):
  """Live-service tests against the Spreadsheets API; uses the credentials
  and spreadsheet/worksheet keys entered interactively under __main__."""

  def setUp(self):
    self.key = ss_key
    self.worksheet = ws_key
    self.gd_client = gdata.spreadsheet.service.SpreadsheetsService()
    self.gd_client.email = username
    self.gd_client.password = password
    self.gd_client.source = 'SpreadsheetsClient "Unit" Tests'
    self.gd_client.ProgrammaticLogin()

  def testGetSpreadsheetsFeed(self):
    # With a key supplied, the call returns a single entry, not a feed.
    entry = self.gd_client.GetSpreadsheetsFeed(self.key)
    self.assert_(isinstance(entry, gdata.spreadsheet.SpreadsheetsSpreadsheet))

  def testGetWorksheetsFeed(self):
    # Without a worksheet id: a feed; with one: a single worksheet entry.
    feed = self.gd_client.GetWorksheetsFeed(self.key)
    self.assert_(isinstance(feed, gdata.spreadsheet.SpreadsheetsWorksheetsFeed))
    entry = self.gd_client.GetWorksheetsFeed(self.key, self.worksheet)
    self.assert_(isinstance(entry, gdata.spreadsheet.SpreadsheetsWorksheet))

  def testGetCellsFeed(self):
    feed = self.gd_client.GetCellsFeed(self.key)
    self.assert_(isinstance(feed, gdata.spreadsheet.SpreadsheetsCellsFeed))
    entry = self.gd_client.GetCellsFeed(self.key, cell='R5C1')
    self.assert_(isinstance(entry, gdata.spreadsheet.SpreadsheetsCell))

  def testGetListFeed(self):
    feed = self.gd_client.GetListFeed(self.key)
    self.assert_(isinstance(feed, gdata.spreadsheet.SpreadsheetsListFeed))
    entry = self.gd_client.GetListFeed(self.key, row_id='cpzh4')
    self.assert_(isinstance(entry, gdata.spreadsheet.SpreadsheetsList))

  def testUpdateCell(self):
    # Clear the cell, then write to it; both updates must succeed.
    self.gd_client.UpdateCell(row='5', col='1', inputValue='', key=self.key)
    self.gd_client.UpdateCell(row='5', col='1', inputValue='newer data',
                              key=self.key)

  def testBatchUpdateCell(self):
    cell_feed = self.gd_client.GetCellsFeed(key=self.key)
    edit_cell = cell_feed.entry[0]
    # The sheet is documented (in __main__) to hold 'a1' in its first cell.
    old_cell_value = 'a1'
    # Create a batch request to change the contents of a cell.
    batch_feed = gdata.spreadsheet.SpreadsheetsCellsFeed()
    edit_cell.cell.inputValue = 'New Value'
    batch_feed.AddUpdate(edit_cell)
    result = self.gd_client.ExecuteBatch(batch_feed,
                                         url=cell_feed.GetBatchLink().href)
    self.assertEquals(len(result.entry), 1)
    self.assertEquals(result.entry[0].cell.inputValue, 'New Value')
    # Make a second batch request to change the cell's value back.
    edit_cell = result.entry[0]
    edit_cell.cell.inputValue = old_cell_value
    batch_feed = gdata.spreadsheet.SpreadsheetsCellsFeed()
    batch_feed.AddUpdate(edit_cell)
    restored = self.gd_client.ExecuteBatch(batch_feed,
                                           url=cell_feed.GetBatchLink().href)
    self.assertEquals(len(restored.entry), 1)
    self.assertEquals(restored.entry[0].cell.inputValue, old_cell_value)

  def testInsertUpdateRow(self):
    # Insert a row, update one column of it, then delete it.
    entry = self.gd_client.InsertRow({'a1':'new', 'b1':'row', 'c1':'was',
                                      'd1':'here'}, self.key)
    entry = self.gd_client.UpdateRow(entry, {'a1':'newer',
        'b1':entry.custom['b1'].text, 'c1':entry.custom['c1'].text,
        'd1':entry.custom['d1'].text})
    self.gd_client.DeleteRow(entry)

  def testWorksheetCRUD(self):
    # Add a new worksheet.
    new_worksheet = self.gd_client.AddWorksheet('worksheet_title_test_12', '2',
                                                3, self.key)
    self.assertEquals(new_worksheet.col_count.text, '3')
    self.assertEquals(new_worksheet.row_count.text, '2')
    self.assertEquals(new_worksheet.title.text, 'worksheet_title_test_12')
    # Change the dimensions and title of the new worksheet.
    new_worksheet.col_count.text = '1'
    new_worksheet.title.text = 'edited worksheet test12'
    edited_worksheet = self.gd_client.UpdateWorksheet(new_worksheet)
    self.assertEquals(edited_worksheet.col_count.text, '1')
    self.assertEquals(edited_worksheet.row_count.text, '2')
    self.assertEquals(edited_worksheet.title.text, 'edited worksheet test12')
    # Delete the new worksheet.
    result = self.gd_client.DeleteWorksheet(edited_worksheet)
    self.assertEquals(result, True)
if __name__ == '__main__':
  # Warn first: these tests run against a live spreadsheet and mutate it.
  print ('Spreadsheet Tests\nNOTE: Please run these tests only with a test '
         'account. The tests may delete or update your data.')
  print ('These tests must be run on a sheet with this data:\n'
         'a1,b1,c1,d1\n'
         '1,2,3,4')
  username = raw_input('Please enter your username: ')
  password = getpass.getpass()
  ss_key = raw_input('Please enter your spreadsheet key: ')
  ws_key = raw_input('Please enter your worksheet key (usually od6): ')
  unittest.main()
| Python |
#!/usr/bin/python
#
# Copyright (C) 2006 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'api.jscudder@gmail.com (Jeff Scudder)'
import unittest
try:
from xml.etree import ElementTree
except ImportError:
from elementtree import ElementTree
import atom
import gdata
from gdata import test_data
import gdata.calendar
class CalendarFeedTest(unittest.TestCase):
  """Validates parsing of the canned calendar list feed in test_data:
  element types, and the exact values the feed is known to contain."""

  def setUp(self):
    self.calendar_feed = gdata.calendar.CalendarListFeedFromString(
        test_data.CALENDAR_FEED)

  def testEntryCount(self):
    # Assert the number of items in the feed of calendars
    self.assertEquals(len(self.calendar_feed.entry),2)

  def testToAndFromString(self):
    # Assert the appropriate type for each entry
    for an_entry in self.calendar_feed.entry:
      self.assert_(isinstance(an_entry, gdata.calendar.CalendarListEntry),
          'Entry must be an instance of CalendarListEntry')
    # Regenerate feed from xml text
    new_calendar_feed = (
        gdata.calendar.CalendarListFeedFromString(str(self.calendar_feed)))
    for an_entry in new_calendar_feed.entry:
      self.assert_(isinstance(an_entry, gdata.calendar.CalendarListEntry),
          'Entry in regenerated feed must be an instance of CalendarListEntry')

  def testAuthor(self):
    """Tests the existence of a <atom:author> and verifies the name and email"""
    # Assert that each element in the feed author list is an atom.Author
    for an_author in self.calendar_feed.author:
      self.assert_(isinstance(an_author, atom.Author),
          "Calendar feed <atom:author> element must be an instance of " +
          "atom.Author: %s" % an_author)
    # Assert the feed author name is as expected
    self.assertEquals(self.calendar_feed.author[0].name.text, 'GData Ops Demo')
    # Assert the feed author email is as expected
    self.assertEquals(self.calendar_feed.author[0].email.text,
        'gdata.ops.demo@gmail.com')
    # Assert one of the values for an entry author
    self.assertEquals(self.calendar_feed.entry[0].author[0].name.text,
        'GData Ops Demo')
    self.assertEquals(self.calendar_feed.entry[0].author[0].email.text,
        'gdata.ops.demo@gmail.com')

  def testId(self):
    """Tests the existence of a <atom:id> in the feed and entries
    and verifies the value"""
    # Assert the feed id exists and is an atom.Id
    self.assert_(isinstance(self.calendar_feed.id, atom.Id),
        "Calendar feed <atom:id> element must be an instance of atom.Id: %s" % (
        self.calendar_feed.id))
    # Assert the feed id value is as expected
    self.assertEquals(self.calendar_feed.id.text,
        'http://www.google.com/calendar/feeds/default')
    # Assert that each entry has an id which is an atom.Id
    for an_entry in self.calendar_feed.entry:
      self.assert_(isinstance(an_entry.id, atom.Id),
          "Calendar entry <atom:id> element must be an instance of " +
          "atom.Id: %s" % an_entry.id)
    # Assert one of the values for an id
    self.assertEquals(self.calendar_feed.entry[1].id.text,
        'http://www.google.com/calendar/feeds/default/' +
        'jnh21ovnjgfph21h32gvms2758%40group.calendar.google.com')

  def testPublished(self):
    """Tests the existence of a <atom:published> in the entries
    and verifies the value"""
    # Assert that each entry has a published value which is an atom.Published
    for an_entry in self.calendar_feed.entry:
      self.assert_(isinstance(an_entry.published, atom.Published),
          "Calendar entry <atom:published> element must be an instance of " +
          "atom.Published: %s" % an_entry.published)
    # Assert one of the values for published is as expected
    self.assertEquals(self.calendar_feed.entry[1].published.text,
        '2007-03-20T22:48:57.837Z')

  def testUpdated(self):
    """Tests the existence of a <atom:updated> in the feed and the entries
    and verifies the value"""
    # Assert that the feed updated element exists and is an atom.Updated
    self.assert_(isinstance(self.calendar_feed.updated, atom.Updated),
        "Calendar feed <atom:updated> element must be an instance of " +
        "atom.Updated: %s" % self.calendar_feed.updated)
    # Assert that each entry has a updated value which is an atom.Updated
    for an_entry in self.calendar_feed.entry:
      self.assert_(isinstance(an_entry.updated, atom.Updated),
          "Calendar entry <atom:updated> element must be an instance of" +
          "atom.Updated: %s" % an_entry.updated)
    # Assert the feed updated value is as expected
    self.assertEquals(self.calendar_feed.updated.text,
        '2007-03-20T22:48:57.833Z')
    # Assert one of the values for updated
    self.assertEquals(self.calendar_feed.entry[0].updated.text,
        '2007-03-20T22:48:52.000Z')

  def testTitle(self):
    """Tests the existence of a <atom:title> in the feed and the entries and
    verifies the value"""
    # Assert that the feed title element exists and is an atom.Title
    self.assert_(isinstance(self.calendar_feed.title, atom.Title),
        "Calendar feed <atom:title> element must be an instance of " +
        "atom.Title: %s" % self.calendar_feed.title)
    # Assert that each entry has a title value which is an atom.Title
    for an_entry in self.calendar_feed.entry:
      self.assert_(isinstance(an_entry.title, atom.Title),
          "Calendar entry <atom:title> element must be an instance of " +
          "atom.Title: %s" % an_entry.title)
    # Assert the feed title value is as expected
    self.assertEquals(self.calendar_feed.title.text,
        'GData Ops Demo\'s Calendar List')
    # Assert one of the values for title
    self.assertEquals(self.calendar_feed.entry[0].title.text, 'GData Ops Demo')

  def testColor(self):
    """Tests the existence of a <gCal:color> and verifies the value"""
    # Assert the color is present and is a gdata.calendar.Color
    for an_entry in self.calendar_feed.entry:
      self.assert_(isinstance(an_entry.color, gdata.calendar.Color),
          "Calendar feed <gCal:color> element must be an instance of " +
          "gdata.calendar.Color: %s" % an_entry.color)
    # Assert the color value is as expected
    self.assertEquals(self.calendar_feed.entry[0].color.value, '#2952A3')

  def testAccessLevel(self):
    """Tests the existence of a <gCal:accesslevel> element and verifies the
    value"""
    # Assert the access_level is present and is a gdata.calendar.AccessLevel
    for an_entry in self.calendar_feed.entry:
      self.assert_(isinstance(an_entry.access_level, gdata.calendar.AccessLevel),
          "Calendar feed <gCal:accesslevel> element must be an instance of " +
          "gdata.calendar.AccessLevel: %s" % an_entry.access_level)
    # Assert the access_level value is as expected
    self.assertEquals(self.calendar_feed.entry[0].access_level.value, 'owner')

  def testTimezone(self):
    """Tests the existence of a <gCal:timezone> element and verifies the
    value"""
    # Assert the timezone is present and is a gdata.calendar.Timezone
    for an_entry in self.calendar_feed.entry:
      self.assert_(isinstance(an_entry.timezone, gdata.calendar.Timezone),
          "Calendar feed <gCal:timezone> element must be an instance of " +
          "gdata.calendar.Timezone: %s" % an_entry.timezone)
    # Assert the timezone value is as expected
    self.assertEquals(self.calendar_feed.entry[0].timezone.value,
        'America/Los_Angeles')

  def testHidden(self):
    """Tests the existence of a <gCal:hidden> element and verifies the
    value"""
    # Assert the hidden is present and is a gdata.calendar.Hidden
    for an_entry in self.calendar_feed.entry:
      self.assert_(isinstance(an_entry.hidden, gdata.calendar.Hidden),
          "Calendar feed <gCal:hidden> element must be an instance of " +
          "gdata.calendar.Hidden: %s" % an_entry.hidden)
    # Assert the hidden value is as expected
    self.assertEquals(self.calendar_feed.entry[0].hidden.value, 'false')

  def testOpenSearch(self):
    """Tests the existence of <openSearch:startIndex>"""
    # Assert that the elements exist and are the appropriate type
    self.assert_(isinstance(self.calendar_feed.start_index, gdata.StartIndex),
        "Calendar feed <openSearch:startIndex> element must be an " +
        "instance of gdata.StartIndex: %s" % self.calendar_feed.start_index)
    # Assert the values for each openSearch element are as expected
    self.assertEquals(self.calendar_feed.start_index.text, '1')

  def testGenerator(self):
    """Tests the existence of <atom:generator> and verifies the value"""
    # Assert that the element exists and is of the appropriate type
    self.assert_(isinstance(self.calendar_feed.generator, atom.Generator),
        "Calendar feed <atom:generator> element must be an instance of " +
        "atom.Generator: %s" % self.calendar_feed.generator)
    # Assert the generator version, uri and text are as expected
    self.assertEquals(self.calendar_feed.generator.text, 'Google Calendar')
    self.assertEquals(self.calendar_feed.generator.version, '1.0')
    self.assertEquals(self.calendar_feed.generator.uri,
        'http://www.google.com/calendar')

  def testEntryLink(self):
    """Makes sure entry links in the private composite feed are parsed."""
    entry = gdata.calendar.CalendarEventEntryFromString(
        test_data.RECURRENCE_EXCEPTION_ENTRY)
    self.assert_(isinstance(entry.recurrence_exception, list))
    self.assert_(isinstance(entry.recurrence_exception[0].entry_link,
        gdata.EntryLink))
    self.assert_(isinstance(entry.recurrence_exception[0].entry_link.entry,
        gdata.calendar.CalendarEventEntry))
    self.assertEquals(
        entry.recurrence_exception[0].entry_link.entry.author[0].name.text,
        'gdata ops')

  def testSequence(self):
    # A sequence value must survive a serialize/parse round trip, and must
    # also parse from hand-written XML in the gCal namespace.
    entry = gdata.calendar.CalendarEventEntry(
        sequence=gdata.calendar.Sequence(value='1'))
    entry2 = gdata.calendar.CalendarEventEntryFromString(str(entry))
    self.assertEqual(entry.sequence.value, entry2.sequence.value)
    entry = gdata.calendar.CalendarEventEntryFromString(
        '<entry xmlns="%s"><sequence xmlns="%s" value="7" /></entry>' % (
            atom.ATOM_NAMESPACE, gdata.calendar.GCAL_NAMESPACE))
    self.assertEqual(entry.sequence.value, '7')

  def testOriginalEntry(self):
    """Make sure original entry in the private composite feed are parsed."""
    entry = gdata.calendar.CalendarEventEntryFromString(
        test_data.RECURRENCE_EXCEPTION_ENTRY)
    self.assertEquals(
        entry.recurrence_exception[0].entry_link.entry.original_event.id,
        'i7lgfj69mjqjgnodklif3vbm7g')
class CalendarFeedTestRegenerated(CalendarFeedTest):
  """Reruns every CalendarFeedTest check against a round-tripped feed."""

  def setUp(self):
    # Parse the canned feed, serialize it, and parse the serialized form so
    # all inherited assertions run against regenerated XML.
    first_pass = gdata.calendar.CalendarListFeedFromString(
        test_data.CALENDAR_FEED)
    regenerated_xml = str(first_pass)
    self.calendar_feed = gdata.calendar.CalendarListFeedFromString(
        regenerated_xml)
    # Also confirm the regenerated XML is well-formed.
    tree = ElementTree.fromstring(regenerated_xml)
class CalendarEventFeedTest(unittest.TestCase):
def setUp(self):
self.calendar_event_feed = (
gdata.calendar.CalendarEventFeedFromString(
test_data.CALENDAR_FULL_EVENT_FEED))
def testEntryCount(self):
# Assert the number of items in the feed of events
self.assertEquals(len(self.calendar_event_feed.entry),11)
def testToAndFromString(self):
# Assert the appropriate type for each entry
for an_entry in self.calendar_event_feed.entry:
self.assert_(isinstance(an_entry, gdata.calendar.CalendarEventEntry),
"Entry must be an instance of a CalendarEventEntry")
# Regenerate feed from xml text
new_calendar_event_feed = gdata.calendar.CalendarEventFeedFromString(
str(self.calendar_event_feed))
for an_entry in new_calendar_event_feed.entry:
self.assert_(isinstance(an_entry, gdata.calendar.CalendarEventEntry),
"Entry in regenerated feed must be an instance of CalendarEventEntry")
def testAuthor(self):
"""Tests the existence of a <atom:author> and verifies the name and email"""
# Assert that each element in the feed author list is an atom.Author
for an_author in self.calendar_event_feed.author:
self.assert_(isinstance(an_author, atom.Author),
"Calendar event feed <atom:author> element must be an instance of " +
"atom.Author: %s" % an_author)
# Assert the feed author name is as expected
self.assertEquals(self.calendar_event_feed.author[0].name.text,
'GData Ops Demo')
# Assert the feed author name is as expected
self.assertEquals(self.calendar_event_feed.author[0].email.text,
'gdata.ops.demo@gmail.com')
# Assert one of the values for an entry author
self.assertEquals(self.calendar_event_feed.entry[0].author[0].name.text,
'GData Ops Demo')
self.assertEquals(self.calendar_event_feed.entry[0].author[0].email.text,
'gdata.ops.demo@gmail.com')
def testId(self):
"""Tests the existence of a <atom:id> in the feed and entries and
verifies the value"""
# Assert the feed id exists and is an atom.Id
self.assert_(isinstance(self.calendar_event_feed.id, atom.Id),
"Calendar event feed <atom:id> element must be an instance of " +
"atom.Id: %s" % self.calendar_event_feed.id)
# Assert the feed id value is as expected
self.assertEquals(self.calendar_event_feed.id.text,
'http://www.google.com/calendar/feeds/default/private/full')
# Assert that each entry has an id which is an atom.Id
for an_entry in self.calendar_event_feed.entry:
self.assert_(isinstance(an_entry.id, atom.Id),
"Calendar event entry <atom:id> element must be an " +
"instance of atom.Id: %s" % an_entry.id)
# Assert one of the values for an id
self.assertEquals(self.calendar_event_feed.entry[1].id.text,
'http://www.google.com/calendar/feeds/default/private/full/' +
'2qt3ao5hbaq7m9igr5ak9esjo0')
def testPublished(self):
  """Tests the existence of a <atom:published> in the entries and
  verifies the value"""
  # Each entry must expose a typed published element.
  for entry in self.calendar_event_feed.entry:
    self.assert_(
        isinstance(entry.published, atom.Published),
        'Calendar event entry <atom:published> element must be an instance '
        'of atom.Published: %s' % entry.published)
  # Spot-check one published timestamp.
  self.assertEquals(self.calendar_event_feed.entry[1].published.text,
                    '2007-03-20T21:26:04.000Z')
def testUpdated(self):
  """Tests the existence of a <atom:updated> in the feed and the entries and
  verifies the value"""
  feed = self.calendar_event_feed
  # The feed-level updated element must be typed correctly.
  self.assert_(
      isinstance(feed.updated, atom.Updated),
      'Calendar feed <atom:updated> element must be an instance of '
      'atom.Updated: %s' % feed.updated)
  # So must each entry's updated element.
  for entry in feed.entry:
    self.assert_(
        isinstance(entry.updated, atom.Updated),
        'Calendar event entry <atom:updated> element must be an instance '
        'of atom.Updated: %s' % entry.updated)
  # Spot-check the feed timestamp and one entry timestamp.
  self.assertEquals(feed.updated.text, '2007-03-20T21:29:57.000Z')
  self.assertEquals(feed.entry[3].updated.text, '2007-03-20T21:25:46.000Z')
def testTitle(self):
  """Tests the existence of a <atom:title> in the feed and the entries
  and verifies the value"""
  feed = self.calendar_event_feed
  # The feed-level title element must be typed correctly.
  self.assert_(
      isinstance(feed.title, atom.Title),
      'Calendar feed <atom:title> element must be an instance of '
      'atom.Title: %s' % feed.title)
  # So must each entry's title element.
  for entry in feed.entry:
    self.assert_(
        isinstance(entry.title, atom.Title),
        'Calendar event entry <atom:title> element must be an instance of '
        'atom.Title: %s' % entry.title)
  # Spot-check the feed title and the first entry's title.
  self.assertEquals(feed.title.text, 'GData Ops Demo')
  self.assertEquals(feed.entry[0].title.text, 'test deleted')
def testPostLink(self):
  """Tests the existence of a <atom:link> with a rel='...#post'
  and verifies the value"""
  feed = self.calendar_event_feed
  # Every feed link must be an atom.Link.
  for link in feed.link:
    self.assert_(
        isinstance(link, atom.Link),
        'Calendar event entry <atom:link> element must be an instance of '
        'atom.Link: %s' % link)
  # The post link must exist and point at the full private feed.
  post_link = feed.GetPostLink()
  self.assert_(post_link is not None)
  self.assertEquals(
      post_link.href,
      'http://www.google.com/calendar/feeds/default/private/full')
def testEditLink(self):
  """Tests the existence of a <atom:link> with a rel='edit' in each entry
  and verifies the value"""
  feed = self.calendar_event_feed
  # Every feed link must be an atom.Link.
  for link in feed.link:
    self.assert_(
        isinstance(link, atom.Link),
        'Calendar event entry <atom:link> element must be an instance of '
        'atom.Link: %s' % link)
  # Every entry must expose an edit link.
  for entry in feed.entry:
    self.assert_(entry.GetEditLink() is not None)
  # Spot-check the first entry's edit link href and content type.
  edit_link = feed.entry[0].GetEditLink()
  self.assertEquals(
      edit_link.href,
      'http://www.google.com/calendar/feeds/default/private/full/o99flmgm'
      'kfkfrr8u745ghr3100/63310109397')
  self.assertEquals(edit_link.type, 'application/atom+xml')
def testOpenSearch(self):
  """Tests the existence of <openSearch:totalResults>,
  <openSearch:startIndex>, <openSearch:itemsPerPage>"""
  feed = self.calendar_event_feed
  # Each openSearch element must be present with the proper gdata type.
  self.assert_(
      isinstance(feed.total_results, gdata.TotalResults),
      'Calendar event feed <openSearch:totalResults> element must be an '
      'instance of gdata.TotalResults: %s' % (feed.total_results))
  self.assert_(
      isinstance(feed.start_index, gdata.StartIndex),
      'Calendar event feed <openSearch:startIndex> element must be an '
      'instance of gdata.StartIndex: %s' % (feed.start_index))
  self.assert_(
      isinstance(feed.items_per_page, gdata.ItemsPerPage),
      'Calendar event feed <openSearch:itemsPerPage> element must be an '
      'instance of gdata.ItemsPerPage: %s' % (feed.items_per_page))
  # Verify the values carried by each openSearch element.
  self.assertEquals(feed.total_results.text, '10')
  self.assertEquals(feed.start_index.text, '1')
  self.assertEquals(feed.items_per_page.text, '25')
def testGenerator(self):
  """Tests the existence of <atom:generator> and verifies the value"""
  generator = self.calendar_event_feed.generator
  # The generator element must be present with the proper atom type.
  self.assert_(
      isinstance(generator, atom.Generator),
      'Calendar event feed <atom:generator> element must be an instance '
      'of atom.Generator: %s' % generator)
  # Its text, version and uri identify Google Calendar.
  self.assertEquals(generator.text, 'Google Calendar')
  self.assertEquals(generator.version, '1.0')
  self.assertEquals(generator.uri, 'http://www.google.com/calendar')
def testCategory(self):
  """Tests the existence of <atom:category> and verifies the value"""
  kind_scheme = 'http://schemas.google.com/g/2005#kind'
  event_term = 'http://schemas.google.com/g/2005#event'
  # Feed-level categories must all be event-kind atom.Category elements.
  for category in self.calendar_event_feed.category:
    self.assert_(
        isinstance(category, atom.Category),
        'Calendar event feed <atom:category> element must be an instance '
        'of atom.Category: %s' % category)
    self.assertEquals(category.scheme, kind_scheme)
    self.assertEquals(category.term, event_term)
  # Entry-level categories carry the same scheme and term.
  for event in self.calendar_event_feed.entry:
    for category in event.category:
      self.assert_(
          isinstance(category, atom.Category),
          'Calendar event feed entry <atom:category> element must be an '
          'instance of atom.Category: %s' % category)
      self.assertEquals(category.scheme, kind_scheme)
      self.assertEquals(category.term, event_term)
def testSendEventNotifications(self):
  """Test the existence of <gCal:sendEventNotifications>
  and verifies the value"""
  # Every entry must expose a typed sendEventNotifications element.
  for event in self.calendar_event_feed.entry:
    notifications = event.send_event_notifications
    self.assert_(
        isinstance(notifications, gdata.calendar.SendEventNotifications),
        'Calendar event feed entry <gCal:sendEventNotifications> element '
        'must be an instance of gdata.calendar.SendEventNotifications: %s'
        % (notifications,))
  # Spot-check the notification flags of two entries.
  self.assertEquals(
      self.calendar_event_feed.entry[0].send_event_notifications.value,
      'false')
  self.assertEquals(
      self.calendar_event_feed.entry[2].send_event_notifications.value,
      'true')
def testQuickAdd(self):
  """Test the existence of <gCal:quickadd>
  and verifies the value"""
  entry = gdata.calendar.CalendarEventEntry()
  entry.quick_add = gdata.calendar.QuickAdd(value='true')
  # Serialize the entry, then re-parse it to confirm that the quickadd
  # element round-trips with the expected tag and value attribute.
  serialized_entry = entry.ToString()
  tag = '{%s}quickadd' % (gdata.calendar.GCAL_NAMESPACE)
  parsed_element = ElementTree.fromstring(serialized_entry).find(tag)
  self.assert_(parsed_element is not None)
  # Bug fix: the original called self.assert_(x, y), which only checks the
  # truthiness of x and treats y as the failure message, so the assertions
  # could never fail. Use assertEquals to actually compare the values.
  self.assertEquals(parsed_element.attrib['value'], 'true')
  self.assertEquals(parsed_element.tag, tag)
def testEventStatus(self):
  """Test the existence of <gd:eventStatus>
  and verifies the value"""
  # Every entry must expose a typed eventStatus element.
  for event in self.calendar_event_feed.entry:
    self.assert_(
        isinstance(event.event_status, gdata.calendar.EventStatus),
        'Calendar event feed entry <gd:eventStatus> element '
        'must be an instance of gdata.calendar.EventStatus: %s'
        % (event.event_status,))
  # Spot-check the status values of the first two entries.
  self.assertEquals(self.calendar_event_feed.entry[0].event_status.value,
                    'CANCELED')
  self.assertEquals(self.calendar_event_feed.entry[1].event_status.value,
                    'CONFIRMED')
def testComments(self):
  """Tests the existence of <gd:comments> and verifies the value"""
  # Comments are optional, so None is acceptable; when present the
  # element must be a gdata.calendar.Comments instance.
  for event in self.calendar_event_feed.entry:
    comments = event.comments
    self.assert_(
        comments is None or isinstance(comments, gdata.calendar.Comments),
        'Calendar event feed entry <gd:comments> element '
        'must be an instance of gdata.calendar.Comments: %s' % (comments,))
def testVisibility(self):
  """Test the existence of <gd:visibility> and verifies the value"""
  # Every entry must expose a typed visibility element.
  for event in self.calendar_event_feed.entry:
    self.assert_(
        isinstance(event.visibility, gdata.calendar.Visibility),
        'Calendar event feed entry <gd:visibility> element '
        'must be an instance of gdata.calendar.Visibility: %s'
        % (event.visibility,))
  # The first three entries in the test data cover each visibility setting.
  for index, expected in enumerate(['DEFAULT', 'PRIVATE', 'PUBLIC']):
    self.assertEquals(
        self.calendar_event_feed.entry[index].visibility.value, expected)
def testTransparency(self):
  """Test the existence of <gd:transparency> and verifies the value"""
  # Every entry must expose a typed transparency element.
  for event in self.calendar_event_feed.entry:
    self.assert_(
        isinstance(event.transparency, gdata.calendar.Transparency),
        'Calendar event feed entry <gd:transparency> element '
        'must be an instance of gdata.calendar.Transparency: %s'
        % (event.transparency,))
  # The first three entries in the test data are all OPAQUE.
  for index in range(3):
    self.assertEquals(
        self.calendar_event_feed.entry[index].transparency.value, 'OPAQUE')
  # TODO: test transparency values other than OPAQUE
def testWhere(self):
  """Tests the existence of a <gd:where> in the entries
  and verifies the value"""
  # Every where element on every entry must be typed correctly.
  for entry in self.calendar_event_feed.entry:
    for where in entry.where:
      self.assert_(
          isinstance(where, gdata.calendar.Where),
          'Calendar event entry <gd:where> element must be an instance of '
          'gdata.calendar.Where: %s' % where)
  # Spot-check one location string.
  self.assertEquals(self.calendar_event_feed.entry[1].where[0].value_string,
                    'Dolores Park with Kim')
def testWhenAndReminder(self):
  """Tests the existence of a <gd:when> and <gd:reminder> in the entries
  and verifies the values"""
  # Every when element, and every reminder nested under it, must be typed.
  for entry in self.calendar_event_feed.entry:
    for when in entry.when:
      self.assert_(
          isinstance(when, gdata.calendar.When),
          'Calendar event entry <gd:when> element must be an instance '
          'of gdata.calendar.When: %s' % when)
      for reminder in when.reminder:
        self.assert_(
            isinstance(reminder, gdata.calendar.Reminder),
            'Calendar event entry <gd:reminder> element must be an '
            'instance of gdata.calendar.Reminder: %s' % reminder)
  # Spot-check the start/end times of the first entry's first when.
  first_when = self.calendar_event_feed.entry[0].when[0]
  self.assertEquals(first_when.start_time, '2007-03-23T12:00:00.000-07:00')
  self.assertEquals(first_when.end_time, '2007-03-23T13:00:00.000-07:00')
  # The reminder children carry the configured lead times in minutes.
  self.assertEquals(first_when.reminder[0].minutes, '10')
  self.assertEquals(
      self.calendar_event_feed.entry[1].when[0].reminder[0].minutes, '20')
def testBatchRequestParsing(self):
  """Parses a canned batch request feed and checks each batch operation."""
  batch_request = gdata.calendar.CalendarEventFeedFromString(
      test_data.CALENDAR_BATCH_REQUEST)
  self.assertEquals(len(batch_request.entry), 4)
  # Match each batch operation with its batch id. These values are
  # hard coded to match the test data.
  for entry in batch_request.entry:
    batch_id = entry.batch_id.text
    if batch_id == '1':
      self.assertEquals(entry.batch_operation.type, 'insert')
    elif batch_id == '2':
      self.assertEquals(entry.batch_operation.type, 'query')
    elif batch_id == '3':
      self.assertEquals(entry.batch_operation.type, 'update')
      self.assertEquals(entry.title.text, 'Event updated via batch')
    elif batch_id == '4':
      self.assertEquals(entry.batch_operation.type, 'delete')
      self.assertEquals(entry.id.text,
                        'http://www.google.com/calendar/feeds/default/'
                        'private/full/d8qbg9egk1n6lhsgq1sjbqffqc')
      self.assertEquals(entry.GetEditLink().href,
                        'http://www.google.com/calendar/feeds/default/'
                        'private/full/d8qbg9egk1n6lhsgq1sjbqffqc/'
                        '63326018324')
def testBatchResponseParsing(self):
  """Parses a canned batch response feed and checks each batch result."""
  batch_response = gdata.calendar.CalendarEventFeedFromString(
      test_data.CALENDAR_BATCH_RESPONSE)
  self.assertEquals(len(batch_response.entry), 4)
  # The expected operations and statuses are hard coded to the test data.
  for entry in batch_response.entry:
    batch_id = entry.batch_id.text
    if batch_id == '1':
      self.assertEquals(entry.batch_operation.type, 'insert')
      self.assertEquals(entry.batch_status.code, '201')
      self.assertEquals(entry.batch_status.reason, 'Created')
      self.assertEquals(entry.id.text, 'http://www.google.com/calendar/'
                                       'feeds/default/private/full/'
                                       'n9ug78gd9tv53ppn4hdjvk68ek')
    elif batch_id == '2':
      self.assertEquals(entry.batch_operation.type, 'query')
    elif batch_id == '3':
      self.assertEquals(entry.batch_operation.type, 'update')
    elif batch_id == '4':
      self.assertEquals(entry.batch_operation.type, 'delete')
      self.assertEquals(entry.id.text, 'http://www.google.com/calendar/'
                                       'feeds/default/private/full/'
                                       'd8qbg9egk1n6lhsgq1sjbqffqc')
# TODO add reminder tests for absolute_time and hours/seconds (if possible)
# TODO test recurrence and recurrenceexception
# TODO test originalEvent
class CalendarWebContentTest(unittest.TestCase):
  """Tests reading and building <gCal:webContent> links on event entries."""

  def setUp(self):
    self.calendar_event_feed = (
        gdata.calendar.CalendarEventFeedFromString(
            test_data.CALENDAR_FULL_EVENT_FEED))

  def testAddSimpleWebContentEventEntry(self):
    """Verifies that we can add a web content link to an event entry."""
    title = "Al Einstein's Birthday!"
    href = 'http://gdata.ops.demo.googlepages.com/birthdayicon.gif'
    # Renamed from `type` to avoid shadowing the builtin.
    content_type = 'image/jpeg'
    url = 'http://gdata.ops.demo.googlepages.com/einstein.jpg'
    width = '300'
    height = '225'
    # Build an event carrying a simple web content link.
    event = gdata.calendar.CalendarEventEntry()
    web_content = gdata.calendar.WebContent(url=url, width=width,
                                            height=height)
    event.link.append(gdata.calendar.WebContentLink(
        title=title, href=href, link_type=content_type,
        web_content=web_content))
    # Both the link and its embedded web content must round-trip intact.
    web_content_link = event.GetWebContentLink()
    self.assertValidWebContentLink(title, href, content_type,
                                   web_content_link)
    self.assertValidSimpleWebContent(url, width, height,
                                     web_content_link.web_content)

  def testAddWebContentGadgetEventEntry(self):
    """Verifies that we can add a web content gadget link to an event entry."""
    title = "Date and Time Gadget"
    href = 'http://gdata.ops.demo.googlepages.com/birthdayicon.gif'
    url = 'http://google.com/ig/modules/datetime.xml'
    content_type = 'application/x-google-gadgets+xml'
    width = '300'
    height = '200'
    pref_name = 'color'
    pref_value = 'green'
    # Build an event carrying a gadget link with one gadget preference.
    event = gdata.calendar.CalendarEventEntry()
    web_content = gdata.calendar.WebContent(url=url, width=width,
                                            height=height)
    web_content.gadget_pref.append(
        gdata.calendar.WebContentGadgetPref(name=pref_name,
                                            value=pref_value))
    event.link.append(gdata.calendar.WebContentLink(
        title=title, href=href, web_content=web_content,
        link_type=content_type))
    # Both the link and the embedded gadget data must round-trip intact.
    web_content_link = event.GetWebContentLink()
    self.assertValidWebContentLink(title, href, content_type,
                                   web_content_link)
    self.assertValidWebContentGadget(url, width, height, pref_name,
                                     pref_value, web_content_link.web_content)

  def testFromXmlToSimpleWebContent(self):
    """Verifies that we can read a web content link from an event entry."""
    # Expected values (from test_data.py file).
    title = 'World Cup'
    href = 'http://www.google.com/calendar/images/google-holiday.gif'
    content_type = 'image/gif'
    url = 'http://www.google.com/logos/worldcup06.gif'
    width = '276'
    height = '120'
    # Note: The tenth event entry contains web content.
    web_content_event = self.calendar_event_feed.entry[9]
    web_content_link = web_content_event.GetWebContentLink()
    self.assertValidWebContentLink(title, href, content_type,
                                   web_content_link)
    self.assertValidSimpleWebContent(url, width, height,
                                     web_content_link.web_content)

  def testFromXmlToWebContentGadget(self):
    """Verifies that we can read a web content gadget link from an event
    entry."""
    # Expected values (from test_data.py file).
    title = 'Date and Time Gadget'
    href = 'http://gdata.ops.demo.googlepages.com/birthdayicon.gif'
    url = 'http://google.com/ig/modules/datetime.xml'
    content_type = 'application/x-google-gadgets+xml'
    width = '300'
    height = '136'
    pref_name = 'color'
    pref_value = 'green'
    # Note: The eleventh event entry contains web content.
    web_content_event = self.calendar_event_feed.entry[10]
    web_content_link = web_content_event.GetWebContentLink()
    self.assertValidWebContentLink(title, href, content_type,
                                   web_content_link)
    self.assertValidWebContentGadget(url, width, height, pref_name,
                                     pref_value, web_content_link.web_content)

  def assertValidWebContentLink(self, expected_title=None, expected_href=None,
      expected_type=None, web_content_link=None):
    """Asserts that the web content link is the correct type and contains the
    expected values"""
    self.assert_(
        isinstance(web_content_link, gdata.calendar.WebContentLink),
        'Web content link element must be an '
        'instance of gdata.calendar.WebContentLink: %s' % web_content_link)
    # The rel is fixed by the gCal namespace.
    self.assertEquals(
        '%s/%s' % (gdata.calendar.GCAL_NAMESPACE, 'webContent'),
        web_content_link.rel)
    self.assertEqual(expected_title, web_content_link.title)
    self.assertEqual(expected_href, web_content_link.href)
    self.assertEqual(expected_type, web_content_link.type)

  def assertValidSimpleWebContent(self, expected_url=None, expected_width=None,
      expected_height=None, web_content_element=None):
    """Asserts that the web content element is the correct type and contains
    the expected values"""
    self.assert_(
        isinstance(web_content_element, gdata.calendar.WebContent),
        'Calendar event entry <gCal:webContent> element must be an '
        'instance of gdata.calendar.WebContent: %s' % web_content_element)
    self.assertEquals(expected_width, web_content_element.width)
    self.assertEquals(expected_height, web_content_element.height)
    self.assertEquals(expected_url, web_content_element.url)

  def assertValidWebContentGadget(self, expected_url=None, expected_width=None,
      expected_height=None, expected_pref_name=None, expected_pref_value=None,
      web_content_element=None):
    """Asserts that the web content element is the correct type and contains
    the expected values"""
    self.assert_(
        isinstance(web_content_element, gdata.calendar.WebContent),
        'Calendar event entry <gCal:webContent> element must be an '
        'instance of gdata.calendar.WebContent: %s' % web_content_element)
    self.assertEquals(expected_width, web_content_element.width)
    self.assertEquals(expected_height, web_content_element.height)
    self.assertEquals(expected_url, web_content_element.url)
    # The first gadget preference must round-trip its name and value.
    self.assertEquals(expected_pref_name,
                      web_content_element.gadget_pref[0].name)
    self.assertEquals(expected_pref_value,
                      web_content_element.gadget_pref[0].value)

  def testSampleCode(self):
    """Replays the published web content gadget sample."""
    # From http://code.google.com/apis/calendar/gadgets/event/
    wc = gdata.calendar.WebContent()
    wc.url = 'http://www.thefreedictionary.com/_/WoD/wod-module.xml'
    wc.width = '300'
    wc.height = '136'
    wc.gadget_pref.append(
        gdata.calendar.WebContentGadgetPref(name='Days', value='1'))
    wc.gadget_pref.append(
        gdata.calendar.WebContentGadgetPref(name='Format', value='0'))
    wcl = gdata.calendar.WebContentLink()
    wcl.title = 'Word of the Day'
    wcl.href = 'http://www.thefreedictionary.com/favicon.ico'
    wcl.type = 'application/x-google-gadgets+xml'
    wcl.web_content = wc
    self.assertEqual(wcl.web_content.url,
                     'http://www.thefreedictionary.com/_/WoD/wod-module.xml')
    self.assertEqual(wcl.type, 'application/x-google-gadgets+xml')
    self.assertEqual(wcl.web_content.height, '136')
class ExtendedPropertyTest(unittest.TestCase):
  """Round-trips a gd:extendedProperty through XML serialization."""

  def testExtendedPropertyToAndFromXml(self):
    original = gdata.calendar.ExtendedProperty(name='test')
    original.value = 'val'
    # Serialize and re-parse, then compare field by field.
    round_tripped = gdata.ExtendedPropertyFromString(original.ToString())
    self.assertEquals(original.name, round_tripped.name)
    self.assertEquals(original.value, round_tripped.value)
# Run all tests in this module when executed directly.
if __name__ == '__main__':
  unittest.main()
| Python |
#!/usr/bin/python
#
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'api.eric@google.com (Eric Bidelman)'
import unittest
from gdata import test_data
import gdata.health
import gdata.health.service
class ProfileEntryTest(unittest.TestCase):
  """Exercises parsing of a health profile entry and its CCR payload."""

  def setUp(self):
    self.profile_entry = gdata.health.ProfileEntryFromString(
        test_data.HEALTH_PROFILE_ENTRY_DIGEST)

  def _ReparsedEntry(self):
    # Round-trip the fixture entry through its string form, exactly as
    # each test below does, so serialization is exercised every time.
    return gdata.health.ProfileEntryFromString(str(self.profile_entry))

  def testToAndFromStringWithData(self):
    entry = self._ReparsedEntry()
    self.assert_(isinstance(entry, gdata.health.ProfileEntry))
    self.assert_(isinstance(entry.ccr, gdata.health.Ccr))
    # Counts of each CCR section in the digest test data.
    self.assertEqual(len(entry.ccr.GetMedications()), 3)
    self.assertEqual(len(entry.ccr.GetImmunizations()), 1)
    self.assertEqual(len(entry.ccr.GetAlerts()), 2)
    self.assertEqual(len(entry.ccr.GetResults()), 1)
    self.assertEqual(len(entry.ccr.GetProblems()), 2)
    self.assertEqual(len(entry.ccr.GetProcedures()), 2)

  def testGetResultsTextFromCcr(self):
    entry = self._ReparsedEntry()
    test_element = entry.ccr.GetResults()[0].FindChildren('Test')[0]
    description = test_element.FindChildren('Description')[0]
    self.assertEqual(description.FindChildren('Text')[0].text,
                     'Acetaldehyde - Blood')

  def testGetMedicationNameFromCcr(self):
    entry = self._ReparsedEntry()
    product = entry.ccr.GetMedications()[1].FindChildren('Product')[0]
    name_element = product.FindChildren('ProductName')[0]
    self.assertEqual(name_element.FindChildren('Text')[0].text, 'A-Fil')

  def testGetProblemCodeValueFromCcr(self):
    entry = self._ReparsedEntry()
    description = entry.ccr.GetProblems()[1].FindChildren('Description')[0]
    value = description.FindChildren('Code')[0].FindChildren('Value')[0]
    self.assertEqual(value.text, '136.9')

  def testGetGetImmunizationActorIdFromCcr(self):
    entry = self._ReparsedEntry()
    source = entry.ccr.GetImmunizations()[0].FindChildren('Source')[0]
    actor_id = source.FindChildren('Actor')[0].FindChildren('ActorID')[0]
    self.assertEqual(actor_id.text, 'user@gmail.com')

  def testGetGetProceduresNameFromCcr(self):
    entry = self._ReparsedEntry()
    description = entry.ccr.GetProcedures()[1].FindChildren('Description')[0]
    self.assertEqual(description.FindChildren('Text')[0].text,
                     'Abdominoplasty')

  def testGetAlertsFromCcr(self):
    entry = self._ReparsedEntry()
    alert_type = entry.ccr.GetAlerts()[0].FindChildren('Type')[0]
    self.assertEqual(alert_type.FindChildren('Text')[0].text, 'Allergy')
class ProfileListEntryTest(unittest.TestCase):
  """Checks the profile list entry accessors."""

  def setUp(self):
    self.entry = gdata.health.ProfileListEntryFromString(
        test_data.HEALTH_PROFILE_LIST_ENTRY)

  def testToAndFromString(self):
    self.assert_(isinstance(self.entry, gdata.health.ProfileListEntry))
    # The profile id and display name come straight from the test data.
    self.assertEqual(self.entry.GetProfileId(), 'vndCn5sdfwdEIY')
    self.assertEqual(self.entry.GetProfileName(), 'profile name')
class ProfileFeedTest(unittest.TestCase):
  """Parses a full health profile feed and inspects its entries."""

  def setUp(self):
    self.feed = gdata.health.ProfileFeedFromString(
        test_data.HEALTH_PROFILE_FEED)

  def testToAndFromString(self):
    self.assert_(len(self.feed.entry) == 15)
    for entry in self.feed.entry:
      self.assert_(isinstance(entry, gdata.health.ProfileEntry))
    # Re-serializing and re-parsing must preserve the entry types.
    reparsed_feed = gdata.health.ProfileFeedFromString(str(self.feed))
    for entry in reparsed_feed.entry:
      self.assert_(isinstance(entry, gdata.health.ProfileEntry))

  def testConvertActualData(self):
    # Every entry in the feed is expected to carry a CCR payload.
    for entry in self.feed.entry:
      self.assert_(entry.ccr is not None)
class HealthProfileQueryTest(unittest.TestCase):
  """Verifies the URIs produced by HealthProfileQuery."""

  def testHealthQueryToString(self):
    default_uri = '/health/feeds/profile/default'
    # Default construction and an explicit feed both yield the default URI.
    self.assertEqual(
        gdata.health.service.HealthProfileQuery().ToUri(), default_uri)
    self.assertEqual(
        gdata.health.service.HealthProfileQuery(
            feed='feeds/profile').ToUri(),
        default_uri)
    # Categories are appended after the '-' path segment.
    self.assertEqual(
        gdata.health.service.HealthProfileQuery(
            categories=['medication']).ToUri(),
        '/health/feeds/profile/default/-/medication')
    # Projection and profile id replace the default path.
    self.assertEqual(
        gdata.health.service.HealthProfileQuery(
            projection='ui', profile_id='12345').ToUri(),
        '/health/feeds/profile/ui/12345')
    # Category values are URL-escaped ('|' becomes %7C).
    query = gdata.health.service.HealthProfileQuery()
    query.categories.append('medication|condition')
    self.assertEqual(query.ToUri(),
                     '/health/feeds/profile/default/-/medication%7Ccondition')

  def testH9QueryToString(self):
    # The 'h9' service swaps the URI prefix.
    query = gdata.health.service.HealthProfileQuery(service='h9')
    self.assertEqual(query.ToUri(), '/h9/feeds/profile/default')
    query = gdata.health.service.HealthProfileQuery(
        service='h9', feed='feeds/profile',
        projection='ui', profile_id='12345')
    self.assertEqual(query.ToUri(), '/h9/feeds/profile/ui/12345')

  def testDigestParam(self):
    query = gdata.health.service.HealthProfileQuery(
        params={'digest': 'true'})
    self.assertEqual(query.ToUri(),
                     '/health/feeds/profile/default?digest=true')
    # Changing projection/profile id keeps the digest query parameter.
    query.profile_id = '12345'
    query.projection = 'ui'
    self.assertEqual(query.ToUri(),
                     '/health/feeds/profile/ui/12345?digest=true')
class HealthProfileListQueryTest(unittest.TestCase):
  """Verifies the URIs produced by HealthProfileListQuery."""

  def testHealthProfileListQueryToString(self):
    expected = '/health/feeds/profile/list'
    # All of these constructions resolve to the default list feed URI.
    self.assertEqual(
        gdata.health.service.HealthProfileListQuery().ToUri(), expected)
    self.assertEqual(
        gdata.health.service.HealthProfileListQuery(
            service='health').ToUri(),
        expected)
    self.assertEqual(
        gdata.health.service.HealthProfileListQuery(
            feed='feeds/profile/list').ToUri(),
        expected)
    self.assertEqual(
        gdata.health.service.HealthProfileListQuery(
            service='health', feed='feeds/profile/list').ToUri(),
        expected)

  def testH9ProfileListQueryToString(self):
    # The 'h9' service swaps the URI prefix.
    expected = '/h9/feeds/profile/list'
    self.assertEqual(
        gdata.health.service.HealthProfileListQuery(service='h9').ToUri(),
        expected)
    self.assertEqual(
        gdata.health.service.HealthProfileListQuery(
            service='h9', feed='feeds/profile/list').ToUri(),
        expected)
# Run all tests in this module when executed directly.
if __name__ == '__main__':
  unittest.main()
| Python |
#!/usr/bin/env python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module is used for version 2 of the Google Data APIs.
# These tests attempt to connect to Google servers.
__author__ = 'j.s@google.com (Jeff Scudder)'
import unittest
import gdata.blogger.client
import gdata.blogger.data
import gdata.gauth
import gdata.client
import atom.http_core
import atom.mock_http_core
import atom.core
import gdata.data
import gdata.test_config as conf
conf.options.register_option(conf.BLOG_ID_OPTION)
class BloggerClientTest(unittest.TestCase):
def setUp(self):
self.client = None
if conf.options.get_value('runlive') == 'true':
self.client = gdata.blogger.client.BloggerClient()
conf.configure_client(self.client, 'BloggerTest', 'blogger')
def tearDown(self):
conf.close_client(self.client)
def test_create_update_delete(self):
if not conf.options.get_value('runlive') == 'true':
return
# Either load the recording or prepare to make a live request.
conf.configure_cache(self.client, 'test_create_update_delete')
# Add a blog post.
created = self.client.add_post(conf.options.get_value('blogid'),
'test post from BloggerClientTest',
'Hey look, another test!',
labels=['test', 'python'])
self.assertEqual(created.title.text, 'test post from BloggerClientTest')
self.assertEqual(created.content.text, 'Hey look, another test!')
self.assertEqual(len(created.category), 2)
self.assert_(created.control is None)
# Change the title of the blog post we just added.
created.title.text = 'Edited'
updated = self.client.update(created)
self.assertEqual(updated.title.text, 'Edited')
self.assert_(isinstance(updated, gdata.blogger.data.BlogPost))
self.assertEqual(updated.content.text, created.content.text)
# Delete the test entry from the blog.
self.client.delete(updated)
def test_create_draft_post(self):
if not conf.options.get_value('runlive') == 'true':
return
conf.configure_cache(self.client, 'test_create_draft_post')
# Add a draft blog post.
created = self.client.add_post(conf.options.get_value('blogid'),
'draft test post from BloggerClientTest',
'This should only be a draft.',
labels=['test2', 'python'], draft=True)
self.assertEqual(created.title.text,
'draft test post from BloggerClientTest')
self.assertEqual(created.content.text, 'This should only be a draft.')
self.assertEqual(len(created.category), 2)
self.assert_(created.control is not None)
self.assert_(created.control.draft is not None)
self.assertEqual(created.control.draft.text, 'yes')
# Publish the blog post.
created.control.draft.text = 'no'
updated = self.client.update(created)
if updated.control is not None and updated.control.draft is not None:
self.assertNotEqual(updated.control.draft.text, 'yes')
# Delete the test entry from the blog using the URL instead of the entry.
self.client.delete(updated.find_edit_link())
def test_create_draft_page(self):
  """Adds a draft page, publishes it, then deletes it, checking page counts.

  Skipped unless the 'runlive' option is 'true'; the request order must
  match the recorded session.
  """
  if not conf.options.get_value('runlive') == 'true':
    return
  conf.configure_cache(self.client, 'test_create_draft_page')
  # List all pages on the blog.
  pages_before = self.client.get_pages(conf.options.get_value('blogid'))
  # Add a draft page to blog.
  created = self.client.add_page(conf.options.get_value('blogid'),
                                 'draft page from BloggerClientTest',
                                 'draft content',
                                 draft=True)
  self.assertEqual(created.title.text, 'draft page from BloggerClientTest')
  self.assertEqual(created.content.text, 'draft content')
  # Draft pages carry an app:control element whose draft text is 'yes'.
  self.assert_(created.control is not None)
  self.assert_(created.control.draft is not None)
  self.assertEqual(created.control.draft.text, 'yes')
  # The page id should be purely numeric (round-trips through int()).
  self.assertEqual(str(int(created.get_page_id())), created.get_page_id())
  # List all pages after adding one.
  pages_after = self.client.get_pages(conf.options.get_value('blogid'))
  self.assertEqual(len(pages_before.entry) + 1, len(pages_after.entry))
  # Publish page.
  created.control.draft.text = 'no'
  updated = self.client.update(created)
  # The control element may be absent once published; only check if present.
  if updated.control is not None and updated.control.draft is not None:
    self.assertNotEqual(updated.control.draft.text, 'yes')
  # Delete test page.
  self.client.delete(updated.find_edit_link())
  pages_after = self.client.get_pages(conf.options.get_value('blogid'))
  # The page count should be back to the starting value.
  self.assertEqual(len(pages_before.entry), len(pages_after.entry))
def test_retrieve_post_with_categories(self):
  """Queries posts filtered by category.

  Skipped unless the 'runlive' option is 'true'.

  Fix: the original fetched the feed into `posts` and never examined it,
  so the test could not fail even if the request broke.  At minimum,
  verify that the query produced a feed object.
  """
  if not conf.options.get_value('runlive') == 'true':
    return
  conf.configure_cache(self.client, 'test_retrieve_post_with_categories')
  # strict=True asks the server to reject unsupported query parameters.
  query = gdata.blogger.client.Query(categories=["news"], strict=True)
  posts = self.client.get_posts(conf.options.get_value('blogid'), query=query)
  self.assert_(posts is not None)
def suite():
  """Builds this module's test suite via the shared test-config helper."""
  return conf.build_suite([BloggerClientTest])


if __name__ == '__main__':
  unittest.TextTestRunner().run(suite())
| Python |
#!/usr/bin/python
#
# Copyright (C) 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests to exercise server interactions for blogger."""
__author__ = 'api.jscudder (Jeffrey Scudder)'
import unittest
import getpass
import atom
from gdata import test_data
import gdata.blogger
import gdata.blogger.service
username = ''
password = ''
test_blog_id = ''
class BloggerCrudTests(unittest.TestCase):
  """Live create/update/delete tests for the v1 Blogger service client.

  NOTE(review): these tests log in with the module-level username/password
  and write to a real blog (test_blog_id); run them only against a test
  account, as the __main__ banner below warns.
  """

  def setUp(self):
    # `source` identifies this test suite to the Blogger service.
    self.client = gdata.blogger.service.BloggerService(email=username,
        password=password, source='GoogleInc-PythonBloggerUnitTests-1')
    # TODO: if the test_blog_id is not set, get the list of the user's blogs
    # and prompt for which blog to add the test posts to.
    self.client.ProgrammaticLogin()

  def testPostDraftUpdateAndDelete(self):
    """Posts a draft entry, updates its title/labels, then deletes it."""
    new_entry = gdata.blogger.BlogPostEntry(title=atom.Title(
        text='Unit Test Post'))
    new_entry.content = atom.Content('text', None, 'Hello World')
    # Make this post a draft so it will not appear publicly on the blog.
    new_entry.control = atom.Control(draft=atom.Draft(text='yes'))
    new_entry.AddLabel('test')
    posted = self.client.AddPost(new_entry, blog_id=test_blog_id)
    self.assertEquals(posted.title.text, new_entry.title.text)
    # Should be one category in the posted entry for the 'test' label.
    self.assertEquals(len(posted.category), 1)
    self.assert_(isinstance(posted, gdata.blogger.BlogPostEntry))
    # Change the title and add more labels.
    posted.title.text = 'Updated'
    posted.AddLabel('second')
    updated = self.client.UpdatePost(entry=posted)
    self.assertEquals(updated.title.text, 'Updated')
    # Two categories now: the original 'test' label plus 'second'.
    self.assertEquals(len(updated.category), 2)
    # Cleanup and delete the draft blog post.
    self.client.DeletePost(entry=posted)

  def testAddComment(self):
    """Adds a comment to a freshly created post, then deletes the post."""
    # Create a test post to add comments to.
    new_entry = gdata.blogger.BlogPostEntry(title=atom.Title(
        text='Comments Test Post'))
    new_entry.content = atom.Content('text', None, 'Hello Comments')
    target_post = self.client.AddPost(new_entry, blog_id=test_blog_id)
    blog_id = target_post.GetBlogId()
    post_id = target_post.GetPostId()
    new_comment = gdata.blogger.CommentEntry()
    new_comment.content = atom.Content(text='Test comment')
    posted = self.client.AddComment(new_comment, blog_id=blog_id,
        post_id=post_id)
    self.assertEquals(posted.content.text, new_comment.content.text)
    # Cleanup and delete the comment test blog post.
    self.client.DeletePost(entry=target_post)
class BloggerQueryTests(unittest.TestCase):
  """Placeholder tests for Blogger query construction.

  Fix: the original defined `testConstructBlogQuery` three times; each
  later definition silently replaced the earlier one, so unittest only
  collected a single test.  Each placeholder now has a distinct name so
  all three are collected and can be filled in independently.
  """

  def testConstructBlogQuery(self):
    # TODO: exercise blog-feed query construction.
    pass

  def testConstructBlogPostQuery(self):
    # TODO: exercise blog-post query construction.
    pass

  def testConstructBlogCommentQuery(self):
    # TODO: exercise comment-feed query construction.
    pass
if __name__ == '__main__':
  # These tests create, update and delete real data on a live Blogger
  # account, so warn and collect credentials interactively first.
  print ('NOTE: Please run these tests only with a test account. ' +
         'The tests may delete or update your data.')
  username = raw_input('Please enter your username: ')
  password = getpass.getpass()
  test_blog_id = raw_input('Please enter the blog id for the test blog: ')
  unittest.main()
| Python |
#!/usr/bin/env python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'j.s@google.com (Jeff Scudder)'
import unittest
from gdata import test_data
import gdata.blogger.data
import atom.core
import gdata.test_config as conf
class BlogEntryTest(unittest.TestCase):
  """XML parsing tests for Blog, BlogPostFeed and CommentFeed fixtures."""

  def testBlogEntryFromString(self):
    blog = atom.core.parse(test_data.BLOG_ENTRY, gdata.blogger.data.Blog)
    self.assertEqual('blogName', blog.GetBlogName())
    self.assertEqual('blogID', blog.GetBlogId())
    self.assertEqual('Lizzy\'s Diary', blog.title.text)

  def testBlogPostFeedFromString(self):
    feed = atom.core.parse(test_data.BLOG_POSTS_FEED,
                           gdata.blogger.data.BlogPostFeed)
    self.assertTrue(isinstance(feed, gdata.blogger.data.BlogPostFeed))
    self.assertEqual(1, len(feed.entry))
    post = feed.entry[0]
    self.assertTrue(isinstance(post, gdata.blogger.data.BlogPost))
    self.assertEqual('postID', post.GetPostId())
    self.assertEqual('blogID', post.GetBlogId())
    self.assertEqual('Quite disagreeable', post.title.text)

  def testCommentFeedFromString(self):
    feed = atom.core.parse(test_data.BLOG_COMMENTS_FEED,
                           gdata.blogger.data.CommentFeed)
    self.assertTrue(isinstance(feed, gdata.blogger.data.CommentFeed))
    self.assertEqual(1, len(feed.entry))
    comment = feed.entry[0]
    self.assertTrue(isinstance(comment, gdata.blogger.data.Comment))
    self.assertEqual('blogID', comment.get_blog_id())
    self.assertEqual('a-blogName', comment.get_blog_name())
    self.assertEqual('commentID', comment.get_comment_id())
    self.assertEqual('This is my first comment', comment.title.text)
    # The thr:in-reply-to element links the comment back to its post.
    reply = comment.in_reply_to
    self.assertEqual(
        'http://blogName.blogspot.com/feeds/posts/default/postID',
        reply.source)
    self.assertEqual('tag:blogger.com,1999:blog-blogID.post-postID',
                     reply.ref)
    self.assertEqual(
        'http://blogName.blogspot.com/2007/04/first-post.html', reply.href)
    self.assertEqual('text/html', reply.type)

  def testIdParsing(self):
    # The blog id is extracted from either atom:id form.
    blog = gdata.blogger.data.Blog()
    blog.id = atom.data.Id(
        text='tag:blogger.com,1999:user-146606542.blog-4023408167658848')
    self.assertEqual('4023408167658848', blog.GetBlogId())
    blog.id = atom.data.Id(text='tag:blogger.com,1999:blog-4023408167658848')
    self.assertEqual('4023408167658848', blog.GetBlogId())
class InReplyToTest(unittest.TestCase):

  def testToAndFromString(self):
    """Round-trips an InReplyTo element through its XML serialization."""
    original = gdata.blogger.data.InReplyTo(
        href='http://example.com/href', ref='http://example.com/ref',
        source='http://example.com/my_post', type='text/html')
    reparsed = atom.core.parse(str(original), gdata.blogger.data.InReplyTo)
    # Every attribute must survive the serialize/parse round trip.
    for attribute in ('source', 'href', 'ref', 'type'):
      self.assertEqual(getattr(reparsed, attribute),
                       getattr(original, attribute))
class CommentTest(unittest.TestCase):

  def testToAndFromString(self):
    """Round-trips a Comment with content and in-reply-to through XML."""
    original = gdata.blogger.data.Comment(
        content=atom.data.Content(text='Nifty!'),
        in_reply_to=gdata.blogger.data.InReplyTo(
            source='http://example.com/my_post'))
    reparsed = atom.core.parse(str(original), gdata.blogger.data.Comment)
    self.assertEqual(reparsed.in_reply_to.source, original.in_reply_to.source)
    self.assertEqual(reparsed.content.text, original.content.text)
def suite():
  """Builds the suite of blogger data-model parsing tests."""
  return conf.build_suite([BlogEntryTest, InReplyToTest, CommentTest])


if __name__ == '__main__':
  unittest.main()
| Python |
#!/usr/bin/python
#
# Copyright (C) 2006 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'api.jscudder (Jeffrey Scudder)'
import unittest
from gdata import test_data
import gdata.photos
class AlbumFeedTest(unittest.TestCase):
  """Parsing tests for the Picasa album feed fixture."""

  def setUp(self):
    self.album_feed = gdata.photos.AlbumFeedFromString(test_data.ALBUM_FEED)

  def testCorrectXmlParsing(self):
    """Verifies the feed ids, entry count and the known photo's summary.

    Fixes: uses assertEqual instead of assert_(x == y) so failures report
    the differing values, and fails if the expected entry is absent (the
    original loop silently passed when the id never matched).
    """
    self.assertEqual(
        self.album_feed.id.text,
        'http://picasaweb.google.com/data/feed/api/user/sample.user/albumid/1')
    self.assertEqual(self.album_feed.gphoto_id.text, '1')
    self.assertEqual(len(self.album_feed.entry), 4)
    found = False
    for entry in self.album_feed.entry:
      if entry.id.text == ('http://picasaweb.google.com/data/entry/api/user/'
                           'sample.user/albumid/1/photoid/2'):
        found = True
        self.assertEqual(entry.summary.text, 'Blue')
    self.assert_(found)
class PhotoFeedTest(unittest.TestCase):
  """Parsing tests for photo entries within the album feed fixture."""

  def setUp(self):
    self.feed = gdata.photos.PhotoFeedFromString(test_data.ALBUM_FEED)

  def testCorrectXmlParsing(self):
    """Verifies the parsed fields of the known photo entry.

    Fixes: uses assertEqual instead of assert_(x == y) so failures report
    the differing values, and fails if the expected entry is absent (the
    original loop silently passed when the id never matched).
    """
    found = False
    for entry in self.feed.entry:
      if entry.id.text == ('http://picasaweb.google.com/data/entry/api/user/'
                           'sample.user/albumid/1/photoid/2'):
        found = True
        self.assertEqual(entry.gphoto_id.text, '2')
        self.assertEqual(entry.albumid.text, '1')
        self.assertEqual(entry.exif.flash.text, 'true')
        self.assertEqual(entry.media.title.type, 'plain')
        self.assertEqual(entry.media.title.text, 'Aqua Blue.jpg')
        self.assertEqual(len(entry.media.thumbnail), 3)
    self.assert_(found)
class AnyFeedTest(unittest.TestCase):
  """Checks that AnyFeedFromString converts entries to typed classes."""

  def setUp(self):
    self.feed = gdata.photos.AnyFeedFromString(test_data.ALBUM_FEED)

  def testEntryTypeConversion(self):
    # NOTE(review): this id ends in 'albumid/' with no trailing id and uses
    # the feed-style '/data/feed/' path, unlike the entry ids checked in the
    # tests above -- the condition may never match, in which case the
    # isinstance assertion never runs.  Verify against the ALBUM_FEED fixture.
    for entry in self.feed.entry:
      if entry.id.text == 'http://picasaweb.google.com/data/feed/api/user/sample.user/albumid/':
        self.assert_(isinstance(entry, gdata.photos.PhotoEntry))


if __name__ == '__main__':
  unittest.main()
| Python |
#!/usr/bin/python
#
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module is used for version 2 of the Google Data APIs.
# These tests attempt to connect to Google servers.
__author__ = 'Alexandre Vivien <alex@simplecode.fr>'
import unittest
import gdata.client
import gdata.data
import gdata.gauth
import gdata.marketplace.client
import gdata.marketplace.data
import gdata.test_config as conf
conf.options.register_option(conf.APPS_DOMAIN_OPTION)
class LicensingClientTest(unittest.TestCase):
  """Live tests for the Google Apps Marketplace licensing feeds.

  Tests are skipped unless the 'runlive' option is 'true'.  The client
  authenticates with 2-legged OAuth using the Marketplace application's
  consumer key and secret.
  """

  def __init__(self, *args, **kwargs):
    unittest.TestCase.__init__(self, *args, **kwargs)
    # Register the extra command-line options these tests need.  Fix: use
    # the module's `conf` alias -- the original `gdata.test_config.options`
    # spelling only resolved because other imports happened to bind the
    # `gdata` package name as a side effect.
    conf.options.register(
        'appsid',
        'Enter the Application ID of your Marketplace application',
        description='The Application ID of your Marketplace application')
    conf.options.register(
        'appsconsumerkey',
        'Enter the Consumer Key of your Marketplace application',
        description='The Consumer Key of your Marketplace application')
    conf.options.register(
        'appsconsumersecret',
        'Enter the Consumer Secret of your Marketplace application',
        description='The Consumer Secret of your Marketplace application')

  def setUp(self):
    """Builds the licensing client and its two-legged OAuth token."""
    self.client = gdata.marketplace.client.LicensingClient(domain='example.com')
    if conf.options.get_value('runlive') == 'true':
      self.client = gdata.marketplace.client.LicensingClient(
          domain=conf.options.get_value('appsdomain'))
      conf.configure_client(self.client, 'LicensingClientTest',
                            self.client.auth_service, True)
    self.client.auth_token = gdata.gauth.TwoLeggedOAuthHmacToken(
        conf.options.get_value('appsconsumerkey'),
        conf.options.get_value('appsconsumersecret'), '')
    self.client.source = 'GData-Python-Client-Test'
    self.client.account_type = 'HOSTED'
    self.client.http_client.debug = True
    self.app_id = conf.options.get_value('appsid')

  def tearDown(self):
    # Flushes/closes any recorded-session cache attached in setUp.
    conf.close_client(self.client)

  def testGetLicense(self):
    """Fetches the license feed for the app and sanity-checks the entity."""
    if not conf.options.get_value('runlive') == 'true':
      return
    # Either load the recording or prepare to make a live request.
    conf.configure_cache(self.client, 'testGetLicense')
    fetched_feed = self.client.GetLicense(app_id=self.app_id)
    self.assertTrue(isinstance(fetched_feed, gdata.marketplace.data.LicenseFeed))
    self.assertTrue(isinstance(fetched_feed.entry[0],
                               gdata.marketplace.data.LicenseEntry))
    entity = fetched_feed.entry[0].content.entity
    self.assertTrue(entity is not None)
    # All license fields should be populated.
    self.assertNotEqual(entity.id, '')
    self.assertNotEqual(entity.enabled, '')
    self.assertNotEqual(entity.customer_id, '')
    self.assertNotEqual(entity.state, '')

  def testGetLicenseNotifications(self):
    """Fetches two pages of license notifications and checks each entity."""
    if not conf.options.get_value('runlive') == 'true':
      return
    # Either load the recording or prepare to make a live request.
    conf.configure_cache(self.client, 'testGetLicenseNotifications')
    fetched_feed = self.client.GetLicenseNotifications(app_id=self.app_id,
                                                       max_results=2)
    self.assertTrue(isinstance(fetched_feed, gdata.marketplace.data.LicenseFeed))
    self.assertEqual(len(fetched_feed.entry), 2)
    for entry in fetched_feed.entry:
      entity = entry.content.entity
      self.assertTrue(entity is not None)
      self.assertNotEqual(entity.id, '')
      self.assertNotEqual(entity.domain_name, '')
      self.assertNotEqual(entity.installer_email, '')
      self.assertNotEqual(entity.tos_acceptance_time, '')
      self.assertNotEqual(entity.last_change_time, '')
      self.assertNotEqual(entity.product_config_id, '')
      self.assertNotEqual(entity.state, '')
    # Follow the next link to fetch the second page.
    next_uri = fetched_feed.find_next_link()
    fetched_feed_next = self.client.GetLicenseNotifications(uri=next_uri)
    self.assertTrue(isinstance(fetched_feed_next,
                               gdata.marketplace.data.LicenseFeed))
    self.assertTrue(len(fetched_feed_next.entry) <= 2)
    for entry in fetched_feed_next.entry:
      entity = entry.content.entity
      self.assertTrue(entity is not None)
      self.assertNotEqual(entity.id, '')
      self.assertNotEqual(entity.domain_name, '')
      self.assertNotEqual(entity.installer_email, '')
      self.assertNotEqual(entity.tos_acceptance_time, '')
      self.assertNotEqual(entity.last_change_time, '')
      self.assertNotEqual(entity.product_config_id, '')
      self.assertNotEqual(entity.state, '')
def suite():
  """Builds the suite of Marketplace licensing client tests."""
  return conf.build_suite([LicensingClientTest])


if __name__ == '__main__':
  unittest.TextTestRunner().run(suite())
| Python |
#!/usr/bin/python
#
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module is used for version 2 of the Google Data APIs.
# These tests attempt to connect to Google servers.
__author__ = 'e.bidelman (Eric Bidelman)'
import unittest
import gdata.client
import gdata.data
import gdata.gauth
import gdata.sites.client
import gdata.sites.data
import gdata.test_config as conf
conf.options.register_option(conf.TEST_IMAGE_LOCATION_OPTION)
conf.options.register_option(conf.APPS_DOMAIN_OPTION)
conf.options.register_option(conf.SITES_NAME_OPTION)
class SitesClientTest(unittest.TestCase):
  """Live create/update/delete and upload tests for the Sites client.

  Tests are skipped unless the 'runlive' option is 'true'.  Requests are
  recorded/replayed via configure_cache, so the order of client calls in
  each test must not change.
  """

  def setUp(self):
    self.client = None
    if conf.options.get_value('runlive') == 'true':
      self.client = gdata.sites.client.SitesClient(
          site=conf.options.get_value('sitename'),
          domain=conf.options.get_value('appsdomain'))
      if conf.options.get_value('ssl') == 'true':
        self.client.ssl = True
      conf.configure_client(self.client, 'SitesTest', self.client.auth_service,
                            True)

  def tearDown(self):
    # Flushes/closes any recorded-session cache attached in setUp.
    conf.close_client(self.client)

  def testCreateUpdateDelete(self):
    """Creates a webpage, renames its title, then deletes it."""
    if not conf.options.get_value('runlive') == 'true':
      return
    # Either load the recording or prepare to make a live request.
    conf.configure_cache(self.client, 'testCreateUpdateDelete')
    new_entry = self.client.CreatePage(
        'webpage', 'Title Of Page', '<b>Your html content</b>')
    self.assertEqual(new_entry.title.text, 'Title Of Page')
    # With no explicit page_name, the server derives it from the title.
    self.assertEqual(new_entry.page_name.text, 'title-of-page')
    self.assert_(new_entry.GetAlternateLink().href is not None)
    self.assertEqual(new_entry.Kind(), 'webpage')
    # Change the title of the webpage we just added.
    new_entry.title.text = 'Edited'
    updated_entry = self.client.update(new_entry)
    self.assertEqual(updated_entry.title.text, 'Edited')
    # Editing the title does not change the existing page name.
    self.assertEqual(updated_entry.page_name.text, 'title-of-page')
    self.assert_(isinstance(updated_entry, gdata.sites.data.ContentEntry))
    # Delete the test webpage from the Site.
    self.client.delete(updated_entry)

  def testCreateAndUploadToFilecabinet(self):
    """Creates a file cabinet, uploads an image to it, then cleans up."""
    if not conf.options.get_value('runlive') == 'true':
      return
    # Either load the recording or prepare to make a live request.
    conf.configure_cache(self.client, 'testCreateAndUploadToFilecabinet')
    filecabinet = self.client.CreatePage(
        'filecabinet', 'FilesGoHere', '<b>Your html content</b>',
        page_name='diff-pagename-than-title')
    self.assertEqual(filecabinet.title.text, 'FilesGoHere')
    # An explicit page_name overrides the title-derived default.
    self.assertEqual(filecabinet.page_name.text, 'diff-pagename-than-title')
    self.assert_(filecabinet.GetAlternateLink().href is not None)
    self.assertEqual(filecabinet.Kind(), 'filecabinet')
    # Upload a file to the filecabinet
    filepath = conf.options.get_value('imgpath')
    attachment = self.client.UploadAttachment(
        filepath, filecabinet, content_type='image/jpeg', title='TestImageFile',
        description='description here')
    self.assertEqual(attachment.title.text, 'TestImageFile')
    # The attachment's parent link should point back at the filecabinet.
    self.assertEqual(attachment.FindParentLink(),
                     filecabinet.GetSelfLink().href)
    # Delete the test filecabinet and attachment from the Site.
    self.client.delete(attachment)
    self.client.delete(filecabinet)
def suite():
  """Builds the suite of live Sites client tests."""
  return conf.build_suite([SitesClientTest])


if __name__ == '__main__':
  unittest.TextTestRunner().run(suite())
| Python |
#!/usr/bin/python
#
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'e.bidelman (Eric Bidelman)'
import unittest
import atom
from gdata import test_data
import gdata.acl.data
import gdata.data
import gdata.sites.data
import gdata.test_config as conf
def parse(xml_string, target_class):
  """Convenience wrapper for converting an XML string to an XmlElement.

  Args:
    xml_string: str The raw XML to parse.
    target_class: The atom.core.XmlElement subclass to instantiate.

  Returns:
    An instance of target_class populated from xml_string.
  """
  return atom.core.xml_element_from_string(xml_string, target_class)
class CommentEntryTest(unittest.TestCase):

  def setUp(self):
    self.entry = parse(test_data.SITES_COMMENT_ENTRY,
                       gdata.sites.data.ContentEntry)

  def testToAndFromStringCommentEntry(self):
    """Checks the comment's kind, parent link and in-reply-to attributes."""
    entry = self.entry
    self.assertEqual('comment', entry.Kind())
    self.assertTrue(isinstance(entry.in_reply_to, gdata.sites.data.InReplyTo))
    self.assertEqual('text/html', entry.in_reply_to.type)
    self.assertEqual(
        'http://sites.google.com/feeds/content/site/gdatatestsite/abc123parent',
        entry.FindParentLink())
    reply = entry.in_reply_to
    self.assertEqual(
        'http://sites.google.com/site/gdatatestsite/annoucment/testpost',
        reply.href)
    self.assertEqual(
        'http://sites.google.com/feeds/content/site/gdatatestsite/abc123',
        reply.ref)
    self.assertEqual(
        'http://sites.google.com/feeds/content/site/gdatatestsite',
        reply.source)
class ListPageEntryTest(unittest.TestCase):

  def setUp(self):
    self.entry = parse(test_data.SITES_LISTPAGE_ENTRY,
                       gdata.sites.data.ContentEntry)

  def testToAndFromStringWithData(self):
    """Verifies the list-page fixture parses with all expected pieces."""
    entry = self.entry
    self.assertTrue(isinstance(entry, gdata.sites.data.ContentEntry))
    self.assertEqual('ListPagesTitle', entry.title.text)
    # Exactly one author, with both name and email populated.
    self.assertEqual(1, len(entry.author))
    author = entry.author[0]
    self.assertEqual('Test User', author.name.text)
    self.assertEqual('test@gmail.com', author.email.text)
    # List structure: worksheet, header row and data block.
    self.assertEqual('listpage', entry.worksheet.name)
    self.assertEqual('1', entry.header.row)
    self.assertEqual('2', entry.data.startRow)
    self.assertEqual(5, len(entry.data.column))
    first_column = entry.data.column[0]
    self.assertTrue(isinstance(first_column, gdata.sites.data.Column))
    self.assertEqual('A', first_column.index)
    self.assertEqual('Owner', first_column.name)
    self.assertTrue(isinstance(entry.feed_link, gdata.data.FeedLink))
    self.assertEqual(
        'http:///sites.google.com/feeds/content/site/gdatatestsite?parent=abc',
        entry.feed_link.href)
    self.assertTrue(isinstance(entry.content, gdata.sites.data.Content))
    self.assertTrue(isinstance(entry.content.html, atom.core.XmlElement))
    self.assertEqual('xhtml', entry.content.type)
class ListItemEntryTest(unittest.TestCase):

  def setUp(self):
    self.entry = parse(test_data.SITES_LISTITEM_ENTRY,
                       gdata.sites.data.ContentEntry)

  def testToAndFromStringWithData(self):
    """Checks the row fields and parent link of the list-item fixture."""
    entry = self.entry
    self.assertTrue(isinstance(entry, gdata.sites.data.ContentEntry))
    self.assertEqual(5, len(entry.field))
    first_field = entry.field[0]
    self.assertTrue(isinstance(first_field, gdata.sites.data.Field))
    self.assertEqual('A', first_field.index)
    self.assertEqual('Owner', first_field.name)
    self.assertEqual('test value', first_field.text)
    self.assertEqual(
        'http://sites.google.com/feeds/content/site/gdatatestsite/abc123def',
        entry.FindParentLink())
class BaseSiteEntryTest(unittest.TestCase):

  def testCreateBaseSiteEntry(self):
    """Builds a webpage entry by hand, then via the kind= shortcut."""
    entry = gdata.sites.data.BaseSiteEntry()
    entry.link.append(atom.data.Link(
        rel=gdata.sites.data.SITES_PARENT_LINK_REL, href='abc'))
    kind_term = '%s#%s' % (gdata.sites.data.SITES_NAMESPACE, 'webpage')
    entry.category.append(
        atom.data.Category(
            scheme=gdata.sites.data.SITES_KIND_SCHEME,
            term=kind_term,
            label='webpage'))
    self.assertEqual('webpage', entry.Kind())
    self.assertEqual('webpage', entry.category[0].label)
    self.assertEqual(
        '%s#%s' % ('http://schemas.google.com/sites/2008', 'webpage'),
        entry.category[0].term)
    self.assertEqual('abc', entry.link[0].href)
    self.assertEqual('http://schemas.google.com/sites/2008#parent',
                     entry.link[0].rel)
    # The kind= constructor keyword must yield the same category term.
    shortcut = gdata.sites.data.BaseSiteEntry(kind='webpage')
    self.assertEqual(
        '%s#%s' % ('http://schemas.google.com/sites/2008', 'webpage'),
        shortcut.category[0].term)
class ContentFeedTest(unittest.TestCase):
  """Parsing and helper-method tests for the sites content feed."""

  def setUp(self):
    self.feed = parse(test_data.SITES_CONTENT_FEED,
                      gdata.sites.data.ContentFeed)

  def testToAndFromStringContentFeed(self):
    """Checks revisions, node ids, page names and deleted markers."""
    self.assert_(isinstance(self.feed, gdata.sites.data.ContentFeed))
    self.assertEqual(len(self.feed.entry), 8)
    self.assert_(isinstance(self.feed.entry[0].revision,
                            gdata.sites.data.Revision))
    self.assertEqual(int(self.feed.entry[0].revision.text), 2)
    self.assertEqual(self.feed.entry[0].GetNodeId(), '1712987567114738703')
    self.assert_(isinstance(self.feed.entry[0].page_name,
                            gdata.sites.data.PageName))
    self.assertEqual(self.feed.entry[0].page_name.text, 'home')
    self.assertEqual(self.feed.entry[0].FindRevisionLink(),
                     'http:///sites.google.com/feeds/content/site/gdatatestsite/12345')
    # IsDeleted() must agree with the presence of the deleted element.
    for entry in self.feed.entry:
      self.assert_(isinstance(entry, gdata.sites.data.ContentEntry))
      if entry.deleted is not None:
        self.assert_(isinstance(entry.deleted, gdata.sites.data.Deleted))
        self.assertEqual(entry.IsDeleted(), True)
      else:
        self.assertEqual(entry.IsDeleted(), False)

  def testCreateContentEntry(self):
    """Content html may be assigned as a string or via the constructor."""
    new_entry = gdata.sites.data.ContentEntry()
    new_entry.content = gdata.sites.data.Content()
    new_entry.content.html = '<div><p>here is html</p></div>'
    self.assert_(isinstance(new_entry, gdata.sites.data.ContentEntry))
    self.assert_(isinstance(new_entry.content, gdata.sites.data.Content))
    self.assert_(isinstance(new_entry.content.html, atom.core.XmlElement))
    # Same result when html is passed directly to the Content constructor.
    new_entry2 = gdata.sites.data.ContentEntry()
    new_entry2.content = gdata.sites.data.Content(
        html='<div><p>here is html</p></div>')
    self.assert_(isinstance(new_entry2, gdata.sites.data.ContentEntry))
    self.assert_(isinstance(new_entry2.content, gdata.sites.data.Content))
    self.assert_(isinstance(new_entry2.content.html, atom.core.XmlElement))

  def testGetHelpers(self):
    """Each Get<Kind>() helper returns exactly the feed entries of that kind."""
    # Maps kind label -> bound helper expected to return one entry each.
    kinds = {'announcement': self.feed.GetAnnouncements,
             'announcementspage': self.feed.GetAnnouncementPages,
             'attachment': self.feed.GetAttachments,
             'comment': self.feed.GetComments,
             'filecabinet': self.feed.GetFileCabinets,
             'listitem': self.feed.GetListItems,
             'listpage': self.feed.GetListPages,
             'webpage': self.feed.GetWebpages}
    # iteritems: this module targets Python 2.
    for k, v in kinds.iteritems():
      entries = v()
      self.assertEqual(len(entries), 1)
      for entry in entries:
        self.assertEqual(entry.Kind(), k)
        if k == 'attachment':
          self.assertEqual(entry.GetAlternateLink().href,
                           'http://sites.google.com/feeds/SOMELONGURL')
class ActivityFeedTest(unittest.TestCase):

  def setUp(self):
    self.feed = parse(test_data.SITES_ACTIVITY_FEED,
                      gdata.sites.data.ActivityFeed)

  def testToAndFromStringActivityFeed(self):
    """Every activity entry must carry an xhtml summary."""
    self.assertTrue(isinstance(self.feed, gdata.sites.data.ActivityFeed))
    self.assertEqual(2, len(self.feed.entry))
    for activity in self.feed.entry:
      summary = activity.summary
      self.assertTrue(isinstance(summary, gdata.sites.data.Summary))
      self.assertEqual('xhtml', summary.type)
      self.assertTrue(isinstance(summary.html, atom.core.XmlElement))
class RevisionFeedTest(unittest.TestCase):

  def setUp(self):
    self.feed = parse(test_data.SITES_REVISION_FEED,
                      gdata.sites.data.RevisionFeed)

  def testToAndFromStringRevisionFeed(self):
    """The single revision entry has xhtml content and a parent link."""
    self.assertTrue(isinstance(self.feed, gdata.sites.data.RevisionFeed))
    self.assertEqual(1, len(self.feed.entry))
    revision = self.feed.entry[0]
    self.assertTrue(isinstance(revision.content, gdata.sites.data.Content))
    self.assertTrue(isinstance(revision.content.html, atom.core.XmlElement))
    self.assertEqual('xhtml', revision.content.type)
    self.assertEqual(
        'http://sites.google.com/feeds/content/site/siteName/54395424125706119',
        revision.FindParentLink())
class SiteFeedTest(unittest.TestCase):

  def setUp(self):
    self.feed = parse(test_data.SITES_SITE_FEED,
                      gdata.sites.data.SiteFeed)

  def testToAndFromStringSiteFeed(self):
    """Checks the first site's name, title, theme and acl/source links."""
    self.assertTrue(isinstance(self.feed, gdata.sites.data.SiteFeed))
    self.assertEqual(2, len(self.feed.entry))
    site = self.feed.entry[0]
    self.assertTrue(isinstance(site.site_name, gdata.sites.data.SiteName))
    self.assertEqual('New Test Site', site.title.text)
    self.assertEqual('new-test-site', site.site_name.text)
    self.assertEqual(
        'http://sites.google.com/feeds/acl/site/example.com/new-test-site',
        site.FindAclLink())
    self.assertEqual(
        'http://sites.google.com/feeds/site/example.com/source-site',
        site.FindSourceLink())
    self.assertEqual('iceberg', site.theme.text)
class AclFeedTest(unittest.TestCase):

  def setUp(self):
    self.feed = parse(test_data.SITES_ACL_FEED,
                      gdata.sites.data.AclFeed)

  def testToAndFromStringAclFeed(self):
    """The single ACL entry grants the 'owner' role to user@example.com."""
    self.assertTrue(isinstance(self.feed, gdata.sites.data.AclFeed))
    self.assertEqual(1, len(self.feed.entry))
    acl = self.feed.entry[0]
    self.assertTrue(isinstance(acl, gdata.sites.data.AclEntry))
    self.assertTrue(isinstance(acl.scope, gdata.acl.data.AclScope))
    self.assertEqual('user', acl.scope.type)
    self.assertEqual('user@example.com', acl.scope.value)
    self.assertTrue(isinstance(acl.role, gdata.acl.data.AclRole))
    self.assertEqual('owner', acl.role.value)
    self.assertEqual(
        ('https://sites.google.com/feeds/acl/site/example.com/'
         'new-test-site/user%3Auser%40example.com'),
        acl.GetSelfLink().href)
class DataClassSanityTest(unittest.TestCase):

  def test_basic_element_structure(self):
    """Runs the shared structural sanity checks over every sites data class."""
    conf.check_data_classes(self, [
        gdata.sites.data.Revision, gdata.sites.data.PageName,
        gdata.sites.data.Deleted, gdata.sites.data.Publisher,
        gdata.sites.data.Worksheet, gdata.sites.data.Header,
        gdata.sites.data.Column, gdata.sites.data.Data,
        gdata.sites.data.Field, gdata.sites.data.InReplyTo,
        gdata.sites.data.BaseSiteEntry, gdata.sites.data.ContentEntry,
        gdata.sites.data.ContentFeed, gdata.sites.data.ActivityEntry,
        gdata.sites.data.ActivityFeed, gdata.sites.data.RevisionEntry,
        gdata.sites.data.RevisionFeed, gdata.sites.data.Content,
        gdata.sites.data.Summary, gdata.sites.data.SiteName,
        gdata.sites.data.SiteEntry, gdata.sites.data.SiteFeed,
        gdata.sites.data.AclEntry, gdata.sites.data.AclFeed,
        gdata.sites.data.Theme])
def suite():
  """Builds the full suite of sites data-model tests."""
  return conf.build_suite([
      CommentEntryTest, ListPageEntryTest, ListItemEntryTest, BaseSiteEntryTest,
      ContentFeedTest, ActivityFeedTest, RevisionFeedTest, SiteFeedTest,
      AclFeedTest, DataClassSanityTest])


if __name__ == '__main__':
  unittest.main()
| Python |
#!/usr/bin/python2.4
#
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Live client tests for the Provisioning API."""
# This module is used for version 2 of the Google Data APIs.
# These tests attempt to connect to Google servers.
__author__ = 'Shraddha Gupta <shraddhag@google.com>'
import random
import unittest
import gdata.apps.client
import gdata.apps.data
import gdata.client
import gdata.data
import gdata.gauth
import gdata.test_config as conf
conf.options.register_option(conf.APPS_DOMAIN_OPTION)
class AppsClientTest(unittest.TestCase):
def setUp(self):
  """Creates an AppsClient, pointed at the live domain when runlive is set."""
  self.client = gdata.apps.client.AppsClient(
      domain='example.com')
  if conf.options.get_value('runlive') == 'true':
    self.client = gdata.apps.client.AppsClient(
        domain=conf.options.get_value('appsdomain'))
    if conf.options.get_value('ssl') == 'true':
      self.client.ssl = True
    conf.configure_client(self.client, 'AppsClientTest',
                          self.client.auth_service, True)

def tearDown(self):
  # Flushes/closes any recorded-session cache attached in setUp.
  conf.close_client(self.client)
def testClientConfiguration(self):
self.assertEqual('apps-apis.google.com', self.client.host)
self.assertEqual('2.0', self.client.api_version)
self.assertEqual('apps', self.client.auth_service)
self.assertEqual(
('https://apps-apis.google.com/a/feeds/user/',
'https://apps-apis.google.com/a/feeds/policies/',
'https://apps-apis.google.com/a/feeds/alias/',
'https://apps-apis.google.com/a/feeds/groups/'),
self.client.auth_scopes)
if conf.options.get_value('runlive') == 'true':
self.assertEqual(self.client.domain,
conf.options.get_value('appsdomain'))
else:
self.assertEqual(self.client.domain, 'example.com')
def testMakeUserProvisioningUri(self):
self.assertEqual('/a/feeds/%s/user/2.0' % self.client.domain,
self.client._userURL())
def testMakeNicknameProvisioningUri(self):
self.assertEqual('/a/feeds/%s/nickname/2.0' % self.client.domain,
self.client._nicknameURL())
def testCreateRetrieveUpdateDelete(self):
if not conf.options.get_value('runlive') == 'true':
return
# Either load the recording or prepare to make a live request.
conf.configure_cache(self.client, 'testCreateUpdateDelete')
rnd_number = random.randrange(0, 100001)
username = 'test_user%s' % (rnd_number)
nickname = 'test_alias%s' % (rnd_number)
new_entry = self.client.CreateUser(
user_name=username, given_name='Elizabeth', family_name='Smith',
password='password', admin='true')
self.assert_(isinstance(new_entry,
gdata.apps.data.UserEntry))
self.assertEquals(new_entry.name.given_name, 'Elizabeth')
self.assertEquals(new_entry.name.family_name, 'Smith')
self.assertEquals(new_entry.login.user_name, username)
self.assertEquals(new_entry.login.admin, 'true')
fetched_entry = self.client.RetrieveUser(user_name=username)
self.assertEquals(fetched_entry.name.given_name, 'Elizabeth')
self.assertEquals(fetched_entry.name.family_name, 'Smith')
self.assertEquals(fetched_entry.login.user_name, username)
self.assertEquals(fetched_entry.login.admin, 'true')
new_entry.name.given_name = 'Joe'
new_entry.name.family_name = 'Brown'
updated_entry = self.client.UpdateUser(
user_name=username, user_entry=new_entry)
self.assert_(isinstance(updated_entry,
gdata.apps.data.UserEntry))
self.assertEqual(updated_entry.name.given_name, 'Joe')
self.assertEqual(updated_entry.name.family_name, 'Brown')
new_nickname = self.client.CreateNickname(user_name=username,
nickname=nickname)
self.assert_(isinstance(new_nickname,
gdata.apps.data.NicknameEntry))
self.assertEquals(new_nickname.login.user_name, username)
self.assertEquals(new_nickname.nickname.name, nickname)
fetched_alias = self.client.RetrieveNickname(nickname)
self.assertEquals(fetched_alias.login.user_name, username)
self.assertEquals(fetched_alias.nickname.name, nickname)
self.client.DeleteNickname(nickname)
self.client.DeleteUser(username)
def suite():
  """Builds the suite containing the live AppsClient tests."""
  all_tests = [AppsClientTest]
  return conf.build_suite(all_tests)


if __name__ == '__main__':
  unittest.TextTestRunner().run(suite())
| Python |
#!/usr/bin/python2.4
#
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Live client tests for the Multidomain Provisioning API."""
# This module is used for version 2 of the Google Data APIs.
# These tests attempt to connect to Google servers.
__author__ = 'Claudio Cherubino <ccherubino@google.com>'
import random
import unittest
import gdata.apps.multidomain.client
import gdata.apps.multidomain.data
import gdata.client
import gdata.data
import gdata.gauth
import gdata.test_config as conf
# Allow the Apps test domain to be supplied via command line / config.
conf.options.register_option(conf.APPS_DOMAIN_OPTION)
class MultidomainProvisioningClientTest(unittest.TestCase):
  """Live CRUD tests for the Multidomain Provisioning API client.

  URI-construction tests run offline; the CRUD test only runs when the
  'runlive' option is 'true'.
  """

  def setUp(self):
    # Placeholder client so configuration/URI tests can run offline.
    self.client = gdata.apps.multidomain.client.MultiDomainProvisioningClient(
        domain='example.com')
    if conf.options.get_value('runlive') == 'true':
      self.client = gdata.apps.multidomain.client.MultiDomainProvisioningClient(
          domain=conf.options.get_value('appsdomain'))
      if conf.options.get_value('ssl') == 'true':
        self.client.ssl = True
      conf.configure_client(self.client, 'MultidomainProvisioningClientTest',
                            self.client.auth_service, True)

  def tearDown(self):
    conf.close_client(self.client)

  def testClientConfiguration(self):
    """Verifies host, API version, auth service/scopes and domain."""
    self.assertEqual('apps-apis.google.com', self.client.host)
    self.assertEqual('2.0', self.client.api_version)
    self.assertEqual('apps', self.client.auth_service)
    self.assertEqual(
        ('https://apps-apis.google.com/a/feeds/user/',
         'https://apps-apis.google.com/a/feeds/policies/',
         'https://apps-apis.google.com/a/feeds/alias/',
         'https://apps-apis.google.com/a/feeds/groups/'), self.client.auth_scopes)
    if conf.options.get_value('runlive') == 'true':
      self.assertEqual(self.client.domain, conf.options.get_value('appsdomain'))
    else:
      self.assertEqual(self.client.domain, 'example.com')

  def testMakeMultidomainUserProvisioningUri(self):
    """Checks user feed URIs: bare, with email, and with query params."""
    self.assertEqual('/a/feeds/user/2.0/%s' % self.client.domain,
                     self.client.MakeMultidomainUserProvisioningUri())
    self.assertEqual('/a/feeds/user/2.0/%s/liz@example.com'
                     % self.client.domain,
                     self.client.MakeMultidomainUserProvisioningUri(
                         email='liz@example.com'))
    self.assertEqual('/a/feeds/user/2.0/%s?start=%s'
                     % (self.client.domain, 'liz%40example.com'),
                     self.client.MakeMultidomainUserProvisioningUri(
                         params={'start': 'liz@example.com'}))

  def testMakeMultidomainAliasProvisioningUri(self):
    """Checks alias feed URIs: bare, with email, and with query params."""
    self.assertEqual('/a/feeds/alias/2.0/%s' % self.client.domain,
                     self.client.MakeMultidomainAliasProvisioningUri())
    self.assertEqual('/a/feeds/alias/2.0/%s/liz@example.com'
                     % self.client.domain,
                     self.client.MakeMultidomainAliasProvisioningUri(
                         email='liz@example.com'))
    self.assertEqual('/a/feeds/alias/2.0/%s?start=%s'
                     % (self.client.domain, 'liz%40example.com'),
                     self.client.MakeMultidomainAliasProvisioningUri(
                         params={'start': 'liz@example.com'}))

  def testCreateRetrieveUpdateDelete(self):
    """Round-trips a user, a rename, and an alias through the live API."""
    if not conf.options.get_value('runlive') == 'true':
      return
    # Either load the recording or prepare to make a live request.
    conf.configure_cache(self.client, 'testCreateUpdateDelete')
    rnd_number = random.randrange(0, 100001)
    email = 'test_user%s@%s' % (rnd_number, self.client.domain)
    alias = 'test_alias%s@%s' % (rnd_number, self.client.domain)
    new_entry = self.client.CreateUser(
        email, 'Elizabeth', 'Smith',
        '51eea05d46317fadd5cad6787a8f562be90b4446', 'true',
        hash_function='SHA-1')
    # assertTrue/assertEqual replace the deprecated assert_/assertEquals
    # aliases, matching the style already used above in this class.
    self.assertTrue(isinstance(new_entry,
                               gdata.apps.multidomain.data.UserEntry))
    self.assertEqual(new_entry.first_name, 'Elizabeth')
    self.assertEqual(new_entry.last_name, 'Smith')
    self.assertEqual(new_entry.email, email)
    self.assertEqual(new_entry.password,
                     '51eea05d46317fadd5cad6787a8f562be90b4446')
    self.assertEqual(new_entry.is_admin, 'true')
    fetched_entry = self.client.RetrieveUser(email=email)
    self.assertEqual(fetched_entry.first_name, 'Elizabeth')
    self.assertEqual(fetched_entry.last_name, 'Smith')
    self.assertEqual(fetched_entry.email, email)
    self.assertEqual(fetched_entry.is_admin, 'true')
    new_entry.first_name = 'Joe'
    new_entry.last_name = 'Brown'
    updated_entry = self.client.UpdateUser(
        email=email, user_entry=new_entry)
    self.assertTrue(isinstance(updated_entry,
                               gdata.apps.multidomain.data.UserEntry))
    self.assertEqual(updated_entry.first_name, 'Joe')
    self.assertEqual(updated_entry.last_name, 'Brown')
    new_email = 'renamed_user%s@%s' % (rnd_number, self.client.domain)
    renamed_entry = self.client.RenameUser(
        old_email=email, new_email=new_email)
    self.assertTrue(isinstance(renamed_entry,
                               gdata.apps.multidomain.data.UserRenameRequest))
    self.assertEqual(renamed_entry.new_email, new_email)
    new_alias = self.client.CreateAlias(new_email, alias)
    self.assertTrue(isinstance(new_alias,
                               gdata.apps.multidomain.data.AliasEntry))
    self.assertEqual(new_alias.user_email, new_email)
    self.assertEqual(new_alias.alias_email, alias)
    fetched_alias = self.client.RetrieveAlias(alias)
    self.assertEqual(fetched_alias.user_email, new_email)
    self.assertEqual(fetched_alias.alias_email, alias)
    # After the rename, the original address is expected to appear as an
    # alias of the new address alongside the explicitly created one.
    fetched_aliases = self.client.RetrieveAllUserAliases(new_email)
    self.assertEqual(fetched_aliases.entry[0].user_email, new_email)
    self.assertEqual(fetched_aliases.entry[0].alias_email, email)
    self.assertEqual(fetched_aliases.entry[1].user_email, new_email)
    self.assertEqual(fetched_aliases.entry[1].alias_email, alias)
    self.client.DeleteAlias(alias)
    self.client.DeleteUser(new_email)
def suite():
  """Builds the suite containing the multidomain provisioning tests."""
  all_tests = [MultidomainProvisioningClientTest]
  return conf.build_suite(all_tests)


if __name__ == '__main__':
  unittest.TextTestRunner().run(suite())
| Python |
#!/usr/bin/python2.4
#
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Data model tests for the Multidomain Provisioning API."""
__author__ = 'Claudio Cherubino <ccherubino@google.com>'
import unittest
import atom.core
from gdata import test_data
import gdata.apps.multidomain.data
import gdata.test_config as conf
class UserEntryTest(unittest.TestCase):
  """Parsing tests for multidomain UserEntry and UserFeed XML."""

  def setUp(self):
    # Parse canned XML fixtures into the multidomain data model classes.
    self.entry = atom.core.parse(test_data.MULTIDOMAIN_USER_ENTRY,
                                 gdata.apps.multidomain.data.UserEntry)
    self.feed = atom.core.parse(test_data.MULTIDOMAIN_USER_FEED,
                                gdata.apps.multidomain.data.UserFeed)

  def testUserEntryFromString(self):
    """Checks every field of a single parsed UserEntry."""
    self.assert_(isinstance(self.entry,
                            gdata.apps.multidomain.data.UserEntry))
    self.assertEquals(self.entry.first_name, 'Liz')
    self.assertEquals(self.entry.last_name, 'Smith')
    self.assertEquals(self.entry.email, 'liz@example.com')
    self.assertEquals(self.entry.password,
                      '51eea05d46317fadd5cad6787a8f562be90b4446')
    self.assertEquals(self.entry.is_admin, 'true')

  def testUserFeedFromString(self):
    """Checks both entries of a parsed two-entry UserFeed."""
    self.assertEquals(len(self.feed.entry), 2)
    self.assert_(isinstance(self.feed,
                            gdata.apps.multidomain.data.UserFeed))
    self.assert_(isinstance(self.feed.entry[0],
                            gdata.apps.multidomain.data.UserEntry))
    self.assert_(isinstance(self.feed.entry[1],
                            gdata.apps.multidomain.data.UserEntry))
    # Edit links embed the URL-escaped user email.
    self.assertEquals(
        self.feed.entry[0].find_edit_link(),
        ('https://apps-apis.google.com/a/feeds/user/2.0/example.com/'
         'admin%40example.com'))
    self.assertEquals(self.feed.entry[0].first_name, 'Joe')
    self.assertEquals(self.feed.entry[0].last_name, 'Brown')
    self.assertEquals(self.feed.entry[0].email, 'admin@example.com')
    self.assertEquals(self.feed.entry[0].is_admin, 'true')
    self.assertEquals(self.feed.entry[0].suspended, 'false')
    self.assertEquals(self.feed.entry[0].change_password_at_next_login, 'false')
    self.assertEquals(self.feed.entry[0].ip_whitelisted, 'false')
    self.assertEquals(
        self.feed.entry[1].find_edit_link(),
        ('https://apps-apis.google.com/a/feeds/user/2.0/example.com/'
         'liz%40example.com'))
    self.assertEquals(self.feed.entry[1].first_name, 'Elizabeth')
    self.assertEquals(self.feed.entry[1].last_name, 'Smith')
    self.assertEquals(self.feed.entry[1].email, 'liz@example.com')
    self.assertEquals(self.feed.entry[1].is_admin, 'true')
    self.assertEquals(self.feed.entry[1].suspended, 'false')
    self.assertEquals(self.feed.entry[1].change_password_at_next_login, 'false')
    self.assertEquals(self.feed.entry[1].ip_whitelisted, 'false')
class UserRenameRequestTest(unittest.TestCase):
  """Parsing test for the multidomain UserRenameRequest XML."""

  def setUp(self):
    # Parse the canned rename-request fixture into the data model class.
    self.entry = atom.core.parse(test_data.MULTIDOMAIN_USER_RENAME_REQUEST,
                                 gdata.apps.multidomain.data.UserRenameRequest)

  def testUserRenameRequestFromString(self):
    """Checks the type and new_email field of the parsed request."""
    self.assert_(isinstance(self.entry,
                            gdata.apps.multidomain.data.UserRenameRequest))
    self.assertEquals(self.entry.new_email, 'liz@newexample4liz.com')
class AliasEntryTest(unittest.TestCase):
  """Parsing tests for multidomain AliasEntry and AliasFeed XML."""

  def setUp(self):
    # Parse canned XML fixtures into the multidomain data model classes.
    self.entry = atom.core.parse(test_data.MULTIDOMAIN_ALIAS_ENTRY,
                                 gdata.apps.multidomain.data.AliasEntry)
    self.feed = atom.core.parse(test_data.MULTIDOMAIN_ALIAS_FEED,
                                gdata.apps.multidomain.data.AliasFeed)

  def testAliasEntryFromString(self):
    """Checks the fields of a single parsed AliasEntry."""
    self.assert_(isinstance(self.entry,
                            gdata.apps.multidomain.data.AliasEntry))
    self.assertEquals(self.entry.user_email, 'liz@example.com')
    self.assertEquals(self.entry.alias_email, 'helpdesk@gethelp_example.com')

  def testAliasFeedFromString(self):
    """Checks both entries of a parsed two-entry AliasFeed."""
    self.assertEquals(len(self.feed.entry), 2)
    self.assert_(isinstance(self.feed,
                            gdata.apps.multidomain.data.AliasFeed))
    self.assert_(isinstance(self.feed.entry[0],
                            gdata.apps.multidomain.data.AliasEntry))
    self.assert_(isinstance(self.feed.entry[1],
                            gdata.apps.multidomain.data.AliasEntry))
    # Edit links embed the URL-escaped alias email.
    self.assertEquals(
        self.feed.entry[0].find_edit_link(),
        ('https://apps-apis.google.com/a/feeds/alias/2.0/gethelp_example.com/'
         'helpdesk%40gethelp_example.com'))
    self.assertEquals(self.feed.entry[0].user_email, 'liz@example.com')
    self.assertEquals(self.feed.entry[0].alias_email,
                      'helpdesk@gethelp_example.com')
    self.assertEquals(
        self.feed.entry[1].find_edit_link(),
        ('https://apps-apis.google.com/a/feeds/alias/2.0/gethelp_example.com/'
         'support%40gethelp_example.com'))
    self.assertEquals(self.feed.entry[1].user_email, 'joe@example.com')
    self.assertEquals(self.feed.entry[1].alias_email,
                      'support@gethelp_example.com')
def suite():
  """Builds the suite with every test case defined in this module.

  UserRenameRequestTest was previously missing from the suite, so its
  test never ran via suite(); it is now included.
  """
  return conf.build_suite(
      [UserEntryTest, UserRenameRequestTest, AliasEntryTest])


if __name__ == '__main__':
  unittest.main()
| Python |
#!/usr/bin/python
#
# Copyright (C) 2007 SIOS Technology, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'tmatsuo@sios.com (Takashi Matsuo)'
import unittest
try:
from xml.etree import ElementTree
except ImportError:
from elementtree import ElementTree
import atom
import gdata.apps
import gdata.apps.service
import getpass
import time
# Module-level credentials; populated interactively in the __main__ block
# before unittest.main() runs.
apps_domain = ''
apps_username = ''
apps_password = ''
class AppsServiceUnitTest01(unittest.TestCase):
  """Live tests for the user and nickname methods of AppsService.

  Requires the module-level apps_domain/apps_username/apps_password
  credentials to be set (done interactively in __main__).
  """

  def setUp(self):
    # Timestamp postfix keeps generated account names unique per run.
    self.postfix = time.strftime("%Y%m%d%H%M%S")
    email = apps_username + '@' + apps_domain
    self.apps_client = gdata.apps.service.AppsService(
        email=email, domain=apps_domain, password=apps_password,
        source='AppsClient "Unit" Tests')
    self.apps_client.ProgrammaticLogin()
    # Set by tests that create a user, so tearDown can clean it up.
    self.created_user = None

  def tearDown(self):
    # Best-effort cleanup: delete any user a test created but did not
    # remove itself; failures here are deliberately ignored.
    if self.created_user is not None:
      try:
        self.apps_client.DeleteUser(self.created_user.login.user_name)
      except Exception, e:
        pass

  def test001RetrieveUser(self):
    """Tests RetrieveUser method"""
    try:
      self_user_entry = self.apps_client.RetrieveUser(apps_username)
    except:
      self.fail('Unexpected exception occurred')
    self.assert_(isinstance(self_user_entry, gdata.apps.UserEntry),
        "The return value of RetrieveUser() must be an instance of " +
        "apps.UserEntry: %s" % self_user_entry)
    self.assertEquals(self_user_entry.login.user_name, apps_username)

  def test002RetrieveUserRaisesException(self):
    """Tests if RetrieveUser() raises AppsForYourDomainException with
    appropriate error code"""
    try:
      non_existance = self.apps_client.RetrieveUser('nobody-' + self.postfix)
    except gdata.apps.service.AppsForYourDomainException, e:
      self.assertEquals(e.error_code, gdata.apps.service.ENTITY_DOES_NOT_EXIST)
    except Exception, e:
      self.fail('Unexpected exception occurred: %s' % e)
    else:
      self.fail('No exception occurred')

  def testSuspendAndRestoreUser(self):
    """Suspends then restores a freshly created test user."""
    # Create a test user
    user_name = 'an-apps-service-test-account-' + self.postfix
    family_name = 'Tester'
    given_name = 'Apps'
    password = '123$$abc'
    suspended = 'false'
    created_user = self.apps_client.CreateUser(
        user_name=user_name, family_name=family_name, given_name=given_name,
        password=password, suspended=suspended)
    # Suspend then restore the new user.
    entry = self.apps_client.SuspendUser(created_user.login.user_name)
    self.assertEquals(entry.login.suspended, 'true')
    entry = self.apps_client.RestoreUser(created_user.login.user_name)
    self.assertEquals(entry.login.suspended, 'false')
    # Clean up, delete the test user.
    self.apps_client.DeleteUser(user_name)

  def test003MethodsForUser(self):
    """Tests methods for user"""
    # NOTE(review): the bare triple-quoted strings further down are no-op
    # statements used as section markers; kept as-is.
    user_name = 'TakashiMatsuo-' + self.postfix
    family_name = 'Matsuo'
    given_name = 'Takashi'
    password = '123$$abc'
    suspended = 'false'
    try:
      created_user = self.apps_client.CreateUser(
          user_name=user_name, family_name=family_name, given_name=given_name,
          password=password, suspended=suspended)
    except Exception, e:
      self.assert_(False, 'Unexpected exception occurred: %s' % e)
    self.created_user = created_user
    self.assertEquals(created_user.login.user_name, user_name)
    self.assertEquals(created_user.login.suspended, suspended)
    self.assertEquals(created_user.name.family_name, family_name)
    self.assertEquals(created_user.name.given_name, given_name)
    # self.assertEquals(created_user.quota.limit,
    #                   gdata.apps.service.DEFAULT_QUOTA_LIMIT)
    """Tests RetrieveAllUsers method"""
    try:
      user_feed = self.apps_client.RetrieveAllUsers()
    except Exception, e:
      self.assert_(False, 'Unexpected exception occurred: %s' % e)
    succeed = False
    for a_entry in user_feed.entry:
      if a_entry.login.user_name == user_name:
        succeed = True
    self.assert_(succeed, 'There must be a user: %s' % user_name)
    """Tests UpdateUser method"""
    new_family_name = 'NewFamilyName'
    new_given_name = 'NewGivenName'
    new_quota = '4096'
    created_user.name.family_name = new_family_name
    created_user.name.given_name = new_given_name
    created_user.quota.limit = new_quota
    created_user.login.suspended = 'true'
    try:
      new_user_entry = self.apps_client.UpdateUser(user_name, created_user)
    except Exception, e:
      self.fail('Unexpected exception occurred: %s' % e)
    self.assert_(isinstance(new_user_entry, gdata.apps.UserEntry),
        "new user entry must be an instance of gdata.apps.UserEntry: %s"
        % new_user_entry)
    self.assertEquals(new_user_entry.name.family_name, new_family_name)
    self.assertEquals(new_user_entry.name.given_name, new_given_name)
    self.assertEquals(new_user_entry.login.suspended, 'true')
    # quota limit update does not always success.
    # self.assertEquals(new_user_entry.quota.limit, new_quota)
    nobody = gdata.apps.UserEntry()
    nobody.login = gdata.apps.Login(user_name='nobody-' + self.postfix)
    nobody.name = gdata.apps.Name(family_name='nobody', given_name='nobody')
    # make sure that there is no account with nobody- + self.postfix
    try:
      tmp_entry = self.apps_client.RetrieveUser('nobody-' + self.postfix)
    except gdata.apps.service.AppsForYourDomainException, e:
      self.assertEquals(e.error_code, gdata.apps.service.ENTITY_DOES_NOT_EXIST)
    except Exception, e:
      self.fail('Unexpected exception occurred: %s' % e)
    else:
      self.fail('No exception occurred')
    # make sure that UpdateUser fails with AppsForYourDomainException.
    try:
      new_user_entry = self.apps_client.UpdateUser('nobody-' + self.postfix,
                                                   nobody)
    except gdata.apps.service.AppsForYourDomainException, e:
      self.assertEquals(e.error_code, gdata.apps.service.ENTITY_DOES_NOT_EXIST)
    except Exception, e:
      self.fail('Unexpected exception occurred: %s' % e)
    else:
      self.fail('No exception occurred')
    """Tests DeleteUser method"""
    try:
      self.apps_client.DeleteUser(user_name)
    except Exception, e:
      self.assert_(False, 'Unexpected exception occurred: %s' % e)
    # make sure that the account deleted
    try:
      self.apps_client.RetrieveUser(user_name)
    except gdata.apps.service.AppsForYourDomainException, e:
      self.assertEquals(e.error_code, gdata.apps.service.ENTITY_DOES_NOT_EXIST)
    except Exception, e:
      self.fail('Unexpected exception occurred: %s' % e)
    else:
      self.fail('No exception occurred')
    self.created_user = None
    # make sure that DeleteUser fails with AppsForYourDomainException.
    try:
      self.apps_client.DeleteUser(user_name)
    except gdata.apps.service.AppsForYourDomainException, e:
      self.assertEquals(e.error_code, gdata.apps.service.ENTITY_DOES_NOT_EXIST)
    except Exception, e:
      self.fail('Unexpected exception occurred: %s' % e)
    else:
      self.fail('No exception occurred')

  def test004MethodsForNickname(self):
    """Tests methods for nickname"""
    # first create a user account
    user_name = 'EmmyMatsuo-' + self.postfix
    family_name = 'Matsuo'
    given_name = 'Emmy'
    password = '123$$abc'
    suspended = 'false'
    try:
      created_user = self.apps_client.CreateUser(
          user_name=user_name, family_name=family_name, given_name=given_name,
          password=password, suspended=suspended)
    except Exception, e:
      self.fail('Unexpected exception occurred: %s' % e)
    self.created_user = created_user
    # tests CreateNickname method
    nickname = 'emmy-' + self.postfix
    try:
      created_nickname = self.apps_client.CreateNickname(user_name, nickname)
    except Exception, e:
      self.fail('Unexpected exception occurred: %s' % e)
    self.assert_(isinstance(created_nickname, gdata.apps.NicknameEntry),
        "Return value of CreateNickname method must be an instance of " +
        "gdata.apps.NicknameEntry: %s" % created_nickname)
    self.assertEquals(created_nickname.login.user_name, user_name)
    self.assertEquals(created_nickname.nickname.name, nickname)
    # tests RetrieveNickname method
    retrieved_nickname = self.apps_client.RetrieveNickname(nickname)
    self.assert_(isinstance(retrieved_nickname, gdata.apps.NicknameEntry),
        "Return value of RetrieveNickname method must be an instance of " +
        "gdata.apps.NicknameEntry: %s" % retrieved_nickname)
    self.assertEquals(retrieved_nickname.login.user_name, user_name)
    self.assertEquals(retrieved_nickname.nickname.name, nickname)
    # tests RetrieveNicknames method
    nickname_feed = self.apps_client.RetrieveNicknames(user_name)
    self.assert_(isinstance(nickname_feed, gdata.apps.NicknameFeed),
        "Return value of RetrieveNicknames method must be an instance of " +
        "gdata.apps.NicknameFeed: %s" % nickname_feed)
    self.assertEquals(nickname_feed.entry[0].login.user_name, user_name)
    self.assertEquals(nickname_feed.entry[0].nickname.name, nickname)
    # tests RetrieveAllNicknames method
    nickname_feed = self.apps_client.RetrieveAllNicknames()
    self.assert_(isinstance(nickname_feed, gdata.apps.NicknameFeed),
        "Return value of RetrieveAllNicknames method must be an instance of " +
        "gdata.apps.NicknameFeed: %s" % nickname_feed)
    succeed = False
    for a_entry in nickname_feed.entry:
      if a_entry.login.user_name == user_name and \
         a_entry.nickname.name == nickname:
        succeed = True
    self.assert_(succeed,
        "There must be a nickname entry named %s." % nickname)
    # tests DeleteNickname method
    self.apps_client.DeleteNickname(nickname)
    try:
      non_existence = self.apps_client.RetrieveNickname(nickname)
    except gdata.apps.service.AppsForYourDomainException, e:
      self.assertEquals(e.error_code, gdata.apps.service.ENTITY_DOES_NOT_EXIST)
    except Exception, e:
      self.fail('Unexpected exception occurred: %s' % e)
    else:
      self.fail('No exception occurred')
class AppsServiceUnitTest02(unittest.TestCase):
  """Live tests for the email-list methods of AppsService."""

  def setUp(self):
    # Timestamp postfix keeps generated names unique per run.
    self.postfix = time.strftime("%Y%m%d%H%M%S")
    email = apps_username + '@' + apps_domain
    self.apps_client = gdata.apps.service.AppsService(
        email=email, domain=apps_domain, password=apps_password,
        source='AppsClient "Unit" Tests')
    self.apps_client.ProgrammaticLogin()
    # Track created entities so tearDown can clean them up.
    self.created_users = []
    self.created_email_lists = []

  def tearDown(self):
    # Best-effort cleanup; failures are printed rather than raised so one
    # failed deletion does not hide the test outcome.
    for user in self.created_users:
      try:
        self.apps_client.DeleteUser(user.login.user_name)
      except Exception, e:
        print e
    for email_list in self.created_email_lists:
      try:
        self.apps_client.DeleteEmailList(email_list.email_list.name)
      except Exception, e:
        print e

  def test001MethodsForEmaillist(self):
    """Tests methods for emaillist """
    user_name = 'YujiMatsuo-' + self.postfix
    family_name = 'Matsuo'
    given_name = 'Yuji'
    password = '123$$abc'
    suspended = 'false'
    try:
      user_yuji = self.apps_client.CreateUser(
          user_name=user_name, family_name=family_name, given_name=given_name,
          password=password, suspended=suspended)
    except Exception, e:
      self.fail('Unexpected exception occurred: %s' % e)
    self.created_users.append(user_yuji)
    user_name = 'TaroMatsuo-' + self.postfix
    family_name = 'Matsuo'
    given_name = 'Taro'
    password = '123$$abc'
    suspended = 'false'
    try:
      user_taro = self.apps_client.CreateUser(
          user_name=user_name, family_name=family_name, given_name=given_name,
          password=password, suspended=suspended)
    except Exception, e:
      self.fail('Unexpected exception occurred: %s' % e)
    self.created_users.append(user_taro)
    # tests CreateEmailList method
    list_name = 'list01-' + self.postfix
    try:
      created_email_list = self.apps_client.CreateEmailList(list_name)
    except Exception, e:
      self.fail('Unexpected exception occurred: %s' % e)
    self.assert_(isinstance(created_email_list, gdata.apps.EmailListEntry),
        "Return value of CreateEmailList method must be an instance of " +
        "EmailListEntry: %s" % created_email_list)
    self.assertEquals(created_email_list.email_list.name, list_name)
    self.created_email_lists.append(created_email_list)
    # tests AddRecipientToEmailList method
    try:
      recipient = self.apps_client.AddRecipientToEmailList(
          user_yuji.login.user_name + '@' + apps_domain,
          list_name)
    except Exception, e:
      self.fail('Unexpected exception occurred: %s' % e)
    self.assert_(isinstance(recipient, gdata.apps.EmailListRecipientEntry),
        "Return value of AddRecipientToEmailList method must be an instance " +
        "of EmailListRecipientEntry: %s" % recipient)
    self.assertEquals(recipient.who.email,
                      user_yuji.login.user_name + '@' + apps_domain)
    try:
      recipient = self.apps_client.AddRecipientToEmailList(
          user_taro.login.user_name + '@' + apps_domain,
          list_name)
    except Exception, e:
      self.fail('Unexpected exception occurred: %s' % e)
    # tests RetrieveAllRecipients method
    try:
      recipient_feed = self.apps_client.RetrieveAllRecipients(list_name)
    except Exception, e:
      self.fail('Unexpected exception occurred: %s' % e)
    self.assert_(isinstance(recipient_feed, gdata.apps.EmailListRecipientFeed),
        "Return value of RetrieveAllRecipients method must be an instance " +
        "of EmailListRecipientFeed: %s" % recipient_feed)
    self.assertEquals(len(recipient_feed.entry), 2)
    # tests RemoveRecipientFromEmailList method
    try:
      self.apps_client.RemoveRecipientFromEmailList(
          user_taro.login.user_name + '@' + apps_domain, list_name)
    except Exception, e:
      self.fail('Unexpected exception occurred: %s' % e)
    # make sure that removal succeeded.
    try:
      recipient_feed = self.apps_client.RetrieveAllRecipients(list_name)
    except Exception, e:
      self.fail('Unexpected exception occurred: %s' % e)
    self.assert_(isinstance(recipient_feed, gdata.apps.EmailListRecipientFeed),
        "Return value of RetrieveAllRecipients method must be an instance " +
        "of EmailListRecipientFeed: %s" % recipient_feed)
    self.assertEquals(len(recipient_feed.entry), 1)
    # tests RetrieveAllEmailLists
    try:
      list_feed = self.apps_client.RetrieveAllEmailLists()
    except Exception, e:
      self.fail('Unexpected exception occurred: %s' % e)
    self.assert_(isinstance(list_feed, gdata.apps.EmailListFeed),
        "Return value of RetrieveAllEmailLists method must be an instance" +
        "of EmailListFeed: %s" % list_feed)
    succeed = False
    for email_list in list_feed.entry:
      if email_list.email_list.name == list_name:
        succeed = True
    self.assert_(succeed, "There must be an email list named %s" % list_name)
    # tests RetrieveEmailLists method.
    try:
      list_feed = self.apps_client.RetrieveEmailLists(
          user_yuji.login.user_name + '@' + apps_domain)
    except Exception, e:
      self.fail('Unexpected exception occurred: %s' % e)
    self.assert_(isinstance(list_feed, gdata.apps.EmailListFeed),
        "Return value of RetrieveEmailLists method must be an instance" +
        "of EmailListFeed: %s" % list_feed)
    succeed = False
    for email_list in list_feed.entry:
      if email_list.email_list.name == list_name:
        succeed = True
    self.assert_(succeed, "There must be an email list named %s" % list_name)

  def testRetrieveEmailList(self):
    """Round-trips one email list through create/retrieve/delete."""
    new_list = self.apps_client.CreateEmailList('my_testing_email_list')
    retrieved_list = self.apps_client.RetrieveEmailList('my_testing_email_list')
    self.assertEquals(new_list.title.text, retrieved_list.title.text)
    self.assertEquals(new_list.id.text, retrieved_list.id.text)
    self.assertEquals(new_list.email_list.name, retrieved_list.email_list.name)
    self.apps_client.DeleteEmailList('my_testing_email_list')
    # Should not be able to retrieve the deleted list.
    try:
      removed_list = self.apps_client.RetrieveEmailList('my_testing_email_list')
      self.fail()
    except gdata.apps.service.AppsForYourDomainException:
      pass
class AppsServiceUnitTest03(unittest.TestCase):
  """Live pagination test for AppsService email-list feeds."""

  def setUp(self):
    # Timestamp postfix keeps generated names unique per run.
    self.postfix = time.strftime("%Y%m%d%H%M%S")
    email = apps_username + '@' + apps_domain
    self.apps_client = gdata.apps.service.AppsService(
        email=email, domain=apps_domain, password=apps_password,
        source='AppsClient "Unit" Tests')
    self.apps_client.ProgrammaticLogin()
    # Track created entities so tearDown can clean them up.
    self.created_users = []
    self.created_email_lists = []

  def tearDown(self):
    # Best-effort cleanup; failures are printed rather than raised.
    for user in self.created_users:
      try:
        self.apps_client.DeleteUser(user.login.user_name)
      except Exception, e:
        print e
    for email_list in self.created_email_lists:
      try:
        self.apps_client.DeleteEmailList(email_list.email_list.name)
      except Exception, e:
        print e

  def test001Pagenation(self):
    """Tests for pagination. It takes toooo long."""
    list_feed = self.apps_client.RetrieveAllEmailLists()
    quantity = len(list_feed.entry)
    # 101 new lists — presumably enough to force the feed onto a second
    # page; TODO confirm the feed page size.
    list_nums = 101
    for i in range(list_nums):
      list_name = 'list%03d-' % i + self.postfix
      try:
        created_email_list = self.apps_client.CreateEmailList(list_name)
      except Exception, e:
        self.fail('Unexpected exception occurred: %s' % e)
      self.created_email_lists.append(created_email_list)
    # The retrieval must follow pagination links to count every list.
    list_feed = self.apps_client.RetrieveAllEmailLists()
    self.assertEquals(len(list_feed.entry), list_nums + quantity)
if __name__ == '__main__':
  # These tests mutate real account data, so credentials are collected
  # interactively and the user is warned to use a disposable domain.
  print ('Google Apps Service Tests\nNOTE: Please run these tests only with '
         'a test domain. The tests may delete or update your domain\'s '
         'account data.')
  apps_domain = raw_input('Please enter your domain: ')
  apps_username = raw_input('Please enter your username of admin account: ')
  apps_password = getpass.getpass()
  unittest.main()
| Python |
#!/usr/bin/python2.4
#
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Live client tests for the Groups Provisioning API."""
# This module is used for version 2 of the Google Data APIs.
# These tests attempt to connect to Google servers.
__author__ = 'Shraddha gupta <shraddhag@google.com>'
import random
import unittest
import gdata.apps.groups.client
import gdata.apps.groups.data
import gdata.client
import gdata.data
import gdata.gauth
import gdata.test_config as conf
# Allow the Apps test domain to be supplied via command line / config.
conf.options.register_option(conf.APPS_DOMAIN_OPTION)
class GroupsProvisioningClientTest(unittest.TestCase):
  def setUp(self):
    """Builds a GroupsProvisioningClient, live or against a stub domain."""
    # Placeholder client so offline tests can run without credentials.
    self.client = gdata.apps.groups.client.GroupsProvisioningClient(
        domain='example.com')
    if conf.options.get_value('runlive') == 'true':
      self.client = gdata.apps.groups.client.GroupsProvisioningClient(
          domain=conf.options.get_value('appsdomain'))
      if conf.options.get_value('ssl') == 'true':
        self.client.ssl = True
      conf.configure_client(self.client, 'GroupsProvisioningClientTest',
                            self.client.auth_service, True)
  def tearDown(self):
    # Releases/records the client session set up by the test harness.
    conf.close_client(self.client)
def testClientConfiguration(self):
self.assertEqual('apps-apis.google.com', self.client.host)
self.assertEqual('2.0', self.client.api_version)
self.assertEqual('apps', self.client.auth_service)
self.assertEqual(
('https://apps-apis.google.com/a/feeds/user/',
'https://apps-apis.google.com/a/feeds/policies/',
'https://apps-apis.google.com/a/feeds/alias/',
'https://apps-apis.google.com/a/feeds/groups/'),
self.client.auth_scopes)
if conf.options.get_value('runlive') == 'true':
self.assertEqual(self.client.domain, conf.options.get_value('appsdomain'))
else:
self.assertEqual(self.client.domain, 'example.com')
def testMakeGroupProvisioningUri(self):
self.assertEqual('/a/feeds/group/2.0/%s' % self.client.domain,
self.client.MakeGroupProvisioningUri())
self.assertEqual('/a/feeds/group/2.0/%s/firstgroup@example.com'
% self.client.domain,
self.client.MakeGroupProvisioningUri(group_id='firstgroup@example.com'))
self.assertEqual(
'/a/feeds/group/2.0/%s?member=member1' % self.client.domain,
self.client.MakeGroupProvisioningUri(params={'member':'member1'}))
def testMakeGroupMembersUri(self):
self.assertEqual('/a/feeds/group/2.0/%s/firstgroup@example.com/member'
% self.client.domain,
self.client.MakeGroupMembersUri(group_id='firstgroup@example.com'))
self.assertEqual(
'/a/feeds/group/2.0/%s/firstgroup@example.com/member/liz@example.com'
% self.client.domain,
self.client.MakeGroupMembersUri(
group_id='firstgroup@example.com', member_id='liz@example.com'))
def testCreateRetrieveUpdateDelete(self):
if not conf.options.get_value('runlive') == 'true':
return
# Either load the recording or prepare to make a live request.
conf.configure_cache(self.client, 'testCreateUpdateDelete')
rnd_number = random.randrange(0, 100)
group_id = 'test_groupid%s@%s' % (rnd_number, self.client.domain)
group_name = 'test_groupname%s' % (rnd_number)
member_id = 'test_member%s@%s' % (rnd_number, self.client.domain)
new_group = self.client.CreateGroup(
group_id=group_id, group_name=group_name, description='Test Group',
email_permission='Domain')
self.assert_(isinstance(new_group,
gdata.apps.groups.data.GroupEntry))
self.assertEquals(new_group.group_id, group_id)
self.assertEquals(new_group.group_name, group_name)
self.assertEquals(new_group.description, 'Test Group')
self.assertEquals(new_group.email_permission, 'Domain')
fetched_entry = self.client.RetrieveGroup(group_id=group_id)
self.assert_(isinstance(fetched_entry,
gdata.apps.groups.data.GroupEntry))
self.assertEquals(new_group.group_id, group_id)
self.assertEquals(new_group.group_name, group_name)
self.assertEquals(new_group.description, 'Test Group')
self.assertEquals(new_group.email_permission, 'Domain')
new_group.group_name = 'updated name'
updated_group = self.client.UpdateGroup(
group_id=group_id, group_entry=new_group)
self.assert_(isinstance(updated_group,
gdata.apps.groups.data.GroupEntry))
self.assertEqual(updated_group.group_name, 'updated name')
new_member = self.client.AddMemberToGroup(group_id=group_id,
member_id=member_id)
self.assert_(isinstance(new_member,
gdata.apps.groups.data.GroupMemberEntry))
self.assertEquals(new_member.member_id, member_id)
fetched_member = self.client.RetrieveGroupMember(group_id=group_id,
member_id=member_id)
self.assertEquals(fetched_member.member_id, member_id)
self.client.RemoveMemberFromGroup(group_id=group_id,
member_id=member_id)
self.client.DeleteGroup(group_id=group_id)
def suite():
  """Assembles the live-client test suite for this module."""
  live_tests = [GroupsProvisioningClientTest]
  return conf.build_suite(live_tests)
# Script entry point: runs this module's suite with the plain text runner.
if __name__ == '__main__':
  unittest.TextTestRunner().run(suite())
| Python |
#!/usr/bin/python
#
# Copyright (C) 2008 Google
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test for Groups service."""
__author__ = 'google-apps-apis@googlegroups.com'
import unittest
try:
from xml.etree import ElementTree
except ImportError:
from elementtree import ElementTree
import atom
import gdata.apps
import gdata.apps.service
import gdata.apps.groups.service
import getpass
import time
# Live-test credentials. domain, admin_email and admin_password are filled
# in by the interactive prompts under __main__ at the bottom of this file;
# `username` is initialized here but not set by those prompts — presumably
# reserved for manual editing.
domain = ''
admin_email = ''
admin_password = ''
username = ''
class GroupsTest(unittest.TestCase):
  """Live tests for GroupsService against a real Google Apps domain.

  setUp provisions two throwaway users; tearDown deletes every user and
  group recorded in self.created_users / self.created_groups.
  """

  def setUp(self):
    # Timestamp suffix keeps generated user/group ids unique per run.
    self.postfix = time.strftime("%Y%m%d%H%M%S")
    # Admin client used for user provisioning (create/delete test users).
    self.apps_client = gdata.apps.service.AppsService(
        email=admin_email, domain=domain, password=admin_password,
        source='GroupsClient "Unit" Tests')
    self.apps_client.ProgrammaticLogin()
    # Client under test: the groups service.
    self.groups_client = gdata.apps.groups.service.GroupsService(
        email=admin_email, domain=domain, password=admin_password,
        source='GroupsClient "Unit" Tests')
    self.groups_client.ProgrammaticLogin()
    # Entities created during a test; tearDown cleans these up.
    self.created_users = []
    self.created_groups = []
    self.createUsers();

  def createUsers(self):
    """Creates the two test users (yujimatsuo-*, taromatsuo-*)."""
    user_name = 'yujimatsuo-' + self.postfix
    family_name = 'Matsuo'
    given_name = 'Yuji'
    password = '123$$abc'
    suspended = 'false'
    try:
      self.user_yuji = self.apps_client.CreateUser(
          user_name=user_name, family_name=family_name, given_name=given_name,
          password=password, suspended=suspended)
      print 'User ' + user_name + ' created'
    except Exception, e:
      self.fail('Unexpected exception occurred: %s' % e)
    self.created_users.append(self.user_yuji)

    user_name = 'taromatsuo-' + self.postfix
    family_name = 'Matsuo'
    given_name = 'Taro'
    password = '123$$abc'
    suspended = 'false'
    try:
      self.user_taro = self.apps_client.CreateUser(
          user_name=user_name, family_name=family_name, given_name=given_name,
          password=password, suspended=suspended)
      print 'User ' + user_name + ' created'
    except Exception, e:
      self.fail('Unexpected exception occurred: %s' % e)
    self.created_users.append(self.user_taro)

  def tearDown(self):
    """Best-effort deletion of every user and group created by the test."""
    print '\n'
    for user in self.created_users:
      try:
        self.apps_client.DeleteUser(user.login.user_name)
        print 'User ' + user.login.user_name + ' deleted'
      except Exception, e:
        # Cleanup is best-effort: log the error and keep deleting the rest.
        print e
    for group in self.created_groups:
      try:
        self.groups_client.DeleteGroup(group)
        print 'Group ' + group + ' deleted'
      except Exception, e:
        print e

  def test001GroupsMethods(self):
    """Exercises group CRUD, membership and ownership methods in order."""
    # tests CreateGroup method
    group01_id = 'group01-' + self.postfix
    group02_id = 'group02-' + self.postfix
    try:
      created_group01 = self.groups_client.CreateGroup(group01_id, 'US Sales 1',
          'Testing', gdata.apps.groups.service.PERMISSION_OWNER)
      created_group02 = self.groups_client.CreateGroup(group02_id, 'US Sales 2',
          'Testing', gdata.apps.groups.service.PERMISSION_OWNER)
    except Exception, e:
      self.fail('Unexpected exception occurred: %s' % e)
    self.assertEquals(created_group01['groupId'], group01_id)
    self.assertEquals(created_group02['groupId'], group02_id)
    # Record for tearDown cleanup.
    self.created_groups.append(group01_id)
    self.created_groups.append(group02_id)

    # tests UpdateGroup method
    try:
      updated_group = self.groups_client.UpdateGroup(group01_id, 'Updated!',
          'Testing', gdata.apps.groups.service.PERMISSION_OWNER)
    except Exception, e:
      self.fail('Unexpected exception occurred: %s' % e)
    self.assertEquals(updated_group['groupName'], 'Updated!')

    # tests RetrieveGroup method
    try:
      retrieved_group = self.groups_client.RetrieveGroup(group01_id)
    except Exception, e:
      self.fail('Unexpected exception occurred: %s' % e)
    # Retrieved ids come back fully qualified with the domain.
    self.assertEquals(retrieved_group['groupId'], group01_id + '@' + domain)

    # tests RetrieveAllGroups method
    try:
      retrieved_groups = self.groups_client.RetrieveAllGroups()
    except Exception, e:
      self.fail('Unexpected exception occurred: %s' % e)
    # Group count is compared against the domain's email-list count.
    self.assertEquals(len(retrieved_groups),
                      len(self.apps_client.RetrieveAllEmailLists().entry))

    # tests AddMemberToGroup
    try:
      added_member = self.groups_client.AddMemberToGroup(
          self.user_yuji.login.user_name, group01_id)
      self.groups_client.AddMemberToGroup(
          self.user_taro.login.user_name, group02_id)
      # Nest group01 inside group02 so indirect membership can be tested.
      self.groups_client.AddMemberToGroup(
          group01_id, group02_id)
    except Exception, e:
      self.fail('Unexpected exception occurred: %s' % e)
    self.assertEquals(added_member['memberId'],
                      self.user_yuji.login.user_name)

    # tests RetrieveGroups method
    try:
      retrieved_direct_groups = self.groups_client.RetrieveGroups(
          self.user_yuji.login.user_name, True)
      retrieved_groups = self.groups_client.RetrieveGroups(
          self.user_yuji.login.user_name, False)
    except Exception, e:
      self.fail('Unexpected exception occurred: %s' % e)
    self.assertEquals(len(retrieved_direct_groups), 1)
    # TODO: Enable this test after a directOnly bug is fixed
    #self.assertEquals(len(retrieved_groups), 2)

    # tests IsMember method
    try:
      result = self.groups_client.IsMember(
          self.user_yuji.login.user_name, group01_id)
    except Exception, e:
      self.fail('Unexpected exception occurred: %s' % e)
    self.assertEquals(result, True)

    # tests RetrieveMember method
    try:
      retrieved_member = self.groups_client.RetrieveMember(
          self.user_yuji.login.user_name, group01_id)
    except Exception, e:
      self.fail('Unexpected exception occurred: %s' % e)
    self.assertEquals(retrieved_member['memberId'],
                      self.user_yuji.login.user_name + '@' + domain)

    # tests RetrieveAllMembers method
    try:
      retrieved_members = self.groups_client.RetrieveAllMembers(group01_id)
    except Exception, e:
      self.fail('Unexpected exception occurred: %s' % e)
    self.assertEquals(len(retrieved_members), 1)

    # tests RemoveMemberFromGroup method
    try:
      self.groups_client.RemoveMemberFromGroup(self.user_yuji.login.user_name,
                                               group01_id)
      retrieved_members = self.groups_client.RetrieveAllMembers(group01_id)
    except Exception, e:
      self.fail('Unexpected exception occurred: %s' % e)
    self.assertEquals(len(retrieved_members), 0)

    # tests AddOwnerToGroup
    try:
      added_owner = self.groups_client.AddOwnerToGroup(
          self.user_yuji.login.user_name, group01_id)
    except Exception, e:
      self.fail('Unexpected exception occurred: %s' % e)
    self.assertEquals(added_owner['email'],
                      self.user_yuji.login.user_name)

    # tests IsOwner method
    try:
      result = self.groups_client.IsOwner(
          self.user_yuji.login.user_name, group01_id)
    except Exception, e:
      self.fail('Unexpected exception occurred: %s' % e)
    self.assertEquals(result, True)

    # tests RetrieveOwner method
    try:
      retrieved_owner = self.groups_client.RetrieveOwner(
          self.user_yuji.login.user_name, group01_id)
    except Exception, e:
      self.fail('Unexpected exception occurred: %s' % e)
    self.assertEquals(retrieved_owner['email'],
                      self.user_yuji.login.user_name + '@' + domain)

    # tests RetrieveAllOwners method
    try:
      retrieved_owners = self.groups_client.RetrieveAllOwners(group01_id)
    except Exception, e:
      self.fail('Unexpected exception occurred: %s' % e)
    self.assertEquals(len(retrieved_owners), 1)

    # tests RemoveOwnerFromGroup method
    try:
      self.groups_client.RemoveOwnerFromGroup(self.user_yuji.login.user_name,
                                              group01_id)
      retrieved_owners = self.groups_client.RetrieveAllOwners(group01_id)
    except Exception, e:
      self.fail('Unexpected exception occurred: %s' % e)
    self.assertEquals(len(retrieved_owners), 0)
# Script entry point: prompts interactively for live admin credentials and
# then runs the suite. Use only against a disposable test domain.
if __name__ == '__main__':
  print("""Google Apps Groups Service Tests
NOTE: Please run these tests only with a test user account.
""")
  domain = raw_input('Google Apps domain: ')  # Python 2 input
  admin_email = '%s@%s' % (raw_input('Administrator username: '), domain)
  admin_password = getpass.getpass('Administrator password: ')
  unittest.main()
| Python |
#!/usr/bin/python2.4
#
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Data model tests for the Groups Provisioning API."""
__author__ = 'Shraddha gupta <shraddhag@google.com>'
import unittest
import atom.core
from gdata import test_data
import gdata.apps.groups.data
import gdata.test_config as conf
class GroupEntryTest(unittest.TestCase):
  """Parsing tests for GroupEntry and GroupFeed (API version 2).

  Uses deprecated-alias-free unittest methods (assertEqual/assertTrue)
  instead of assertEquals/assert_, which were removed in Python 3.12.
  """

  def setUp(self):
    """Parses the canned group entry and feed XML from test_data."""
    self.entry = atom.core.parse(test_data.GROUP_ENTRY,
                                 gdata.apps.groups.data.GroupEntry, 2)
    self.feed = atom.core.parse(test_data.GROUP_FEED,
                                gdata.apps.groups.data.GroupFeed, 2)

  def testGroupEntryFromString(self):
    """A single entry exposes id, name, permission and description."""
    self.assertTrue(isinstance(self.entry,
                               gdata.apps.groups.data.GroupEntry))
    self.assertEqual(self.entry.group_id, 'trial@srkapps.com')
    self.assertEqual(self.entry.group_name, 'Trial')
    self.assertEqual(self.entry.email_permission, 'Domain')
    self.assertEqual(self.entry.description, 'For try')

  def testGroupFeedFromString(self):
    """The group feed parses into two entries with edit links intact."""
    self.assertEqual(len(self.feed.entry), 2)
    self.assertTrue(isinstance(self.feed,
                               gdata.apps.groups.data.GroupFeed))
    self.assertTrue(isinstance(self.feed.entry[0],
                               gdata.apps.groups.data.GroupEntry))
    self.assertTrue(isinstance(self.feed.entry[1],
                               gdata.apps.groups.data.GroupEntry))
    self.assertEqual(
        self.feed.entry[0].find_edit_link(),
        ('http://apps-apis.google.com/a/feeds/group/2.0/srkapps.com/'
         'firstgroup%40srkapps.com'))
    self.assertEqual(self.feed.entry[0].group_id, 'firstgroup@srkapps.com')
    self.assertEqual(self.feed.entry[0].group_name, 'FirstGroup')
    self.assertEqual(self.feed.entry[0].email_permission, 'Domain')
    self.assertEqual(self.feed.entry[0].description, 'First group')
    self.assertEqual(
        self.feed.entry[1].find_edit_link(),
        ('http://apps-apis.google.com/a/feeds/group/2.0/srkapps.com/'
         'trial%40srkapps.com'))
    self.assertEqual(self.feed.entry[1].group_id, 'trial@srkapps.com')
    self.assertEqual(self.feed.entry[1].group_name, 'Trial')
    self.assertEqual(self.feed.entry[1].email_permission, 'Domain')
    self.assertEqual(self.feed.entry[1].description, 'For try')
class GroupMemberEntryTest(unittest.TestCase):
  """Parsing tests for GroupMemberEntry and GroupMemberFeed.

  Uses assertEqual/assertTrue in place of the deprecated
  assertEquals/assert_ aliases (removed in Python 3.12).
  """

  def setUp(self):
    """Parses the canned member entry and feed XML from test_data."""
    # NOTE(review): unlike GroupEntryTest.setUp, no explicit API version is
    # passed to atom.core.parse here — confirm the default is intended.
    self.entry = atom.core.parse(test_data.GROUP_MEMBER_ENTRY,
                                 gdata.apps.groups.data.GroupMemberEntry)
    self.feed = atom.core.parse(test_data.GROUP_MEMBER_FEED,
                                gdata.apps.groups.data.GroupMemberFeed)

  def testGroupMemberEntryFromString(self):
    """A single member entry exposes id, type and direct-member flag."""
    self.assertTrue(isinstance(self.entry,
                               gdata.apps.groups.data.GroupMemberEntry))
    self.assertEqual(self.entry.member_id, 'abcd12310@srkapps.com')
    self.assertEqual(self.entry.member_type, 'User')
    self.assertEqual(self.entry.direct_member, 'true')

  def testGroupMemberFeedFromString(self):
    """The member feed parses into two entries with edit links intact."""
    self.assertEqual(len(self.feed.entry), 2)
    self.assertTrue(isinstance(self.feed,
                               gdata.apps.groups.data.GroupMemberFeed))
    self.assertTrue(isinstance(self.feed.entry[0],
                               gdata.apps.groups.data.GroupMemberEntry))
    self.assertTrue(isinstance(self.feed.entry[1],
                               gdata.apps.groups.data.GroupMemberEntry))
    self.assertEqual(
        self.feed.entry[0].find_edit_link(),
        ('http://apps-apis.google.com/a/feeds/group/2.0/srkapps.com/trial/'
         'member/abcd12310%40srkapps.com'))
    self.assertEqual(self.feed.entry[0].member_id, 'abcd12310@srkapps.com')
    self.assertEqual(self.feed.entry[0].member_type, 'User')
    self.assertEqual(self.feed.entry[0].direct_member, 'true')
    self.assertEqual(
        self.feed.entry[1].find_edit_link(),
        ('http://apps-apis.google.com/a/feeds/group/2.0/srkapps.com/trial/'
         'member/neha.technocrat%40srkapps.com'))
    self.assertEqual(self.feed.entry[1].member_id,
                     'neha.technocrat@srkapps.com')
    self.assertEqual(self.feed.entry[1].member_type, 'User')
    self.assertEqual(self.feed.entry[1].direct_member, 'true')
def suite():
  """Collects the data-model test cases defined in this module."""
  cases = [GroupEntryTest, GroupMemberEntryTest]
  return conf.build_suite(cases)
# Script entry point: discover and run the tests in this module.
if __name__ == '__main__':
  unittest.main()
| Python |
Subsets and Splits
SQL Console for ajibawa-2023/Python-Code-Large
Provides a useful breakdown of language distribution in the training data, showing which languages have the most samples and helping identify potential imbalances across different language groups.