code stringlengths 1 1.72M | language stringclasses 1 value |
|---|---|
#!/usr/bin/env python
import os,sys, logging, socket
import subprocess
import string
from exceptions import Exception
from threading import Thread
from optparse import OptionParser
from subprocess import Popen, PIPE, STDOUT
import re
import glob
sys.stderr=sys.stdout
# ARFF header template for NetMate flow features, split into two parts so
# the nominal values of the final `opendpi_class` attribute can be inserted
# between them at write time:
#   [0] everything up to and including "opendpi_class {"
#   [1] the closing "}" plus the trailing comment lines and "@DATA"
arff_head_s=('''@RELATION <netmate>
@ATTRIBUTE srcip STRING
@ATTRIBUTE srcport NUMERIC
@ATTRIBUTE dstip STRING
@ATTRIBUTE dstport NUMERIC
@ATTRIBUTE proto NUMERIC
@ATTRIBUTE total_fpackets NUMERIC
@ATTRIBUTE total_fvolume NUMERIC
@ATTRIBUTE total_bpackets NUMERIC
@ATTRIBUTE total_bvolume NUMERIC
@ATTRIBUTE min_fpktl NUMERIC
@ATTRIBUTE mean_fpktl NUMERIC
@ATTRIBUTE max_fpktl NUMERIC
@ATTRIBUTE std_fpktl NUMERIC
@ATTRIBUTE min_bpktl NUMERIC
@ATTRIBUTE mean_bpktl NUMERIC
@ATTRIBUTE max_bpktl NUMERIC
@ATTRIBUTE std_bpktl NUMERIC
@ATTRIBUTE min_fiat NUMERIC
@ATTRIBUTE mean_fiat NUMERIC
@ATTRIBUTE max_fiat NUMERIC
@ATTRIBUTE std_fiat NUMERIC
@ATTRIBUTE min_biat NUMERIC
@ATTRIBUTE mean_biat NUMERIC
@ATTRIBUTE max_biat NUMERIC
@ATTRIBUTE std_biat NUMERIC
@ATTRIBUTE duration NUMERIC
@ATTRIBUTE min_active NUMERIC
@ATTRIBUTE mean_active NUMERIC
@ATTRIBUTE max_active NUMERIC
@ATTRIBUTE std_active NUMERIC
@ATTRIBUTE min_idle NUMERIC
@ATTRIBUTE mean_idle NUMERIC
@ATTRIBUTE max_idle NUMERIC
@ATTRIBUTE std_idle NUMERIC
@ATTRIBUTE sflow_fpackets NUMERIC
@ATTRIBUTE sflow_fbytes NUMERIC
@ATTRIBUTE sflow_bpackets NUMERIC
@ATTRIBUTE sflow_bbytes NUMERIC
@ATTRIBUTE fpsh_cnt NUMERIC
@ATTRIBUTE bpsh_cnt NUMERIC
@ATTRIBUTE furg_cnt NUMERIC
@ATTRIBUTE burg_cnt NUMERIC
@ATTRIBUTE opendpi_class {''' , '''}
% you need to add a nominal class attribute!
% @ATTRIBUTE class {class0,class1}
@DATA
''')
if __name__ == '__main__':
usage = "usage: %prog [options] arg1 arg2"
parser = OptionParser(usage=usage)
parser.add_option("-v", "--verbose",
action="store_true", dest="verbose", default=False,
help="make lots of noise")
parser.add_option("-q", "--quiet",
action="store_false", dest="verbose",
help="be very quiet")
parser.add_option("-f", "--from_path",dest="from_path",
metavar="INPUT_PATH", help="read from INPUT_PATH")
parser.add_option("-o", "--output",dest="output_arff_file_name",
metavar="OUTPUT_FILES", help="write output OUTPUT_FILES")
(options, args) = parser.parse_args()
output_real_arff_file_name=""
items = set()
if os.path.isdir(options.output_arff_file_name):
output_real_arff_file_name= os.path.join(output_arff_file_name,'default.arff' )
elif options.output_arff_file_name:
output_real_arff_file_name=options.output_arff_file_name
else:
output_real_arff_file_name="./default.arff"
if options.from_path:
if os.path.isdir(options.from_path):
for f in glob.glob(os.path.join(options.from_path, '*.merged.arff')):
if os.path.isfile(f):
items.add(os.path.abspath(f))
elif '*' in options.from_path:
for n in glob.glob(options.from_path):
items.add(os.path.abspath(f))
else:
print "not set input file/path"
#exit()
for arg in args:
# if os.path.isdir(arg):
# for f in glob.glob(os.path.join(arg, '*.merged.arff')):
# if os.path.isfile(f):
# items.add(os.path.abspath(f))
#print arg
if '*' in arg:
for n in glob.glob(arg):
items.add(os.path.abspath(n))
elif os.path.isfile(arg):
items.add(os.path.abspath(arg))
else:
pass
#add arff header into output_real_arff_file_name
if os.path.isfile(output_real_arff_file_name):
os.remove(output_real_arff_file_name)
output_file = open(output_real_arff_file_name,'a')
#output_file.write(arff_head_s[0])
#output_file.write(arff_head_s[1])
#from collections import deque
applist=[]
#writelines_header=[]
writelines_data=[]
for input_arff_filename in items:
foundData = False
p = open(input_arff_filename,'r')
for line in p.readlines():
prog=re.compile("^@DATA")
m = prog.match(line)
if m:
foundData = True
continue
if ( foundData==True and ( not line.isspace() ) and (not re.match('^@',line)) and (not re.match('^%',line)) ):
appname = input_arff_filename.split('@')[0].split('/')[-1]
print appname
writline=line.strip()+appname+"\n"
opendpi_class = writline.split(',')[-1].strip()
if opendpi_class not in applist:
applist.append(opendpi_class)
writelines_data.append( writline )
p.close()
#write output arff file
output_file.write(arff_head_s[0])
for i in applist:
output_file.write( "%s," % i )
output_file.write(arff_head_s[1])
for ii in writelines_data:
output_file.write(ii)
output_file.close()
exit()
| Python |
#!/usr/bin/env python
import os,sys,logging
import string
from exceptions import Exception
from optparse import OptionParser
import re
import glob
import yaml
# ARFF header template split into 7 parts so that three nominal attribute
# value lists can be interleaved when the header is written:
#   [0] attributes up to and including "open_class {"
#   [1] "}" closing open_class
#   [2] opening of "app_class {"   [3] its closing "}"
#   [4] opening of "cata_class {"  [5] its closing "}"
#   [6] trailing comment lines and "@DATA"
arff_head_s=('''@RELATION <netmate>
@ATTRIBUTE srcip STRING
@ATTRIBUTE srcport NUMERIC
@ATTRIBUTE dstip STRING
@ATTRIBUTE dstport NUMERIC
@ATTRIBUTE proto NUMERIC
@ATTRIBUTE total_fpackets NUMERIC
@ATTRIBUTE total_fvolume NUMERIC
@ATTRIBUTE total_bpackets NUMERIC
@ATTRIBUTE total_bvolume NUMERIC
@ATTRIBUTE min_fpktl NUMERIC
@ATTRIBUTE mean_fpktl NUMERIC
@ATTRIBUTE max_fpktl NUMERIC
@ATTRIBUTE std_fpktl NUMERIC
@ATTRIBUTE min_bpktl NUMERIC
@ATTRIBUTE mean_bpktl NUMERIC
@ATTRIBUTE max_bpktl NUMERIC
@ATTRIBUTE std_bpktl NUMERIC
@ATTRIBUTE min_fiat NUMERIC
@ATTRIBUTE mean_fiat NUMERIC
@ATTRIBUTE max_fiat NUMERIC
@ATTRIBUTE std_fiat NUMERIC
@ATTRIBUTE min_biat NUMERIC
@ATTRIBUTE mean_biat NUMERIC
@ATTRIBUTE max_biat NUMERIC
@ATTRIBUTE std_biat NUMERIC
@ATTRIBUTE duration NUMERIC
@ATTRIBUTE min_active NUMERIC
@ATTRIBUTE mean_active NUMERIC
@ATTRIBUTE max_active NUMERIC
@ATTRIBUTE std_active NUMERIC
@ATTRIBUTE min_idle NUMERIC
@ATTRIBUTE mean_idle NUMERIC
@ATTRIBUTE max_idle NUMERIC
@ATTRIBUTE std_idle NUMERIC
@ATTRIBUTE sflow_fpackets NUMERIC
@ATTRIBUTE sflow_fbytes NUMERIC
@ATTRIBUTE sflow_bpackets NUMERIC
@ATTRIBUTE sflow_bbytes NUMERIC
@ATTRIBUTE fpsh_cnt NUMERIC
@ATTRIBUTE bpsh_cnt NUMERIC
@ATTRIBUTE furg_cnt NUMERIC
@ATTRIBUTE burg_cnt NUMERIC
@ATTRIBUTE open_class {''' ,
'}',
'\n@ATTRIBUTE app_class {',
'}',
'\n@ATTRIBUTE cata_class {',
'}',
'''
% you need to add a nominal class attribute!
% @ATTRIBUTE class {class0,class1}
@DATA
''')
def LoadStream(FileName_s='default.yaml'):
    """Parse the YAML file *FileName_s* and return the deserialized object.

    NOTE(security): yaml.load() without an explicit Loader can construct
    arbitrary Python objects from untrusted input; switch to
    yaml.safe_load() if the catalogue file can come from outside.
    """
    # open() instead of the Python-2-only file() builtin; close the handle
    # even on parse errors (the original leaked it).
    f = open(FileName_s, 'r')
    try:
        return yaml.load(f)
    finally:
        f.close()
def SaveStream(stream, FileName_s='default.yaml'):
    """Serialize *stream* to the YAML file *FileName_s*."""
    # open() instead of the Python-2-only file() builtin; the with-statement
    # guarantees the handle is closed even if yaml.dump() raises.
    with open(FileName_s, 'w') as f:
        yaml.dump(stream, f)
def FindCataFromYAML(realapp_name, fromyaml):
    """Return the 'CatalogueName' of the first catalogue entry whose
    'Applications' groups contain *realapp_name* (case-insensitive).

    Returns "Others" when no catalogue lists the application.
    """
    wanted = realapp_name.lower()
    for catalogue in fromyaml:
        groups = catalogue['Applications']
        # Flatten every application name of every group in this catalogue.
        names = (app for group in groups for app in groups[group])
        if any(app.lower() == wanted for app in names):
            return catalogue['CatalogueName']
    return "Others"
if __name__ == '__main__':
usage = "usage: %prog [options] arg1 arg2"
parser = OptionParser(usage=usage)
parser.add_option("-v", "--verbose",
action="store_true", dest="verbose", default=False,
help="make lots of noise")
parser.add_option("-q", "--quiet",
action="store_false", dest="verbose",default=True,
help="be very quiet")
parser.add_option("-f", "--from_arff",dest="from_arff",
metavar="INPUT_ARFF", help="read from INPUT_ARFF")
parser.add_option("-o", "--output_arff",dest="output_arff_file_name",
metavar="OUTPUT_FILES", help="write output OUTPUT_FILES")
parser.add_option("-c","--catalogue",dest="cataloguefile",
metavar="catalogue", help="read from catalogue.yaml",)
parser.add_option("-d","--details",dest="isdetails",default=True,
action="store_true",
help="parser ")
(options, args) = parser.parse_args()
output_real_arff_file_name=""
items = set()
if os.path.isdir(options.output_arff_file_name):
output_real_arff_file_name= os.path.join(output_arff_file_name,'catalogue.arff' )
elif options.output_arff_file_name:
output_real_arff_file_name=options.output_arff_file_name
else:
output_real_arff_file_name="./catalogue.arff"
if options.cataloguefile:
catalogue_yaml_file=options.cataloguefile
else:
catalogue_yaml_file="catalogue.yaml"
if options.from_arff:
if os.path.isdir(options.from_arff):
for f in glob.glob(os.path.join(options.from_path, '*.arff')):
if os.path.isfile(f):
items.add(os.path.abspath(f))
if os.path.isfile(options.from_arff):
items.add(options.from_arff)
elif '*' in options.from_arff:
for n in glob.glob(options.from_path):
items.add(os.path.abspath(f))
else:
print "not set input file/path"
#exit()
for arg in args:
# if os.path.isdir(arg):
# for f in glob.glob(os.path.join(arg, '*.merged.arff')):
# if os.path.isfile(f):
# items.add(os.path.abspath(f))
#print arg
if '*' in arg:
for n in glob.glob(arg):
items.add(os.path.abspath(n))
elif os.path.isfile(arg):
items.add(os.path.abspath(arg))
else:
pass
#add arff header into output_real_arff_file_name
if os.path.isfile(output_real_arff_file_name):
os.remove(output_real_arff_file_name)
#output_file = open(output_real_arff_file_name,'a')
#output_file.write(arff_head_s[0])
#output_file.write(arff_head_s[1])
#from collections import deque
applist=[]
opendpi_class_list=[]
#writelines_header=[]
writelines_data=[]
for input_arff_filename in items:
foundData = False
p = open(input_arff_filename,'r')
for line in p.readlines():
prog=re.compile("^@DATA")
m = prog.match(line)
if m:
foundData = True
continue
if ( foundData==True and ( not line.isspace() ) and (not re.match('^@',line)) and (not re.match('^%',line)) ):
#appname = input_arff_filename.split('@')[0].split('/')[-1]
#print appname
writline=line
opendpi_class=''
pp_line = writline.split(',')[-1].strip()
p_line=pp_line.split('_')[-3:]
if not p_line[0] == 'notfound' :
#print p_line[-1]
opendpi_class=p_line[-1]
else:
#print ("ignore notfound apps")
continue
#a=re.compile('^[ \t]*\r?\n?$')
a=re.compile('^[ \t]*\r?\n?$')
if not a.match(opendpi_class):
if opendpi_class not in applist:
applist.append(opendpi_class)
if pp_line not in opendpi_class_list:
opendpi_class_list.append(pp_line)
#print (opendpi_class)
#for i in writline.split(',')[:-1]:
# writelines_data.append( i+"," )
writelines_data.append(writline.strip()+","+opendpi_class+"\n")
else:
print ("ignore blank apps:"),
print (opendpi_class)
continue
p.close()
#write output arff file
f_yaml=LoadStream(catalogue_yaml_file)
realapp_list=[]
cata_list=[]
final_data_to_write=[]
for write_item in writelines_data:
splited=write_item.strip().split(',')
realapp=splited[-1]
if options.isdetails:
cata=FindCataFromYAML(splited[-2],f_yaml)
else:
cata=FindCataFromYAML(splited[-1],f_yaml)
if cata not in cata_list:
cata_list.append(cata)
if realapp not in realapp_list:
realapp_list.append(realapp)
final_data_to_write.append(write_item.strip()+","+cata+"\n")
output_file = open(output_real_arff_file_name,'a')
#opendpi_class_list=[]
output_file.write(arff_head_s[0])
print("opendpi_class:")
for i in opendpi_class_list:
output_file.write( "%s," % i )
print("\t%s"%i)
output_file.write(arff_head_s[1])
output_file.write(arff_head_s[2])
print ("realapp_class:")
for i in realapp_list:
output_file.write( "%s,"% i )
print ("\t%s"%i)
output_file.write(arff_head_s[3])
output_file.write(arff_head_s[4])
print ("catalogue_class:")
for i in cata_list:
output_file.write("%s,"%i)
print ("\t%s"%i)
output_file.write(arff_head_s[5])
output_file.write(arff_head_s[6])
for ii in final_data_to_write:
output_file.write(ii)
output_file.close()
exit()
| Python |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" A pure Python GlobalConfig implementation. Copyright (c) 2010, flk3y """
__version__ = '0.1'
import os
import sys
# Process-wide default configuration dictionary.  The first entries are
# file names / paths, the is* entries are boolean feature toggles; the
# precise semantics of each flag are defined by whichever module reads
# GlobalConfig (not visible in this file).
GlobalConfig = {'xmlfilename':"test.xml",
                'pcapfilename':"test.pcap",
                'outputpathname':"outputdir",
                'appname':"default_app",
                'tmp_arff_filename':"/tmp/testdata.log",
                'tmp_netdude_path':"/tmp/netdude_demux",
                'isALL':False,
                'isSplit':False,
                'isMerge':False,
                'isFeature':False,
                'isVerbose':False,
                'ismergearff':False,
                'isNetdude':False,
                }
# Module-level accumulator for XML lines; empty here, presumably filled by
# other modules importing this one — confirm against the callers.
xmlbuffer=[]
def mkdir_p(newdir):
    """Create *newdir* the way a good ``mkdir -p`` should.

    - already exists: silently complete
    - a regular file is in the way: raise OSError
    - parent directory(ies) do not exist: create them as well

    Raises:
        OSError: if *newdir* (or an ancestor) exists as a regular file.
    """
    if os.path.isdir(newdir):
        pass
    elif os.path.isfile(newdir):
        raise OSError("a file with the same name as the desired "
                      "dir, '%s', already exists." % newdir)
    else:
        head, tail = os.path.split(newdir)
        if head and not os.path.isdir(head):
            # BUG FIX: recurse so that *all* missing ancestors are created.
            # The original called os.mkdir(head), which fails whenever the
            # grandparent is missing too — contradicting the docstring.
            mkdir_p(head)
        if tail:
            os.mkdir(newdir)
| Python |
# SCons build script: generate the protobuf sources, build five static
# libraries and link the final programs one directory above.
env = Environment()
env.Append( CPPPATH=['agent/','front/','network/','thread', 'connection'])
# Run protoc once to produce network/com.pb.{cc,h} from com.proto; the
# returned node list feeds the Network library below.
myfile = env.Command( ['network/com.pb.cc', 'network/com.pb.h'], 'com.proto', 'cd ./$SOURCE.dir && protoc --cpp_out=network/ $SOURCE.file')
print myfile
NetworkLib = env.StaticLibrary("Network",myfile)
ThreadLib = env.StaticLibrary("Thread", Glob('thread/*.cpp'))
FrontEngineLib = env.StaticLibrary("FrontEngine", Glob('front/*.cpp'))
AgentLib = env.StaticLibrary("Agent", Glob('agent/*.cpp'))
ConnectionLib = env.StaticLibrary("Connection", Glob('connection/*.cpp'))
# Construction variables are expanded at build time, so appending the
# flags after the library targets still affects their compilation.
env.Append( CPPFLAGS=['-Wall', '-std=gnu++0x', '-O0'] )
env.Append( LINKFLAGS="--static" )
# NOTE(review): 'protobuf' is listed twice — possibly deliberate to satisfy
# static-link symbol-resolution order; confirm before deduplicating.
libraries = ['protobuf','boost_system','boost_filesystem', 'pthread','pcap++','pcap','protobuf','nsl','z','m']
env.Append( LIBS = [AgentLib,NetworkLib,FrontEngineLib,ThreadLib,ConnectionLib, libraries])
env.Program('../program', ['main.cpp'])
env.Program('../front_engine', ['front.cpp'])
env.Program('../agent_jpg', ['agent_jpg.cpp'])
# The two connection binaries override LIBS to link only what they need.
env.Program('../main_connection_server', ['main_connection_server.cpp'], LIBS = [ConnectionLib,NetworkLib, libraries])
env.Program('../main_connection_client', ['main_connection_client.cpp'], LIBS = [ConnectionLib,NetworkLib, libraries])
| Python |
# Top-level SConstruct: delegate to the project SConscript, building
# out-of-source under ./build.
SConscript('SConscript', variant_dir='build')
| Python |
#!/usr/bin/env python
# Copyright (c) 2008 Qtrac Ltd. All rights reserved.
# This program or module is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation, either version 2 of the License, or
# version 3 of the License, or (at your option) any later version. It is
# provided for educational purposes and is distributed in the hope that
# it will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
# the GNU General Public License for more details.
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future_builtins import *
import platform
import sys
from PyQt4.QtCore import (QSize, QString, QVariant, Qt, SIGNAL)
from PyQt4.QtGui import (QAction, QApplication, QColor, QFont,
QFontMetrics, QIcon, QKeySequence, QMenu, QPixmap,
QTextCharFormat, QTextEdit)
class RichTextLineEdit(QTextEdit):
    """A single-line rich-text editor (PyQt4).

    Behaves like a QLineEdit but supports per-character formatting —
    bold, italic, underline, strike-out, font family, super/subscript
    and colour — via keyboard shortcuts (Ctrl+B/I/U), a colour menu
    (Ctrl+K) and a text-effect menu (Ctrl+M).  Pressing Enter emits the
    returnPressed() signal instead of inserting a newline.
    """

    # Identifiers carried in the text-effect menu actions' data.
    (Bold, Italic, Underline, StrikeOut, Monospaced, Sans, Serif,
     NoSuperOrSubscript, Subscript, Superscript) = range(10)

    def __init__(self, parent=None):
        super(RichTextLineEdit, self).__init__(parent)
        self.monofamily = QString("courier")
        self.sansfamily = QString("helvetica")
        self.seriffamily = QString("times")
        # Single-line behaviour: no wrapping, no scrollbars, Tab moves focus.
        self.setLineWrapMode(QTextEdit.NoWrap)
        self.setTabChangesFocus(True)
        self.setVerticalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
        self.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
        fm = QFontMetrics(self.font())
        # Larger factor on Windows compensates for its font metrics.
        h = int(fm.height() * (1.4 if platform.system() == "Windows"
                else 1.2))
        self.setMinimumHeight(h)
        self.setMaximumHeight(int(h * 1.2))
        self.setToolTip("Press <b>Ctrl+M</b> for the text effects "
                        "menu and <b>Ctrl+K</b> for the color menu")

    def toggleItalic(self):
        """Invert the italic state at the cursor/selection."""
        self.setFontItalic(not self.fontItalic())

    def toggleUnderline(self):
        """Invert the underline state at the cursor/selection."""
        self.setFontUnderline(not self.fontUnderline())

    def toggleBold(self):
        """Toggle between normal and bold weight at the cursor/selection."""
        self.setFontWeight(QFont.Normal
                if self.fontWeight() > QFont.Normal else QFont.Bold)

    def sizeHint(self):
        """Preferred size: just wide enough for the document content."""
        return QSize(self.document().idealWidth() + 5,
                     self.maximumHeight())

    def minimumSizeHint(self):
        """Minimum size: roughly four wide characters."""
        fm = QFontMetrics(self.font())
        return QSize(fm.width("WWWW"), self.minimumHeight())

    def contextMenuEvent(self, event):
        """Replace the default context menu with the text-effect menu."""
        self.textEffectMenu()

    def keyPressEvent(self, event):
        """Handle the formatting shortcuts; Enter emits returnPressed()."""
        if event.modifiers() & Qt.ControlModifier:
            handled = False
            if event.key() == Qt.Key_B:
                self.toggleBold()
                handled = True
            elif event.key() == Qt.Key_I:
                self.toggleItalic()
                handled = True
            elif event.key() == Qt.Key_K:
                self.colorMenu()
                handled = True
            elif event.key() == Qt.Key_M:
                self.textEffectMenu()
                handled = True
            elif event.key() == Qt.Key_U:
                self.toggleUnderline()
                handled = True
            if handled:
                event.accept()
                return
        if event.key() in (Qt.Key_Enter, Qt.Key_Return):
            # Single-line widget: Enter signals completion instead of
            # inserting a newline.
            self.emit(SIGNAL("returnPressed()"))
            event.accept()
        else:
            QTextEdit.keyPressEvent(self, event)

    def colorMenu(self):
        """Pop up a menu of preset text colours at the cursor position."""
        pixmap = QPixmap(22, 22)
        menu = QMenu("Colour")
        for text, color in (
                ("&Black", Qt.black),
                ("B&lue", Qt.blue),
                ("Dark Bl&ue", Qt.darkBlue),
                ("&Cyan", Qt.cyan),
                ("Dar&k Cyan", Qt.darkCyan),
                ("&Green", Qt.green),
                ("Dark Gr&een", Qt.darkGreen),
                ("M&agenta", Qt.magenta),
                ("Dark Mage&nta", Qt.darkMagenta),
                ("&Red", Qt.red),
                ("&Dark Red", Qt.darkRed)):
            color = QColor(color)
            # The colour swatch doubles as the menu icon; the chosen colour
            # travels to setColor() via the action's data.
            pixmap.fill(color)
            action = menu.addAction(QIcon(pixmap), text, self.setColor)
            action.setData(QVariant(color))
        self.ensureCursorVisible()
        menu.exec_(self.viewport().mapToGlobal(
                   self.cursorRect().center()))

    def setColor(self):
        """Apply the colour stored on the triggering menu action."""
        action = self.sender()
        if action is not None and isinstance(action, QAction):
            color = QColor(action.data())
            if color.isValid():
                self.setTextColor(color)

    def textEffectMenu(self):
        """Pop up a checkable menu of all supported text effects."""
        format = self.currentCharFormat()
        menu = QMenu("Text Effect")
        # (label, shortcut, effect id, currently-active?)
        for text, shortcut, data, checked in (
                ("&Bold", "Ctrl+B", RichTextLineEdit.Bold,
                 self.fontWeight() > QFont.Normal),
                ("&Italic", "Ctrl+I", RichTextLineEdit.Italic,
                 self.fontItalic()),
                ("Strike &out", None, RichTextLineEdit.StrikeOut,
                 format.fontStrikeOut()),
                ("&Underline", "Ctrl+U", RichTextLineEdit.Underline,
                 self.fontUnderline()),
                ("&Monospaced", None, RichTextLineEdit.Monospaced,
                 format.fontFamily() == self.monofamily),
                ("&Serifed", None, RichTextLineEdit.Serif,
                 format.fontFamily() == self.seriffamily),
                ("S&ans Serif", None, RichTextLineEdit.Sans,
                 format.fontFamily() == self.sansfamily),
                ("&No super or subscript", None,
                 RichTextLineEdit.NoSuperOrSubscript,
                 format.verticalAlignment() ==
                 QTextCharFormat.AlignNormal),
                ("Su&perscript", None, RichTextLineEdit.Superscript,
                 format.verticalAlignment() ==
                 QTextCharFormat.AlignSuperScript),
                ("Subs&cript", None, RichTextLineEdit.Subscript,
                 format.verticalAlignment() ==
                 QTextCharFormat.AlignSubScript)):
            action = menu.addAction(text, self.setTextEffect)
            if shortcut is not None:
                action.setShortcut(QKeySequence(shortcut))
            action.setData(QVariant(data))
            action.setCheckable(True)
            action.setChecked(checked)
        self.ensureCursorVisible()
        menu.exec_(self.viewport().mapToGlobal(
                   self.cursorRect().center()))

    def setTextEffect(self):
        """Apply the effect identified by the triggering action's data."""
        action = self.sender()
        if action is not None and isinstance(action, QAction):
            what = action.data().toInt()[0]
            # The three simple toggles delegate and return early.
            if what == RichTextLineEdit.Bold:
                self.toggleBold()
                return
            if what == RichTextLineEdit.Italic:
                self.toggleItalic()
                return
            if what == RichTextLineEdit.Underline:
                self.toggleUnderline()
                return
            # The remaining effects are applied through the char format.
            format = self.currentCharFormat()
            if what == RichTextLineEdit.Monospaced:
                format.setFontFamily(self.monofamily)
            elif what == RichTextLineEdit.Serif:
                format.setFontFamily(self.seriffamily)
            elif what == RichTextLineEdit.Sans:
                format.setFontFamily(self.sansfamily)
            if what == RichTextLineEdit.StrikeOut:
                format.setFontStrikeOut(not format.fontStrikeOut())
            if what == RichTextLineEdit.NoSuperOrSubscript:
                format.setVerticalAlignment(
                        QTextCharFormat.AlignNormal)
            elif what == RichTextLineEdit.Superscript:
                format.setVerticalAlignment(
                        QTextCharFormat.AlignSuperScript)
            elif what == RichTextLineEdit.Subscript:
                format.setVerticalAlignment(
                        QTextCharFormat.AlignSubScript)
            self.mergeCurrentCharFormat(format)

    def toSimpleHtml(self):
        """Return the document as minimal HTML.

        Uses only <sub>/<sup>, <u>, <i>, <b>, <s> and <font> wrappers
        (no CSS), walking every fragment of every text block.
        """
        html = QString()
        black = QColor(Qt.black)
        block = self.document().begin()
        while block.isValid():
            iterator = block.begin()
            while iterator != block.end():
                fragment = iterator.fragment()
                if fragment.isValid():
                    format = fragment.charFormat()
                    family = format.fontFamily()
                    color = format.foreground().color()
                    text = Qt.escape(fragment.text())
                    if (format.verticalAlignment() ==
                        QTextCharFormat.AlignSubScript):
                        text = QString("<sub>%1</sub>").arg(text)
                    elif (format.verticalAlignment() ==
                          QTextCharFormat.AlignSuperScript):
                        text = QString("<sup>%1</sup>").arg(text)
                    if format.fontUnderline():
                        text = QString("<u>%1</u>").arg(text)
                    if format.fontItalic():
                        text = QString("<i>%1</i>").arg(text)
                    if format.fontWeight() > QFont.Normal:
                        text = QString("<b>%1</b>").arg(text)
                    if format.fontStrikeOut():
                        text = QString("<s>%1</s>").arg(text)
                    # Emit a <font> wrapper only for a non-default colour
                    # or an explicit font family.
                    if color != black or not family.isEmpty():
                        attribs = ""
                        if color != black:
                            attribs += ' color="{0}"'.format(color.name())
                        if not family.isEmpty():
                            attribs += ' face="{0}"'.format(family)
                        text = (QString("<font%1>%2</font>")
                                .arg(attribs).arg(text))
                    html += text
                iterator += 1
            block = block.next()
        return html
if __name__ == "__main__":
app = QApplication(sys.argv)
lineedit = RichTextLineEdit()
lineedit.show()
lineedit.setWindowTitle("RichTextEdit")
app.exec_()
print(unicode(lineedit.toHtml()))
print(unicode(lineedit.toPlainText()))
print(unicode(lineedit.toSimpleHtml()))
| Python |
#!/usr/bin/env python
# Copyright (c) 2008 Qtrac Ltd. All rights reserved.
# This program or module is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation, either version 2 of the License, or
# version 3 of the License, or (at your option) any later version. It is
# provided for educational purposes and is distributed in the hope that
# it will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
# the GNU General Public License for more details.
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future_builtins import *
import platform
import sys
from PyQt4.QtCore import (QSize, QString, QVariant, Qt, SIGNAL)
from PyQt4.QtGui import (QAction, QApplication, QColor, QFont,
QFontMetrics, QIcon, QKeySequence, QMenu, QPixmap,
QTextCharFormat, QTextEdit)
class RichTextLineEdit(QTextEdit):
    """A single-line rich-text editor (PyQt4).

    Behaves like a QLineEdit but supports per-character formatting —
    bold, italic, underline, strike-out, font family, super/subscript
    and colour — via keyboard shortcuts (Ctrl+B/I/U), a colour menu
    (Ctrl+K) and a text-effect menu (Ctrl+M).  Pressing Enter emits the
    returnPressed() signal instead of inserting a newline.
    """

    # Identifiers carried in the text-effect menu actions' data.
    (Bold, Italic, Underline, StrikeOut, Monospaced, Sans, Serif,
     NoSuperOrSubscript, Subscript, Superscript) = range(10)

    def __init__(self, parent=None):
        super(RichTextLineEdit, self).__init__(parent)
        self.monofamily = QString("courier")
        self.sansfamily = QString("helvetica")
        self.seriffamily = QString("times")
        # Single-line behaviour: no wrapping, no scrollbars, Tab moves focus.
        self.setLineWrapMode(QTextEdit.NoWrap)
        self.setTabChangesFocus(True)
        self.setVerticalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
        self.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
        fm = QFontMetrics(self.font())
        # Larger factor on Windows compensates for its font metrics.
        h = int(fm.height() * (1.4 if platform.system() == "Windows"
                else 1.2))
        self.setMinimumHeight(h)
        self.setMaximumHeight(int(h * 1.2))
        self.setToolTip("Press <b>Ctrl+M</b> for the text effects "
                        "menu and <b>Ctrl+K</b> for the color menu")

    def toggleItalic(self):
        """Invert the italic state at the cursor/selection."""
        self.setFontItalic(not self.fontItalic())

    def toggleUnderline(self):
        """Invert the underline state at the cursor/selection."""
        self.setFontUnderline(not self.fontUnderline())

    def toggleBold(self):
        """Toggle between normal and bold weight at the cursor/selection."""
        self.setFontWeight(QFont.Normal
                if self.fontWeight() > QFont.Normal else QFont.Bold)

    def sizeHint(self):
        """Preferred size: just wide enough for the document content."""
        return QSize(self.document().idealWidth() + 5,
                     self.maximumHeight())

    def minimumSizeHint(self):
        """Minimum size: roughly four wide characters."""
        fm = QFontMetrics(self.font())
        return QSize(fm.width("WWWW"), self.minimumHeight())

    def contextMenuEvent(self, event):
        """Replace the default context menu with the text-effect menu."""
        self.textEffectMenu()

    def keyPressEvent(self, event):
        """Handle the formatting shortcuts; Enter emits returnPressed()."""
        if event.modifiers() & Qt.ControlModifier:
            handled = False
            if event.key() == Qt.Key_B:
                self.toggleBold()
                handled = True
            elif event.key() == Qt.Key_I:
                self.toggleItalic()
                handled = True
            elif event.key() == Qt.Key_K:
                self.colorMenu()
                handled = True
            elif event.key() == Qt.Key_M:
                self.textEffectMenu()
                handled = True
            elif event.key() == Qt.Key_U:
                self.toggleUnderline()
                handled = True
            if handled:
                event.accept()
                return
        if event.key() in (Qt.Key_Enter, Qt.Key_Return):
            # Single-line widget: Enter signals completion instead of
            # inserting a newline.
            self.emit(SIGNAL("returnPressed()"))
            event.accept()
        else:
            QTextEdit.keyPressEvent(self, event)

    def colorMenu(self):
        """Pop up a menu of preset text colours at the cursor position."""
        pixmap = QPixmap(22, 22)
        menu = QMenu("Colour")
        for text, color in (
                ("&Black", Qt.black),
                ("B&lue", Qt.blue),
                ("Dark Bl&ue", Qt.darkBlue),
                ("&Cyan", Qt.cyan),
                ("Dar&k Cyan", Qt.darkCyan),
                ("&Green", Qt.green),
                ("Dark Gr&een", Qt.darkGreen),
                ("M&agenta", Qt.magenta),
                ("Dark Mage&nta", Qt.darkMagenta),
                ("&Red", Qt.red),
                ("&Dark Red", Qt.darkRed)):
            color = QColor(color)
            # The colour swatch doubles as the menu icon; the chosen colour
            # travels to setColor() via the action's data.
            pixmap.fill(color)
            action = menu.addAction(QIcon(pixmap), text, self.setColor)
            action.setData(QVariant(color))
        self.ensureCursorVisible()
        menu.exec_(self.viewport().mapToGlobal(
                   self.cursorRect().center()))

    def setColor(self):
        """Apply the colour stored on the triggering menu action."""
        action = self.sender()
        if action is not None and isinstance(action, QAction):
            color = QColor(action.data())
            if color.isValid():
                self.setTextColor(color)

    def textEffectMenu(self):
        """Pop up a checkable menu of all supported text effects."""
        format = self.currentCharFormat()
        menu = QMenu("Text Effect")
        # (label, shortcut, effect id, currently-active?)
        for text, shortcut, data, checked in (
                ("&Bold", "Ctrl+B", RichTextLineEdit.Bold,
                 self.fontWeight() > QFont.Normal),
                ("&Italic", "Ctrl+I", RichTextLineEdit.Italic,
                 self.fontItalic()),
                ("Strike &out", None, RichTextLineEdit.StrikeOut,
                 format.fontStrikeOut()),
                ("&Underline", "Ctrl+U", RichTextLineEdit.Underline,
                 self.fontUnderline()),
                ("&Monospaced", None, RichTextLineEdit.Monospaced,
                 format.fontFamily() == self.monofamily),
                ("&Serifed", None, RichTextLineEdit.Serif,
                 format.fontFamily() == self.seriffamily),
                ("S&ans Serif", None, RichTextLineEdit.Sans,
                 format.fontFamily() == self.sansfamily),
                ("&No super or subscript", None,
                 RichTextLineEdit.NoSuperOrSubscript,
                 format.verticalAlignment() ==
                 QTextCharFormat.AlignNormal),
                ("Su&perscript", None, RichTextLineEdit.Superscript,
                 format.verticalAlignment() ==
                 QTextCharFormat.AlignSuperScript),
                ("Subs&cript", None, RichTextLineEdit.Subscript,
                 format.verticalAlignment() ==
                 QTextCharFormat.AlignSubScript)):
            action = menu.addAction(text, self.setTextEffect)
            if shortcut is not None:
                action.setShortcut(QKeySequence(shortcut))
            action.setData(QVariant(data))
            action.setCheckable(True)
            action.setChecked(checked)
        self.ensureCursorVisible()
        menu.exec_(self.viewport().mapToGlobal(
                   self.cursorRect().center()))

    def setTextEffect(self):
        """Apply the effect identified by the triggering action's data."""
        action = self.sender()
        if action is not None and isinstance(action, QAction):
            what = action.data().toInt()[0]
            # The three simple toggles delegate and return early.
            if what == RichTextLineEdit.Bold:
                self.toggleBold()
                return
            if what == RichTextLineEdit.Italic:
                self.toggleItalic()
                return
            if what == RichTextLineEdit.Underline:
                self.toggleUnderline()
                return
            # The remaining effects are applied through the char format.
            format = self.currentCharFormat()
            if what == RichTextLineEdit.Monospaced:
                format.setFontFamily(self.monofamily)
            elif what == RichTextLineEdit.Serif:
                format.setFontFamily(self.seriffamily)
            elif what == RichTextLineEdit.Sans:
                format.setFontFamily(self.sansfamily)
            if what == RichTextLineEdit.StrikeOut:
                format.setFontStrikeOut(not format.fontStrikeOut())
            if what == RichTextLineEdit.NoSuperOrSubscript:
                format.setVerticalAlignment(
                        QTextCharFormat.AlignNormal)
            elif what == RichTextLineEdit.Superscript:
                format.setVerticalAlignment(
                        QTextCharFormat.AlignSuperScript)
            elif what == RichTextLineEdit.Subscript:
                format.setVerticalAlignment(
                        QTextCharFormat.AlignSubScript)
            self.mergeCurrentCharFormat(format)

    def toSimpleHtml(self):
        """Return the document as minimal HTML.

        Uses only <sub>/<sup>, <u>, <i>, <b>, <s> and <font> wrappers
        (no CSS), walking every fragment of every text block.
        """
        html = QString()
        black = QColor(Qt.black)
        block = self.document().begin()
        while block.isValid():
            iterator = block.begin()
            while iterator != block.end():
                fragment = iterator.fragment()
                if fragment.isValid():
                    format = fragment.charFormat()
                    family = format.fontFamily()
                    color = format.foreground().color()
                    text = Qt.escape(fragment.text())
                    if (format.verticalAlignment() ==
                        QTextCharFormat.AlignSubScript):
                        text = QString("<sub>%1</sub>").arg(text)
                    elif (format.verticalAlignment() ==
                          QTextCharFormat.AlignSuperScript):
                        text = QString("<sup>%1</sup>").arg(text)
                    if format.fontUnderline():
                        text = QString("<u>%1</u>").arg(text)
                    if format.fontItalic():
                        text = QString("<i>%1</i>").arg(text)
                    if format.fontWeight() > QFont.Normal:
                        text = QString("<b>%1</b>").arg(text)
                    if format.fontStrikeOut():
                        text = QString("<s>%1</s>").arg(text)
                    # Emit a <font> wrapper only for a non-default colour
                    # or an explicit font family.
                    if color != black or not family.isEmpty():
                        attribs = ""
                        if color != black:
                            attribs += ' color="{0}"'.format(color.name())
                        if not family.isEmpty():
                            attribs += ' face="{0}"'.format(family)
                        text = (QString("<font%1>%2</font>")
                                .arg(attribs).arg(text))
                    html += text
                iterator += 1
            block = block.next()
        return html
if __name__ == "__main__":
app = QApplication(sys.argv)
lineedit = RichTextLineEdit()
lineedit.show()
lineedit.setWindowTitle("RichTextEdit")
app.exec_()
print(unicode(lineedit.toHtml()))
print(unicode(lineedit.toPlainText()))
print(unicode(lineedit.toSimpleHtml()))
| Python |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2011 flykeysky<at>gmail.com. All rights reserved.
# This program or module is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation, either version 2 of the License, or
# version 3 of the License, or (at your option) any later version. It is
# provided for educational purposes and is distributed in the hope that
# it will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
# the GNU General Public License for more details.
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future_builtins import *
import os, sys
from PyQt4.QtCore import (QChar, QFile, QDir, QString, QTimer, QVariant, Qt,
SIGNAL, QFileInfo)
from PyQt4.QtGui import (QApplication, QDialog, QHBoxLayout, QLabel,
QListWidget, QListWidgetItem, QMessageBox, QPushButton,
QSplitter, QTableWidget, QTableWidgetItem, QTreeWidget,
QTreeWidgetItem, QVBoxLayout, QWidget, QWizard, QFileDialog)
import flows
import configwizard
import sigutils
#from configwizardui import Ui_ConfigWizard
# qt_mac_set_native_menubar only exists in Mac builds of PyQt4; use its
# importability to detect whether we are running on OS X.
MAC = True
try:
    from PyQt4.QtGui import qt_mac_set_native_menubar
except ImportError:
    MAC = False
class MainForm(QDialog):
    """Main dialog: a tree of input flows on the left, a table of selected
    flows on the right, and a button row driving the configure / extract /
    generate workflow."""

    def __init__(self, parent=None):
        super(MainForm, self).__init__(parent)
        # --- widgets ------------------------------------------------------
        treeLabel = QLabel("Input Flows Tree")
        self.treeWidget = QTreeWidget()
        # Allow multi-selection and drag re-ordering inside the tree.
        self.treeWidget.setSelectionMode(self.treeWidget.ExtendedSelection)
        self.treeWidget.setDragDropMode(self.treeWidget.InternalMove)
        self.treeWidget.setDragEnabled(True)
        self.treeWidget.setDropIndicatorShown(True)
        treeLabel.setBuddy(self.treeWidget)
        tableLabel = QLabel("Selected Flows Table")
        self.tableWidget = QTableWidget()
        self.tableWidget.setSelectionBehavior(QTableWidget.SelectRows)
        tableLabel.setBuddy(self.tableWidget)
        configurewizardButton = QPushButton("Run &Config")
        extractButton = QPushButton("E&xtract flows")
        generatorwizardButton = QPushButton("&Generator")
        addFlowButton = QPushButton("&Add Flow")
        removeFlowButton = QPushButton("&Remove Flow")
        refreshFlowButton = QPushButton("Re&fresh")
        quitButton = QPushButton("&Quit")
        if not MAC:
            # On non-Mac platforms keep keyboard focus on the data widgets
            # rather than the push buttons.
            addFlowButton.setFocusPolicy(Qt.NoFocus)
            removeFlowButton.setFocusPolicy(Qt.NoFocus)
            quitButton.setFocusPolicy(Qt.NoFocus)
            configurewizardButton.setFocusPolicy(Qt.NoFocus)
            generatorwizardButton.setFocusPolicy(Qt.NoFocus)
        # --- layout: tree | table inside a splitter, buttons underneath ---
        splitter = QSplitter(Qt.Horizontal)
        # tree pane
        vbox = QVBoxLayout()
        vbox.addWidget(treeLabel)
        vbox.addWidget(self.treeWidget)
        widget = QWidget()
        widget.setLayout(vbox)
        splitter.addWidget(widget)
        # table pane
        vbox = QVBoxLayout()
        vbox.addWidget(tableLabel)
        vbox.addWidget(self.tableWidget)
        widget = QWidget()
        widget.setLayout(vbox)
        splitter.addWidget(widget)
        buttonLayout = QHBoxLayout()
        buttonLayout.addWidget(configurewizardButton)
        buttonLayout.addWidget(extractButton)
        buttonLayout.addStretch()
        buttonLayout.addWidget(addFlowButton)
        buttonLayout.addStretch()
        buttonLayout.addWidget(removeFlowButton)
        buttonLayout.addWidget(refreshFlowButton)
        buttonLayout.addStretch()
        buttonLayout.addWidget(generatorwizardButton)
        buttonLayout.addWidget(quitButton)
        layout = QVBoxLayout()
        layout.addWidget(splitter)
        layout.addLayout(buttonLayout)
        self.setLayout(layout)
        # --- signal wiring --------------------------------------------------
        self.connect(configurewizardButton, SIGNAL("clicked()"), self.configurewiz)
        self.connect(extractButton, SIGNAL("clicked()"), self.extractflow)
        self.connect(addFlowButton, SIGNAL("clicked()"), self.addFlow)
        self.connect(removeFlowButton, SIGNAL("clicked()"), self.removeFlow)
        self.connect(refreshFlowButton, SIGNAL("clicked()"), self.refreshtable)
        self.connect(generatorwizardButton, SIGNAL("clicked()"), self.generatePayloadFileListFile)
        self.connect(quitButton, SIGNAL("clicked()"), self.accept)
        # --- persistent state -----------------------------------------------
        self.flows = flows.FlowContainer(QString("flows.dat"))
        self.selectedflows = flows.FlowContainer(QString("selectedflows.dat"))
        self.setWindowTitle("Flows (dict)")
        self.myGlobalConfig = {}
        self.PayloadFullname_list = []
        self.payloadfullnamelist_file = '/tmp/payloadfullnamelist'
        self.Payload5tuple_list = []
        # Defer the (potentially slow) data load until the event loop runs.
        QTimer.singleShot(0, self.initialLoad)
def initialLoad(self):
if not QFile.exists(self.flows.filename):
#for flow in flows.generateFakeFlows():
#self.flows.addFlow(flow)
self.flows.dirty = False
else:
try:
self.flows.load()
except IOError, e:
QMessageBox.warning(self, "Flows - Error",
"Failed to load: {0}".format(e))
if not QFile.exists(self.selectedflows.filename):
#for flow in flows.generateFakeFlows():
# self.selectedflows.addFlow(flow)
self.selectedflows.dirty = False
else:
try:
self.selectedflows.load()
except IOError, e:
QMessageBox.warning(self, "Flows - Error",
"Failed to load: {0}".format(e))
self.myGlobalConfig = sigutils.LoadYAMLStream(configwizard.CONFIGFileName)
self.populateTable()
self.tableWidget.sortItems(0)
self.populateTree()
def configurewiz(self):
myapp = configwizard.StartConfigWizard(self)
myapp.show()
def reject(self):
self.accept()
def accept(self):
if ((self.selectedflows.dirty or self.flows.dirty) and
QMessageBox.question(self, "Flows - Save?",
"Save unsaved changes?",
QMessageBox.Yes | QMessageBox.No) ==
QMessageBox.Yes):
try:
self.flows.save()
self.selectedflows.save()
except IOError, e:
QMessageBox.warning(self, "Flows - Error",
"Failed to save: {0}".format(e))
QDialog.accept(self)
def extractflow(self):
#load yaml file
#self.myGlobalConfig = sigutils.LoadYAMLStream(configwizard.CONFIGFileName)
print("extracting...")
print("%s\t%s\t%s" % (self.myGlobalConfig['ARFFFileName'], \
self.myGlobalConfig['InputPayloadPath_s'], \
self.myGlobalConfig['OutputSigDir_s']))
if (self.myGlobalConfig['ARFFFileName'] == '' or \
self.myGlobalConfig['InputPayloadPath_s'] == '' or \
self.myGlobalConfig['OutputSigDir_s'] == ''):
self.configurewiz()
self.FindnLoadPayloadFiles()
self.extractFromARFF()
def FindnLoadPayloadFiles(self):
m_PayloadFilesPath = self.myGlobalConfig['InputPayloadPath_s']
#m_payloadfullname_list = []
for (thisDir, subsHere, filesHere) in os.walk(str(m_PayloadFilesPath)):
for filename in filesHere:
(shortname, extension) = os.path.splitext(filename)
payloadfullname = os.path.join(thisDir, filename)
if(os.path.isfile(payloadfullname) and (extension == ".fldat")):
#print(payloadfullname)
self.PayloadFullname_list.append(payloadfullname)
self.Payload5tuple_list.append(shortname)
#self.PayloadFullname_list = m_payloadfullname_list
#print (self.Payload5tuple_list)
f_name = self.payloadfullnamelist_file
f = open(f_name, 'w')
for i in self.PayloadFullname_list:
#print(i)
f.write(i + "\n")
#sigutils.SaveYAMLStream(f_name,self.PayloadFullname_list)
f.close()
def extractFromARFF(self):
import re
attribute_prog = re.compile("^@ATTRIBUTE\ ")
data_prog = re.compile("^@DATA")
pARFFFile = open(self.myGlobalConfig['ARFFFileName'], 'r')
feature_data = []
all_index = {}
feature_count = 0
selectedfeature_names = ['max_fpktl', 'min_bpktl', 'std_bpktl', 'min_biat', 'sflow_fbytes', 'fpsh_cnt' ]
selectedfivetupe_names = ['srcip', 'srcport', 'proto', 'dstip', 'dstport']
selected_app_names = ['open_class'] #, 'app_class', 'cata_class']
foundData = False
flows_found_count = 0
flows_all_count = 0
flows_missed_count = 0
for line in pARFFFile.readlines():
attribute_re = attribute_prog.search(line)
if attribute_re:
o = line.split()
# to build a dict for 5-tuples, features and application info.
print (feature_count, o[1])
appendqueue_cmd = "all_index[\'%s\']= %d" % (str(o[1].strip()), feature_count)
appendqueue_code = compile(appendqueue_cmd, '', 'exec')
exec(appendqueue_code)
feature_count += 1
continue
if 0:#foundData == False: #for debug
for i in selectedfeature_names:
print(i)
if all_index.has_key(i):
print(i, all_index[i])
m = data_prog.match(line)
if m:
foundData = True
#print ("...")
if (foundData == True and (\
not line.isspace()) \
and (not re.match('^@', line)) \
and (not re.match('^%', line)) \
):
l = line.strip().split(',')
#print(l)
feature_data.append(l)
m_opendpi =''
m_l4 =''
m_processname =''
m_feature =''
m_payloadfilename =''
m_5tuple = ''
#print (l[ all_index['proto'] ])
flows_all_count += 1
if (l[ all_index['proto'] ] == '17' or l[ all_index['proto'] ] == '6'):
count = 0
for ii in selectedfivetupe_names:
if ii == 'proto':
if l[ all_index['proto'] ] == '17':
m_5tuple += 'UDP' + '-'
m_l4 = 'UDP'
else:#l[ all_index['proto'] ] == '6'
m_5tuple += 'TCP' + '-'
m_l4 = 'TCP'
elif (count < len(selectedfivetupe_names) - 1):
m_5tuple += l[all_index[ii]] + '-'
else:
m_5tuple += l[all_index[ii]]
count += 1
print ( m_5tuple, len(selectedfivetupe_names) )
#
if (m_5tuple in self.Payload5tuple_list):
flows_found_count += 1
print ("---------%d flow found-----------" % (flows_found_count))
m_payloadfilename = self.myGlobalConfig['InputPayloadPath_s'] +'/'+ m_5tuple + '.fldat'
print (m_payloadfilename)
#print('payloadfile is: %s/%s.fldat' \
# % (self.myGlobalConfig['InputPayloadPath_s'] , m_5tuple) )
count = 0
for ii in selectedfeature_names:
#print(ii, len(selectedfeature_names))
if (count < len(selectedfeature_names) -1):
m_feature += l[all_index[ii]] + ','
else:
m_feature += l[all_index[ii]]
count += 1
print (m_feature, len(selectedfeature_names) ) #<-- this the features
# extract opendpi
m_opendpi = l[all_index['open_class']]
print (m_opendpi)
m_opendpi_splited = m_opendpi.strip().split('_')
#print (m_opendpi_splited)
m_processname = m_opendpi_splited[-1]
print (m_processname)
flow = flows.Flow(m_opendpi, m_l4, m_processname, m_5tuple , m_feature, m_payloadfilename)
self.flows.addFlow(flow)
else:
print('no payload found.')
flows_missed_count +=1
print ("--------------------\n%s found flows\n%s missed flows\n%s flows in all" % \
(flows_found_count,flows_missed_count,flows_all_count))
pARFFFile.close()
self.populateTree()
def populateTable(self, selectedFlow=None):
selected = None
self.tableWidget.clear()
self.tableWidget.setSortingEnabled(True)
self.tableWidget.setRowCount(len(self.selectedflows))
headers = ["OpenDPI", "L4", "ProcessName", "FiveTuples", "FEATURES", "PayloadFileName"]
self.tableWidget.setColumnCount(len(headers))
self.tableWidget.setHorizontalHeaderLabels(headers)
for row, flow in enumerate(self.selectedflows):
item = QTableWidgetItem(flow.opendpi)
item.setData(Qt.UserRole, QVariant(long(id(flow))))
if selectedFlow is not None and selectedFlow == id(flow):
selected = item
self.tableWidget.setItem(row, flows.OPENDPI, item)
self.tableWidget.setItem(row, flows.L4,
QTableWidgetItem(flow.l4))
self.tableWidget.setItem(row, flows.PROCESSNAME,
QTableWidgetItem(flow.processname))
self.tableWidget.setItem(row, flows.FIVETUPLE,
QTableWidgetItem(flow.fivetuple))
self.tableWidget.setItem(row, flows.FEATURES,
QTableWidgetItem(flow.features))
self.tableWidget.setItem(row, flows.PAYLOADFILENAME,
QTableWidgetItem(flow.payloadfilename))
self.tableWidget.setSortingEnabled(True)
self.tableWidget.resizeColumnsToContents()
if selected is not None:
selected.setSelected(True)
self.tableWidget.setCurrentItem(selected)
def populateTree(self, selectedFlow=None):
selected = None
self.treeWidget.clear()
self.treeWidget.setColumnCount(3)
self.treeWidget.setHeaderLabels(["ProcessName/L4/OpenDPI", "FEATURES", "PayloadFileName"])
self.treeWidget.setItemsExpandable(True)
parentFromProcessName = {}
parentFromProcessNameL4 = {}
parentFromProcessNameL4OpenDPI = {}
for flow in self.flows.inProcessL4Order():
ancestor = parentFromProcessName.get(flow.processname)
if ancestor is None:
ancestor = QTreeWidgetItem(self.treeWidget, [flow.processname])
parentFromProcessName[flow.processname] = ancestor
processnameL4 = flow.processname + "/" + flow.l4
parent = parentFromProcessNameL4.get(processnameL4)
if parent is None:
parent = QTreeWidgetItem(ancestor, [flow.l4])
parentFromProcessNameL4[processnameL4] = parent
processnameL4OpenDPI = processnameL4 + "/" + flow.opendpi
subparent = parentFromProcessNameL4OpenDPI.get(processnameL4OpenDPI)
if subparent is None:
subparent = QTreeWidgetItem(parent, [flow.opendpi])
parentFromProcessNameL4OpenDPI[processnameL4OpenDPI] = subparent
item = QTreeWidgetItem(subparent, [flow.fivetuple, QString("%L1").arg(flow.features) , \
QString("%L1").arg(flow.payloadfilename)])
item.setTextAlignment(1, Qt.AlignRight | Qt.AlignVCenter)
if selectedFlow is not None and selectedFlow == id(flow):
selected = item
self.treeWidget.expandItem(subparent)
self.treeWidget.expandItem(parent)
self.treeWidget.expandItem(ancestor)
self.treeWidget.resizeColumnToContents(0)
self.treeWidget.resizeColumnToContents(1)
if selected is not None:
selected.setSelected(True)
self.treeWidget.setCurrentItem(selected)
def generatePayloadFileListFile(self):
#print("generating payloadfile:%s..."%( str( self.myGlobalConfig['OutputSigDir_s'] ) ))
fd = QFileDialog(self)
m_outputfilename = fd.getSaveFileName( self,"Output file to...", self.myGlobalConfig['OutputSigDir_s'], "" )
m_outputfile = open(m_outputfilename,'w')
for i in self.selectedflows:
print(i.payloadfilename)
m_outputfile.write("%s\n"%(i.payloadfilename) )
m_outputfile.close()
fi = QFileInfo(m_outputfilename)
out_path = fi.dir().absolutePath()
cmd_1 = ( "d2o -l %s -o %s/data -x %s/offsets" % (m_outputfilename, out_path, out_path) )
print(cmd_1)
os.system(cmd_1)
datafilename = ("%s/data" % (out_path) )
print(datafilename)
if ( not os.path.isfile(datafilename) ):
cmd_2 = ( "mksary -i %s.ary %s" % (datafilename,datafilename) )
#cmd_3 = ( "" % () )
def addFlow(self):
#flow = flows.Flow(" Unknown", " Unknown", " Unknown")
for i in self.treeWidget.selectedItems ():
if (i.childCount() > 0):
print ("TODO: skipping, instead of select all children")
else:
print ("%d\t%s\t%s%s" % (i.childCount(), i.text(0), i.text(1), i.text(2)))
print (i.parent().text(0), i.parent().parent().text(0), i.parent().parent().parent().text(0))
m_fivetuple = QString (i.text(0))
m_feature = QString(i.text(1))
m_payloadfilename = i.text(2)
m_opendpi = QString(i.parent().text(0))
m_l4 = QString(i.parent().parent().text(0))
m_processname = QString(i.parent().parent().parent().text(0))
flow = flows.Flow(m_opendpi, m_l4, m_processname, m_fivetuple , m_feature, m_payloadfilename)
flow.fivetuple = QString(i.text(0))
self.selectedflows.addFlow(flow)
self.populateTable(id(flow))
def refreshtable(self):
self.populateTable()
def currentTableFlow(self):
item = self.tableWidget.item(self.tableWidget.currentRow(), 0)
if item is None:
return None
return self.selectedflows.flow(item.data(Qt.UserRole).toLongLong()[0])
def removeFlow1(self):
flow = self.currentTableFlow()
if flow is None:
return
self.selectedflows.removeFlow(flow)
self.populateTable()
def removeFlow2(self):
flowitemlist = self.tableWidget.selectedItems()
#flow = self.currentTableFlow()
for item in flowitemlist:
if item is None:
return
fl = self.selectedflows.flow( item.data(Qt.UserRole).toLongLong()[0] )
self.selectedflows.removeFlow(fl)
def removeFlow(self):
self.removeFlow2()
self.populateTable()
def main():
    """Create the Qt application, show the main window and run the event loop."""
    application = QApplication(sys.argv)
    window = MainForm()
    window.show()
    application.exec_()


# Process command-line arguments.
if __name__ == '__main__':
    main()
    exit()
| Python |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2011 flykeysky<at>gmail.com. All rights reserved.
# This program or module is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation, either version 2 of the License, or
# version 3 of the License, or (at your option) any later version. It is
# provided for educational purposes and is distributed in the hope that
# it will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
# the GNU General Public License for more details.
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future_builtins import *
import platform
from PyQt4.QtCore import (QAbstractTableModel, QDataStream, QFile,
QIODevice, QModelIndex, QRegExp, QSize, QString, QVariant, Qt,
SIGNAL)
from PyQt4.QtGui import (QApplication, QColor, QComboBox, QLineEdit,
QSpinBox, QStyle, QStyledItemDelegate, QTextDocument, QTextEdit)
import richtextlineedit
# Column indices for the six flow fields, shared by the table/model code.
OPENDPI, L4, PROCESSNAME, FIVETUPLE, FEATURES, PAYLOADFILENAME = range(6)
# Binary file-format markers written and checked by save()/load().
MAGIC_NUMBER = 0x570C4
FILE_VERSION = 1
class Flow(object):
    """One network flow record: OpenDPI label, L4 protocol, process name,
    five-tuple key, feature string and payload file path (all QStrings).

    Ordering and equality compare ONLY the five-tuple, case-insensitively
    and locale-aware; hashing stays identity-based (flows are keyed by
    id() in FlowContainer).
    """

    def __init__(self, opendpi="unknown", l4="unknown_L4", processname="unknown_process", \
                 fivetuple="0.0.0.0", features="", payload=""):
        self.opendpi = QString(opendpi)
        self.l4 = QString(l4)
        self.processname = QString(processname)
        self.fivetuple = QString(fivetuple)
        self.features = QString(features)
        self.payloadfilename = QString(payload)

    def __hash__(self):
        # Identity hash on purpose: equal five-tuples may still be
        # distinct container entries.
        return super(Flow, self).__hash__()

    def __lt__(self, other):
        return QString.localeAwareCompare(self.fivetuple.toLower(),
                                          other.fivetuple.toLower()) < 0

    def __eq__(self, other):
        return 0 == QString.localeAwareCompare(self.fivetuple.toLower(),
                                               other.fivetuple.toLower())

    def __ne__(self, other):
        # BUGFIX: Python 2 does not derive != from __eq__; without this,
        # 'a != b' fell back to identity and disagreed with 'a == b'.
        return not self.__eq__(other)
class FlowContainer(object):
    """Dict-backed collection of Flow objects keyed by id(flow), with
    binary (QDataStream) persistence and per-field sets of seen values."""

    def __init__(self, filename=QString()):
        self.filename = QString(filename)
        # True whenever in-memory state differs from the file on disk.
        self.dirty = False
        self.flows = {}
        # Distinct values seen per field. NOTE(review): entries are never
        # pruned on removeFlow()/emptyFlows().
        self.l4set = set()
        self.processnameset = set()
        self.featuresset = set()
        self.payloadfilenameset = set()

    def flow(self, identity):
        # Look up by the id() the flow was stored under; None if absent.
        return self.flows.get(identity)

    def addFlow(self, flow):
        """Insert (or re-insert) a flow and mark the container dirty."""
        self.flows[id(flow)] = flow
        self.l4set.add(unicode(flow.l4))
        self.processnameset.add(unicode(flow.processname))
        self.featuresset.add(flow.features)
        self.payloadfilenameset.add(flow.payloadfilename)
        self.dirty = True

    def removeFlow(self, flow):
        """Remove a flow (KeyError if absent) and mark dirty."""
        del self.flows[id(flow)]
        # 'del flow' only drops the local reference -- it has no effect on
        # the caller's binding.
        del flow
        self.dirty = True

    def emptyFlows(self):
        # Drops all flows but leaves the value sets and dirty flag as-is.
        self.flows = {}

    def __len__(self):
        return len(self.flows)

    def __iter__(self):
        for flow in self.flows.values():
            yield flow

    def inOrder(self):
        # Sorted via Flow.__lt__ (case-insensitive five-tuple).
        return sorted(self.flows.values())

    def inProcessL4Order(self):
        # Sorted by (process name, L4 protocol, OpenDPI label).
        return sorted(self.flows.values(),
                      key=lambda x: (x.processname, x.l4, x.opendpi))

    def load(self):
        """Read flows from self.filename (binary QDataStream format).

        Raises IOError on a missing filename, open failure, or bad magic
        number / version. The file handle is always closed.
        """
        exception = None
        fh = None
        try:
            if self.filename.isEmpty():
                raise IOError, "no filename specified for loading"
            fh = QFile(self.filename)
            if not fh.open(QIODevice.ReadOnly):
                raise IOError, unicode(fh.errorString())
            stream = QDataStream(fh)
            magic = stream.readInt32()
            if magic != MAGIC_NUMBER:
                raise IOError, "unrecognized file type"
            fileVersion = stream.readInt16()
            if fileVersion != FILE_VERSION:
                raise IOError, "unrecognized file type version"
            self.flows = {}
            while not stream.atEnd():
                opendpi = QString()
                l4 = QString()
                processname = QString()
                fivetuple = QString()
                features = QString()
                payload = QString()
                # Fields are read in the exact order save() wrote them.
                stream >> opendpi >> l4 >> processname >> fivetuple >> features >>payload
                #features = stream.readInt32()
                flow = Flow(opendpi, l4, processname, fivetuple, features, payload)
                self.flows[id(flow)] = flow
                self.l4set.add(unicode(l4))
                self.processnameset.add(unicode(processname))
                self.featuresset.add(features)
                self.payloadfilenameset.add(payload)
            self.dirty = False
        except IOError, e:
            exception = e
        finally:
            if fh is not None:
                fh.close()
            # Re-raise after the handle is closed.
            if exception is not None:
                raise exception

    def save(self):
        """Write all flows to self.filename; raises IOError on failure."""
        exception = None
        fh = None
        try:
            if self.filename.isEmpty():
                raise IOError, "no filename specified for saving"
            fh = QFile(self.filename)
            if not fh.open(QIODevice.WriteOnly):
                raise IOError, unicode(fh.errorString())
            stream = QDataStream(fh)
            stream.writeInt32(MAGIC_NUMBER)
            stream.writeInt16(FILE_VERSION)
            # NOTE(review): setVersion() is called only here (after the
            # header) and never in load() -- confirm the stream versions
            # actually match between writer and reader.
            stream.setVersion(QDataStream.Qt_4_1)
            for flow in self.flows.values():
                stream << flow.opendpi << flow.l4 << flow.processname \
                       << flow.fivetuple << flow.features <<flow.payloadfilename
                #stream.writeInt32(flow.features)
            self.dirty = False
        except IOError, e:
            exception = e
        finally:
            if fh is not None:
                fh.close()
            if exception is not None:
                raise exception
class FlowTableModel(QAbstractTableModel):
def __init__(self, filename=QString()):
super(FlowTableModel, self).__init__()
self.filename = filename
self.dirty = False
self.flows = []
self.l4set = set()
self.processnameset = set()
self.featuresset = set()
self.payloadfilenameset = set()
def sortByName(self):
self.flows = sorted(self.flows)
self.reset()
def sortByCountryOwner(self):
self.flows = sorted(self.flows,
key=lambda x: (x.processname, x.l4, x.opendpi))
self.reset()
def flags(self, index):
if not index.isValid():
return Qt.ItemIsEnabled
return Qt.ItemFlags(
QAbstractTableModel.flags(self, index)|
Qt.ItemIsEditable)
def data(self, index, role=Qt.DisplayRole):
if (not index.isValid() or
not (0 <= index.row() < len(self.flows))):
return QVariant()
flow = self.flows[index.row()]
column = index.column()
if role == Qt.DisplayRole:
if column == OPENDPI:
return QVariant(flow.opendpi)
elif column == L4:
return QVariant(flow.l4)
elif column == PROCESSNAME:
return QVariant(flow.processname)
elif column == FIVETUPLE:
return QVariant(flow.fivetuple)
elif column == FEATURES:
return QVariant(flow.features )
elif column == PAYLOADFILENAME:
return QVariant(flow.payloadfilename )
elif role == Qt.TextAlignmentRole:
if column == FEATURES:
return QVariant( int(Qt.AlignRight|Qt.AlignVCenter) )
return QVariant( int(Qt.AlignLeft|Qt.AlignVCenter) )
elif role == Qt.TextColorRole and column == FEATURES:
return QVariant(QColor(Qt.red))
elif role == Qt.BackgroundColorRole:
if flow.processname in ("thunder.exe"):
return QVariant(QColor(250, 230, 250))
elif flow.processname in ("pplive.exe"):
return QVariant(QColor(250, 250, 230))
elif flow.processname in ("others",):
return QVariant(QColor(230, 250, 250))
else:
return QVariant(QColor(210, 230, 230))
return QVariant()
def headerData(self, section, orientation, role=Qt.DisplayRole):
if role == Qt.TextAlignmentRole:
if orientation == Qt.Horizontal:
return QVariant(int(Qt.AlignLeft|Qt.AlignVCenter))
return QVariant(int(Qt.AlignRight|Qt.AlignVCenter))
if role != Qt.DisplayRole:
return QVariant()
if orientation == Qt.Horizontal:
if section == OPENDPI:
return QVariant("OpenDPI")
elif section == L4:
return QVariant("L4")
elif section == PROCESSNAME:
return QVariant("ProcessName")
elif section == FIVETUPLE:
return QVariant("Fivetuple")
elif section == FEATURES:
return QVariant("Features")
elif section == PAYLOADFILENAME:
return QVariant("Payloadfilename")
return QVariant(int(section + 1))
def rowCount(self, index=QModelIndex()):
return len(self.flows)
def columnCount(self, index=QModelIndex()):
return 5
def setData(self, index, value, role=Qt.EditRole):
if index.isValid() and 0 <= index.row() < len(self.flows):
flow = self.flows[index.row()]
column = index.column()
if column == OPENDPI:
flow.opendpi = value.toString()
elif column == L4:
flow.l4 = value.toString()
elif column == PROCESSNAME:
flow.processname = value.toString()
elif column == FIVETUPLE:
flow.fivetuple = value.toString()
elif column == FEATURES:
flow.features = value.toString()
elif column == PAYLOADFILENAME:
flow.payloadfilename = value.toString()
self.dirty = True
self.emit(SIGNAL("dataChanged(QModelIndex,QModelIndex)"),
index, index)
return True
return False
def insertRows(self, position, rows=1, index=QModelIndex()):
self.beginInsertRows(QModelIndex(), position, position + rows - 1)
for row in range(rows):
self.flows.insert(position + row,
Flow())
self.endInsertRows()
self.dirty = True
return True
def removeRows(self, position, rows=1, index=QModelIndex()):
self.beginRemoveRows(QModelIndex(), position, position + rows - 1)
self.flows = (self.flows[:position] +
self.flows[position + rows:])
self.endRemoveRows()
self.dirty = True
return True
def load(self):
exception = None
fh = None
try:
if self.filename.isEmpty():
raise IOError, "no filename specified for loading"
fh = QFile(self.filename)
if not fh.open(QIODevice.ReadOnly):
raise IOError, unicode(fh.errorString())
stream = QDataStream(fh)
magic = stream.readInt32()
if magic != MAGIC_NUMBER:
raise IOError, "unrecognized file type"
fileVersion = stream.readInt16()
if fileVersion != FILE_VERSION:
raise IOError, "unrecognized file type version"
self.flows = []
while not stream.atEnd():
opendpi = QString()
l4 = QString()
processname = QString()
fivetuple = QString()
features = QString()
payload = QString()
stream >> opendpi >> l4 >> processname >> fivetuple >> features >> payload
#features = stream.readInt32()
self.flows.append(Flow(opendpi, l4, processname,fivetuple, features, payload))
self.l4set.add(unicode(l4))
self.processnameset.add(unicode(processname))
self.featuresset.add(features)
self.payloadfilenameset.add(payload)
self.dirty = False
except IOError, e:
exception = e
finally:
if fh is not None:
fh.close()
if exception is not None:
raise exception
def save(self):
exception = None
fh = None
try:
if self.filename.isEmpty():
raise IOError, "no filename specified for saving"
fh = QFile(self.filename)
if not fh.open(QIODevice.WriteOnly):
raise IOError, unicode(fh.errorString())
stream = QDataStream(fh)
stream.writeInt32(MAGIC_NUMBER)
stream.writeInt16(FILE_VERSION)
stream.setVersion(QDataStream.Qt_4_1)
for flow in self.flows:
stream << flow.opendpi << flow.l4 << flow.processname << flow.fivetuple << flow.features <<flow.payloadfilename
self.dirty = False
except IOError, e:
exception = e
finally:
if fh is not None:
fh.close()
if exception is not None:
raise exception
class FlowDelegate(QStyledItemDelegate):
    """Item delegate for flow views; currently a pure pass-through that
    defers painting and size hints to the styled base implementation."""

    def __init__(self, parent=None):
        super(FlowDelegate, self).__init__(parent)

    def paint(self, painter, option, index):
        # Default styled painting, unchanged.
        super(FlowDelegate, self).paint(painter, option, index)

    def sizeHint(self, option, index):
        # Default size calculation, unchanged.
        return super(FlowDelegate, self).sizeHint(option, index)
def generateFakeFlows():
    """Yield a handful of hard-coded Flow records for exercising the GUI."""
    sample_records = (
        ("unknown_pplive", "tcp", "pplive.exe", "1.1.1.1-111-tcp-2.2.2.2-222", "a,b,c,d,e,f,g", "/home/flykeysky/dataset/1.fldata"),
        ("unknown_pplive", "tcp", "pplive.exe", "1.1.1.1-111-tcp-2.2.2.2-223", "a,b,c,d,e,f,g", "/home/flykeysky/dataset/2.fldata"),
        ("unknown_pplive", "udp", "pplive.exe", "1.1.1.1-111-udp-2.2.2.2-222", "a,b,c,d,e,f,g", "/home/flykeysky/dataset/3.fldata"),
        ("pplive", "tcp", "pplive.exe", "1.1.1.1-111-tcp-2.2.2.2-224", "a,b,c,d,e,f,g", "/home/flykeysky/dataset/4.fldata"),
        ("pplive", "udp", "thunder.exe", "1.1.1.1-111-tcp-2.2.2.2-222", "a,b,c,d,e,f,g", "/home/flykeysky/dataset/5.fldata"),
        ("unknown", "tcp", "thunder.exe", "1.1.1.1-111-tcp-2.2.2.2-222", "a,b,c,d,e,f,g", "/home/flykeysky/dataset/6.fldata"),
        ("unknown_thunder", "tcp", "thunder.exe", "1.1.1.1-111-tcp-2.2.2.2-222", "a,b,c,d,e,f,g", "/home/flykeysky/dataset/7.fldata"),
    )
    for record in sample_records:
        yield Flow(*record)
| Python |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2011 flykeysky<at>gmail.com. All rights reserved.
# This program or module is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation, either version 2 of the License, or
# version 3 of the License, or (at your option) any later version. It is
# provided for educational purposes and is distributed in the hope that
# it will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
# the GNU General Public License for more details.
import os, sys
from PyQt4 import QtCore, QtGui
from configwizardui import Ui_ConfigWizard
import sigutils
# Module-level configuration shared between the wizard slots and accept();
# persisted to / loaded from CONFIGFileName as YAML via sigutils.
GlobalConfig = {'ARFFFileName_s':'',
                'InputPayloadPath_s':'',
                'OutputSigDir_s':''
                }
CONFIGFileName = 'config.yaml'
class StartConfigWizard(QtGui.QWizard):
    """Wizard collecting three paths (ARFF file, payload directory, output
    directory) into the module-level GlobalConfig dict; saved to YAML on
    accept."""

    def __init__(self, parent=None):
        # NOTE(review): calls QWidget.__init__ rather than the QWizard
        # chain (super().__init__) -- works under PyQt4 but is fragile.
        QtGui.QWidget.__init__(self, parent)
        self.ui = Ui_ConfigWizard()
        self.ui.setupUi(self)
        self.user_dir = QtCore.QDir.homePath()   # default dir for pickers
        #self.DisplayInit()
        self.connect(self.ui.pushButton_ARFF,QtCore.SIGNAL("clicked()"), self.loadARFF)
        self.connect(self.ui.pushButton_PayloadPath,QtCore.SIGNAL("clicked()"), self.setPayloadPath)
        self.connect(self.ui.pushButton_OutputPath,QtCore.SIGNAL("clicked()"), self.setOutputPath)
        # Replace the module-level config with whatever the YAML file holds,
        # then mirror it into the line edits.
        global GlobalConfig, CONFIGFileName
        GlobalConfig = sigutils.LoadYAMLStream(CONFIGFileName)
        self.ui.lineEdit_ARFF.setText(GlobalConfig['ARFFFileName_s'])
        self.ui.lineEdit_PayloadPath.setText(GlobalConfig['InputPayloadPath_s'])
        self.ui.lineEdit_OutputPath.setText(GlobalConfig['OutputSigDir_s'])
        #self.connect(self.ui.pushButton_ARFF,QtCore.SIGNAL("valueChanged(QString)"), self.ForLineEditorsChanged)

    def loadARFF(self):
        """Pick the input ARFF file via a file dialog."""
        #print("load ARFF...")
        fd = QtGui.QFileDialog(self)
        GlobalConfig['ARFFFileName_s'] = fd.getOpenFileName( self,"Open ARFF file", self.user_dir, "arff files (*.arff *.ARFF *.txt)" )
        self.ui.lineEdit_ARFF.setText(GlobalConfig['ARFFFileName_s'])

    def setPayloadPath(self):
        """Pick the directory holding the *.fldat payload files."""
        fd = QtGui.QFileDialog(self)
        GlobalConfig['InputPayloadPath_s'] = fd.getExistingDirectory(self, "Open Directory", \
                self.user_dir,\
                QtGui.QFileDialog.ShowDirsOnly|QtGui.QFileDialog.DontResolveSymlinks)
        self.ui.lineEdit_PayloadPath.setText(GlobalConfig['InputPayloadPath_s'])

    def setOutputPath(self):
        """Pick the signature output directory."""
        fd = QtGui.QFileDialog(self)
        GlobalConfig['OutputSigDir_s'] = fd.getExistingDirectory(self, "Open Directory", \
                self.user_dir,\
                QtGui.QFileDialog.ShowDirsOnly|QtGui.QFileDialog.DontResolveSymlinks)
        self.ui.lineEdit_OutputPath.setText(GlobalConfig['OutputSigDir_s'])

    def ForLineEditorsChanged(self):
        # Unused slot: its connect() call is commented out in __init__.
        GlobalConfig['ARFFFileName'] = self.ui.lineEdit_ARFF.displayText()
        print(GlobalConfig['ARFFFileName'])

    def accept(self):
        """Persist the edited paths to YAML, hide, and close the wizard.

        NOTE(review): stores the ARFF path under 'ARFFFileName' here while
        __init__/loadARFF use 'ARFFFileName_s' -- both keys end up in the
        saved YAML; confirm which key consumers actually read.
        """
        GlobalConfig['ARFFFileName'] = self.ui.lineEdit_ARFF.displayText()
        GlobalConfig['InputPayloadPath_s'] = self.ui.lineEdit_PayloadPath.displayText()
        GlobalConfig['OutputSigDir_s'] = self.ui.lineEdit_OutputPath.displayText()
        print( "%s\t%s\t%s" % \
                (GlobalConfig['ARFFFileName'],\
                GlobalConfig['InputPayloadPath_s'],\
                GlobalConfig['OutputSigDir_s']) )
        sigutils.SaveYAMLStream( GlobalConfig, CONFIGFileName)
        self.hide()
        super(StartConfigWizard, self).accept()
def main():
    """Launch the configuration wizard as a standalone application.

    sys.exit() propagates the Qt event-loop return code; nothing after it
    can execute, so the old unreachable print("done!") was removed.
    """
    app = QtGui.QApplication(sys.argv)
    myapp = StartConfigWizard()
    myapp.show()
    sys.exit(app.exec_())


# Process command-line arguments.
if __name__ == '__main__':
    main()
    exit()
| Python |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2011 flykeysky<at>gmail.com. All rights reserved.
# This program or module is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation, either version 2 of the License, or
# version 3 of the License, or (at your option) any later version. It is
# provided for educational purposes and is distributed in the hope that
# it will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
# the GNU General Public License for more details.
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future_builtins import *
import platform
from PyQt4.QtCore import (QAbstractTableModel, QDataStream, QFile,
QIODevice, QModelIndex, QRegExp, QSize, QString, QVariant, Qt,
SIGNAL)
from PyQt4.QtGui import (QApplication, QColor, QComboBox, QLineEdit,
QSpinBox, QStyle, QStyledItemDelegate, QTextDocument, QTextEdit)
import richtextlineedit
# Column indices for the six flow fields, shared by the table/model code.
OPENDPI, L4, PROCESSNAME, FIVETUPLE, FEATURES, PAYLOADFILENAME = range(6)
# Binary file-format markers written and checked by save()/load().
MAGIC_NUMBER = 0x570C4
FILE_VERSION = 1
class Flow(object):
    """One network flow record: OpenDPI label, L4 protocol, process name,
    five-tuple key, feature string and payload file path (all QStrings).

    Ordering and equality compare ONLY the five-tuple, case-insensitively
    and locale-aware; hashing stays identity-based (flows are keyed by
    id() in FlowContainer).
    """

    def __init__(self, opendpi="unknown", l4="unknown_L4", processname="unknown_process", \
                 fivetuple="0.0.0.0", features="", payload=""):
        self.opendpi = QString(opendpi)
        self.l4 = QString(l4)
        self.processname = QString(processname)
        self.fivetuple = QString(fivetuple)
        self.features = QString(features)
        self.payloadfilename = QString(payload)

    def __hash__(self):
        # Identity hash on purpose: equal five-tuples may still be
        # distinct container entries.
        return super(Flow, self).__hash__()

    def __lt__(self, other):
        return QString.localeAwareCompare(self.fivetuple.toLower(),
                                          other.fivetuple.toLower()) < 0

    def __eq__(self, other):
        return 0 == QString.localeAwareCompare(self.fivetuple.toLower(),
                                               other.fivetuple.toLower())

    def __ne__(self, other):
        # BUGFIX: Python 2 does not derive != from __eq__; without this,
        # 'a != b' fell back to identity and disagreed with 'a == b'.
        return not self.__eq__(other)
class FlowContainer(object):
    """Dict-backed collection of Flow objects keyed by id(flow), with
    binary (QDataStream) persistence and per-field sets of seen values."""

    def __init__(self, filename=QString()):
        self.filename = QString(filename)
        # True whenever in-memory state differs from the file on disk.
        self.dirty = False
        self.flows = {}
        # Distinct values seen per field. NOTE(review): entries are never
        # pruned on removeFlow()/emptyFlows().
        self.l4set = set()
        self.processnameset = set()
        self.featuresset = set()
        self.payloadfilenameset = set()

    def flow(self, identity):
        # Look up by the id() the flow was stored under; None if absent.
        return self.flows.get(identity)

    def addFlow(self, flow):
        """Insert (or re-insert) a flow and mark the container dirty."""
        self.flows[id(flow)] = flow
        self.l4set.add(unicode(flow.l4))
        self.processnameset.add(unicode(flow.processname))
        self.featuresset.add(flow.features)
        self.payloadfilenameset.add(flow.payloadfilename)
        self.dirty = True

    def removeFlow(self, flow):
        """Remove a flow (KeyError if absent) and mark dirty."""
        del self.flows[id(flow)]
        # 'del flow' only drops the local reference -- it has no effect on
        # the caller's binding.
        del flow
        self.dirty = True

    def emptyFlows(self):
        # Drops all flows but leaves the value sets and dirty flag as-is.
        self.flows = {}

    def __len__(self):
        return len(self.flows)

    def __iter__(self):
        for flow in self.flows.values():
            yield flow

    def inOrder(self):
        # Sorted via Flow.__lt__ (case-insensitive five-tuple).
        return sorted(self.flows.values())

    def inProcessL4Order(self):
        # Sorted by (process name, L4 protocol, OpenDPI label).
        return sorted(self.flows.values(),
                      key=lambda x: (x.processname, x.l4, x.opendpi))

    def load(self):
        """Read flows from self.filename (binary QDataStream format).

        Raises IOError on a missing filename, open failure, or bad magic
        number / version. The file handle is always closed.
        """
        exception = None
        fh = None
        try:
            if self.filename.isEmpty():
                raise IOError, "no filename specified for loading"
            fh = QFile(self.filename)
            if not fh.open(QIODevice.ReadOnly):
                raise IOError, unicode(fh.errorString())
            stream = QDataStream(fh)
            magic = stream.readInt32()
            if magic != MAGIC_NUMBER:
                raise IOError, "unrecognized file type"
            fileVersion = stream.readInt16()
            if fileVersion != FILE_VERSION:
                raise IOError, "unrecognized file type version"
            self.flows = {}
            while not stream.atEnd():
                opendpi = QString()
                l4 = QString()
                processname = QString()
                fivetuple = QString()
                features = QString()
                payload = QString()
                # Fields are read in the exact order save() wrote them.
                stream >> opendpi >> l4 >> processname >> fivetuple >> features >>payload
                #features = stream.readInt32()
                flow = Flow(opendpi, l4, processname, fivetuple, features, payload)
                self.flows[id(flow)] = flow
                self.l4set.add(unicode(l4))
                self.processnameset.add(unicode(processname))
                self.featuresset.add(features)
                self.payloadfilenameset.add(payload)
            self.dirty = False
        except IOError, e:
            exception = e
        finally:
            if fh is not None:
                fh.close()
            # Re-raise after the handle is closed.
            if exception is not None:
                raise exception

    def save(self):
        """Write all flows to self.filename; raises IOError on failure."""
        exception = None
        fh = None
        try:
            if self.filename.isEmpty():
                raise IOError, "no filename specified for saving"
            fh = QFile(self.filename)
            if not fh.open(QIODevice.WriteOnly):
                raise IOError, unicode(fh.errorString())
            stream = QDataStream(fh)
            stream.writeInt32(MAGIC_NUMBER)
            stream.writeInt16(FILE_VERSION)
            # NOTE(review): setVersion() is called only here (after the
            # header) and never in load() -- confirm the stream versions
            # actually match between writer and reader.
            stream.setVersion(QDataStream.Qt_4_1)
            for flow in self.flows.values():
                stream << flow.opendpi << flow.l4 << flow.processname \
                       << flow.fivetuple << flow.features <<flow.payloadfilename
                #stream.writeInt32(flow.features)
            self.dirty = False
        except IOError, e:
            exception = e
        finally:
            if fh is not None:
                fh.close()
            if exception is not None:
                raise exception
class FlowTableModel(QAbstractTableModel):
    """Qt table model exposing a list of Flow records.

    Columns are indexed by the module-level constants OPENDPI, L4,
    PROCESSNAME, FIVETUPLE, FEATURES and PAYLOADFILENAME.  The model can be
    loaded from / saved to a binary QDataStream file guarded by
    MAGIC_NUMBER and FILE_VERSION.
    """

    def __init__(self, filename=QString()):
        super(FlowTableModel, self).__init__()
        self.filename = filename
        self.dirty = False  # True while there are unsaved edits
        self.flows = []
        # Caches of distinct values observed while loading.
        self.l4set = set()
        self.processnameset = set()
        self.featuresset = set()
        self.payloadfilenameset = set()

    def sortByName(self):
        """Sort flows by their natural (Flow comparison) order."""
        self.flows = sorted(self.flows)
        self.reset()

    def sortByCountryOwner(self):
        """Sort flows by (processname, l4, opendpi)."""
        self.flows = sorted(self.flows,
                            key=lambda x: (x.processname, x.l4, x.opendpi))
        self.reset()

    def flags(self, index):
        """Every valid cell is editable in place."""
        if not index.isValid():
            return Qt.ItemIsEnabled
        return Qt.ItemFlags(
                QAbstractTableModel.flags(self, index) |
                Qt.ItemIsEditable)

    def data(self, index, role=Qt.DisplayRole):
        """Return text, alignment and colors for a cell, by role."""
        if (not index.isValid() or
                not (0 <= index.row() < len(self.flows))):
            return QVariant()
        flow = self.flows[index.row()]
        column = index.column()
        if role == Qt.DisplayRole:
            if column == OPENDPI:
                return QVariant(flow.opendpi)
            elif column == L4:
                return QVariant(flow.l4)
            elif column == PROCESSNAME:
                return QVariant(flow.processname)
            elif column == FIVETUPLE:
                return QVariant(flow.fivetuple)
            elif column == FEATURES:
                return QVariant(flow.features)
            elif column == PAYLOADFILENAME:
                return QVariant(flow.payloadfilename)
        elif role == Qt.TextAlignmentRole:
            if column == FEATURES:
                return QVariant(int(Qt.AlignRight | Qt.AlignVCenter))
            return QVariant(int(Qt.AlignLeft | Qt.AlignVCenter))
        elif role == Qt.TextColorRole and column == FEATURES:
            return QVariant(QColor(Qt.red))
        elif role == Qt.BackgroundColorRole:
            # BUG FIX: ("thunder.exe") / ("pplive.exe") were plain strings
            # (missing trailing comma), so `in` performed substring matching
            # instead of tuple membership.  The ("others",) branch shows the
            # intended 1-tuple form.
            if flow.processname in ("thunder.exe",):
                return QVariant(QColor(250, 230, 250))
            elif flow.processname in ("pplive.exe",):
                return QVariant(QColor(250, 250, 230))
            elif flow.processname in ("others",):
                return QVariant(QColor(230, 250, 250))
            else:
                return QVariant(QColor(210, 230, 230))
        return QVariant()

    def headerData(self, section, orientation, role=Qt.DisplayRole):
        """Column titles for horizontal headers, 1-based row numbers otherwise."""
        if role == Qt.TextAlignmentRole:
            if orientation == Qt.Horizontal:
                return QVariant(int(Qt.AlignLeft | Qt.AlignVCenter))
            return QVariant(int(Qt.AlignRight | Qt.AlignVCenter))
        if role != Qt.DisplayRole:
            return QVariant()
        if orientation == Qt.Horizontal:
            if section == OPENDPI:
                return QVariant("OpenDPI")
            elif section == L4:
                return QVariant("L4")
            elif section == PROCESSNAME:
                return QVariant("ProcessName")
            elif section == FIVETUPLE:
                return QVariant("Fivetuple")
            elif section == FEATURES:
                return QVariant("Features")
            elif section == PAYLOADFILENAME:
                return QVariant("Payloadfilename")
        return QVariant(int(section + 1))

    def rowCount(self, index=QModelIndex()):
        return len(self.flows)

    def columnCount(self, index=QModelIndex()):
        # BUG FIX: was hard-coded to 5, which hid the PAYLOADFILENAME column
        # even though data(), setData() and headerData() all handle it.
        return 6

    def setData(self, index, value, role=Qt.EditRole):
        """Write an edited cell value back into the underlying Flow."""
        if index.isValid() and 0 <= index.row() < len(self.flows):
            flow = self.flows[index.row()]
            column = index.column()
            if column == OPENDPI:
                flow.opendpi = value.toString()
            elif column == L4:
                flow.l4 = value.toString()
            elif column == PROCESSNAME:
                flow.processname = value.toString()
            elif column == FIVETUPLE:
                flow.fivetuple = value.toString()
            elif column == FEATURES:
                flow.features = value.toString()
            elif column == PAYLOADFILENAME:
                flow.payloadfilename = value.toString()
            self.dirty = True
            self.emit(SIGNAL("dataChanged(QModelIndex,QModelIndex)"),
                      index, index)
            return True
        return False

    def insertRows(self, position, rows=1, index=QModelIndex()):
        """Insert `rows` blank Flow records at `position`."""
        self.beginInsertRows(QModelIndex(), position, position + rows - 1)
        for row in range(rows):
            self.flows.insert(position + row, Flow())
        self.endInsertRows()
        self.dirty = True
        return True

    def removeRows(self, position, rows=1, index=QModelIndex()):
        """Remove `rows` records starting at `position`."""
        self.beginRemoveRows(QModelIndex(), position, position + rows - 1)
        self.flows = (self.flows[:position] +
                      self.flows[position + rows:])
        self.endRemoveRows()
        self.dirty = True
        return True

    def load(self):
        """Load flows from self.filename; raises IOError on any failure.

        The file handle is always closed; a caught IOError is re-raised
        after cleanup.
        """
        exception = None
        fh = None
        try:
            if self.filename.isEmpty():
                raise IOError("no filename specified for loading")
            fh = QFile(self.filename)
            if not fh.open(QIODevice.ReadOnly):
                raise IOError(unicode(fh.errorString()))
            stream = QDataStream(fh)
            magic = stream.readInt32()
            if magic != MAGIC_NUMBER:
                raise IOError("unrecognized file type")
            fileVersion = stream.readInt16()
            if fileVersion != FILE_VERSION:
                raise IOError("unrecognized file type version")
            self.flows = []
            while not stream.atEnd():
                opendpi = QString()
                l4 = QString()
                processname = QString()
                fivetuple = QString()
                features = QString()
                payload = QString()
                stream >> opendpi >> l4 >> processname >> fivetuple >> features >> payload
                self.flows.append(Flow(opendpi, l4, processname, fivetuple,
                                       features, payload))
                self.l4set.add(unicode(l4))
                self.processnameset.add(unicode(processname))
                self.featuresset.add(features)
                self.payloadfilenameset.add(payload)
            self.dirty = False
        except IOError as e:
            exception = e
        finally:
            if fh is not None:
                fh.close()
            if exception is not None:
                raise exception

    def save(self):
        """Save all flows to self.filename; raises IOError on any failure."""
        exception = None
        fh = None
        try:
            if self.filename.isEmpty():
                raise IOError("no filename specified for saving")
            fh = QFile(self.filename)
            if not fh.open(QIODevice.WriteOnly):
                raise IOError(unicode(fh.errorString()))
            stream = QDataStream(fh)
            # Header written before setVersion(), mirroring load()'s reads.
            stream.writeInt32(MAGIC_NUMBER)
            stream.writeInt16(FILE_VERSION)
            stream.setVersion(QDataStream.Qt_4_1)
            for flow in self.flows:
                (stream << flow.opendpi << flow.l4 << flow.processname
                        << flow.fivetuple << flow.features << flow.payloadfilename)
            self.dirty = False
        except IOError as e:
            exception = e
        finally:
            if fh is not None:
                fh.close()
            if exception is not None:
                raise exception
class FlowDelegate(QStyledItemDelegate):
    """Item delegate for flow views.

    Currently defers entirely to the QStyledItemDelegate base
    implementation; kept as an extension point for custom rendering.
    """

    def __init__(self, parent=None):
        super(FlowDelegate, self).__init__(parent)

    def paint(self, painter, option, index):
        super(FlowDelegate, self).paint(painter, option, index)

    def sizeHint(self, option, index):
        return super(FlowDelegate, self).sizeHint(option, index)
def generateFakeFlows():
    """Yield a handful of hard-coded Flow records for exercising the GUI."""
    sample_rows = [
        ("unknown_pplive", "tcp", "pplive.exe",
         "1.1.1.1-111-tcp-2.2.2.2-222", "a,b,c,d,e,f,g",
         "/home/flykeysky/dataset/1.fldata"),
        ("unknown_pplive", "tcp", "pplive.exe",
         "1.1.1.1-111-tcp-2.2.2.2-223", "a,b,c,d,e,f,g",
         "/home/flykeysky/dataset/2.fldata"),
        ("unknown_pplive", "udp", "pplive.exe",
         "1.1.1.1-111-udp-2.2.2.2-222", "a,b,c,d,e,f,g",
         "/home/flykeysky/dataset/3.fldata"),
        ("pplive", "tcp", "pplive.exe",
         "1.1.1.1-111-tcp-2.2.2.2-224", "a,b,c,d,e,f,g",
         "/home/flykeysky/dataset/4.fldata"),
        ("pplive", "udp", "thunder.exe",
         "1.1.1.1-111-tcp-2.2.2.2-222", "a,b,c,d,e,f,g",
         "/home/flykeysky/dataset/5.fldata"),
        ("unknown", "tcp", "thunder.exe",
         "1.1.1.1-111-tcp-2.2.2.2-222", "a,b,c,d,e,f,g",
         "/home/flykeysky/dataset/6.fldata"),
        ("unknown_thunder", "tcp", "thunder.exe",
         "1.1.1.1-111-tcp-2.2.2.2-222", "a,b,c,d,e,f,g",
         "/home/flykeysky/dataset/7.fldata"),
    ]
    # Each row is (opendpi, l4, processname, fivetuple, features, payloadfilename).
    for row in sample_rows:
        yield Flow(*row)
| Python |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2011 flykeysky<at>gmail.com. All rights reserved.
# This program or module is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation, either version 2 of the License, or
# version 3 of the License, or (at your option) any later version. It is
# provided for educational purposes and is distributed in the hope that
# it will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
# the GNU General Public License for more details.
import os, sys
from PyQt4 import QtCore, QtGui
from configwizardui import Ui_ConfigWizard
import sigutils
# Runtime configuration, loaded from / persisted to CONFIGFileName as YAML.
# Keys:
#   ARFFFileName_s     -- path of the input ARFF file
#   InputPayloadPath_s -- directory holding captured payload files
#   OutputSigDir_s     -- directory where generated signatures are written
GlobalConfig = {'ARFFFileName_s': '',
                'InputPayloadPath_s': '',
                'OutputSigDir_s': ''
                }
# Name of the YAML file used by sigutils.LoadYAMLStream / SaveYAMLStream.
CONFIGFileName = 'config.yaml'
class StartConfigWizard(QtGui.QWizard):
    """Wizard that lets the user pick the ARFF file, the payload directory
    and the output directory, persisting the choices to CONFIGFileName
    (YAML) via sigutils on accept."""

    def __init__(self, parent=None):
        # NOTE(review): calls QWidget.__init__ rather than QWizard.__init__;
        # kept as-is to avoid changing construction behavior -- confirm this
        # is intentional.
        QtGui.QWidget.__init__(self, parent)
        self.ui = Ui_ConfigWizard()
        self.ui.setupUi(self)
        self.user_dir = QtCore.QDir.homePath()  # default start dir for dialogs
        self.connect(self.ui.pushButton_ARFF, QtCore.SIGNAL("clicked()"),
                     self.loadARFF)
        self.connect(self.ui.pushButton_PayloadPath, QtCore.SIGNAL("clicked()"),
                     self.setPayloadPath)
        self.connect(self.ui.pushButton_OutputPath, QtCore.SIGNAL("clicked()"),
                     self.setOutputPath)
        global GlobalConfig, CONFIGFileName
        GlobalConfig = sigutils.LoadYAMLStream(CONFIGFileName)
        # Pre-fill the line edits with the previously saved configuration.
        self.ui.lineEdit_ARFF.setText(GlobalConfig['ARFFFileName_s'])
        self.ui.lineEdit_PayloadPath.setText(GlobalConfig['InputPayloadPath_s'])
        self.ui.lineEdit_OutputPath.setText(GlobalConfig['OutputSigDir_s'])

    def loadARFF(self):
        """Pick the input ARFF file via a file dialog."""
        fd = QtGui.QFileDialog(self)
        GlobalConfig['ARFFFileName_s'] = fd.getOpenFileName(
            self, "Open ARFF file", self.user_dir,
            "arff files (*.arff *.ARFF *.txt)")
        self.ui.lineEdit_ARFF.setText(GlobalConfig['ARFFFileName_s'])

    def setPayloadPath(self):
        """Pick the directory that holds the captured payload files."""
        fd = QtGui.QFileDialog(self)
        GlobalConfig['InputPayloadPath_s'] = fd.getExistingDirectory(
            self, "Open Directory", self.user_dir,
            QtGui.QFileDialog.ShowDirsOnly | QtGui.QFileDialog.DontResolveSymlinks)
        self.ui.lineEdit_PayloadPath.setText(GlobalConfig['InputPayloadPath_s'])

    def setOutputPath(self):
        """Pick the directory where generated signatures are written."""
        fd = QtGui.QFileDialog(self)
        GlobalConfig['OutputSigDir_s'] = fd.getExistingDirectory(
            self, "Open Directory", self.user_dir,
            QtGui.QFileDialog.ShowDirsOnly | QtGui.QFileDialog.DontResolveSymlinks)
        self.ui.lineEdit_OutputPath.setText(GlobalConfig['OutputSigDir_s'])

    def ForLineEditorsChanged(self):
        """Debug hook: echo the current ARFF line-edit contents."""
        GlobalConfig['ARFFFileName'] = self.ui.lineEdit_ARFF.displayText()
        print(GlobalConfig['ARFFFileName'])

    def accept(self):
        """Persist the edited values to the YAML config, then close.

        BUG FIX: the ARFF path was stored only under 'ARFFFileName', but
        __init__ repopulates the line edit from 'ARFFFileName_s', so a path
        typed directly into the edit was lost on the next run.  It is now
        written under both keys; 'ARFFFileName' is kept because other
        modules read that key.
        """
        GlobalConfig['ARFFFileName'] = self.ui.lineEdit_ARFF.displayText()
        GlobalConfig['ARFFFileName_s'] = self.ui.lineEdit_ARFF.displayText()
        GlobalConfig['InputPayloadPath_s'] = self.ui.lineEdit_PayloadPath.displayText()
        GlobalConfig['OutputSigDir_s'] = self.ui.lineEdit_OutputPath.displayText()
        print("%s\t%s\t%s" %
              (GlobalConfig['ARFFFileName'],
               GlobalConfig['InputPayloadPath_s'],
               GlobalConfig['OutputSigDir_s']))
        sigutils.SaveYAMLStream(GlobalConfig, CONFIGFileName)
        self.hide()
        super(StartConfigWizard, self).accept()
def main():
    """Run the configuration wizard as a standalone application."""
    app = QtGui.QApplication(sys.argv)
    myapp = StartConfigWizard()
    myapp.show()
    # sys.exit() propagates the Qt event-loop return code.  (An unreachable
    # print("done!") that followed this line has been removed.)
    sys.exit(app.exec_())
# Process command-line arguments.
if __name__ == '__main__':
    main()
    exit()  # not reached: main() already terminates via sys.exit()
| Python |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2011 flykeysky<at>gmail.com. All rights reserved.
# This program or module is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation, either version 2 of the License, or
# version 3 of the License, or (at your option) any later version. It is
# provided for educational purposes and is distributed in the hope that
# it will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
# the GNU General Public License for more details.
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future_builtins import *
import os, sys
from PyQt4.QtCore import (QChar, QFile, QDir, QString, QTimer, QVariant, Qt,
SIGNAL, QFileInfo)
from PyQt4.QtGui import (QApplication, QDialog, QHBoxLayout, QLabel,
QListWidget, QListWidgetItem, QMessageBox, QPushButton,
QSplitter, QTableWidget, QTableWidgetItem, QTreeWidget,
QTreeWidgetItem, QVBoxLayout, QWidget, QWizard, QFileDialog)
import flows
import configwizard
import sigutils
#from configwizardui import Ui_ConfigWizard
# Detect a Mac OS X build of PyQt4: qt_mac_set_native_menubar exists only
# there, so a failed import means we are not on a Mac.  MAC gates the
# button focus-policy tweaks in MainForm.__init__.
MAC = True
try:
    from PyQt4.QtGui import qt_mac_set_native_menubar
except ImportError:
    MAC = False
class MainForm(QDialog):
    """Main dialog: a tree of all imported flows (left) and a table of the
    flows the user has selected (right), plus actions to configure paths,
    extract flows from an ARFF file, and generate a payload file list."""

    def __init__(self, parent=None):
        super(MainForm, self).__init__(parent)
        treeLabel = QLabel("Input Flows Tree")
        self.treeWidget = QTreeWidget()
        self.treeWidget.setSelectionMode(self.treeWidget.ExtendedSelection)
        self.treeWidget.setDragDropMode(self.treeWidget.InternalMove)
        self.treeWidget.setDragEnabled(True)
        self.treeWidget.setDropIndicatorShown(True)
        treeLabel.setBuddy(self.treeWidget)
        tableLabel = QLabel("Selected Flows Table")
        self.tableWidget = QTableWidget()
        self.tableWidget.setSelectionBehavior(QTableWidget.SelectRows)
        tableLabel.setBuddy(self.tableWidget)
        configurewizardButton = QPushButton("Run &Config")
        extractButton = QPushButton("E&xtract flows")
        generatorwizardButton = QPushButton("&Generator")
        addFlowButton = QPushButton("&Add Flow")
        removeFlowButton = QPushButton("&Remove Flow")
        refreshFlowButton = QPushButton("Re&fresh")
        quitButton = QPushButton("&Quit")
        if not MAC:
            addFlowButton.setFocusPolicy(Qt.NoFocus)
            removeFlowButton.setFocusPolicy(Qt.NoFocus)
            quitButton.setFocusPolicy(Qt.NoFocus)
            configurewizardButton.setFocusPolicy(Qt.NoFocus)
            generatorwizardButton.setFocusPolicy(Qt.NoFocus)
        splitter = QSplitter(Qt.Horizontal)
        # Left pane: the input flows tree.
        vbox = QVBoxLayout()
        vbox.addWidget(treeLabel)
        vbox.addWidget(self.treeWidget)
        widget = QWidget()
        widget.setLayout(vbox)
        splitter.addWidget(widget)
        # Right pane: the selected flows table.
        vbox = QVBoxLayout()
        vbox.addWidget(tableLabel)
        vbox.addWidget(self.tableWidget)
        widget = QWidget()
        widget.setLayout(vbox)
        splitter.addWidget(widget)
        buttonLayout = QHBoxLayout()
        buttonLayout.addWidget(configurewizardButton)
        buttonLayout.addWidget(extractButton)
        buttonLayout.addStretch()
        buttonLayout.addWidget(addFlowButton)
        buttonLayout.addStretch()
        buttonLayout.addWidget(removeFlowButton)
        buttonLayout.addWidget(refreshFlowButton)
        buttonLayout.addStretch()
        buttonLayout.addWidget(generatorwizardButton)
        buttonLayout.addWidget(quitButton)
        layout = QVBoxLayout()
        layout.addWidget(splitter)
        layout.addLayout(buttonLayout)
        self.setLayout(layout)
        self.connect(configurewizardButton, SIGNAL("clicked()"), self.configurewiz)
        self.connect(extractButton, SIGNAL("clicked()"), self.extractflow)
        self.connect(addFlowButton, SIGNAL("clicked()"), self.addFlow)
        self.connect(removeFlowButton, SIGNAL("clicked()"), self.removeFlow)
        self.connect(refreshFlowButton, SIGNAL("clicked()"), self.refreshtable)
        self.connect(generatorwizardButton, SIGNAL("clicked()"),
                     self.generatePayloadFileListFile)
        self.connect(quitButton, SIGNAL("clicked()"), self.accept)
        # All imported flows / the subset the user has picked out.
        self.flows = flows.FlowContainer(QString("flows.dat"))
        self.selectedflows = flows.FlowContainer(QString("selectedflows.dat"))
        self.setWindowTitle("Flows (dict)")
        self.myGlobalConfig = {}            # YAML config, loaded in initialLoad()
        self.PayloadFullname_list = []      # full paths of found *.fldat files
        self.payloadfullnamelist_file = '/tmp/payloadfullnamelist'
        self.Payload5tuple_list = []        # basenames (5-tuples) of those files
        # Defer the (possibly slow) disk load until the event loop starts.
        QTimer.singleShot(0, self.initialLoad)

    def initialLoad(self):
        """Load both flow containers from disk (if present) and the YAML
        configuration, then populate the tree and table widgets."""
        if not QFile.exists(self.flows.filename):
            self.flows.dirty = False
        else:
            try:
                self.flows.load()
            except IOError as e:
                QMessageBox.warning(self, "Flows - Error",
                                    "Failed to load: {0}".format(e))
        if not QFile.exists(self.selectedflows.filename):
            self.selectedflows.dirty = False
        else:
            try:
                self.selectedflows.load()
            except IOError as e:
                QMessageBox.warning(self, "Flows - Error",
                                    "Failed to load: {0}".format(e))
        self.myGlobalConfig = sigutils.LoadYAMLStream(configwizard.CONFIGFileName)
        self.populateTable()
        self.tableWidget.sortItems(0)
        self.populateTree()

    def configurewiz(self):
        """Show the configuration wizard (modeless)."""
        myapp = configwizard.StartConfigWizard(self)
        myapp.show()

    def reject(self):
        # Treat window-close / Esc the same as Quit (prompts to save).
        self.accept()

    def accept(self):
        """Offer to save dirty flow containers, then close the dialog."""
        if ((self.selectedflows.dirty or self.flows.dirty) and
                QMessageBox.question(self, "Flows - Save?",
                                     "Save unsaved changes?",
                                     QMessageBox.Yes | QMessageBox.No) ==
                QMessageBox.Yes):
            try:
                self.flows.save()
                self.selectedflows.save()
            except IOError as e:
                QMessageBox.warning(self, "Flows - Error",
                                    "Failed to save: {0}".format(e))
        QDialog.accept(self)

    def extractflow(self):
        """Scan the payload directory, then import matching flows from the
        configured ARFF file.  Launches the config wizard first if any of
        the three configured paths is empty."""
        print("extracting...")
        print("%s\t%s\t%s" % (self.myGlobalConfig['ARFFFileName'],
                              self.myGlobalConfig['InputPayloadPath_s'],
                              self.myGlobalConfig['OutputSigDir_s']))
        if (self.myGlobalConfig['ARFFFileName'] == '' or
                self.myGlobalConfig['InputPayloadPath_s'] == '' or
                self.myGlobalConfig['OutputSigDir_s'] == ''):
            self.configurewiz()
        self.FindnLoadPayloadFiles()
        self.extractFromARFF()

    def FindnLoadPayloadFiles(self):
        """Walk the payload directory collecting *.fldat files.

        Full paths accumulate in self.PayloadFullname_list and basenames
        (the flow 5-tuples) in self.Payload5tuple_list; the path list is
        also written to self.payloadfullnamelist_file.
        """
        m_PayloadFilesPath = self.myGlobalConfig['InputPayloadPath_s']
        for (thisDir, subsHere, filesHere) in os.walk(str(m_PayloadFilesPath)):
            for filename in filesHere:
                (shortname, extension) = os.path.splitext(filename)
                payloadfullname = os.path.join(thisDir, filename)
                if os.path.isfile(payloadfullname) and (extension == ".fldat"):
                    self.PayloadFullname_list.append(payloadfullname)
                    self.Payload5tuple_list.append(shortname)
        # Persist the list so external tools can consume it.
        with open(self.payloadfullnamelist_file, 'w') as f:
            for i in self.PayloadFullname_list:
                f.write(i + "\n")

    def extractFromARFF(self):
        """Parse the configured ARFF file; for every TCP/UDP row whose
        5-tuple has a captured payload file, build a Flow and add it to
        self.flows, then refresh the tree."""
        import re
        attribute_prog = re.compile("^@ATTRIBUTE\ ")
        data_prog = re.compile("^@DATA")
        pARFFFile = open(self.myGlobalConfig['ARFFFileName'], 'r')
        feature_data = []
        all_index = {}      # attribute name -> column index in @DATA rows
        feature_count = 0
        selectedfeature_names = ['max_fpktl', 'min_bpktl', 'std_bpktl',
                                 'min_biat', 'sflow_fbytes', 'fpsh_cnt']
        selectedfivetupe_names = ['srcip', 'srcport', 'proto', 'dstip', 'dstport']
        selected_app_names = ['open_class']  # , 'app_class', 'cata_class']
        foundData = False
        flows_found_count = 0
        flows_all_count = 0
        flows_missed_count = 0
        for line in pARFFFile.readlines():
            attribute_re = attribute_prog.search(line)
            if attribute_re:
                o = line.split()
                print(feature_count, o[1])
                # Map attribute name -> column index.  (The original built
                # and exec()'d a source string here; direct assignment is
                # equivalent and avoids exec on file-derived text.)
                all_index[str(o[1].strip())] = feature_count
                feature_count += 1
                continue
            m = data_prog.match(line)
            if m:
                foundData = True
            # Data rows: non-blank lines after @DATA that are neither
            # directives (@...) nor comments (%...).
            if (foundData == True and (
                    not line.isspace())
                    and (not re.match('^@', line))
                    and (not re.match('^%', line))):
                l = line.strip().split(',')
                feature_data.append(l)
                m_opendpi = ''
                m_l4 = ''
                m_processname = ''
                m_feature = ''
                m_payloadfilename = ''
                m_5tuple = ''
                flows_all_count += 1
                # Only UDP (proto 17) and TCP (proto 6) flows are considered.
                if (l[all_index['proto']] == '17' or l[all_index['proto']] == '6'):
                    count = 0
                    # Build "srcip-srcport-PROTO-dstip-dstport".
                    for ii in selectedfivetupe_names:
                        if ii == 'proto':
                            if l[all_index['proto']] == '17':
                                m_5tuple += 'UDP' + '-'
                                m_l4 = 'UDP'
                            else:  # proto == '6'
                                m_5tuple += 'TCP' + '-'
                                m_l4 = 'TCP'
                        elif (count < len(selectedfivetupe_names) - 1):
                            m_5tuple += l[all_index[ii]] + '-'
                        else:
                            m_5tuple += l[all_index[ii]]
                        count += 1
                    print(m_5tuple, len(selectedfivetupe_names))
                    if (m_5tuple in self.Payload5tuple_list):
                        flows_found_count += 1
                        print("---------%d flow found-----------" % (flows_found_count))
                        m_payloadfilename = (self.myGlobalConfig['InputPayloadPath_s']
                                             + '/' + m_5tuple + '.fldat')
                        print(m_payloadfilename)
                        count = 0
                        # Comma-join the selected feature columns.
                        for ii in selectedfeature_names:
                            if (count < len(selectedfeature_names) - 1):
                                m_feature += l[all_index[ii]] + ','
                            else:
                                m_feature += l[all_index[ii]]
                            count += 1
                        print(m_feature, len(selectedfeature_names))
                        # OpenDPI label; its last '_' component is taken as
                        # the process name.
                        m_opendpi = l[all_index['open_class']]
                        print(m_opendpi)
                        m_opendpi_splited = m_opendpi.strip().split('_')
                        m_processname = m_opendpi_splited[-1]
                        print(m_processname)
                        flow = flows.Flow(m_opendpi, m_l4, m_processname,
                                          m_5tuple, m_feature, m_payloadfilename)
                        self.flows.addFlow(flow)
                    else:
                        print('no payload found.')
                        flows_missed_count += 1
        print("--------------------\n%s found flows\n%s missed flows\n%s flows in all" %
              (flows_found_count, flows_missed_count, flows_all_count))
        pARFFFile.close()
        self.populateTree()

    def populateTable(self, selectedFlow=None):
        """Rebuild the table from self.selectedflows; if selectedFlow (an
        id()) is given, reselect that row afterwards."""
        selected = None
        self.tableWidget.clear()
        # BUG FIX: sorting was left enabled while rows were inserted, which
        # lets Qt re-sort mid-fill and scatter items; disable during the
        # fill and re-enable afterwards.
        self.tableWidget.setSortingEnabled(False)
        self.tableWidget.setRowCount(len(self.selectedflows))
        headers = ["OpenDPI", "L4", "ProcessName", "FiveTuples",
                   "FEATURES", "PayloadFileName"]
        self.tableWidget.setColumnCount(len(headers))
        self.tableWidget.setHorizontalHeaderLabels(headers)
        for row, flow in enumerate(self.selectedflows):
            item = QTableWidgetItem(flow.opendpi)
            # Stash the flow's id() so rows can be mapped back to Flows.
            item.setData(Qt.UserRole, QVariant(long(id(flow))))
            if selectedFlow is not None and selectedFlow == id(flow):
                selected = item
            self.tableWidget.setItem(row, flows.OPENDPI, item)
            self.tableWidget.setItem(row, flows.L4,
                                     QTableWidgetItem(flow.l4))
            self.tableWidget.setItem(row, flows.PROCESSNAME,
                                     QTableWidgetItem(flow.processname))
            self.tableWidget.setItem(row, flows.FIVETUPLE,
                                     QTableWidgetItem(flow.fivetuple))
            self.tableWidget.setItem(row, flows.FEATURES,
                                     QTableWidgetItem(flow.features))
            self.tableWidget.setItem(row, flows.PAYLOADFILENAME,
                                     QTableWidgetItem(flow.payloadfilename))
        self.tableWidget.setSortingEnabled(True)
        self.tableWidget.resizeColumnsToContents()
        if selected is not None:
            selected.setSelected(True)
            self.tableWidget.setCurrentItem(selected)

    def populateTree(self, selectedFlow=None):
        """Rebuild the ProcessName/L4/OpenDPI tree from self.flows; if
        selectedFlow (an id()) is given, reselect that leaf."""
        selected = None
        self.treeWidget.clear()
        self.treeWidget.setColumnCount(3)
        self.treeWidget.setHeaderLabels(["ProcessName/L4/OpenDPI",
                                         "FEATURES", "PayloadFileName"])
        self.treeWidget.setItemsExpandable(True)
        # Caches so each ProcessName / L4 / OpenDPI node is created once.
        parentFromProcessName = {}
        parentFromProcessNameL4 = {}
        parentFromProcessNameL4OpenDPI = {}
        for flow in self.flows.inProcessL4Order():
            ancestor = parentFromProcessName.get(flow.processname)
            if ancestor is None:
                ancestor = QTreeWidgetItem(self.treeWidget, [flow.processname])
                parentFromProcessName[flow.processname] = ancestor
            processnameL4 = flow.processname + "/" + flow.l4
            parent = parentFromProcessNameL4.get(processnameL4)
            if parent is None:
                parent = QTreeWidgetItem(ancestor, [flow.l4])
                parentFromProcessNameL4[processnameL4] = parent
            processnameL4OpenDPI = processnameL4 + "/" + flow.opendpi
            subparent = parentFromProcessNameL4OpenDPI.get(processnameL4OpenDPI)
            if subparent is None:
                subparent = QTreeWidgetItem(parent, [flow.opendpi])
                parentFromProcessNameL4OpenDPI[processnameL4OpenDPI] = subparent
            item = QTreeWidgetItem(subparent,
                                   [flow.fivetuple,
                                    QString("%L1").arg(flow.features),
                                    QString("%L1").arg(flow.payloadfilename)])
            item.setTextAlignment(1, Qt.AlignRight | Qt.AlignVCenter)
            if selectedFlow is not None and selectedFlow == id(flow):
                selected = item
            self.treeWidget.expandItem(subparent)
            self.treeWidget.expandItem(parent)
            self.treeWidget.expandItem(ancestor)
        self.treeWidget.resizeColumnToContents(0)
        self.treeWidget.resizeColumnToContents(1)
        if selected is not None:
            selected.setSelected(True)
            self.treeWidget.setCurrentItem(selected)

    def generatePayloadFileListFile(self):
        """Write the selected flows' payload paths to a user-chosen file,
        then run `d2o` over it to build the data/offsets files."""
        fd = QFileDialog(self)
        m_outputfilename = fd.getSaveFileName(self, "Output file to...",
                                              self.myGlobalConfig['OutputSigDir_s'], "")
        m_outputfile = open(m_outputfilename, 'w')
        for i in self.selectedflows:
            print(i.payloadfilename)
            m_outputfile.write("%s\n" % (i.payloadfilename))
        m_outputfile.close()
        fi = QFileInfo(m_outputfilename)
        out_path = fi.dir().absolutePath()
        # NOTE(review): file paths are interpolated into a shell command
        # string; subprocess with an argument list would be safer if paths
        # can contain spaces or shell metacharacters.
        cmd_1 = ("d2o -l %s -o %s/data -x %s/offsets" %
                 (m_outputfilename, out_path, out_path))
        print(cmd_1)
        os.system(cmd_1)
        datafilename = ("%s/data" % (out_path))
        print(datafilename)
        if (not os.path.isfile(datafilename)):
            # TODO(review): cmd_2 is built but never executed in the
            # original -- confirm whether mksary should actually run here.
            cmd_2 = ("mksary -i %s.ary %s" % (datafilename, datafilename))

    def addFlow(self):
        """Copy each selected leaf of the tree into self.selectedflows and
        refresh the table, selecting the new row."""
        for i in self.treeWidget.selectedItems():
            if (i.childCount() > 0):
                print("TODO: skipping, instead of select all children")
            else:
                print("%d\t%s\t%s%s" % (i.childCount(), i.text(0),
                                        i.text(1), i.text(2)))
                print(i.parent().text(0), i.parent().parent().text(0),
                      i.parent().parent().parent().text(0))
                # Leaf columns (see populateTree): fivetuple / features /
                # payload file; ancestors: OpenDPI <- L4 <- ProcessName.
                m_fivetuple = QString(i.text(0))
                m_feature = QString(i.text(1))
                m_payloadfilename = i.text(2)
                m_opendpi = QString(i.parent().text(0))
                m_l4 = QString(i.parent().parent().text(0))
                m_processname = QString(i.parent().parent().parent().text(0))
                flow = flows.Flow(m_opendpi, m_l4, m_processname,
                                  m_fivetuple, m_feature, m_payloadfilename)
                flow.fivetuple = QString(i.text(0))
                self.selectedflows.addFlow(flow)
                self.populateTable(id(flow))

    def refreshtable(self):
        """Rebuild the selected-flows table from scratch."""
        self.populateTable()

    def currentTableFlow(self):
        """Return the Flow of the current table row, or None."""
        item = self.tableWidget.item(self.tableWidget.currentRow(), 0)
        if item is None:
            return None
        return self.selectedflows.flow(item.data(Qt.UserRole).toLongLong()[0])

    def removeFlow1(self):
        """Remove only the current row's flow (single-row variant)."""
        flow = self.currentTableFlow()
        if flow is None:
            return
        self.selectedflows.removeFlow(flow)
        self.populateTable()

    def removeFlow2(self):
        """Remove the flows of all selected rows.

        NOTE(review): selectedItems() yields one item per selected cell and
        only column-0 items carry the UserRole id -- verify that
        FlowContainer.flow()/removeFlow() tolerate the lookups produced by
        the other columns' items.
        """
        flowitemlist = self.tableWidget.selectedItems()
        for item in flowitemlist:
            if item is None:
                return
            fl = self.selectedflows.flow(item.data(Qt.UserRole).toLongLong()[0])
            self.selectedflows.removeFlow(fl)

    def removeFlow(self):
        """Remove all selected flows, then refresh the table."""
        self.removeFlow2()
        self.populateTable()
def main():
    """Create the Qt application and run the main flow-selection dialog."""
    application = QApplication(sys.argv)
    dialog = MainForm()
    dialog.show()
    application.exec_()
# Process command-line arguments.
if __name__ == '__main__':
    main()
    exit()  # runs after the Qt event loop returns from main()
| Python |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'configwizardui.ui'
#
# Created: Tue Jan 25 06:46:08 2011
# by: PyQt4 UI code generator 4.7.2
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
class Ui_ConfigWizard(object):
    # Generated by pyuic4 from configwizardui.ui -- regenerate rather than
    # edit by hand (see the WARNING in the file header).

    def setupUi(self, ConfigWizard):
        """Build the three wizard pages: intro, path configuration, final."""
        ConfigWizard.setObjectName("ConfigWizard")
        ConfigWizard.resize(400, 300)
        # Page 1: project intro (empty placeholder page).
        self.wizardPageProject = QtGui.QWizardPage()
        self.wizardPageProject.setObjectName("wizardPageProject")
        ConfigWizard.addPage(self.wizardPageProject)
        # Page 2: the three path pickers (button + line edit per row).
        self.wizardPageConfigPath = QtGui.QWizardPage()
        self.wizardPageConfigPath.setObjectName("wizardPageConfigPath")
        self.gridLayoutWidget_2 = QtGui.QWidget(self.wizardPageConfigPath)
        self.gridLayoutWidget_2.setGeometry(QtCore.QRect(0, 0, 381, 241))
        self.gridLayoutWidget_2.setObjectName("gridLayoutWidget_2")
        self.gridLayout_ConfigPath = QtGui.QGridLayout(self.gridLayoutWidget_2)
        self.gridLayout_ConfigPath.setSizeConstraint(QtGui.QLayout.SetMaximumSize)
        self.gridLayout_ConfigPath.setHorizontalSpacing(2)
        self.gridLayout_ConfigPath.setObjectName("gridLayout_ConfigPath")
        self.pushButton_OutputPath = QtGui.QPushButton(self.gridLayoutWidget_2)
        self.pushButton_OutputPath.setObjectName("pushButton_OutputPath")
        self.gridLayout_ConfigPath.addWidget(self.pushButton_OutputPath, 2, 0, 1, 1)
        self.lineEdit_ARFF = QtGui.QLineEdit(self.gridLayoutWidget_2)
        self.lineEdit_ARFF.setObjectName("lineEdit_ARFF")
        self.gridLayout_ConfigPath.addWidget(self.lineEdit_ARFF, 0, 1, 1, 1)
        self.pushButton_PayloadPath = QtGui.QPushButton(self.gridLayoutWidget_2)
        self.pushButton_PayloadPath.setObjectName("pushButton_PayloadPath")
        self.gridLayout_ConfigPath.addWidget(self.pushButton_PayloadPath, 1, 0, 1, 1)
        self.pushButton_ARFF = QtGui.QPushButton(self.gridLayoutWidget_2)
        self.pushButton_ARFF.setObjectName("pushButton_ARFF")
        self.gridLayout_ConfigPath.addWidget(self.pushButton_ARFF, 0, 0, 1, 1)
        self.lineEdit_OutputPath = QtGui.QLineEdit(self.gridLayoutWidget_2)
        self.lineEdit_OutputPath.setObjectName("lineEdit_OutputPath")
        self.gridLayout_ConfigPath.addWidget(self.lineEdit_OutputPath, 2, 1, 1, 1)
        self.lineEdit_PayloadPath = QtGui.QLineEdit(self.gridLayoutWidget_2)
        self.lineEdit_PayloadPath.setObjectName("lineEdit_PayloadPath")
        self.gridLayout_ConfigPath.addWidget(self.lineEdit_PayloadPath, 1, 1, 1, 1)
        ConfigWizard.addPage(self.wizardPageConfigPath)
        # Page 3: final summary page (empty grid placeholder).
        self.wizardPageFinal = QtGui.QWizardPage()
        self.wizardPageFinal.setObjectName("wizardPageFinal")
        self.gridLayoutWidget = QtGui.QWidget(self.wizardPageFinal)
        self.gridLayoutWidget.setGeometry(QtCore.QRect(-1, -1, 381, 241))
        self.gridLayoutWidget.setObjectName("gridLayoutWidget")
        self.gridLayout_2 = QtGui.QGridLayout(self.gridLayoutWidget)
        self.gridLayout_2.setSizeConstraint(QtGui.QLayout.SetMaximumSize)
        self.gridLayout_2.setHorizontalSpacing(2)
        self.gridLayout_2.setObjectName("gridLayout_2")
        ConfigWizard.addPage(self.wizardPageFinal)
        self.retranslateUi(ConfigWizard)
        QtCore.QMetaObject.connectSlotsByName(ConfigWizard)

    def retranslateUi(self, ConfigWizard):
        """Set all user-visible (translatable) strings."""
        ConfigWizard.setWindowTitle(QtGui.QApplication.translate("ConfigWizard", "Wizard", None, QtGui.QApplication.UnicodeUTF8))
        self.pushButton_OutputPath.setText(QtGui.QApplication.translate("ConfigWizard", "Output Path", None, QtGui.QApplication.UnicodeUTF8))
        self.pushButton_PayloadPath.setText(QtGui.QApplication.translate("ConfigWizard", "Payload Path", None, QtGui.QApplication.UnicodeUTF8))
        self.pushButton_ARFF.setText(QtGui.QApplication.translate("ConfigWizard", "ARFF file", None, QtGui.QApplication.UnicodeUTF8))
| Python |
# Application identity, used e.g. in window titles.
PROGRAM_NAME = 'Flyback'
PROGRAM_VERSION = '0.6.5'
# Default user preferences: which file categories to exclude from backups.
# exclude_filesize is a size threshold (units defined by the consumer --
# TODO confirm against the code that reads it).
DEFAULT_PREFERENCES = {
    'exclude_audio': True,
    'exclude_video': True,
    'exclude_trash': True,
    'exclude_cache': True,
    'exclude_vms': True,
    'exclude_iso': True,
    'exclude_filesize': 1,
}
# Running this module directly just prints the version string.
if __name__=='__main__':
    print PROGRAM_VERSION
| Python |
import gnome, gobject, gtk, gtk.glade, os, sys, threading
import backup
import create_backup_gui
import manage_backup_gui
import settings
import util
def echo(*args):
    # Debug helper: print whatever arguments a signal/callback delivers.
    print 'echo', args
class GUI(object):
def close(self, a=None, b=None):
self.main_window.hide()
self.unregister_gui(self)
def open_backup(self,a=None,b=None,c=None):
treeview_backups_widget = self.xml.get_widget('treeview_backups')
model, entry = treeview_backups_widget.get_selection().get_selected()
if entry and model.get_value(entry, 2):
uuid = model.get_value(entry, 3)
host = model.get_value(entry, 4)
path = model.get_value(entry, 5)
if uuid and host and path:
print 'opening... drive:%s'%uuid, 'path:%s'%path
self.register_gui( manage_backup_gui.GUI(self.register_gui, self.unregister_gui, uuid, host, path) )
else:
print 'creating a new archive...'
self.register_gui( create_backup_gui.GUI(self.register_gui, self.unregister_gui) )
self.close()
def delete_backup(self,a=None,b=None,c=None):
treeview_backups_widget = self.xml.get_widget('treeview_backups')
model, entry = treeview_backups_widget.get_selection().get_selected()
if entry and model.get_value(entry, 2):
uuid = model.get_value(entry, 3)
host = model.get_value(entry, 4)
path = model.get_value(entry, 5)
if uuid and host and path:
title = 'Delete Backup?'
s = "Permanently delete the following backup repository?\n"
s += "<b>Drive:</b> %s:%s\n<b>Source:</b> <i>%s</i>:%s\n" % (util.pango_escape(uuid), util.pango_escape(backup.get_mount_point_for_uuid(uuid)), util.pango_escape(host), util.pango_escape(path), )
s += '\n<b>This action cannot be undone!</b>'
md = gtk.MessageDialog(None, gtk.DIALOG_DESTROY_WITH_PARENT, gtk.MESSAGE_WARNING, gtk.BUTTONS_YES_NO, util.pango_escape(title))
md.format_secondary_markup(s)
if gtk.RESPONSE_YES==md.run():
print 'deleting',uuid,host,path
gui = self
class T(threading.Thread):
def run(self):
backup.delete_backup(uuid, host, path)
gtk.gdk.threads_enter()
gui.refresh_device_list()
gtk.gdk.threads_leave()
T().start()
md.destroy()
def update_buttons(self,a=None):
model, entry = a.get_selection().get_selected()
available = entry and model.get_value(entry, 2)
if available:
self.xml.get_widget('button_open').set_sensitive(True)
self.xml.get_widget('button_delete').set_sensitive(True)
else:
self.xml.get_widget('button_open').set_sensitive(False)
self.xml.get_widget('button_delete').set_sensitive(False)
def refresh_device_list(self):
treeview_backups_model = self.xml.get_widget('treeview_backups').get_model()
treeview_backups_model.clear()
known_backups = backup.get_known_backups()
for t in known_backups:
uuid = t['uuid']
paths = backup.get_dev_paths_for_uuid(t['uuid'])
drive_name = 'UUID: '+ t['uuid']
for path in paths:
if 'disk/by-id' in path:
drive_name = path[path.index('disk/by-id')+11:]
free_space = util.humanize_bytes(backup.get_free_space(t['uuid']))
drive_name = backup.get_mount_point_for_uuid(t['uuid']) + ' (%s free)' % free_space
s = "<b>Drive:</b> %s\n<b>Source:</b> <i>%s</i>:%s\n" % (util.pango_escape(drive_name), util.pango_escape(t['host']), util.pango_escape(t['path']), )
if backup.is_dev_present(t['uuid']) and backup.get_hostname()==t['host']:
s += "<b>Status:</b> Drive is ready for backups"
else:
if backup.is_dev_present(t['uuid']) and backup.get_hostname()!=t['host']:
s += "<b>Status:</b> Backup available for export only (was created on another computer)"
else:
s += "<b>Status:</b> Drive is unavailable (please attach)"
if backup.get_device_type(uuid)=='gvfs':
icon = self.main_window.render_icon(gtk.STOCK_NETWORK, gtk.ICON_SIZE_DIALOG)
elif backup.get_device_type(uuid)=='local':
icon = self.main_window.render_icon(gtk.STOCK_HARDDISK, gtk.ICON_SIZE_DIALOG)
else:
icon = self.main_window.render_icon(gtk.STOCK_DIALOG_QUESTION, gtk.ICON_SIZE_DIALOG)
treeview_backups_model.append( (icon, s, backup.is_dev_present(t['uuid']), t['uuid'], t['host'], t['path']) )
if known_backups:
treeview_backups_model.append( (self.main_window.render_icon(gtk.STOCK_ADD, gtk.ICON_SIZE_DIALOG), 'Double-click here to create a new backup...', True, None, None, None) )
else:
treeview_backups_model.append( (self.main_window.render_icon(gtk.STOCK_ADD, gtk.ICON_SIZE_DIALOG), 'No existing backups found.\nDouble-click here to create a new backup...', True, None, None, None) )
def __init__(self, register_gui, unregister_gui):
    """Build the 'Select Backup' window from glade, wire its buttons and
    backup list, subscribe to device hotplug events, and show it.
    register_gui/unregister_gui are the app's window-registry callbacks."""
    self.register_gui = register_gui
    self.unregister_gui = unregister_gui
    self.xml = gtk.glade.XML( os.path.join( util.RUN_FROM_DIR, 'glade', 'select_backup.glade' ) )
    self.main_window = self.xml.get_widget('select_backup_gui')
    self.main_window.connect("delete-event", self.close )
    icon = self.main_window.render_icon(gtk.STOCK_HARDDISK, gtk.ICON_SIZE_BUTTON)
    self.main_window.set_icon(icon)
    self.main_window.set_title('%s v%s - Select Backup' % (settings.PROGRAM_NAME, settings.PROGRAM_VERSION))
    # buttons
    self.xml.get_widget('button_cancel').connect('clicked', self.close)
    self.xml.get_widget('button_open').connect('clicked', self.open_backup)
    self.xml.get_widget('button_delete').connect('clicked', self.delete_backup)
    # setup list; columns: icon | markup | present | uuid | host | path
    treeview_backups_model = gtk.ListStore( gtk.gdk.Pixbuf, str, bool, str, str, str )
    treeview_backups_widget = self.xml.get_widget('treeview_backups')
    renderer = gtk.CellRendererPixbuf()
    renderer.set_property('xpad', 4)
    renderer.set_property('ypad', 4)
    treeview_backups_widget.append_column( gtk.TreeViewColumn('', renderer, pixbuf=0) )
    renderer = gtk.CellRendererText()
    renderer.set_property('xpad', 16)
    renderer.set_property('ypad', 16)
    treeview_backups_widget.append_column( gtk.TreeViewColumn('', renderer, markup=1) )
    treeview_backups_widget.set_headers_visible(False)
    treeview_backups_widget.set_model(treeview_backups_model)
    treeview_backups_widget.connect( 'row-activated', self.open_backup )
    treeview_backups_widget.connect( 'cursor-changed', self.update_buttons )
    treeview_backups_widget.connect( 'move-cursor', self.update_buttons )
    # keep the list current as drives are plugged/unplugged
    util.register_device_added_removed_callback(self.refresh_device_list)
    self.refresh_device_list()
    self.main_window.show()
| Python |
import datetime, gnome, gobject, gtk, gtk.glade, os, sys, tempfile, threading, time
import backup
import settings
import util
def echo(*args):
    """Debug helper: print whatever positional args it receives."""
    print 'echo', args
class GUI(object):
    """'Manage Backup' window for one (uuid, host, path) repository.

    Shows the revision history and each revision's file list, and runs
    the toolbar actions (backup / status / export / explore / verify) in
    worker threads, tracking their progress in the 'running_tasks' list."""

    def close(self, a=None, b=None):
        # a/b absorb the widget/event arguments GTK passes to handlers
        self.main_window.hide()
        self.unregister_gui(self)

    def update_revisions(self):
        """Reload the revision treeview from the repository's history."""
        revisions = backup.get_revisions(self.uuid, self.host, self.path)
        treeview_revisions_widget = self.xml.get_widget('treeview_revisions')
        treeview_revisions_model = treeview_revisions_widget.get_model()
        treeview_revisions_model.clear()
        for rev in revisions:
            s = 'Captured on: %s\nBy: <i>%s</i>' % ( util.pango_escape(rev['date']), util.pango_escape(rev['author']) )
            if rev['verified']:
                s += '\nVerified on: %s' % util.pango_escape(rev['verified']) #.strftime('%')
            treeview_revisions_model.append((s,rev['commit']))

    def update_files(self,a=None):
        """'cursor-changed' handler on the revisions treeview: fetch the
        selected revision's file list in a background thread.
        NOTE(review): 'a' is assumed to be the revisions treeview widget;
        calling with a=None would fail at a.get_selection()."""
        treeview_files_model = self.xml.get_widget('treeview_files').get_model()
        treeview_files_model.clear()
        # placeholder row until the worker thread delivers the real list
        treeview_files_model.append( (util.pango_escape('loading files... (please wait)'),) )
        model, entry = a.get_selection().get_selected()
        if not entry:
            treeview_files_model.clear()
            return
        self.xml.get_widget('toolbutton_export').set_sensitive( True )
        self.xml.get_widget('toolbutton_explore').set_sensitive( True )
        rev = entry and model.get_value(entry, 1)
        icon = self.main_window.render_icon(gtk.STOCK_FIND, gtk.ICON_SIZE_MENU)
        running_tasks_model = self.xml.get_widget('running_tasks').get_model()
        i = running_tasks_model.append( ( icon, util.pango_escape('loading files for rev: '+self.path), datetime.datetime.now(), '' ) )
        gui = self  # closure alias so the nested thread class can reach this GUI
        class T(threading.Thread):
            def run(self):
                # cache per revision: listing shells out to git and is slow
                if rev not in gui.rev_files_map:
                    gui.rev_files_map[rev] = backup.get_files_for_revision(gui.uuid, gui.host, gui.path, rev)
                gtk.gdk.threads_enter()
                # only populate if the user hasn't selected another revision
                if rev==gui.get_selected_revision():
                    treeview_files_model.clear()
                    for fn in gui.rev_files_map[rev]:
                        treeview_files_model.append( (util.pango_escape(fn),) )
                running_tasks_model.remove(i)
                gtk.gdk.threads_leave()
        T().start()

    def get_selected_revision(self):
        """Return the commit id of the selected revision, or None."""
        model, entry = self.xml.get_widget('treeview_revisions').get_selection().get_selected()
        if not entry: return
        rev = entry and model.get_value(entry, 1)
        return rev

    def open_preferences(self):
        # imported lazily: only needed once the dialog is actually opened
        import manage_backup_preferences_gui
        self.register_gui( manage_backup_preferences_gui.GUI(self.register_gui, self.unregister_gui, self.uuid, self.host, self.path) )

    def start_backup(self):
        """Run a backup in a worker thread, tracked in the task list."""
        icon = self.main_window.render_icon(gtk.STOCK_SAVE, gtk.ICON_SIZE_MENU)
        running_tasks_model = self.xml.get_widget('running_tasks').get_model()
        i = running_tasks_model.append( ( icon, util.pango_escape('backing up: '+self.path), datetime.datetime.now(), '' ) )
        gui = self
        class T(threading.Thread):
            def run(self):
                backup.backup(gui.uuid, gui.host, gui.path)
                gtk.gdk.threads_enter()
                gui.update_revisions()
                running_tasks_model.remove(i)
                gtk.gdk.threads_leave()
        T().start()

    def start_export(self):
        """Ask for a target folder, then export the selected revision as a
        tar.gz archive in a worker thread and open the resulting file."""
        dialog = gtk.FileChooserDialog(title='Select folder to save archive to...', parent=None, action=gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER, buttons=(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL, gtk.STOCK_OPEN, gtk.RESPONSE_OK), backend=None)
        response = dialog.run()
        if response == gtk.RESPONSE_OK:
            target_dir = dialog.get_filename()
            rev = self.get_selected_revision()
            icon = self.main_window.render_icon(gtk.STOCK_FLOPPY, gtk.ICON_SIZE_MENU)
            running_tasks_model = self.xml.get_widget('running_tasks').get_model()
            i = running_tasks_model.append( ( icon, util.pango_escape('exporting selected revision to: '+target_dir), datetime.datetime.now(), '' ) )
            gui = self
            class T(threading.Thread):
                def run(self):
                    fn = backup.export_revision( gui.uuid, gui.host, gui.path, rev, target_dir )
                    util.open_file(fn)
                    gtk.gdk.threads_enter()
                    running_tasks_model.remove(i)
                    gtk.gdk.threads_leave()
            T().start()
        elif response == gtk.RESPONSE_CANCEL:
            pass
        dialog.destroy()

    def start_explore(self):
        """Extract the selected revision into a temp dir and open it with
        the desktop file manager (worker thread)."""
        target_dir = tmp = tempfile.mkdtemp(suffix='_flyback')
        rev = self.get_selected_revision()
        icon = self.main_window.render_icon(gtk.STOCK_DIRECTORY, gtk.ICON_SIZE_MENU)
        running_tasks_model = self.xml.get_widget('running_tasks').get_model()
        i = running_tasks_model.append( ( icon, util.pango_escape('preparing folder for exploration: '+target_dir), datetime.datetime.now(), '' ) )
        gui = self
        class T(threading.Thread):
            def run(self):
                # export to an archive, unpack it in place, then delete it
                fn = backup.export_revision( gui.uuid, gui.host, gui.path, rev, target_dir )
                os.chdir(target_dir)
                os.system('tar -zxvf "%s"' % fn)
                os.remove(fn)
                os.chdir(util.RUN_FROM_DIR)
                util.open_file(target_dir)
                gtk.gdk.threads_enter()
                running_tasks_model.remove(i)
                gtk.gdk.threads_leave()
        T().start()

    def start_verify(self):
        """Verify the selected revision's integrity in a worker thread and
        refresh the revision list (which shows verification dates)."""
        rev = self.get_selected_revision()
        icon = self.main_window.render_icon(gtk.STOCK_DIALOG_QUESTION, gtk.ICON_SIZE_MENU)
        running_tasks_model = self.xml.get_widget('running_tasks').get_model()
        i = running_tasks_model.append( ( icon, util.pango_escape('verifying revision: '+rev), datetime.datetime.now(), '' ) )
        gui = self
        class T(threading.Thread):
            def run(self):
                fn = backup.verify_revision( gui.uuid, gui.host, gui.path, rev )
                gtk.gdk.threads_enter()
                gui.update_revisions()
                running_tasks_model.remove(i)
                gtk.gdk.threads_leave()
        T().start()

    def start_status(self):
        """Open the status window and fill it (added/modified/deleted files
        since the last backup) from a worker thread."""
        icon = self.main_window.render_icon(gtk.STOCK_FIND, gtk.ICON_SIZE_MENU)
        running_tasks_model = self.xml.get_widget('running_tasks').get_model()
        i = running_tasks_model.append( ( icon, util.pango_escape('retrieving folder status since last backup...'), datetime.datetime.now(), '' ) )
        import backup_status_gui
        gui2 = backup_status_gui.GUI(self.register_gui, self.unregister_gui, self.uuid, self.host, self.path)
        self.register_gui( gui2 )
        gui = self
        class T(threading.Thread):
            def run(self):
                added, modified, deleted = backup.get_status( gui.uuid, gui.host, gui.path )
                gtk.gdk.threads_enter()
                gui2.set_files(added, modified, deleted)
                running_tasks_model.remove(i)
                gtk.gdk.threads_leave()
        T().start()

    def __init__(self, register_gui, unregister_gui, uuid, host, path):
        """Build the window from glade, wire the toolbar and treeviews,
        start the task-elapsed updater thread, and show the window."""
        self.register_gui = register_gui
        self.unregister_gui = unregister_gui
        self.uuid = uuid
        self.host = host
        self.path = path
        self.rev_files_map = {}  # cache: revision commit -> file name list
        self.xml = gtk.glade.XML( os.path.join( util.RUN_FROM_DIR, 'glade', 'manage_backup.glade' ) )
        self.main_window = self.xml.get_widget('window')
        self.main_window.connect("delete-event", self.close )
        icon = self.main_window.render_icon(gtk.STOCK_HARDDISK, gtk.ICON_SIZE_BUTTON)
        self.main_window.set_icon(icon)
        self.xml.get_widget('entry_drive_name').set_text( backup.get_drive_name(self.uuid) )
        self.xml.get_widget('entry_path').set_text( self.host +':'+ self.path )
        self.main_window.set_title('%s v%s - Manage Backup' % (settings.PROGRAM_NAME, settings.PROGRAM_VERSION))
        # toolbar: backup/status require the drive attached on the owning host
        self.xml.get_widget('toolbutton_backup').set_sensitive( backup.test_backup_assertions(self.uuid, self.host, self.path) )
        self.xml.get_widget('toolbutton_backup').connect('clicked', lambda x: self.start_backup() )
        self.xml.get_widget('toolbutton_status').set_sensitive( backup.test_backup_assertions(self.uuid, self.host, self.path) )
        self.xml.get_widget('toolbutton_status').connect('clicked', lambda x: self.start_status() )
        self.xml.get_widget('toolbutton_export').connect('clicked', lambda x: self.start_export() )
        self.xml.get_widget('toolbutton_explore').connect('clicked', lambda x: self.start_explore() )
        self.xml.get_widget('toolbutton_verify').connect('clicked', lambda x: self.start_verify() )
        self.xml.get_widget('toolbutton_preferences').connect('clicked', lambda x: self.open_preferences() )
        # revision list; columns: markup | commit id
        treeview_revisions_model = gtk.ListStore( str, str )
        treeview_revisions_widget = self.xml.get_widget('treeview_revisions')
        renderer = gtk.CellRendererText()
        treeview_revisions_widget.append_column( gtk.TreeViewColumn('History', renderer, markup=0) )
        treeview_revisions_widget.set_model(treeview_revisions_model)
        treeview_revisions_widget.connect( 'cursor-changed', self.update_files )
        treeview_revisions_widget.set_property('rules-hint', True)
        self.update_revisions()
        # file list
        treeview_files_widget = self.xml.get_widget('treeview_files')
        treeview_files_model = gtk.ListStore( str )
        renderer = gtk.CellRendererText()
        renderer.set_property('font','monospace')
        treeview_files_widget.append_column( gtk.TreeViewColumn('Files', renderer, markup=0) )
        treeview_files_widget.set_model(treeview_files_model)
        treeview_files_widget.set_property('rules-hint', True)
        treeview_files_model.append( (util.pango_escape('please select a revision to view... (on the left)'),) )
        # task list; columns: icon | description | start time | elapsed text
        running_tasks_widget = self.xml.get_widget('running_tasks')
        running_tasks_model = gtk.ListStore( gtk.gdk.Pixbuf, str, gobject.TYPE_PYOBJECT, str )
        renderer = gtk.CellRendererPixbuf()
        renderer.set_property('xpad', 4)
        renderer.set_property('ypad', 4)
        running_tasks_widget.append_column( gtk.TreeViewColumn('', renderer, pixbuf=0) )
        renderer = gtk.CellRendererText()
        running_tasks_widget.append_column( gtk.TreeViewColumn('', renderer, markup=1) )
        renderer = gtk.CellRendererText()
        running_tasks_widget.append_column( gtk.TreeViewColumn('', renderer, markup=3) )
        running_tasks_widget.set_model(running_tasks_model)
        running_tasks_widget.set_headers_visible(False)
        running_tasks_widget.set_property('rules-hint', True)
        class T(threading.Thread):
            def run(self):
                # periodically refresh the 'elapsed' column of each task
                while True:
                    # NOTE(review): tasks_running is never set True, so this
                    # loop always takes the 10-second sleep branch
                    tasks_running = False
                    gtk.gdk.threads_enter()
                    for x in running_tasks_model:
                        x[3] = util.humanize_time( datetime.datetime.now() - x[2] )
                    gtk.gdk.threads_leave()
                    if tasks_running: time.sleep(1)
                    else: time.sleep(10)
        running_tasks_thread = T()
        running_tasks_thread.daemon = True
        running_tasks_thread.start()
        self.main_window.show()
        # if no revisions exist, prompt user to run backup
        if not backup.get_revisions(self.uuid, self.host, self.path):
            s = 'Welcome to Flyback!'
            md = gtk.MessageDialog(None, gtk.DIALOG_DESTROY_WITH_PARENT, gtk.MESSAGE_INFO, gtk.BUTTONS_CLOSE, s)
            md.format_secondary_markup('This is a brand new (and currently empty) backup repository. To fill it with data, please click the "backup" button in the upper-left corner.')
            md.run()
            md.destroy()
| Python |
from __future__ import division
import datetime, os, sys, threading, time
# Compatibility shim for Python < 2.6: alias the modern Thread.is_alive()
# to the old camelCase isAlive() so the rest of the code can use one name.
if not hasattr(threading.Thread, "is_alive"):
    threading.Thread.is_alive = threading.Thread.isAlive
# Directory this program was launched from; used to locate glade resources
# and as a safe cwd to return to after chdir-ing into temp dirs.
RUN_FROM_DIR = os.path.abspath(os.path.dirname(sys.argv[0]))
def pango_escape(message):
    """Escape a value for safe embedding in Pango markup.

    Returns the string 'None' for None, stringifies non-string input, and
    replaces the markup metacharacters with their entities.  The previous
    implementation used split/join with the *same* separator on both
    sides (e.g. "&".join(x.split("&"))), which is a no-op, so nothing was
    ever escaped and filenames containing '<', '>' or '&' broke (or
    injected) the markup shown in the treeviews.
    """
    if message is None:
        return 'None'
    try:
        string_types = basestring  # Python 2
    except NameError:
        string_types = str  # Python 3
    if not isinstance(message, string_types):
        message = str(message)
    # '&' must be escaped first so it doesn't re-escape the entities below
    message = message.replace("&", "&amp;")
    message = message.replace("<", "&lt;")
    message = message.replace(">", "&gt;")
    return message
def open_file(fn):
    """Open a file or folder with the user's desktop handler (gnome-open).
    NOTE(review): fn is interpolated into a shell command with plain
    double quotes; a name containing '"' would break the quoting."""
    import os
    os.system( 'gnome-open "%s"' % fn )
def humanize_bytes(bytes):
    """Render a byte count as a short human-readable string.

    Negative counts mean the size is unknown; below 1KB the exact byte
    count is shown; otherwise one decimal place in the largest unit that
    keeps the value under 1024 (capped at TB).
    """
    if bytes < 0:
        return 'unknown'
    if bytes < 1024:
        return '%dB' % bytes
    value = float(bytes)
    for unit in ('KB', 'MB', 'GB'):
        value /= 1024
        if value < 1024:
            return '%.1f%s' % (value, unit)
    return '%.1fTB' % (value / 1024)
def humanize_time(td):
    """Render a datetime.timedelta as a short elapsed-time string.

    Fixes two defects in the original:
    - it read td.seconds, which excludes td.days, so anything over a day
      was folded back into hours and the days branch was unreachable;
      total_seconds() counts the full duration.
    - the hour/day branches formatted the *total* minutes/hours instead
      of the remainder (e.g. 3700s rendered as '1h 61.7m').
    """
    seconds = int(td.total_seconds())
    if seconds < 60:
        return '%is' % seconds
    if seconds < 60*60:
        return '%im %is' % (seconds // 60, seconds % 60)
    if seconds < 60*60*24:
        return '%ih %.1fm' % (seconds // 3600, (seconds % 3600) / 60.0)
    return '%id %.1fh' % (seconds // 86400, (seconds % 86400) / 3600.0)
class DeviceMonitorThread(threading.Thread):
    """Daemon thread that tails `dbus-monitor` for HAL device add/remove
    events and invokes the registered callbacks under the GTK lock."""
    def run(self):
        import gtk
        print 'starting dbus-monitor...'
        # NOTE(review): the callback lists are created here rather than in
        # __init__, so register_device_added_removed_callback must wait for
        # this thread to get this far before appending (it sleeps; racy).
        self.add_callbacks = []
        self.remove_callbacks = []
        f = os.popen('dbus-monitor --system "interface=org.freedesktop.Hal.Manager"')
        while True:
            line = f.readline()
            #print line
            if 'member=DeviceRemoved' in line:
                time.sleep(.5)  # give udev/mounts a moment to settle
                print 'device removed'
                for callback in self.remove_callbacks:
                    gtk.gdk.threads_enter()
                    callback()
                    gtk.gdk.threads_leave()
            if 'member=DeviceAdded' in line:
                time.sleep(.5)
                print 'device added'
                for callback in self.add_callbacks:
                    gtk.gdk.threads_enter()
                    callback()
                    gtk.gdk.threads_leave()
# Single module-wide monitor thread, started lazily on first registration.
device_monitor_thread = DeviceMonitorThread()
device_monitor_thread.daemon = True
def register_device_added_removed_callback(callback):
    """Register `callback` to run on every device add AND remove event.

    Starts the monitor thread on first use; the sleep gives run() time to
    create its callback lists before we append to them (NOTE(review):
    this is a race, not a guarantee)."""
    if not device_monitor_thread.is_alive():
        device_monitor_thread.start()
        time.sleep(.5)
    device_monitor_thread.add_callbacks.append(callback)
    device_monitor_thread.remove_callbacks.append(callback)
| Python |
import gnome, gobject, gtk, gtk.glade, os, sys, tempfile, threading
import backup
import settings
import util
class GUI(object):
    """'Backup Preferences' dialog: per-backup exclusion checkboxes plus
    an optional maximum-file-size limit, loaded from and saved to the
    backup's preferences pickle."""

    def close(self, a=None, b=None):
        # a/b absorb the widget/event arguments GTK passes to handlers
        self.main_window.hide()
        self.unregister_gui(self)

    def save(self, a=None):
        """Collect widget state into a preferences dict, persist it via
        backup.save_preferences, and close the dialog."""
        preferences = {
            'exclude_audio': self.xml.get_widget('checkbutton_exclude_audio').get_active(),
            'exclude_video': self.xml.get_widget('checkbutton_exclude_video').get_active(),
            'exclude_trash': self.xml.get_widget('checkbutton_exclude_trash').get_active(),
            'exclude_cache': self.xml.get_widget('checkbutton_exclude_cache').get_active(),
            'exclude_vms': self.xml.get_widget('checkbutton_exclude_vms').get_active(),
            'exclude_iso': self.xml.get_widget('checkbutton_exclude_iso').get_active(),
        }
        # the size limit is None whenever its checkbox is unticked
        if self.xml.get_widget('checkbutton_exclude_filesize').get_active():
            preferences['exclude_filesize'] = self.xml.get_widget('spinbutton_exclude_filesize_value').get_value()
        else:
            preferences['exclude_filesize'] = None
        backup.save_preferences(self.uuid, self.host, self.path, preferences)
        self.close()

    def __init__(self, register_gui, unregister_gui, uuid, host, path):
        """Build the dialog from glade and populate its widgets from the
        saved preferences for (uuid, host, path)."""
        self.register_gui = register_gui
        self.unregister_gui = unregister_gui
        self.uuid = uuid
        self.host = host
        self.path = path
        self.xml = gtk.glade.XML( os.path.join( util.RUN_FROM_DIR, 'glade', 'manage_backup_preferences.glade' ) )
        self.main_window = self.xml.get_widget('dialog')
        self.xml.get_widget('button_cancel').connect('clicked', self.close)
        self.xml.get_widget('button_ok').connect('clicked', self.save)
        icon = self.main_window.render_icon(gtk.STOCK_HARDDISK, gtk.ICON_SIZE_BUTTON)
        self.main_window.set_icon(icon)
        self.main_window.set_title('%s v%s - Backup Preferences' % (settings.PROGRAM_NAME, settings.PROGRAM_VERSION))
        self.preferences = backup.get_preferences(self.uuid, self.host, self.path)
        print self.preferences
        self.xml.get_widget('checkbutton_exclude_audio').set_active(self.preferences.get('exclude_audio'))
        self.xml.get_widget('checkbutton_exclude_video').set_active(self.preferences.get('exclude_video'))
        self.xml.get_widget('checkbutton_exclude_trash').set_active(self.preferences.get('exclude_trash'))
        self.xml.get_widget('checkbutton_exclude_cache').set_active(self.preferences.get('exclude_cache'))
        self.xml.get_widget('checkbutton_exclude_vms').set_active(self.preferences.get('exclude_vms'))
        self.xml.get_widget('checkbutton_exclude_iso').set_active(self.preferences.get('exclude_iso'))
        # the filesize checkbox mirrors whether a limit is set at all
        self.xml.get_widget('checkbutton_exclude_filesize').set_active(bool(self.preferences.get('exclude_filesize')))
        if self.preferences.get('exclude_filesize'):
            self.xml.get_widget('spinbutton_exclude_filesize_value').set_value(self.preferences.get('exclude_filesize'))
        else:
            self.xml.get_widget('spinbutton_exclude_filesize_value').set_value(0)
        self.main_window.show()
| Python |
import gnome, gobject, gtk, gtk.glade, os, sys
import backup
import manage_backup_gui
import settings
import util
print util.RUN_FROM_DIR
def echo(*args):
    """Debug helper: print whatever positional args it receives."""
    print 'echo', args
class GUI(object):
    """'Create Backup' window: lists writable candidate drives and lets
    the user initialize a new backup of a chosen folder onto one."""

    def close(self, a=None, b=None):
        # a/b absorb the widget/event arguments GTK passes to handlers
        self.main_window.hide()
        self.unregister_gui(self)

    def init_backup(self,a=None,b=None,c=None):
        """Initialize a backup of the chosen folder on the selected drive,
        open the manage window for it, and close this one.  Warns if no
        drive row is selected.  a/b/c absorb GTK handler arguments."""
        treeview_backups_widget = self.xml.get_widget('treeview_backups')
        model, entry = treeview_backups_widget.get_selection().get_selected()
        if entry:
            uuid = model.get_value(entry, 3)  # column 3 holds the device uuid
            host = backup.get_hostname()
            path = self.xml.get_widget('filechooserbutton').get_current_folder()
            print 'opening... drive:%s'%uuid, 'host:%s'%host, 'path:%s'%path
            backup.init_backup(uuid, host, path)
            self.register_gui( manage_backup_gui.GUI(self.register_gui, self.unregister_gui, uuid, host, path) )
            self.close()
        else:
            s = 'No Drive Selected'
            md = gtk.MessageDialog(None, gtk.DIALOG_DESTROY_WITH_PARENT, gtk.MESSAGE_WARNING, gtk.BUTTONS_CLOSE, s)
            md.format_secondary_markup('You must select a drive from the list...')
            md.run()
            md.destroy()

    def refresh_device_list(self):
        """Rebuild the drive list from the currently writable devices,
        or show a hint row (and disable 'new') when none are available."""
        treeview_backups_model = self.xml.get_widget('treeview_backups').get_model()
        treeview_backups_model.clear()
        writable_devices = backup.get_writable_devices()
        for uuid in writable_devices:
            path = backup.get_mount_point_for_uuid(uuid)
            # icon reflects device type: network share, local disk, unknown
            if backup.get_device_type(uuid)=='gvfs':
                icon = self.main_window.render_icon(gtk.STOCK_NETWORK, gtk.ICON_SIZE_DIALOG)
            elif backup.get_device_type(uuid)=='local':
                icon = self.main_window.render_icon(gtk.STOCK_HARDDISK, gtk.ICON_SIZE_DIALOG)
            else:
                icon = self.main_window.render_icon(gtk.STOCK_DIALOG_QUESTION, gtk.ICON_SIZE_DIALOG)
            free_space = util.humanize_bytes(backup.get_free_space(uuid))
            s = "<b>Drive:</b> %s\n<b>Mount Point:</b> %s\n<b>Free Space:</b> %s" % (util.pango_escape(uuid), util.pango_escape(path), util.pango_escape(free_space))
            treeview_backups_model.append( (icon, s, backup.is_dev_present(uuid), uuid) )
        if not writable_devices:
            icon = self.main_window.render_icon(gtk.STOCK_INFO, gtk.ICON_SIZE_DIALOG)
            s = 'In order to create a backup, Flyback needs a hard drive\nother than the one your computer boots from.\n(preferably external and removable) Please plug one\ninto a free USB or eSATA port...'
            treeview_backups_model.append( (icon, s, False, None) )
            self.xml.get_widget('button_new').set_sensitive(False)
        else:
            self.xml.get_widget('button_new').set_sensitive(True)

    def __init__(self, register_gui, unregister_gui):
        """Build the window from glade, wire buttons and the drive list,
        and subscribe to device hotplug notifications."""
        print util.RUN_FROM_DIR
        self.register_gui = register_gui
        self.unregister_gui = unregister_gui
        self.xml = gtk.glade.XML( os.path.join( util.RUN_FROM_DIR, 'glade', 'create_backup.glade' ) )
        self.main_window = self.xml.get_widget('window')
        self.main_window.connect("delete-event", self.close )
        icon = self.main_window.render_icon(gtk.STOCK_HARDDISK, gtk.ICON_SIZE_BUTTON)
        self.main_window.set_icon(icon)
        self.main_window.set_title('%s v%s - Create Backup' % (settings.PROGRAM_NAME, settings.PROGRAM_VERSION))
        # buttons
        self.xml.get_widget('button_cancel').connect('clicked', self.close)
        self.xml.get_widget('button_new').connect('clicked', self.init_backup)
        # setup list; columns: icon | markup | device present | uuid
        treeview_backups_model = gtk.ListStore( gtk.gdk.Pixbuf, str, bool, str )
        treeview_backups_widget = self.xml.get_widget('treeview_backups')
        renderer = gtk.CellRendererPixbuf()
        renderer.set_property('xpad', 4)
        renderer.set_property('ypad', 4)
        treeview_backups_widget.append_column( gtk.TreeViewColumn('', renderer, pixbuf=0) )
        renderer = gtk.CellRendererText()
        renderer.set_property('xpad', 16)
        renderer.set_property('ypad', 16)
        treeview_backups_widget.append_column( gtk.TreeViewColumn('', renderer, markup=1) )
        treeview_backups_widget.set_headers_visible(False)
        treeview_backups_widget.set_model(treeview_backups_model)
        # keep the drive list current as hardware comes and goes
        util.register_device_added_removed_callback(self.refresh_device_list)
        self.refresh_device_list()
        self.main_window.show()
| Python |
import datetime, os, pickle, sys, tempfile, traceback
import uuid as uuidlib
import settings
import util
# Namespace UUID used to derive stable synthetic uuids for gvfs (network)
# mounts, which have no filesystem UUID of their own.
UUID_GVFS = uuidlib.uuid5(uuidlib.NAMESPACE_DNS, 'gvfs.flyback.org')
def get_known_backups():
    """Scan every attached device's mount point for '.flybackdb*' dirs and
    return each backup's pickled properties dict ({'uuid','host','path',
    'version'}).  Unreadable or corrupt property pickles are skipped with
    a message."""
    backups = []
    for uuid in get_all_devices():
        path = get_mount_point_for_uuid(uuid)
        if path:  # only mounted devices can be scanned
            fbdbs = [ x for x in os.listdir(path) if x.startswith('.flybackdb') ]
            for fbdb in fbdbs:
                try:
                    f = open( os.path.join(path, fbdb, 'flyback_properties.pickle'), 'rb' )
                    o = pickle.load(f)
                    f.close()
                    backups.append(o)
                    print 'discovered backup:', uuid, path
                except:
                    print 'failed to read:', os.path.join(path, fbdb, 'flyback_properties.pickle')
    return backups
def is_dev_present(uuid):
    """Return True when the device backing `uuid` is currently attached:
    either a gvfs mount with a matching synthetic uuid, or a local disk
    whose /dev/disk/by-uuid symlink exists."""
    # gvfs mounts first
    if any(dev_uuid == uuid for dev_uuid, _mount in get_gvfs_devices_and_paths()):
        return True
    # then local block devices known to udev
    return os.path.exists(os.path.join('/dev/disk/by-uuid/', uuid))
def get_device_type(uuid):
    """Classify a device uuid: 'gvfs' for network mounts, 'local' for
    disks listed in /dev/disk/by-uuid, or None when absent/unknown."""
    if uuid in (dev_uuid for dev_uuid, _mount in get_gvfs_devices_and_paths()):
        return 'gvfs'
    if os.path.exists(os.path.join('/dev/disk/by-uuid/', uuid)):
        return 'local'
    return None
def get_hostname():
    """Return this machine's hostname; backups are keyed by (uuid, host,
    path), so the host name decides ownership of a backup."""
    from socket import gethostname
    return gethostname()
def get_gvfs_devices():
    """Return just the synthetic uuids of the mounted gvfs shares."""
    return [ uuid for uuid, _mount in get_gvfs_devices_and_paths() ]
def get_gvfs_devices_and_paths():
    """Return [(uuid, mount_point)] for every gvfs mount under ~/.gvfs.

    Network mounts have no filesystem UUID, so a stable one is synthesized
    from the mount point path via uuid5 in the UUID_GVFS namespace."""
    gvfs_dir = os.path.join(os.path.expanduser('~'), '.gvfs')
    if not os.path.exists(gvfs_dir):
        return []
    pairs = []
    for entry in os.listdir(gvfs_dir):
        mount_point = os.path.join(gvfs_dir, entry)
        pairs.append((str(uuidlib.uuid5(UUID_GVFS, mount_point)), mount_point))
    return pairs
def get_local_devices():
    """Return the filesystem uuids of all local block devices, as exposed
    by the kernel's /dev/disk/by-uuid symlink directory."""
    return [os.path.basename(entry) for entry in os.listdir('/dev/disk/by-uuid/')]
def get_all_devices():
    """All candidate backup devices: local disks first, then gvfs mounts."""
    devices = get_local_devices()
    devices.extend(get_gvfs_devices())
    return devices
def get_writable_devices():
    """Return uuids of mounted devices we can actually write to, probed by
    creating and deleting a small test file at each mount point."""
    writable_uuids = []
    for uuid in get_all_devices():
        path = get_mount_point_for_uuid(uuid)
        if path:  # unmounted devices are skipped entirely
            try:
                fn = os.path.join(path,'.flyback_write_test.txt')
                f = open(fn, 'w')
                f.write('delete me!')
                f.close()
                os.remove(fn)
                writable_uuids.append(uuid)
            except:
                # read-only or permission-denied mounts land here
                print 'could not write to:', path
    return writable_uuids
def test_backup_assertions(uuid, host, path, test_exists=True):
    """Return True iff a backup operation can proceed: the device is
    attached, we are running on the host that owns the backup, the source
    path exists, and (unless test_exists=False, used while creating a new
    backup) the git db dir already exists.  Prints the first failing
    check for diagnostics."""
    if not is_dev_present(uuid):
        print 'not is_dev_present("%s")' % uuid
        return False
    if not get_hostname()==host:
        print 'get_hostname()!="%s"' % host
        return False
    if not os.path.exists(path):
        print 'not os.path.exists("%s")' % path
        return False
    if test_exists:
        if not os.path.exists( get_git_dir(uuid, host, path) ):
            print 'not os.path.exists("%s")' % get_git_dir(uuid, host, path)
            return False
    return True
def get_dev_paths_for_uuid(uuid):
    """Return the set of /dev paths (DEVNAME plus all DEVLINKS aliases)
    that udev reports for the filesystem with this uuid, by parsing
    `udevadm info` output."""
    dev_path = os.path.join( '/dev/disk/by-uuid/', uuid )
    f = os.popen('udevadm info -q all -n "%s"' % dev_path)
    s = f.read()
    f.close()
    dev_paths = set()
    for line in s.split('\n'):
        if line.startswith('E: DEVNAME='):
            dev_paths.add( line[line.index('=')+1:].strip() )
        if line.startswith('E: DEVLINKS='):
            # DEVLINKS is a single space-separated list of symlink paths
            for path in line[line.index('=')+1:].strip().split():
                dev_paths.add(path)
    return dev_paths
def get_mount_point_for_uuid(uuid):
    """Return where the device with `uuid` is mounted, or None (implicit)
    when it is not mounted.  gvfs uuids map straight to their ~/.gvfs
    directory; local devices are resolved by matching `mount` output
    against every /dev alias of the uuid."""
    # handle gfvs
    for x,y in get_gvfs_devices_and_paths():
        if uuid==x:
            return y
    # handle local devices
    dev_paths = get_dev_paths_for_uuid(uuid)
    f = os.popen('mount')
    s = f.read()
    f.close()
    for line in s.split('\n'):
        x = line.strip().split(' ')
        if x:
            dev_path = x[0]
            if dev_path in dev_paths:
                # mount lines look like '<dev> on <mount point> type <fs> ...';
                # the join re-assembles mount points that contain spaces.
                # NOTE(review): fragile if the mount point contains ' type '.
                mount_path = ' '.join(x[2:x.index('type')])
                return mount_path
def get_drive_name(uuid):
    """Return a human-readable name for the drive: the last matching
    'disk/by-id' alias when udev provides one, otherwise 'UUID: <uuid>'."""
    name = 'UUID: ' + uuid
    marker = 'disk/by-id'
    for dev_path in get_dev_paths_for_uuid(uuid):
        if marker in dev_path:
            # keep everything after 'disk/by-id/' (marker + slash = 11 chars)
            name = dev_path[dev_path.index(marker) + len(marker) + 1:]
    return name
def get_free_space(uuid):
    """Return the free space in bytes on the device's mounted filesystem
    (via `df`), or -1 when df reports a zero-sized filesystem (unknown)."""
    path = get_mount_point_for_uuid(uuid)
    cmd = 'df "%s"' % path
    print '$', cmd
    f = os.popen(cmd)
    s = f.read()
    f.close()
    line = s.split('\n',1)[1]  # skip the df header row
    x = line.strip().split()
    print x
    if int(x[1])==0: return -1 # unknown amount of space
    # df reports 1K blocks; column 3 is the 'available' column
    return int(x[3])*1024
def get_git_db_name(uuid, host, path):
    """Deterministic, filesystem-safe directory name for this backup's
    git db: '.flybackdb_' + sha1 of 'uuid:host:path'."""
    import hashlib
    s = ':'.join( (uuid, host, path) )
    print s
    return '.flybackdb_%s' % hashlib.sha1(s).hexdigest()
def get_git_dir(uuid, host, path):
    """Return the absolute path of the backup's git db directory: the
    device's mount point joined with the hashed db name."""
    mount_point = get_mount_point_for_uuid(uuid)
    git_db = get_git_db_name(uuid, host, path)
    git_db_dir = os.path.join( mount_point, git_db )
    print 'git_db_dir', git_db_dir
    return git_db_dir
def rmdir(tmp):
    """Recursively delete `tmp` by shelling out to `rm -Rf`, printing any
    output.  NOTE(review): the path is only quoted with plain double
    quotes; a path containing '"' would break the command."""
    f = os.popen('rm -Rf "%s"' % tmp)
    s = f.read().strip()
    f.close()
    if s: print s
def init_backup(uuid, host, path):
    """Create a new backup repository for (uuid, host, path): git-init the
    db dir on the backup drive, record its properties pickle, and seed
    the default preferences."""
    assert test_backup_assertions(uuid, host, path, test_exists=False)
    tmp = tempfile.mkdtemp(suffix='_flyback')
    os.chdir(tmp)  # run git from a throwaway empty dir
    git_dir = get_git_dir(uuid, host, path)
    print 'initializing repository...', git_dir
    cmd = 'GIT_DIR="%s" git init' % (git_dir,)
    print '$', cmd
    f = os.popen(cmd)
    s = []
    for line in f:
        s.append(line)
        sys.stdout.write(line)
    s = ''.join(s)
    if f.close():  # non-None close() means git exited non-zero
        raise Exception(s)
    # write config info
    f = open( os.path.join(git_dir, 'flyback_properties.pickle'), 'wb' )
    o = {
        'uuid':uuid,
        'host':host,
        'path':path,
        'version':settings.PROGRAM_VERSION,
    }
    pickle.dump(o,f)
    f.close()
    # save default preferences
    preferences = get_preferences(uuid, host, path)
    save_preferences(uuid, host, path, preferences)
    rmdir(tmp)
    os.chdir(util.RUN_FROM_DIR)
    return
def get_new_files(uuid, host, path):
    """Return relative paths that exist under `path` on disk but are
    absent from the most recent revision, i.e. files git has never seen
    and which must be `git add`ed before committing."""
    git_dir = get_git_dir(uuid, host, path)
    # get files that exist now
    f = os.popen('find "%s"' % path)
    existing_files = f.read().split('\n')
    f.close()
    # strip the source-dir prefix so paths are repository-relative
    existing_files = set([ x[len(path):].lstrip('/') for x in existing_files ])
    # get files in revision
    revisions = get_revisions(uuid, host, path)
    if revisions:
        print 'revisions', revisions
        # revisions[0] is the newest commit
        rev_files = set(get_files_for_revision(uuid, host, path, revisions[0]['commit']))
    else:
        rev_files = set()
    return existing_files - rev_files
def backup(uuid, host, path):
    """Snapshot `path` into the backup's git repository: add any new
    files, commit everything, then repack into large pack files."""
    assert test_backup_assertions(uuid, host, path)
    git_dir = get_git_dir(uuid, host, path)
    if not os.path.exists(git_dir):
        init_backup(uuid, host, path)
    os.chdir(path)  # git commands below use `path` as the work tree
    git_cmd = 'GIT_DIR="%s" GIT_WORK_TREE="%s" git ' % (git_dir,path)
    # add any new files
    for fn in get_new_files(uuid, host, path):
        cmd = git_cmd + 'add -v "%s"' % fn
        print '$', cmd
        f = os.popen(cmd)
        for line in f:
            sys.stdout.write(line)
        f.close()
    # commit
    cmd = git_cmd + 'commit -v . -m "commited by: %s v%s"' % (settings.PROGRAM_NAME, settings.PROGRAM_VERSION)
    print '$', cmd
    f = os.popen(cmd)
    s = []
    for line in f:
        s.append(line)
        sys.stdout.write(line)
    s = ''.join(s)
    exit_code = f.close()
    # close() returns the wait status; 256 is exit code 1, which is how
    # `git commit` reports 'nothing to commit' -- tolerated. TODO confirm
    if exit_code>0 and exit_code!=256:
        print 'exit_code', exit_code
        raise Exception(s)
    # repack
    cmd = git_cmd + 'repack -A -d --max-pack-size=2000'
    print '$', cmd
    f = os.popen(cmd)
    s = []
    for line in f:
        sys.stdout.write(line)
    # NOTE(review): nothing is appended to s in this loop, so a repack
    # failure raises Exception('') with no captured output attached.
    s = ''.join(s)
    if f.close():
        raise Exception(s)
def get_preferences(uuid, host, path):
    """Return the effective preferences for this backup: the defaults
    from settings overlaid with whatever was persisted in the backup's
    flyback_preferences.pickle (missing/corrupt files leave defaults).

    Fix: the except branch used `print traceback.print_exc()`, which
    printed the traceback and then a stray 'None' line (print_exc()
    returns None); the bogus print is dropped.
    """
    preferences = dict(settings.DEFAULT_PREFERENCES)
    git_dir = get_git_dir(uuid, host, path)
    try:
        f = open( os.path.join(git_dir, 'flyback_preferences.pickle'), 'rb' )
        o = pickle.load(f)
        f.close()
        if o:
            preferences.update(o)
    except:
        # best-effort load: log the problem and fall back to defaults
        traceback.print_exc()
    return preferences
def save_preferences(uuid, host, path, preferences):
preferences_diff = {}
for k,v in preferences.iteritems():
if settings.DEFAULT_PREFERENCES.get(k)!=v:
preferences_diff[k] = v
git_dir = get_git_dir(uuid, host, path)
try:
f = open( os.path.join(git_dir, 'flyback_preferences.pickle'), 'wb' )
pickle.dump(preferences_diff, f)
f.close()
except:
print traceback.print_exc()
# gen exclude file
exclude_map = {
'exclude_audio': ['*.mp3','*.aac','*.wma'],
'exclude_video': ['*.mp4','*.avi','*.mpeg',],
'exclude_trash': ['Trash/','.Trash*/',],
'exclude_cache': ['Cache/','.cache/',],
'exclude_vms': ['*.vmdk',],
'exclude_iso': ['*.iso',],
}
try:
f = open( os.path.join(git_dir, 'info', 'exclude'), 'w' )
for k,v in exclude_map.iteritems():
if preferences.get(k):
for x in v:
f.write('%s\n' % x)
print 'excluding:', x
f.close()
except:
print traceback.print_exc()
def get_revisions(uuid, host, path):
    """Parse `git log` for this backup into a newest-first list of dicts:
    {'commit', 'author', 'date', 'message', 'verified'}, where 'verified'
    is the recorded datetime of the last successful verify (or None)."""
    tmp = tempfile.mkdtemp(suffix='_flyback')
    os.chdir(tmp)  # run git from an empty dir; work tree is irrelevant here
    git_dir = get_git_dir(uuid, host, path)
    git_cmd = 'GIT_DIR="%s" GIT_WORK_TREE="%s" git ' % (git_dir,tmp)
    cmd = git_cmd + 'log'
    print '$', cmd
    f = os.popen(cmd)
    s = []
    for line in f:
        s.append(line)
        sys.stdout.write(line)
    s = ''.join(s)
    if f.close():
        # a brand-new repository has no history; tolerate the failure
        pass #raise Exception(s)
    # load verification history
    try:
        f = open( os.path.join(git_dir, 'revision_verifications.pickle'), 'rb' )
        revision_verifications = pickle.load(f)
        f.close()
    except:
        revision_verifications = {}
    log = []
    if s:
        entry = None
        for line in s.split('\n'):
            if line.startswith('commit'):
                # a new 'commit' header flushes the entry being built
                if entry:
                    entry['verified'] = revision_verifications.get(entry['commit'])
                    log.append(entry)
                entry = {'commit':line[line.index(' '):].strip(), 'message':''}
            elif line.startswith('Author: '):
                entry['author'] = line[line.index(' '):].strip()
            elif line.startswith('Date: '):
                entry['date'] = line[line.index(' '):].strip()
            else:
                entry['message'] += line
        # flush the final entry
        if entry:
            entry['verified'] = revision_verifications.get(entry['commit'])
            log.append(entry)
    rmdir(tmp)
    os.chdir(util.RUN_FROM_DIR)
    print 'log', log
    return log
def get_files_for_revision(uuid, host, path, rev):
    """List every file recorded in revision `rev` via
    `git ls-tree -r --name-only`, with any surrounding quotes stripped.
    NOTE(review): splitting the output on newlines leaves a trailing ''
    entry from the final newline."""
    tmp = tempfile.mkdtemp(suffix='_flyback')
    os.chdir(tmp)  # run git from an empty throwaway dir
    git_dir = get_git_dir(uuid, host, path)
    git_cmd = 'GIT_DIR="%s" GIT_WORK_TREE="%s" git ' % (git_dir,tmp)
    cmd = git_cmd + 'ls-tree -r --name-only ' + rev
    print '$', cmd
    f = os.popen(cmd)
    s = []
    for line in f:
        s.append(line)
    rmdir(tmp)
    os.chdir(util.RUN_FROM_DIR)
    s = ''.join(s)
    if f.close():  # non-None close() means git exited non-zero
        raise Exception(s)
    return [ x.strip('"') for x in s.split('\n') ]
def export_revision(uuid, host, path, rev, target_path):
    """Export revision `rev` as a gzipped tar archive inside
    `target_path` (via `git archive | gzip`) and return the archive's
    filename."""
    tmp = tempfile.mkdtemp(suffix='_flyback')
    os.chdir(tmp)  # run git from an empty throwaway dir
    git_dir = get_git_dir(uuid, host, path)
    git_cmd = 'GIT_DIR="%s" GIT_WORK_TREE="%s" git ' % (git_dir,tmp)
    fn = '%s/flyback-archive_r%s.tar.gz' % (target_path, rev)
    cmd = git_cmd + 'archive %s | gzip > "%s"' % (rev, fn)
    print '$', cmd
    f = os.popen(cmd)
    s = []
    for line in f:
        s.append(line)
        sys.stdout.write(line)
    rmdir(tmp)
    os.chdir(util.RUN_FROM_DIR)
    s = ''.join(s)
    if f.close():  # non-None close() means the pipeline failed
        raise Exception(s)
    return fn
def verify_revision(uuid, host, path, rev):
tmp = tempfile.mkdtemp(suffix='_flyback')
os.chdir(tmp)
git_dir = get_git_dir(uuid, host, path)
git_cmd = 'GIT_DIR="%s" GIT_WORK_TREE="%s" git ' % (git_dir,tmp)
cmd = git_cmd + 'archive %s > /dev/null' % (rev)
print '$', cmd
f = os.popen(cmd)
s = []
for line in f:
s.append(line)
sys.stdout.write(line)
rmdir(tmp)
os.chdir(util.RUN_FROM_DIR)
s = ''.join(s)
if f.close():
raise Exception(s)
# save verification history
print 1
try:
f = open( os.path.join(git_dir, 'revision_verifications.pickle'), 'rb' )
revision_verifications = pickle.load(f)
print 2
f.close()
except:
revision_verifications = {}
print 3
revision_verifications[rev] = datetime.datetime.now()
f = open( os.path.join(git_dir, 'revision_verifications.pickle'), 'wb' )
pickle.dump(revision_verifications,f)
f.close()
print 4
def get_status(uuid, host, path):
    """Parse `git status` output into three lists of file names:
    (added, modified, deleted) since the last backup.
    NOTE(review): this scrapes the human-readable, '#'-prefixed status
    format; output differs across git versions -- confirm against the
    git version actually deployed."""
    assert test_backup_assertions(uuid, host, path)
    added = []
    modified = []
    deleted = []
    os.chdir(path)
    git_dir = get_git_dir(uuid, host, path)
    git_cmd = 'GIT_DIR="%s" GIT_WORK_TREE="%s" git ' % (git_dir,path)
    cmd = git_cmd + 'status'
    print '$', cmd
    f = os.popen(cmd)
    rest_are_added = False
    for line in f:
        sys.stdout.write(line)
        if not line.startswith('#'):
            continue
        if line.startswith('# modified:'):
            fn = line[ line.index(':')+1: ].strip()
            modified.append(fn)
        if line.startswith('# deleted:'):
            fn = line[ line.index(':')+1: ].strip()
            deleted.append(fn)
        # untracked files are listed after the '(use "git add ...)' hint;
        # any other '(use "git ...' hint line ends that section
        if line.startswith('# (use "git'):
            if line.startswith('# (use "git add'):
                rest_are_added = True
            else:
                rest_are_added = False
            continue
        if rest_are_added:
            fn = line.lstrip('#').strip()
            if fn:
                added.append(fn)
    f.close()
    os.chdir(util.RUN_FROM_DIR)
    return added, modified, deleted
def delete_backup(uuid, host, path):
git_dir = get_git_dir(uuid, host, path)
cmd = 'rm -Rf "%s"' % git_dir
print '$', cmd
f = os.popen(cmd)
for line in f:
sys.stdout.write(line)
f.close()
| Python |
#!/usr/bin/python
import os, sys, traceback
import settings
import backup
# Registry of currently open GUI windows; the gtk main loop is stopped
# when the last registered window closes.
GUIS = set()
def register_gui(gui):
    # Track a newly opened window.
    GUIS.add( gui )
def unregister_gui(gui):
    # Forget a closed window; quit the gtk main loop once none remain.
    GUIS.discard(gui)
    if not GUIS:
        import gtk
        gtk.main_quit()
def run_all_backups():
for t in backup.get_known_backups():
uuid = t['uuid']
host = t['host']
path = t['path']
if backup.test_backup_assertions(uuid, host, path):
print '---=== starting backup:', uuid, path, '===---'
try: backup.backup(uuid, host, path)
except: traceback.print_exc()
else:
print '---=== skipped backup:', uuid, path, '===---'
def run_backup(uuid, path):
host = backup.get_hostname()
if backup.test_backup_assertions(uuid, host, path):
print '---=== starting backup:', uuid, path, '===---'
try: backup.backup(uuid, host, path)
except: traceback.print_exc()
else:
print '---=== skipped backup:', uuid, path, '===---'
def launch_select_backup_gui():
    # Imported lazily so non-GUI (command-line) runs never pull in gtk.
    import select_backup_gui
    register_gui( select_backup_gui.GUI(register_gui, unregister_gui) )
if __name__=='__main__':
    import sys
    args = sys.argv[1:]
    if len(args):
        # command-line mode: banner, then dispatch on arguments
        print
        print "------------------------------------------"
        print " FlyBack - Apple's Time Machine for Linux"
        print "------------------------------------------"
        print
        if args[0] in ('-b','--backup-all'):
            run_all_backups()
        elif len(args)==2:
            # <drive_uuid> <path>
            run_backup(args[0], args[1])
        else:
            # unrecognized arguments: show usage
            print ' to launch the graphical interface:'
            print ' $ python flyback.py'
            print ' to backup all detected repositories:'
            print ' $ python flyback.py [-b|--backup-all]'
            print ' to backup a specific repository:'
            print ' $ python flyback.py <drive_uuid> <path>'
            print
    else:
        # no arguments: start the gtk GUI
        import gobject, gnome, gtk
        gnome.init( settings.PROGRAM_NAME, settings.PROGRAM_VERSION )
        gobject.threads_init()
        gtk.gdk.threads_init()
        launch_select_backup_gui()
        gtk.main()
| Python |
import gnome, gobject, gtk, gtk.glade, os, sys, tempfile, threading
import settings
import util
class GUI(object):
    """Backup-status dialog: lists added/modified/deleted files with icons."""
    def close(self, a=None, b=None):
        # Hide the dialog and drop out of the application's window registry.
        self.main_window.hide()
        self.unregister_gui(self)
    def set_files(self, added, modified, deleted):
        # Repopulate the file list: one row per file, with a status icon.
        print added, modified, deleted
        model = self.xml.get_widget('treeview_filelist').get_model()
        model.clear()
        for fn in added:
            icon_added = self.main_window.render_icon(gtk.STOCK_ADD, gtk.ICON_SIZE_MENU)
            model.append( (icon_added, fn) )
        for fn in modified:
            icon_modified = self.main_window.render_icon(gtk.STOCK_EDIT, gtk.ICON_SIZE_MENU)
            model.append( (icon_modified, fn) )
        for fn in deleted:
            icon_deleted = self.main_window.render_icon(gtk.STOCK_DELETE, gtk.ICON_SIZE_MENU)
            model.append( (icon_deleted, fn) )
    def __init__(self, register_gui, unregister_gui, uuid, host, path):
        # register_gui/unregister_gui: callbacks into the window registry.
        self.register_gui = register_gui
        self.unregister_gui = unregister_gui
        self.uuid = uuid
        self.host = host
        self.path = path
        # Build the dialog from its glade description.
        self.xml = gtk.glade.XML( os.path.join( util.RUN_FROM_DIR, 'glade', 'backup_status.glade' ) )
        self.main_window = self.xml.get_widget('dialog')
        icon = self.main_window.render_icon(gtk.STOCK_HARDDISK, gtk.ICON_SIZE_BUTTON)
        self.main_window.set_icon(icon)
        self.main_window.set_title('%s v%s - Backup Status' % (settings.PROGRAM_NAME, settings.PROGRAM_VERSION))
        self.xml.get_widget('button_close').connect('clicked', self.close)
        # Two columns: status icon (pixbuf, col 0) and file name (markup, col 1).
        treeview_files_widget = self.xml.get_widget('treeview_filelist')
        treeview_files_model = gtk.ListStore( gtk.gdk.Pixbuf, str )
        renderer = gtk.CellRendererPixbuf()
        renderer.set_property('xpad', 4)
        renderer.set_property('ypad', 4)
        treeview_files_widget.append_column( gtk.TreeViewColumn('', renderer, pixbuf=0) )
        renderer = gtk.CellRendererText()
        treeview_files_widget.append_column( gtk.TreeViewColumn('', renderer, markup=1) )
        treeview_files_widget.set_model(treeview_files_model)
        treeview_files_widget.set_headers_visible(False)
        treeview_files_widget.set_property('rules-hint', True)
        # Placeholder row until set_files() is called with real data.
        treeview_files_model.append( (None, 'Please wait...(loading list)') )
        self.main_window.show()
| Python |
#!/usr/bin/python
# -*- coding: UTF-8 -*-
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
import subprocess
import os
import sys
from webbrowser import _iscommand as is_command
import gtk
import mimetypes, mimetools
import config
import locale
import ctypes
try: import i18n
except: from gettext import gettext as _
_browser = ''
def open_webbrowser(uri):
    '''open a URI in the registered default application
    '''
    ## for proxychains: strip libproxychains from LD_PRELOAD so the
    ## spawned browser is not forced through the proxy
    os.environ['LD_PRELOAD'] = ' '.join(
        [ ld for ld in os.environ.get('LD_PRELOAD', '').split(' ') if 'libproxychains.so' not in ld ]
    )
    if sys.platform[:3] == "win":
        # 'start' is a cmd.exe builtin, not an executable, so
        # Popen(['start', uri]) cannot work; os.startfile is the
        # proper Windows API for launching a URI/file.
        os.startfile(uri)
    else:
        subprocess.Popen(['xdg-open', uri])
def webkit_set_proxy_uri(uri):
    """Install *uri* as the proxy for webkit's default libsoup session.

    Uses ctypes against libsoup/libwebkit because the python bindings do
    not expose the session's proxy property.  Returns 0 on success, 1 on
    any failure.  An empty/None uri clears the proxy.
    """
    # NOTE(review): a scheme-less uri gets an 'https://' prefix here,
    # while most proxy URIs are http:// -- confirm this is intended.
    if uri and '://' not in uri:
        uri = 'https://' + uri
    try:
        # library names differ per platform / webkit version
        if os.name == 'nt':
            libgobject = ctypes.CDLL('libgobject-2.0-0.dll')
            libsoup = ctypes.CDLL('libsoup-2.4-1.dll')
            libwebkit = ctypes.CDLL('libwebkit-1.0-2.dll')
        else:
            libgobject = ctypes.CDLL('libgobject-2.0.so.0')
            libsoup = ctypes.CDLL('libsoup-2.4.so.1')
            try:
                libwebkit = ctypes.CDLL('libwebkitgtk-1.0.so.0')
            except:
                libwebkit = ctypes.CDLL('libwebkit-1.0.so.2')
            pass
        # 0 (NULL) clears the session's proxy
        proxy_uri = libsoup.soup_uri_new(str(uri)) if uri else 0
        session = libwebkit.webkit_get_default_session()
        libgobject.g_object_set(session, "proxy-uri", proxy_uri, None)
        if proxy_uri:
            # the session holds its own reference; free ours
            libsoup.soup_uri_free(proxy_uri)
        # bump connection limits while we are poking at the session
        libgobject.g_object_set(session, "max-conns", 20, None)
        libgobject.g_object_set(session, "max-conns-per-host", 5, None)
        return 0
    except:
        exctype, value = sys.exc_info()[:2]
        print 'error: webkit_set_proxy_uri: (%s, %s)' % (exctype,value)
        return 1
def open_file_chooser_dialog():
    """Run a modal gtk file-open dialog; return the chosen path or None."""
    sel_file = None
    fc_dlg = gtk.FileChooserDialog(title='Open ... '
        , parent=None
        , action=gtk.FILE_CHOOSER_ACTION_OPEN
        , buttons=(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL, gtk.STOCK_OPEN,gtk.RESPONSE_OK))
    fc_dlg.set_default_response(gtk.RESPONSE_OK)
    resp = fc_dlg.run()
    if resp == gtk.RESPONSE_OK:
        sel_file = fc_dlg.get_filename()
    fc_dlg.destroy()
    # NOTE(review): releases the gdk lock on exit -- presumably because
    # callers invoke this from a context holding it; confirm against callers.
    gtk.gdk.threads_leave()
    return sel_file
def encode_multipart_formdata(fields, files):
    """Build a multipart/form-data request body.

    fields: dict of unicode name -> unicode value.
    files:  sequence of (name, filename) pairs; each file is read from disk.
            NOTE(review): the reported upload filename is always
            'hotot.png' -- looks deliberate (image uploads); confirm
            before changing.

    Returns (headers, body): content-type with boundary, and content-length.
    """
    BOUNDARY = mimetools.choose_boundary()
    CRLF = '\r\n'
    L = []
    for key, value in fields.items():
        key, value = key.encode('utf8'), value.encode('utf8')
        L.append('--' + BOUNDARY)
        L.append('Content-Disposition: form-data; name="%s"' % key)
        L.append('')
        L.append(value)
    for pair in files:
        key, filename = pair[0].encode('utf8'), pair[1].encode('utf8')
        L.append('--' + BOUNDARY)
        L.append('Content-Disposition: form-data; name="%s"; filename="%s"' %
            (key, 'hotot.png'));
        L.append('Content-Type: %s' % get_content_type(filename))
        L.append('')
        # open() in binary mode instead of the py2-only file() builtin
        L.append(open(filename, 'rb').read())
    L.append('--' + BOUNDARY + '--')
    L.append('')
    body = CRLF.join(L)
    headers = {'content-type':'multipart/form-data; boundary=%s' % BOUNDARY
        , 'content-length': str(len(body))};
    return headers, body
def get_content_type(filename):
    """Guess the MIME type for *filename*, defaulting to a binary type."""
    guessed, _encoding = mimetypes.guess_type(filename)
    if guessed:
        return guessed
    return 'application/octet-stream'
def get_ui_object(name):
    """Return the first existing path for *name* under the data dirs, else None."""
    candidates = (os.path.join(base, name) for base in config.DATA_DIRS)
    for candidate in candidates:
        if os.path.exists(candidate):
            return candidate
def get_extra_exts():
    """Return file:// URIs of every installed extension's entry.js."""
    import glob
    pattern = os.path.join(config.CONF_DIR, config.EXT_DIR_NAME) + '/*'
    ext_dirs = [p for p in glob.glob(pattern) if os.path.isdir(p)]
    uris = []
    for ext_dir in ext_dirs:
        entry = os.path.join(ext_dir, 'entry.js')
        if os.path.exists(entry):
            uris.append('file://%s' % entry)
    return uris
def get_extra_fonts():
    """Return available font family names, non-ASCII-named families first.

    NOTE(review): the original mutated the list while iterating it
    (remove + insert(0)), which skips elements; this version partitions
    instead.  The hoisted non-ASCII group now keeps sorted order rather
    than the reversed order the buggy loop happened to produce.
    """
    font_list = [ff.get_name() for ff in
        gtk.gdk.pango_context_get().list_families()]
    font_list.sort()
    ascii_fonts = []
    non_ascii_fonts = []
    for font in font_list:
        try:
            font.decode('ascii')
        except:
            non_ascii_fonts.append(font)
        else:
            ascii_fonts.append(font)
    return non_ascii_fonts + ascii_fonts
def get_locale():
    """Return the language code of the default locale (e.g. 'en_US')."""
    language_code, _encoding = locale.getdefaultlocale()
    return language_code
| Python |
#!/usr/bin/env python
# -*- coding:utf8 -*-
import json
import config
import time
import base64
import urllib, urllib2
import gtk
import threading
import gobject
import utils
import hotot
import os
import sys
import subprocess
try: import i18n
except: from gettext import gettext as _
# python2 hack: allow implicit utf8 str/unicode conversions module-wide
reload(sys)
sys.setdefaultencoding('utf8')
USE_GTKNOTIFICATION_IN_NATIVE_PLATFORM = True
## Disable GtkNotification on Gnome3
screen = gtk.gdk.screen_get_default()
window_manager_name = screen.get_window_manager_name().lower() if screen else ''
if 'mutter' in window_manager_name:
    USE_GTKNOTIFICATION_IN_NATIVE_PLATFORM = False
if USE_GTKNOTIFICATION_IN_NATIVE_PLATFORM:
    # In-app notification popups (gtknotification module).
    import gtknotification
    class Notification(object):
        # Adapter exposing a pynotify-like interface over gtknotification.
        def do_notify(self, summary, body, icon_file = None):
            if (icon_file == None or not os.path.isfile(icon_file)):
                icon_file = utils.get_ui_object(os.path.join('image','ic64_hotot.png'));
            icon_file = 'file://' + icon_file
            title = _("Hotot Notification")
            text = summary + '\n' + body
            # marshal onto the gtk main loop
            gobject.idle_add(gtknotification.gtknotification, title, text, icon_file)
        update = do_notify
        show = str  # no-op stand-in so notify.show() is callable
    notify = Notification()
else:
    # Desktop notifications via libnotify.
    import pynotify
    pynotify.init(_("Hotot Notification"))
    notify = pynotify.Notification('Init', '')
# Filled in by the application at startup: the webkit view and main app.
webv = None
app = None
# Friendly messages for well-known HTTP error codes.
http_code_msg_table = {
    404: 'The URL you request does not exist. Please check your API Base/OAuth Base/Search Base.'
    , 401: 'Server cannot authenticate you. Please check your username/password and API base.'
    , 500: 'Server is broken. Please try again later.'
    , 502: 'Server is down or being upgraded. Please try again later.'
    , 503: 'Server is overcapacity. Please try again later.'
}
def init_notify():
    # Configure the shared pynotify notification; no-op when the in-app
    # GtkNotification fallback is in use (it needs no global setup).
    if USE_GTKNOTIFICATION_IN_NATIVE_PLATFORM:
        return
    notify.set_icon_from_pixbuf(
        gtk.gdk.pixbuf_new_from_file(
            utils.get_ui_object(os.path.join('image','ic64_hotot.png'))))
    notify.set_timeout(5000)
def do_notify(summary, body, icon_file = None):
    # Show a desktop notification; falls back to the bundled hotot icon
    # when icon_file is missing or does not exist on disk.
    if USE_GTKNOTIFICATION_IN_NATIVE_PLATFORM:
        return notify.do_notify(summary, body, icon_file)
    n = pynotify.Notification(summary, body)
    if (icon_file == None or not os.path.isfile(icon_file)):
        icon_file = utils.get_ui_object(os.path.join('image','ic64_hotot.png'));
    n.set_icon_from_pixbuf(gtk.gdk.pixbuf_new_from_file(icon_file))
    n.set_timeout(5000)
    n.show()
def crack_hotot(uri):
    # Dispatch a "hotot:" pseudo-URI coming from the web UI to the matching
    # python handler: system/..., action/..., or request/<urlencoded json>.
    params = uri.split('/')
    try:
        if params[0] == 'system':
            crack_system(params)
        elif params[0] == 'action':
            crack_action(params)
        elif params[0] == 'request':
            raw_json = urllib.unquote(params[1])
            # utf8-encode the keys so they can be used as python2 kwargs/keys
            req_params = dict([(k.encode('utf8'), v)
                for k, v in json.loads(raw_json).items()])
            crack_request(req_params)
    except Exception, e:
        import traceback
        print "Exception:"
        traceback.print_exc(file=sys.stdout)
def crack_action(params):
    # Handle "hotot:action/..." URIs from the web UI.
    if params[1] == 'search':
        load_search(params[2])
    elif params[1] == 'choose_file':
        # let the user pick a file, then hand the path to the JS callback
        callback = params[2]
        file_path = utils.open_file_chooser_dialog()
        webv.execute_script('%s("%s")' % (callback, file_path))
    elif params[1] == 'save_avatar':
        img_uri = urllib.unquote(params[2])
        avatar_file = urllib.unquote(params[3])
        avatar_path = os.path.join(config.AVATAR_CACHE_DIR, avatar_file)
        # only download when the cached copy is absent or named differently
        if not (os.path.exists(avatar_path) and avatar_file.endswith(img_uri[img_uri.rfind('/')+1:])):
            print 'Download:', img_uri , 'To' , avatar_path
            th = threading.Thread(
                target = save_file_proc,
                args=(img_uri, avatar_path))
            th.start()
    elif params[1] == 'log':
        # ANSI-red console logging forwarded from the web side
        print '\033[1;31;40m[%s]\033[0m %s' % (urllib.unquote(params[2]) ,urllib.unquote(params[3]))
    elif params[1] == 'paste_clipboard_text':
        webv.paste_clipboard();
    elif params[1] == 'set_clipboard_text':
        clipboard = gtk.clipboard_get()
        # re-join remaining components: the text itself may contain '/'
        text = list(params)
        del text[0:2]
        clipboard.set_text('/'.join(text))
def crack_system(params):
    # Handle "hotot:system/..." URIs from the web UI.
    if params[1] == 'notify':
        type = urllib.unquote(params[2])
        summary = urllib.unquote(params[3])
        body = urllib.unquote(params[4])
        if type == 'content':
            # content notifications may carry a cached-avatar filename
            try:
                avatar_file = os.path.join(config.AVATAR_CACHE_DIR, urllib.unquote(params[5]))
            except:
                avatar_file = None
            do_notify(summary, body, avatar_file)
        elif type == 'count':
            # unread-count style notification reuses the shared object
            notify.update(summary, body)
            notify.show()
    elif params[1] == 'load_settings':
        settings = json.loads(urllib.unquote(params[2]))
        config.load_settings(settings)
        app.apply_settings()
    elif params[1] == 'sign_in':
        app.on_sign_in()
    elif params[1] == 'sign_out':
        app.on_sign_out()
    elif params[1] == 'quit':
        app.quit()
def crack_request(req_params):
    """Run an HTTP request described by the web UI on a worker thread."""
    field_order = ('uuid', 'method', 'url', 'params', 'headers', 'files')
    worker = threading.Thread(
        target=request,
        args=tuple(req_params[field] for field in field_order))
    worker.start()
def save_file_proc(uri, save_path):
    # Download *uri* to *save_path* (used for avatar caching).  On any
    # failure the partial file is removed so a retry starts clean.
    if (not os.path.isfile(save_path)):
        try:
            avatar = open(save_path, "wb")
            avatar.write(_get(uri, req_timeout=5))
            avatar.close()
        except:
            import traceback
            print "Exception:"
            traceback.print_exc(file=sys.stdout)
            os.unlink(save_path)
def execute_script(scripts):
    # Evaluate javascript in the embedded web view (module-level webv).
    return webv.execute_script(scripts)
def update_status(text):
    # Push *text* into the web UI's status box.
    # NOTE(review): *text* is spliced into javascript unescaped -- a quote
    # in the text would break the script; confirm callers sanitize it.
    webv.execute_script('''
    ui.StatusBox.update_status('%s');
    ''' % text);
def load_search(query):
    # Reset the web UI's search page and kick off a search for *query*.
    # NOTE(review): *query* is spliced into javascript unescaped.
    webv.execute_script('''
    ui.Main.reset_search_page('%s');
    $('#search_tweet_block > ul').html('');
    ui.Notification.set(_("Loading Search result %s ...")).show();
    daemon.update_search();
    ''' % (query, query));
def set_style_scheme():
    # Mirror the gtk theme's background colour onto the web UI header.
    style = app.window.get_style()
    base, fg, bg, text = style.base, style.fg, style.bg, style.text
    webv.execute_script('''
    $('#header').css('background', '%s');
    ''' % str(bg[gtk.STATE_NORMAL]));
def get_prefs(name):
    # Read one value from the loaded settings dict.
    return config.settings[name]
def set_prefs(name, value):
    # Write one value into the loaded settings dict (in memory only).
    config.settings[name] = value
def request(uuid, method, url, params={}, headers={},files=[],additions=''):
    # Perform an HTTP request on behalf of the web UI and deliver the
    # result (or an error dialog) back via javascript callbacks keyed by
    # *uuid*.  Runs on a worker thread; the final script is marshalled to
    # the gtk thread with gobject.idle_add.
    # WARNING: mutable default arguments are shared across calls.
    scripts = ''
    try:
        if (method == 'POST'):
            result = _post(url, params, headers, files, additions)
        else:
            result = _get(url, params, headers)
    except urllib2.HTTPError, e:
        # map well-known HTTP status codes to friendly messages
        msg = 'Unknown Errors ... '
        if http_code_msg_table.has_key(e.getcode()):
            msg = http_code_msg_table[e.getcode()]
        tech_info = 'HTTP Code: %s\\nURL: %s\\nDetails: %s' % (e.getcode(), e.geturl(), str(e))
        content = '<p>%s</p><h3>- Technological Info -</h3><div class="dlg_group"><pre>%s</pre></div>' % (msg, tech_info)
        scripts = '''
        widget.DialogManager.alert('%s', '%s');
        lib.network.error_task_table['%s']('');
        ''' % ('Ooops, an Error occurred!', content, uuid);
    except urllib2.URLError, e:
        content = '<p><label>Error Code:</label>%s<br/><label>Reason:</label> %s, %s<br/></p>' % (e.errno, e.reason, e.strerror)
        scripts = '''
        widget.DialogManager.alert('%s', '%s');
        lib.network.error_task_table['%s']('');
        ''' % ('Ooops, an Error occurred!', content, uuid);
    else:
        if uuid != None:
            # JSON payloads are injected verbatim; anything else is quoted
            if result[0] != '{' and result[0] != '[':
                scripts = '''lib.network.success_task_table['%s']('%s');
                ''' % (uuid, result)
            else:
                scripts = '''lib.network.success_task_table['%s'](%s);
                ''' % (uuid, result)
            # drop the one-shot callbacks once the result is delivered
            scripts += '''delete lib.network.success_task_table['%s'];
            delete lib.network.error_task_table['%s'];
            ''' % (uuid, uuid);
    gobject.idle_add(webv.execute_script, scripts)
def _get(url, params={}, req_headers={}, req_timeout=None):
    # HTTP GET returning the raw response body.
    # NOTE(review): *params* is accepted but never used here -- confirm callers.
    # WARNING: mutable default arguments are shared across calls.
    urlopen = urllib2.urlopen
    if get_prefs('use_http_proxy'):
        # route through the configured HTTP proxy
        proxy_support = urllib2.ProxyHandler(
            {"http" : get_prefs('http_proxy_host') +':'+str(get_prefs('http_proxy_port'))})
        urlopen = urllib2.build_opener(proxy_support).open
    request = urllib2.Request(url, headers=req_headers)
    ret = urlopen(request, timeout=req_timeout).read()
    return ret
def _post(url, params={}, req_headers={}, files=[], additions='', req_timeout=None):
    # HTTP POST returning the raw response body.  When *files* are given
    # the payload becomes multipart/form-data (fields folded into the body).
    # WARNING: mutable default arguments are shared across calls.
    if files != []:
        files_headers, files_data = utils.encode_multipart_formdata(params, files)
        params ={}
        req_headers.update(files_headers)
        additions += files_data
    urlopen = urllib2.urlopen
    if get_prefs('use_http_proxy'):
        # route through the configured HTTP proxy
        proxy_support = urllib2.ProxyHandler(
            {"http" : get_prefs('http_proxy_host') +':'+str(get_prefs('http_proxy_port'))})
        urlopen = urllib2.build_opener(proxy_support).open
    # utf8-encode unicode values before urlencoding (python2)
    params = dict([(k.encode('utf8')
        , v.encode('utf8') if type(v)==unicode else v)
        for k, v in params.items()])
    request = urllib2.Request(url,
        urlencode(params) + additions, headers=req_headers);
    ret = urlopen(request, timeout=req_timeout).read()
    return ret
# pycurl and StringIO are imported lazily on the first _curl() call.
pycurl = None
StringIO = None
def _curl(url, params=None, post=False, username=None, password=None, header=None, body=None):
    # Alternative HTTP transport built on libcurl.  Returns the response
    # body on HTTP 200, raises urllib2.HTTPError otherwise.
    # NOTE(review): the *body* parameter is never used -- confirm callers.
    global pycurl, StringIO
    if not pycurl:
        import pycurl
        try: import cStringIO as StringIO
        except: import StringIO
    curl = pycurl.Curl()
    if get_prefs('use_http_proxy'):
        HTTP_PROXY = '%s:%s' % (get_prefs('http_proxy_host'), get_prefs('http_proxy_port'))
        curl.setopt(pycurl.PROXY, HTTP_PROXY)
    if header:
        curl.setopt(pycurl.HTTPHEADER, [str(k) + ':' + str(v) for k, v in header.items()])
    if post:
        curl.setopt(pycurl.POST, 1)
    if params:
        # POST: form-encoded body; GET: append as a query string
        if post:
            curl.setopt(pycurl.POSTFIELDS, urllib.urlencode(params))
        else:
            url = "?".join((url, urllib.urlencode(params)))
    curl.setopt(pycurl.URL, str(url))
    if username and password:
        curl.setopt(pycurl.USERPWD, "%s:%s" % (str(username), str(password)))
    curl.setopt(pycurl.FOLLOWLOCATION, 1)
    curl.setopt(pycurl.MAXREDIRS, 5)
    curl.setopt(pycurl.TIMEOUT, 15)
    curl.setopt(pycurl.CONNECTTIMEOUT, 8)
    curl.setopt(pycurl.HTTP_VERSION, pycurl.CURL_HTTP_VERSION_1_0)
    # capture the body and headers into in-memory buffers
    content = StringIO.StringIO()
    hdr = StringIO.StringIO()
    curl.setopt(pycurl.WRITEFUNCTION, content.write)
    curl.setopt(pycurl.HEADERFUNCTION, hdr.write)
    print curl, url, header
    try:
        curl.perform()
    except pycurl.error, e:
        raise e
    http_code = curl.getinfo(pycurl.HTTP_CODE)
    if http_code != 200:
        # surface non-200 responses as HTTPError, like the urllib2 path
        status_line = hdr.getvalue().splitlines()[0]
        status_message = status_line
        e =urllib2.HTTPError (str(url), http_code, status_message, {}, None)
        e.url = url
        raise e
    else:
        return content.getvalue()
def urlencode(query):
    """URL-encode *query*, first dropping keys whose values are falsy.

    Mutates *query* in place, matching the original behaviour.
    """
    # iterate over a snapshot of the keys: deleting from a dict while
    # iterating its live items view is unsafe (a hard error on python3)
    for k in [k for k, v in query.items() if not v]:
        del query[k]
    return urllib.urlencode(query)
def idle_it(fn):
    # Wrap *fn* so every invocation is deferred onto the gtk main loop
    # (safe to call from worker threads).
    return lambda *args: gobject.idle_add(fn, *args)
| Python |
#!/usr/bin/python
# -*- coding: UTF-8 -*-
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
import glib
import gtk
import pango
import os
from xml.sax.saxutils import escape
# Pending notifications waiting for the on-screen one to close.
queue = list()
# The Notification window currently displayed, or None.
actual_notification = None
def gtknotification(title, text, icon_file=None, const=None, callback=None, tooltip=None):
    # Show (or queue) a popup notification.  Notifications sharing a title
    # are merged: their texts are concatenated with newlines.
    global actual_notification
    global queue
    if actual_notification is None:
        # nothing on screen: show immediately
        actual_notification = Notification(title, text, icon_file, callback, tooltip)
        actual_notification.show()
    else:
        if actual_notification._title == title:
            # same title as the visible popup: append to it
            actual_notification.append_text(text)
        else:
            # merge into a queued entry with the same title, if any
            found = False
            auxqueue = list()
            for _title, _text, _icon_file, _callback, _tooltip in queue:
                if _title == title:
                    _text = _text + "\n" + text
                    found = True
                auxqueue.append([_title, _text, _icon_file, _callback, _tooltip])
            if found:
                del queue
                queue = auxqueue
            else:
                queue.append([title, text, icon_file, callback, tooltip])
class Notification(gtk.Window):
    """Borderless popup window shown in the top-right screen corner.

    Auto-closes after 15 seconds; clicking it fires *callback* (if any)
    and closes it.  When it closes, the next queued notification (module
    global `queue`) is shown.
    """
    # pango markup templates for the title line and the message body
    title_markup = '<span foreground="%s" weight="ultrabold">%s</span>'
    text_markup = '<span foreground="%s"><b>%s</b>\n<span>%s</span></span>'
    def __init__(self, title, text, icon_file, callback, tooltip):
        gtk.Window.__init__(self, type=gtk.WINDOW_POPUP)
        self.foreground_color = "white"
        background_color = gtk.gdk.Color()
        icon_size = 48;
        max_width = 300;
        self.callback = callback
        self.set_border_width(10)
        self._title = title
        title_label = gtk.Label(self.title_markup % (self.foreground_color, escape(self._title)))
        title_label.set_use_markup(True)
        title_label.set_justify(gtk.JUSTIFY_LEFT)
        title_label.set_ellipsize(pango.ELLIPSIZE_END)
        # first line becomes the bold headline, the rest the plain body
        text1, text2 = (text + '\n').split('\n', 1)
        text = self.text_markup % (self.foreground_color, escape(text1), escape(text2))
        self.text = text
        self.message_label = gtk.Label(text)
        self.message_label.set_use_markup(True)
        self.message_label.set_justify(gtk.JUSTIFY_LEFT)
        self.message_label.set_line_wrap(True)
        self.message_label.set_alignment(0, 0)
        image = gtk.Image()
        image.set_alignment(0, 0)
        if icon_file:
            # accept both plain paths and file:// URIs
            if icon_file.startswith('file://'):
                icon_file = icon_file[7:]
            try:
                pixbuf = gtk.gdk.pixbuf_new_from_file_at_size(icon_file, icon_size, icon_size)
                image.set_from_pixbuf(pixbuf)
            except:
                pass
        # layout: [icon | title-over-message], wrapped in a clickable event box
        self.message_vbox = gtk.VBox()
        self.message_vbox.pack_start(title_label, False, False)
        self.message_vbox.pack_start(self.message_label, False, True)
        lbox = gtk.HBox()
        lbox.set_spacing(10)
        lbox.pack_start(image, False, False)
        lbox.pack_start(self.message_vbox, True, True)
        event_box = gtk.EventBox()
        event_box.set_visible_window(False)
        event_box.set_events(gtk.gdk.BUTTON_PRESS_MASK)
        event_box.connect("button_press_event", self.on_click)
        event_box.add(lbox)
        self.connect("button_press_event", self.on_click)
        if tooltip is not None:
            event_box.set_tooltip_text(tooltip)
        nbox = gtk.HBox()
        nbox.pack_start(event_box, True, True)
        self.add(nbox)
        self.set_app_paintable(True)
        self.realize()
        self.window.set_background(background_color)
        self.set_opacity(0.6)
        self.timer_id = None
        self.set_default_size(max_width, -1)
        # reposition whenever our size changes (text appended, etc.)
        self.connect("size-allocate", self.relocate)
        self.show_all()
    def append_text(self, text):
        # Merge another message into the visible popup.
        text1, text2 = (text + '\n').split('\n', 1)
        text = self.text_markup % (self.foreground_color, escape(text1), escape(text2))
        self.text = self.text + "\n" + text
        self.message_label.set_text(self.text)
        self.message_label.set_use_markup(True)
        self.message_label.show()
    def relocate(self, widget=None, allocation=None):
        # Pin the popup to the top-right corner of the screen.
        width, height = self.get_size()
        screen_w = gtk.gdk.screen_width()
        screen_h = gtk.gdk.screen_height()
        x = screen_w - width - 20
        y = 30
        self.move(x,y)
    def on_click(self, widget, event):
        # Fire the user callback (if any), then dismiss.
        if self.callback is not None:
            self.callback()
        self.close()
    def show(self):
        # Display and arm the 15-second auto-close timer.
        self.show_all()
        self.timer_id = glib.timeout_add_seconds(15, self.close)
        return True
    def close(self, *args):
        # Dismiss this popup and promote the next queued notification.
        global actual_notification
        global queue
        self.hide()
        if self.timer_id is not None:
            glib.source_remove(self.timer_id)
        if len(queue) != 0:
            title, text, icon_file, callback, tooltip = queue.pop(0)
            actual_notification = Notification(title, text, icon_file, callback, tooltip)
            actual_notification.show()
        else:
            actual_notification = None
        self.destroy()
| Python |
#!/usr/bin/env python
# -*- coding:utf8 -*-
'''Hotot
@author: U{Shellex Wei <5h3ll3x@gmail.com>}
@license: LGPLv3+
'''
import gtk
import gobject
import view
import config
import agent
import keybinder
import utils
try:
import appindicator
except ImportError:
HAS_INDICATOR = False
else:
HAS_INDICATOR = True
if __import__('os').environ.get('DESKTOP_SESSION') in ('gnome-2d', 'classic-gnome'):
HAS_INDICATOR = False
try: import i18n
except: from gettext import gettext as _
try:
import glib
glib.set_application_name(_("Hotot"))
except:
pass
class Hotot:
    """Main application: top-level window hosting the webkit UI, plus
    tray icon / appindicator menus and global hotkey handling."""
    def __init__(self):
        self.is_sign_in = False
        self.active_profile = 'default'
        self.protocol = ''
        self.build_gui()
        # with an appindicator present, main() attaches the menu there instead
        if not HAS_INDICATOR:
            self.create_trayicon()
    def build_gui(self):
        # Build the main window, the embedded web view, the tray menu and
        # a hidden menubar (for ubuntu unity's indicator-appmenu).
        self.window = gtk.Window()
        gtk.window_set_default_icon_from_file(
            utils.get_ui_object('image/ic64_hotot_classics.png'))
        self.window.set_icon_from_file(
            utils.get_ui_object('image/ic64_hotot_classics.png'))
        self.window.set_title(_("Hotot"))
        self.window.set_position(gtk.WIN_POS_CENTER)
        #self.window.set_default_size(500, 550)
        vbox = gtk.VBox()
        scrollw = gtk.ScrolledWindow()
        self.webv = view.MainView()
        agent.view = self.webv
        scrollw.add(self.webv)
        vbox.pack_start(scrollw)
        vbox.show_all()
        self.window.add(vbox)
        # tray / indicator popup menu
        self.menu_tray = gtk.Menu()
        mitem_resume = gtk.MenuItem(_("_Resume/Hide"))
        mitem_resume.connect('activate', self.on_trayicon_activate);
        self.menu_tray.append(mitem_resume)
        mitem_prefs = gtk.ImageMenuItem(gtk.STOCK_PREFERENCES)
        mitem_prefs.connect('activate', self.on_mitem_prefs_activate);
        self.menu_tray.append(mitem_prefs)
        mitem_about = gtk.ImageMenuItem(gtk.STOCK_ABOUT)
        mitem_about.connect('activate', self.on_mitem_about_activate);
        self.menu_tray.append(mitem_about)
        mitem_quit = gtk.ImageMenuItem(gtk.STOCK_QUIT)
        mitem_quit.connect('activate', self.on_mitem_quit_activate);
        self.menu_tray.append(mitem_quit)
        self.menu_tray.show_all()
        ## support for ubuntu unity indicator-appmenu
        menubar = gtk.MenuBar()
        menuitem_file = gtk.MenuItem(_("_File"))
        menuitem_file_menu = gtk.Menu()
        mitem_resume = gtk.MenuItem(_("_Resume/Hide"))
        mitem_resume.connect('activate', self.on_mitem_resume_activate);
        menuitem_file_menu.append(mitem_resume)
        mitem_prefs = gtk.ImageMenuItem(gtk.STOCK_PREFERENCES)
        mitem_prefs.connect('activate', self.on_mitem_prefs_activate);
        menuitem_file_menu.append(mitem_prefs)
        menuitem_quit = gtk.ImageMenuItem(gtk.STOCK_QUIT)
        menuitem_quit.connect("activate", self.quit)
        menuitem_file_menu.append(menuitem_quit)
        menuitem_file.set_submenu(menuitem_file_menu)
        menubar.append(menuitem_file)
        menuitem_help = gtk.MenuItem(_("_Help"))
        menuitem_help_menu = gtk.Menu()
        menuitem_about = gtk.ImageMenuItem(gtk.STOCK_ABOUT)
        menuitem_about.connect("activate", self.on_mitem_about_activate)
        menuitem_help_menu.append(menuitem_about)
        menuitem_help.set_submenu(menuitem_help_menu)
        menubar.append(menuitem_help)
        # zero-sized so the in-window menubar stays invisible
        menubar.set_size_request(0, 0)
        menubar.show_all()
        vbox.pack_start(menubar, expand=0, fill=0, padding=0)
        ##
        self.window.set_geometry_hints(min_height=380, min_width=460)
        self.window.show()
        # closing the window only hides it; quit goes through the menu
        self.window.connect('delete-event', gtk.Widget.hide_on_delete)
    def on_btn_update_clicked(self, btn):
        # Post the composed status if within the 140-character limit.
        if (self.tbox_status.get_text_length() <= 140):
            agent.update_status(self.tbox_status.get_text())
            self.tbox_status.set_text('')
            self.inputw.hide()
    def on_tbox_status_changed(self, entry):
        # Turn the entry red while over the 140-character limit.
        if (self.tbox_status.get_text_length() <= 140):
            entry.modify_base(gtk.STATE_NORMAL, gtk.gdk.Color('#fff'))
        else:
            entry.modify_base(gtk.STATE_NORMAL, gtk.gdk.Color('#f00'))
    def on_tbox_status_key_released(self, entry, event):
        # Enter submits the status instead of inserting a newline.
        if event.keyval == gtk.keysyms.Return:
            self.btn_update.clicked();
            entry.stop_emission('insert-text')
    def on_mitem_resume_activate(self, item):
        self.window.present()
    def on_mitem_prefs_activate(self, item):
        # Open the in-page preferences dialog with current settings loaded.
        agent.execute_script('''
        ui.PrefsDlg.load_settings(conf.settings);
        ui.PrefsDlg.load_prefs();
        globals.prefs_dialog.open();''');
        self.window.present()
    def on_mitem_about_activate(self, item):
        agent.execute_script('globals.about_dialog.open();');
        self.window.present()
    def on_mitem_quit_activate(self, item):
        self.quit()
    def quit(self, *args):
        # Tear down gtk and terminate the process.
        gtk.gdk.threads_leave()
        self.window.destroy()
        gtk.main_quit()
        import sys
        sys.exit(0)
    def apply_settings(self):
        # Apply freshly loaded settings: hotkey, window size, proxy.
        # init hotkey
        self.init_hotkey()
        # resize window
        self.window.set_gravity(gtk.gdk.GRAVITY_CENTER)
        self.window.resize(
            config.settings['size_w']
            , config.settings['size_h'])
        # apply proxy
        self.apply_proxy_setting()
    def apply_proxy_setting(self):
        # Point webkit's soup session at the configured proxy (or clear it).
        if config.settings['use_http_proxy']:
            proxy_uri = "https://%s:%s" % (
                config.settings['http_proxy_host']
                , config.settings['http_proxy_port'])
            # an explicit http:// host keeps its own scheme
            if config.settings['http_proxy_host'].startswith('http://'):
                proxy_uri = "%s:%s" % (
                    config.settings['http_proxy_host']
                    , config.settings['http_proxy_port'])
            utils.webkit_set_proxy_uri(proxy_uri)
        else:
            utils.webkit_set_proxy_uri("")
        # workaround for a BUG of webkitgtk/soupsession
        # proxy authentication
        agent.execute_script('''
        new Image().src='http://google.com/';''');
    def init_hotkey(self):
        # Bind the global "summon hotot" shortcut; ignore binding failures.
        try:
            keybinder.bind(
                config.settings['shortcut_summon_hotot']
                , self.on_hotkey_compose)
        except:
            pass
    def create_trayicon(self):
        """
        Create status icon and connect signals
        """
        self.trayicon = gtk.StatusIcon()
        self.trayicon.connect('activate', self.on_trayicon_activate)
        self.trayicon.connect('popup-menu', self.on_trayicon_popup_menu)
        self.trayicon.set_tooltip(_("Hotot: Click to Active."))
        self.trayicon.set_from_file(
            utils.get_ui_object('image/ic24_hotot_mono_light.svg'))
        self.trayicon.set_visible(True)
    def on_trayicon_activate(self, icon):
        # Marshal onto the main loop; tray events can come from odd contexts.
        gobject.idle_add(self._on_trayicon_activate, icon)
    def _on_trayicon_activate(self, icon):
        # Toggle: hide when focused, raise otherwise.
        if self.window.is_active():
            self.window.hide()
        else:
            self.window.present()
    def on_trayicon_popup_menu(self, icon, button, activate_time):
        self.menu_tray.popup(None, None
            , None, button=button
            , activate_time=activate_time)
    def on_hotkey_compose(self):
        gobject.idle_add(self._on_hotkey_compose)
    def _on_hotkey_compose(self):
        # Global hotkey: bring the window forward and focus the web view.
        if not self.webv.is_focus():
            self.window.hide()
            self.window.present()
            self.webv.grab_focus()
    def on_sign_in(self):
        self.is_sign_in = True
        #self.window.set_title('Hotot | %s' % '$')
    def on_sign_out(self):
        self.is_sign_in = False
def main():
    """Application entry point: init gtk threading, config, notifications,
    build the app, and run the gtk main loop."""
    global HAS_INDICATOR
    gtk.gdk.threads_init()
    config.loads();
    # best-effort: rename the process to 'hotot' via prctl(PR_SET_NAME=15)
    try:
        import dl
        libc = dl.open('/lib/libc.so.6')
        libc.call('prctl', 15, 'hotot', 0, 0, 0)
    except:
        pass
    agent.init_notify()
    app = Hotot()
    agent.app = app
    if HAS_INDICATOR:
        #TODO the icon is only work when installed to /usr/share/icons/hicolor/
        indicator = appindicator.Indicator('hotot',
            'hotot',
            appindicator.CATEGORY_COMMUNICATIONS)
        indicator.set_status(appindicator.STATUS_ACTIVE)
        indicator.set_attention_icon(utils.get_ui_object('image/ic24_hotot_mono_light.svg'))
        indicator.set_menu(app.menu_tray)
    gtk.gdk.threads_enter()
    gtk.main()
    gtk.gdk.threads_leave()
if __name__ == '__main__':
    main()
| Python |
#!/usr/bin/env python
# -*- coding:utf8 -*-
import gtk
gtk.gdk.threads_init() ## fix issue 24
import webkit
import agent
import config
from webkit import WebView
import utils
import json
import gobject
try: import i18n
except: from gettext import gettext as _
class MainView(WebView):
    """The embedded webkit view hosting hotot's HTML/JS user interface.

    Navigation to "hotot:" pseudo-URIs is intercepted and dispatched to
    agent.crack_hotot; ordinary links open in the external browser.
    """
    def __init__(self):
        WebView.__init__(self)
        # set True once the page's load-finished signal fires
        self.load_finish_flag = False
        self.set_property('can-focus', True)
        self.set_property('can-default', True)
        self.set_full_content_zoom(1)
        self.clipbord = gtk.Clipboard()
        settings = self.get_settings()
        # older webkitgtk builds may lack some of these properties
        try:
            settings.set_property('enable-universal-access-from-file-uris', True)
            settings.set_property('javascript-can-access-clipboard', True)
            settings.set_property('enable-default-context-menu', True)
            settings.set_property('enable-page-cache', True)
            settings.set_property('tab-key-cycles-through-elements', True)
            settings.set_property('enable-file-access-from-file-uris', True)
            settings.set_property('enable-spell-checking', False)
            settings.set_property('enable-caret-browsing', False)
        except:
            print 'Error: settings property was not set.'
        # HTML5 web database storage (1 GiB quota)
        webkit.set_web_database_directory_path(config.DB_DIR)
        webkit.set_default_web_database_quota(1024**3L)
        ## bind events
        self.connect('navigation-requested', self.on_navigation_requested);
        self.connect('new-window-policy-decision-requested', self.on_new_window_requested);
        self.connect('script-alert', self.on_script_alert);
        self.connect('load-finished', self.on_load_finish);
        self.connect("hovering-over-link", self.on_over_link);
        # load the bundled single-page UI
        templatefile = utils.get_ui_object(config.TEMPLATE)
        template = open(templatefile, 'rb').read()
        self.load_html_string(template, 'file://' + templatefile)
    def on_navigation_requested(self, view, webframe, request):
        return self.handle_uri(request.get_uri())
    def on_new_window_requested(self, view, frame, request, decision, u_data):
        return self.handle_uri(request.get_uri())
    def handle_uri(self, uri):
        # file:// loads proceed normally; hotot: goes to the python agent;
        # everything else opens in the system browser.
        if uri.startswith('file://'):
            return False
        elif uri.startswith('hotot:'):
            self.on_hotot_action(uri)
            return True
        elif uri.startswith('about:'):
            return True
        else:
            utils.open_webbrowser(uri)
            return True
    def on_script_alert(self, view, webframe, message):
        # JS alert() doubles as a message channel for "hotot:" commands.
        if message.startswith('hotot:'):
            self.on_hotot_action(message)
            return True
        return False
    def on_hotot_action(self, uri):
        # strip the 'hotot:' scheme prefix and dispatch
        if uri.startswith('hotot:'):
            agent.crack_hotot(uri[6:])
            return True
    def on_load_finish(self, view, webframe):
        self.load_finish_flag = True;
        agent.webv = self
        # overlay extra variables of web part
        variables = {
            'platform': 'Linux'
            , 'conf_dir': config.CONF_DIR
            , 'cache_dir': config.CACHE_DIR
            , 'avatar_cache_dir': config.AVATAR_CACHE_DIR
            , 'extra_fonts': utils.get_extra_fonts()
            , 'extra_exts': utils.get_extra_exts()
            , 'locale': utils.get_locale()
        };
        # and then, notify web part i am ready to work :)
        gobject.idle_add(view.execute_script, '''
        overlay_variables(%s);
        globals.load_flags = 1;
        ''' % json.dumps(variables))
    def on_over_link(self, view, alt, href):
        # Show the hovered link target as a tooltip on the parent widget.
        href = href or ""
        if not alt and not href.startswith('file:'):
            self.parent.set_tooltip_text(href)
| Python |
#!/usr/bin/python
# -*- coding: UTF-8 -*-
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
# Author: Huang Jiahua <jhuangjiahua@gmail.com>
# License: LGPLv3+
# Last modified:
app = 'hotot'
import os, sys
import gettext
import json
import re
# Prefer a locally built message catalog (running from a source tree),
# falling back to the system default locale directories.
if os.path.isdir(os.path.dirname(sys.argv[0]) + '/../build/mo'):
    gettext.install(app, os.path.dirname(sys.argv[0]) + '/../build/mo', unicode=True)
elif os.path.isdir(os.path.dirname(sys.argv[0]) + '/build/mo'):
    gettext.install(app, os.path.dirname(sys.argv[0]) + '/build/mo', unicode=True)
else:
    gettext.install(app, unicode=True)
if __name__=="__main__":
    # smoke test: _() is installed into builtins by gettext.install
    print _('')
| Python |
# -*- coding: UTF-8 -*-
import os
import pickle
import json
import gtk
import sys
import glob
import shutil
import glib
# Application identity and on-disk layout (XDG base dirs via glib).
PROGRAM_NAME = 'hotot'
EXT_DIR_NAME = 'ext'
# Per-user configuration directory, e.g. ~/.config/hotot
CONF_DIR = os.path.join(glib.get_user_config_dir(), PROGRAM_NAME)
DB_DIR = os.path.join(CONF_DIR, 'db')
# Per-user cache directory, e.g. ~/.cache/hotot
CACHE_DIR = os.path.join(glib.get_user_cache_dir(), PROGRAM_NAME)
AVATAR_CACHE_DIR = os.path.join(CACHE_DIR, 'avatar')
# Candidate directories holding the shipped web UI, searched in order;
# the local ./data checkout comes last.
DATA_DIRS = []
DATA_BASE_DIRS = [
    '/usr/local/share'
    , '/usr/share'
    , glib.get_user_data_dir()
]
DATA_DIRS += [os.path.join(d, PROGRAM_NAME) for d in DATA_BASE_DIRS]
DATA_DIRS.append(os.path.abspath('./data'))
TEMPLATE = 'index.html'
# Runtime settings pushed from the web part via load_settings().
settings = {}
def getconf():
    """Return a dict of this module's plain-value globals (the config).

    Also makes sure the config and avatar cache directories exist.
    """
    collected = {}
    if not os.path.isdir(CONF_DIR):
        os.makedirs(CONF_DIR)
    if not os.path.isdir(AVATAR_CACHE_DIR):
        os.makedirs(AVATAR_CACHE_DIR)
    # Only simple serializable values count as configuration.
    plain_types = (str, int, long, float, dict, list, bool)
    for key, value in globals().items():
        if key.startswith('__'):
            continue
        if isinstance(value, plain_types):
            collected[key] = value
    return collected
def loads():
    """Build the config once (creates config directories as a side effect)."""
    getconf()
def load_settings(pushed_settings):
    """Store settings pushed from the web UI, normalizing keys to utf-8 bytes."""
    normalized = {}
    for key, value in pushed_settings.items():
        # Keys arrive as unicode from JSON; keep them as utf-8 byte strings.
        normalized[key.encode('utf8')] = value
    globals()['settings'] = normalized
    return settings
| Python |
#!/usr/bin/python
# -*- coding: UTF-8 -*-
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
# Author: Shellex Wai <5h3ll3x@gmail.com>
# License: GNU LGPL
# Last modified:
"""
"""
# Module revision marker.
__revision__ = '0.1'
if __name__=="__main__":
    # Launch the application only when executed directly.
    import hotot.hotot
    hotot.hotot.main()
| Python |
#!/usr/bin/env python
# -*- coding:utf8 -*-
from distutils.core import setup
from DistUtilsExtra.command import *
from glob import glob
import os, os.path
def get_data_files(root, data_dir):
    """Map every file under data_dir to a distutils data_files tuple.

    Each directory containing files yields (root + relative_subdir,
    [absolute file paths]); empty directories are skipped.
    """
    result = []
    for parent, dirs, files in os.walk(data_dir):
        if not files:
            continue
        dest = root + parent[len(data_dir):]
        result.append((dest, [os.path.join(parent, name) for name in files]))
    return result
# Package metadata; DistUtilsExtra's build_extra/build_i18n hooks compile
# the gettext catalogs during the build step.
setup(name='hotot',
    version="0.9.6",
    description='Lightweight Twitter Client',
    long_description =
    """
Lightweight Twitter Client base on Gtk2 and Webkit.
Features include:
- Update/View Timelines.
- Follow/Unfollow peoples.
- Post status.
- Reply tweets.
- Post direct messages.
- View people profile.
- Native notification.
- Global key-shortcut.
""",
    author='Shellex Wai',
    author_email='5h3ll3x@gmail.com',
    license='LGPL-3',
    url="http://code.google.com/p/hotot",
    download_url="http://code.google.com/p/hotot/downloads/list",
    platforms = ['Linux'],
    requires = ['webkit', 'gtk', 'gobject', 'keybinder', 'pynotify'],
    scripts=['scripts/hotot'],
    packages = ['hotot'],
    # Ship the icon plus every file under data/ into share/hotot.
    data_files = [
        ('share/pixmaps', ['hotot.png']),
    ] + get_data_files('share/hotot', 'data'),
    cmdclass = { "build" : build_extra.build_extra,
                 "build_i18n" : build_i18n.build_i18n,
    }
)
| Python |
#!/usr/bin/env python
# -*- coding:utf8 -*-
import re
import json
import os.path
# Inputs scanned for i18n keys and where the locale catalogs live.
TEMPLATE = "data/index.html"
DEFAULT_LOCALE_FILE = 'data/_locales/en/messages.json'
JS_FILE_DIR = ['data/js/']
LOCALE_FILE_DIR = 'data/_locales/'
# Matches data-i18n-* attribute values in the HTML template.
template_tag_re = re.compile('data-i18n-[a-z0-9]+="(.+?)"')
# Matches _('tag') calls inside the JS sources.
js_tag_re = re.compile('''_\('([a-z0-9_]+)'\)''', re.MULTILINE)
# tag -> {'message', 'description'}; filled in by scan_js_dir_cb().
js_tag_map = {}
def scan_template():
    """Return the i18n keys referenced by the HTML template."""
    with open(TEMPLATE, 'r') as fp:
        markup = fp.read()
    return template_tag_re.findall(markup)
def scan_js_dir():
    """Scan every configured JS directory for translation tags.

    Populates js_tag_map via scan_js_dir_cb.
    """
    for js_dir in JS_FILE_DIR:
        # os.path.walk was removed in Python 3; os.walk is the portable
        # equivalent.  The callback keeps its original
        # (arg, dir_name, names) signature.
        for dir_name, dir_names, f_names in os.walk(js_dir):
            scan_js_dir_cb(None, dir_name, f_names)
def scan_js_dir_cb(arg, dir_name, f_names):
    ''' scan js files and generate a empty map

    Walk-style callback: for every *.js file in dir_name, record each
    _('tag') occurrence in js_tag_map with an empty message and the file
    path as description.
    '''
    for name in f_names:
        if not name.endswith('.js'):
            continue
        path = os.path.join(dir_name, name)
        # file() is Python 2 only and the handle was never closed;
        # open() in a with-block is portable and leak-free.
        with open(path, 'r') as fp:
            data = fp.read()
        for tag in js_tag_re.findall(data):
            js_tag_map[tag] = {'message': '', 'description': path}
def generate_trans_template(key_list):
    """Build an empty translation entry for every template key."""
    return dict((key, {'message': '', 'description': TEMPLATE})
                for key in key_list)
def load_exist_trans(trans_file):
    """Parse an existing messages.json translation file into a dict."""
    # file() is Python 2 only and the handle was never closed; open() in a
    # with-block is portable, and json.load avoids the intermediate string.
    with open(trans_file) as fp:
        return json.load(fp)
def walk_cb(empty_trans, dir_name, f_names):
    """os.path.walk callback: sync one locale dir's messages.json.

    Merges the freshly scanned key set (empty_trans) with the locale's
    existing translations, or seeds a new file from the default (English)
    locale when none exists yet.  Python 2 only (print statements).
    """
    new_trans = ''
    exists_trans = {}
    # Skip the _locales root itself; only its subdirectories hold catalogs.
    if dir_name.endswith('data/_locales/'):
        return
    file_path = os.path.join(dir_name, 'messages.json')
    if 'messages.json' in f_names:
        trans_file = open(file_path, 'r')
        exists_data = trans_file.read()
        if exists_data:
            exists_trans = json.loads(exists_data)
        else:
            # Empty file: treat as no translations.
            exists_trans = {}
        trans_file.close()
        new_trans = format_out(merge_trans(empty_trans, exists_trans))
        print '[Update]', file_path
    else:
        # No catalog yet: seed from the default locale's translations.
        default_trans = json.loads(file(DEFAULT_LOCALE_FILE, 'r').read())
        new_trans = format_out(merge_trans(empty_trans, default_trans))
        print '[Create]', file_path
    trans_file = open(file_path, 'w+')
    trans_file.write(new_trans.encode('utf-8'))
    trans_file.close()
def merge_trans(empty_trans, exists_trans):
    """Merge existing translations into the scanned key template.

    Keys present in exists_trans but no longer in the template are
    interactively offered for deletion; untranslated keys are reported.
    Python 2 only (print statement / raw_input).
    """
    keys_not_supported = []
    for key in exists_trans:
        if key not in empty_trans:
            keys_not_supported.append(key)
    # Ask the operator before dropping stale keys.
    for key in keys_not_supported:
        print 'Cannot find Key', key, 'in template, delete it? (y/n):' ,
        if raw_input().strip() == 'y':
            del exists_trans[key]
    # Existing translations win over the empty template entries.
    new_trans = empty_trans.copy()
    new_trans.update(exists_trans)
    for key in new_trans:
        if not new_trans[key]['message']:
            print '[!] Empty Key: [%s]' % key
    return new_trans
def format_out(trans):
    """Render the translation map as tab-indented JSON-like text."""
    entries = []
    for key, entry in trans.items():
        fields = ',\n'.join('\t\t"%s": "%s"' % pair for pair in entry.items())
        entries.append('\t"%s": {\n%s\n\t}' % (key, fields))
    return '{\n%s\n}' % ',\n'.join(entries)
if __name__ == '__main__':
    # Gather keys from the template and the JS sources, then sync every
    # locale's messages.json against that combined set.
    keys = scan_template()
    scan_js_dir()
    print 'keys: ', keys
    empty_trans = generate_trans_template(keys)
    empty_trans.update(js_tag_map)
    os.path.walk(LOCALE_FILE_DIR, walk_cb, empty_trans)
| Python |
class MainPage(webapp.RequestHandler):
    """Map view: plots all marked points in a date range on Google Maps
    and asks the Directions service for a driving route through them."""
    def get(self):
        """Render the map page for points with startdate < date < enddate."""
        # choice which days to view
        startdate = self.request.get("startdate")
        enddate = self.request.get("enddate")
        # Fall back to [today, tomorrow) when a bound is missing/malformed.
        if startdate == None or startdate == '' or not matchdateformat(startdate):
            startdate = date.today()
        else:
            # format the startdate
            startdate = stringtodate(startdate)
        if enddate == None or enddate == '' or not matchdateformat(enddate):
            enddate = date.today() + datetime.timedelta(days = 1)
        else:
            # format the enddate
            enddate = stringtodate(enddate)
        markedpoints = db.GqlQuery ("SELECT * FROM MarkedPoint WHERE date > :1 AND date < :2 ORDER BY date DESC", startdate, enddate)
        #html map starts here
        self.response.out.write('<html><head>')
        self.response.out.write("""
<meta name="viewport" content="initial-scale=1.0, user-scalable=no" />
<meta http-equiv="content-type" content="text/html; charset=UTF-8"/>
<title>Flying on Wheel~</title>
<script type="text/javascript" src="http://maps.google.com/maps/api/js?sensor=false"></script>
<!-- JS Date Control -->
<script language="javascript" src="/static/jquery-1.4.2.min.js" ></script>
<script language="javascript" src="/static/jquery.date_input.js" ></script>
<link rel="stylesheet" href="/static/date_input.css" type="text/css">
""")
        # Browser sniffing hook; both branches currently pick the same size.
        self.response.out.write("""
<script type="text/javascript">
function detectBrowser() {
  var useragent = navigator.userAgent;
  //alert(useragent);
  var mapdiv = document.getElementById("map_canvas");
  //alert(useragent.indexOf('Mozilla'));
  if (useragent.indexOf('Mozilla') != -1 || useragent.indexOf('Android') != -1 ) {
    //alert("computer");
    mapdiv.style.width = '640px';
    mapdiv.style.height = '360px';
  } else {
    //alert("mobile");
    mapdiv.style.width = '640px';
    mapdiv.style.height = '360px';
  }
}
</script>
""")
        # Build JS calls for every stored point; mp.point renders as
        # "lat,lng", so addLatLng("name", lat,lng) gets its three args.
        tempString = "var waypoints = [];";#for waypoints' javascript array
        for mp in markedpoints:
            tempString += 'addLatLng("%s", %s);' % (mp.name, mp.point);
            tempString += 'waypoints.push("%s");' % mp.point;
        #self.response.out.write('<div>%s</div>' % tempString)
        tempString += 'calcRoute(waypoints);';
        # The %s below is replaced by tempString inside initialize().
        self.response.out.write("""
<script type="text/javascript">
var map;
var pathCoordinates = [];
var poly;
var directionDisplay;
var directionsService = new google.maps.DirectionsService();
var image = new google.maps.MarkerImage('/static/mark.png',
    new google.maps.Size(60, 60),
    new google.maps.Point(0,0),
    new google.maps.Point(-12, 32));
var shadow = new google.maps.MarkerImage('/static/mark_shadow.png',
    new google.maps.Size(60, 60),
    new google.maps.Point(0,0),
    new google.maps.Point(-12, 32));
function initialize() {
  directionsDisplay = new google.maps.DirectionsRenderer();
  var myLatLng = new google.maps.LatLng(34.506445,109.517914);
  var myOptions = {
    zoom: 15,
    center: myLatLng,
    mapTypeId: google.maps.MapTypeId.ROADMAP
  };
  map = new google.maps.Map(document.getElementById("map_canvas"), myOptions);
  //pathCoordinates = new google.maps.MVCArray();
  var polyOptions = {
    path: pathCoordinates,
    strokeColor: "#FF0000",
    strokeOpacity: 0.5,
    strokeWeight: 1
  }
  poly = new google.maps.Polyline(polyOptions);
  poly.setMap(map);
  directionsDisplay.setMap(map);
  detectBrowser();
  %s
}
function addLatLng(name, lat, lng) {
  //alert("lat: " + lat);
  //alert("lng: " + lng);
  var latLng = new google.maps.LatLng(lat, lng);
  //alert(latLng);
  var path = poly.getPath();
  path.insertAt(pathCoordinates.length, latLng);
  var marker = new google.maps.Marker({
    position: latLng,
    map: map,
    title: name,
    icon: image,
    shadow: shadow
  });
}
function calcRoute(points) {
  var start = points[0];
  //alert(start);
  var end = points[points.length-1];
  //alert(end);
  //alert(points.length)
  var waypts = [];
  for (var i = 1; i < points.length-1; i++) {
    waypts.push({
      location: points[i],
      stopover: true});
  }
  var request = {
    origin: start,
    destination: end,
    waypoints: waypts,
    optimizeWaypoints: true,
    travelMode: google.maps.DirectionsTravelMode.DRIVING
  };
  directionsService.route(request, function(response, status) {
    if (status == google.maps.DirectionsStatus.OK) {
      directionsDisplay.setDirections(response);
    }
  });
}
</script>
""" % tempString)
        self.response.out.write('</head>')
        self.response.out.write("""<body style="margin:0px; padding:0px;" onload="initialize()">""")
        # init js date control
        self.response.out.write("""
<script type="text/javascript">
$($.date_input.initialize);
$.extend(DateInput.DEFAULT_OPTS, {
  stringToDate: function(string) {
    var matches;
    if (matches = string.match(/^(\d{4,4})-(\d{2,2})-(\d{2,2})$/)) {
      return new Date(matches[1], matches[2] - 1, matches[3]);
    } else {
      return null;
    };
  },
  dateToString: function(date) {
    var month = (date.getMonth() + 1).toString();
    var dom = date.getDate().toString();
    if (month.length == 1) month = "0" + month;
    if (dom.length == 1) dom = "0" + dom;
    return date.getFullYear() + "-" + month + "-" + dom;
  }
});
</script>""")
        # Date-range form posts back to this same page.
        self.response.out.write("""
<div>
<form action=".">
<input readonly type="text" name="startdate" id="startdate" class="date_input" value="%s">
<input readonly type="text" name="enddate" id="enddate" class="date_input" value="%s">
<input type="submit" value="SHOW"/>
</form>
</div>
""" % (startdate, enddate))
        self.response.out.write("""<div id="map_canvas" style="width: 40%; height: 40%;"></div>""")
        self.response.out.write("""</body></html>""")
        #html map ends here
class Mark(webapp.RequestHandler):
    """A database of the points

    HTML page listing marked points for a date range, with a form for
    submitting new ones.
    """
    def get(self):
        # user = users.get_current_user()
        # if user:
        #     pass
        # else:
        #     self.redirect(users.create_login_url(self.request.uri))
        """show the points which choice which days to view"""
        # choice which days to view
        startdate = self.request.get("startdate")
        enddate = self.request.get("enddate")
        # Fall back to [today, tomorrow) when a bound is missing/malformed.
        if startdate == None or startdate == '' or not matchdateformat(startdate):
            startdate = date.today()
        else:
            # format the startdate
            startdate = stringtodate(startdate)
        if enddate == None or enddate == '' or not matchdateformat(enddate):
            enddate = date.today() + datetime.timedelta(days = 1)
        else:
            # format the enddate
            enddate = stringtodate(enddate)
        markedpoints = db.GqlQuery ("SELECT * FROM MarkedPoint WHERE date > :1 AND date < :2 ORDER BY date DESC", startdate, enddate)
        # html form starts here
        # html head and impot the lib
        self.response.out.write("""
<html>
<head>
<meta http-equiv="content-type" content="text/html; charset=UTF-8"/>
<title>Flying on Wheel</title>
<!-- JS Date Control -->
<script language="javascript" src="/static/jquery-1.4.2.min.js" ></script>
<script language="javascript" src="/static/jquery.date_input.js" ></script>
<link rel="stylesheet" href="/static/date_input.css" type="text/css">
</head>
<body>
""")
        # init js date control
        self.response.out.write("""
<script type="text/javascript">
$($.date_input.initialize);
$.extend(DateInput.DEFAULT_OPTS, {
  stringToDate: function(string) {
    var matches;
    if (matches = string.match(/^(\d{4,4})-(\d{2,2})-(\d{2,2})$/)) {
      return new Date(matches[1], matches[2] - 1, matches[3]);
    } else {
      return null;
    };
  },
  dateToString: function(date) {
    var month = (date.getMonth() + 1).toString();
    var dom = date.getDate().toString();
    if (month.length == 1) month = "0" + month;
    if (dom.length == 1) dom = "0" + dom;
    return date.getFullYear() + "-" + month + "-" + dom;
  }
});
</script>
""")
        # the control of date
        self.response.out.write("""
<div>
<form action="/mark">
<input readonly type="text" name="startdate" id="startdate" class="date_input" value="%s">
<input readonly type="text" name="enddate" id="enddate" class="date_input" value="%s">
<input type="submit" value="SHOW"/>
</form>
</div>
""" % (startdate, enddate))
        # show the points in a table
        self.response.out.write("""
<table style="width: 40%" align="left">
<tr align="left">
<th>Adress Name</th>
<th>Coordinate Point</th>
<th>Time</th>
</tr>
""")
        for markedpoint in markedpoints:
            self.response.out.write('<tr><td>%s</td><td>%s</td><td>%s</td></tr>' % (markedpoint.name, markedpoint.point, markedpoint.date))
        self.response.out.write("""
</table>
""")
        # a post form for update the point
        self.response.out.write("""
<div style="width: 40%" align="left">
<form action="/mark" method="post">
<div><label>Adress Name:</label></div>
<div><textarea name="name" rows="3" cols="60"></textarea></div>
<div><label>Coordinates:</label></div>
<div><textarea name="point" rows="3" cols="60"></textarea></div>
<div><input type="submit" value="Mark"/></div>
</form>
</div>
</body>
</html>
""")
        #html form ends here
    def post(self):
        """Catch the post which update the points"""
        # Server clock shifted by +8h — presumably UTC to CST; confirm.
        markedpoint = MarkedPoint(date=datetime.datetime.now() + datetime.timedelta(hours = 8))
        markedpoint.name = self.request.get("name")
        markedpoint.point = self.request.get("point")
        markedpoint.put()
        self.redirect('/mark')
| Python |
# -*- coding: utf-8 -*-
import logging
from datetime import date
import datetime
import time
import re
import os
import hashlib
from google.appengine.ext.webapp import template
from google.appengine.ext import db
from google.appengine.api import users
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
from appengine_utilities import sessions
from google.appengine.api import memcache
from functools import wraps
from google.appengine.api import images
# Emit DEBUG-level (and above) logs from the root logger.
logging.getLogger().setLevel(logging.DEBUG)
def requires_admin(method):
    """
    Decorator: only run the handler method for App Engine admins;
    everyone else is redirected to the login page.
    """
    @wraps(method)
    def wrapper(self, *args, **kwargs):
        if users.is_current_user_admin():
            return method(self, *args, **kwargs)
        # Not an admin: bounce to login and abort the handler.
        self.redirect(users.create_login_url(self.request.uri))
        return
    return wrapper
class MarkedPoint(db.Model):
    """
    The Point's infomation: a geographic point marked by a flyer.
    """
    name = db.StringProperty()        # address name entered by the flyer
    point = db.GeoPtProperty()        # latitude/longitude pair
    point_info = db.StringProperty()  # free-form description
    flyer_name = db.StringProperty()  # owning Flyer's name
    picblob = db.BlobProperty()       # optional picture data
    date = db.DateTimeProperty(auto_now_add=True)
    @property
    def id(self):
        # Numeric datastore id as a string, used in URLs (/image/<id>/).
        return str(self.key().id())
class Flyer(db.Model):
    """
    The Flyer's Infomation: a registered user account.
    """
    name = db.StringProperty()      # unique login name
    password = db.StringProperty()  # md5 hex digest of the password
    address = db.StringProperty()
    flag = db.BooleanProperty()     # account enabled; checked by checkRight()
    date = db.DateTimeProperty(auto_now_add=True)
def matchdateformat(stringdate):
    """
    Return True when stringdate matches the YYYY-MM-DD format.
    """
    return bool(re.match(r'^\d{4}-\d{2}-\d{2}$', stringdate))
def stringtodate(stringdate):
    """
    Parse a YYYY-MM-DD string into a datetime at midnight.
    """
    parsed = time.strptime(stringdate, "%Y-%m-%d")
    return datetime.datetime(parsed.tm_year, parsed.tm_mon, parsed.tm_mday)
class Mark(webapp.RequestHandler):
    """
    Mark the point: API endpoint used by flyers' clients to store a point.
    Writes 'True'/'False' as a plain-text result.
    """
    def get(self):
        """
        Error Infomation: GET is not allowed on this endpoint.
        """
        self.response.out.write('NotAllowed')
    def post(self):
        """
        Catch the post which come from the flyer who update the points.
        """
        name = self.request.get("flyer_name")
        password = self.request.get("password")
        if checkRight(name, password):
            # Shift +8h — presumably server UTC to local time; confirm.
            markedpoint = MarkedPoint(date=datetime.datetime.now() + datetime.timedelta(hours = 8))
            markedpoint.name = self.request.get("name")
            markedpoint.point = self.request.get("point")
            markedpoint.point_info = self.request.get("point_info")
            markedpoint.flyer_name = name
            # you can choice upload a pic or not: only resize and store it
            # when one was actually uploaded.  (Previously images.resize()
            # ran unconditionally and raised on an empty blob.)
            tempblob = self.request.get("picblob")
            if tempblob:
                tempblob = images.resize(tempblob, 320, 240)
                markedpoint.picblob = db.Blob(tempblob)
            markedpoint.put()
            self.response.out.write('True')
        else:
            self.response.out.write('False')
class Mark_test(webapp.RequestHandler):
    """
    Mark the point, just test which for develop new function.
    Admin-only variant of the Mark endpoint with an HTML form.
    """
    @requires_admin
    def get(self):
        """
        The form of the point.
        """
        template_values = {
        }
        path = os.path.join(os.path.dirname(__file__), 'templates/mark_test.html')
        self.response.out.write(template.render(path, template_values))
    @requires_admin
    def post(self):
        """
        Catch the post which come from the flyer who update the points.
        """
        # Shift +8h — presumably server UTC to local time; confirm.
        markedpoint = MarkedPoint(date=datetime.datetime.now() + datetime.timedelta(hours = 8))
        markedpoint.name = self.request.get("name")
        markedpoint.point = self.request.get("point")
        markedpoint.point_info = self.request.get("point_info")
        markedpoint.flyer_name = self.request.get("flyer_name")
        # you can choice upload a pic or not: only resize and store it when
        # one was actually uploaded.  (Previously images.resize() ran
        # unconditionally and raised on an empty blob.)
        tempblob = self.request.get("picblob")
        if tempblob:
            tempblob = images.resize(tempblob, 320, 240)
            markedpoint.picblob = db.Blob(tempblob)
        markedpoint.put()
        self.response.out.write('True')
class Marks(webapp.RequestHandler):
    """
    list of the one flyer's points (requires a logged-in session).
    """
    def get(self):
        """
        show the points which days to view
        """
        self.sess = sessions.Session(writer="cookie")
        keyname = 'flyer_name'
        if keyname in self.sess:
            # format the date; fall back to [today, tomorrow) when a bound
            # is missing or malformed.
            startdate = self.request.get("startdate")
            enddate = self.request.get("enddate")
            if startdate == None or startdate == '' or not matchdateformat(startdate):
                startdate = date.today()
            else:
                # format the startdate
                startdate = stringtodate(startdate)
            if enddate == None or enddate == '' or not matchdateformat(enddate):
                enddate = date.today() + datetime.timedelta(days = 1)
            else:
                # format the enddate
                enddate = stringtodate(enddate)
            # select the flyer's points
            flyer_name = self.sess[keyname]
            markedpoints = db.GqlQuery ("SELECT * FROM MarkedPoint WHERE date > :1 AND date < :2 AND flyer_name = :3 ORDER BY date DESC", startdate, enddate, flyer_name)
            template_values = {
                'startdate': startdate.strftime("%Y-%m-%d"),
                'enddate': enddate.strftime("%Y-%m-%d"),
                'flyer_name': flyer_name,
                'markedpoints': markedpoints,
            }
            path = os.path.join(os.path.dirname(__file__), 'templates/marks.html')
            self.response.out.write(template.render(path, template_values))
        else:
            # Not logged in: show the welcome page instead.
            template_values = {
            }
            path = os.path.join(os.path.dirname(__file__), 'templates/welcome.html')
            self.response.out.write(template.render(path, template_values))
class Index(webapp.RequestHandler):
    """
    The main page of all flyer (shows the logged-in flyer's points).
    """
    def get(self):
        """
        show the points which days to view
        """
        self.sess = sessions.Session(writer="cookie")
        keyname = 'flyer_name'
        if keyname in self.sess:
            # format the date; fall back to [today, tomorrow) when a bound
            # is missing or malformed.
            startdate = self.request.get("startdate")
            enddate = self.request.get("enddate")
            if startdate == None or startdate == '' or not matchdateformat(startdate):
                startdate = date.today()
            else:
                # format the startdate
                startdate = stringtodate(startdate)
            if enddate == None or enddate == '' or not matchdateformat(enddate):
                enddate = date.today() + datetime.timedelta(days = 1)
            else:
                # format the enddate
                enddate = stringtodate(enddate)
            # select the flyer's points
            flyer_name = self.sess[keyname]
            markedpoints = db.GqlQuery ("SELECT * FROM MarkedPoint WHERE date > :1 AND date < :2 AND flyer_name = :3 ORDER BY date DESC", startdate, enddate, flyer_name)
            template_values = {
                'startdate': startdate.strftime("%Y-%m-%d"),
                'enddate': enddate.strftime("%Y-%m-%d"),
                'flyer_name': flyer_name,
                'markedpoints': markedpoints,
            }
            path = os.path.join(os.path.dirname(__file__), 'templates/index.html')
            self.response.out.write(template.render(path, template_values))
        else:
            # Not logged in: show the welcome page instead.
            template_values = {
            }
            path = os.path.join(os.path.dirname(__file__), 'templates/welcome.html')
            self.response.out.write(template.render(path, template_values))
def flyer_error(flyer_error):
    """
    Render the generic error page with the given message and return the HTML.
    """
    context = {
        'flyer_error': flyer_error,
    }
    tpl = os.path.join(os.path.dirname(__file__), 'templates/error.html')
    return template.render(tpl, context)
def checkRight(name, password):
    """
    Return True when exactly one enabled Flyer matches name + md5(password).
    """
    # NOTE(review): unsalted MD5 is weak for password storage; kept here to
    # match the hashes already stored in the datastore.
    digest = hashlib.md5()
    digest.update(password)
    query = db.GqlQuery("SELECT * FROM Flyer where name = :1 AND password = :2 AND flag = :3",
                        name, digest.hexdigest(), True)
    return query.count() == 1
class Login(webapp.RequestHandler):
    """
    Login and save the session.
    """
    def get(self):
        """
        Login form.
        """
        template_values = {
        }
        path = os.path.join(os.path.dirname(__file__), 'templates/flyer_login.html')
        self.response.out.write(template.render(path, template_values))
    def post(self):
        """
        Catch the post from login: validate credentials and open a
        cookie-backed session on success.
        """
        name = self.request.get("name")
        password = self.request.get("password")
        if checkRight(name, password):
            self.sess = sessions.Session(writer="cookie")
            keyname = 'flyer_name'
            self.sess[keyname] = name
            self.redirect('/')
        else:
            flyer_error = "Login Error, Please correct the Name&Password, Thanks~"
            template_values = {
                'flyer_error': flyer_error,
            }
            path = os.path.join(os.path.dirname(__file__), 'templates/error.html')
            self.response.out.write(template.render(path, template_values))
class Logout(webapp.RequestHandler):
    """
    Logout and clear the session.
    """
    def get(self):
        """Drop the cookie-backed session, then send the flyer home."""
        session = sessions.Session(writer="cookie")
        self.sess = session
        session.delete()
        self.redirect('/')
class Flyer_register(webapp.RequestHandler):
    """
    Flyer Manage: account registration form and handler.
    """
    def get(self):
        """
        The form of Flyer_rigister.
        """
        f = db.GqlQuery ("SELECT * FROM Flyer")
        template_values = {
            # Ordinal shown to the user: "you'll be flyer number N".
            'flyers_count': f.count()+1,
        }
        path = os.path.join(os.path.dirname(__file__), 'templates/flyer_register.html')
        self.response.out.write(template.render(path, template_values))
    def post(self):
        """
        Save the flyer after validating the form.
        """
        flyer_name = self.request.get("name")
        flyer_password = self.request.get("password")
        flyer_repassword = self.request.get("repassword")
        tempflyers = db.GqlQuery ("SELECT * FROM Flyer where name = :1", flyer_name)
        # Check the passwd and the repasswd
        if flyer_password != flyer_repassword:
            flyer_error = "The password & repassword are not the same, Please correct it, Thanks~"
            template_values = {
                'flyer_error': flyer_error,
            }
            path = os.path.join(os.path.dirname(__file__), 'templates/error.html')
            self.response.out.write(template.render(path, template_values))
        # Look for registered or not
        elif tempflyers.count() >= 1:
            flyer_error = "This name has been registered, Please choice another one, Thanks~"
            template_values = {
                'flyer_error': flyer_error,
            }
            path = os.path.join(os.path.dirname(__file__), 'templates/error.html')
            self.response.out.write(template.render(path, template_values))
        # Save the flyer
        else:
            # Shift +8h — presumably server UTC to local time; confirm.
            flyer = Flyer(date=datetime.datetime.now() + datetime.timedelta(hours = 8))
            # Store only the md5 hex digest of the password.
            md5password = hashlib.md5()
            md5password.update(flyer_password)
            flyer.name = flyer_name
            flyer.password = md5password.hexdigest()
            flyer.flag = True
            flyer.put()
            flyer_success = "Good Luck!, You have been registered, Have a good travel~<br/><a href='/login'>Login</>"
            template_values = {
                'flyer_success': flyer_success,
            }
            path = os.path.join(os.path.dirname(__file__), 'templates/success.html')
            self.response.out.write(template.render(path, template_values))
class Error404(webapp.RequestHandler):
    """
    If flyer enter a page which is not be found, will come here
    (catch-all handler for unknown URLs).
    """
    def get(self):
        """Render the error template with a not-found message."""
        context = {
            'flyer_error': "You have enter a bad url which is not be found, Please check your url~",
        }
        tpl = os.path.join(os.path.dirname(__file__), 'templates/error.html')
        self.response.out.write(template.render(tpl, context))
class Admin(webapp.RequestHandler):
    """
    The admin control: list flyers and enable/disable/delete them.
    """
    @requires_admin
    def get(self):
        """
        Get the list of flyers.
        """
        tempflyers = db.GqlQuery ("SELECT * FROM Flyer")
        template_values = {
            'flyers': tempflyers,
        }
        path = os.path.join(os.path.dirname(__file__), 'templates/admin.html')
        self.response.out.write(template.render(path, template_values))
    @requires_admin
    def post(self):
        """
        Manage the flyers: toggle the enabled flag or delete the account.
        """
        flyer_name = self.request.get("name")
        choice = self.request.get("choice")
        tempflyers = db.GqlQuery ("SELECT * FROM Flyer where name = :1", flyer_name)
        # NOTE(review): raises IndexError when no flyer matches the name.
        tempflyer = tempflyers[0]
        if choice == "toTrue":
            tempflyer.flag = True
            tempflyer.put()
        elif choice == "toFalse":
            tempflyer.flag = False
            tempflyer.put()
        else:
            # Any other choice value deletes the account.
            tempflyer.delete()
        self.redirect('/admin')
class Points(webapp.RequestHandler):
    """
    Show the points (all flyers) for a date range; admin only.
    """
    @requires_admin
    def get(self):
        # formate the date; fall back to [today, tomorrow) when a bound is
        # missing or malformed.
        startdate = self.request.get("startdate")
        enddate = self.request.get("enddate")
        if startdate == None or startdate == '' or not matchdateformat(startdate):
            startdate = date.today()
        else:
            # format the startdate
            startdate = stringtodate(startdate)
        if enddate == None or enddate == '' or not matchdateformat(enddate):
            enddate = date.today() + datetime.timedelta(days = 1)
        else:
            # format the enddate
            enddate = stringtodate(enddate)
        markedpoints = db.GqlQuery ("SELECT * FROM MarkedPoint WHERE date > :1 AND date < :2 ORDER BY date DESC", startdate, enddate)
        template_values = {
            'startdate': startdate.strftime("%Y-%m-%d"),
            'enddate': enddate.strftime("%Y-%m-%d"),
            'flyer_name': 'admin', # because the base.html control the date_select's show or not
            'markedpoints': markedpoints,
        }
        path = os.path.join(os.path.dirname(__file__), 'templates/points.html')
        self.response.out.write(template.render(path, template_values))
class Image(webapp.RequestHandler):
    """
    image for showing: serves a marked point's stored picture blob.
    """
    def get(self, id):
        """Stream the picture stored on MarkedPoint <id> as image/jpeg."""
        point = MarkedPoint.get_by_id(int(id))
        self.response.headers['Content-Type'] = 'image/jpeg'
        self.response.out.write(point.picblob)
# URL routing table; the trailing '.*' catch-all maps unknown paths to
# Error404.
application = webapp.WSGIApplication([
    ('/image/(?P<id>[0-9]+)/', Image),
    ('/mark', Mark),
    ('/mark_test', Mark_test),
    ('/marks', Marks),
    ('/points', Points),
    ('/', Index),
    ('/login', Login),
    ('/logout', Logout),
    ('/register', Flyer_register),
    ('/admin', Admin),
    ('.*', Error404),
], debug=True)
def main():
    """CGI entry point: serve the WSGI application."""
    run_wsgi_app(application)
if __name__ == '__main__':
    main()
| Python |
'''
Copyright (c) 2008, appengine-utilities project
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
Neither the name of the appengine-utilities project nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
import os, cgi, __main__
from google.appengine.ext.webapp import template
import wsgiref.handlers
from google.appengine.ext import webapp
from google.appengine.api import memcache
from google.appengine.ext import db
from appengine_utilities import cron
class MainPage(webapp.RequestHandler):
    """Admin form for listing, adding and deleting cron entries."""
    def _render_form(self):
        """Render the scheduler form with every stored cron entry."""
        entries = cron._AppEngineUtilities_Cron.all().fetch(1000)
        path = os.path.join(os.path.dirname(__file__), 'templates/scheduler_form.html')
        self.response.out.write(template.render(path, {"cron_entries": entries}))
    def get(self):
        # Instantiating Cron() first, as the original did.
        c = cron.Cron()
        self._render_form()
    def post(self):
        action = str(self.request.get('action'))
        if action == 'Add':
            cron.Cron().add_cron(str(self.request.get('cron_entry')))
        elif action == 'Delete':
            entry = db.get(db.Key(str(self.request.get('key'))))
            entry.delete()
        self._render_form()
def main():
    """Build and run the scheduler admin app as a CGI/WSGI application."""
    app = webapp.WSGIApplication(
        [('/gaeutilities/', MainPage)],
        debug=True)
    wsgiref.handlers.CGIHandler().run(app)
if __name__ == "__main__":
    main()
# -*- coding: utf-8 -*-
"""
Copyright (c) 2008, appengine-utilities project
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
- Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
- Neither the name of the appengine-utilities project nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
# main python imports
import os
import time
import datetime
import random
import hashlib
import Cookie
import pickle
import sys
from time import strftime
# google appengine imports
from google.appengine.ext import db
from google.appengine.api import memcache
from django.utils import simplejson
# appengine_utilities import
from rotmodel import ROTModel
# settings
# Prefer the application's settings module; fall back to the bundled
# defaults when import fails or when the package name halves differ.
try:
    import settings_default
    import settings
    if settings.__name__.rsplit('.', 1)[0] != settings_default.__name__.rsplit('.', 1)[0]:
        settings = settings_default
except:
    # NOTE(review): bare except also hides errors raised inside the
    # settings module itself, not just ImportError.
    settings = settings_default
class _AppEngineUtilities_Session(ROTModel):
    """
    Model for the sessions in the datastore. This contains the identifier and
    validation information for the session. Uses ROTModel rather than db.Model
    in order to make more attempts to retrieve information on get().
    """
    sid = db.StringListProperty()     # session identifiers; membership checked in get_session()
    session_key = db.FloatProperty()  # key suffix for the memcache entry
    ip = db.StringProperty()          # presumably client IP — set outside this chunk
    ua = db.StringProperty()          # presumably client user agent — set outside this chunk
    last_activity = db.DateTimeProperty()
    dirty = db.BooleanProperty(default=False)    # memcache copy is ahead of the datastore
    working = db.BooleanProperty(default=False)  # a retry put() is in flight (see get_session)
    deleted = db.BooleanProperty(default=False)  # session marked for deletion
def put(self):
"""
Extends put so that it writes vaules to memcache as well as the
datastore, and keeps them in sync, even when datastore writes fails.
It also uses a db.put(), rather than the ROTModel put, to avoid
retries on puts. With the memcache layer this optimizes performance,
stopping on db.Timeout rather than retrying.
Returns the session object.
"""
if self.session_key:
memcache.set(u"_AppEngineUtilities_Session_%s" % \
(unicode(self.session_key)), self)
else:
# new session, generate a new key, which will handle the
# put and set the memcache
self.create_key()
self.last_activity = datetime.datetime.now()
try:
self.dirty = False
db.put(self)
memcache.set(u"_AppEngineUtilities_Session_%s" % \
(unicode(self.session_key)), self)
except:
self.dirty = True
memcache.set(u"_AppEngineUtilities_Session_%s" % \
(unicode(self.session_key)), self)
return self
@classmethod
def get_session(cls, session_obj=None):
"""
Uses the passed objects sid to get a session object from memcache,
or datastore if a valid one exists.
Args:
session_obj: a session object
Returns a validated session object.
"""
if session_obj.sid == None:
return None
session_key = session_obj.sid.split(u'_')[0]
session = memcache.get(u"_AppEngineUtilities_Session_%s" % \
(unicode(session_key)))
if session:
if session.deleted == True:
session.delete()
return None
if session.dirty == True and session.working != False:
# the working bit is used to make sure multiple requests,
# which can happen with ajax oriented sites, don't try to put
# at the same time
session.working = True
memcache.set(u"_AppEngineUtilities_Session_%s" % \
(unicode(session_key)), session)
session.put()
if session_obj.sid in session.sid:
sessionAge = datetime.datetime.now() - session.last_activity
if sessionAge.seconds > session_obj.session_expire_time:
session.delete()
return None
return session
else:
return None
# Not in memcache, check datastore
query = _AppEngineUtilities_Session.all()
query.filter(u"sid = ", session_obj.sid)
results = query.fetch(1)
if len(results) > 0:
sessionAge = datetime.datetime.now() - results[0].last_activity
if sessionAge.seconds > session_obj.session_expire_time:
results[0].delete()
return None
memcache.set(u"_AppEngineUtilities_Session_%s" % \
(unicode(session_key)), results[0])
memcache.set(u"_AppEngineUtilities_SessionData_%s" % \
(unicode(session_key)), results[0].get_items_ds())
return results[0]
else:
return None
def get_items(self):
"""
Returns all the items stored in a session. Queries memcache first
and will try the datastore next.
"""
items = memcache.get(u"_AppEngineUtilities_SessionData_%s" % \
(unicode(self.session_key)))
if items:
for item in items:
if item.deleted == True:
item.delete()
items.remove(item)
return items
query = _AppEngineUtilities_SessionData.all()
query.filter(u"session_key", self.session_key)
results = query.fetch(1000)
return results
def get_item(self, keyname = None):
"""
Returns a single session data item from the memcache or datastore
Args:
keyname: keyname of the session data object
Returns the session data object if it exists, otherwise returns None
"""
mc = memcache.get(u"_AppEngineUtilities_SessionData_%s" % \
(unicode(self.session_key)))
if mc:
for item in mc:
if item.keyname == keyname:
if item.deleted == True:
item.delete()
return None
return item
query = _AppEngineUtilities_SessionData.all()
query.filter(u"session_key = ", self.session_key)
query.filter(u"keyname = ", keyname)
results = query.fetch(1)
if len(results) > 0:
memcache.set(u"_AppEngineUtilities_SessionData_%s" % \
(unicode(self.session_key)), self.get_items_ds())
return results[0]
return None
def get_items_ds(self):
"""
This gets all session data objects from the datastore, bypassing
memcache.
Returns a list of session data entities.
"""
query = _AppEngineUtilities_SessionData.all()
query.filter(u"session_key", self.session_key)
results = query.fetch(1000)
return results
def delete(self):
"""
Deletes a session and all it's associated data from the datastore and
memcache.
Returns True
"""
try:
query = _AppEngineUtilities_SessionData.all()
query.filter(u"session_key = ", self.session_key)
results = query.fetch(1000)
db.delete(results)
db.delete(self)
memcache.delete_multi([u"_AppEngineUtilities_Session_%s" % \
(unicode(self.session_key)), \
u"_AppEngineUtilities_SessionData_%s" % \
(unicode(self.session_key))])
except:
mc = memcache.get(u"_AppEngineUtilities_Session_%s" %+ \
(unicode(self.session_key)))
if mc:
mc.deleted = True
else:
# not in the memcache, check to see if it should be
query = _AppEngineUtilities_Session.all()
query.filter(u"sid = ", self.sid)
results = query.fetch(1)
if len(results) > 0:
results[0].deleted = True
memcache.set(u"_AppEngineUtilities_Session_%s" % \
(unicode(self.session_key)), results[0])
return True
def create_key(self):
"""
Creates a unique key for the session.
Returns the key value as a unicode string.
"""
self.session_key = time.time()
valid = False
while valid == False:
# verify session_key is unique
if memcache.get(u"_AppEngineUtilities_Session_%s" % \
(unicode(self.session_key))):
self.session_key = self.session_key + 0.001
else:
query = _AppEngineUtilities_Session.all()
query.filter(u"session_key = ", self.session_key)
results = query.fetch(1)
if len(results) > 0:
self.session_key = self.session_key + 0.001
else:
try:
self.put()
memcache.set(u"_AppEngineUtilities_Session_%s" %+ \
(unicode(self.session_key)), self)
except:
self.dirty = True
memcache.set(u"_AppEngineUtilities_Session_%s" % \
(unicode(self.session_key)), self)
valid = True
return unicode(self.session_key)
class _AppEngineUtilities_SessionData(ROTModel):
    """
    Model for the session data in the datastore.
    """

    session_key = db.FloatProperty()   # key of the owning session
    keyname = db.StringProperty()      # name of the stored value
    content = db.BlobProperty()        # pickled value (when not a model)
    model = db.ReferenceProperty()     # referenced entity (when a model)
    dirty = db.BooleanProperty(default=False)    # datastore write pending
    deleted = db.BooleanProperty(default=False)  # marked for deletion

    def put(self):
        """
        Adds a keyname/value for session to the datastore and memcache.

        Returns the key from the datastore put or u"dirty"
        """
        # update or insert in datastore
        try:
            return_val = db.put(self)
            self.dirty = False
        except:
            return_val = u"dirty"
            self.dirty = True

        # update or insert in memcache
        mc_items = memcache.get(u"_AppEngineUtilities_SessionData_%s" % \
            (unicode(self.session_key)))
        if mc_items:
            value_updated = False
            for item in mc_items:
                if item.keyname == self.keyname:
                    item.content = self.content
                    item.model = self.model
                    memcache.set(u"_AppEngineUtilities_SessionData_%s" % \
                        (unicode(self.session_key)), mc_items)
                    value_updated = True
                    break
            if value_updated == False:
                mc_items.append(self)
                memcache.set(u"_AppEngineUtilities_SessionData_%s" % \
                    (unicode(self.session_key)), mc_items)
        return return_val

    def delete(self):
        """
        Deletes an entity from the session in memcache and the datastore.

        Returns True
        """
        try:
            db.delete(self)
        except:
            # datastore delete failed; mark deleted so the cached copy is
            # flagged rather than removed, and cleanup can retry later
            self.deleted = True
        mc_items = memcache.get(u"_AppEngineUtilities_SessionData_%s" % \
            (unicode(self.session_key)))
        # BUGFIX: guard against a memcache miss (mc_items is None was a
        # TypeError), and iterate a copy — the original removed entries from
        # the list it was iterating and never set its value_handled flag.
        if mc_items:
            for item in mc_items[:]:
                if item.keyname == self.keyname:
                    if self.deleted == True:
                        item.deleted = True
                    else:
                        mc_items.remove(item)
                    memcache.set(u"_AppEngineUtilities_SessionData_%s" % \
                        (unicode(self.session_key)), mc_items)
                    break
        return True
class _DatastoreWriter(object):
def put(self, keyname, value, session):
"""
Insert a keyname/value pair into the datastore for the session.
Args:
keyname: The keyname of the mapping.
value: The value of the mapping.
Returns the model entity key
"""
keyname = session._validate_key(keyname)
if value is None:
raise ValueError(u"You must pass a value to put.")
# datestore write trumps cookie. If there is a cookie value
# with this keyname, delete it so we don't have conflicting
# entries.
if session.cookie_vals.has_key(keyname):
del(session.cookie_vals[keyname])
session.output_cookie["%s_data" % (session.cookie_name)] = \
simplejson.dumps(session.cookie_vals)
session.output_cookie["%s_data" % (session.cookie_name)]["path"] = \
session.cookie_path
if session.cookie_domain:
session.output_cookie["%s_data" % \
(session.cookie_name)]["domain"] = session.cookie_domain
print session.output_cookie.output()
sessdata = session._get(keyname=keyname)
if sessdata is None:
sessdata = _AppEngineUtilities_SessionData()
sessdata.session_key = session.session.session_key
sessdata.keyname = keyname
try:
db.model_to_protobuf(value)
if not value.is_saved():
value.put()
sessdata.model = value
except:
sessdata.content = pickle.dumps(value)
sessdata.model = None
session.cache[keyname] = value
return sessdata.put()
class _CookieWriter(object):
def put(self, keyname, value, session):
"""
Insert a keyname/value pair into the datastore for the session.
Args:
keyname: The keyname of the mapping.
value: The value of the mapping.
Returns True
"""
keyname = session._validate_key(keyname)
if value is None:
raise ValueError(u"You must pass a value to put.")
# Use simplejson for cookies instead of pickle.
session.cookie_vals[keyname] = value
# update the requests session cache as well.
session.cache[keyname] = value
# simplejson will raise any error I'd raise about an invalid value
# so let it raise exceptions
session.output_cookie["%s_data" % (session.cookie_name)] = \
simplejson.dumps(session.cookie_vals)
session.output_cookie["%s_data" % (session.cookie_name)]["path"] = \
session.cookie_path
if session.cookie_domain:
session.output_cookie["%s_data" % \
(session.cookie_name)]["domain"] = session.cookie_domain
print session.output_cookie.output()
return True
class Session(object):
"""
Sessions are used to maintain user presence between requests.
Sessions can either be stored server side in the datastore/memcache, or
be kept entirely as cookies. This is set either with the settings file
or on initialization, using the writer argument/setting field. Valid
values are "datastore" or "cookie".
Session can be used as a standard dictionary object.
session = appengine_utilities.sessions.Session()
session["keyname"] = "value" # sets keyname to value
print session["keyname"] # will print value
Datastore Writer:
The datastore writer was written with the focus being on security,
reliability, and performance. In that order.
It is based off of a session token system. All data is stored
server side in the datastore and memcache. A token is given to
the browser, and stored server side. Optionally (and on by default),
user agent and ip checking is enabled. Tokens have a configurable
time to live (TTL), which defaults to 5 seconds. The current token,
plus the previous 2, are valid for any request. This is done in order
to manage ajax enabled sites which may have more than on request
happening at a time. This means any token is valid for 15 seconds.
A request with a token who's TTL has passed will have a new token
generated.
In order to take advantage of the token system for an authentication
system, you will want to tie sessions to accounts, and make sure
only one session is valid for an account. You can do this by setting
a db.ReferenceProperty(_AppEngineUtilities_Session) attribute on
your user Model, and use the get_ds_entity() method on a valid
session to populate it on login.
Note that even with this complex system, sessions can still be hijacked
and it will take the user logging in to retrieve the account. In the
future an ssl only cookie option may be implemented for the datastore
writer, which would further protect the session token from being
sniffed, however it would be restricted to using cookies on the
.appspot.com domain, and ssl requests are a finite resource. This is
why such a thing is not currently implemented.
Session data objects are stored in the datastore pickled, so any
python object is valid for storage.
Cookie Writer:
Sessions using the cookie writer are stored entirely in the browser
and no interaction with the datastore is required. This creates
a drastic improvement in performance, but provides no security for
session hijack. This is useful for requests where identity is not
important, but you wish to keep state between requests.
Information is stored in a json format, as pickled data from the
server is unreliable.
Note: There is no checksum validation of session data on this method,
it's streamlined for pure performance. If you need to make sure data
is not tampered with, use the datastore writer which stores the data
server side.
django-middleware:
Included with the GAEUtilties project is a
django-middleware.middleware.SessionMiddleware which can be included in
your settings file. This uses the cookie writer for anonymous requests,
and you can switch to the datastore writer on user login. This will
require an extra set in your login process of calling
request.session.save() once you validated the user information. This
will convert the cookie writer based session to a datastore writer.
"""
# cookie name declaration for class methods
COOKIE_NAME = settings.session["COOKIE_NAME"]
def __init__(self, cookie_path=settings.session["DEFAULT_COOKIE_PATH"],
cookie_domain=settings.session["DEFAULT_COOKIE_DOMAIN"],
cookie_name=settings.session["COOKIE_NAME"],
session_expire_time=settings.session["SESSION_EXPIRE_TIME"],
clean_check_percent=settings.session["CLEAN_CHECK_PERCENT"],
integrate_flash=settings.session["INTEGRATE_FLASH"],
check_ip=settings.session["CHECK_IP"],
check_user_agent=settings.session["CHECK_USER_AGENT"],
set_cookie_expires=settings.session["SET_COOKIE_EXPIRES"],
session_token_ttl=settings.session["SESSION_TOKEN_TTL"],
last_activity_update=settings.session["UPDATE_LAST_ACTIVITY"],
writer=settings.session["WRITER"]):
"""
Initializer
Args:
cookie_path: The path setting for the cookie.
cookie_domain: The domain setting for the cookie. (Set to False
to not use)
cookie_name: The name for the session cookie stored in the browser.
session_expire_time: The amount of time between requests before the
session expires.
clean_check_percent: The percentage of requests the will fire off a
cleaning routine that deletes stale session data.
integrate_flash: If appengine-utilities flash utility should be
integrated into the session object.
check_ip: If browser IP should be used for session validation
check_user_agent: If the browser user agent should be used for
sessoin validation.
set_cookie_expires: True adds an expires field to the cookie so
it saves even if the browser is closed.
session_token_ttl: Number of sessions a session token is valid
for before it should be regenerated.
"""
self.cookie_path = cookie_path
self.cookie_domain = cookie_domain
self.cookie_name = cookie_name
self.session_expire_time = session_expire_time
self.integrate_flash = integrate_flash
self.check_user_agent = check_user_agent
self.check_ip = check_ip
self.set_cookie_expires = set_cookie_expires
self.session_token_ttl = session_token_ttl
self.last_activity_update = last_activity_update
self.writer = writer
# make sure the page is not cached in the browser
print self.no_cache_headers()
# Check the cookie and, if necessary, create a new one.
self.cache = {}
string_cookie = os.environ.get(u"HTTP_COOKIE", u"")
self.cookie = Cookie.SimpleCookie()
self.output_cookie = Cookie.SimpleCookie()
self.cookie.load(string_cookie)
try:
self.cookie_vals = \
simplejson.loads(self.cookie["%s_data" % (self.cookie_name)].value)
# sync self.cache and self.cookie_vals which will make those
# values available for all gets immediately.
for k in self.cookie_vals:
self.cache[k] = self.cookie_vals[k]
# sync the input cookie with the output cookie
self.output_cookie["%s_data" % (self.cookie_name)] = \
simplejson.dumps(self.cookie_vals) #self.cookie["%s_data" % (self.cookie_name)]
except Exception, e:
self.cookie_vals = {}
if writer == "cookie":
pass
else:
self.sid = None
new_session = True
# do_put is used to determine if a datastore write should
# happen on this request.
do_put = False
# check for existing cookie
if self.cookie.get(cookie_name):
self.sid = self.cookie[cookie_name].value
# The following will return None if the sid has expired.
self.session = _AppEngineUtilities_Session.get_session(self)
if self.session:
new_session = False
if new_session:
# start a new session
self.session = _AppEngineUtilities_Session()
self.session.put()
self.sid = self.new_sid()
if u"HTTP_USER_AGENT" in os.environ:
self.session.ua = os.environ[u"HTTP_USER_AGENT"]
else:
self.session.ua = None
if u"REMOTE_ADDR" in os.environ:
self.session.ip = os.environ["REMOTE_ADDR"]
else:
self.session.ip = None
self.session.sid = [self.sid]
# do put() here to get the session key
self.session.put()
else:
# check the age of the token to determine if a new one
# is required
duration = datetime.timedelta(seconds=self.session_token_ttl)
session_age_limit = datetime.datetime.now() - duration
if self.session.last_activity < session_age_limit:
self.sid = self.new_sid()
if len(self.session.sid) > 2:
self.session.sid.remove(self.session.sid[0])
self.session.sid.append(self.sid)
do_put = True
else:
self.sid = self.session.sid[-1]
# check if last_activity needs updated
ula = datetime.timedelta(seconds=self.last_activity_update)
if datetime.datetime.now() > self.session.last_activity + \
ula:
do_put = True
self.output_cookie[cookie_name] = self.sid
self.output_cookie[cookie_name]["path"] = self.cookie_path
if self.cookie_domain:
self.output_cookie[cookie_name]["domain"] = self.cookie_domain
if self.set_cookie_expires:
self.output_cookie[cookie_name]["expires"] = \
self.session_expire_time
self.cache[u"sid"] = self.sid
if do_put:
if self.sid != None or self.sid != u"":
self.session.put()
if self.set_cookie_expires:
if not self.output_cookie.has_key("%s_data" % (cookie_name)):
self.output_cookie["%s_data" % (cookie_name)] = u""
self.output_cookie["%s_data" % (cookie_name)]["expires"] = \
self.session_expire_time
print self.output_cookie.output()
# fire up a Flash object if integration is enabled
if self.integrate_flash:
import flash
self.flash = flash.Flash(cookie=self.cookie)
# randomly delete old stale sessions in the datastore (see
# CLEAN_CHECK_PERCENT variable)
if random.randint(1, 100) < clean_check_percent:
self._clean_old_sessions()
def new_sid(self):
"""
Create a new session id.
Returns session id as a unicode string.
"""
sid = u"%s_%s" % (self.session.session_key,
hashlib.md5(repr(time.time()) + \
unicode(random.random())).hexdigest()
)
#sid = unicode(self.session.session_key) + "_" + \
# hashlib.md5(repr(time.time()) + \
# unicode(random.random())).hexdigest()
return sid
def _get(self, keyname=None):
"""
private method
Return all of the SessionData object data from the datastore only,
unless keyname is specified, in which case only that instance of
SessionData is returned.
Important: This does not interact with memcache and pulls directly
from the datastore. This also does not get items from the cookie
store.
Args:
keyname: The keyname of the value you are trying to retrieve.
Returns a list of datastore entities.
"""
if hasattr(self, 'session'):
if keyname != None:
return self.session.get_item(keyname)
return self.session.get_items()
return None
def _validate_key(self, keyname):
"""
private method
Validate the keyname, making sure it is set and not a reserved name.
Returns the validated keyname.
"""
if keyname is None:
raise ValueError(
u"You must pass a keyname for the session data content."
)
elif keyname in (u"sid", u"flash"):
raise ValueError(u"%s is a reserved keyname." % keyname)
if type(keyname) != type([str, unicode]):
return unicode(keyname)
return keyname
def _put(self, keyname, value):
"""
Insert a keyname/value pair into the datastore for the session.
Args:
keyname: The keyname of the mapping.
value: The value of the mapping.
Returns the value from the writer put operation, varies based on writer.
"""
if self.writer == "datastore":
writer = _DatastoreWriter()
else:
writer = _CookieWriter()
return writer.put(keyname, value, self)
def _delete_session(self):
"""
private method
Delete the session and all session data.
Returns True.
"""
# if the event class has been loaded, fire off the preSessionDelete event
if u"AEU_Events" in sys.modules['__main__'].__dict__:
sys.modules['__main__'].AEU_Events.fire_event(u"preSessionDelete")
if hasattr(self, u"session"):
self.session.delete()
self.cookie_vals = {}
self.cache = {}
self.output_cookie["%s_data" % (self.cookie_name)] = \
simplejson.dumps(self.cookie_vals)
self.output_cookie["%s_data" % (self.cookie_name)]["path"] = \
self.cookie_path
if self.cookie_domain:
self.output_cookie["%s_data" % \
(self.cookie_name)]["domain"] = self.cookie_domain
print self.output_cookie.output()
# if the event class has been loaded, fire off the sessionDelete event
if u"AEU_Events" in sys.modules['__main__'].__dict__:
sys.modules['__main__'].AEU_Events.fire_event(u"sessionDelete")
return True
def delete(self):
"""
Delete the current session and start a new one.
This is useful for when you need to get rid of all data tied to a
current session, such as when you are logging out a user.
Returns True
"""
self._delete_session()
@classmethod
def delete_all_sessions(cls):
"""
Deletes all sessions and session data from the data store. This
does not delete the entities from memcache (yet). Depending on the
amount of sessions active in your datastore, this request could
timeout before completion and may have to be called multiple times.
NOTE: This can not delete cookie only sessions as it has no way to
access them. It will only delete datastore writer sessions.
Returns True on completion.
"""
all_sessions_deleted = False
while not all_sessions_deleted:
query = _AppEngineUtilities_Session.all()
results = query.fetch(75)
if len(results) is 0:
all_sessions_deleted = True
else:
for result in results:
result.delete()
return True
def _clean_old_sessions(self):
"""
Delete 50 expired sessions from the datastore.
This is only called for CLEAN_CHECK_PERCENT percent of requests because
it could be rather intensive.
Returns True on completion
"""
duration = datetime.timedelta(seconds=self.session_expire_time)
session_age = datetime.datetime.now() - duration
query = _AppEngineUtilities_Session.all()
query.filter(u"last_activity <", session_age)
results = query.fetch(50)
for result in results:
result.delete()
return True
def cycle_key(self):
"""
Changes the session id/token.
Returns new token.
"""
self.sid = self.new_sid()
if len(self.session.sid) > 2:
self.session.sid.remove(self.session.sid[0])
self.session.sid.append(self.sid)
return self.sid
def flush(self):
"""
Delete's the current session, creating a new one.
Returns True
"""
self._delete_session()
self.__init__()
return True
def no_cache_headers(self):
"""
Generates headers to avoid any page caching in the browser.
Useful for highly dynamic sites.
Returns a unicode string of headers.
"""
return u"".join([u"Expires: Tue, 03 Jul 2001 06:00:00 GMT",
strftime("Last-Modified: %a, %d %b %y %H:%M:%S %Z").decode("utf-8"),
u"Cache-Control: no-store, no-cache, must-revalidate, max-age=0",
u"Cache-Control: post-check=0, pre-check=0",
u"Pragma: no-cache",
])
def clear(self):
"""
Removes session data items, doesn't delete the session. It does work
with cookie sessions, and must be called before any output is sent
to the browser, as it set cookies.
Returns True
"""
sessiondata = self._get()
# delete from datastore
if sessiondata is not None:
for sd in sessiondata:
sd.delete()
# delete from memcache
self.cache = {}
self.cookie_vals = {}
self.output_cookie["%s_data" %s (self.cookie_name)] = \
simplejson.dumps(self.cookie_vals)
self.output_cookie["%s_data" % (self.cookie_name)]["path"] = \
self.cookie_path
if self.cookie_domain:
self.output_cookie["%s_data" % \
(self.cookie_name)]["domain"] = self.cookie_domain
print self.output_cookie.output()
return True
def has_key(self, keyname):
"""
Equivalent to k in a, use that form in new code
Args:
keyname: keyname to check
Returns True/False
"""
return self.__contains__(keyname)
def items(self):
"""
Creates a copy of just the data items.
Returns dictionary of session data objects.
"""
op = {}
for k in self:
op[k] = self[k]
return op
def keys(self):
"""
Returns a list of keys.
"""
l = []
for k in self:
l.append(k)
return l
def update(self, *dicts):
"""
Updates with key/value pairs from b, overwriting existing keys
Returns None
"""
for dict in dicts:
for k in dict:
self._put(k, dict[k])
return None
def values(self):
"""
Returns a list object of just values in the session.
"""
v = []
for k in self:
v.append(self[k])
return v
def get(self, keyname, default = None):
"""
Returns either the value for the keyname or a default value
passed.
Args:
keyname: keyname to look up
default: (optional) value to return on keyname miss
Returns value of keyname, or default, or None
"""
try:
return self.__getitem__(keyname)
except KeyError:
if default is not None:
return default
return None
def setdefault(self, keyname, default = None):
"""
Returns either the value for the keyname or a default value
passed. If keyname lookup is a miss, the keyname is set with
a value of default.
Args:
keyname: keyname to look up
default: (optional) value to return on keyname miss
Returns value of keyname, or default, or None
"""
try:
return self.__getitem__(keyname)
except KeyError:
if default is not None:
self.__setitem__(keyname, default)
return default
return None
@classmethod
def check_token(cls, cookie_name=COOKIE_NAME, delete_invalid=True):
"""
Retrieves the token from a cookie and validates that it is
a valid token for an existing cookie. Cookie validation is based
on the token existing on a session that has not expired.
This is useful for determining if datastore or cookie writer
should be used in hybrid implementations.
Args:
cookie_name: Name of the cookie to check for a token.
delete_invalid: If the token is not valid, delete the session
cookie, to avoid datastore queries on future
requests.
Returns True/False
"""
string_cookie = os.environ.get(u"HTTP_COOKIE", u"")
cookie = Cookie.SimpleCookie()
cookie.load(string_cookie)
if cookie.has_key(cookie_name):
query = _AppEngineUtilities_Session.all()
query.filter(u"sid", cookie[cookie_name].value)
results = query.fetch(1)
if len(results) > 0:
return True
else:
if delete_invalid:
output_cookie = Cookie.SimpleCookie()
output_cookie[cookie_name] = cookie[cookie_name]
output_cookie[cookie_name][u"expires"] = 0
print output_cookie.output()
return False
def get_ds_entity(self):
"""
Will return the session entity from the datastore if one
exists, otherwise will return None (as in the case of cookie writer
session.
"""
if hasattr(self, u"session"):
return self.session
return None
# Implement Python container methods
def __getitem__(self, keyname):
"""
Get item from session data.
keyname: The keyname of the mapping.
"""
# flash messages don't go in the datastore
if self.integrate_flash and (keyname == u"flash"):
return self.flash.msg
if keyname in self.cache:
return self.cache[keyname]
if keyname in self.cookie_vals:
return self.cookie_vals[keyname]
if hasattr(self, u"session"):
data = self._get(keyname)
if data:
# TODO: It's broke here, but I'm not sure why, it's
# returning a model object, but I can't seem to modify
# it.
try:
if data.model != None:
self.cache[keyname] = data.model
return self.cache[keyname]
else:
self.cache[keyname] = pickle.loads(data.content)
return self.cache[keyname]
except:
self.delete_item(keyname)
else:
raise KeyError(unicode(keyname))
raise KeyError(unicode(keyname))
def __setitem__(self, keyname, value):
"""
Set item in session data.
Args:
keyname: They keyname of the mapping.
value: The value of mapping.
"""
if self.integrate_flash and (keyname == u"flash"):
self.flash.msg = value
else:
keyname = self._validate_key(keyname)
self.cache[keyname] = value
return self._put(keyname, value)
def delete_item(self, keyname, throw_exception=False):
"""
Delete item from session data, ignoring exceptions if
necessary.
Args:
keyname: The keyname of the object to delete.
throw_exception: false if exceptions are to be ignored.
Returns:
Nothing.
"""
if throw_exception:
self.__delitem__(keyname)
return None
else:
try:
self.__delitem__(keyname)
except KeyError:
return None
def __delitem__(self, keyname):
"""
Delete item from session data.
Args:
keyname: The keyname of the object to delete.
"""
bad_key = False
sessdata = self._get(keyname = keyname)
if sessdata is None:
bad_key = True
else:
sessdata.delete()
if keyname in self.cookie_vals:
del self.cookie_vals[keyname]
bad_key = False
self.output_cookie["%s_data" % (self.cookie_name)] = \
simplejson.dumps(self.cookie_vals)
self.output_cookie["%s_data" % (self.cookie_name)]["path"] = \
self.cookie_path
if self.cookie_domain:
self.output_cookie["%s_data" % \
(self.cookie_name)]["domain"] = self.cookie_domain
print self.output_cookie.output()
if bad_key:
raise KeyError(unicode(keyname))
if keyname in self.cache:
del self.cache[keyname]
def __len__(self):
"""
Return size of session.
"""
# check memcache first
if hasattr(self, u"session"):
results = self._get()
if results is not None:
return len(results) + len(self.cookie_vals)
else:
return 0
return len(self.cookie_vals)
def __contains__(self, keyname):
"""
Check if an item is in the session data.
Args:
keyname: The keyname being searched.
"""
try:
self.__getitem__(keyname)
except KeyError:
return False
return True
def __iter__(self):
"""
Iterate over the keys in the session data.
"""
# try memcache first
if hasattr(self, u"session"):
vals = self._get()
if vals is not None:
for k in vals:
yield k.keyname
for k in self.cookie_vals:
yield k
def __str__(self):
"""
Return string representation.
"""
return u"{%s}" % ', '.join(['"%s" = "%s"' % (k, self[k]) for k in self])
| Python |
import Cookie
import os
from common.appengine_utilities import sessions
class SessionMiddleware(object):
    """
    Django-style middleware that attaches a gaeutilities session to each
    incoming request and mirrors the session's cookie output onto the
    response.
    """
    # Cookie used to probe whether the client accepts cookies at all.
    TEST_COOKIE_NAME = 'testcookie'
    TEST_COOKIE_VALUE = 'worked'
    def process_request(self, request):
        """
        Check to see if a valid session token exists, if not,
        then use a cookie only session. It's up to the application
        to convert the session to a datastore session. Once this
        has been done, the session will continue to use the datastore
        unless the writer is set to "cookie".
        Setting the session to use the datastore is as easy as resetting
        request.session anywhere if your application.
        Example:
        from common.appengine_utilities import sessions
        request.session = sessions.Session()
        """
        self.request = request
        if sessions.Session.check_token():
            request.session = sessions.Session()
        else:
            request.session = sessions.Session(writer="cookie")
        # Expose the middleware's cookie helpers on the session object.
        # NOTE(review): these bound methods close over the middleware
        # instance (self), not the session -- confirm that is intended.
        request.session.set_test_cookie = self.set_test_cookie
        request.session.test_cookie_worked = self.test_cookie_worked
        request.session.delete_test_cookie = self.delete_test_cookie
        request.session.save = self.save
        return None
    def set_test_cookie(self):
        """
        Add the test cookie to the cookies loaded from the environment.
        """
        string_cookie = os.environ.get('HTTP_COOKIE', '')
        self.cookie = Cookie.SimpleCookie()
        self.cookie.load(string_cookie)
        self.cookie[self.TEST_COOKIE_NAME] = self.TEST_COOKIE_VALUE
        # Printing a SimpleCookie renders "Set-Cookie: ..." text; in a CGI
        # environment this ends up in the response -- TODO confirm.
        print self.cookie
    def test_cookie_worked(self):
        """
        Return the test cookie's Morsel if the client sent it back,
        otherwise None.
        """
        string_cookie = os.environ.get('HTTP_COOKIE', '')
        self.cookie = Cookie.SimpleCookie()
        self.cookie.load(string_cookie)
        return self.cookie.get(self.TEST_COOKIE_NAME)
    def delete_test_cookie(self):
        """
        Blank out the test cookie and mark it expired.
        """
        string_cookie = os.environ.get('HTTP_COOKIE', '')
        self.cookie = Cookie.SimpleCookie()
        self.cookie.load(string_cookie)
        self.cookie[self.TEST_COOKIE_NAME] = ''
        self.cookie[self.TEST_COOKIE_NAME]['path'] = '/'
        self.cookie[self.TEST_COOKIE_NAME]['expires'] = 0
        # NOTE(review): unlike set_test_cookie(), nothing is printed here,
        # so the expiry may never reach the client -- confirm intended.
    def save(self):
        """
        Promote the current session to a (default-writer) session.
        """
        self.request.session = sessions.Session()
    def process_response(self, request, response):
        """
        Copy the session's cookie output onto the outgoing response.
        """
        if hasattr(request, "session"):
            response.cookies= request.session.output_cookie
        return response
| Python |
"""
Copyright (c) 2008, appengine-utilities project
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
- Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
- Neither the name of the appengine-utilities project nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import sys
class Event(object):
    """
    Event is a simple publish/subscribe based event dispatcher. It's a way
    to add, or take advantage of, hooks in your application. If you want to
    tie actions in with lower level classes you're developing within your
    application, you can set events to fire, and then subscribe to them with
    callback methods in other methods in your application.

    It sets itself to the sys.modules['__main__'] function. In order to use
    it, you must import it with your sys.modules['__main__'] method, and make
    sure you import sys.modules['__main__'] and it's accessible for the
    methods where you want to use it.

    For example, from sessions.py
        # if the event class has been loaded, fire off the sessionDeleted
        # event
        if u"AEU_Events" in sys.modules['__main__'].__dict__:
            sys.modules['__main__'].AEU_Events.fire_event(u"sessionDelete")

    You can then subscribe to session delete events, adding a callback
        if u"AEU_Events" in sys.modules['__main__'].__dict__:
            sys.modules['__main__'].AEU_Events.subscribe(u"sessionDelete", \
                clear_user_session)
    """

    def __init__(self):
        # Each subscription is a dict with keys "event", "callback", "args".
        self.events = []

    def subscribe(self, event, callback, args=None):
        """
        Subscribe a callback function to an event name. An identical
        subscription (same event, callback and args) is added only once.
        Args:
            event: The event to subscribe to.
            callback: The callback method to run.
            args: Optional arguments to pass with the callback.
        Returns True
        """
        subscription = {"event": event, "callback": callback, "args": args, }
        if subscription not in self.events:
            self.events.append(subscription)
        return True

    def unsubscribe(self, event, callback, args=None):
        """
        Remove a callback subscription from an event, if present.
        Args:
            event: The event to unsubscribe from.
            callback: The callback method that was registered.
            args: The arguments it was registered with.
        Returns True
        """
        subscription = {"event": event, "callback": callback, "args": args, }
        if subscription in self.events:
            self.events.remove(subscription)
        return True

    def fire_event(self, event=None):
        """
        Run every callback registered for 'event'. The form of the stored
        args determines the call style: a list is star-expanded, a dict is
        passed as keyword arguments, None means call with no arguments, and
        any other value is passed as a single positional argument.
        Args:
            event: The name of the event to fire.
        Returns True
        """
        for subscription in self.events:
            if subscription["event"] != event:
                continue
            callback = subscription["callback"]
            args = subscription["args"]
            if isinstance(args, list):
                callback(*args)
            elif isinstance(args, dict):
                callback(**args)
            elif args is None:
                callback()
            else:
                callback(args)
        return True
"""
Assign to the event class to sys.modules['__main__']
"""
sys.modules['__main__'].AEU_Events = Event()
| Python |
"""
Copyright (c) 2008, appengine-utilities project
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
- Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
- Neither the name of the appengine-utilities project nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import os
import Cookie
from time import strftime
from django.utils import simplejson
# settings
# Prefer the application's own ``settings`` module; fall back to the
# library's ``settings_default`` when it is missing or lives in a
# different package than settings_default.
try:
    import settings_default
    import settings
    if settings.__name__.rsplit('.', 1)[0] != settings_default.__name__.rsplit('.', 1)[0]:
        settings = settings_default
except:
    # NOTE(review): bare except -- if ``import settings_default`` itself
    # failed, the assignment below raises NameError; confirm intended.
    settings = settings_default
# Name of the cookie the flash message travels in.
COOKIE_NAME = settings.flash["COOKIE_NAME"]
class Flash(object):
    """
    Send messages to the user between pages.
    When you instantiate the class, the attribute 'msg' will be set from the
    cookie, and the cookie will be deleted. If there is no flash cookie, 'msg'
    will default to None.
    To set a flash message for the next page, simply set the 'msg' attribute.
    Example psuedocode:
        if new_entity.put():
            flash = Flash()
            flash.msg = 'Your new entity has been created!'
            return redirect_to_entity_list()
    Then in the template on the next page:
        {% if flash.msg %}
            <div class="flash-msg">{{ flash.msg }}</div>
        {% endif %}
    """
    def __init__(self, cookie=None):
        """
        Load the flash message and clear the cookie.
        Args:
            cookie: optional pre-parsed cookie object; when None the
                cookie is loaded from the HTTP_COOKIE environment variable.
        """
        # Printed output becomes part of the CGI response -- TODO confirm
        # callers rely on this for the no-cache headers.
        print self.no_cache_headers()
        # load cookie
        if cookie is None:
            browser_cookie = os.environ.get('HTTP_COOKIE', '')
            self.cookie = Cookie.SimpleCookie()
            self.cookie.load(browser_cookie)
        else:
            self.cookie = cookie
        # check for flash data
        if self.cookie.get(COOKIE_NAME):
            # set 'msg' attribute
            cookie_val = self.cookie[COOKIE_NAME].value
            # we don't want to trigger __setattr__(), which creates a cookie
            try:
                self.__dict__['msg'] = simplejson.loads(cookie_val)
            except:
                # not able to load the json, so do not set message. This should
                # catch for when the browser doesn't delete the cookie in time for
                # the next request, and only blanks out the content.
                pass
            # clear the cookie: blank value, expired immediately
            self.cookie[COOKIE_NAME] = ''
            self.cookie[COOKIE_NAME]['path'] = '/'
            self.cookie[COOKIE_NAME]['expires'] = 0
            # Emit the deletion cookie as a Set-Cookie line (CGI output).
            print self.cookie[COOKIE_NAME]
        else:
            # default 'msg' attribute to None
            self.__dict__['msg'] = None
    def __setattr__(self, name, value):
        """
        Create a cookie when setting the 'msg' attribute.
        Raises ValueError for any attribute other than 'cookie' or 'msg'.
        """
        if name == 'cookie':
            self.__dict__['cookie'] = value
        elif name == 'msg':
            self.__dict__['msg'] = value
            # Serialize the message into the flash cookie for the next page.
            self.__dict__['cookie'][COOKIE_NAME] = simplejson.dumps(value)
            self.__dict__['cookie'][COOKIE_NAME]['path'] = '/'
            print self.cookie
        else:
            raise ValueError('You can only set the "msg" attribute.')
    def no_cache_headers(self):
        """
        Generates headers to avoid any page caching in the browser.
        Useful for highly dynamic sites.
        Returns a unicode string of headers.
        """
        # NOTE(review): the pieces are joined with no separator, producing
        # one long line rather than distinct header lines -- confirm the
        # caller handles this.
        return u"".join([u"Expires: Tue, 03 Jul 2001 06:00:00 GMT",
            strftime("Last-Modified: %a, %d %b %y %H:%M:%S %Z").decode("utf-8"),
            u"Cache-Control: no-store, no-cache, must-revalidate, max-age=0",
            u"Cache-Control: post-check=0, pre-check=0",
            u"Pragma: no-cache",
            ])
| Python |
"""
Copyright (c) 2008, appengine-utilities project
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
- Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
- Neither the name of the appengine-utilities project nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import time
from google.appengine.api import datastore
from google.appengine.ext import db
# settings
# Prefer the application's own ``settings`` module; fall back to the
# library's ``settings_default`` when it is missing or lives in a
# different package than settings_default.
try:
    import settings_default
    import settings
    if settings.__name__.rsplit('.', 1)[0] != settings_default.__name__.rsplit('.', 1)[0]:
        settings = settings_default
except:
    # NOTE(review): bare except -- if ``import settings_default`` itself
    # failed, the assignment below raises NameError; confirm intended.
    settings = settings_default
class ROTModel(db.Model):
    """
    ROTModel ("retry on timeout") overrides the db.Model functions, retrying
    each method each time a timeout exception is raised. Retries use a
    linearly growing sleep (attempt number * RETRY_INTERVAL seconds).
    Methods superclassed from db.Model are:
        get(cls, keys)
        get_by_id(cls, ids, parent)
        get_by_key_name(cls, key_names, parent)
        get_or_insert(cls, key_name, kwargs)
        put(self)
    """
    @classmethod
    def get(cls, keys):
        # Retry on datastore timeout with growing back-off.
        count = 0
        while count < settings.rotmodel["RETRY_ATTEMPTS"]:
            try:
                # NOTE(review): calls db.Model.get rather than cls/super,
                # bypassing any subclass override -- confirm intended.
                return db.Model.get(keys)
            except db.Timeout:
                count += 1
                time.sleep(count * settings.rotmodel["RETRY_INTERVAL"])
        else:
            # while/else: reached only when every attempt timed out
            # (a successful return above skips this clause).
            raise db.Timeout()
    @classmethod
    def get_by_id(cls, ids, parent=None):
        # Same retry scheme as get().
        count = 0
        while count < settings.rotmodel["RETRY_ATTEMPTS"]:
            try:
                return db.Model.get_by_id(ids, parent)
            except db.Timeout:
                count += 1
                time.sleep(count * settings.rotmodel["RETRY_INTERVAL"])
        else:
            raise db.Timeout()
    @classmethod
    def get_by_key_name(cls, key_names, parent=None):
        # Build full datastore keys for this kind, then fetch with retries.
        if isinstance(parent, db.Model):
            parent = parent.key()
        key_names, multiple = datastore.NormalizeAndTypeCheck(key_names, basestring)
        keys = [datastore.Key.from_path(cls.kind(), name, parent=parent)
                for name in key_names]
        count = 0
        if multiple:
            # List input: db.get(keys) returns a list of entities.
            while count < settings.rotmodel["RETRY_ATTEMPTS"]:
                try:
                    return db.get(keys)
                except db.Timeout:
                    count += 1
                    time.sleep(count * settings.rotmodel["RETRY_INTERVAL"])
        else:
            # Single name: unpack so db.get returns a single entity.
            # NOTE(review): unlike get()/put(), exhausting the retries here
            # falls through and returns None instead of raising db.Timeout
            # -- confirm intended.
            while count < settings.rotmodel["RETRY_ATTEMPTS"]:
                try:
                    return db.get(*keys)
                except db.Timeout:
                    count += 1
                    time.sleep(count * settings.rotmodel["RETRY_INTERVAL"])
    @classmethod
    def get_or_insert(cls, key_name, **kwargs):
        # Transactional read-or-create; the inner helpers already retry.
        def txn():
            entity = cls.get_by_key_name(key_name, parent=kwargs.get('parent'))
            if entity is None:
                entity = cls(key_name=key_name, **kwargs)
                entity.put()
            return entity
        return db.run_in_transaction(txn)
    def put(self):
        # Retry writes on timeout, same scheme as get().
        count = 0
        while count < settings.rotmodel["RETRY_ATTEMPTS"]:
            try:
                return db.Model.put(self)
            except db.Timeout:
                count += 1
                time.sleep(count * settings.rotmodel["RETRY_INTERVAL"])
        else:
            raise db.Timeout()
    def delete(self):
        # Retry deletes on timeout, same scheme as get().
        count = 0
        while count < settings.rotmodel["RETRY_ATTEMPTS"]:
            try:
                return db.Model.delete(self)
            except db.Timeout:
                count += 1
                time.sleep(count * settings.rotmodel["RETRY_INTERVAL"])
        else:
            raise db.Timeout()
| Python |
"""
Copyright (c) 2008, appengine-utilities project
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
- Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
- Neither the name of the appengine-utilities project nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
__author__="jbowman"
__date__ ="$Sep 11, 2009 4:20:11 PM$"
# Configuration settings for the session class.
session = {
"COOKIE_NAME": "gaeutilities_session",
"DEFAULT_COOKIE_PATH": "/",
"DEFAULT_COOKIE_DOMAIN": False, # Set to False if you do not want this value
# set on the cookie, otherwise put the
# domain value you wish used.
"SESSION_EXPIRE_TIME": 7200, # sessions are valid for 7200 seconds
# (2 hours)
"INTEGRATE_FLASH": True, # integrate functionality from flash module?
"SET_COOKIE_EXPIRES": True, # Set to True to add expiration field to
# cookie
"WRITER":"datastore", # Use the datastore writer by default.
# cookie is the other option.
"CLEAN_CHECK_PERCENT": 50, # By default, 50% of all requests will clean
# the datastore of expired sessions
"CHECK_IP": True, # validate sessions by IP
"CHECK_USER_AGENT": True, # validate sessions by user agent
"SESSION_TOKEN_TTL": 5, # Number of seconds a session token is valid
# for.
"UPDATE_LAST_ACTIVITY": 60, # Number of seconds that may pass before
# last_activity is updated
}
# Configuration settings for the cache class
cache = {
"DEFAULT_TIMEOUT": 3600, # cache expires after one hour (3600 sec)
"CLEAN_CHECK_PERCENT": 50, # 50% of all requests will clean the database
"MAX_HITS_TO_CLEAN": 20, # the maximum number of cache hits to clean
}
# Configuration settings for the flash class
flash = {
"COOKIE_NAME": "appengine-utilities-flash",
}
# Configuration settings for the paginator class
paginator = {
"DEFAULT_COUNT": 10,
"CACHE": 10,
"DEFAULT_SORT_ORDER": "ASC",
}
rotmodel = {
"RETRY_ATTEMPTS": 3,
"RETRY_INTERVAL": .2,
}
if __name__ == "__main__":
print "Hello World";
| Python |
# -*- coding: utf-8 -*-
"""
Copyright (c) 2008, appengine-utilities project
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
- Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
- Neither the name of the appengine-utilities project nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
# main python imports
import datetime
import pickle
import random
import sys
# google appengine import
from google.appengine.ext import db
from google.appengine.api import memcache
# settings
# Prefer the application's own ``settings`` module; fall back to the
# library's ``settings_default`` when it is missing or lives in a
# different package than settings_default.
try:
    import settings_default
    import settings
    if settings.__name__.rsplit('.', 1)[0] != settings_default.__name__.rsplit('.', 1)[0]:
        settings = settings_default
except:
    # NOTE(review): bare except -- if ``import settings_default`` itself
    # failed, the assignment below raises NameError; confirm intended.
    settings = settings_default
class _AppEngineUtilities_Cache(db.Model):
    # Datastore entity backing the Cache class: one entity per cached key.
    cachekey = db.StringProperty()                       # user-supplied cache key
    createTime = db.DateTimeProperty(auto_now_add=True)  # when the entry was written
    timeout = db.DateTimeProperty()                      # absolute expiry time
    value = db.BlobProperty()                            # pickled payload
class Cache(object):
    """
    Cache is used for storing pregenerated output and/or objects in the Big
    Table datastore to minimize the amount of queries needed for page
    displays. The idea is that complex queries that generate the same
    results really should only be run once. Cache can be used to store
    pregenerated value made from queries (or other calls such as
    urlFetch()), or the query objects themselves.

    Cache is a standard dictionary object and can be used as such. It
    attempts to store data in both memcache, and the datastore. However,
    should a datastore write fail, it will not try again. This is for
    performance reasons.
    """

    def __init__(self, clean_check_percent = settings.cache["CLEAN_CHECK_PERCENT"],
          max_hits_to_clean = settings.cache["MAX_HITS_TO_CLEAN"],
          default_timeout = settings.cache["DEFAULT_TIMEOUT"]):
        """
        Initializer
        Args:
            clean_check_percent: how often cache initialization should
                run the cache cleanup
            max_hits_to_clean: maximum number of stale hits to clean
            default_timeout: default length (seconds) a cache item is good for
        """
        self.clean_check_percent = clean_check_percent
        self.max_hits_to_clean = max_hits_to_clean
        self.default_timeout = default_timeout
        # Opportunistic cleanup on a percentage of instantiations.
        if random.randint(1, 100) < self.clean_check_percent:
            try:
                self._clean_cache()
            except:
                # Cleanup is best-effort; it must never break the request
                # that happened to trigger it.
                pass
        if 'AEU_Events' in sys.modules['__main__'].__dict__:
            sys.modules['__main__'].AEU_Events.fire_event('cacheInitialized')

    def _clean_cache(self):
        """
        _clean_cache is a routine that is run to find and delete cache
        items that are old. This helps keep the size of your over all
        datastore down.
        It only deletes the max_hits_to_clean per attempt, in order
        to maximize performance. Default settings are 20 hits, 50%
        of requests. Generally less hits cleaned on more requests will
        give you better performance.
        Returns True on completion
        """
        query = _AppEngineUtilities_Cache.all()
        query.filter('timeout < ', datetime.datetime.now())
        results = query.fetch(self.max_hits_to_clean)
        db.delete(results)
        return True

    def _memcache_ttl(self, timeout):
        """
        Return the number of seconds from now until the datetime 'timeout'.
        BUG FIX: the original used timedelta.seconds, which silently drops
        the days component (wraps at 24 hours); fold days in explicitly.
        """
        delta = timeout - datetime.datetime.now()
        return int(delta.days * 86400 + delta.seconds)

    def _validate_key(self, key):
        """
        Internal method for key validation. This can be used by a superclass
        to introduce more checks on key names.
        Args:
            key: Key name to check
        Returns True if key is valid, otherwise raises KeyError.
        """
        if key is None:
            raise KeyError
        return True

    def _validate_value(self, value):
        """
        Internal method for value validation. This can be used by a superclass
        to introduce more checks on values.
        Args:
            value: value to check
        Returns True if value is valid, otherwise raises ValueError.
        """
        if value is None:
            raise ValueError
        return True

    def _validate_timeout(self, timeout):
        """
        Internal method to validate timeouts. If no timeout
        is passed, then the default_timeout is used.
        Args:
            timeout: datetime.datetime, or an int number of seconds from now
        Returns the timeout as a datetime.datetime
        Raises TypeError for other types, ValueError for past times.
        """
        if timeout is None:
            timeout = datetime.datetime.now() + \
                datetime.timedelta(seconds=self.default_timeout)
        if isinstance(timeout, int):
            timeout = datetime.datetime.now() + \
                datetime.timedelta(seconds=timeout)
        if not isinstance(timeout, datetime.datetime):
            raise TypeError
        if timeout < datetime.datetime.now():
            raise ValueError
        return timeout

    def add(self, key = None, value = None, timeout = None):
        """
        Adds an entry to the cache, if one does not already exist. If the key
        already exists, KeyError will be raised.
        Args:
            key: Key name of the cache object
            value: Value of the cache object
            timeout: timeout value for the cache object.
        Returns the cache object.
        """
        self._validate_key(key)
        self._validate_value(value)
        timeout = self._validate_timeout(timeout)
        if key in self:
            raise KeyError
        cacheEntry = _AppEngineUtilities_Cache()
        cacheEntry.cachekey = key
        cacheEntry.value = pickle.dumps(value)
        cacheEntry.timeout = timeout
        # try to put the entry, if it fails silently pass
        # failures may happen due to timeouts, the datastore being read
        # only for maintenance or other applications. However, cache
        # not being able to write to the datastore should not
        # break the application
        try:
            cacheEntry.put()
        except:
            pass
        memcache.set('cache-%s' % (key), value, self._memcache_ttl(timeout))
        if 'AEU_Events' in sys.modules['__main__'].__dict__:
            sys.modules['__main__'].AEU_Events.fire_event('cacheAdded')
        return self.get(key)

    def set(self, key = None, value = None, timeout = None):
        """
        Sets an entry to the cache, overwriting an existing value
        if one already exists.
        Args:
            key: Key name of the cache object
            value: Value of the cache object
            timeout: timeout value for the cache object.
        Returns the value that was set.
        """
        self._validate_key(key)
        self._validate_value(value)
        timeout = self._validate_timeout(timeout)
        # Reuse the existing datastore entity when present.
        cacheEntry = self._read(key)
        if not cacheEntry:
            cacheEntry = _AppEngineUtilities_Cache()
            cacheEntry.cachekey = key
        cacheEntry.value = pickle.dumps(value)
        cacheEntry.timeout = timeout
        # Datastore write is best-effort (see add()).
        try:
            cacheEntry.put()
        except:
            pass
        memcache.set('cache-%s' % (key), value, self._memcache_ttl(timeout))
        if 'AEU_Events' in sys.modules['__main__'].__dict__:
            sys.modules['__main__'].AEU_Events.fire_event('cacheSet')
        return value

    def _read(self, key = None):
        """
        _read is an internal method that will get the cache entry directly
        from the datastore, and return the entity. This is used for datastore
        maintenance within the class.
        Args:
            key: The key to retrieve
        Returns the cache entity, or None when absent/expired.
        """
        query = _AppEngineUtilities_Cache.all()
        query.filter('cachekey', key)
        query.filter('timeout > ', datetime.datetime.now())
        results = query.fetch(1)
        # BUG FIX: was "len(results) is 0" -- identity comparison on an int.
        if len(results) == 0:
            return None
        if 'AEU_Events' in sys.modules['__main__'].__dict__:
            sys.modules['__main__'].AEU_Events.fire_event('cacheReadFromDatastore')
        if 'AEU_Events' in sys.modules['__main__'].__dict__:
            sys.modules['__main__'].AEU_Events.fire_event('cacheRead')
        return results[0]

    def delete(self, key = None):
        """
        Deletes a cache object.
        Args:
            key: The key of the cache object to delete.
        Returns True.
        """
        memcache.delete('cache-%s' % (key))
        result = self._read(key)
        if result:
            if 'AEU_Events' in sys.modules['__main__'].__dict__:
                sys.modules['__main__'].AEU_Events.fire_event('cacheDeleted')
            result.delete()
        return True

    def get(self, key):
        """
        Used to return the cache value associated with the key passed.
        Args:
            key: The key of the value to retrieve.
        Returns the value of the cache item.
        Raises KeyError when the key is absent or expired.
        """
        # NOTE(review): a falsy cached value (0, '', []) fails this truth
        # test and falls through to the datastore -- confirm acceptable.
        mc = memcache.get('cache-%s' % (key))
        if mc:
            if 'AEU_Events' in sys.modules['__main__'].__dict__:
                sys.modules['__main__'].AEU_Events.fire_event('cacheReadFromMemcache')
            if 'AEU_Events' in sys.modules['__main__'].__dict__:
                sys.modules['__main__'].AEU_Events.fire_event('cacheRead')
            return mc
        result = self._read(key)
        if result:
            # Re-populate memcache from the datastore hit.
            memcache.set('cache-%s' % (key), pickle.loads(result.value),
                self._memcache_ttl(result.timeout))
            if 'AEU_Events' in sys.modules['__main__'].__dict__:
                sys.modules['__main__'].AEU_Events.fire_event('cacheRead')
            return pickle.loads(result.value)
        else:
            raise KeyError

    def get_many(self, keys):
        """
        Returns a dict mapping each key in keys to its value. If the given
        key is missing, it will be missing from the response dict.
        Args:
            keys: A list of keys to retrieve.
        Returns a dictionary of key/value pairs.
        """
        # BUG FIX: get() raises KeyError for missing keys (it never returns
        # None), so the original "if value is not None" guard let the
        # exception propagate instead of omitting the key as documented.
        found = {}
        for key in keys:
            try:
                found[key] = self.get(key)
            except KeyError:
                pass
        return found

    def __getitem__(self, key):
        """
        __getitem__ is necessary for this object to emulate a container.
        """
        return self.get(key)

    def __setitem__(self, key, value):
        """
        __setitem__ is necessary for this object to emulate a container.
        """
        return self.set(key, value)

    def __delitem__(self, key):
        """
        Implement the 'del' keyword
        """
        return self.delete(key)

    def __contains__(self, key):
        """
        Implements "in" operator
        """
        try:
            self.__getitem__(key)
        except KeyError:
            return False
        return True

    def has_key(self, keyname):
        """
        Equivalent to k in a, use that form in new code
        """
        return self.__contains__(keyname)
| Python |
import e32dbm

# Tiny demo of the Symbian (PyS60) e32dbm persistent key/value module:
# write two values, close, reopen read-only and echo them back.
DB_FILE = u"E:\\flyingonwheel.db"

def write_db():
    # "cf" open flags -- presumably c = create if missing, f = fast mode;
    # confirm against the e32dbm documentation.
    db = e32dbm.open(DB_FILE, "cf")
    db[u"name"] = u"musli"
    db[u"password"] = u"my secret"
    db.close()

def read_db():
    # Reopen read-only and print the stored values to the console.
    db = e32dbm.open(DB_FILE, "r")
    name = db[u"name"]
    password = db[u"password"]
    print name
    print password
    db.close()

print "Writing db.."
write_db()
print "Reading db.."
read_db()
| Python |
import e32
import appuifw
import dir_iter
import pyS60uiutil
import os
import os.path
import sys
import re
import socket
class pyFileManagerS60:
"""
File Manager written in Python for S60 phones.
Can be used as a standalone application or as a part of another program.
"""
def __init__(self, title='pyFileManager for S60', my_real_full_name=None):
    """
    Set up file-manager state.
    Args:
        title: caption for the application title pane.
        my_real_full_name: full name of the hosting application;
            defaults to appuifw.app.full_name().
    """
    self.my_title = unicode(title)
    if my_real_full_name == None:
        self.my_real_full_name = appuifw.app.full_name()
    else:
        self.my_real_full_name = my_real_full_name
    self.screen_size = 'normal'
    # State for the Find feature (see __find/__match_files).
    self.last_find = u'*.jpg'   # last wildcard pattern entered by the user
    self.rexp = None            # compiled regexp built from the pattern
    self.matches = []           # full paths of matching files
    self.nr_of_matches = 0      # running count of matches
#=
#= Actions menu and corresponding callback functions.
#=
def __set_actions_menu(self):
    """
    Build the Actions menu as a list of (label, callback) pairs.
    'Refresh dir' deliberately maps to a no-op callback.
    """
    menu = []
    menu.append((u'Send', self.__sendfile))
    menu.append((u'Open', self.__open_with_app))
    menu.append((u'Rename', self.__rename))
    menu.append((u'Copy', self.__copy))
    menu.append((u'Move', self.__move))
    menu.append((u'Create dir', self.__create_dir))
    menu.append((u'Delete', self.__delete))
    menu.append((u'Refresh dir', lambda x: None))
    return menu
def __open_with_app(self, selection):
    """
    Opens a file with associated application. For example, JPEG image
    is shown with an image viewer application.
    If the file is a Python script it will be executed but the execution
    is not handled automatically by the system.
    Args:
        selection: (directory, filename) tuple.
    """
    full_name = os.path.join(selection[0], selection[1])
    if not os.path.isfile(full_name):
        appuifw.note(u'Not a regular file!', 'error')
        return
    # Python scripts are executed in-process instead of being handed
    # to the system content handler.
    if os.path.splitext(selection[1])[1].lower() == '.py':
        execfile(full_name, globals())
        return
    # If the file is a text file (extension 'txt') and pyEdit has been
    # installed, the user prompted to select either pyEdit or the default
    # system text editor.
    #
    #if os.path.splitext(selection[1])[1].lower() == '.txt':
    #    execfile(full_name, globals())
    #    return
    # How to indicate the document to the pyEdit?
    #
    try:
        appuifw.Content_handler().open(full_name)
    except:
        # Surface the failure to the user instead of crashing the UI.
        type, value = sys.exc_info() [:2]
        appuifw.note(unicode(str(type)+'\n'+str(value)), 'error')
def __ask_non_existing_file_name(self, path, file):
    """
    Prompts for a new file as long as the user types a file name
    that does not exist in the directory 'path' or cancels the operation.
    If the suggested file name 'file' does not exist no prompt is shown.
    Returns the full target path, or None when the user cancels or an
    error occurs.
    """
    msg = u'File with the same name exists. Give a new name!'
    ok = False
    try:
        target = os.path.join(path, file)
        new_name = unicode(file)
        while ok == False:
            if os.path.exists(target):
                new_name = appuifw.query(msg, 'text', new_name)
                # Cancelled dialog returns None.
                if new_name == None: return None
                target = os.path.join(path, new_name)
            else: ok = True
        return target
    except:
        # Report the error (e.g. a bad path) and treat it as a cancel.
        type, value = sys.exc_info() [:2]
        appuifw.note(unicode(str(type)+'\n'+str(value)), 'error')
        return None
def __rename(self, selection):
    """
    Renames the file in the current directory.
    """
    source = os.path.join(selection[0], selection[1])
    new_name = appuifw.query(u'New name', 'text', unicode(selection[1]))
    # NOTE(review): if the query is cancelled new_name is None and the
    # helper joins None into a path; its except clause catches the
    # resulting error and returns None -- confirm acceptable.
    target = self.__ask_non_existing_file_name(selection[0], new_name)
    if target == None:
        return
    try:
        os.rename(source, target)
    except:
        type, value = sys.exc_info() [:2]
        appuifw.note(unicode(str(type)+'\n'+str(value)), 'error')
def __move(self, source):
    """
    Moves a file from a directory to another directory or to the
    original directory with a new name.
    """
    if not self.screen_size == 'normal':
        #= Inform user in the case the title is not visible.
        appuifw.note(u'Select dest dir', 'info')
    target = self.fb.select(temp_title='Select dest dir')
    if target == None:
        return
    s = os.path.join(source[0], source[1])
    t = self.__ask_non_existing_file_name(target[0], source[1])
    if t == None:
        return
    try:
        # Move is implemented as a rename to the destination path.
        os.rename(s, t)
    except:
        type, value = sys.exc_info() [:2]
        appuifw.note(unicode(str(type)+'\n'+str(value)), 'error')
def __copy(self, source):
    """
    Copies a file from a directory to another directory or to the
    original directory with a new name.
    """
    if not os.path.isfile(os.path.join(source[0], source[1])):
        appuifw.note(u'Not a regular file! Cannot copy.', 'error')
        return
    if not self.screen_size == 'normal':
        #= Inform user in the case the title is not visible.
        appuifw.note(u'Select dest dir', 'info')
    target = self.fb.select(temp_title='Select dest dir')
    if target == None:
        return
    s = os.path.join(source[0], source[1])
    t = self.__ask_non_existing_file_name(target[0], source[1])
    if t == None:
        return
    try:
        # e32.file_copy argument order appears to be (target, source)
        # -- confirm against the PyS60 e32 documentation.
        e32.file_copy(t, s)
    except:
        type, value = sys.exc_info() [:2]
        appuifw.note(unicode(str(type)+'\n'+str(value)), 'error')
def __create_dir(self, selection):
    """
    Creates a directory inside the currently selected directory.
    """
    dir_name = appuifw.query(u'Directory name', 'text')
    # Cancelled dialog returns None.
    if dir_name == None:
        return
    full_dir = os.path.join(selection[0], dir_name)
    try:
        os.mkdir(full_dir)
    except:
        type, value = sys.exc_info() [:2]
        appuifw.note(unicode(str(type)+'\n'+str(value)), 'error')
def __delete(self, selection):
    """
    Removes regular files and directories, after user confirmation.
    """
    full_name = os.path.join(selection[0], selection[1])
    q = unicode('Remove ' + full_name + '?')
    cnf = appuifw.query(q, 'query')
    if not cnf:
        return
    try:
        # rmdir only removes empty directories; errors are reported below.
        if os.path.isdir(full_name):
            os.rmdir(full_name)
        else:
            os.remove(full_name)
    except:
        type, value = sys.exc_info() [:2]
        appuifw.note(unicode(str(type)+'\n'+str(value)), 'error')
def __readfile(self, selection):
    """
    Return the raw contents of the selected file.
    Args:
        selection: (directory, filename) tuple.
    Returns the file's bytes; propagates IOError on failure.
    """
    fname = os.path.join(selection[0], selection[1])
    # open() replaces the deprecated file() builtin; try/finally
    # guarantees the handle is closed even if read() raises.
    mfile = open(fname, 'rb')
    try:
        return mfile.read()
    finally:
        mfile.close()
def __sendfile(self, selection):
    """
    Send file via BlueTooth, FTP, or email.
    Only the Bluetooth (OBEX) path is functional; the method is
    hard-wired to it below. The FTP branch is unfinished and the
    email branch is empty.
    """
    comm_method = 'bluetooth'
    if comm_method == None: return
    elif comm_method == 'bluetooth':
        try:
            # Discover nearby devices/services; user picks a service.
            addr, serv = socket.bt_obex_discover()
            if addr == None or serv == None:
                appuifw.note(u'BlueTooth Discovery Error', 'error')
                return
            choises = serv.keys()
            choises.sort()
            lst = []
            for x in choises:
                lst.append(unicode(str(x)+u' #'+str(serv[x])))
            choice = appuifw.popup_menu(lst)
            if choice == None: return
            target = (addr, serv[choises[choice]])
            fname = os.path.join(selection[0], selection[1])
            socket.bt_obex_send_file(target[0], target[1], unicode(fname))
        except socket.error, detail:
            err_msg = u'BlueTooth socket error: ' + unicode(detail)
            appuifw.note(err_msg, 'error')
            return
        appuifw.note(u'File sent', 'conf')
    elif comm_method == 'ftp':
        #
        # Not ready -- this branch is currently unreachable and only
        # opens a connection to a hard-coded host; it never sends data.
        try:
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            port = socket.getservbyname('ftp', 'tcp')
            addr = socket.gethostbyname('kosh.hut.fi')
            print str(addr)
            print str(port)
            s.connect((addr, port))
        except socket.error, detail:
            appuifw.note(u'Communication socket error!\n' + \
                unicode(detail), 'error')
            return
        appuifw.note(u'File successfully sent', 'conf')
    elif comm_method == 'email':
        # to do
        pass
#=
#= Options menu and corresponding callback functions.
#=
def __set_options_menu(self):
    """
    Build the Options menu shown by the directory browser:
    a list of (label, callback) pairs.
    """
    menu = [
        (u'Current path', self.__show_current_path),
        (u'Find', self.__find),
        (u'Screen size', self.__switch_screen),
        (u'Help', self.__help),
        (u'About', self.__about),
    ]
    return menu
def __show_current_path(self):
    """
    Show the directory the browser is currently in, one path
    component per line, in a file viewer dialog.
    """
    s = self.fb.reveal_current_location()
    if s == None:
        # No browsing active: report and bail out. BUGFIX: without
        # this return the code below fell through and crashed on
        # os.path.split(None).
        appuifw.note(u'No selection going on', 'error')
        return
    if s == ('', ''):
        # At the drive list (root) level: nothing but the separator.
        msg = '\\\n'
    else:
        # Walk the path upwards, prepending one component per line.
        s = os.path.split(s[0])
        msg = ''
        while s[1]:
            msg = s[1] + '\\\n' + msg
            s = os.path.split(s[0])
        msg = s[0] + '\n' + msg
    fv = pyS60uiutil.fileViewer('Current path')
    fv.view(msg)
def __match_files(self, arg, dirname, names):
    """
    os.path.walk() visitor: record every entry of 'names' that matches
    the pre-compiled search pattern self.rexp and count the hits.
    """
    hits = [os.path.join(dirname, n) for n in names if self.rexp.match(n)]
    self.matches.extend(hits)
    self.nr_of_matches += len(hits)
def __find(self):
    """
    Ask for a shell-style pattern, scan one or all drives for matching
    file/directory names and show the results in a viewer.
    """
    expr = appuifw.query(u'Find what?', 'text', self.last_find)
    if expr == None: return
    self.last_find = expr
    # Translate the shell-style wildcards to a regular expression.
    expr = expr.replace('*', '.*')
    expr = expr.replace('?', '.?')
    self.rexp = re.compile(expr)
    self.matches = []
    self.nr_of_matches = 0
    current_dir = dir_iter.Directory_iter(e32.drive_list())
    entries = current_dir.list_repr()
    # Let the user pick a single drive or all of them.
    drive_selection_list = [u'From all drives']
    for drv in entries:
        drive_selection_list.append(u'From '+unicode(drv[0])+u' drive')
    selected_drv = appuifw.selection_list(drive_selection_list)
    if selected_drv == None: return
    if selected_drv == 0:
        # Scanning of all the drives selected.
        index = 0
        nr_of_drives_to_scan = len(entries)
    else:
        # Only one drive to scan. Probably not the most sophisticated
        # way to handle this situation but it works ;-)
        index = selected_drv - 1
        nr_of_drives_to_scan = index + 1
    while index < nr_of_drives_to_scan:
        appuifw.note(u'Scanning drive: ' + \
                     unicode(str(entries[index][0])) + \
                     u' #' + \
                     unicode(str(index+1)) + \
                     u'/' + \
                     unicode(str(len(entries))),
                     'info')
        # __match_files() appends hits to self.matches.
        os.path.walk(entries[index][0],
                     self.__match_files,
                     None)
        index += 1
    # Show the results
    appuifw.note(u'Scanning completed!', 'info')
    scan_result = 'Number of matches: ' + str(self.nr_of_matches) + u'\n\n'
    for m in self.matches:
        scan_result += '* ' + os.path.normpath(m) + '\n'
    fv = pyS60uiutil.fileViewer('Scanning results', joystick=True)
    fv.view(scan_result)
def __switch_screen(self):
    """
    Toggle the browser between 'normal' and 'large' screen modes and
    activate the new mode immediately.
    """
    # 'normal' -> 'large'; anything else -> 'normal' (same mapping as
    # the original if/else).
    self.screen_size = {'normal': 'large'}.get(self.screen_size, 'normal')
    self.fb.change_ui_defs(screen=self.screen_size)
def __help(self):
    """
    Display the bundled help text file in a joystick-enabled viewer.
    """
    base_dir = os.path.split(self.my_real_full_name)[0]
    help_file = os.path.join(str(base_dir), 'pyfilemans60help.txt')
    viewer = pyS60uiutil.fileViewer('Help on pyFileManager', joystick=True)
    viewer.load(help_file)
    viewer.view()
def __about(self):
    """
    Display the bundled about text file in a simple viewer.
    """
    base_dir = os.path.split(self.my_real_full_name)[0]
    about_file = os.path.join(str(base_dir), 'pyfilemans60about.txt')
    viewer = pyS60uiutil.fileViewer('About pyFileManager')
    viewer.load(about_file)
    viewer.view()
#=
#= The main functionality
#=
def execute(self):
    """
    Executes the pyFileManager object: runs the select-then-act loop
    until the user presses the exit key, then restores the previous
    application GUI state.
    """
    #= Check if the currently installed pyS60uiutil module version is
    #= compatible with this version of pyFileManager.
    if pyS60uiutil.version_compatibility((0,4)) == False:
        msg ='pyS60uiutil\nversion %d.%d.%d\ntoo old' % pyS60uiutil.version
        appuifw.note(unicode(msg), 'error')
        return
    old_app_gui_data = pyS60uiutil.save_current_app_info()
    appuifw.app.title = self.my_title
    appuifw.app.t = None
    appuifw.app.body = None
    appuifw.app.exit_key_handler = None
    actions_menu = self.__set_actions_menu()
    # Labels only, in menu order, for the action popup menu.
    actions_menul = []
    for i in actions_menu:
        actions_menul.append(i[0])
    self.fb = pyS60uiutil.dirBrowser('Select file',
                                     self.__set_options_menu())
    defpath = None
    refresh = False
    while 1:
        ix = None
        #= Always use the previous path as a default path
        sel = self.fb.select(defpath, False, refresh)
        if sel == None: #= Means the user has pressed exit key
            break
        if sel[0] == '': #= At root level (drive list) -> no actions
            continue
        ix = appuifw.popup_menu(actions_menul)
        if not ix == None:
            func_name = (actions_menu[ix])[0]
            if sel[1] == '':
                #= Check whether action is allowed in an empty directory
                if not ((func_name == u'Create dir') or \
                        (func_name == u'Refresh dir')):
                    appuifw.note(u'Nothing selected', 'error')
                    refresh = False
                    continue
            ((actions_menu[ix])[1])(sel) #= Selected action executed here!
            if (func_name == u'Open') or (func_name == u'Send'):
                #= After these actions it's not necessary to refresh the
                #= directory listing since it is likely up2date.
                defpath = None
                refresh = False
            elif (func_name == u'Copy') or (func_name == u'Move'):
                #= BUGFIX: this was a separate 'if' whose 'else' branch
                #= always overrode the Open/Send case above with
                #= refresh = True; 'elif' keeps the cases exclusive.
                defpath = sel[0]
                refresh = True
            else:
                defpath = None
                refresh = True
        else:
            # Action cancelled.
            pass
    pyS60uiutil.restore_app_info(old_app_gui_data)
#=
#= Used as a standalone application.
#=
if __name__ == '__main__':
    # Instantiate with default settings and hand control to the
    # interactive selection loop.
    fm = pyFileManagerS60()
    fm.execute()
| Python |
#= pyS60uiutil.py - a collection of miscellaneous UI dialogs and utility
#= functions for PyS60.
#= Copyright (C) 2005 - 2006 Lasse Huovinen
#=
#= This library is free software; you can redistribute it and/or
#= modify it under the terms of the GNU Lesser General Public
#= License as published by the Free Software Foundation; either
#= version 2.1 of the License, or (at your option) any later version.
#=
#= This library is distributed in the hope that it will be useful,
#= but WITHOUT ANY WARRANTY; without even the implied warranty of
#= MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
#= Lesser General Public License for more details.
#=
#= You should have received a copy of the GNU Lesser General Public
#= License along with this library; if not, write to the Free Software
#= Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import os
import appuifw
import e32
import dir_iter
from key_codes import \
EKeyLeftArrow, EKeyRightArrow, EKeyUpArrow, EKeyDownArrow, EKeyDevice3, \
EKey0, EKey2, EKey5, EKey8
#=
#= The current version number of the pyS60uiutil module.
#=
version = (0,4,0)
def version_compatibility(required_version):
    """
    Check whether this pyS60uiutil module is recent enough for an
    application that requires at least required_version. Only the
    (major, minor) pair is compared; the third element is the bug-fix
    level and never affects compatibility. Returns True when the
    module version is new enough, otherwise False.
    """
    # Tuple comparison on the two most significant elements replicates
    # the major-then-minor check.
    return required_version[:2] <= version[:2]
#=
#= Functions to save and restore application GUI data.
#=
def save_current_app_info():
    """
    Capture the current application GUI state as a list, in the fixed
    order: title, body, menu, exit_key_handler, screen, t. The result
    is meant to be passed to restore_app_info() later.
    """
    app = appuifw.app
    info = [app.title, app.body, app.menu, app.exit_key_handler,
            app.screen]
    #= The attributes above seem to be present always but
    #= appuifw.app.t is sometimes missing.
    try:
        info.append(app.t)
    except AttributeError:
        info.append(None)
    return info
def restore_app_info(app_info):
    """
    Restore application GUI state previously captured by
    save_current_app_info(). The attributes are written back in the
    same order they were saved.
    """
    attrs = ('title', 'body', 'menu', 'exit_key_handler', 'screen', 't')
    for name, value in zip(attrs, app_info):
        setattr(appuifw.app, name, value)
#=
#= Directory browser
#=
class dirBrowser:
    """
    Browse the S60 phone directory structure interactively using joystick and
    select a file and/or directory. The user may also define an options
    menu with callback functions.
    Joystick events:
        up    - move up one item in the current listing
        down  - move down one item in the current listing
        left  - go to the parent directory
        right - open currently highlighted item if it is directory
    Keyboard events:
        0 - go to the root directory
        2 - go to top of the current listing
        5 - go to middle of the current listing
        8 - go to bottom of the current listing
    """
    def __init__(self, title='Select', menu=None, screen='normal'):
        """
        title: shown in the application title pane while selecting.
        menu: optional list of (label, callback) pairs for the options menu.
        screen: appuifw screen mode used while browsing.
        """
        self.my_title = unicode(title)
        self.script_lock = e32.Ao_lock()
        # One listbox index per directory level entered; used to restore
        # focus when navigating back towards the root.
        self.dir_stack = []
        self.current_dir = dir_iter.Directory_iter(e32.drive_list())
        self.entries = [] #= List of entries in current directory. Used to
                          #= speed up listing as list_repr() reads everything
                          #= from the disk which seems to be slow.
        self.current_index = 0
        self.dir_empty = False
        self.selection = None
        self.user_menu = menu
        self.screen_mode = screen
        # True only while a select() call is in progress.
        self.selection_going_on = False
    def change_ui_defs(self, immediate_activation=True,
                       title=None, menu=None, screen=None):
        """
        Initially defined UI parameters may be changed through this method.
        They may be activated immediately (provided that browsing is going on)
        or for the next selection round.
        """
        if not title == None:
            self.my_title = unicode(title)
        if not menu == None:
            self.user_menu = menu
        if not screen == None:
            self.screen_mode = screen
        if self.selection_going_on and immediate_activation:
            if not title == None: appuifw.app.title = self.my_title
            if not menu == None: appuifw.app.menu = self.user_menu
            if not screen == None: appuifw.app.screen = self.screen_mode
    def reveal_current_location(self):
        """
        Returns the current directory and selected directory item provided
        that file browsing is currently active, otherwise None is returned.
        """
        if self.selection_going_on == True:
            # At the drive list level there is no path yet.
            if self.current_dir.at_root: return ('', '')
            if self.dir_empty == False:
                return os.path.split(self.current_dir.entry\
                       (self.lbox.current()))
            else: return (self.current_dir.name(), '')
        else: return None
    def release_select(self):
        """
        If there's an active selection going on it can be released using
        this method.
        """
        self.__exit_key_handler()
    def select(self, default_path=None, reset_index=False,
               refresh_listing=True, temp_title=None):
        """
        The selection will return a tuple containing the selected directory
        and file item. The selected file item may also be a directory and
        either part may be empty.
        If the user does not select anything (presses the exit key)
        the return value will be None.
        If the default_path is not given the path from the previous selection
        will be used. In this case the last selected item in the listing will
        be set active unless the user wishes differently (reset_index).
        The content of the directory will be re-read but for the speed
        optimization purposes this is optional (refresh_listing).
        The argument temp_title defines the title for a particular selection
        round.
        """
        #print 'select(' + str(default_path) + ',' + str(reset_index) + ',' +\
        #      str(refresh_listing) + ')'
        self.__selecting(temp_title)
        if (not default_path == None) or (self.entries == []):
            #= If default path is given or the entries list is empty then lets
            #= refresh the directory content no matter what the user wishes
            #= to do.
            refresh_listing = True
            self.current_index = 0
            self.dir_stack = []
            self.current_dir = dir_iter.Directory_iter(e32.drive_list())
            self.dir_empty = False
        else:
            if reset_index: self.current_index = 0
        #print 'dp: ' + str(default_path) + ' ix: ' + str(self.current_index)
        if refresh_listing:
            self.__open_default_directory(default_path)
        self.lbox = appuifw.Listbox(self.entries, self.__process_user_evt)
        self.lbox.set_list(self.entries, self.current_index)
        # Joystick and numeric shortcut bindings; each routes through the
        # single event handler with a direction keyword.
        self.lbox.bind(EKeyLeftArrow, lambda: self.__process_user_evt('back'))
        self.lbox.bind(EKeyRightArrow, lambda: self.__process_user_evt('next'))
        self.lbox.bind(EKey0, lambda: self.__process_user_evt('rootdir'))
        self.lbox.bind(EKey2, lambda: self.__process_user_evt('top'))
        self.lbox.bind(EKey5, lambda: self.__process_user_evt('middle'))
        self.lbox.bind(EKey8, lambda: self.__process_user_evt('bottom'))
        appuifw.app.body = self.lbox
        # Block here until __process_user_evt() or the exit key handler
        # signals that a decision has been made.
        self.script_lock.wait()
        self.__selected()
        return self.selection
    def __process_user_evt(self, dir=None):
        """
        Central listbox event handler. dir is None for a direct item
        selection, or one of 'back', 'next', 'top', 'middle', 'bottom',
        'rootdir' for a navigation event.
        """
        if self.dir_empty: index = 0
        else: index = self.lbox.current()
        if dir == None:
            #= The user has made up her mind...
            #= Note: if at the root level, return tuple of empty strings
            focused_item = 0
            if self.current_dir.at_root:
                self.selection = ('', '')
            else:
                if self.dir_empty == False:
                    self.selection = \
                        os.path.split(self.current_dir.entry(index))
                else:
                    self.selection = (self.current_dir.name(), '')
            #= Save dir index for the next selection
            self.current_index = index
            self.script_lock.signal()
            return
        elif dir == 'back':
            #= Go to the parent directory unless already in the root directory.
            if not self.current_dir.at_root:
                focused_item = self.dir_stack.pop()
                self.current_dir.pop()
            else:
                focused_item = index
            self.__read_dir_content()
        elif dir == 'next':
            #= Go to the focused sub-directory provided it's a directory.
            if self.current_dir.at_root:
                self.dir_stack.append(index)
                self.current_dir.add(index)
            elif self.dir_empty: #= No effect
                return
            elif os.path.isdir(self.current_dir.entry(index)):
                self.dir_stack.append(index)
                self.current_dir.add(index)
            else: #= Item not a directory -> no effect
                return
            focused_item = 0
            self.__read_dir_content()
        elif dir == 'top': focused_item = 0
        elif dir == 'middle': focused_item = len(self.entries) / 2
        elif dir == 'bottom': focused_item = len(self.entries) - 1
        elif dir == 'rootdir':
            # Jump straight back to the drive list.
            self.dir_stack = []
            self.current_dir = dir_iter.Directory_iter(e32.drive_list())
            self.__read_dir_content()
            focused_item = 0
        else: #= Would be a bug
            self.__internal_bug_handler('C')
            return
        self.lbox.set_list(self.entries, focused_item)
    def __read_dir_content(self):
        """
        Reads content of current directory and checks its emptiness.
        """
        self.entries = self.current_dir.list_repr()
        if self.entries == []:
            # A Listbox cannot show an empty list; use a placeholder row.
            self.entries.insert(0, (u'<empty>', u''))
            self.dir_empty = True
        else:
            self.dir_empty = False
        # Clamp the remembered focus index to the new listing length.
        if len(self.entries) <= self.current_index:
            self.current_index = len(self.entries) - 1
    def __open_default_directory(self, dir=None):
        """
        Parse and set the default directory. If the default directory is not
        given, it does not exist, or there are problems parsing the given
        path then default to the drive selection.
        """
        if dir == None:
            self.__read_dir_content()
            #print 'retA'
            return
        dir = os.path.normpath(dir)
        if not os.path.isdir(dir):
            #= Should raise an exception!
            raise ValueError, 'Given default path not a directory'
            #print 'ERROR: Given default path not a directory'
            #print 'retB'
            return
        drv, path = os.path.splitdrive(dir)
        #print 'dir ' + str(dir) + ' -> '
        #print 'drv ' + str(drv)
        #print 'path ' + str(path)
        # Descend one component at a time: first the drive, then each
        # path element, refreshing the listing at every level.
        self.__read_dir_content()
        index = self.__get_index(drv)
        if index == None: #= Would be a bug!
            self.__internal_bug_handler('A')
            return
        self.dir_stack.append(index)
        self.current_dir.add(index)
        self.__read_dir_content()
        for item in path.split('\\'):
            if item == u'': continue
            # Directories are listed as '[name]' in list_repr() output.
            index = self.__get_index(u'['+item+u']')
            if index == None: #= Would be a bug!
                self.__internal_bug_handler('B')
                return
            self.dir_stack.append(index)
            self.current_dir.add(index)
            self.__read_dir_content()
        #print 'retE'
        return
    def __get_index(self, item):
        """
        Find the directory list index of a file or directory name in
        the current directory.
        """
        index = 0
        for entry in self.entries:
            if entry[0] == item: return index
            index += 1
        return None
    def __exit_key_handler(self):
        #= Save the last index for the next selection
        self.selection_going_on = False
        self.current_index = self.lbox.current()
        self.script_lock.signal()
    def __selecting(self, temp_title=None):
        """
        Save the GUI parameters of the 'parent program' and set the new ones.
        """
        self.old_app = save_current_app_info()
        self.selection_going_on = True
        if temp_title == None:
            appuifw.app.title = self.my_title
        else:
            appuifw.app.title = unicode(temp_title)
        appuifw.app.screen = self.screen_mode
        if self.user_menu == None:
            appuifw.app.menu = []
        else:
            appuifw.app.menu = self.user_menu
        appuifw.app.exit_key_handler = self.__exit_key_handler
        self.selection = None #= For new selection
    def __selected(self):
        """
        Restore the GUI parameters of the 'parent program'.
        """
        self.selection_going_on = False
        restore_app_info(self.old_app)
        self.lbox = None
    def __internal_bug_handler(self, place):
        """
        Reports an internal bug in directory list handling and tries to
        take actions such that normal operation could continue.
        """
        print 'Internal bug encountered at ' + str(place) + \
              '. Setting default directory to root.'
        self.dir_stack = []
        self.current_dir = dir_iter.Directory_iter(e32.drive_list())
        self.__read_dir_content()
#=
#= File viewer
#=
class fileViewer:
    """
    Show a long message on the screen. The message may be given as a
    parameter or it can be read from a file.
    """
    def __init__(self, title=None, font=None, color=None, joystick=False,
                 screen='normal'):
        """
        title/font/color: optional presentation settings for the text.
        joystick: if True, enable cursor jumping with the joystick.
        screen: appuifw screen mode used while viewing.
        """
        if title: self.my_title = unicode(title)
        else: self.my_title = None
        if font: self.my_font = unicode(font)
        else: self.my_font = None
        self.my_color = color
        self.script_lock = e32.Ao_lock()
        self.loaded_text = None
        self.use_joystick = joystick
        self.screen_mode = screen
    def __exit_key_handler(self):
        # Viewer closed by the user: release the waiting view() call.
        appuifw.app.exit_key_handler = None # Not needed
        self.script_lock.signal()
    def __set_new_gui(self):
        """
        Save the GUI parameters of the 'parent program' and set the new ones.
        """
        self.old_app = save_current_app_info()
        if self.my_title:
            appuifw.app.title = self.my_title
        appuifw.app.screen = self.screen_mode
        appuifw.app.t = appuifw.Text(u'')
        if self.use_joystick:
            # Attach the joystick cursor-jumping helper to the text control.
            self.stick = jumpTextCursorWithJoystick(appuifw.app.t)
        if self.my_font:
            appuifw.app.t.font = self.my_font
        if self.my_color:
            appuifw.app.t.color = self.my_color
        appuifw.app.body = appuifw.app.t
        appuifw.app.menu = []
        appuifw.app.exit_key_handler = self.__exit_key_handler
    def __read_file(self, file_name):
        # Reads the whole file into memory and decodes it as Latin-1.
        mfile = file(file_name, 'r')
        msg = mfile.read() # Note: reads everything!
        mfile.close()
        return msg.decode('iso-8859-1')
    def load(self, file_name=None):
        """
        Load the message to be viewed from a file.
        """
        if file_name == None: return
        fname = os.path.normpath(file_name)
        if not os.path.isfile(fname):
            # Should raise an exception!
            print 'ERROR: Given file ' + str(file_name) + ' not a regular file'
            return
        try:
            self.loaded_text = self.__read_file(fname)
        # The exception should be catched by the main app!!!
        except UnicodeError, detail:
            appuifw.note(u'Error while reading!\n' + unicode(detail),
                         'error')
        #except:
        #    appuifw.note(u'Error while reading!', 'error')
    def view(self, text=None):
        """
        View the content of the selected file or given text string.
        """
        if not text == None:
            # Message to be shown given as a parameter.
            txt = unicode(text)
        elif self.loaded_text == None:
            # No message to show.
            return
        else:
            # Message loaded from a file.
            txt = self.loaded_text
        self.__set_new_gui()
        appuifw.app.t.set(txt)
        appuifw.app.t.set_pos(0)
        # Block until the user dismisses the viewer, then restore the
        # previous application GUI state.
        self.script_lock.wait()
        restore_app_info(self.old_app)
#=
#= A dialog for font selection (does not work with PyS60 1.0.0)
#=
class fontSelectionDlg:
    """
    Let the user select a font amongst the fonts supported by the device.
    """
    def __init__(self, title=None, screen='normal'):
        """
        title: optional dialog title; defaults to u'Select font'.
        screen: appuifw screen mode used while selecting.
        """
        if title: self.my_title = unicode(title)
        else: self.my_title = u'Select font'
        self.script_lock = e32.Ao_lock()
        self.font = None
        self.screen_mode = screen
    def __show_font_test(self):
        # Preview the currently highlighted font in a text viewer.
        font = appuifw.available_fonts()[self.lbox.current()]
        fv = fileViewer(None, font)
        fv.view(u'This text uses font ' + font)
    def __exit_key_handler(self):
        # Cancelled: no font selected.
        self.font = None
        appuifw.app.exit_key_handler = None # Not needed
        self.script_lock.signal()
    def __selection_handler(self):
        # Store the highlighted font and release the waiting select().
        self.font = appuifw.available_fonts()[self.lbox.current()]
        appuifw.app.exit_key_handler = None # Not needed
        self.script_lock.signal()
    def select(self):
        """
        Dialog for font selection. The font name is returned as unicoded
        string. If no selection was made, None is returned.
        """
        self.old_app = save_current_app_info()
        appuifw.app.title = self.my_title
        appuifw.app.screen = self.screen_mode
        appuifw.app.exit_key_handler = self.__exit_key_handler
        appuifw.app.menu = [(u'Test', self.__show_font_test)]
        self.lbox = appuifw.Listbox(appuifw.available_fonts(),
                                    self.__selection_handler)
        appuifw.app.body = self.lbox
        # Block until a font is chosen or the dialog is cancelled.
        self.script_lock.wait()
        restore_app_info(self.old_app)
        return self.font
#=
#= A dialog for font color selection (does not work with PyS60 1.0.0)
#=
class fontColorSelectionDlg:
    """
    Let the user select the font color from predefined list or make custom
    selection. predef_colors may contain list of color names and their
    respective RGB values:
    predef_colors = [(u'color name 1', (r1, g1, b1)), \
                     (u'color name 2', (r2, g2, b2)), \
                     ... \
                     (u'color name N', (rN, gN, bN)), \
                     (u'Custom', (r, g, b))]
    custom_index = N #= not N+1
    """
    def __init__(self, title=None, predef_colors=None, custom_index=None,
                 screen='normal'):
        """
        title: optional dialog title; defaults to u'Select font color'.
        predef_colors/custom_index: see the class docstring.
        screen: appuifw screen mode used while selecting.
        """
        if title: self.my_title = unicode(title)
        else: self.my_title = u'Select font color'
        self.screen_mode = screen
        self.predef_colors = predef_colors
        # Index of the 'Custom' entry within predef_colors (if any).
        self.custom_index = custom_index
        if self.predef_colors:
            # The listbox shows only the color names.
            self.color_menu = []
            for c in self.predef_colors:
                self.color_menu.append(c[0])
        else:
            self.color_menu = None
        self.initial_custom_color = None
        self.form = None
        self.color = None
        self.script_lock = e32.Ao_lock()
    def __custom_show_font_test(self):
        # Preview the current custom RGB color in a text viewer.
        if self.color == None:
            return
        fv = fileViewer(None, None, self.color)
        fv.view(u'This text uses font color ' + unicode(str(self.color)))
    def __custom_done(self):
        # Custom editing finished: release the waiting select() call.
        appuifw.app.exit_key_handler = None
        self.script_lock.signal()
    def __custom_selection_handler(self):
        # Edit the highlighted R/G/B component via a number query.
        ix = self.lbox.current()
        label = ['Red', 'Green', 'Blue'][ix] + ' component (0-255)'
        color_lst = []
        for i in range(3): color_lst.append(self.color[i])
        old_val = color_lst[ix]
        new_val = 0xffff
        #= The values of the RGB components must belong to range [0-255].
        while (not new_val == None) and ((new_val < 0) or (new_val > 255)):
            new_val = appuifw.query(unicode(label), 'number', old_val)
        if not new_val == None:
            color_lst[ix] = new_val
            self.color = (color_lst[0], color_lst[1], color_lst[2])
        self.__custom_set_rgb_list(ix)
    def __custom_set_rgb_list(self, curr_ix=None):
        # Refresh the R/G/B rows, keeping focus on curr_ix when given.
        color_menu = [(u'Red', unicode(self.color[0])), \
                      (u'Green', unicode(self.color[1])), \
                      (u'Blue' , unicode(self.color[2])) ]
        if curr_ix:
            self.lbox.set_list(color_menu, curr_ix)
        else:
            self.lbox.set_list(color_menu)
    def __custom_selection(self):
        # Switch the dialog into custom RGB editing mode.
        appuifw.app.menu = [(u'Test', self.__custom_show_font_test), \
                            (u'Done', self.__custom_done) ]
        if self.initial_custom_color:
            self.color = self.initial_custom_color
        elif self.predef_colors and self.custom_index:
            self.color = self.predef_colors[self.custom_index][1]
        else:
            #= If initial color is not provided then default to black.
            self.color = (0, 0, 0)
        # Placeholder content; replaced immediately by the RGB rows below.
        self.lbox = appuifw.Listbox([(u'foo', u'bar')], \
                                    self.__custom_selection_handler)
        self.__custom_set_rgb_list()
        appuifw.app.body = self.lbox
    def __predef_show_font_test(self):
        # Preview the highlighted predefined color ('Custom' has no
        # fixed color to preview).
        ix = self.lbox.current()
        if ix == self.custom_index:
            return
        color = self.predef_colors[ix][1]
        fv = fileViewer(None, None, color)
        fv.view(u'This text uses font color ' + unicode(str(color)))
    def __predef_selection_handler(self):
        ix = self.lbox.current()
        if ix == self.custom_index:
            # 'Custom' entry chosen: switch to RGB editing.
            self.__custom_selection()
        else:
            self.color = self.predef_colors[ix][1]
            appuifw.app.exit_key_handler = None
            self.script_lock.signal()
    def __predef_selection(self):
        # Show the list of predefined color names.
        appuifw.app.menu = [(u'Test', self.__predef_show_font_test)]
        self.lbox = appuifw.Listbox(self.color_menu,
                                    self.__predef_selection_handler)
        appuifw.app.body = self.lbox
    def __exit_key_handler(self):
        # Cancelled: no color selected.
        self.color = None
        appuifw.app.exit_key_handler = None
        self.script_lock.signal()
    def select(self, custom_color_default=None):
        """
        Dialog for font color selection. The font color is returned as a
        tuple containing selected RGB values. If no selection was made,
        None is returned.
        """
        self.old_app = save_current_app_info()
        appuifw.app.title = self.my_title
        appuifw.app.screen = self.screen_mode
        appuifw.app.exit_key_handler = self.__exit_key_handler
        #= Update the initial custom color if so requested.
        if custom_color_default:
            self.initial_custom_color = custom_color_default
        if self.color_menu:
            #= Predefined menu given.
            self.__predef_selection()
        else:
            #= Go directly to custom color selection since no predefined
            #= menu defined.
            self.__custom_selection()
        # Block until a color is chosen or the dialog is cancelled.
        self.script_lock.wait()
        restore_app_info(self.old_app)
        return self.color
#=
#= Cursor jumping with joystick
#=
class jumpTextCursorWithJoystick:
    """
    Idea: once the user presses the joystick the callback functions to
    handle joystick movements are activated. The joystick movements are
    interpreted as follows:
      - up    -> go beginning of document
      - down  -> go end of document
      - left  -> go beginning of line
      - right -> go end of line (finds 'newline')
      - press -> back to normal operation
    It is enough to create this object. It will then work autonomously.
    """
    #= Note: The joystick movements are passed to the underlying SW and this
    #=       must be taken in the account in the callback functions below!
    #=
    #= Note2: For some reason the callback functions __bol() and __eol()
    #=        are not called properly if the current cursor position is
    #=        either 'in the beginning of document' or 'in the end of
    #=        document'. How to fix this?
    def __init__(self, text_type):
        """
        text_type is an instance of Text Type (editor UI control) of
        appuifw module.
        """
        self.tt = text_type
        # Joystick press (EKeyDevice3) arms the jump mode.
        self.tt.bind(EKeyDevice3, self.__set_movements)
    def clear(self):
        """
        Clear joystick bindings.
        """
        self.__reset_movements()
        self.tt.bind(EKeyDevice3, None)
    def resume(self):
        """
        Resume joystick bindings that were cleared using clear().
        """
        self.tt.bind(EKeyDevice3, self.__set_movements)
    def __bol(self):
        """
        Moves cursor to the beginning of the current line.
        """
        self.__reset_movements()
        #= Below, one must be added to the intended position since the
        #= underlying SW moves cursor one position left anyway.
        cpos = self.tt.get_pos()
        if cpos == 0:
            #= Already in the beginning of the very first line.
            self.tt.set_pos(cpos+1)
            return
        if cpos == self.tt.len():
            #= Already in the beginning of the very last line.
            #= FIX: See Note2 above.
            self.tt.set_pos(0)
            return
        # Scan backwards for the previous paragraph separator.
        cpos -= 1
        while cpos:
            c = self.tt.get(cpos, 1)
            if c == u'\u2029': break #= Newline ('paragraph separator') found.
            cpos -= 1
        if cpos: cpos += 1
        self.tt.set_pos(cpos+1)
    def __eol(self):
        """
        Moves cursor to the end of the current line.
        """
        self.__reset_movements()
        cpos = self.tt.get_pos()
        end = self.tt.len()
        # Scan forwards for the next paragraph separator.
        while cpos < end:
            c = self.tt.get(cpos, 1)
            if c == u'\u2029': break #= Newline ('paragraph separator') found.
            cpos += 1
        #= One must be reduced since the underlying SW will move the cursor
        #= one position to right.
        self.tt.set_pos(cpos-1)
    def __bod(self):
        """
        Moves the cursor to the beginning of the document.
        """
        self.__reset_movements()
        self.tt.set_pos(0)
    def __eod(self):
        """
        Moves the cursor to the end of the document.
        """
        self.__reset_movements()
        self.tt.set_pos(self.tt.len())
    def __set_movements(self):
        """
        Set the callback functions once the user has pressed the joystick.
        """
        self.tt.bind(EKeyLeftArrow, self.__bol)
        self.tt.bind(EKeyRightArrow, self.__eol)
        self.tt.bind(EKeyUpArrow, self.__bod)
        self.tt.bind(EKeyDownArrow, self.__eod)
        # A second press leaves jump mode.
        self.tt.bind(EKeyDevice3, self.__reset_movements)
    def __reset_movements(self):
        """
        Reset the callback functions. This is 'normal' operation, i.e.,
        the cursor is moved one position once joystick is turned.
        """
        self.tt.bind(EKeyLeftArrow, None)
        self.tt.bind(EKeyRightArrow, None)
        self.tt.bind(EKeyUpArrow, None)
        self.tt.bind(EKeyDownArrow, None)
        self.tt.bind(EKeyDevice3, self.__set_movements)
#= <skip_sa_test>
#= Stand-alone testing
#=
def _stand_alone_test_dirBrowser():
    """
    Interactive smoke test for dirBrowser: repeat selections until the
    user declines, printing each result. The commented-out select()
    calls exercise alternative argument combinations.
    """
    fb = dirBrowser('Select a file or directory...')
    default_path = 'C:\\Nokia\\'
    loop_test=True
    while loop_test:
        try:
            #selection = fb.select(default_path, refresh_listing=False)
            selection = fb.select(None)
            #selection = fb.select(None, refresh_listing=False)
            #selection = fb.select(None, True, False)
            #selection = fb.select('C:\\notadir')
        except ValueError, detail:
            # Raised for a default path that is not a directory.
            print '*** error: ' + str(detail)
            return
        if selection == None:
            print 'Nothing selected'
        else:
            print 'Selected path', str(selection[0])
            print 'Selected file', str(selection[1])
            default_path = selection[0]
        if not appuifw.query(u'Do another selection?', 'query'):
            loop_test = False
def _stand_alone_test_fontSelectionDlg():
fs = fontSelectionDlg('Select your favorite font')
font = fs.select()
if font:
print u'Selected font ' + font
else:
print u'No font selection made'
def _stand_alone_test_fontColorSelectionDlg():
    """
    Interactive smoke test for fontColorSelectionDlg. Set
    test_predef_menu to a true value to exercise the predefined-menu
    path instead of going straight to custom RGB editing.
    """
    test_predef_menu = 0
    if test_predef_menu:
        predef_menu = [(u'Black', (0,0,0)), \
                       (u'Blue', (0,0,255)), \
                       (u'Green', (0,255,0)), \
                       (u'Red', (255,0,0)), \
                       (u'Custom', (0,255,255))]
        custom_index = 4
    else:
        predef_menu = None
        custom_index = None
    fcs = fontColorSelectionDlg('Select your favorite font color', \
                                predef_menu, custom_index)
    color = fcs.select()
    if color:
        print u'Selected font color ' + unicode(str(color))
    else:
        print u'No font color selection made'
def _stand_alone_test_jumpTextCursorWithJoystick():
    """
    Interactive smoke test for the joystick cursor-jumping helper,
    driven through a joystick-enabled fileViewer.
    """
    sample = ('This is a fairly long text to be show on the screen.\n'
              'It should be divided at least on two different lines '
              'in order to make it usable for this test.\n')
    viewer = fileViewer('Test', None, None, True)
    viewer.view(sample)
if __name__ == '__main__':
    # Uncomment one of the calls below to exercise a dialog manually on
    # the device; by default the module does nothing when executed.
#    _stand_alone_test_dirBrowser()
#    _stand_alone_test_fontSelectionDlg()
#    _stand_alone_test_fontColorSelectionDlg()
#    _stand_alone_test_jumpTextCursorWithJoystick()
    pass
| Python |
#=
#= default.py for pyFileManagerS60
#=
#= Do not change. This id has been officially allocated from Symbian.
# SYMBIAN_UID=0x10279730
import sys
import os.path
import appuifw
if __name__ == '__main__':
    #=
    #= Run as a main program.
    #= Force importing from the local directory first.
    #=
    localpath = str(os.path.split(appuifw.app.full_name())[0])
    sys.path = [localpath] + sys.path
    # Also prefer the 'my' subdirectory of the install location.
    localpath = os.path.join(localpath, 'my')
    sys.path = [localpath] + sys.path
    try:
        import pyFileManager
        fm = pyFileManager.pyFileManagerS60()
        fm.execute()
        sys.exit()
    except ImportError, detail:
        appuifw.note(u'pyFileMngr not properly installed - ' +
                     unicode(str(detail)), 'error')
else:
    #=
    #= Run from another script. The path of this script is passed
    #= in the variable __fmgrpath__ from the script executing
    #= this script.
    #=
    #= Force importing from the local directory first.
    #=
    try:
        # Raises KeyError when the caller did not define __fmgrpath__.
        globals()['__fmgrpath__']
        localpath = os.path.split(globals()['__fmgrpath__'])[0]
        sys.path = [localpath] + sys.path
        import pyFileManager
        fmngr_full_name = globals()['__fmgrpath__']
        fm = pyFileManager.pyFileManagerS60(my_real_full_name=fmngr_full_name)
        fm.execute()
    except (AttributeError, TypeError), detail:
        #= Most likely __fmgrpath__ == None
        appuifw.note(u'pyFileMngr not properly installed - ' +
                     unicode(str(detail)), 'error')
    except KeyError, detail:
        #= The executing script has not defined __fmgrpath__
        appuifw.note(u'pyFileMngr not properly installed - ' +
                     unicode(str(detail)), 'error')
    except ImportError, detail:
        appuifw.note(u'pyFileMngr not properly installed - ' +
                     unicode(str(detail)), 'error')
| Python |
# Simple BT App
#$GPRMC,161229.487,A,3723.2475,N,12158.3416,W,0.13,309.62,120598, ,*10
import socket, location, urllib
class BTReader:
def connect(self):
self.sock = socket.socket(socket.AF_BT, socket.SOCK_STREAM)
address, services = socket.bt_discover()
print "Discovered: %s, %s" % (address, services)
target = (address, services.values()[0])
print "Connecting to " + str(target)
self.sock.connect(target)
def readposition(self):
try:
buffer=""
ch = self.sock.recv(1)
while(ch != '\n'):
buffer += ch
ch = self.sock.recv(1)
# print buffer
if (buffer[0:6]=="$GPRMC"):
(GPRMC,utc,status,lat,latns,lon,lonew,knots,course,date,xx1,xx2)=buffer.split(",")
return "GPS (%s,%s,%s,%s,%s)"%(utc,lat+latns,lon+lonew,knots,course)
except Error:
return "Error!\n"
return ""
def close(self):
self.sock.close()
class GSM_loc:
    """
    Report the current GSM cell as a formatted string via the PyS60
    location module.
    """
    def upd(self):
        """Refresh self.loc from location.gsm_location() and return a
        formatted MCC/MNC/LAC/CID string."""
        self.loc = location.gsm_location()
        mcc = self.loc[0]
        mnc = self.loc[1]
        lac = self.loc[2]
        cid = self.loc[3]
        return "GSM (MCC:%s MNC:%s LAC:%s CID=%s)"%(mcc, mnc, lac, cid)
# Poll the GSM cell info and the Bluetooth GPS 15 times, printing
# both readings on each round, then close the GPS connection.
gsm = GSM_loc()
bt=BTReader()
bt.connect()
i=0
while (i<15):
    print gsm.upd()
    print bt.readposition()
    i += 1
bt.close()
| Python |
#!/usr/bin/python
####
# 02/2006 Will Holcomb <wholcomb@gmail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# 7/26/07 Slightly modified by Brian Schneider
# in order to support unicode files ( multipart_encode function )
"""
Usage:
Enables the use of multipart/form-data for posting forms
Inspirations:
Upload files in python:
http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/146306
urllib2_file:
Fabien Seisen: <fabien@seisen.org>
Example:
import MultipartPostHandler, urllib2, cookielib
cookies = cookielib.CookieJar()
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookies),
MultipartPostHandler.MultipartPostHandler)
params = { "username" : "bob", "password" : "riviera",
"file" : open("filename", "rb") }
opener.open("http://wwww.bobsite.com/upload/", params)
Further Example:
The main function of this file is a sample which downloads a page and
then uploads it to the W3C validator.
"""
import urllib
import urllib2
import mimetools, mimetypes
import os, stat
from cStringIO import StringIO
class Callable:
    """Old-style-class trick: storing a function as the instance's __call__
    attribute lets multipart_encode below act like a static method."""
    def __init__(self, anycallable):
        self.__call__ = anycallable
# Controls how sequences are uncoded. If true, elements may be given multiple values by
# assigning a sequence.  (Passed as the doseq argument to urllib.urlencode.)
doseq = 1
class MultipartPostHandler(urllib2.BaseHandler):
handler_order = urllib2.HTTPHandler.handler_order - 10 # needs to run first
def http_request(self, request):
data = request.get_data()
if data is not None and type(data) != str:
v_files = []
v_vars = []
try:
for(key, value) in data.items():
if type(value) == file:
v_files.append((key, value))
else:
v_vars.append((key, value))
except TypeError:
systype, value, traceback = sys.exc_info()
raise TypeError, "not a valid non-string sequence or mapping object", traceback
if len(v_files) == 0:
data = urllib.urlencode(v_vars, doseq)
else:
boundary, data = self.multipart_encode(v_vars, v_files)
contenttype = 'multipart/form-data; boundary=%s' % boundary
if(request.has_header('Content-Type')
and request.get_header('Content-Type').find('multipart/form-data') != 0):
print "Replacing %s with %s" % (request.get_header('content-type'), 'multipart/form-data')
request.add_unredirected_header('Content-Type', contenttype)
request.add_data(data)
return request
def multipart_encode(vars, files, boundary = None, buf = None):
if boundary is None:
boundary = mimetools.choose_boundary()
if buf is None:
buf = StringIO()
for(key, value) in vars:
buf.write('--%s\r\n' % boundary)
buf.write('Content-Disposition: form-data; name="%s"' % key)
buf.write('\r\n\r\n' + value + '\r\n')
for(key, fd) in files:
file_size = os.fstat(fd.fileno())[stat.ST_SIZE]
filename = fd.name.split('/')[-1]
contenttype = mimetypes.guess_type(filename)[0] or 'application/octet-stream'
buf.write('--%s\r\n' % boundary)
buf.write('Content-Disposition: form-data; name="%s"; filename="%s"\r\n' % (key, filename))
buf.write('Content-Type: %s\r\n' % contenttype)
# buffer += 'Content-Length: %s\r\n' % file_size
fd.seek(0)
buf.write('\r\n' + fd.read() + '\r\n')
buf.write('--' + boundary + '--\r\n\r\n')
buf = buf.getvalue()
return boundary, buf
multipart_encode = Callable(multipart_encode)
https_request = http_request
def main():
    """Demo: download each URL given on argv (or google.com) and submit the
    page to the W3C validator via the multipart handler."""
    import tempfile, sys
    validatorURL = "http://validator.w3.org/check"
    opener = urllib2.build_opener(MultipartPostHandler)
    def validateFile(url):
        # Save the page into a temp file, then upload it for validation.
        temp = tempfile.mkstemp(suffix=".html")
        os.write(temp[0], opener.open(url).read())
        params = { "ss" : "0", # show source
                   "doctype" : "Inline",
                   "uploaded_file" : open(temp[1], "rb") }
        print opener.open(validatorURL, params).read()
        os.remove(temp[1])
    if len(sys.argv[1:]) > 0:
        for arg in sys.argv[1:]:
            validateFile(arg)
    else:
        validateFile("http://www.google.com")
if __name__=="__main__":
    main()
| Python |
import MultipartPostHandler, urllib2, cookielib
latitude = str('23.34')
longitude = str('34.45')
file = 'logo.jpg'
cookies = cookielib.CookieJar()
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookies), MultipartPostHandler.MultipartPostHandler)
params = { 'point': '%s, %s' % (latitude, longitude), "picblob": open(file, "rb") }
answer = opener.open("http://flyingonwheel.appspot.com/mark_test", params)
print answer.code
| Python |
#!/usr/bin/python
####
# 02/2006 Will Holcomb <wholcomb@gmail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# 7/26/07 Slightly modified by Brian Schneider
# in order to support unicode files ( multipart_encode function )
"""
Usage:
Enables the use of multipart/form-data for posting forms
Inspirations:
Upload files in python:
http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/146306
urllib2_file:
Fabien Seisen: <fabien@seisen.org>
Example:
import MultipartPostHandler, urllib2, cookielib
cookies = cookielib.CookieJar()
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookies),
MultipartPostHandler.MultipartPostHandler)
params = { "username" : "bob", "password" : "riviera",
"file" : open("filename", "rb") }
opener.open("http://wwww.bobsite.com/upload/", params)
Further Example:
The main function of this file is a sample which downloads a page and
then uploads it to the W3C validator.
"""
import urllib
import urllib2
import mimetools, mimetypes
import os, stat
from cStringIO import StringIO
class Callable:
    """Old-style-class trick: storing a function as the instance's __call__
    attribute lets multipart_encode below act like a static method."""
    def __init__(self, anycallable):
        self.__call__ = anycallable
# Controls how sequences are uncoded. If true, elements may be given multiple values by
# assigning a sequence.  (Passed as the doseq argument to urllib.urlencode.)
doseq = 1
class MultipartPostHandler(urllib2.BaseHandler):
handler_order = urllib2.HTTPHandler.handler_order - 10 # needs to run first
def http_request(self, request):
data = request.get_data()
if data is not None and type(data) != str:
v_files = []
v_vars = []
try:
for(key, value) in data.items():
if type(value) == file:
v_files.append((key, value))
else:
v_vars.append((key, value))
except TypeError:
systype, value, traceback = sys.exc_info()
raise TypeError, "not a valid non-string sequence or mapping object", traceback
if len(v_files) == 0:
data = urllib.urlencode(v_vars, doseq)
else:
boundary, data = self.multipart_encode(v_vars, v_files)
contenttype = 'multipart/form-data; boundary=%s' % boundary
if(request.has_header('Content-Type')
and request.get_header('Content-Type').find('multipart/form-data') != 0):
print "Replacing %s with %s" % (request.get_header('content-type'), 'multipart/form-data')
request.add_unredirected_header('Content-Type', contenttype)
request.add_data(data)
return request
def multipart_encode(vars, files, boundary = None, buf = None):
if boundary is None:
boundary = mimetools.choose_boundary()
if buf is None:
buf = StringIO()
for(key, value) in vars:
buf.write('--%s\r\n' % boundary)
buf.write('Content-Disposition: form-data; name="%s"' % key)
buf.write('\r\n\r\n' + value + '\r\n')
for(key, fd) in files:
file_size = os.fstat(fd.fileno())[stat.ST_SIZE]
filename = fd.name.split('/')[-1]
contenttype = mimetypes.guess_type(filename)[0] or 'application/octet-stream'
buf.write('--%s\r\n' % boundary)
buf.write('Content-Disposition: form-data; name="%s"; filename="%s"\r\n' % (key, filename))
buf.write('Content-Type: %s\r\n' % contenttype)
# buffer += 'Content-Length: %s\r\n' % file_size
fd.seek(0)
buf.write('\r\n' + fd.read() + '\r\n')
buf.write('--' + boundary + '--\r\n\r\n')
buf = buf.getvalue()
return boundary, buf
multipart_encode = Callable(multipart_encode)
https_request = http_request
def main():
    """Demo: download each URL given on argv (or google.com) and submit the
    page to the W3C validator via the multipart handler."""
    import tempfile, sys
    validatorURL = "http://validator.w3.org/check"
    opener = urllib2.build_opener(MultipartPostHandler)
    def validateFile(url):
        # Save the page into a temp file, then upload it for validation.
        temp = tempfile.mkstemp(suffix=".html")
        os.write(temp[0], opener.open(url).read())
        params = { "ss" : "0", # show source
                   "doctype" : "Inline",
                   "uploaded_file" : open(temp[1], "rb") }
        print opener.open(validatorURL, params).read()
        os.remove(temp[1])
    if len(sys.argv[1:]) > 0:
        for arg in sys.argv[1:]:
            validateFile(arg)
    else:
        validateFile("http://www.google.com")
if __name__=="__main__":
    main()
| Python |
import httplib, mimetypes
def post_multipart(host, selector, fields, files):
    """
    Post fields and files to an http host as multipart/form-data.
    fields is a sequence of (name, value) elements for regular form fields.
    files is a sequence of (name, filename, value) elements for data to be uploaded as files
    Return the server's response page.
    """
    content_type, body = encode_multipart_formdata(fields, files)
    # Legacy httplib.HTTP API: build the request by hand, header by header.
    h = httplib.HTTP(host)
    h.putrequest('POST', selector)
    h.putheader('content-type', content_type)
    h.putheader('content-length', str(len(body)))
    h.endheaders()
    h.send(body)
    # getreply() must run before reading; status/headers are ignored here.
    errcode, errmsg, headers = h.getreply()
    return h.file.read()
def encode_multipart_formdata(fields, files):
    """
    fields is a sequence of (name, value) elements for regular form fields.
    files is a sequence of (name, filename, value) elements for data to be uploaded as files
    Return (content_type, body) ready for httplib.HTTP instance
    """
    BOUNDARY = '----------ThIs_Is_tHe_bouNdaRY_$'
    CRLF = '\r\n'
    parts = []
    # Plain fields: boundary, disposition header, blank line, value.
    for (key, value) in fields:
        parts.append('--' + BOUNDARY)
        parts.append('Content-Disposition: form-data; name="%s"' % key)
        parts.append('')
        parts.append(value)
    # File payloads additionally carry a filename and a guessed content type.
    for (key, filename, value) in files:
        parts.append('--' + BOUNDARY)
        parts.append('Content-Disposition: form-data; name="%s"; filename="%s"' % (key, filename))
        parts.append('Content-Type: %s' % get_content_type(filename))
        parts.append('')
        parts.append(value)
    parts.append('--' + BOUNDARY + '--')
    parts.append('')
    body = CRLF.join(parts)
    content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
    return content_type, body
def get_content_type(filename):
    """Guess the MIME type for *filename*, defaulting to a generic binary type."""
    guessed = mimetypes.guess_type(filename)[0]
    if guessed:
        return guessed
    return 'application/octet-stream'
| Python |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
#Author: alvayang <alvayang@tabex.org>
#Last Change:
#Description:
import positioning
import appuifw
import sys
import e32
import time
def writelog(data):
if data:
try:
file('E:\\gpslog.log', 'a').write('%s|%s|%s\n' % \
(str(time.time()),str(data['position']['latitude']), \
str(data['position']['longitude'])))
except Exception, e:
sys.stdout.write("exception happend:" + str(e))
def quit():
    # Menu "exit" action: release the lock so the script can finish.
    app_lock.signal()
def not_allowed():
    # Exit key is disabled; exiting is only offered through the options menu.
    appuifw.note("请通过选项选择退出.".decode('utf-8'), 'info')
appuifw.app.title = u"GPS Logger"
app_lock = e32.Ao_lock()
appuifw.app.menu = [("退出在这里面".decode('utf-8'), quit)]
# Start periodic positioning: writelog is invoked with a full fix every
# 5 seconds (interval is in microseconds; partial=0 = complete fixes only).
positioning.select_module(positioning.default_module())
positioning.set_requestors([{'type':'service','format':'application','data':'position'}])
positioning.position(course=1,satellites=1, callback=writelog,\
                     interval=5000000, partial=0)
appuifw.app.exit_key_handler = not_allowed
app_lock.wait()
| Python |
# -*- coding: utf-8 -*-
import appuifw, e32, graphics, os
def cn(x):
    # Decode UTF-8 bytes into unicode for drawing on the canvas.
    return x.decode("utf-8")

# Build the static backdrop: a title bar and a list/progress area.
appuifw.app.body = canvas = appuifw.Canvas()
appuifw.app.screen = "full"
font = ('normal', 15, graphics.FONT_BOLD)
img = graphics.Image.new((240, 320))
img.rectangle((8, 90, 230, 110), 0xFF6347, 0x316ac5)
img.text((9, 109), cn("c:\\system\\"), 0, font)
img.rectangle((8, 110, 230, 170), 0xFF6347, 0xe0e0ff)
#img.line((230,95,230,170),0,width=3)
#img.line((12,171,230,171),0,width=3)

# FIX: count entries with len() instead of the original manual counting
# loop; the no-op module-level 'global x, y' statement is dropped too.
x = len(os.listdir("c:\\system"))
y = 0
# Flash each entry name and grow the progress bar proportionally.
for i in os.listdir("c:\\system"):
    X = 230.0 / x
    y = y + X
    img.text((9, 135), cn("") + i, 0, font)
    e32.ao_sleep(0.02)
    img.text((9, 135), cn("") + i, 0xe0e0ff, font)
    img.rectangle((8, 145, y, 170), 0, 0xFF69B4)
    e32.ao_sleep(0.02)
    canvas.blit(img)
| Python |
# -*- coding: utf-8 -*-
"""
test text show on graphics of pys60
"""
import appuifw, graphics, e32
appuifw.app.screen = 'full'
canvas = appuifw.Canvas() # create a canvas
image = graphics.Image.new(canvas.size) # create an image
appuifw.app.body = canvas

def quit():
    # Exit-key handler: stop the polling loop at the bottom of the script.
    global running
    running = 0

# BUG FIX: install the handler AFTER quit() is defined; the original
# assigned appuifw.app.exit_key_handler = quit before the def, which fails
# with NameError (or binds the wrong object) at import time.
appuifw.app.exit_key_handler = quit

fonts = [ # different font specifications
    None,
    'normal',
    u'LatinBold19',
    (None, 15),
    ('normal', 15),
    (u'LatinBold19', 15, graphics.FONT_BOLD | graphics.FONT_ITALIC),
]
text = "测试.".decode('utf-8')
canvas.clear(0)
image.clear(0)
y = 20
for font in fonts: # draw the same text on the canvas and the image
    canvas.text((10, y), text, fill = 0xff0000, font = font) # red text on the canvas
    image.text((10, y), text, fill = 0x0000ff, font = font) # blue text on the image
    y += 25
canvas.blit(image, target = (0, y)) # copy the image's contents to the lower part of the canvas
running = 1
while running:
    e32.ao_yield() # wait for the exit button to be pressed
| Python |
#! -*- coding: utf-8 -*-
"""
test graph menu
"""
import appuifw, time, audio, os
from graphics import *
import e32, topwindow
appuifw.app.screen = "full"
img = Image.new((240, 320))
img.clear(0xFAEBD7)
appuifw.app.body = canvas = appuifw.Canvas()
def cn(x):
    # Decode UTF-8 bytes into unicode for drawing.
    return x.decode("utf-8")
font = ('normal', 15, FONT_BOLD)
#------------------------- images (splash + background) ↓
screen = topwindow.TopWindow()
photo = Image.open("e:\\data\\python\\0.jpg")
screen.add_image(photo, (0, 0, 240, 320))
screen.show()
e32.ao_sleep(1.5)
screen.hide()
jpg1 = Image.open("e:\\data\\python\\1.jpg")
img.blit(jpg1, (0, 25))
canvas.blit(img)
#------------------------- images ↑
#------------------------- sounds ↓
sound1 = audio.Sound.open("e:\\data\\python\\ding.wav")
sound2 = audio.Sound.open("e:\\data\\python\\ding.wav")
sound3 = audio.Sound.open("e:\\data\\python\\ding.wav")
#------------------------- sounds ↑
#------------------------- behaviour ↓
# Menu state is stored as marker FILES on disk: down.down means entry 1
# is highlighted, up.up means entry 2 is highlighted.
def menu():
    # Call/menu key: open the in-game menu with entry 1 selected.
    sound2.play()
    o = open("e:\\data\\python\\down.down", "w")
    o.close()
    img.rectangle((0,205,85,295),0,fill=0xFAEBD7)
    img.text((205,316),cn("退出"),0xFAEBD7,font)
    img.text((207,316),cn("取消"),0xFF00FF,font)
    img.text((1,240),cn("1.开始游戏"),0xFF00FF,font)
    img.text((1,280),cn("2.退出游戏"),0,font)
def down():
    # Arrow-down: move the highlight from entry 1 to entry 2.
    down = os.path.exists("e:\\data\\python\\down.down")
    if down > 0:
        sound1.play()
        img.text((1,240),cn("1.开始游戏"),0,font)
        img.text((1,280),cn("2.退出游戏"),0xFF00FF,font)
        os.remove("e:\\data\\python\\down.down")
        o = open("e:\\data\\python\\up.up", "w")
        o.close()
def up():
    # Arrow-up: move the highlight from entry 2 back to entry 1.
    up = os.path.exists("e:\\data\\python\\up.up")
    if up > 0:
        sound1.play()
        img.text((1,240),cn("1.开始游戏"),0xFF00FF,font)
        img.text((1,280),cn("2.退出游戏"),0,font)
        os.remove("e:\\data\\python\\up.up")
        o = open("e:\\data\\python\\down.down", "w")
        o.close()
appuifw.app.body.bind(63498,down)
appuifw.app.body.bind(63497,up)
def exit():
    # Right softkey: close the menu if one is open, otherwise confirm exit.
    # NOTE(review): original indentation was lost; the nesting of the
    # redraw lines under 'if up > 0' is a reconstruction -- confirm.
    down = os.path.exists("e:\\data\\python\\down.down")
    up = os.path.exists("e:\\data\\python\\up.up")
    img.blit(jpg1,(0,25))
    canvas.blit(img)
    if down > 0:
        os.remove("e:\\data\\python\\down.down")
    if up > 0:
        os.remove("e:\\data\\python\\up.up")
        img.blit(jpg1,(0,25))
        canvas.blit(img)
        img.text((207,316),cn("取消"),0xFAEBD7,font)
        img.text((205,316),cn("退出"),0,font)
    if up == 0 and down == 0:
        # No menu open: ask whether to quit the game entirely.
        sound1.play()
        if appuifw.query(cn("退出游戏吗?"),"query"):
            appuifw.app.set_exit()
appuifw.app.exit_key_handler = exit
appuifw.app.body.bind(63586,menu)
def out():
    # Simpler exit handler.  NOTE(review): this assignment overrides the
    # 'exit' handler installed just above.
    sound1.play()
    if appuifw.query(cn("是否退出游戏?\n阿高提示"),"query"):
        appuifw.app.set_exit()
appuifw.app.exit_key_handler = out
def ok():
    # Select/OK key: activate whichever menu entry is highlighted.
    down = os.path.exists("e:\\data\\python\\down.down")
    up = os.path.exists("e:\\data\\python\\up.up")
    if up > 0:
        # Entry 2 ("quit game") is highlighted.
        if appuifw.query(cn("退出游戏吗?"),"query"):
            os.remove("e:\\data\\python\\up.up")
            appuifw.app.set_exit()
    if down > 0:
        # Entry 1 is highlighted: clear the menu overlay and redraw.
        img.rectangle((0,205,85,295),0xFAEBD7,fill=0xFAEBD7)
        os.remove("e:\\data\\python\\down.down")
        img.text((207,316),cn("取消"),0xFAEBD7,font)
        img.text((205,316),cn("退出"),0,font)
        sound3.play()
        img.blit(jpg1,(0,25))
        canvas.blit(img)
appuifw.app.body.bind(63557,ok)
#------------------------- behaviour ↑
#------------------------- interface ↓
img.text((205,316),cn("退出"),0,font)
img.text((0,316),cn("菜单"),0,font)
img.line((0,295,320,295),0,width=2)
# img.text((197,20),cn("- 口 X"),0,font)
# img.line((0,25,320,25),0,width=2)
# img.text((0,18),cn("胃食猫"),0,font)
#------------------------- interface ↑
#------------------------- main loop ↓
def wwe():
    # Repaint the backing image onto the canvas.
    canvas.blit(img)
running = 1
while running:
    # Blink the clock in the softkey bar roughly once per second.
    wwe()
    a = time.strftime("%H:%M:%S")
    img.text((85,316),cn("")+a,0xFF0000,font)
    e32.ao_sleep(0.8)
    img.text((85,316),cn("")+a,0xFAEBD7,font)
    e32.ao_yield()
| Python |
import e32, camera, appuifw, key_codes
from graphics import Image
def viewfinder(img):
    # Called by camera.start_finder for every viewfinder frame; just show it.
    canvas.blit(img)
def shoot():
    # Capture a photo, preview it scaled to the canvas, and save a copy.
    # NOTE(review): assumes Image.resize modifies the image in place --
    # confirm; some graphics builds return a new image instead.
    camera.stop_finder()
    photo = camera.take_photo(size = (640, 480))
    w, h = canvas.size
    canvas.blit(photo, target = (0, 0, w, h), scale = 1)
    photo.resize((320, 240))
    photo.save('e:\\photo.jpg')
def quit():
    # Exit-key handler: release the main lock so the script can finish.
    app_lock.signal()
appuifw.app.body = canvas = appuifw.Canvas()
appuifw.app.title = u"Camera"
# Shrink the stored logo to 32x32 (overwrites the file on every run).
photo = Image.open("e:\\data\\python\\logo.jpg")
photo.resize((32, 32))
photo.save("e:\\data\\python\\logo.jpg")
canvas.blit(photo)
appuifw.app.exit_key_handler = quit
camera.start_finder(viewfinder)
canvas.bind(key_codes.EKeySelect, shoot)
app_lock = e32.Ao_lock()
app_lock.wait()
| Python |
# -*- coding: utf-8 -*-
"""
a graph login frame
"""
import graphics, e32, appuifw
font = ('normal', 15, graphics.FONT_BOLD)
appuifw.app.body = canvas = appuifw.Canvas()
appuifw.app.screen = "full"
def cn(x):
    # Decode UTF-8 bytes into unicode for the on-screen labels.
    return x.decode("utf-8")
# Draw the static "client login" dialog.
img = graphics.Image.new(canvas.size)
img.clear(0)
img.rectangle((10, 90, 230, 190), 0x316ac5, 0xe0e0ff)# dialog body
img.rectangle((10, 70, 230, 90), 0x316ac5, 0x316ac5)# title bar
img.text((11, 88), cn("客户端"), 0xFF6347, font)
img.text((11, 122), cn("IP地址:"), 0x316ac5, font)
img.rectangle((70, 105, 220, 125), 0x316ac5, width=3)# IP address box
img.text((25, 147), cn("端口:"), 0x316ac5, font)
img.rectangle((70, 130, 220, 150), 0x316ac5)# port box
img.text((25, 180), cn("连接"), 0x316ac5, font)
img.text((187, 180), cn("取消"), 0x316ac5, font)
img.line((230, 74, 230, 190), 0x316ac5, width=2)# right shadow
img.line((14, 190, 230, 190), 0x316ac5, width=2)# bottom shadow
#------------ key handling -------------↓
def down():
    # Arrow-down: move the focus highlight from the IP box to the port box.
    img.rectangle((70, 105, 220, 125), 0xe0e0ff, width=3)# grey out IP box
    img.rectangle((70, 105, 220, 125), 0x316ac5)# thin IP outline
    img.rectangle((70, 130, 220, 150), 0x316ac5, width=3)# thick port outline
    # port box selected
canvas.bind(63498, lambda:down())
def up():
    # Arrow-up: move the focus highlight back to the IP box.
    img.rectangle((70, 105, 220, 125), 0x316ac5, width=3)# thick IP outline
    img.rectangle((70, 130, 220, 150), 0xe0e0ff, width=3)# grey out port box
    img.rectangle((70, 130, 220, 150), 0x316ac5)# thin port outline
    # address box selected
canvas.bind(63497, lambda:up())
def left():
    # Arrow-left: highlight the "connect" button.
    img.text((25, 180), cn("连接"), 0xFF6347, font)
    img.text((187, 180), cn("取消"), 0x316ac5, font)
    # connect selected
canvas.bind(63495, lambda:left())
def right():
    # Arrow-right: highlight the "cancel" button.
    img.text((25, 180), cn("连接"), 0x316ac5, font)
    img.text((187, 180), cn("取消"), 0xFF6347, font)
    # cancel selected
canvas.bind(63496, lambda:right())
#------------ key handling -------------↑
def exit():
    # Exit-key handler: stop the redraw loop below.
    global gao
    gao = 0
appuifw.app.exit_key_handler = exit
gao = 1
while gao:
    canvas.blit(img)
    e32.ao_yield()
| Python |
# -*- coding: utf-8 -*-
"""
persion walk
"""
import appuifw, graphics, e32
def cn(x):
    # Decode UTF-8 bytes into unicode text.
    return x.decode("utf-8")
font = ('normal', 15, graphics.FONT_BOLD)
appuifw.app.body = canvas = appuifw.Canvas()
appuifw.app.screen = "full"
img = graphics.Image.new(canvas.size)
img.clear(0)
# Sprite sheet containing the walking-person frames.
man = graphics.Image.open("e:\\data\\python\\person.png")
def ww(a, xy):
    # Redraw the background; when a == 0 also draw the standing frame at x=xy.
    if a == 0:
        canvas.blit(img)
        canvas.blit(man, target=(xy, 50), source=(71, 40, 100, 100))
    else:
        canvas.blit(img)
#--------------show person-----------------↓
def w():
    # Show the standing frame at the current xy position.
    global a
    a = 0
    ww(a, xy)
#--------------show person-----------------↑
#--------------隐藏人物-----------------↓
def wj():
    # Hide the person: draw the background only.
    global a
    a = 3
    # BUG FIX: ww() takes two arguments; the original called ww(a), which
    # raises TypeError.  Any a != 0 simply redraws the background.
    ww(a, xy)
#--------------隐藏人物-----------------↑
#--------------animated person-----------------↓
def l(y):
    # Draw the mid-stride frame at x=y.
    canvas.blit(img)
    canvas.blit(man, target=(y, 50), source=(25, 40, 51, 100))
def ad():
    # One walk cycle: stride frame, then the standing frame twice.
    l(y)
    e32.ao_sleep(0.5)
    w()
    e32.ao_sleep(0.5)
    w()
    e32.ao_sleep(0.5)
# (no-op at module level; kept as in the original)
global x, y, xy
x = 50
y = 50
xy = 50
# Walk leftwards: shift all x coordinates 5 px per step.
for i in range(1000):
    x = x-5
    y = y-5
    xy = xy-5
    e32.ao_sleep(0.1)
    ad()
#--------------animated person-----------------↑
canvas.bind(8, lambda:ad())
#e32.ao_sleep(50)
| Python |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
#Author: iamsk <skxiaonan@gmail.com>
#Last Change: 2010-04-04
#Description: mark current position and post it to flyingonwheel.appspot.com
#Notice: your phone must have the 'Location' (GPS data /position) application on,
#and receive satalite data in order to make this script work. (can be problematic indoors).
#Latitute: 纬度, Longitute: 经度
import sys
import time
import urllib
import httplib
import e32
import positioning
import appuifw
import e32dbm
DB_FILE = u"E:\\flyingonwheel.db"
MARK_URL = "http://flyingonwheel.appspot.com/mark"
def postPosition(data):
    """
    post data to flyingonwheel.appspot.com
    data: dict from positioning.position(); reads position latitude/longitude.
    """
    if data:
        # Credentials were stored by setting() in the e32dbm database.
        db = e32dbm.open(DB_FILE, "r")
        name = db[u"name"]
        password = db[u"password"]
        db.close()
        address_name, address_info = flyer_input()
        appuifw.note(u"Posting position information ...", 'info')
        latitude = str(data['position']['latitude'])
        longitude = str(data['position']['longitude'])
        params = urllib.urlencode([('flyer_name', '%s' % name), ('password', '%s' % password), ('point', '%s, %s' % (latitude, longitude)), ('name', '%s' % address_name), ('point_info', '%s' % address_info), ])
        headers = {"Content-type": "application/x-www-form-urlencoded", "Accept": "text/plain"}
        conn = httplib.HTTPConnection("flyingonwheel.appspot.com")
        try:
            conn.request("POST", "/mark", params, headers)
        except:
            conn.close()
            appuifw.note(u"Could not posting information", 'info')
        else:
            conn.close()
            appuifw.note(u"Position Information Posted", 'info')
def getPosition():
    """
    Get GPS co-ordinates
    Returns the positioning dict, or None (implicitly) on failure.
    """
    appuifw.note(u"Getting Current Position...", 'info')
    positioning.select_module(positioning.default_module())
    positioning.set_requestors([{'type': 'service', 'format': 'application', 'data': 'position'}])
    appuifw.note(u"Retrieving GPS co-ordinates ...", 'info')
    try:
        result = positioning.position(course=1, satellites=1)
    except:
        appuifw.note(u"Could not retrieve GPS co-ordinates", 'info')
    else:
        appuifw.note(u"GPS co-ordinates retrieved", 'info')
        return result
def quit():
    """
    Change exit to here
    (menu action: release the main lock so the script ends)
    """
    app_lock.signal()
def not_here():
    """
    bind Exit button to this
    (exit key is disabled; use the menu instead)
    """
    appuifw.note(u"Exit isn't here", 'info')
def writeLog(data):
    """
    write the log
    Appends "ctime|lat|lon" to E:\gpslog.log and shows a success/error note.
    """
    if data:
        try:
            # BUG FIX: the original opened the file without ever closing it.
            log = open('E:\\gpslog.log', 'a')
            try:
                log.write('%s|%s|%s\n\n' % (str(time.ctime()), str(data['position']['latitude']), str(data['position']['longitude'])))
            finally:
                log.close()
        except:
            appuifw.note(u"Writing log error", 'info')
        else:
            appuifw.note(u"Log writed", 'info')
def mark():
    """
    starts here
    Menu action: read the GPS fix, post it, and append it to the local log.
    """
    appuifw.note(u"Start getPosition ...", 'info')
    data = getPosition()
    postPosition(data)
    writeLog(data)
    appuifw.note(u"End postPosition ...", 'info')
def setting():
    # Ask for credentials and persist them for postPosition().
    name = appuifw.query(u"Type your Name: ", "text")
    password = appuifw.query(u"Type your Password: ", "code")
    if name and password:
        db = e32dbm.open(DB_FILE, "cf")
        db[u"name"] = name
        db[u"password"] = password
        db.close()
    else:
        appuifw.note(u"Cancel!", 'info')
def flyer_input():
    # Prompt for a label and a description of the marked location.
    address_name = appuifw.query(u"Type address Name: ", "text")
    address_info = appuifw.query(u"Type address Description: ", "text")
    return address_name, address_info
if __name__ == '__main__':
    appuifw.app.title = u"Flying on Wheel"
    app_lock = e32.Ao_lock()
    appuifw.app.menu = [(u"Setting", setting), (u"Start Mark", mark), (u"Exit", quit)]
    appuifw.app.exit_key_handler = not_here
    app_lock.wait()
| Python |
#-*- coding: utf-8 -*-
# 测试中文
####
# 02/2006 Will Holcomb <wholcomb@gmail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# 7/26/07 Slightly modified by Brian Schneider
# in order to support unicode files ( multipart_encode function )
"""
Usage:
Enables the use of multipart/form-data for posting forms
Inspirations:
Upload files in python:
http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/146306
urllib2_file:
Fabien Seisen: <fabien@seisen.org>
Example:
import MultipartPostHandler, urllib2, cookielib
cookies = cookielib.CookieJar()
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookies),
MultipartPostHandler.MultipartPostHandler)
params = { "username" : "bob", "password" : "riviera",
"file" : open("filename", "rb") }
opener.open("http://wwww.bobsite.com/upload/", params)
Further Example:
The main function of this file is a sample which downloads a page and
then uploads it to the W3C validator.
"""
import urllib
import urllib2
import mimetools, mimetypes
import os, stat
from cStringIO import StringIO
class Callable:
    """Old-style-class trick: storing a function as the instance's __call__
    attribute lets multipart_encode below act like a static method."""
    def __init__(self, anycallable):
        self.__call__ = anycallable
# Controls how sequences are uncoded. If true, elements may be given multiple values by
# assigning a sequence.  (Passed as the doseq argument to urllib.urlencode.)
doseq = 1
class MultipartPostHandler(urllib2.BaseHandler):
handler_order = urllib2.HTTPHandler.handler_order - 10 # needs to run first
def http_request(self, request):
data = request.get_data()
if data is not None and type(data) != str:
v_files = []
v_vars = []
try:
for(key, value) in data.items():
if type(value) == file:
v_files.append((key, value))
else:
v_vars.append((key, value))
except TypeError:
systype, value, traceback = sys.exc_info()
raise TypeError, "not a valid non-string sequence or mapping object", traceback
if len(v_files) == 0:
data = urllib.urlencode(v_vars, doseq)
else:
boundary, data = self.multipart_encode(v_vars, v_files)
contenttype = 'multipart/form-data; boundary=%s' % boundary
if(request.has_header('Content-Type')
and request.get_header('Content-Type').find('multipart/form-data') != 0):
print "Replacing %s with %s" % (request.get_header('content-type'), 'multipart/form-data')
request.add_unredirected_header('Content-Type', contenttype)
request.add_data(data)
return request
def multipart_encode(vars, files, boundary = None, buf = None):
if boundary is None:
boundary = mimetools.choose_boundary()
if buf is None:
buf = StringIO()
for(key, value) in vars:
buf.write('--%s\r\n' % boundary)
buf.write('Content-Disposition: form-data; name="%s"' % key)
buf.write('\r\n\r\n' + value + '\r\n')
for(key, fd) in files:
file_size = os.fstat(fd.fileno())[stat.ST_SIZE]
filename = fd.name.split('/')[-1]
contenttype = mimetypes.guess_type(filename)[0] or 'application/octet-stream'
buf.write('--%s\r\n' % boundary)
buf.write('Content-Disposition: form-data; name="%s"; filename="%s"\r\n' % (key, filename))
buf.write('Content-Type: %s\r\n' % contenttype)
# buffer += 'Content-Length: %s\r\n' % file_size
fd.seek(0)
buf.write('\r\n' + fd.read() + '\r\n')
buf.write('--' + boundary + '--\r\n\r\n')
buf = buf.getvalue()
return boundary, buf
multipart_encode = Callable(multipart_encode)
https_request = http_request
def main():
    """Demo: download each URL given on argv (or google.com) and submit the
    page to the W3C validator via the multipart handler."""
    import tempfile, sys
    validatorURL = "http://validator.w3.org/check"
    opener = urllib2.build_opener(MultipartPostHandler)
    def validateFile(url):
        # Save the page into a temp file, then upload it for validation.
        temp = tempfile.mkstemp(suffix=".html")
        os.write(temp[0], opener.open(url).read())
        params = { "ss" : "天涯", # show source
                   "doctype" : "Inline",
                   "uploaded_file" : open(temp[1], "rb") }
        print opener.open(validatorURL, params).read()
        os.remove(temp[1])
    if len(sys.argv[1:]) > 0:
        for arg in sys.argv[1:]:
            validateFile(arg)
    else:
        validateFile("http://www.google.com")
if __name__=="__main__":
    main()
| Python |
#-*- coding: utf-8 -*-
#Author: iamsk <skxiaonan@gmail.com>
#Last Change: 2010-05-16
#Description: mark current position and post it to flyingonwheel.appspot.com
#Notice: your phone must have the 'Location' (GPS data /position) application on,
#and receive satalite data in order to make this script work. (can be problematic indoors).
#Latitute: 纬度, Longitute: 经度
import sys
import os
sys.path.append('e:\\data\\python')
import time
import urllib
import urllib2
import cookielib
import e32
import positioning
import appuifw
import e32dbm
import camera
import key_codes
from graphics import Image
import MultipartPostHandler
DB_FILE = 'e:\\flyingonwheel.db'
MARK_URL = 'http://flyingonwheel.appspot.com/mark'
file_pic = 'e:\\photo.jpg'
photo_taked = False
def de_cn(x):
    """Decode a UTF-8 byte string into unicode text."""
    text = x.decode("utf-8")
    return text
def en_cn(x):
    """Encode unicode text as a UTF-8 byte string."""
    raw = x.encode("utf-8")
    return raw
def viewfinder(img):
    """
    handled by camera.start_finder, use for showing current pic
    (called once per viewfinder frame; paints it straight onto the canvas)
    """
    canvas.blit(img)
def shoot():
    """
    shoot a pic and save it
    """
    # BUG FIX: declare the module-level flag; the original assignment only
    # created a local, so mark() never saw photo_taked become True.
    global photo_taked
    camera.stop_finder()
    photo = camera.take_photo(size = (640, 480))
    w, h = canvas.size
    canvas.blit(photo, target = (0, 0, w, h), scale = 1)
    photo.save('e:\\photo.jpg')
    photo_taked = True
def takePhoto():
    """
    take a photo
    """
    # BUG FIX: reset the module-level flag, not a function-local one.
    global photo_taked
    photo_taked = False
    camera.start_finder(viewfinder)
    canvas.bind(key_codes.EKeySelect, shoot)
def postPosition(data):
    """
    post data to flyingonwheel.appspot.com
    Uploads coordinates, labels and the saved photo as multipart/form-data.
    """
    if data:
        # Read the name and password from db
        db = e32dbm.open(DB_FILE, "r")
        name = db[u"name"]
        password = db[u"password"]
        db.close()
        address_name, address_info = flyer_input()
        appuifw.note(u"Posting position information ...", "info")
        latitude = str(data['position']['latitude'])
        longitude = str(data['position']['longitude'])
        file_data = open(file_pic, 'rb')
        try:
            cookies = cookielib.CookieJar()
            opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookies), MultipartPostHandler.MultipartPostHandler)
            params = { 'flyer_name': '%s' % name, 'password': '%s' % password, 'point': '%s, %s' % (latitude, longitude), 'name': '%s' % en_cn(address_name), 'point_info': '%s' % en_cn(address_info), "picblob": file_data }
            answer = opener.open(MARK_URL, params)
            if answer.code == 200:
                appuifw.note(u"Position Information Posted", "info")
            else:
                appuifw.note(u"Please check your network", "info")
        except:
            appuifw.note(u"Could not posting information", "info")
def getPosition():
    """
    Get GPS co-ordinates
    Returns the positioning dict, or None (implicitly) on failure.
    """
    appuifw.note(u"Getting Current GPS...", "info")
    positioning.select_module(positioning.default_module())
    positioning.set_requestors([{'type': 'service', 'format': 'application', 'data': 'position'}])
    appuifw.note(u"Retrieving GPS co-ordinates ...", "info")
    try:
        result = positioning.position(course=1, satellites=1)
    except:
        appuifw.note(u"Could not retrieve GPS co-ordinates", "info")
    else:
        appuifw.note(u"GPS co-ordinates retrieved", "info")
        return result
def quit():
    """
    Change exit to here
    (menu action: release the main lock so the script ends)
    """
    app_lock.signal()
def not_here():
    """
    bind Exit button to this
    (exit key is disabled; use the menu instead)
    """
    appuifw.note(u"Exit isn't here", "info")
def writeLog(data):
    """
    write the log
    Appends "ctime|lat|lon" to e:\gpslog.log and shows a success/error note.
    """
    if data:
        try:
            # BUG FIX: the original opened the file without ever closing it.
            log = open('e:\\gpslog.log', 'a')
            try:
                log.write('%s|%s|%s\n\n' % (str(time.ctime()), str(data['position']['latitude']), str(data['position']['longitude'])))
            finally:
                log.close()
        except:
            appuifw.note(u"Writing log error", "info")
        else:
            appuifw.note(u"Log writed", "info")
def mark():
    """
    mark starts here
    Menu action: get a GPS fix, take a photo, then post and log the mark.
    """
    appuifw.note(u"Start mark ...", "info")
    data = getPosition()
    takePhoto()
    e32.ao_sleep(3)
    if photo_taked:
        postPosition(data)
        writeLog(data)
        appuifw.note(u"End mark", "info")
    else:
        # Give the user more time to press Select before posting anyway.
        e32.ao_sleep(7)
        postPosition(data)
        writeLog(data)
        appuifw.note(u"End mark", "info")
    # Restore the background image (photo is the module-level bg.jpg).
    canvas.blit(photo)
def setting():
    """
    setting the name and password
    """
    name = appuifw.query(u"Type your Name: ", "text")
    password = appuifw.query(u"Type your Password: ", "code")
    if name and password:
        db = e32dbm.open(DB_FILE, "cf")
        db[u"name"] = name
        db[u"password"] = password
        db.close()
    else:
        appuifw.note(u"Cancel!", "info")
def flyer_input():
    """
    input the address name and the address description
    """
    address_name = appuifw.query(u"Type address Name: ", "text")
    address_info = appuifw.query(u"Type address Description: ", "text")
    return address_name, address_info
if __name__ == '__main__':
    appuifw.app.body = canvas = appuifw.Canvas()
    appuifw.app.title = u"Flying on Wheel"
    photo = Image.open("e:\\data\\python\\bg.jpg")
    canvas.blit(photo)
    app_lock = e32.Ao_lock()
    appuifw.app.menu = [(u"Setting", setting), (u"Start Mark", mark), (u"Exit", quit)]
    appuifw.app.exit_key_handler = not_here
    app_lock.wait()
| Python |
# -*- coding: utf-8 -*-
import appuifw
import e32
import graphics
import thread
import time
class Main:
def __init__(self):
appuifw.app.title = u'Fibonacci'
appuifw.app.body = self.canvas = appuifw.Canvas()
appuifw.exit_key_handler = self.OnExit
appuifw.app.menu = [(u'Calcola', self.OnCalcola)]
self.nlock = thread.allocate_lock() # Allocazione di un lock per il thread
self.vlock = thread.allocate_lock()
self.ris1 = 0
self.ris2 = ''
self.loop = 1
self.indice = 0
self.img = graphics.Image.new(self.canvas.size)
self.OnLoop()
def OnCalcola(self):
numero = appuifw.query(u'Inserire un Numero', 'number')
thread.start_new_thread(self.fibonacci, (numero, ))
def fibonacci(self, n):
self.ris1 = time.strftime('%H:%M:%S')
self.ris2 = ''
a, b = 0, 1
for i in range(n):
a, b = b, a + b
self.nlock.acquire()
self.indice = ((i+1)*100)/n
self.nlock.release()
self.nlock.acquire()
self.ris2 = time.strftime('%H:%M:%S')
self.nlock.release()
def OnExit(self):
self.loop = 0
def OnLoop(self):
while self.loop:
e32.ao_sleep(0.1)
self.img.clear(0)
self.img.line([37, 50, 137, 50], 0xffffff, width=20)
self.img.line([37, 50, 37 + self.indice, 50], 0xfffc0d, width=20)
self.img.text((80, 55), u'' + str(self.indice) + u'%', 0x000000)
self.vlock.acquire()
if self.ris2 <> '':
self.img.text((45, 80), ur'Start: ' + str(self.ris1), 0xffffff)
self.img.text((45, 92), ur'End: ' + str(self.ris2), 0xffffff)
self.canvas.blit(self.img)
self.vlock.release()
if __name__ == '__main__':
    # Constructing Main blocks inside OnLoop() until the exit handler fires.
    main = Main()
| Python |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
#Author: alvayang <alvayang@tabex.org>
#Last Change:
#Description:
import positioning
import appuifw
import sys
import e32
import time
def writelog(data):
if data:
try:
file('E:\\gpslog.log', 'a').write('%s|%s|%s\n' % \
(str(time.time()),str(data['position']['latitude']), \
str(data['position']['longitude'])))
except Exception, e:
sys.stdout.write("exception happend:" + str(e))
def quit():
    # Menu callback: release the main Ao_lock so the script exits cleanly.
    app_lock.signal()
def not_allowed():
    # Exit-key handler: tell the user (in Chinese) to exit via the menu.
    appuifw.note("请通过选项选择退出.".decode('utf-8'), 'info')
# Script setup: register a periodic position callback that appends fixes to
# the log file, then block on an Ao_lock until the menu's quit entry fires.
appuifw.app.title = u"GPS Logger"
app_lock = e32.Ao_lock()
appuifw.app.menu = [("退出在这里面".decode('utf-8'), quit)]
positioning.select_module(positioning.default_module())
positioning.set_requestors([{'type':'service','format':'application','data':'position'}])
# interval is in microseconds: 5000000 == one fix every 5 seconds.
positioning.position(course=1,satellites=1, callback=writelog,\
    interval=5000000, partial=0)
appuifw.app.exit_key_handler = not_allowed
app_lock.wait()
| Python |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
#Author: iamsk <skxiaonan@gmail.com>
#Last Change: 2010-04-04
#Description: mark current position and post it to flyingonwheel.appspot.com
#Notice: your phone must have the 'Location' (GPS data /position) application on,
#and receive satellite data in order to make this script work. (can be problematic indoors).
#Latitute: 纬度, Longitute: 经度
import sys
import time
import urllib
import urllib2
import e32
import positioning
import appuifw
import e32dbm
DB_FILE = u"E:\\flyingonwheel.db"
MARK_URL = "http://flyingonwheel.appspot.com/mark"
def postPosition(data):
    """POST the GPS fix in ``data`` to MARK_URL together with the stored
    credentials and a user-supplied address name/description.

    ``data`` is the dict returned by positioning.position(); a falsy value
    means no fix was obtained and nothing is sent.
    """
    if data:
        # Read the stored name and password from the settings db.
        db = e32dbm.open(DB_FILE, "r")
        try:
            name = db[u"name"]
            password = db[u"password"]
        finally:
            # BUG FIX: close the db even if a key lookup fails.
            db.close()
        address_name, address_info = flyer_input()
        appuifw.note(u"Posting position information ...", "info")
        latitude = str(data['position']['latitude'])
        longitude = str(data['position']['longitude'])
        params = urllib.urlencode([('flyer_name', '%s' % name), ('password', '%s' % password), ('point', '%s, %s' % (latitude, longitude)), ('name', '%s' % address_name), ('point_info', '%s' % address_info), ])
        req = urllib2.Request(MARK_URL)
        try:
            answer = urllib2.urlopen(req, params)
            # BUG FIX: release the connection once the POST has completed.
            answer.close()
        except:
            appuifw.note(u"Could not posting information", "info")
        else:
            appuifw.note(u"Position Information Posted", "info")
def getPosition():
    """Return the dict from positioning.position() (course + satellite info),
    or None when no fix could be obtained."""
    appuifw.note(u"Getting Current Position...", "info")
    positioning.select_module(positioning.default_module())
    positioning.set_requestors([{'type': 'service', 'format': 'application', 'data': 'position'}])
    appuifw.note(u"Retrieving GPS co-ordinates ...", "info")
    try:
        result = positioning.position(course=1, satellites=1)
    except:
        appuifw.note(u"Could not retrieve GPS co-ordinates", "info")
        # BUG FIX: return an explicit None on failure instead of falling
        # through to a possibly-unbound ``result``.
        return None
    appuifw.note(u"GPS co-ordinates retrieved", "info")
    return result
def quit():
    """
    Change exit to here
    """
    # Menu callback: release the main Ao_lock so the __main__ block returns.
    app_lock.signal()
def not_here():
    """
    bind Exit button to this
    """
    # Hardware-exit-key handler: exiting is only allowed via the menu.
    appuifw.note(u"Exit isn't here", "info")
def writeLog(data):
    """Append 'ctime|lat|lon' to E:\\gpslog.log and report the outcome
    with a UI note. A falsy ``data`` (no fix) writes nothing.
    """
    if data:
        try:
            # BUG FIX: the original file(...) handle was never closed.
            logfile = open('E:\\gpslog.log', 'a')
            try:
                logfile.write('%s|%s|%s\n\n' % (str(time.ctime()), str(data['position']['latitude']), str(data['position']['longitude'])))
            finally:
                logfile.close()
        except:
            appuifw.note(u"Writing log error", "info")
        else:
            appuifw.note(u"Log writed", "info")
def mark():
    """Fetch the current position, upload it, then log it locally."""
    appuifw.note(u"Start getPosition ...", "info")
    fix = getPosition()
    for step in (postPosition, writeLog):
        step(fix)
    appuifw.note(u"End postPosition ...", "info")
def setting():
    """Prompt for name/password and store both in the settings database."""
    name = appuifw.query(u"Type your Name: ", "text")
    password = appuifw.query(u"Type your Password: ", "code")
    if not (name and password):
        # Either prompt was cancelled -- store nothing.
        appuifw.note(u"Cancel!", "info")
        return
    store = e32dbm.open(DB_FILE, "cf")
    store[u"name"] = name
    store[u"password"] = password
    store.close()
def flyer_input():
    """Ask for an address name and description; returns (name, info)."""
    # Queries evaluate left to right, so the prompt order is unchanged.
    return (appuifw.query(u"Type address Name: ", "text"),
            appuifw.query(u"Type address Description: ", "text"))
if __name__ == '__main__':
    # Entry point: set up the menu-driven UI and block on an Ao_lock until
    # quit() signals it. The hardware exit key is disabled via not_here().
    appuifw.app.title = u"Flying on Wheel"
    app_lock = e32.Ao_lock()
    appuifw.app.menu = [(u"Setting", setting), (u"Start Mark", mark), (u"Exit", quit)]
    appuifw.app.exit_key_handler = not_here
    app_lock.wait()
| Python |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
#Author: iamsk <skxiaonan@gmail.com>
#Last Change: 2010-04-04
#Description: mark current position and post it to flyingonwheel.appspot.com
#Notice: your phone must have the 'Location' (GPS data /position) application on,
#and receive satellite data in order to make this script work. (can be problematic indoors).
#Latitute: 纬度, Longitute: 经度
import sys
import time
import urllib
import httplib
import e32
import positioning
import appuifw
import e32dbm
DB_FILE = u"E:\\flyingonwheel.db"
MARK_URL = "http://flyingonwheel.appspot.com/mark"
def postPosition(data):
    """POST the GPS fix in ``data`` plus stored credentials and a
    user-supplied address to flyingonwheel.appspot.com/mark via httplib.
    A falsy ``data`` (no fix) sends nothing.
    """
    if data:
        db = e32dbm.open(DB_FILE, "r")
        try:
            name = db[u"name"]
            password = db[u"password"]
        finally:
            # BUG FIX: close the db even when a key lookup fails.
            db.close()
        address_name, address_info = flyer_input()
        appuifw.note(u"Posting position information ...", 'info')
        latitude = str(data['position']['latitude'])
        longitude = str(data['position']['longitude'])
        params = urllib.urlencode([('flyer_name', '%s' % name), ('password', '%s' % password), ('point', '%s, %s' % (latitude, longitude)), ('name', '%s' % address_name), ('point_info', '%s' % address_info), ])
        headers = {"Content-type": "application/x-www-form-urlencoded", "Accept": "text/plain"}
        conn = httplib.HTTPConnection("flyingonwheel.appspot.com")
        try:
            conn.request("POST", "/mark", params, headers)
            # BUG FIX: wait for and drain the server's reply; the original
            # closed the connection without calling getresponse(), so a
            # server-side failure was still reported as success.
            conn.getresponse().read()
        except:
            conn.close()
            appuifw.note(u"Could not posting information", 'info')
        else:
            conn.close()
            appuifw.note(u"Position Information Posted", 'info')
def getPosition():
    """Return the positioning fix dict, or None when none can be obtained."""
    appuifw.note(u"Getting Current Position...", 'info')
    positioning.select_module(positioning.default_module())
    positioning.set_requestors([{'type': 'service', 'format': 'application', 'data': 'position'}])
    appuifw.note(u"Retrieving GPS co-ordinates ...", 'info')
    try:
        result = positioning.position(course=1, satellites=1)
    except:
        appuifw.note(u"Could not retrieve GPS co-ordinates", 'info')
        # BUG FIX: explicit None on failure instead of falling through to
        # a possibly-unbound ``result``.
        return None
    appuifw.note(u"GPS co-ordinates retrieved", 'info')
    return result
def quit():
    """
    Change exit to here
    """
    # Menu callback: release the main Ao_lock so the __main__ block returns.
    app_lock.signal()
def not_here():
    """
    bind Exit button to this
    """
    # Hardware-exit-key handler: exiting is only allowed via the menu.
    appuifw.note(u"Exit isn't here", 'info')
def writeLog(data):
    """Append 'ctime|lat|lon' to E:\\gpslog.log and report the outcome
    with a UI note. A falsy ``data`` (no fix) writes nothing.
    """
    if data:
        try:
            # BUG FIX: the original file(...) handle was never closed.
            logfile = open('E:\\gpslog.log', 'a')
            try:
                logfile.write('%s|%s|%s\n\n' % (str(time.ctime()), str(data['position']['latitude']), str(data['position']['longitude'])))
            finally:
                logfile.close()
        except:
            appuifw.note(u"Writing log error", 'info')
        else:
            appuifw.note(u"Log writed", 'info')
def mark():
    """Fetch the current position, upload it, then log it locally."""
    appuifw.note(u"Start getPosition ...", 'info')
    fix = getPosition()
    for step in (postPosition, writeLog):
        step(fix)
    appuifw.note(u"End postPosition ...", 'info')
def setting():
    """Prompt for name/password and store both in the settings database."""
    name = appuifw.query(u"Type your Name: ", "text")
    password = appuifw.query(u"Type your Password: ", "code")
    if not (name and password):
        # Either prompt was cancelled -- store nothing.
        appuifw.note(u"Cancel!", 'info')
        return
    store = e32dbm.open(DB_FILE, "cf")
    store[u"name"] = name
    store[u"password"] = password
    store.close()
def flyer_input():
    """Ask for an address name and description; returns (name, info)."""
    return (appuifw.query(u"Type address Name: ", "text"),
            appuifw.query(u"Type address Description: ", "text"))
if __name__ == '__main__':
    # Entry point: menu-driven UI blocking on an Ao_lock until quit()
    # signals it; the hardware exit key is disabled via not_here().
    appuifw.app.title = u"Flying on Wheel"
    app_lock = e32.Ao_lock()
    appuifw.app.menu = [(u"Setting", setting), (u"Start Mark", mark), (u"Exit", quit)]
    appuifw.app.exit_key_handler = not_here
    app_lock.wait()
| Python |
# Copyright (c) 2006 Jurgen Scheible
# image upload to URL
import httplib
def upload_image_to_url():
filename = 'D:\logo.jpg'
picture = file(filename).read()
conn = httplib.HTTPConnection("www.mobilenin.com")
conn.request("POST", "/pys60/php/upload_image_to_url.php", picture)
print "upload started ..."
response = conn.getresponse()
remote_file = response.read()
conn.close()
print remote_file
upload_image_to_url()
| Python |
"""Streaming HTTP uploads module.
This module extends the standard httplib and urllib2 objects so that
iterable objects can be used in the body of HTTP requests.
In most cases all one should have to do is call :func:`register_openers()`
to register the new streaming http handlers which will take priority over
the default handlers, and then you can use iterable objects in the body
of HTTP requests.
**N.B.** You must specify a Content-Length header if using an iterable object
since there is no way to determine in advance the total size that will be
yielded, and there is no way to reset an iterator.
Example usage:
>>> from StringIO import StringIO
>>> import urllib2, poster.streaminghttp
>>> opener = poster.streaminghttp.register_openers()
>>> s = "Test file data"
>>> f = StringIO(s)
>>> req = urllib2.Request("http://localhost:5000", f, \
{'Content-Length': len(s)})
"""
import httplib, urllib2, socket
from httplib import NotConnected
# Public API of this module; the HTTPS variants are appended below only
# when the running Python was built with SSL support.
__all__ = ['StreamingHTTPConnection', 'StreamingHTTPRedirectHandler',
        'StreamingHTTPHandler', 'register_openers']
if hasattr(httplib, 'HTTPS'):
    __all__.extend(['StreamingHTTPSHandler', 'StreamingHTTPSConnection'])
class _StreamingHTTPMixin:
    """Mixin class for HTTP and HTTPS connections that implements a streaming
    send method."""
    def send(self, value):
        """Send ``value`` to the server.
        ``value`` can be a string object, a file-like object that supports
        a .read() method, or an iterable object that supports a .next()
        method.
        """
        # Based on python 2.6's httplib.HTTPConnection.send()
        if self.sock is None:
            if self.auto_open:
                self.connect()
            else:
                raise NotConnected()
        # send the data to the server. if we get a broken pipe, then close
        # the socket. we want to reconnect when somebody tries to send again.
        #
        # NOTE: we DO propagate the error, though, because we cannot simply
        # ignore the error... the caller will know if they can retry.
        if self.debuglevel > 0:
            print "send:", repr(value)
        try:
            blocksize = 8192
            # Prefer the file-like protocol: stream .read() chunks so a
            # large body never has to fit in memory at once.
            if hasattr(value, 'read') :
                if self.debuglevel > 0:
                    print "sendIng a read()able"
                data = value.read(blocksize)
                while data:
                    self.sock.sendall(data)
                    data = value.read(blocksize)
            # Otherwise accept any iterable and send each yielded block.
            elif hasattr(value, 'next'):
                if self.debuglevel > 0:
                    print "sendIng an iterable"
                for data in value:
                    self.sock.sendall(data)
            else:
                # Plain string body: a single sendall, like stock httplib.
                self.sock.sendall(value)
        except socket.error, v:
            # errno 32 == broken pipe: drop the socket so the next send()
            # reconnects via auto_open; the error still propagates.
            if v[0] == 32: # Broken pipe
                self.close()
            raise
class StreamingHTTPConnection(_StreamingHTTPMixin, httplib.HTTPConnection):
    """Subclass of `httplib.HTTPConnection` that overrides the `send()` method
    to support iterable body objects"""
class StreamingHTTPRedirectHandler(urllib2.HTTPRedirectHandler):
    """Subclass of `urllib2.HTTPRedirectHandler` that overrides the
    `redirect_request` method to properly handle redirected POST requests
    This class is required because python 2.5's HTTPRedirectHandler does
    not remove the Content-Type or Content-Length headers when requesting
    the new resource, but the body of the original request is not preserved.
    """
    # Run before the stock redirect handler so this override wins.
    handler_order = urllib2.HTTPRedirectHandler.handler_order - 1
    # From python2.6 urllib2's HTTPRedirectHandler
    def redirect_request(self, req, fp, code, msg, headers, newurl):
        """Return a Request or None in response to a redirect.
        This is called by the http_error_30x methods when a
        redirection response is received. If a redirection should
        take place, return a new Request to allow http_error_30x to
        perform the redirect. Otherwise, raise HTTPError if no-one
        else should try to handle this url. Return None if you can't
        but another Handler might.
        """
        m = req.get_method()
        if (code in (301, 302, 303, 307) and m in ("GET", "HEAD")
            or code in (301, 302, 303) and m == "POST"):
            # Strictly (according to RFC 2616), 301 or 302 in response
            # to a POST MUST NOT cause a redirection without confirmation
            # from the user (of urllib2, in this case). In practice,
            # essentially all clients do redirect in this case, so we
            # do the same.
            # be conciliant with URIs containing a space
            newurl = newurl.replace(' ', '%20')
            # Drop the body-describing headers: the redirected request is
            # re-issued without the original body.
            newheaders = dict((k, v) for k, v in req.headers.items()
                              if k.lower() not in (
                                  "content-length", "content-type")
                             )
            return urllib2.Request(newurl,
                                   headers=newheaders,
                                   origin_req_host=req.get_origin_req_host(),
                                   unverifiable=True)
        else:
            raise urllib2.HTTPError(req.get_full_url(), code, msg, headers, fp)
class StreamingHTTPHandler(urllib2.HTTPHandler):
    """Subclass of `urllib2.HTTPHandler` that uses
    StreamingHTTPConnection as its http connection class."""
    # Run before the stock HTTP handler.
    handler_order = urllib2.HTTPHandler.handler_order - 1
    def http_open(self, req):
        """Open a StreamingHTTPConnection for the given request"""
        return self.do_open(StreamingHTTPConnection, req)
    def http_request(self, req):
        """Handle a HTTP request. Make sure that Content-Length is specified
        if we're using an iterable value"""
        # Make sure that if we're using an iterable object as the request
        # body, that we've also specified Content-Length
        if req.has_data():
            data = req.get_data()
            if hasattr(data, 'read') or hasattr(data, 'next'):
                if not req.has_header('Content-length'):
                    raise ValueError(
                            "No Content-Length specified for iterable body")
        return urllib2.HTTPHandler.do_request_(self, req)
# HTTPS counterparts, defined only when this Python build has SSL support.
if hasattr(httplib, 'HTTPS'):
    class StreamingHTTPSConnection(_StreamingHTTPMixin,
            httplib.HTTPSConnection):
        """Subclass of `httplib.HTTPSConnection` that overrides the `send()`
        method to support iterable body objects"""
    class StreamingHTTPSHandler(urllib2.HTTPSHandler):
        """Subclass of `urllib2.HTTPSHandler` that uses
        StreamingHTTPSConnection as its http connection class."""
        # Run before the stock HTTPS handler.
        handler_order = urllib2.HTTPSHandler.handler_order - 1
        def https_open(self, req):
            """Open a StreamingHTTPSConnection for the given request"""
            return self.do_open(StreamingHTTPSConnection, req)
        def https_request(self, req):
            """Refuse to send a streaming body without Content-Length,
            mirroring StreamingHTTPHandler.http_request."""
            # Make sure that if we're using an iterable object as the request
            # body, that we've also specified Content-Length
            if req.has_data():
                data = req.get_data()
                # BUG FIX: match the HTTP handler's condition -- the original
                # tested "not hasattr(read) and hasattr(next)", which let
                # file-like bodies skip the Content-Length check entirely.
                if hasattr(data, 'read') or hasattr(data, 'next'):
                    if not req.has_header('Content-length'):
                        raise ValueError(
                                "No Content-Length specified for iterable body")
            return urllib2.HTTPSHandler.do_request_(self, req)
def register_openers():
    """Install the streaming HTTP(S) handlers as urllib2's global default
    opener. Returns the OpenerDirector that was installed."""
    if hasattr(httplib, "HTTPS"):
        classes = [StreamingHTTPHandler, StreamingHTTPRedirectHandler,
                   StreamingHTTPSHandler]
    else:
        classes = [StreamingHTTPHandler, StreamingHTTPRedirectHandler]
    opener = urllib2.build_opener(*classes)
    urllib2.install_opener(opener)
    return opener
| Python |
"""multipart/form-data encoding module
This module provides functions that facilitate encoding name/value pairs
as multipart/form-data suitable for a HTTP POST or PUT request.
multipart/form-data is the standard way to upload files over HTTP"""
__all__ = ['gen_boundary', 'encode_and_quote', 'MultipartParam',
'encode_string', 'encode_file_header', 'get_body_size', 'get_headers',
'multipart_encode']
# Prefer uuid (Python >= 2.5) for boundary generation; fall back to a
# SHA-1 of 160 random bits on older interpreters where uuid is missing.
try:
    import uuid
    def gen_boundary():
        """Returns a random string to use as the boundary for a message"""
        return uuid.uuid4().hex
except ImportError:
    import random, sha
    def gen_boundary():
        """Returns a random string to use as the boundary for a message"""
        bits = random.getrandbits(160)
        return sha.new(str(bits)).hexdigest()
import urllib, re, os, mimetypes
def encode_and_quote(data):
    """If ``data`` is unicode, return urllib.quote_plus(data.encode("utf-8"))
    otherwise return urllib.quote_plus(data)"""
    if data is None:
        return None
    raw = data.encode("utf-8") if isinstance(data, unicode) else data
    return urllib.quote_plus(raw)
def _strify(s):
"""If s is a unicode string, encode it to UTF-8 and return the results,
otherwise return str(s), or None if s is None"""
if s is None:
return None
if isinstance(s, unicode):
return s.encode("utf-8")
return str(s)
class MultipartParam(object):
    """Represents a single parameter in a multipart/form-data request
    ``name`` is the name of this parameter.
    If ``value`` is set, it must be a string or unicode object to use as the
    data for this parameter.
    If ``filename`` is set, it is what to say that this parameter's filename
    is. Note that this does not have to be the actual filename any local file.
    If ``filetype`` is set, it is used as the Content-Type for this parameter.
    If unset it defaults to "text/plain; charset=utf8"
    If ``filesize`` is set, it specifies the length of the file ``fileobj``
    If ``fileobj`` is set, it must be a file-like object that supports
    .read().
    Both ``value`` and ``fileobj`` must not be set, doing so will
    raise a ValueError assertion.
    If ``fileobj`` is set, and ``filesize`` is not specified, then
    the file's size will be determined first by stat'ing ``fileobj``'s
    file descriptor, and if that fails, by seeking to the end of the file,
    recording the current position as the size, and then by seeking back to the
    beginning of the file.
    """
    def __init__(self, name, value=None, filename=None, filetype=None,
            filesize=None, fileobj=None):
        # The field name is form-encoded immediately; value/filetype are
        # normalized to plain (non-unicode) strings.
        self.name = encode_and_quote(name)
        self.value = _strify(value)
        if filename is None:
            self.filename = None
        else:
            if isinstance(filename, unicode):
                # Encode with XML entities
                self.filename = filename.encode("ascii", "xmlcharrefreplace")
            else:
                self.filename = str(filename)
            # Escape backslashes and quotes so the filename is safe inside
            # the quoted Content-Disposition header value.
            self.filename = self.filename.encode("string_escape").\
                    replace('"', '\\"')
        self.filetype = _strify(filetype)
        self.filesize = filesize
        self.fileobj = fileobj
        if self.value is not None and self.fileobj is not None:
            raise ValueError("Only one of value or fileobj may be specified")
        if fileobj is not None and filesize is None:
            # Try and determine the file size
            try:
                # Cheapest route: fstat the underlying descriptor.
                self.filesize = os.fstat(fileobj.fileno()).st_size
            except (OSError, AttributeError):
                try:
                    # Fall back to seek-to-end / tell / rewind.
                    fileobj.seek(0, 2)
                    self.filesize = fileobj.tell()
                    fileobj.seek(0)
                except:
                    raise ValueError("Could not determine filesize")
    def __cmp__(self, other):
        # Compare/order by the full attribute tuple (Python 2 rich-cmp).
        attrs = ['name', 'value', 'filename', 'filetype', 'filesize', 'fileobj']
        myattrs = [getattr(self, a) for a in attrs]
        oattrs = [getattr(other, a) for a in attrs]
        return cmp(myattrs, oattrs)
    @classmethod
    def from_file(cls, paramname, filename):
        """Returns a new MultipartParam object constructed from the local
        file at ``filename``.
        ``filesize`` is determined by os.path.getsize(``filename``)
        ``filetype`` is determined by mimetypes.guess_type(``filename``)[0]
        ``filename`` is set to os.path.basename(``filename``)
        """
        return cls(paramname, filename=os.path.basename(filename),
                filetype=mimetypes.guess_type(filename)[0],
                filesize=os.path.getsize(filename),
                fileobj=open(filename, "rb"))
    @classmethod
    def from_params(cls, params):
        """Returns a list of MultipartParam objects from a sequence of
        name, value pairs, MultipartParam instances,
        or from a mapping of names to values
        The values may be strings or file objects."""
        if hasattr(params, 'items'):
            params = params.items()
        retval = []
        for item in params:
            if isinstance(item, cls):
                # Already a MultipartParam -- pass through unchanged.
                retval.append(item)
                continue
            name, value = item
            if hasattr(value, 'read'):
                # Looks like a file object
                filename = getattr(value, 'name', None)
                if filename is not None:
                    filetype = mimetypes.guess_type(filename)[0]
                else:
                    filetype = None
                retval.append(cls(name=name, filename=filename,
                        filetype=filetype, fileobj=value))
            else:
                retval.append(cls(name, value))
        return retval
    def encode_hdr(self, boundary):
        """Returns the header of the encoding of this parameter"""
        boundary = encode_and_quote(boundary)
        headers = ["--%s" % boundary]
        if self.filename:
            disposition = 'form-data; name="%s"; filename="%s"' % (self.name,
                    self.filename)
        else:
            disposition = 'form-data; name="%s"' % self.name
        headers.append("Content-Disposition: %s" % disposition)
        if self.filetype:
            filetype = self.filetype
        else:
            filetype = "text/plain; charset=utf-8"
        headers.append("Content-Type: %s" % filetype)
        if self.filesize is not None:
            headers.append("Content-Length: %i" % self.filesize)
        else:
            headers.append("Content-Length: %i" % len(self.value))
        # The two empty entries produce the blank line that separates the
        # part headers from the part body.
        headers.append("")
        headers.append("")
        return "\r\n".join(headers)
    def encode(self, boundary):
        """Returns the string encoding of this parameter"""
        if self.value is None:
            value = self.fileobj.read()
        else:
            value = self.value
        # A body line identical to the boundary would corrupt the MIME
        # framing, so refuse to encode it.
        if re.search("^--%s$" % re.escape(boundary), value, re.M):
            raise ValueError("boundary found in encoded string")
        return "%s%s\r\n" % (self.encode_hdr(boundary), value)
    def iter_encode(self, boundary, blocksize=4096):
        """Yields the encoding of this parameter
        If self.fileobj is set, then blocks of ``blocksize`` bytes are read and
        yielded."""
        if self.value is not None:
            yield self.encode(boundary)
        else:
            yield self.encode_hdr(boundary)
            last_block = ""
            encoded_boundary = "--%s" % encode_and_quote(boundary)
            boundary_exp = re.compile("^%s$" % re.escape(encoded_boundary),
                    re.M)
            while True:
                block = self.fileobj.read(blocksize)
                if not block:
                    yield "\r\n"
                    break
                last_block += block
                if boundary_exp.search(last_block):
                    raise ValueError("boundary found in file data")
                # Keep only a tail long enough to catch a boundary that
                # straddles two successive read() blocks.
                last_block = last_block[-len(encoded_boundary)-2:]
                yield block
    def get_size(self, boundary):
        """Returns the size in bytes that this param will be when encoded
        with the given boundary."""
        if self.filesize is not None:
            valuesize = self.filesize
        else:
            valuesize = len(self.value)
        # +2 accounts for the trailing \r\n appended after the value.
        return len(self.encode_hdr(boundary)) + 2 + valuesize
def encode_string(boundary, name, value):
    """Returns ``name`` and ``value`` encoded as a multipart/form-data
    variable. ``boundary`` is the boundary string used throughout
    a single request to separate variables."""
    param = MultipartParam(name, value)
    return param.encode(boundary)
def encode_file_header(boundary, paramname, filesize, filename=None,
        filetype=None):
    """Returns the leading data for a multipart/form-data field that contains
    file data.
    ``boundary`` is the boundary string used throughout a single request to
    separate variables.
    ``paramname`` is the name of the variable in this request.
    ``filesize`` is the size of the file data.
    ``filename`` if specified is the filename to give to this field. This
    field is only useful to the server for determining the original filename.
    ``filetype`` if specified is the MIME type of this file.
    The actual file data should be sent after this header has been sent.
    """
    param = MultipartParam(paramname, filesize=filesize, filename=filename,
                           filetype=filetype)
    return param.encode_hdr(boundary)
def get_body_size(params, boundary):
    """Returns the number of bytes that the multipart/form-data encoding
    of ``params`` will be."""
    # len(boundary) + 6 covers the closing "--boundary--\r\n" delimiter.
    total = len(boundary) + 6
    for param in MultipartParam.from_params(params):
        total += param.get_size(boundary)
    return total
def get_headers(params, boundary):
    """Returns a dictionary with Content-Type and Content-Length headers
    for the multipart/form-data encoding of ``params``."""
    quoted = urllib.quote_plus(boundary)
    return {
        'Content-Type': "multipart/form-data; boundary=%s" % quoted,
        'Content-Length': get_body_size(params, quoted),
    }
def multipart_encode(params, boundary=None):
    """Encode ``params`` as multipart/form-data.
    ``params`` should be a sequence of (name, value) pairs or MultipartParam
    objects, or a mapping of names to values.
    Values are either strings parameter values, or file-like objects to use as
    the parameter value. The file-like objects must support .read() and either
    .fileno() or both .seek() and .tell().
    If ``boundary`` is set, then it is used as the MIME boundary. Otherwise
    a randomly generated boundary will be used. In either case, if the
    boundary string appears in the parameter values a ValueError will be
    raised.
    Returns a tuple of `datagen`, `headers`, where `datagen` is a
    generator that will yield blocks of data that make up the encoded
    parameters, and `headers` is a dictionary with the associated
    Content-Type and Content-Length headers.
    Examples:
    >>> datagen, headers = multipart_encode( [("key", "value1"), ("key", "value2")] )
    >>> s = "".join(datagen)
    >>> assert "value2" in s and "value1" in s
    >>> p = MultipartParam("key", "value2")
    >>> datagen, headers = multipart_encode( [("key", "value1"), p] )
    >>> s = "".join(datagen)
    >>> assert "value2" in s and "value1" in s
    >>> datagen, headers = multipart_encode( {"key": "value1"} )
    >>> s = "".join(datagen)
    >>> assert "value2" not in s and "value1" in s
    """
    if boundary is None:
        boundary = gen_boundary()
    else:
        boundary = urllib.quote_plus(boundary)
    # Compute headers before params is normalized; get_headers re-derives
    # the body size from the raw params.
    headers = get_headers(params, boundary)
    params = MultipartParam.from_params(params)
    def yielder():
        """generator function to yield multipart/form-data representation
        of parameters"""
        for param in params:
            for block in param.iter_encode(boundary):
                yield block
        # Closing delimiter: the boundary with a trailing '--'.
        yield "--%s--\r\n" % boundary
    return yielder(), headers
| Python |
# Copyright (c) 2010 Chris AtLee
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""poster module
Support for streaming HTTP uploads, and multipart/form-data encoding
```poster.version``` is a 3-tuple of integers representing the version number.
New releases of poster will always have a version number that compares greater
than an older version of poster.
New in version 0.6."""
import poster.streaminghttp
import poster.encode
version = (0, 6, 0) # Thanks JP!
| Python |
import time
import threading
class timer(threading.Thread):
def __init__(self, interval):
threading.Thread.__init__(self)
self.interval = interval
def run(self):
print 'berfore'
time.sleep(self.interval)
print 'after'
def test():
    # Start a 10 s daemon timer, then keep the main thread alive for 15 s
    # so the timer's second print happens before the daemon is killed.
    threadone = timer(10)
    threadone.setDaemon(True)
    threadone.start()
    time.sleep(15)
if __name__ == '__main__':
    # Manual demonstration; blocks for about 15 seconds.
    test()
| Python |
########################################################
# Send an SMS with your current GPS co-ordinates using a
# Nokia SmartPhoneand PyS60
# Timothy Makobu, 01-01-2009
########################################################
import positioning
import messaging
import appuifw
import sys
# Get your GPS co-ordinates #
def getCoordinates():
    """Return (latitude, longitude) from the default positioning module;
    exits the process when no fix can be obtained."""
    positioning.select_module(positioning.default_module())
    positioning.set_requestors([{'type': 'service', 'format': 'application', 'data': 'position'}])
    try:
        sys.stdout.write('Retrieving GPS co-ordinates ...\n')
        data = positioning.position(course=1, satellites=1)
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt
        # still propagate normally.
        sys.stdout.write('Could not retrieve GPS co-ordinates\n\n\n')
        sys.exit(-1)
    sys.stdout.write('GPS co-ordinates retrieved\n')
    return (data['position']['latitude'], data['position']['longitude'])
# Send your GPS co-ordinates as an SMS #
def sendCoordinates(coords, number):
    """SMS the (latitude, longitude) tuple ``coords`` to ``number``;
    exits the process when sending fails."""
    message = u'I\'m at location:\nLatitute --> %f\nLongitute --> %f \n' % coords
    try:
        sys.stdout.write('Sending SMS ...\n')
        messaging.sms_send(number, message)
    except Exception:
        # Narrowed from a bare except so SystemExit isn't swallowed.
        sys.stdout.write('Could not send SMS \n\n\n')
        sys.exit(-1)
    sys.stdout.write('SMS sent \n')
if __name__ == '__main__':
    # NOTE(review): the hard-coded preset number is truthy, so the
    # interactive appuifw.query prompt below is effectively dead code;
    # set the preset to 0/None to re-enable prompting.
    presetPhoneNumber = 15991660074
    phoneNumber = presetPhoneNumber or appuifw.query(u'Enter number: ', 'text')
    if not phoneNumber:
        sys.stdout.write('No number entered; exiting ...\n\n\n')
        sys.exit(-1)
    sendCoordinates(getCoordinates(), phoneNumber)
| Python |
import appuifw
import e32
import graphics
import thread
import time
class Main:
    """Progress-bar demo: a worker thread updates self.indice while the
    UI loop repaints the bar on the canvas."""
    def __init__(self):
        appuifw.app.title = u'Positioning'
        appuifw.app.body = self.canvas = appuifw.Canvas()
        # BUG FIX: the handler must be set on appuifw.app; the original
        # assigned to appuifw.exit_key_handler (an unused module attribute),
        # so the exit key never stopped OnLoop.
        appuifw.app.exit_key_handler = self.OnExit
        appuifw.app.menu = [(u'Calcola', self.OnCalcola)]
        self.loop = 1       # OnLoop keeps redrawing while truthy
        self.indice = 0     # progress percentage 0..100
        self.img = graphics.Image.new(self.canvas.size)
        self.OnLoop()
    def OnCalcola(self):
        """Menu callback: ask for N and start the Fibonacci worker thread."""
        numero = appuifw.query(u'Inserire un Numero', 'number')
        thread.start_new_thread(self.fibonacci, (numero, ))
    def fibonacci(self, n):
        """Worker: iteratively compute fib(n), publishing progress."""
        a, b = 0, 1
        for i in range(n):
            a, b = b, a + b
            self.indice = ((i+1)*100)/n
    def OnExit(self):
        """Stop the redraw loop; __init__ then returns."""
        self.loop = 0
    def OnLoop(self):
        """Redraw loop: repaint the progress bar ~10 times per second."""
        while self.loop:
            e32.ao_sleep(0.1)
            self.img.clear(0)
            self.img.line([37, 50, 137, 50], 0xffffff, width=20)
            self.img.line([37, 50, 37 + self.indice, 50], 0xfffc0d, width=20)
            self.img.text((80, 55), str(self.indice) + u'%', 0x000000)
            self.canvas.blit(self.img)
if __name__ == '__main__':
    # Constructing Main blocks inside OnLoop() until the exit handler fires.
    main = Main()
| Python |
# Copyright (c) 2006 Jurgen Scheible
# this script lets you upload an XML file to a server.
# use both the httplib, urllib libraries
import httplib, urllib
import appuifw
def senddata():
    """Read c:/xml_btscan.xml and POST it (form-encoded) to the example
    server, reporting success or failure with a UI note."""
    # read the xml file from the c drive and put it into a variable
    f = open('c:/xml_btscan.xml', 'rt')
    try:
        xmlfile = f.read()
    finally:
        # BUG FIX: close the handle even if read() fails.
        f.close()
    # the parameter 'data' carries the file content
    params = urllib.urlencode({'data': xmlfile, 'eggs': 0, 'bacon': 0})
    headers = {"Content-type": "application/x-www-form-urlencoded",
               "Accept": "text/plain"}
    try:
        # connect to the server: put here your server URL
        conn = httplib.HTTPConnection("www.exampleserver.com")
        # POST to the handler script (adjust the path for your server)
        conn.request("POST", "/examplefolder/example.php", params, headers)
        response = conn.getresponse()
        conn.close()
        appuifw.note(u"Data sent", "info")
    except:
        appuifw.note(u"ok something went wrong", "info")
# Ask for confirmation, then upload immediately at import time.
if appuifw.query(u"Upload XML file?","query") == True:
    senddata()
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
#Author: iamsk <skxiaonan@gmail.com>
#Last Change: 2010-04-04
#Description: mark current position and post it to flyingonwheel.appspot.com
#Notice: your phone must have the 'Location' (GPS data /position) application on,
#and receive satellite data in order to make this script work. (can be problematic indoors).
#Latitute: 纬度, Longitute: 经度
import sys
import time
import urllib
import urllib2
import e32
import positioning
import appuifw
import e32dbm
DB_FILE = u"E:\\flyingonwheel.db"
MARK_URL = "http://flyingonwheel.appspot.com/mark"
def postPosition(data):
    """POST the GPS fix in ``data`` to MARK_URL together with the stored
    credentials and a user-supplied address name/description.

    ``data`` is the dict returned by positioning.position(); a falsy value
    means no fix was obtained and nothing is sent.
    """
    if data:
        # Read the name and password from db
        db = e32dbm.open(DB_FILE, "r")
        try:
            name = db[u"name"]
            password = db[u"password"]
        finally:
            # BUG FIX: close the db even if a key lookup fails.
            db.close()
        address_name, address_info = flyer_input()
        appuifw.note(u"Posting position information ...", "info")
        latitude = str(data['position']['latitude'])
        longitude = str(data['position']['longitude'])
        params = urllib.urlencode([('flyer_name', '%s' % name), ('password', '%s' % password), ('point', '%s, %s' % (latitude, longitude)), ('name', '%s' % address_name), ('point_info', '%s' % address_info), ])
        req = urllib2.Request(MARK_URL)
        try:
            answer = urllib2.urlopen(req, params)
            # BUG FIX: release the connection once the POST has completed.
            answer.close()
        except:
            appuifw.note(u"Could not posting information", "info")
        else:
            appuifw.note(u"Position Information Posted", "info")
def getPosition():
    """
    Get GPS co-ordinates from the phone's positioning service.

    Returns the dict produced by positioning.position() on success, or
    None when the co-ordinates could not be retrieved.  (Previously the
    failure path fell through to `return result` with `result` unbound,
    raising UnboundLocalError on top of the positioning error.)
    """
    appuifw.note(u"Getting Current Position...", "info")
    # print 'positioning.select_module'
    positioning.select_module(positioning.default_module())
    # print 'positioning.set_requestors'
    positioning.set_requestors([{'type': 'service', 'format': 'application', 'data': 'position'}])
    # print 'positioning.position'
    appuifw.note(u"Retrieving GPS co-ordinates ...", "info")
    result = None  # ensure a defined return value even if positioning fails
    try:
        result = positioning.position(course=1, satellites=1)
    except:
        appuifw.note(u"Could not retrieve GPS co-ordinates", "info")
    else:
        appuifw.note(u"GPS co-ordinates retrieved", "info")
    # positioning.stop_position()
    return result
def quit():
    """
    Menu action: release the application lock so the main loop
    (app_lock.wait() at module level) exits.
    """
    app_lock.signal()
def not_here():
    """
    Exit-key handler: tell the user the hardware Exit key is disabled
    (exit goes through the menu's Exit entry instead).
    """
    appuifw.note(u"Exit isn't here", "info")
def writeLog(data):
    """
    Append a timestamped latitude/longitude record to E:\\gpslog.log.

    data -- positioning result dict; nothing is written when it is
    empty/None.
    """
    if data:
        try:
            # Open/close the log explicitly instead of via the anonymous
            # file(...) expression, which leaked the handle until GC.
            log = open('E:\\gpslog.log', 'a')
            try:
                log.write('%s|%s|%s\n\n' % (str(time.ctime()), str(data['position']['latitude']), str(data['position']['longitude'])))
            finally:
                log.close()
        except:
            appuifw.note(u"Writing log error", "info")
        else:
            appuifw.note(u"Log writed", "info")
def mark():
    """
    Menu action: read the current GPS position, post it to the server
    and append it to the local log file.
    """
    appuifw.note(u"Start getPosition ...", "info")
    data = getPosition()
    postPosition(data)
    writeLog(data)
    appuifw.note(u"End postPosition ...", "info")
def setting():
    """
    Menu action: prompt for the account name and password and store them
    in the local db.  Cancelling either prompt aborts without writing.
    """
    name = appuifw.query(u"Type your Name: ", "text")
    password = appuifw.query(u"Type your Password: ", "code")
    if name and password:
        # "cf" opens the db writable, creating the file if necessary.
        db = e32dbm.open(DB_FILE, "cf")
        db[u"name"] = name
        db[u"password"] = password
        db.close()
    else:
        appuifw.note(u"Cancel!", "info")
def flyer_input():
    """
    Prompt for an address name and an address description.

    Returns the (name, description) tuple; either element may be None
    if the user cancels the corresponding prompt.
    """
    address_name = appuifw.query(u"Type address Name: ", "text")
    address_info = appuifw.query(u"Type address Description: ", "text")
    return address_name, address_info
if __name__ == '__main__':
    appuifw.app.title = u"Flying on Wheel"
    # Block the script until quit() signals the lock from the menu.
    app_lock = e32.Ao_lock()
    appuifw.app.menu = [(u"Setting", setting), (u"Start Mark", mark), (u"Exit", quit)]
    appuifw.app.exit_key_handler = not_here
    app_lock.wait()
| Python |
# Quickfix for the missing module in the Google App Engine using Python 2.6.
#
# Koen Bollen <meneer koenbollen nl>
# 2009 GPL
import multiprocessing
# vim: expandtab shiftwidth=4 softtabstop=4 textwidth=79:
| Python |
#!/usr/bin/python
import sys, os, re, platform
from os.path import exists, abspath, dirname, join, isdir
try:
# Allow use of setuptools so eggs can be built.
from setuptools import setup, Command
except ImportError:
from distutils.core import setup, Command
from distutils.extension import Extension
from distutils.errors import *
OFFICIAL_BUILD = 9999
def _print(s):
# Python 2/3 compatibility
sys.stdout.write(s + '\n')
class VersionCommand(Command):
    """setup.py command: print the pyodbc version determined by get_version()."""

    description = "prints the pyodbc version, determined from git"
    user_options = []

    def initialize_options(self):
        # Required distutils hook; also silences the command's own chatter.
        self.verbose = 0

    def finalize_options(self):
        pass

    def run(self):
        version_str, version = get_version()
        sys.stdout.write(version_str + '\n')
class TagsCommand(Command):
    """setup.py command: build an emacs TAGS file over the C++ sources in src/."""

    description = 'runs etags'
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        # Windows versions of etags do not seem to expand wildcards (which Unix shells normally do for Unix utilities),
        # so find all of the files ourselves.
        files = [ join('src', f) for f in os.listdir('src') if f.endswith(('.h', '.cpp')) ]
        cmd = 'etags %s' % ' '.join(files)
        return os.system(cmd)
def main():
    """Assemble the setup() keyword arguments and run the build/install."""
    version_str, version = get_version()

    settings = get_compiler_settings(version_str)

    files = [ abspath(join('src', f)) for f in os.listdir('src') if f.endswith('.cpp') ]

    if exists('MANIFEST'):
        # Remove a stale manifest so distutils regenerates it from MANIFEST.in.
        os.remove('MANIFEST')

    kwargs = {
        'name': "pyodbc",
        'version': version_str,
        'description': "DB API Module for ODBC",
        'long_description': ('A Python DB API 2 module for ODBC. This project provides an up-to-date, '
                             'convenient interface to ODBC using native data types like datetime and decimal.'),
        'maintainer': "Michael Kleehammer",
        'maintainer_email': "michael@kleehammer.com",
        'ext_modules': [Extension('pyodbc', files, **settings)],
        'license': 'MIT',
        'classifiers': ['Development Status :: 5 - Production/Stable',
                        'Intended Audience :: Developers',
                        'Intended Audience :: System Administrators',
                        'License :: OSI Approved :: MIT License',
                        'Operating System :: Microsoft :: Windows',
                        'Operating System :: POSIX',
                        'Programming Language :: Python',
                        'Programming Language :: Python :: 2',
                        'Programming Language :: Python :: 3',
                        'Topic :: Database',
                        ],
        'url': 'http://code.google.com/p/pyodbc',
        'download_url': 'http://code.google.com/p/pyodbc/downloads/list',
        'cmdclass': { 'version' : VersionCommand,
                      'tags' : TagsCommand }
        }

    if sys.hexversion >= 0x02060000:
        # The UAC flag for the Windows installer exists only on Python 2.6+.
        kwargs['options'] = {
            'bdist_wininst': {'user_access_control' : 'auto'}
            }

    setup(**kwargs)
def get_compiler_settings(version_str):
    """
    Return the Extension() keyword arguments (libraries, define_macros,
    compiler flags) appropriate for the current platform.
    """
    settings = { 'libraries': [],
                 'define_macros' : [ ('PYODBC_VERSION', version_str) ] }

    # This isn't the best or right way to do this, but I don't see how someone is supposed to sanely subclass the build
    # command.
    for option in ['assert', 'trace', 'leak-check']:
        try:
            sys.argv.remove('--%s' % option)
            settings['define_macros'].append(('PYODBC_%s' % option.replace('-', '_').upper(), 1))
        except ValueError:
            pass

    if os.name == 'nt':
        settings['extra_compile_args'] = ['/Wall',
                                          '/wd4668',
                                          '/wd4820',
                                          '/wd4711', # function selected for automatic inline expansion
                                          '/wd4100', # unreferenced formal parameter
                                          '/wd4127', # "conditional expression is constant" testing compilation constants
                                          '/wd4191', # casts to PYCFunction which doesn't have the keywords parameter
                                          ]
        settings['libraries'].append('odbc32')
        settings['libraries'].append('advapi32')

        if '--debug' in sys.argv:
            sys.argv.remove('--debug')
            settings['extra_compile_args'].extend('/Od /Ge /GS /GZ /RTC1 /Wp64 /Yd'.split())

    elif os.environ.get("OS", '').lower().startswith('windows'):
        # Windows Cygwin (posix on windows)
        # OS name not windows, but still on Windows
        settings['libraries'].append('odbc32')

    elif sys.platform == 'darwin':
        # OS/X now ships with iODBC.
        settings['libraries'].append('iodbc')

        # Apple has decided they won't maintain the iODBC system in OS/X and has added deprecation warnings in 10.8.
        # For now target 10.7 to eliminate the warnings.

        # Python functions take a lot of 'char *' that really should be const.  gcc complains about this *a lot*
        settings['extra_compile_args'] = ['-Wno-write-strings', '-Wno-deprecated-declarations']

        # define_macros entries must be (name, value) 2-tuples; the previous
        # 1-tuple made distutils fail during compilation.  A value of None
        # emits a bare -DMAC_OS_X_VERSION_10_7.
        settings['define_macros'].append( ('MAC_OS_X_VERSION_10_7', None) )

    else:
        # Other posix-like: Linux, Solaris, etc.

        # Python functions take a lot of 'char *' that really should be const.  gcc complains about this *a lot*
        settings['extra_compile_args'] = ['-Wno-write-strings']

        # What is the proper way to detect iODBC, MyODBC, unixODBC, etc.?
        settings['libraries'].append('odbc')

    return settings
def add_to_path():
    """
    Prepends the build directory to the path so pyodbcconf can be imported without installing it.

    Raises SystemExit when no matching build directory contains the library.
    """
    # Now run the utility
    import imp
    library_exts  = [ t[0] for t in imp.get_suffixes() if t[-1] == imp.C_EXTENSION ]
    library_names = [ 'pyodbcconf%s' % ext for ext in library_exts ]

    # Only go into directories that match our version number.
    dir_suffix = '-%s.%s' % (sys.version_info[0], sys.version_info[1])

    build = join(dirname(abspath(__file__)), 'build')

    for top, dirs, files in os.walk(build):
        # Prune in place: rebinding `dirs` to a new list (as before) does NOT
        # stop os.walk from descending into non-matching directories.
        dirs[:] = [ d for d in dirs if d.endswith(dir_suffix) ]
        for name in library_names:
            if name in files:
                sys.path.insert(0, top)
                return

    raise SystemExit('Did not find pyodbcconf')
def get_version():
    """
    Returns the version of the product as (description, [major,minor,micro,beta]).

    If the release is official, `beta` will be 9999 (OFFICIAL_BUILD).

      1. If in a git repository, use the latest tag (git describe).
      2. If in an unzipped source directory (from setup.py sdist),
         read the version from the PKG-INFO file.
      3. Use 3.0.0.0 and complain a lot.
    """
    # My goal is to (1) provide accurate tags for official releases but (2) not have to manage tags for every test
    # release.
    #
    # Official versions are tagged using 3 numbers: major, minor, micro.  A build of a tagged version should produce
    # the version using just these pieces, such as 2.1.4.
    #
    # Unofficial versions are "working towards" the next version.  So the next unofficial build after 2.1.4 would be a
    # beta for 2.1.5.  Using 'git describe' we can find out how many changes have been made after 2.1.4 and we'll use
    # this count as the beta id (beta1, beta2, etc.)
    #
    # Since the 4 numbers are put into the Windows DLL, we want to make sure the beta versions sort *before* the
    # official, so we set the official build number to 9999, but we don't show it.

    name = None     # branch/feature name. Should be None for official builds.
    numbers = None  # The 4 integers that make up the version.

    # If this is a source release the version will have already been assigned and be in the PKG-INFO file.
    name, numbers = _get_version_pkginfo()

    # If not a source release, we should be in a git repository. Look for the latest tag.
    if not numbers:
        name, numbers = _get_version_git()

    if not numbers:
        _print('WARNING: Unable to determine version. Using 3.0.0.0')
        name, numbers = '3.0.0-unsupported', [3,0,0,0]

    return name, numbers
def _get_version_pkginfo():
filename = join(dirname(abspath(__file__)), 'PKG-INFO')
if exists(filename):
re_ver = re.compile(r'^Version: \s+ (\d+)\.(\d+)\.(\d+) (?: -beta(\d+))?', re.VERBOSE)
for line in open(filename):
match = re_ver.search(line)
if match:
name = line.split(':', 1)[1].strip()
numbers = [int(n or 0) for n in match.groups()[:3]]
numbers.append(int(match.group(4) or OFFICIAL_BUILD)) # don't use 0 as a default for build
return name, numbers
return None, None
def _get_version_git():
    """
    Determine the version from `git describe` against tags matching 3.*.

    Returns (name, numbers) like get_version(), or (None, None) when git
    fails or the description does not look like a version tag.
    """
    n, result = getoutput('git describe --tags --match 3.*')
    if n:
        _print('WARNING: git describe failed with: %s %s' % (n, result))
        return None, None

    # Descriptions look like "3.0.1" (exact tag) or "3.0.1-4-g1a2b3c4"
    # (4 commits past the tag).  The dots must be escaped in VERBOSE mode:
    # a bare "." would also match garbage like "3x0y1".
    match = re.match(r'(\d+) \. (\d+) \. (\d+) (?: -(\d+)-g[0-9a-z]+)?', result, re.VERBOSE)
    if not match:
        return None, None

    numbers = [int(n or OFFICIAL_BUILD) for n in match.groups()]

    if numbers[-1] == OFFICIAL_BUILD:
        # Exact tag build: use the tagged 3 numbers as-is.
        name = '%s.%s.%s' % tuple(numbers[:3])
    else:
        # This is a beta of the next micro release, so increment the micro number to reflect this.
        numbers[-2] += 1
        name = '%s.%s.%s-beta%02d' % tuple(numbers)

    n, result = getoutput('git branch')
    branch = re.search(r'\* (\w+)', result).group(1)
    if branch != 'master' and not re.match(r'^v\d+$', branch):
        name = branch + '-' + name

    return name, numbers
def getoutput(cmd):
    """Run *cmd* through a shell; return (exit status, output without trailing newlines)."""
    pipe = os.popen(cmd, 'r')
    captured = pipe.read()
    exit_code = pipe.close()
    if not exit_code:
        # popen.close() returns None on success; normalize to 0.
        exit_code = 0
    return exit_code, captured.rstrip('\n')
# Script entry point: build/install pyodbc.
if __name__ == '__main__':
    main()
| Python |
import os, sys, platform
from os.path import join, dirname, abspath, basename
import unittest
def add_to_path():
    """
    Prepends the build directory to the path so that newly built pyodbc libraries are used, allowing it to be tested
    without installing it.
    """
    # We don't know the build subdirectory name in advance, so walk the
    # build tree until we find the compiled extension for this interpreter.
    import imp
    suffixes = [ suffix for (suffix, mode, kind) in imp.get_suffixes() if kind == imp.C_EXTENSION ]
    candidates = [ 'pyodbc%s' % suffix for suffix in suffixes ]

    # Only descend into directories built for this Python version.
    wanted_suffix = '-%s.%s' % sys.version_info[:2]

    build_dir = join(dirname(dirname(abspath(__file__))), 'build')

    for root, subdirs, filenames in os.walk(build_dir):
        subdirs[:] = [ s for s in subdirs if s.endswith(wanted_suffix) ]
        if any(candidate in filenames for candidate in candidates):
            sys.path.insert(0, root)
            return

    print('Did not find the pyodbc library in the build directory.  Will use an installed version.')
def print_library_info(cnxn):
    """
    Print diagnostic information (Python, pyodbc, ODBC driver, OS) for
    the given open pyodbc connection.
    """
    import pyodbc
    print('python: %s' % sys.version)
    print('pyodbc: %s %s' % (pyodbc.version, os.path.abspath(pyodbc.__file__)))
    print('odbc: %s' % cnxn.getinfo(pyodbc.SQL_ODBC_VER))
    print('driver: %s %s' % (cnxn.getinfo(pyodbc.SQL_DRIVER_NAME), cnxn.getinfo(pyodbc.SQL_DRIVER_VER)))
    print(' supports ODBC version %s' % cnxn.getinfo(pyodbc.SQL_DRIVER_ODBC_VER))
    print('os: %s' % platform.system())
    print('unicode: Py_Unicode=%s SQLWCHAR=%s' % (pyodbc.UNICODE_SIZE, pyodbc.SQLWCHAR_SIZE))

    if platform.system() == 'Windows':
        print(' %s' % ' '.join([s for s in platform.win32_ver() if s]))
def load_tests(testclass, name, *args):
"""
Returns a TestSuite for tests in `testclass`.
name
Optional test name if you only want to run 1 test. If not provided all tests in `testclass` will be loaded.
args
Arguments for the test class constructor. These will be passed after the test method name.
"""
if name:
if not name.startswith('test_'):
name = 'test_%s' % name
names = [ name ]
else:
names = [ method for method in dir(testclass) if method.startswith('test_') ]
return unittest.TestSuite([ testclass(name, *args) for name in names ])
def load_setup_connection_string(section):
    """
    Attempts to read the default connection string from the setup.cfg file.

    If the file does not exist or if it exists but does not contain the connection string, None is returned.  If the
    file exists but cannot be parsed, an exception is raised.
    """
    # `abspath` is imported here too: the old code relied on it leaking in
    # from the module-level os.path import.
    from os.path import exists, join, dirname, abspath
    # SafeConfigParser was removed in Python 3.12; ConfigParser has provided
    # the same "safe" interpolation behavior since Python 3.2.
    from configparser import ConfigParser

    FILENAME = 'setup.cfg'
    KEY = 'connection-string'

    path = join(dirname(dirname(abspath(__file__))), 'tmp', FILENAME)

    if exists(path):
        try:
            p = ConfigParser()
            p.read(path)
        except:
            raise SystemExit('Unable to parse %s: %s' % (path, sys.exc_info()[1]))

        if p.has_option(section, KEY):
            return p.get(section, KEY)

    return None
| Python |
#!/usr/bin/python
# Unit tests for PostgreSQL on Linux (Fedora)
# This is a stripped down copy of the SQL Server tests.
import sys, os, re
import unittest
from decimal import Decimal
from testutils import *
_TESTSTR = '0123456789-abcdefghijklmnopqrstuvwxyz-'
def _generate_test_string(length):
    """
    Returns a string of composed of `seed` to make a string `length` characters long.

    To enhance performance, there are 3 ways data is read, based on the length of the value, so most data types are
    tested with 3 lengths.  This function helps us generate the test data.

    We use a recognizable data set instead of a single character to make it less likely that "overlap" errors will
    be hidden and to help us manually identify where a break occurs.
    """
    if length <= len(_TESTSTR):
        return _TESTSTR[:length]

    # Floor division: plain '/' yields a float on Python 3, and a float
    # repeat count breaks the sequence-repetition below.
    c = (length + len(_TESTSTR) - 1) // len(_TESTSTR)
    v = _TESTSTR * c
    return v[:length]
class PGTestCase(unittest.TestCase):
    """PostgreSQL-via-ODBC tests (a stripped-down copy of the SQL Server suite)."""

    # These are from the C++ code. Keep them up to date.
    # If we are reading a binary, string, or unicode value and do not know how large it is, we'll try reading 2K into a
    # buffer on the stack. We then copy into a new Python object.
    SMALL_READ = 2048
    # A read guaranteed not to fit in the MAX_STACK_STACK stack buffer, but small enough to be used for varchar (4K max).
    LARGE_READ = 4000

    SMALL_STRING = _generate_test_string(SMALL_READ)
    LARGE_STRING = _generate_test_string(LARGE_READ)

    def __init__(self, connection_string, ansi, method_name):
        unittest.TestCase.__init__(self, method_name)
        self.connection_string = connection_string
        self.ansi = ansi

    def setUp(self):
        # Fresh connection and cursor per test; drop leftover tables t0..t2.
        self.cnxn = pyodbc.connect(self.connection_string, ansi=self.ansi)
        self.cursor = self.cnxn.cursor()

        for i in range(3):
            try:
                self.cursor.execute("drop table t%d" % i)
                self.cnxn.commit()
            except:
                pass

        self.cnxn.rollback()

    def tearDown(self):
        try:
            self.cursor.close()
            self.cnxn.close()
        except:
            # If we've already closed the cursor or connection, exceptions are thrown.
            pass

    def test_datasources(self):
        p = pyodbc.dataSources()
        self.assert_(isinstance(p, dict))

    def test_getinfo_string(self):
        value = self.cnxn.getinfo(pyodbc.SQL_CATALOG_NAME_SEPARATOR)
        self.assert_(isinstance(value, str))

    def test_getinfo_bool(self):
        value = self.cnxn.getinfo(pyodbc.SQL_ACCESSIBLE_TABLES)
        self.assert_(isinstance(value, bool))

    def test_getinfo_int(self):
        value = self.cnxn.getinfo(pyodbc.SQL_DEFAULT_TXN_ISOLATION)
        self.assert_(isinstance(value, (int, long)))

    def test_getinfo_smallint(self):
        value = self.cnxn.getinfo(pyodbc.SQL_CONCAT_NULL_BEHAVIOR)
        self.assert_(isinstance(value, int))

    def test_negative_float(self):
        value = -200
        self.cursor.execute("create table t1(n float)")
        self.cursor.execute("insert into t1 values (?)", value)
        result = self.cursor.execute("select n from t1").fetchone()[0]
        self.assertEqual(value, result)

    def _test_strtype(self, sqltype, value, colsize=None):
        """
        The implementation for string, Unicode, and binary tests.
        """
        assert colsize is None or (value is None or colsize >= len(value))

        if colsize:
            sql = "create table t1(s %s(%s))" % (sqltype, colsize)
        else:
            sql = "create table t1(s %s)" % sqltype

        self.cursor.execute(sql)
        self.cursor.execute("insert into t1 values(?)", value)
        v = self.cursor.execute("select * from t1").fetchone()[0]
        self.assertEqual(type(v), type(value))

        if value is not None:
            self.assertEqual(len(v), len(value))

        self.assertEqual(v, value)

    #
    # varchar
    #

    def test_empty_varchar(self):
        self._test_strtype('varchar', '', self.SMALL_READ)

    def test_null_varchar(self):
        self._test_strtype('varchar', None, self.SMALL_READ)

    def test_large_null_varchar(self):
        # There should not be a difference, but why not find out?
        self._test_strtype('varchar', None, self.LARGE_READ)

    def test_small_varchar(self):
        self._test_strtype('varchar', self.SMALL_STRING, self.SMALL_READ)

    def test_large_varchar(self):
        self._test_strtype('varchar', self.LARGE_STRING, self.LARGE_READ)

    def test_varchar_many(self):
        self.cursor.execute("create table t1(c1 varchar(300), c2 varchar(300), c3 varchar(300))")

        v1 = 'ABCDEFGHIJ' * 30
        v2 = '0123456789' * 30
        v3 = '9876543210' * 30

        self.cursor.execute("insert into t1(c1, c2, c3) values (?,?,?)", v1, v2, v3);
        row = self.cursor.execute("select c1, c2, c3 from t1").fetchone()

        self.assertEqual(v1, row.c1)
        self.assertEqual(v2, row.c2)
        self.assertEqual(v3, row.c3)

    def test_small_decimal(self):
        # value = Decimal('1234567890987654321')
        value = Decimal('100010')  # (I use this because the ODBC docs tell us how the bytes should look in the C struct)
        self.cursor.execute("create table t1(d numeric(19))")
        self.cursor.execute("insert into t1 values(?)", value)
        v = self.cursor.execute("select * from t1").fetchone()[0]
        self.assertEqual(type(v), Decimal)
        self.assertEqual(v, value)

    def test_small_decimal_scale(self):
        # The same as small_decimal, except with a different scale. This value exactly matches the ODBC documentation
        # example in the C Data Types appendix.
        value = '1000.10'
        value = Decimal(value)
        self.cursor.execute("create table t1(d numeric(20,6))")
        self.cursor.execute("insert into t1 values(?)", value)
        v = self.cursor.execute("select * from t1").fetchone()[0]
        self.assertEqual(type(v), Decimal)
        self.assertEqual(v, value)

    def test_negative_decimal_scale(self):
        value = Decimal('-10.0010')
        self.cursor.execute("create table t1(d numeric(19,4))")
        self.cursor.execute("insert into t1 values(?)", value)
        v = self.cursor.execute("select * from t1").fetchone()[0]
        self.assertEqual(type(v), Decimal)
        self.assertEqual(v, value)

    def _exec(self):
        # Helper so assertRaises can run the SQL stored in self.sql.
        self.cursor.execute(self.sql)

    def test_close_cnxn(self):
        """Make sure using a Cursor after closing its connection doesn't crash."""

        self.cursor.execute("create table t1(id integer, s varchar(20))")
        self.cursor.execute("insert into t1 values (?,?)", 1, 'test')
        self.cursor.execute("select * from t1")

        self.cnxn.close()

        # Now that the connection is closed, we expect an exception. (If the code attempts to use
        # the HSTMT, we'll get an access violation instead.)
        self.sql = "select * from t1"
        self.assertRaises(pyodbc.ProgrammingError, self._exec)

    def test_empty_string(self):
        self.cursor.execute("create table t1(s varchar(20))")
        self.cursor.execute("insert into t1 values(?)", "")

    def test_fixed_str(self):
        value = "testing"
        self.cursor.execute("create table t1(s char(7))")
        self.cursor.execute("insert into t1 values(?)", "testing")
        v = self.cursor.execute("select * from t1").fetchone()[0]
        self.assertEqual(type(v), str)
        self.assertEqual(len(v), len(value)) # If we alloc'd wrong, the test below might work because of an embedded NULL
        self.assertEqual(v, value)

    def test_negative_row_index(self):
        self.cursor.execute("create table t1(s varchar(20))")
        self.cursor.execute("insert into t1 values(?)", "1")
        row = self.cursor.execute("select * from t1").fetchone()
        self.assertEquals(row[0], "1")
        self.assertEquals(row[-1], "1")

    def test_version(self):
        self.assertEquals(3, len(pyodbc.version.split('.'))) # 1.3.1 etc.

    def test_rowcount_delete(self):
        self.assertEquals(self.cursor.rowcount, -1)
        self.cursor.execute("create table t1(i int)")
        count = 4
        for i in range(count):
            self.cursor.execute("insert into t1 values (?)", i)
        self.cursor.execute("delete from t1")
        self.assertEquals(self.cursor.rowcount, count)

    def test_rowcount_nodata(self):
        """
        This represents a different code path than a delete that deleted something.

        The return value is SQL_NO_DATA and code after it was causing an error. We could use SQL_NO_DATA to step over
        the code that errors out and drop down to the same SQLRowCount code. On the other hand, we could hardcode a
        zero return value.
        """
        self.cursor.execute("create table t1(i int)")
        # This is a different code path internally.
        self.cursor.execute("delete from t1")
        self.assertEquals(self.cursor.rowcount, 0)

    def test_rowcount_select(self):
        self.cursor.execute("create table t1(i int)")
        count = 4
        for i in range(count):
            self.cursor.execute("insert into t1 values (?)", i)
        self.cursor.execute("select * from t1")
        self.assertEquals(self.cursor.rowcount, 4)

    # PostgreSQL driver fails here?
    # def test_rowcount_reset(self):
    #     "Ensure rowcount is reset to -1"
    #
    #     self.cursor.execute("create table t1(i int)")
    #     count = 4
    #     for i in range(count):
    #         self.cursor.execute("insert into t1 values (?)", i)
    #     self.assertEquals(self.cursor.rowcount, 1)
    #
    #     self.cursor.execute("create table t2(i int)")
    #     self.assertEquals(self.cursor.rowcount, -1)

    def test_lower_case(self):
        "Ensure pyodbc.lowercase forces returned column names to lowercase."

        # Has to be set before creating the cursor, so we must recreate self.cursor.
        pyodbc.lowercase = True
        self.cursor = self.cnxn.cursor()

        self.cursor.execute("create table t1(Abc int, dEf int)")
        self.cursor.execute("select * from t1")

        names = [ t[0] for t in self.cursor.description ]
        names.sort()

        self.assertEquals(names, [ "abc", "def" ])

        # Put it back so other tests don't fail.
        pyodbc.lowercase = False

    def test_row_description(self):
        """
        Ensure Cursor.description is accessible as Row.cursor_description.
        """
        self.cursor = self.cnxn.cursor()
        self.cursor.execute("create table t1(a int, b char(3))")
        self.cnxn.commit()
        self.cursor.execute("insert into t1 values(1, 'abc')")

        row = self.cursor.execute("select * from t1").fetchone()
        self.assertEquals(self.cursor.description, row.cursor_description)

    def test_executemany(self):
        self.cursor.execute("create table t1(a int, b varchar(10))")

        params = [ (i, str(i)) for i in range(1, 6) ]

        self.cursor.executemany("insert into t1(a, b) values (?,?)", params)

        # REVIEW: Without the cast, we get the following error:
        # [07006] [unixODBC]Received an unsupported type from Postgres.;\nERROR: table "t2" does not exist (14)
        count = self.cursor.execute("select cast(count(*) as int) from t1").fetchone()[0]
        self.assertEqual(count, len(params))

        self.cursor.execute("select a, b from t1 order by a")
        rows = self.cursor.fetchall()
        self.assertEqual(count, len(rows))

        for param, row in zip(params, rows):
            self.assertEqual(param[0], row[0])
            self.assertEqual(param[1], row[1])

    def test_executemany_failure(self):
        """
        Ensure that an exception is raised if one query in an executemany fails.
        """
        self.cursor.execute("create table t1(a int, b varchar(10))")

        params = [ (1, 'good'),
                   ('error', 'not an int'),
                   (3, 'good') ]

        self.failUnlessRaises(pyodbc.Error, self.cursor.executemany, "insert into t1(a, b) value (?, ?)", params)

    def test_row_slicing(self):
        self.cursor.execute("create table t1(a int, b int, c int, d int)");
        self.cursor.execute("insert into t1 values(1,2,3,4)")

        row = self.cursor.execute("select * from t1").fetchone()

        # A full slice returns the row object itself, not a copy.
        result = row[:]
        self.failUnless(result is row)

        result = row[:-1]
        self.assertEqual(result, (1,2,3))

        result = row[0:4]
        self.failUnless(result is row)

    def test_row_repr(self):
        self.cursor.execute("create table t1(a int, b int, c int, d int)");
        self.cursor.execute("insert into t1 values(1,2,3,4)")

        row = self.cursor.execute("select * from t1").fetchone()

        result = str(row)
        self.assertEqual(result, "(1, 2, 3, 4)")

        result = str(row[:-1])
        self.assertEqual(result, "(1, 2, 3)")

        result = str(row[:1])
        self.assertEqual(result, "(1,)")
def main():
    """Parse command-line options, build the connection string and run the suite."""
    from optparse import OptionParser
    parser = OptionParser(usage="usage: %prog [options] connection_string")
    parser.add_option("-v", "--verbose", action="count", help="Increment test verbosity (can be used multiple times)")
    parser.add_option("-d", "--debug", action="store_true", default=False, help="Print debugging items")
    parser.add_option("-t", "--test", help="Run only the named test")
    parser.add_option('-a', '--ansi', help='ANSI only', default=False, action='store_true')

    (options, args) = parser.parse_args()

    if len(args) > 1:
        parser.error('Only one argument is allowed. Do you need quotes around the connection string?')

    if not args:
        # Fall back to the connection string stored in tmp/setup.cfg.
        connection_string = load_setup_connection_string('pgtests')

        if not connection_string:
            parser.print_help()
            raise SystemExit()
    else:
        connection_string = args[0]

    if options.verbose:
        cnxn = pyodbc.connect(connection_string, ansi=options.ansi)
        print 'library:', os.path.abspath(pyodbc.__file__)
        print 'odbc: %s' % cnxn.getinfo(pyodbc.SQL_ODBC_VER)
        print 'driver: %s %s' % (cnxn.getinfo(pyodbc.SQL_DRIVER_NAME), cnxn.getinfo(pyodbc.SQL_DRIVER_VER))
        print 'driver supports ODBC version %s' % cnxn.getinfo(pyodbc.SQL_DRIVER_ODBC_VER)
        print 'unicode:', pyodbc.UNICODE_SIZE, 'sqlwchar:', pyodbc.SQLWCHAR_SIZE
        cnxn.close()

    if options.test:
        # Run a single test
        if not options.test.startswith('test_'):
            options.test = 'test_%s' % (options.test)

        s = unittest.TestSuite([ PGTestCase(connection_string, options.ansi, options.test) ])
    else:
        # Run all tests in the class
        methods = [ m for m in dir(PGTestCase) if m.startswith('test_') ]
        methods.sort()
        s = unittest.TestSuite([ PGTestCase(connection_string, options.ansi, m) for m in methods ])

    testRunner = unittest.TextTestRunner(verbosity=options.verbose)
    result = testRunner.run(s)
if __name__ == '__main__':
    # Add the build directory to the path so we're testing the latest build, not the installed version.
    add_to_path()

    import pyodbc
    main()
| Python |
#!/usr/bin/python
# Tests for reading from Excel files.
#
# I have not been able to successfully create or modify Excel files.
import sys, os, re
import unittest
from os.path import abspath
from testutils import *
CNXNSTRING = None
class ExcelTestCase(unittest.TestCase):
    """Read-only tests against the test.xls workbook via the Excel ODBC driver."""

    def __init__(self, method_name):
        unittest.TestCase.__init__(self, method_name)

    def setUp(self):
        # Fresh connection and cursor per test; drop leftover tables t0..t2.
        self.cnxn = pyodbc.connect(CNXNSTRING, autocommit=True)
        self.cursor = self.cnxn.cursor()

        for i in range(3):
            try:
                self.cursor.execute("drop table t%d" % i)
                self.cnxn.commit()
            except:
                pass

        self.cnxn.rollback()

    def tearDown(self):
        try:
            self.cursor.close()
            self.cnxn.close()
        except:
            # If we've already closed the cursor or connection, exceptions are thrown.
            pass

    def test_getinfo_string(self):
        value = self.cnxn.getinfo(pyodbc.SQL_CATALOG_NAME_SEPARATOR)
        self.assert_(isinstance(value, str))

    def test_getinfo_bool(self):
        value = self.cnxn.getinfo(pyodbc.SQL_ACCESSIBLE_TABLES)
        self.assert_(isinstance(value, bool))

    def test_getinfo_int(self):
        value = self.cnxn.getinfo(pyodbc.SQL_DEFAULT_TXN_ISOLATION)
        self.assert_(isinstance(value, (int, long)))

    def test_getinfo_smallint(self):
        value = self.cnxn.getinfo(pyodbc.SQL_CONCAT_NULL_BEHAVIOR)
        self.assert_(isinstance(value, int))

    def test_read_sheet(self):
        # The first method of reading data is to access worksheets by name in this format [name$].
        #
        # Our second sheet is named Sheet2 and has two columns. The first has values 10, 20, 30, etc.
        rows = self.cursor.execute("select * from [Sheet2$]").fetchall()
        self.assertEquals(len(rows), 5)

        for index, row in enumerate(rows):
            self.assertEquals(row.s2num, float(index + 1) * 10)

    def test_read_range(self):
        # The second method of reading data is to assign a name to a range of cells and access that as a table.
        #
        # Our first worksheet has a section named Table1. The first column has values 1, 2, 3, etc.
        rows = self.cursor.execute("select * from Table1").fetchall()
        self.assertEquals(len(rows), 10)

        for index, row in enumerate(rows):
            self.assertEquals(row.num, float(index + 1))
            self.assertEquals(row.val, chr(ord('a') + index))

    def test_tables(self):
        # This is useful for figuring out what is available
        tables = [ row.table_name for row in self.cursor.tables() ]
        assert 'Sheet2$' in tables, 'tables: %s' % ' '.join(tables)

    # def test_append(self):
    #     rows = self.cursor.execute("select s2num, s2val from [Sheet2$]").fetchall()
    #
    #     print rows
    #
    #     nextnum = max([ row.s2num for row in rows ]) + 10
    #
    #     self.cursor.execute("insert into [Sheet2$](s2num, s2val) values (?, 'z')", nextnum)
    #
    #     row = self.cursor.execute("select s2num, s2val from [Sheet2$] where s2num=?", nextnum).fetchone()
    #     self.assertTrue(row)
    #
    #     print 'added:', nextnum, len(rows), 'rows'
    #
    #     self.assertEquals(row.s2num, nextnum)
    #     self.assertEquals(row.s2val, 'z')
    #
    #     self.cnxn.commit()
def main():
    """Build the Excel-driver connection string and run the test suite."""
    from optparse import OptionParser
    parser = OptionParser() #usage=usage)
    parser.add_option("-v", "--verbose", action="count", help="Increment test verbosity (can be used multiple times)")
    parser.add_option("-d", "--debug", action="store_true", default=False, help="Print debugging items")
    parser.add_option("-t", "--test", help="Run only the named test")

    (options, args) = parser.parse_args()

    if args:
        parser.error('no arguments expected')

    global CNXNSTRING

    # The tests read from the test.xls workbook shipped next to this script.
    path = dirname(abspath(__file__))
    filename = join(path, 'test.xls')
    assert os.path.exists(filename)
    CNXNSTRING = 'Driver={Microsoft Excel Driver (*.xls)};DBQ=%s;READONLY=FALSE' % filename

    cnxn = pyodbc.connect(CNXNSTRING, autocommit=True)
    print_library_info(cnxn)
    cnxn.close()

    suite = load_tests(ExcelTestCase, options.test)

    testRunner = unittest.TextTestRunner(verbosity=options.verbose)
    result = testRunner.run(suite)
if __name__ == '__main__':
    # Add the build directory to the path so we're testing the latest build, not the installed version.
    add_to_path()

    import pyodbc
    main()
| Python |
#!/usr/bin/env python
''' Python DB API 2.0 driver compliance unit test suite.
This software is Public Domain and may be used without restrictions.
"Now we have booze and barflies entering the discussion, plus rumours of
DBAs on drugs... and I won't tell you what flashes through my mind each
time I read the subject line with 'Anal Compliance' in it. All around
this is turning out to be a thoroughly unwholesome unit test."
-- Ian Bicking
'''
__rcs_id__ = '$Id: dbapi20.py,v 1.10 2003/10/09 03:14:14 zenzen Exp $'
__version__ = '$Revision: 1.10 $'[11:-2]
__author__ = 'Stuart Bishop <zen@shangri-la.dropbear.id.au>'
import unittest
import time
# $Log: dbapi20.py,v $
# Revision 1.10 2003/10/09 03:14:14 zenzen
# Add test for DB API 2.0 optional extension, where database exceptions
# are exposed as attributes on the Connection object.
#
# Revision 1.9 2003/08/13 01:16:36 zenzen
# Minor tweak from Stefan Fleiter
#
# Revision 1.8 2003/04/10 00:13:25 zenzen
# Changes, as per suggestions by M.-A. Lemburg
# - Add a table prefix, to ensure namespace collisions can always be avoided
#
# Revision 1.7 2003/02/26 23:33:37 zenzen
# Break out DDL into helper functions, as per request by David Rushby
#
# Revision 1.6 2003/02/21 03:04:33 zenzen
# Stuff from Henrik Ekelund:
# added test_None
# added test_nextset & hooks
#
# Revision 1.5 2003/02/17 22:08:43 zenzen
# Implement suggestions and code from Henrik Eklund - test that cursor.arraysize
# defaults to 1 & generic cursor.callproc test added
#
# Revision 1.4 2003/02/15 00:16:33 zenzen
# Changes, as per suggestions and bug reports by M.-A. Lemburg,
# Matthew T. Kromer, Federico Di Gregorio and Daniel Dittmar
# - Class renamed
# - Now a subclass of TestCase, to avoid requiring the driver stub
# to use multiple inheritance
# - Reversed the polarity of buggy test in test_description
# - Test exception hierarchy correctly
# - self.populate is now self._populate(), so if a driver stub
# overrides self.ddl1 this change propagates
# - VARCHAR columns now have a width, which will hopefully make the
# DDL even more portable (this will be reversed if it causes more problems)
# - cursor.rowcount being checked after various execute and fetchXXX methods
# - Check for fetchall and fetchmany returning empty lists after results
# are exhausted (already checking for empty lists if select retrieved
# nothing
# - Fix bugs in test_setoutputsize_basic and test_setinputsizes
#
class DatabaseAPI20Test(unittest.TestCase):
''' Test a database self.driver for DB API 2.0 compatibility.
This implementation tests Gadfly, but the TestCase
is structured so that other self.drivers can subclass this
test case to ensure compiliance with the DB-API. It is
expected that this TestCase may be expanded in the future
if ambiguities or edge conditions are discovered.
The 'Optional Extensions' are not yet being tested.
self.drivers should subclass this test, overriding setUp, tearDown,
self.driver, connect_args and connect_kw_args. Class specification
should be as follows:
import dbapi20
class mytest(dbapi20.DatabaseAPI20Test):
[...]
Don't 'import DatabaseAPI20Test from dbapi20', or you will
confuse the unit tester - just 'import dbapi20'.
'''
# The self.driver module. This should be the module where the 'connect'
# method is to be found
driver = None
connect_args = () # List of arguments to pass to connect
connect_kw_args = {} # Keyword arguments for connect
table_prefix = 'dbapi20test_' # If you need to specify a prefix for tables
ddl1 = 'create table %sbooze (name varchar(20))' % table_prefix
ddl2 = 'create table %sbarflys (name varchar(20))' % table_prefix
xddl1 = 'drop table %sbooze' % table_prefix
xddl2 = 'drop table %sbarflys' % table_prefix
lowerfunc = 'lower' # Name of stored procedure to convert string->lowercase
# Some drivers may need to override these helpers, for example adding
# a 'commit' after the execute.
def executeDDL1(self,cursor):
cursor.execute(self.ddl1)
def executeDDL2(self,cursor):
cursor.execute(self.ddl2)
def setUp(self):
''' self.drivers should override this method to perform required setup
if any is necessary, such as creating the database.
'''
pass
def tearDown(self):
''' self.drivers should override this method to perform required cleanup
if any is necessary, such as deleting the test database.
The default drops the tables that may be created.
'''
con = self._connect()
try:
cur = con.cursor()
for i, ddl in enumerate((self.xddl1,self.xddl2)):
try:
cur.execute(ddl)
con.commit()
except self.driver.Error:
# Assume table didn't exist. Other tests will check if
# execute is busted.
pass
finally:
con.close()
    def _connect(self):
        # Open a connection using the subclass-supplied driver module plus
        # connect_args/connect_kw_args.  A driver module without a 'connect'
        # attribute is reported as a test failure rather than an error.
        # NOTE(review): the except clause also converts an AttributeError
        # raised *inside* driver.connect() into this failure message.
        try:
            return self.driver.connect(
                *self.connect_args,**self.connect_kw_args
                )
        except AttributeError:
            self.fail("No connect method found in self.driver module")
def test_connect(self):
con = self._connect()
con.close()
def test_apilevel(self):
try:
# Must exist
apilevel = self.driver.apilevel
# Must equal 2.0
self.assertEqual(apilevel,'2.0')
except AttributeError:
self.fail("Driver doesn't define apilevel")
def test_threadsafety(self):
try:
# Must exist
threadsafety = self.driver.threadsafety
# Must be a valid value
self.failUnless(threadsafety in (0,1,2,3))
except AttributeError:
self.fail("Driver doesn't define threadsafety")
def test_paramstyle(self):
try:
# Must exist
paramstyle = self.driver.paramstyle
# Must be a valid value
self.failUnless(paramstyle in (
'qmark','numeric','named','format','pyformat'
))
except AttributeError:
self.fail("Driver doesn't define paramstyle")
    def test_Exceptions(self):
        # Make sure required exceptions exist, and are in the
        # defined heirarchy.
        # NOTE(review): StandardError exists only in Python 2; porting this
        # suite to Python 3 would require Exception here instead.
        self.failUnless(issubclass(self.driver.Warning,StandardError))
        self.failUnless(issubclass(self.driver.Error,StandardError))
        # Every other DB API 2.0 exception must derive from Error.
        self.failUnless(
            issubclass(self.driver.InterfaceError,self.driver.Error)
            )
        self.failUnless(
            issubclass(self.driver.DatabaseError,self.driver.Error)
            )
        self.failUnless(
            issubclass(self.driver.OperationalError,self.driver.Error)
            )
        self.failUnless(
            issubclass(self.driver.IntegrityError,self.driver.Error)
            )
        self.failUnless(
            issubclass(self.driver.InternalError,self.driver.Error)
            )
        self.failUnless(
            issubclass(self.driver.ProgrammingError,self.driver.Error)
            )
        self.failUnless(
            issubclass(self.driver.NotSupportedError,self.driver.Error)
            )
def test_ExceptionsAsConnectionAttributes(self):
# OPTIONAL EXTENSION
# Test for the optional DB API 2.0 extension, where the exceptions
# are exposed as attributes on the Connection object
# I figure this optional extension will be implemented by any
# driver author who is using this test suite, so it is enabled
# by default.
con = self._connect()
drv = self.driver
self.failUnless(con.Warning is drv.Warning)
self.failUnless(con.Error is drv.Error)
self.failUnless(con.InterfaceError is drv.InterfaceError)
self.failUnless(con.DatabaseError is drv.DatabaseError)
self.failUnless(con.OperationalError is drv.OperationalError)
self.failUnless(con.IntegrityError is drv.IntegrityError)
self.failUnless(con.InternalError is drv.InternalError)
self.failUnless(con.ProgrammingError is drv.ProgrammingError)
self.failUnless(con.NotSupportedError is drv.NotSupportedError)
def test_commit(self):
con = self._connect()
try:
# Commit must work, even if it doesn't do anything
con.commit()
finally:
con.close()
def test_rollback(self):
con = self._connect()
# If rollback is defined, it should either work or throw
# the documented exception
if hasattr(con,'rollback'):
try:
con.rollback()
except self.driver.NotSupportedError:
pass
def test_cursor(self):
con = self._connect()
try:
cur = con.cursor()
finally:
con.close()
def test_cursor_isolation(self):
con = self._connect()
try:
# Make sure cursors created from the same connection have
# the documented transaction isolation level
cur1 = con.cursor()
cur2 = con.cursor()
self.executeDDL1(cur1)
cur1.execute("insert into %sbooze values ('Victoria Bitter')" % (
self.table_prefix
))
cur2.execute("select name from %sbooze" % self.table_prefix)
booze = cur2.fetchall()
self.assertEqual(len(booze),1)
self.assertEqual(len(booze[0]),1)
self.assertEqual(booze[0][0],'Victoria Bitter')
finally:
con.close()
def test_description(self):
con = self._connect()
try:
cur = con.cursor()
self.executeDDL1(cur)
self.assertEqual(cur.description,None,
'cursor.description should be none after executing a '
'statement that can return no rows (such as DDL)'
)
cur.execute('select name from %sbooze' % self.table_prefix)
self.assertEqual(len(cur.description),1,
'cursor.description describes too many columns'
)
self.assertEqual(len(cur.description[0]),7,
'cursor.description[x] tuples must have 7 elements'
)
self.assertEqual(cur.description[0][0].lower(),'name',
'cursor.description[x][0] must return column name'
)
self.assertEqual(cur.description[0][1],self.driver.STRING,
'cursor.description[x][1] must return column type. Got %r'
% cur.description[0][1]
)
# Make sure self.description gets reset
self.executeDDL2(cur)
self.assertEqual(cur.description,None,
'cursor.description not being set to None when executing '
'no-result statements (eg. DDL)'
)
finally:
con.close()
def test_rowcount(self):
con = self._connect()
try:
cur = con.cursor()
self.executeDDL1(cur)
self.assertEqual(cur.rowcount,-1,
'cursor.rowcount should be -1 after executing no-result '
'statements'
)
cur.execute("insert into %sbooze values ('Victoria Bitter')" % (
self.table_prefix
))
self.failUnless(cur.rowcount in (-1,1),
'cursor.rowcount should == number or rows inserted, or '
'set to -1 after executing an insert statement'
)
cur.execute("select name from %sbooze" % self.table_prefix)
self.failUnless(cur.rowcount in (-1,1),
'cursor.rowcount should == number of rows returned, or '
'set to -1 after executing a select statement'
)
self.executeDDL2(cur)
self.assertEqual(cur.rowcount,-1,
'cursor.rowcount not being reset to -1 after executing '
'no-result statements'
)
finally:
con.close()
lower_func = 'lower'
def test_callproc(self):
con = self._connect()
try:
cur = con.cursor()
if self.lower_func and hasattr(cur,'callproc'):
r = cur.callproc(self.lower_func,('FOO',))
self.assertEqual(len(r),1)
self.assertEqual(r[0],'FOO')
r = cur.fetchall()
self.assertEqual(len(r),1,'callproc produced no result set')
self.assertEqual(len(r[0]),1,
'callproc produced invalid result set'
)
self.assertEqual(r[0][0],'foo',
'callproc produced invalid results'
)
finally:
con.close()
def test_close(self):
con = self._connect()
try:
cur = con.cursor()
finally:
con.close()
# cursor.execute should raise an Error if called after connection
# closed
self.assertRaises(self.driver.Error,self.executeDDL1,cur)
# connection.commit should raise an Error if called after connection'
# closed.'
self.assertRaises(self.driver.Error,con.commit)
# connection.close should raise an Error if called more than once
self.assertRaises(self.driver.Error,con.close)
def test_execute(self):
con = self._connect()
try:
cur = con.cursor()
self._paraminsert(cur)
finally:
con.close()
def _paraminsert(self,cur):
self.executeDDL1(cur)
cur.execute("insert into %sbooze values ('Victoria Bitter')" % (
self.table_prefix
))
self.failUnless(cur.rowcount in (-1,1))
if self.driver.paramstyle == 'qmark':
cur.execute(
'insert into %sbooze values (?)' % self.table_prefix,
("Cooper's",)
)
elif self.driver.paramstyle == 'numeric':
cur.execute(
'insert into %sbooze values (:1)' % self.table_prefix,
("Cooper's",)
)
elif self.driver.paramstyle == 'named':
cur.execute(
'insert into %sbooze values (:beer)' % self.table_prefix,
{'beer':"Cooper's"}
)
elif self.driver.paramstyle == 'format':
cur.execute(
'insert into %sbooze values (%%s)' % self.table_prefix,
("Cooper's",)
)
elif self.driver.paramstyle == 'pyformat':
cur.execute(
'insert into %sbooze values (%%(beer)s)' % self.table_prefix,
{'beer':"Cooper's"}
)
else:
self.fail('Invalid paramstyle')
self.failUnless(cur.rowcount in (-1,1))
cur.execute('select name from %sbooze' % self.table_prefix)
res = cur.fetchall()
self.assertEqual(len(res),2,'cursor.fetchall returned too few rows')
beers = [res[0][0],res[1][0]]
beers.sort()
self.assertEqual(beers[0],"Cooper's",
'cursor.fetchall retrieved incorrect data, or data inserted '
'incorrectly'
)
self.assertEqual(beers[1],"Victoria Bitter",
'cursor.fetchall retrieved incorrect data, or data inserted '
'incorrectly'
)
def test_executemany(self):
con = self._connect()
try:
cur = con.cursor()
self.executeDDL1(cur)
largs = [ ("Cooper's",) , ("Boag's",) ]
margs = [ {'beer': "Cooper's"}, {'beer': "Boag's"} ]
if self.driver.paramstyle == 'qmark':
cur.executemany(
'insert into %sbooze values (?)' % self.table_prefix,
largs
)
elif self.driver.paramstyle == 'numeric':
cur.executemany(
'insert into %sbooze values (:1)' % self.table_prefix,
largs
)
elif self.driver.paramstyle == 'named':
cur.executemany(
'insert into %sbooze values (:beer)' % self.table_prefix,
margs
)
elif self.driver.paramstyle == 'format':
cur.executemany(
'insert into %sbooze values (%%s)' % self.table_prefix,
largs
)
elif self.driver.paramstyle == 'pyformat':
cur.executemany(
'insert into %sbooze values (%%(beer)s)' % (
self.table_prefix
),
margs
)
else:
self.fail('Unknown paramstyle')
self.failUnless(cur.rowcount in (-1,2),
'insert using cursor.executemany set cursor.rowcount to '
'incorrect value %r' % cur.rowcount
)
cur.execute('select name from %sbooze' % self.table_prefix)
res = cur.fetchall()
self.assertEqual(len(res),2,
'cursor.fetchall retrieved incorrect number of rows'
)
beers = [res[0][0],res[1][0]]
beers.sort()
self.assertEqual(beers[0],"Boag's",'incorrect data retrieved')
self.assertEqual(beers[1],"Cooper's",'incorrect data retrieved')
finally:
con.close()
def test_fetchone(self):
con = self._connect()
try:
cur = con.cursor()
# cursor.fetchone should raise an Error if called before
# executing a select-type query
self.assertRaises(self.driver.Error,cur.fetchone)
# cursor.fetchone should raise an Error if called after
# executing a query that cannnot return rows
self.executeDDL1(cur)
self.assertRaises(self.driver.Error,cur.fetchone)
cur.execute('select name from %sbooze' % self.table_prefix)
self.assertEqual(cur.fetchone(),None,
'cursor.fetchone should return None if a query retrieves '
'no rows'
)
self.failUnless(cur.rowcount in (-1,0))
# cursor.fetchone should raise an Error if called after
# executing a query that cannnot return rows
cur.execute("insert into %sbooze values ('Victoria Bitter')" % (
self.table_prefix
))
self.assertRaises(self.driver.Error,cur.fetchone)
cur.execute('select name from %sbooze' % self.table_prefix)
r = cur.fetchone()
self.assertEqual(len(r),1,
'cursor.fetchone should have retrieved a single row'
)
self.assertEqual(r[0],'Victoria Bitter',
'cursor.fetchone retrieved incorrect data'
)
self.assertEqual(cur.fetchone(),None,
'cursor.fetchone should return None if no more rows available'
)
self.failUnless(cur.rowcount in (-1,1))
finally:
con.close()
samples = [
'Carlton Cold',
'Carlton Draft',
'Mountain Goat',
'Redback',
'Victoria Bitter',
'XXXX'
]
def _populate(self):
''' Return a list of sql commands to setup the DB for the fetch
tests.
'''
populate = [
"insert into %sbooze values ('%s')" % (self.table_prefix,s)
for s in self.samples
]
return populate
def test_fetchmany(self):
con = self._connect()
try:
cur = con.cursor()
# cursor.fetchmany should raise an Error if called without
#issuing a query
self.assertRaises(self.driver.Error,cur.fetchmany,4)
self.executeDDL1(cur)
for sql in self._populate():
cur.execute(sql)
cur.execute('select name from %sbooze' % self.table_prefix)
r = cur.fetchmany()
self.assertEqual(len(r),1,
'cursor.fetchmany retrieved incorrect number of rows, '
'default of arraysize is one.'
)
cur.arraysize=10
r = cur.fetchmany(3) # Should get 3 rows
self.assertEqual(len(r),3,
'cursor.fetchmany retrieved incorrect number of rows'
)
r = cur.fetchmany(4) # Should get 2 more
self.assertEqual(len(r),2,
'cursor.fetchmany retrieved incorrect number of rows'
)
r = cur.fetchmany(4) # Should be an empty sequence
self.assertEqual(len(r),0,
'cursor.fetchmany should return an empty sequence after '
'results are exhausted'
)
self.failUnless(cur.rowcount in (-1,6))
# Same as above, using cursor.arraysize
cur.arraysize=4
cur.execute('select name from %sbooze' % self.table_prefix)
r = cur.fetchmany() # Should get 4 rows
self.assertEqual(len(r),4,
'cursor.arraysize not being honoured by fetchmany'
)
r = cur.fetchmany() # Should get 2 more
self.assertEqual(len(r),2)
r = cur.fetchmany() # Should be an empty sequence
self.assertEqual(len(r),0)
self.failUnless(cur.rowcount in (-1,6))
cur.arraysize=6
cur.execute('select name from %sbooze' % self.table_prefix)
rows = cur.fetchmany() # Should get all rows
self.failUnless(cur.rowcount in (-1,6))
self.assertEqual(len(rows),6)
self.assertEqual(len(rows),6)
rows = [r[0] for r in rows]
rows.sort()
# Make sure we get the right data back out
for i in range(0,6):
self.assertEqual(rows[i],self.samples[i],
'incorrect data retrieved by cursor.fetchmany'
)
rows = cur.fetchmany() # Should return an empty list
self.assertEqual(len(rows),0,
'cursor.fetchmany should return an empty sequence if '
'called after the whole result set has been fetched'
)
self.failUnless(cur.rowcount in (-1,6))
self.executeDDL2(cur)
cur.execute('select name from %sbarflys' % self.table_prefix)
r = cur.fetchmany() # Should get empty sequence
self.assertEqual(len(r),0,
'cursor.fetchmany should return an empty sequence if '
'query retrieved no rows'
)
self.failUnless(cur.rowcount in (-1,0))
finally:
con.close()
def test_fetchall(self):
con = self._connect()
try:
cur = con.cursor()
# cursor.fetchall should raise an Error if called
# without executing a query that may return rows (such
# as a select)
self.assertRaises(self.driver.Error, cur.fetchall)
self.executeDDL1(cur)
for sql in self._populate():
cur.execute(sql)
# cursor.fetchall should raise an Error if called
# after executing a a statement that cannot return rows
self.assertRaises(self.driver.Error,cur.fetchall)
cur.execute('select name from %sbooze' % self.table_prefix)
rows = cur.fetchall()
self.failUnless(cur.rowcount in (-1,len(self.samples)))
self.assertEqual(len(rows),len(self.samples),
'cursor.fetchall did not retrieve all rows'
)
rows = [r[0] for r in rows]
rows.sort()
for i in range(0,len(self.samples)):
self.assertEqual(rows[i],self.samples[i],
'cursor.fetchall retrieved incorrect rows'
)
rows = cur.fetchall()
self.assertEqual(
len(rows),0,
'cursor.fetchall should return an empty list if called '
'after the whole result set has been fetched'
)
self.failUnless(cur.rowcount in (-1,len(self.samples)))
self.executeDDL2(cur)
cur.execute('select name from %sbarflys' % self.table_prefix)
rows = cur.fetchall()
self.failUnless(cur.rowcount in (-1,0))
self.assertEqual(len(rows),0,
'cursor.fetchall should return an empty list if '
'a select query returns no rows'
)
finally:
con.close()
def test_mixedfetch(self):
con = self._connect()
try:
cur = con.cursor()
self.executeDDL1(cur)
for sql in self._populate():
cur.execute(sql)
cur.execute('select name from %sbooze' % self.table_prefix)
rows1 = cur.fetchone()
rows23 = cur.fetchmany(2)
rows4 = cur.fetchone()
rows56 = cur.fetchall()
self.failUnless(cur.rowcount in (-1,6))
self.assertEqual(len(rows23),2,
'fetchmany returned incorrect number of rows'
)
self.assertEqual(len(rows56),2,
'fetchall returned incorrect number of rows'
)
rows = [rows1[0]]
rows.extend([rows23[0][0],rows23[1][0]])
rows.append(rows4[0])
rows.extend([rows56[0][0],rows56[1][0]])
rows.sort()
for i in range(0,len(self.samples)):
self.assertEqual(rows[i],self.samples[i],
'incorrect data retrieved or inserted'
)
finally:
con.close()
def help_nextset_setUp(self,cur):
''' Should create a procedure called deleteme
that returns two result sets, first the
number of rows in booze then "name from booze"
'''
raise NotImplementedError,'Helper not implemented'
#sql="""
# create procedure deleteme as
# begin
# select count(*) from booze
# select name from booze
# end
#"""
#cur.execute(sql)
def help_nextset_tearDown(self,cur):
'If cleaning up is needed after nextSetTest'
raise NotImplementedError,'Helper not implemented'
#cur.execute("drop procedure deleteme")
def test_nextset(self):
con = self._connect()
try:
cur = con.cursor()
if not hasattr(cur,'nextset'):
return
try:
self.executeDDL1(cur)
sql=self._populate()
for sql in self._populate():
cur.execute(sql)
self.help_nextset_setUp(cur)
cur.callproc('deleteme')
numberofrows=cur.fetchone()
assert numberofrows[0]== len(self.samples)
assert cur.nextset()
names=cur.fetchall()
assert len(names) == len(self.samples)
s=cur.nextset()
assert s == None,'No more return sets, should return None'
finally:
self.help_nextset_tearDown(cur)
finally:
con.close()
    # This second definition intentionally shadows the implementation above:
    # drivers must override test_nextset with a working version (or a stub)
    # in their subclass.
    def test_nextset(self):
        raise NotImplementedError,'Drivers need to override this test'
def test_arraysize(self):
# Not much here - rest of the tests for this are in test_fetchmany
con = self._connect()
try:
cur = con.cursor()
self.failUnless(hasattr(cur,'arraysize'),
'cursor.arraysize must be defined'
)
finally:
con.close()
def test_setinputsizes(self):
con = self._connect()
try:
cur = con.cursor()
cur.setinputsizes( (25,) )
self._paraminsert(cur) # Make sure cursor still works
finally:
con.close()
def test_setoutputsize_basic(self):
# Basic test is to make sure setoutputsize doesn't blow up
con = self._connect()
try:
cur = con.cursor()
cur.setoutputsize(1000)
cur.setoutputsize(2000,0)
self._paraminsert(cur) # Make sure the cursor still works
finally:
con.close()
def test_setoutputsize(self):
# Real test for setoutputsize is driver dependant
raise NotImplementedError,'Driver need to override this test'
def test_None(self):
con = self._connect()
try:
cur = con.cursor()
self.executeDDL1(cur)
cur.execute('insert into %sbooze values (NULL)' % self.table_prefix)
cur.execute('select name from %sbooze' % self.table_prefix)
r = cur.fetchall()
self.assertEqual(len(r),1)
self.assertEqual(len(r[0]),1)
self.assertEqual(r[0][0],None,'NULL value not returned as None')
finally:
con.close()
def test_Date(self):
d1 = self.driver.Date(2002,12,25)
d2 = self.driver.DateFromTicks(time.mktime((2002,12,25,0,0,0,0,0,0)))
# Can we assume this? API doesn't specify, but it seems implied
# self.assertEqual(str(d1),str(d2))
def test_Time(self):
t1 = self.driver.Time(13,45,30)
t2 = self.driver.TimeFromTicks(time.mktime((2001,1,1,13,45,30,0,0,0)))
# Can we assume this? API doesn't specify, but it seems implied
# self.assertEqual(str(t1),str(t2))
def test_Timestamp(self):
t1 = self.driver.Timestamp(2002,12,25,13,45,30)
t2 = self.driver.TimestampFromTicks(
time.mktime((2002,12,25,13,45,30,0,0,0))
)
# Can we assume this? API doesn't specify, but it seems implied
# self.assertEqual(str(t1),str(t2))
def test_Binary(self):
b = self.driver.Binary('Something')
b = self.driver.Binary('')
def test_STRING(self):
self.failUnless(hasattr(self.driver,'STRING'),
'module.STRING must be defined'
)
def test_BINARY(self):
self.failUnless(hasattr(self.driver,'BINARY'),
'module.BINARY must be defined.'
)
def test_NUMBER(self):
self.failUnless(hasattr(self.driver,'NUMBER'),
'module.NUMBER must be defined.'
)
def test_DATETIME(self):
self.failUnless(hasattr(self.driver,'DATETIME'),
'module.DATETIME must be defined.'
)
def test_ROWID(self):
self.failUnless(hasattr(self.driver,'ROWID'),
'module.ROWID must be defined.'
)
| Python |
# Minimal smoke test: open a SQL Server connection, run "select 1",
# and print each object created along the way.
from testutils import *
add_to_path()
import pyodbc

_CONNECTION_STRING = (
    "DRIVER={SQL Server Native Client 10.0};"
    "SERVER=localhost;"
    "DATABASE=test;"
    "Trusted_Connection=yes"
)

connection = pyodbc.connect(_CONNECTION_STRING)
print('cnxn:', connection)

cursor = connection.cursor()
print('cursor:', cursor)

cursor.execute("select 1")
row = cursor.fetchone()
print('row:', row)
| Python |
import unittest
from testutils import *
import dbapi20
def main():
    """Run the generic DB API 2.0 conformance suite against pyodbc.

    The connection string comes from the command line or, if omitted, from
    the [dbapitests] section of setup.cfg (via load_setup_connection_string).
    """
    add_to_path()
    import pyodbc
    from optparse import OptionParser
    parser = OptionParser(usage="usage: %prog [options] connection_string")
    # default=0 so the verbosity comparison below works even when -v is
    # never given: a 'count' option otherwise defaults to None, and
    # 'None > 1' raises TypeError on Python 3.
    parser.add_option("-v", "--verbose", action="count", default=0, help="Increment test verbosity (can be used multiple times)")
    parser.add_option("-d", "--debug", action="store_true", default=False, help="Print debugging items")
    (options, args) = parser.parse_args()
    if len(args) > 1:
        parser.error('Only one argument is allowed. Do you need quotes around the connection string?')
    if not args:
        connection_string = load_setup_connection_string('dbapitests')
        if not connection_string:
            parser.print_help()
            raise SystemExit()
    else:
        connection_string = args[0]
    # Bind pyodbc and the connection string into the generic suite; tests
    # that pyodbc deliberately does not implement are stubbed out.
    class test_pyodbc(dbapi20.DatabaseAPI20Test):
        driver = pyodbc
        connect_args = [ connection_string ]
        connect_kw_args = {}
        def test_nextset(self): pass
        def test_setoutputsize(self): pass
        def test_ExceptionsAsConnectionAttributes(self): pass
    suite = unittest.makeSuite(test_pyodbc, 'test')
    testRunner = unittest.TextTestRunner(verbosity=(options.verbose > 1) and 9 or 0)
    # The original bound the (unused) result; just run the suite.
    testRunner.run(suite)
# Standard entry guard: allows this module to be imported (e.g. by another
# test runner) without immediately executing the suite.
if __name__ == '__main__':
    main()
| Python |
import unittest
_TESTSTR = '0123456789-abcdefghijklmnopqrstuvwxyz-'
def _generate_test_string(length):
"""
Returns a string of `length` characters, constructed by repeating _TESTSTR as necessary.
To enhance performance, there are 3 ways data is read, based on the length of the value, so most data types are
tested with 3 lengths. This function helps us generate the test data.
We use a recognizable data set instead of a single character to make it less likely that "overlap" errors will
be hidden and to help us manually identify where a break occurs.
"""
if length <= len(_TESTSTR):
return _TESTSTR[:length]
c = (length + len(_TESTSTR)-1) / len(_TESTSTR)
v = _TESTSTR * c
return v[:length]
class TestBase(unittest.TestCase):
| Python |
#!/usr/bin/python
# -*- coding: latin-1 -*-
usage = """\
usage: %prog [options] connection_string
Unit tests for SQL Server. To use, pass a connection string as the parameter.
The tests will create and drop tables t1 and t2 as necessary.
These run using the version from the 'build' directory, not the version
installed into the Python directories. You must run python setup.py build
before running the tests.
You can also put the connection string into a setup.cfg file in the root of the project
(the same one setup.py would use) like so:
[sqlservertests]
connection-string=DRIVER={SQL Server};SERVER=localhost;UID=uid;PWD=pwd;DATABASE=db
The connection string above will use the 2000/2005 driver, even if SQL Server 2008
is installed:
2000: DRIVER={SQL Server}
2005: DRIVER={SQL Server}
2008: DRIVER={SQL Server Native Client 10.0}
"""
import sys, os, re
import unittest
from decimal import Decimal
from datetime import datetime, date, time
from os.path import join, getsize, dirname, abspath
from testutils import *
_TESTSTR = '0123456789-abcdefghijklmnopqrstuvwxyz-'
def _generate_test_string(length):
"""
Returns a string of `length` characters, constructed by repeating _TESTSTR as necessary.
To enhance performance, there are 3 ways data is read, based on the length of the value, so most data types are
tested with 3 lengths. This function helps us generate the test data.
We use a recognizable data set instead of a single character to make it less likely that "overlap" errors will
be hidden and to help us manually identify where a break occurs.
"""
if length <= len(_TESTSTR):
return _TESTSTR[:length]
c = int((length + len(_TESTSTR)-1) / len(_TESTSTR))
v = _TESTSTR * c
return v[:length]
class SqlServerTestCase(unittest.TestCase):
SMALL_FENCEPOST_SIZES = [ 0, 1, 255, 256, 510, 511, 512, 1023, 1024, 2047, 2048, 4000 ]
LARGE_FENCEPOST_SIZES = [ 4095, 4096, 4097, 10 * 1024, 20 * 1024 ]
STR_FENCEPOSTS = [ _generate_test_string(size) for size in SMALL_FENCEPOST_SIZES ]
BYTE_FENCEPOSTS = [ bytes(s, 'ascii') for s in STR_FENCEPOSTS ]
IMAGE_FENCEPOSTS = BYTE_FENCEPOSTS + [ bytes(_generate_test_string(size), 'ascii') for size in LARGE_FENCEPOST_SIZES ]
def __init__(self, method_name, connection_string):
unittest.TestCase.__init__(self, method_name)
self.connection_string = connection_string
def get_sqlserver_version(self):
"""
Returns the major version: 8-->2000, 9-->2005, 10-->2008
"""
self.cursor.execute("exec master..xp_msver 'ProductVersion'")
row = self.cursor.fetchone()
return int(row.Character_Value.split('.', 1)[0])
def setUp(self):
self.cnxn = pyodbc.connect(self.connection_string)
self.cursor = self.cnxn.cursor()
for i in range(3):
try:
self.cursor.execute("drop table t%d" % i)
self.cnxn.commit()
except:
pass
for i in range(3):
try:
self.cursor.execute("drop procedure proc%d" % i)
self.cnxn.commit()
except:
pass
try:
self.cursor.execute('drop function func1')
self.cnxn.commit()
except:
pass
self.cnxn.rollback()
def tearDown(self):
try:
self.cursor.close()
self.cnxn.close()
except:
# If we've already closed the cursor or connection, exceptions are thrown.
pass
def test_multiple_bindings(self):
"More than one bind and select on a cursor"
self.cursor.execute("create table t1(n int)")
self.cursor.execute("insert into t1 values (?)", 1)
self.cursor.execute("insert into t1 values (?)", 2)
self.cursor.execute("insert into t1 values (?)", 3)
for i in range(3):
self.cursor.execute("select n from t1 where n < ?", 10)
self.cursor.execute("select n from t1 where n < 3")
def test_different_bindings(self):
self.cursor.execute("create table t1(n int)")
self.cursor.execute("create table t2(d datetime)")
self.cursor.execute("insert into t1 values (?)", 1)
self.cursor.execute("insert into t2 values (?)", datetime.now())
def test_datasources(self):
p = pyodbc.dataSources()
self.assert_(isinstance(p, dict))
def test_getinfo_string(self):
value = self.cnxn.getinfo(pyodbc.SQL_CATALOG_NAME_SEPARATOR)
self.assert_(isinstance(value, str))
def test_getinfo_bool(self):
value = self.cnxn.getinfo(pyodbc.SQL_ACCESSIBLE_TABLES)
self.assert_(isinstance(value, bool))
def test_getinfo_int(self):
value = self.cnxn.getinfo(pyodbc.SQL_DEFAULT_TXN_ISOLATION)
self.assert_(isinstance(value, (int, int)))
def test_getinfo_smallint(self):
value = self.cnxn.getinfo(pyodbc.SQL_CONCAT_NULL_BEHAVIOR)
self.assert_(isinstance(value, int))
def test_noscan(self):
self.assertEqual(self.cursor.noscan, False)
self.cursor.noscan = True
self.assertEqual(self.cursor.noscan, True)
def test_guid(self):
self.cursor.execute("create table t1(g1 uniqueidentifier)")
self.cursor.execute("insert into t1 values (newid())")
v = self.cursor.execute("select * from t1").fetchone()[0]
self.assertEqual(type(v), str)
self.assertEqual(len(v), 36)
def test_nextset(self):
self.cursor.execute("create table t1(i int)")
for i in range(4):
self.cursor.execute("insert into t1(i) values(?)", i)
self.cursor.execute("select i from t1 where i < 2 order by i; select i from t1 where i >= 2 order by i")
for i, row in enumerate(self.cursor):
self.assertEqual(i, row.i)
self.assertEqual(self.cursor.nextset(), True)
for i, row in enumerate(self.cursor):
self.assertEqual(i + 2, row.i)
def test_fixed_unicode(self):
value = "t\xebsting"
self.cursor.execute("create table t1(s nchar(7))")
self.cursor.execute("insert into t1 values(?)", "t\xebsting")
v = self.cursor.execute("select * from t1").fetchone()[0]
self.assertEqual(type(v), str)
self.assertEqual(len(v), len(value)) # If we alloc'd wrong, the test below might work because of an embedded NULL
self.assertEqual(v, value)
    def _test_strtype(self, sqltype, value, resulttype=None, colsize=None):
        """
        The implementation for string, Unicode, and binary tests.

        sqltype    -- SQL column type name, e.g. 'varchar' or 'varbinary'
        value      -- Python value to round-trip through the database
                      (None tests NULL handling)
        resulttype -- expected Python type of the fetched value; defaults
                      to type(value)
        colsize    -- optional column size; when given it must be an int
                      and at least len(value)
        """
        assert colsize is None or isinstance(colsize, int), colsize
        assert colsize is None or (value is None or colsize >= len(value))
        # Build the column with or without an explicit size.
        if colsize:
            sql = "create table t1(s %s(%s))" % (sqltype, colsize)
        else:
            sql = "create table t1(s %s)" % sqltype
        if resulttype is None:
            resulttype = type(value)
        self.cursor.execute(sql)
        self.cursor.execute("insert into t1 values(?)", value)
        v = self.cursor.execute("select * from t1").fetchone()[0]
        self.assertEqual(type(v), resulttype)
        if value is not None:
            self.assertEqual(len(v), len(value))
        # To allow buffer --> db --> bytearray tests, always convert the input to the expected result type before
        # comparing.
        if type(value) is not resulttype:
            value = resulttype(value)
        self.assertEqual(v, value)
def _test_strliketype(self, sqltype, value, resulttype=None, colsize=None):
"""
The implementation for text, image, ntext, and binary.
These types do not support comparison operators.
"""
assert colsize is None or isinstance(colsize, int), colsize
assert colsize is None or (value is None or colsize >= len(value))
if colsize:
sql = "create table t1(s %s(%s))" % (sqltype, colsize)
else:
sql = "create table t1(s %s)" % sqltype
if resulttype is None:
resulttype = type(value)
self.cursor.execute(sql)
self.cursor.execute("insert into t1 values(?)", value)
v = self.cursor.execute("select * from t1").fetchone()[0]
self.assertEqual(type(v), resulttype)
if value is not None:
self.assertEqual(len(v), len(value))
# To allow buffer --> db --> bytearray tests, always convert the input to the expected result type before
# comparing.
if type(value) is not resulttype:
value = resulttype(value)
self.assertEqual(v, value)
#
# varchar
#
def test_varchar_null(self):
self._test_strtype('varchar', None, colsize=100)
# Generate a test for each fencepost size: test_varchar_0, etc.
def _maketest(value):
def t(self):
self._test_strtype('varchar', value, colsize=len(value))
return t
for value in STR_FENCEPOSTS:
locals()['test_varchar_%s' % len(value)] = _maketest(value)
def test_varchar_many(self):
self.cursor.execute("create table t1(c1 varchar(300), c2 varchar(300), c3 varchar(300))")
v1 = 'ABCDEFGHIJ' * 30
v2 = '0123456789' * 30
v3 = '9876543210' * 30
self.cursor.execute("insert into t1(c1, c2, c3) values (?,?,?)", v1, v2, v3);
row = self.cursor.execute("select c1, c2, c3, len(c1) as l1, len(c2) as l2, len(c3) as l3 from t1").fetchone()
self.assertEqual(v1, row.c1)
self.assertEqual(v2, row.c2)
self.assertEqual(v3, row.c3)
#
# unicode
#
def test_unicode_null(self):
self._test_strtype('nvarchar', None, colsize=100)
# Generate a test for each fencepost size: test_unicode_0, etc.
def _maketest(value):
def t(self):
self._test_strtype('nvarchar', value, colsize=len(value))
return t
for value in STR_FENCEPOSTS:
locals()['test_unicode_%s' % len(value)] = _maketest(value)
    def test_unicode_longmax(self):
        # Issue 188: Segfault when fetching NVARCHAR(MAX) data over 511 bytes
        ver = self.get_sqlserver_version()
        if ver < 9:
            # nvarchar(max) requires SQL Server 2005+ (major version 9);
            # silently skip on older servers.
            return
        self.cursor.execute("select cast(replicate(N'x', 512) as nvarchar(max))")
#
# binary
#
def test_binary_null(self):
self._test_strtype('varbinary', None, colsize=100)
# bytearray
def _maketest(value):
def t(self):
self._test_strtype('varbinary', bytearray(value), colsize=len(value))
return t
for value in BYTE_FENCEPOSTS:
locals()['test_binary_bytearray_%s' % len(value)] = _maketest(value)
# bytes
def _maketest(value):
def t(self):
self._test_strtype('varbinary', bytes(value), resulttype=bytearray, colsize=len(value))
return t
for value in BYTE_FENCEPOSTS:
locals()['test_binary_bytes_%s' % len(value)] = _maketest(value)
#
# image
#
def test_image_null(self):
self._test_strliketype('image', None)
# bytearray
def _maketest(value):
def t(self):
self._test_strliketype('image', bytearray(value))
return t
for value in IMAGE_FENCEPOSTS:
locals()['test_image_bytearray_%s' % len(value)] = _maketest(value)
# bytes
def _maketest(value):
def t(self):
self._test_strliketype('image', bytes(value), resulttype=bytearray)
return t
for value in IMAGE_FENCEPOSTS:
locals()['test_image_bytes_%s' % len(value)] = _maketest(value)
#
# text
#
def test_null_text(self):
self._test_strliketype('text', None)
def _maketest(value):
def t(self):
self._test_strliketype('text', value)
return t
for value in STR_FENCEPOSTS:
locals()['test_text_%s' % len(value)] = _maketest(value)
#
# bit
#
def test_bit(self):
value = True
self.cursor.execute("create table t1(b bit)")
self.cursor.execute("insert into t1 values (?)", value)
v = self.cursor.execute("select b from t1").fetchone()[0]
self.assertEqual(type(v), bool)
self.assertEqual(v, value)
#
# decimal
#
def _decimal(self, precision, scale, negative):
    """
    Round-trip the largest (optionally negative) value that fits in a
    decimal(precision, scale) column and verify it comes back unchanged.

    Used by the generated test_decimal_P_S_{p,n} tests below.
    """
    # From test provided by planders (thanks!) in Issue 91
    self.cursor.execute("create table t1(d decimal(%s, %s))" % (precision, scale))
    # Construct a decimal that uses the maximum precision and scale:
    # all nines before the point, and `scale` nines after it.
    decStr = '9' * (precision - scale)
    if scale:
        decStr = decStr + "." + '9' * scale
    if negative:
        decStr = "-" + decStr
    value = Decimal(decStr)
    self.cursor.execute("insert into t1 values(?)", value)
    v = self.cursor.execute("select d from t1").fetchone()[0]
    self.assertEqual(v, value)
def _maketest(p, s, n):
def t(self):
self._decimal(p, s, n)
return t
for (p, s, n) in [ (1, 0, False),
(1, 0, True),
(6, 0, False),
(6, 2, False),
(6, 4, True),
(6, 6, True),
(38, 0, False),
(38, 10, False),
(38, 38, False),
(38, 0, True),
(38, 10, True),
(38, 38, True) ]:
locals()['test_decimal_%s_%s_%s' % (p, s, n and 'n' or 'p')] = _maketest(p, s, n)
def test_decimal_e(self):
"""Ensure exponential notation decimals are properly handled"""
value = Decimal((0, (1, 2, 3), 5)) # prints as 1.23E+7
self.cursor.execute("create table t1(d decimal(10, 2))")
self.cursor.execute("insert into t1 values (?)", value)
result = self.cursor.execute("select * from t1").fetchone()[0]
self.assertEqual(result, value)
def test_subquery_params(self):
"""Ensure parameter markers work in a subquery"""
self.cursor.execute("create table t1(id integer, s varchar(20))")
self.cursor.execute("insert into t1 values (?,?)", 1, 'test')
row = self.cursor.execute("""
select x.id
from (
select id
from t1
where s = ?
and id between ? and ?
) x
""", 'test', 1, 10).fetchone()
self.assertNotEqual(row, None)
self.assertEqual(row[0], 1)
def _exec(self):
self.cursor.execute(self.sql)
def test_close_cnxn(self):
"""Make sure using a Cursor after closing its connection doesn't crash."""
self.cursor.execute("create table t1(id integer, s varchar(20))")
self.cursor.execute("insert into t1 values (?,?)", 1, 'test')
self.cursor.execute("select * from t1")
self.cnxn.close()
# Now that the connection is closed, we expect an exception. (If the code attempts to use
# the HSTMT, we'll get an access violation instead.)
self.sql = "select * from t1"
self.assertRaises(pyodbc.ProgrammingError, self._exec)
def test_empty_string(self):
self.cursor.execute("create table t1(s varchar(20))")
self.cursor.execute("insert into t1 values(?)", "")
def test_fixed_str(self):
value = "testing"
self.cursor.execute("create table t1(s char(7))")
self.cursor.execute("insert into t1 values(?)", "testing")
v = self.cursor.execute("select * from t1").fetchone()[0]
self.assertEqual(type(v), str)
self.assertEqual(len(v), len(value)) # If we alloc'd wrong, the test below might work because of an embedded NULL
self.assertEqual(v, value)
def test_empty_unicode(self):
self.cursor.execute("create table t1(s nvarchar(20))")
self.cursor.execute("insert into t1 values(?)", "")
def test_negative_row_index(self):
self.cursor.execute("create table t1(s varchar(20))")
self.cursor.execute("insert into t1 values(?)", "1")
row = self.cursor.execute("select * from t1").fetchone()
self.assertEquals(row[0], "1")
self.assertEquals(row[-1], "1")
def test_version(self):
self.assertEquals(3, len(pyodbc.version.split('.'))) # 1.3.1 etc.
#
# date, time, datetime
#
def test_datetime(self):
    "A whole-second datetime round-trips through a datetime column."
    stamp = datetime(2007, 1, 15, 3, 4, 5)
    self.cursor.execute("create table t1(dt datetime)")
    self.cursor.execute("insert into t1 values (?)", stamp)
    fetched = self.cursor.execute("select dt from t1").fetchone()[0]
    self.assertEquals(type(stamp), datetime)
    self.assertEquals(stamp, fetched)
def test_datetime_fraction(self):
# SQL Server supports milliseconds, but Python's datetime supports nanoseconds, so the most granular datetime
# supported is xxx000.
value = datetime(2007, 1, 15, 3, 4, 5, 123000)
self.cursor.execute("create table t1(dt datetime)")
self.cursor.execute("insert into t1 values (?)", value)
result = self.cursor.execute("select dt from t1").fetchone()[0]
self.assertEquals(type(value), datetime)
self.assertEquals(result, value)
def test_datetime_fraction_rounded(self):
    # SQL Server stores only milliseconds while Python datetimes carry
    # microseconds; pyodbc drops the extra digits on insert, so the value
    # read back is the same timestamp truncated to xxx000 microseconds.
    written = datetime(2007, 1, 15, 3, 4, 5, 123456)
    truncated = datetime(2007, 1, 15, 3, 4, 5, 123000)
    self.cursor.execute("create table t1(dt datetime)")
    self.cursor.execute("insert into t1 values (?)", written)
    fetched = self.cursor.execute("select dt from t1").fetchone()[0]
    self.assertEquals(type(fetched), datetime)
    self.assertEquals(fetched, truncated)
def test_date(self):
ver = self.get_sqlserver_version()
if ver < 10: # 2008 only
return # so pass / ignore
value = date.today()
self.cursor.execute("create table t1(d date)")
self.cursor.execute("insert into t1 values (?)", value)
result = self.cursor.execute("select d from t1").fetchone()[0]
self.assertEquals(type(value), date)
self.assertEquals(value, result)
def test_time(self):
ver = self.get_sqlserver_version()
if ver < 10: # 2008 only
return # so pass / ignore
value = datetime.now().time()
# We aren't yet writing values using the new extended time type so the value written to the database is only
# down to the second.
value = value.replace(microsecond=0)
self.cursor.execute("create table t1(t time)")
self.cursor.execute("insert into t1 values (?)", value)
result = self.cursor.execute("select t from t1").fetchone()[0]
self.assertEquals(type(value), time)
self.assertEquals(value, result)
def test_datetime2(self):
value = datetime(2007, 1, 15, 3, 4, 5)
self.cursor.execute("create table t1(dt datetime2)")
self.cursor.execute("insert into t1 values (?)", value)
result = self.cursor.execute("select dt from t1").fetchone()[0]
self.assertEquals(type(value), datetime)
self.assertEquals(value, result)
#
# ints and floats
#
def test_int(self):
value = 1234
self.cursor.execute("create table t1(n int)")
self.cursor.execute("insert into t1 values (?)", value)
result = self.cursor.execute("select n from t1").fetchone()[0]
self.assertEquals(result, value)
def test_negative_int(self):
value = -1
self.cursor.execute("create table t1(n int)")
self.cursor.execute("insert into t1 values (?)", value)
result = self.cursor.execute("select n from t1").fetchone()[0]
self.assertEquals(result, value)
def test_bigint(self):
input = 3000000000
self.cursor.execute("create table t1(d bigint)")
self.cursor.execute("insert into t1 values (?)", input)
result = self.cursor.execute("select d from t1").fetchone()[0]
self.assertEqual(result, input)
def test_float(self):
value = 1234.567
self.cursor.execute("create table t1(n float)")
self.cursor.execute("insert into t1 values (?)", value)
result = self.cursor.execute("select n from t1").fetchone()[0]
self.assertEquals(result, value)
def test_negative_float(self):
value = -200
self.cursor.execute("create table t1(n float)")
self.cursor.execute("insert into t1 values (?)", value)
result = self.cursor.execute("select n from t1").fetchone()[0]
self.assertEqual(value, result)
#
# stored procedures
#
# def test_callproc(self):
# "callproc with a simple input-only stored procedure"
# pass
def test_sp_results(self):
self.cursor.execute(
"""
Create procedure proc1
AS
select top 10 name, id, xtype, refdate
from sysobjects
""")
rows = self.cursor.execute("exec proc1").fetchall()
self.assertEquals(type(rows), list)
self.assertEquals(len(rows), 10) # there has to be at least 10 items in sysobjects
self.assertEquals(type(rows[0].refdate), datetime)
def test_sp_results_from_temp(self):
# Note: I've used "set nocount on" so that we don't get the number of rows deleted from #tmptable.
# If you don't do this, you'd need to call nextset() once to skip it.
self.cursor.execute(
"""
Create procedure proc1
AS
set nocount on
select top 10 name, id, xtype, refdate
into #tmptable
from sysobjects
select * from #tmptable
""")
self.cursor.execute("exec proc1")
self.assert_(self.cursor.description is not None)
self.assert_(len(self.cursor.description) == 4)
rows = self.cursor.fetchall()
self.assertEquals(type(rows), list)
self.assertEquals(len(rows), 10) # there has to be at least 10 items in sysobjects
self.assertEquals(type(rows[0].refdate), datetime)
def test_sp_results_from_vartbl(self):
self.cursor.execute(
"""
Create procedure proc1
AS
set nocount on
declare @tmptbl table(name varchar(100), id int, xtype varchar(4), refdate datetime)
insert into @tmptbl
select top 10 name, id, xtype, refdate
from sysobjects
select * from @tmptbl
""")
self.cursor.execute("exec proc1")
rows = self.cursor.fetchall()
self.assertEquals(type(rows), list)
self.assertEquals(len(rows), 10) # there has to be at least 10 items in sysobjects
self.assertEquals(type(rows[0].refdate), datetime)
def test_sp_with_dates(self):
# Reported in the forums that passing two datetimes to a stored procedure doesn't work.
self.cursor.execute(
"""
if exists (select * from dbo.sysobjects where id = object_id(N'[test_sp]') and OBJECTPROPERTY(id, N'IsProcedure') = 1)
drop procedure [dbo].[test_sp]
""")
self.cursor.execute(
"""
create procedure test_sp(@d1 datetime, @d2 datetime)
AS
declare @d as int
set @d = datediff(year, @d1, @d2)
select @d
""")
self.cursor.execute("exec test_sp ?, ?", datetime.now(), datetime.now())
rows = self.cursor.fetchall()
self.assert_(rows is not None)
self.assert_(rows[0][0] == 0) # 0 years apart
def test_sp_with_none(self):
    """
    Ensure None can be passed as a stored-procedure parameter and the
    resulting NULL is returned intact.
    """
    # Reported in the forums that passing None caused an error.
    self.cursor.execute(
        """
        if exists (select * from dbo.sysobjects where id = object_id(N'[test_sp]') and OBJECTPROPERTY(id, N'IsProcedure') = 1)
        drop procedure [dbo].[test_sp]
        """)
    self.cursor.execute(
        """
        create procedure test_sp(@x varchar(20))
        AS
        declare @y varchar(20)
        set @y = @x
        select @y
        """)
    self.cursor.execute("exec test_sp ?", None)
    rows = self.cursor.fetchall()
    self.assert_(rows is not None)
    # The procedure just echoes its parameter, so None must come back as NULL.
    # (The previous "# 0 years apart" comment was copy-pasted from
    # test_sp_with_dates and did not apply here.)
    self.assert_(rows[0][0] == None)
#
# rowcount
#
def test_rowcount_delete(self):
    "Cursor.rowcount reports the number of rows removed by a delete."
    self.assertEquals(self.cursor.rowcount, -1)
    self.cursor.execute("create table t1(i int)")
    total = 4
    for n in range(total):
        self.cursor.execute("insert into t1 values (?)", n)
    self.cursor.execute("delete from t1")
    self.assertEquals(self.cursor.rowcount, total)
def test_rowcount_nodata(self):
"""
This represents a different code path than a delete that deleted something.
The return value is SQL_NO_DATA and code after it was causing an error. We could use SQL_NO_DATA to step over
the code that errors out and drop down to the same SQLRowCount code. On the other hand, we could hardcode a
zero return value.
"""
self.cursor.execute("create table t1(i int)")
# This is a different code path internally.
self.cursor.execute("delete from t1")
self.assertEquals(self.cursor.rowcount, 0)
def test_rowcount_select(self):
"""
Ensure Cursor.rowcount is set properly after a select statement.
pyodbc calls SQLRowCount after each execute and sets Cursor.rowcount, but SQL Server 2005 returns -1 after a
select statement, so we'll test for that behavior. This is valid behavior according to the DB API
specification, but people don't seem to like it.
"""
self.cursor.execute("create table t1(i int)")
count = 4
for i in range(count):
self.cursor.execute("insert into t1 values (?)", i)
self.cursor.execute("select * from t1")
self.assertEquals(self.cursor.rowcount, -1)
rows = self.cursor.fetchall()
self.assertEquals(len(rows), count)
self.assertEquals(self.cursor.rowcount, -1)
def test_rowcount_reset(self):
"Ensure rowcount is reset to -1"
self.cursor.execute("create table t1(i int)")
count = 4
for i in range(count):
self.cursor.execute("insert into t1 values (?)", i)
self.assertEquals(self.cursor.rowcount, 1)
self.cursor.execute("create table t2(i int)")
self.assertEquals(self.cursor.rowcount, -1)
#
# always return Cursor
#
# In the 2.0.x branch, Cursor.execute sometimes returned the cursor and sometimes the rowcount. This proved very
# confusing when things went wrong and added very little value even when things went right since users could always
# use: cursor.execute("...").rowcount
def test_retcursor_delete(self):
self.cursor.execute("create table t1(i int)")
self.cursor.execute("insert into t1 values (1)")
v = self.cursor.execute("delete from t1")
self.assertEquals(v, self.cursor)
def test_retcursor_nodata(self):
"""
This represents a different code path than a delete that deleted something.
The return value is SQL_NO_DATA and code after it was causing an error. We could use SQL_NO_DATA to step over
the code that errors out and drop down to the same SQLRowCount code.
"""
self.cursor.execute("create table t1(i int)")
# This is a different code path internally.
v = self.cursor.execute("delete from t1")
self.assertEquals(v, self.cursor)
def test_retcursor_select(self):
self.cursor.execute("create table t1(i int)")
self.cursor.execute("insert into t1 values (1)")
v = self.cursor.execute("select * from t1")
self.assertEquals(v, self.cursor)
#
# misc
#
def test_lower_case(self):
"Ensure pyodbc.lowercase forces returned column names to lowercase."
# Has to be set before creating the cursor, so we must recreate self.cursor.
pyodbc.lowercase = True
self.cursor = self.cnxn.cursor()
self.cursor.execute("create table t1(Abc int, dEf int)")
self.cursor.execute("select * from t1")
names = [ t[0] for t in self.cursor.description ]
names.sort()
self.assertEquals(names, [ "abc", "def" ])
# Put it back so other tests don't fail.
pyodbc.lowercase = False
def test_row_description(self):
"""
Ensure Cursor.description is accessible as Row.cursor_description.
"""
self.cursor = self.cnxn.cursor()
self.cursor.execute("create table t1(a int, b char(3))")
self.cnxn.commit()
self.cursor.execute("insert into t1 values(1, 'abc')")
row = self.cursor.execute("select * from t1").fetchone()
self.assertEquals(self.cursor.description, row.cursor_description)
def test_temp_select(self):
# A project was failing to create temporary tables via select into.
self.cursor.execute("create table t1(s char(7))")
self.cursor.execute("insert into t1 values(?)", "testing")
v = self.cursor.execute("select * from t1").fetchone()[0]
self.assertEqual(type(v), str)
self.assertEqual(v, "testing")
self.cursor.execute("select s into t2 from t1")
v = self.cursor.execute("select * from t1").fetchone()[0]
self.assertEqual(type(v), str)
self.assertEqual(v, "testing")
def test_money(self):
d = Decimal('123456.78')
self.cursor.execute("create table t1(i int identity(1,1), m money)")
self.cursor.execute("insert into t1(m) values (?)", d)
v = self.cursor.execute("select m from t1").fetchone()[0]
self.assertEqual(v, d)
def test_executemany(self):
    "executemany inserts exactly one row per parameter tuple, in order."
    self.cursor.execute("create table t1(a int, b varchar(10))")
    expected = [(n, str(n)) for n in range(1, 6)]
    self.cursor.executemany("insert into t1(a, b) values (?,?)", expected)
    count = self.cursor.execute("select count(*) from t1").fetchone()[0]
    self.assertEqual(count, len(expected))
    self.cursor.execute("select a, b from t1 order by a")
    fetched = self.cursor.fetchall()
    self.assertEqual(count, len(fetched))
    for (a, b), row in zip(expected, fetched):
        self.assertEqual(a, row[0])
        self.assertEqual(b, row[1])
def test_executemany_one(self):
"Pass executemany a single sequence"
self.cursor.execute("create table t1(a int, b varchar(10))")
params = [ (1, "test") ]
self.cursor.executemany("insert into t1(a, b) values (?,?)", params)
count = self.cursor.execute("select count(*) from t1").fetchone()[0]
self.assertEqual(count, len(params))
self.cursor.execute("select a, b from t1 order by a")
rows = self.cursor.fetchall()
self.assertEqual(count, len(rows))
for param, row in zip(params, rows):
self.assertEqual(param[0], row[0])
self.assertEqual(param[1], row[1])
def test_executemany_failure(self):
    """
    Ensure that an exception is raised if one query in an executemany fails.
    """
    self.cursor.execute("create table t1(a int, b varchar(10))")
    params = [ (1, 'good'),
               ('error', 'not an int'),
               (3, 'good') ]
    # Fixed: the SQL previously read "value" instead of "values", so the
    # statement itself was a syntax error and the test passed without ever
    # exercising the bad-parameter path. With valid SQL, the failure must
    # come from the non-integer parameter in the second tuple.
    self.failUnlessRaises(pyodbc.Error, self.cursor.executemany, "insert into t1(a, b) values (?, ?)", params)
def test_row_slicing(self):
self.cursor.execute("create table t1(a int, b int, c int, d int)");
self.cursor.execute("insert into t1 values(1,2,3,4)")
row = self.cursor.execute("select * from t1").fetchone()
result = row[:]
self.failUnless(result is row)
result = row[:-1]
self.assertEqual(result, (1,2,3))
result = row[0:4]
self.failUnless(result is row)
def test_row_repr(self):
    "str() of a Row, and of its slices, renders like a plain tuple."
    self.cursor.execute("create table t1(a int, b int, c int, d int)")
    self.cursor.execute("insert into t1 values(1,2,3,4)")
    row = self.cursor.execute("select * from t1").fetchone()
    self.assertEqual(str(row), "(1, 2, 3, 4)")
    self.assertEqual(str(row[:-1]), "(1, 2, 3)")
    # Single-element slice must include the trailing comma, like a tuple.
    self.assertEqual(str(row[:1]), "(1,)")
def test_concatenation(self):
v2 = '0123456789' * 30
v3 = '9876543210' * 30
self.cursor.execute("create table t1(c1 int identity(1, 1), c2 varchar(300), c3 varchar(300))")
self.cursor.execute("insert into t1(c2, c3) values (?,?)", v2, v3)
row = self.cursor.execute("select c2, c3, c2 + c3 as both from t1").fetchone()
self.assertEqual(row.both, v2 + v3)
def test_view_select(self):
# Reported in forum: Can't select from a view? I think I do this a lot, but another test never hurts.
# Create a table (t1) with 3 rows and a view (t2) into it.
self.cursor.execute("create table t1(c1 int identity(1, 1), c2 varchar(50))")
for i in range(3):
self.cursor.execute("insert into t1(c2) values (?)", "string%s" % i)
self.cursor.execute("create view t2 as select * from t1")
# Select from the view
self.cursor.execute("select * from t2")
rows = self.cursor.fetchall()
self.assert_(rows is not None)
self.assert_(len(rows) == 3)
def test_autocommit(self):
self.assertEqual(self.cnxn.autocommit, False)
othercnxn = pyodbc.connect(self.connection_string, autocommit=True)
self.assertEqual(othercnxn.autocommit, True)
othercnxn.autocommit = False
self.assertEqual(othercnxn.autocommit, False)
def test_unicode_results(self):
"Ensure unicode_results forces Unicode"
othercnxn = pyodbc.connect(self.connection_string, unicode_results=True)
othercursor = othercnxn.cursor()
# ANSI data in an ANSI column ...
othercursor.execute("create table t1(s varchar(20))")
othercursor.execute("insert into t1 values(?)", 'test')
# ... should be returned as Unicode
value = othercursor.execute("select s from t1").fetchone()[0]
self.assertEqual(value, 'test')
def test_sqlserver_callproc(self):
try:
self.cursor.execute("drop procedure pyodbctest")
self.cnxn.commit()
except:
pass
self.cursor.execute("create table t1(s varchar(10))")
self.cursor.execute("insert into t1 values(?)", "testing")
self.cursor.execute("""
create procedure pyodbctest @var1 varchar(32)
as
begin
select s
from t1
return
end
""")
self.cnxn.commit()
# for row in self.cursor.procedureColumns('pyodbctest'):
# print row.procedure_name, row.column_name, row.column_type, row.type_name
self.cursor.execute("exec pyodbctest 'hi'")
# print self.cursor.description
# for row in self.cursor:
# print row.s
def test_skip(self):
    """Cursor.skip(n) advances past n rows without fetching them."""
    # Insert 1 through 4. Fetch 1, skip the next two rows (2 and 3), then
    # verify the next fetch returns 4. (The old comment claimed only 1-3
    # were inserted, which did not match the code.)
    self.cursor.execute("create table t1(id int)");
    for i in range(1, 5):
        self.cursor.execute("insert into t1 values(?)", i)
    self.cursor.execute("select id from t1 order by id")
    self.assertEqual(self.cursor.fetchone()[0], 1)
    self.cursor.skip(2)
    self.assertEqual(self.cursor.fetchone()[0], 4)
def test_timeout(self):
self.assertEqual(self.cnxn.timeout, 0) # defaults to zero (off)
self.cnxn.timeout = 30
self.assertEqual(self.cnxn.timeout, 30)
self.cnxn.timeout = 0
self.assertEqual(self.cnxn.timeout, 0)
def test_sets_execute(self):
# Only lists and tuples are allowed.
def f():
self.cursor.execute("create table t1 (word varchar (100))")
words = set (['a'])
self.cursor.execute("insert into t1 (word) VALUES (?)", [words])
self.assertRaises(pyodbc.ProgrammingError, f)
def test_sets_executemany(self):
# Only lists and tuples are allowed.
def f():
self.cursor.execute("create table t1 (word varchar (100))")
words = set (['a'])
self.cursor.executemany("insert into t1 (word) values (?)", [words])
self.assertRaises(TypeError, f)
def test_row_execute(self):
"Ensure we can use a Row object as a parameter to execute"
self.cursor.execute("create table t1(n int, s varchar(10))")
self.cursor.execute("insert into t1 values (1, 'a')")
row = self.cursor.execute("select n, s from t1").fetchone()
self.assertNotEqual(row, None)
self.cursor.execute("create table t2(n int, s varchar(10))")
self.cursor.execute("insert into t2 values (?, ?)", row)
def test_row_executemany(self):
"Ensure we can use a Row object as a parameter to executemany"
self.cursor.execute("create table t1(n int, s varchar(10))")
for i in range(3):
self.cursor.execute("insert into t1 values (?, ?)", i, chr(ord('a')+i))
rows = self.cursor.execute("select n, s from t1").fetchall()
self.assertNotEqual(len(rows), 0)
self.cursor.execute("create table t2(n int, s varchar(10))")
self.cursor.executemany("insert into t2 values (?, ?)", rows)
def test_description(self):
"Ensure cursor.description is correct"
self.cursor.execute("create table t1(n int, s varchar(8), d decimal(5,2))")
self.cursor.execute("insert into t1 values (1, 'abc', '1.23')")
self.cursor.execute("select * from t1")
# (I'm not sure the precision of an int is constant across different versions, bits, so I'm hand checking the
# items I do know.
# int
t = self.cursor.description[0]
self.assertEqual(t[0], 'n')
self.assertEqual(t[1], int)
self.assertEqual(t[5], 0) # scale
self.assertEqual(t[6], True) # nullable
# varchar(8)
t = self.cursor.description[1]
self.assertEqual(t[0], 's')
self.assertEqual(t[1], str)
self.assertEqual(t[4], 8) # precision
self.assertEqual(t[5], 0) # scale
self.assertEqual(t[6], True) # nullable
# decimal(5, 2)
t = self.cursor.description[2]
self.assertEqual(t[0], 'd')
self.assertEqual(t[1], Decimal)
self.assertEqual(t[4], 5) # precision
self.assertEqual(t[5], 2) # scale
self.assertEqual(t[6], True) # nullable
def test_none_param(self):
"Ensure None can be used for params other than the first"
# Some driver/db versions would fail if NULL was not the first parameter because SQLDescribeParam (only used
# with NULL) could not be used after the first call to SQLBindParameter. This means None always worked for the
# first column, but did not work for later columns.
#
# If SQLDescribeParam doesn't work, pyodbc would use VARCHAR which almost always worked. However,
# binary/varbinary won't allow an implicit conversion.
self.cursor.execute("create table t1(n int, blob varbinary(max))")
self.cursor.execute("insert into t1 values (1, newid())")
row = self.cursor.execute("select * from t1").fetchone()
self.assertEqual(row.n, 1)
self.assertEqual(type(row.blob), bytearray)
self.cursor.execute("update t1 set n=?, blob=?", 2, None)
row = self.cursor.execute("select * from t1").fetchone()
self.assertEqual(row.n, 2)
self.assertEqual(row.blob, None)
def test_output_conversion(self):
def convert(value):
# `value` will be a string. We'll simply add an X at the beginning at the end.
return 'X' + value + 'X'
self.cnxn.add_output_converter(pyodbc.SQL_VARCHAR, convert)
self.cursor.execute("create table t1(n int, v varchar(10))")
self.cursor.execute("insert into t1 values (1, '123.45')")
value = self.cursor.execute("select v from t1").fetchone()[0]
self.assertEqual(value, 'X123.45X')
# Now clear the conversions and try again. There should be no Xs this time.
self.cnxn.clear_output_converters()
value = self.cursor.execute("select v from t1").fetchone()[0]
self.assertEqual(value, '123.45')
def test_too_large(self):
    """Ensure error raised if insert fails due to truncation"""
    oversized = 'x' * 1000  # longer than the varchar(800) column below
    self.cursor.execute("create table t1(s varchar(800))")
    def attempt():
        self.cursor.execute("insert into t1 values (?)", oversized)
    self.assertRaises(pyodbc.DataError, attempt)
def test_geometry_null_insert(self):
def convert(value):
return value
self.cnxn.add_output_converter(-151, convert) # -151 is SQL Server's geometry
self.cursor.execute("create table t1(n int, v geometry)")
self.cursor.execute("insert into t1 values (?, ?)", 1, None)
value = self.cursor.execute("select v from t1").fetchone()[0]
self.assertEqual(value, None)
self.cnxn.clear_output_converters()
def test_login_timeout(self):
    """
    Verify the `timeout` connect keyword is accepted.

    This can only test setting the value, since there isn't a way to make
    the server block from here and observe the timeout firing.
    """
    # Fixed: the connection was previously bound to an unused local
    # (`cnxns`) and never closed; close it explicitly instead of relying
    # on garbage collection.
    cnxn = pyodbc.connect(self.connection_string, timeout=2)
    cnxn.close()
def test_row_equal(self):
self.cursor.execute("create table t1(n int, s varchar(20))")
self.cursor.execute("insert into t1 values (1, 'test')")
row1 = self.cursor.execute("select n, s from t1").fetchone()
row2 = self.cursor.execute("select n, s from t1").fetchone()
b = (row1 == row2)
self.assertEqual(b, True)
def test_row_gtlt(self):
self.cursor.execute("create table t1(n int, s varchar(20))")
self.cursor.execute("insert into t1 values (1, 'test1')")
self.cursor.execute("insert into t1 values (1, 'test2')")
rows = self.cursor.execute("select n, s from t1 order by s").fetchall()
self.assert_(rows[0] < rows[1])
self.assert_(rows[0] <= rows[1])
self.assert_(rows[1] > rows[0])
self.assert_(rows[1] >= rows[0])
self.assert_(rows[0] != rows[1])
rows = list(rows)
rows.sort() # uses <
def test_context_manager_success(self):
self.cursor.execute("create table t1(n int)")
self.cnxn.commit()
try:
with pyodbc.connect(self.connection_string) as cnxn:
cursor = cnxn.cursor()
cursor.execute("insert into t1 values (1)")
except Exception:
pass
cnxn = None
cursor = None
rows = self.cursor.execute("select n from t1").fetchall()
self.assertEquals(len(rows), 1)
self.assertEquals(rows[0][0], 1)
def test_untyped_none(self):
# From issue 129
value = self.cursor.execute("select ?", None).fetchone()[0]
self.assertEqual(value, None)
def test_large_update_nodata(self):
self.cursor.execute('create table t1(a varbinary(max))')
hundredkb = b'x'*100*1024
self.cursor.execute('update t1 set a=? where 1=0', (hundredkb,))
def test_func_param(self):
self.cursor.execute('''
create function func1 (@testparam varchar(4))
returns @rettest table (param varchar(4))
as
begin
insert @rettest
select @testparam
return
end
''')
self.cnxn.commit()
value = self.cursor.execute("select * from func1(?)", 'test').fetchone()[0]
self.assertEquals(value, 'test')
def test_no_fetch(self):
# Issue 89 with FreeTDS: Multiple selects (or catalog functions that issue selects) without fetches seem to
# confuse the driver.
self.cursor.execute('select 1')
self.cursor.execute('select 1')
self.cursor.execute('select 1')
def test_drivers(self):
drivers = pyodbc.drivers()
self.assertEqual(list, type(drivers))
self.assert_(len(drivers) > 1)
m = re.search('DRIVER={([^}]+)}', self.connection_string, re.IGNORECASE)
current = m.group(1)
self.assert_(current in drivers)
def main():
    """
    Command-line entry point: parse options, resolve the connection string
    (from the single positional argument, or from a stored setup value),
    print driver/library info, and run the requested tests.
    """
    from optparse import OptionParser
    parser = OptionParser(usage=usage)
    parser.add_option("-v", "--verbose", action="count", default=0, help="Increment test verbosity (can be used multiple times)")
    parser.add_option("-d", "--debug", action="store_true", default=False, help="Print debugging items")
    parser.add_option("-t", "--test", help="Run only the named test")
    (options, args) = parser.parse_args()
    if len(args) > 1:
        parser.error('Only one argument is allowed. Do you need quotes around the connection string?')
    if not args:
        # No connection string on the command line; fall back to the saved
        # one (presumably written by a setup step -- see testutils).
        connection_string = load_setup_connection_string('sqlservertests')
        if not connection_string:
            parser.print_help()
            raise SystemExit()
    else:
        connection_string = args[0]
    # Connect once up front to fail fast and report driver/library versions.
    cnxn = pyodbc.connect(connection_string)
    print_library_info(cnxn)
    cnxn.close()
    suite = load_tests(SqlServerTestCase, options.test, connection_string)
    testRunner = unittest.TextTestRunner(verbosity=options.verbose)
    result = testRunner.run(suite)
if __name__ == '__main__':
# Add the build directory to the path so we're testing the latest build, not the installed version.
add_to_path()
import pyodbc
main()
| Python |
#!/usr/bin/python
usage="""\
usage: %prog [options] filename
Unit tests for Microsoft Access
These run using the version from the 'build' directory, not the version
installed into the Python directories. You must run python setup.py build
before running the tests.
To run, pass the filename of an Access database on the command line:
accesstests test.accdb
An empty Access 2000 database (empty.mdb) and an empty Access 2007 database
(empty.accdb), are provided.
To run a single test, use the -t option:
accesstests test.accdb -t unicode_null
If you want to report an error, it would be helpful to include the driver information
by using the verbose flag and redirecting the output to a file:
accesstests test.accdb -v >& results.txt
You can pass the verbose flag twice for more verbose output:
accesstests test.accdb -vv
"""
# Access SQL data types: http://msdn2.microsoft.com/en-us/library/bb208866.aspx
import sys, os, re
import unittest
from decimal import Decimal
from datetime import datetime, date, time
from os.path import abspath
from testutils import *
CNXNSTRING = None
_TESTSTR = '0123456789-abcdefghijklmnopqrstuvwxyz-'

def _generate_test_string(length):
    """
    Return a string exactly `length` characters long, built from `_TESTSTR`.

    To enhance performance, there are 3 ways data is read, based on the length
    of the value, so most data types are tested with 3 lengths. This function
    helps us generate the test data.

    We use a recognizable data set instead of a single character to make it
    less likely that "overlap" errors will be hidden and to help us manually
    identify where a break occurs.
    """
    if length <= len(_TESTSTR):
        return _TESTSTR[:length]
    # Fixed: use floor division so the repeat count stays an int. The old
    # `/` produces a float under Python 3, which would make `_TESTSTR * c`
    # raise TypeError. `//` behaves identically on Python 2.
    c = (length + len(_TESTSTR) - 1) // len(_TESTSTR)
    v = _TESTSTR * c
    return v[:length]
class AccessTestCase(unittest.TestCase):
SMALL_FENCEPOST_SIZES = [ 0, 1, 254, 255 ] # text fields <= 255
LARGE_FENCEPOST_SIZES = [ 256, 270, 304, 508, 510, 511, 512, 1023, 1024, 2047, 2048, 4000, 4095, 4096, 4097, 10 * 1024, 20 * 1024 ]
ANSI_FENCEPOSTS = [ _generate_test_string(size) for size in SMALL_FENCEPOST_SIZES ]
UNICODE_FENCEPOSTS = [ unicode(s) for s in ANSI_FENCEPOSTS ]
IMAGE_FENCEPOSTS = ANSI_FENCEPOSTS + [ _generate_test_string(size) for size in LARGE_FENCEPOST_SIZES ]
def __init__(self, method_name):
unittest.TestCase.__init__(self, method_name)
def setUp(self):
self.cnxn = pyodbc.connect(CNXNSTRING)
self.cursor = self.cnxn.cursor()
for i in range(3):
try:
self.cursor.execute("drop table t%d" % i)
self.cnxn.commit()
except:
pass
self.cnxn.rollback()
def tearDown(self):
    """Best-effort cleanup of the per-test cursor and connection."""
    try:
        self.cursor.close()
        self.cnxn.close()
    except:
        # If we've already closed the cursor or connection, exceptions are
        # thrown; swallow them so cleanup never fails a test.
        pass
def test_multiple_bindings(self):
"More than one bind and select on a cursor"
self.cursor.execute("create table t1(n int)")
self.cursor.execute("insert into t1 values (?)", 1)
self.cursor.execute("insert into t1 values (?)", 2)
self.cursor.execute("insert into t1 values (?)", 3)
for i in range(3):
self.cursor.execute("select n from t1 where n < ?", 10)
self.cursor.execute("select n from t1 where n < 3")
def test_different_bindings(self):
self.cursor.execute("create table t1(n int)")
self.cursor.execute("create table t2(d datetime)")
self.cursor.execute("insert into t1 values (?)", 1)
self.cursor.execute("insert into t2 values (?)", datetime.now())
def test_datasources(self):
p = pyodbc.dataSources()
self.assert_(isinstance(p, dict))
def test_getinfo_string(self):
value = self.cnxn.getinfo(pyodbc.SQL_CATALOG_NAME_SEPARATOR)
self.assert_(isinstance(value, str))
def test_getinfo_bool(self):
value = self.cnxn.getinfo(pyodbc.SQL_ACCESSIBLE_TABLES)
self.assert_(isinstance(value, bool))
def test_getinfo_int(self):
value = self.cnxn.getinfo(pyodbc.SQL_DEFAULT_TXN_ISOLATION)
self.assert_(isinstance(value, (int, long)))
def test_getinfo_smallint(self):
value = self.cnxn.getinfo(pyodbc.SQL_CONCAT_NULL_BEHAVIOR)
self.assert_(isinstance(value, int))
def _test_strtype(self, sqltype, value, colsize=None):
"""
The implementation for string, Unicode, and binary tests.
"""
assert colsize is None or (value is None or colsize >= len(value)), 'colsize=%s value=%s' % (colsize, (value is None) and 'none' or len(value))
if colsize:
sql = "create table t1(n1 int not null, s1 %s(%s), s2 %s(%s))" % (sqltype, colsize, sqltype, colsize)
else:
sql = "create table t1(n1 int not null, s1 %s, s2 %s)" % (sqltype, sqltype)
self.cursor.execute(sql)
self.cursor.execute("insert into t1 values(1, ?, ?)", (value, value))
row = self.cursor.execute("select s1, s2 from t1").fetchone()
# Access only uses Unicode, but strings might have been passed in to see if they can be written. When we read
# them back, they'll be unicode, so compare our results to a Unicode version of `value`.
if type(value) is str:
value = unicode(value)
for i in range(2):
v = row[i]
self.assertEqual(type(v), type(value))
if value is not None:
self.assertEqual(len(v), len(value))
self.assertEqual(v, value)
#
# unicode
#
def test_unicode_null(self):
self._test_strtype('varchar', None, 255)
# Generate a test for each fencepost size: test_varchar_0, etc.
def _maketest(value):
def t(self):
self._test_strtype('varchar', value, len(value))
t.__doc__ = 'unicode %s' % len(value)
return t
for value in UNICODE_FENCEPOSTS:
locals()['test_unicode_%s' % len(value)] = _maketest(value)
#
# ansi -> varchar
#
# Access only stores Unicode text but it should accept ASCII text.
# Generate a test for each fencepost size: test_varchar_0, etc.
def _maketest(value):
def t(self):
self._test_strtype('varchar', value, len(value))
t.__doc__ = 'ansi %s' % len(value)
return t
for value in ANSI_FENCEPOSTS:
locals()['test_ansivarchar_%s' % len(value)] = _maketest(value)
#
# binary
#
# Generate a test for each fencepost size: test_varchar_0, etc.
def _maketest(value):
def t(self):
self._test_strtype('varbinary', buffer(value), len(value))
t.__doc__ = 'binary %s' % len(value)
return t
for value in ANSI_FENCEPOSTS:
locals()['test_binary_%s' % len(value)] = _maketest(value)
#
# image
#
def test_null_image(self):
self._test_strtype('image', None)
# Generate a test for each fencepost size: test_varchar_0, etc.
def _maketest(value):
def t(self):
self._test_strtype('image', buffer(value))
t.__doc__ = 'image %s' % len(value)
return t
for value in IMAGE_FENCEPOSTS:
locals()['test_image_%s' % len(value)] = _maketest(value)
#
# memo
#
def test_null_memo(self):
self._test_strtype('memo', None)
# Generate a test for each fencepost size: test_varchar_0, etc.
def _maketest(value):
def t(self):
self._test_strtype('memo', unicode(value))
t.__doc__ = 'Unicode to memo %s' % len(value)
return t
for value in IMAGE_FENCEPOSTS:
locals()['test_memo_%s' % len(value)] = _maketest(value)
# ansi -> memo
def _maketest(value):
def t(self):
self._test_strtype('memo', value)
t.__doc__ = 'ANSI to memo %s' % len(value)
return t
for value in IMAGE_FENCEPOSTS:
locals()['test_ansimemo_%s' % len(value)] = _maketest(value)
def test_subquery_params(self):
"""Ensure parameter markers work in a subquery"""
self.cursor.execute("create table t1(id integer, s varchar(20))")
self.cursor.execute("insert into t1 values (?,?)", 1, 'test')
row = self.cursor.execute("""
select x.id
from (
select id
from t1
where s = ?
and id between ? and ?
) x
""", 'test', 1, 10).fetchone()
self.assertNotEqual(row, None)
self.assertEqual(row[0], 1)
def _exec(self):
self.cursor.execute(self.sql)
def test_close_cnxn(self):
"""Make sure using a Cursor after closing its connection doesn't crash."""
self.cursor.execute("create table t1(id integer, s varchar(20))")
self.cursor.execute("insert into t1 values (?,?)", 1, 'test')
self.cursor.execute("select * from t1")
self.cnxn.close()
# Now that the connection is closed, we expect an exception. (If the code attempts to use
# the HSTMT, we'll get an access violation instead.)
self.sql = "select * from t1"
self.assertRaises(pyodbc.ProgrammingError, self._exec)
def test_unicode_query(self):
self.cursor.execute(u"select 1")
def test_negative_row_index(self):
self.cursor.execute("create table t1(s varchar(20))")
self.cursor.execute("insert into t1 values(?)", "1")
row = self.cursor.execute("select * from t1").fetchone()
self.assertEquals(row[0], "1")
self.assertEquals(row[-1], "1")
def test_version(self):
self.assertEquals(3, len(pyodbc.version.split('.'))) # 1.3.1 etc.
#
# date, time, datetime
#
def test_datetime(self):
value = datetime(2007, 1, 15, 3, 4, 5)
self.cursor.execute("create table t1(dt datetime)")
self.cursor.execute("insert into t1 values (?)", value)
result = self.cursor.execute("select dt from t1").fetchone()[0]
self.assertEquals(value, result)
#
# ints and floats
#
def test_int(self):
value = 1234
self.cursor.execute("create table t1(n int)")
self.cursor.execute("insert into t1 values (?)", value)
result = self.cursor.execute("select n from t1").fetchone()[0]
self.assertEquals(result, value)
def test_negative_int(self):
value = -1
self.cursor.execute("create table t1(n int)")
self.cursor.execute("insert into t1 values (?)", value)
result = self.cursor.execute("select n from t1").fetchone()[0]
self.assertEquals(result, value)
def test_smallint(self):
value = 32767
self.cursor.execute("create table t1(n smallint)")
self.cursor.execute("insert into t1 values (?)", value)
result = self.cursor.execute("select n from t1").fetchone()[0]
self.assertEquals(result, value)
def test_real(self):
value = 1234.5
self.cursor.execute("create table t1(n real)")
self.cursor.execute("insert into t1 values (?)", value)
result = self.cursor.execute("select n from t1").fetchone()[0]
self.assertEquals(result, value)
def test_negative_real(self):
value = -200.5
self.cursor.execute("create table t1(n real)")
self.cursor.execute("insert into t1 values (?)", value)
result = self.cursor.execute("select n from t1").fetchone()[0]
self.assertEqual(value, result)
def test_float(self):
value = 1234.567
self.cursor.execute("create table t1(n float)")
self.cursor.execute("insert into t1 values (?)", value)
result = self.cursor.execute("select n from t1").fetchone()[0]
self.assertEquals(result, value)
def test_negative_float(self):
value = -200.5
self.cursor.execute("create table t1(n float)")
self.cursor.execute("insert into t1 values (?)", value)
result = self.cursor.execute("select n from t1").fetchone()[0]
self.assertEqual(value, result)
def test_tinyint(self):
self.cursor.execute("create table t1(n tinyint)")
value = 10
self.cursor.execute("insert into t1 values (?)", value)
result = self.cursor.execute("select n from t1").fetchone()[0]
self.assertEqual(type(result), type(value))
self.assertEqual(value, result)
#
# decimal & money
#
def test_decimal(self):
value = Decimal('12345.6789')
self.cursor.execute("create table t1(n numeric(10,4))")
self.cursor.execute("insert into t1 values(?)", value)
v = self.cursor.execute("select n from t1").fetchone()[0]
self.assertEqual(type(v), Decimal)
self.assertEqual(v, value)
def test_money(self):
self.cursor.execute("create table t1(n money)")
value = Decimal('1234.45')
self.cursor.execute("insert into t1 values (?)", value)
result = self.cursor.execute("select n from t1").fetchone()[0]
self.assertEqual(type(result), type(value))
self.assertEqual(value, result)
def test_negative_decimal_scale(self):
value = Decimal('-10.0010')
self.cursor.execute("create table t1(d numeric(19,4))")
self.cursor.execute("insert into t1 values(?)", value)
v = self.cursor.execute("select * from t1").fetchone()[0]
self.assertEqual(type(v), Decimal)
self.assertEqual(v, value)
#
# bit
#
def test_bit(self):
self.cursor.execute("create table t1(b bit)")
value = True
self.cursor.execute("insert into t1 values (?)", value)
result = self.cursor.execute("select b from t1").fetchone()[0]
self.assertEqual(type(result), bool)
self.assertEqual(value, result)
def test_bit_null(self):
self.cursor.execute("create table t1(b bit)")
value = None
self.cursor.execute("insert into t1 values (?)", value)
result = self.cursor.execute("select b from t1").fetchone()[0]
self.assertEqual(type(result), bool)
self.assertEqual(False, result)
def test_guid(self):
# REVIEW: Python doesn't (yet) have a UUID type so the value is returned as a string. Access, however, only
# really supports Unicode. For now, we'll have to live with this difference. All strings in Python 3.x will
# be Unicode -- pyodbc 3.x will have different defaults.
value = "de2ac9c6-8676-4b0b-b8a6-217a8580cbee"
self.cursor.execute("create table t1(g1 uniqueidentifier)")
self.cursor.execute("insert into t1 values (?)", value)
v = self.cursor.execute("select * from t1").fetchone()[0]
self.assertEqual(type(v), type(value))
self.assertEqual(len(v), len(value))
#
# rowcount
#
def test_rowcount_delete(self):
self.assertEquals(self.cursor.rowcount, -1)
self.cursor.execute("create table t1(i int)")
count = 4
for i in range(count):
self.cursor.execute("insert into t1 values (?)", i)
self.cursor.execute("delete from t1")
self.assertEquals(self.cursor.rowcount, count)
def test_rowcount_nodata(self):
"""
This represents a different code path than a delete that deleted something.
The return value is SQL_NO_DATA and code after it was causing an error. We could use SQL_NO_DATA to step over
the code that errors out and drop down to the same SQLRowCount code. On the other hand, we could hardcode a
zero return value.
"""
self.cursor.execute("create table t1(i int)")
# This is a different code path internally.
self.cursor.execute("delete from t1")
self.assertEquals(self.cursor.rowcount, 0)
def test_rowcount_select(self):
"""
Ensure Cursor.rowcount is set properly after a select statement.
pyodbc calls SQLRowCount after each execute and sets Cursor.rowcount, but SQL Server 2005 returns -1 after a
select statement, so we'll test for that behavior. This is valid behavior according to the DB API
specification, but people don't seem to like it.
"""
self.cursor.execute("create table t1(i int)")
count = 4
for i in range(count):
self.cursor.execute("insert into t1 values (?)", i)
self.cursor.execute("select * from t1")
self.assertEquals(self.cursor.rowcount, -1)
rows = self.cursor.fetchall()
self.assertEquals(len(rows), count)
self.assertEquals(self.cursor.rowcount, -1)
def test_rowcount_reset(self):
"Ensure rowcount is reset to -1"
self.cursor.execute("create table t1(i int)")
count = 4
for i in range(count):
self.cursor.execute("insert into t1 values (?)", i)
self.assertEquals(self.cursor.rowcount, 1)
self.cursor.execute("create table t2(i int)")
self.assertEquals(self.cursor.rowcount, -1)
#
# Misc
#
def test_lower_case(self):
"Ensure pyodbc.lowercase forces returned column names to lowercase."
# Has to be set before creating the cursor, so we must recreate self.cursor.
pyodbc.lowercase = True
self.cursor = self.cnxn.cursor()
self.cursor.execute("create table t1(Abc int, dEf int)")
self.cursor.execute("select * from t1")
names = [ t[0] for t in self.cursor.description ]
names.sort()
self.assertEquals(names, [ "abc", "def" ])
# Put it back so other tests don't fail.
pyodbc.lowercase = False
def test_row_description(self):
"""
Ensure Cursor.description is accessible as Row.cursor_description.
"""
self.cursor = self.cnxn.cursor()
self.cursor.execute("create table t1(a int, b char(3))")
self.cnxn.commit()
self.cursor.execute("insert into t1 values(1, 'abc')")
row = self.cursor.execute("select * from t1").fetchone()
self.assertEquals(self.cursor.description, row.cursor_description)
def test_executemany(self):
self.cursor.execute("create table t1(a int, b varchar(10))")
params = [ (i, str(i)) for i in range(1, 6) ]
self.cursor.executemany("insert into t1(a, b) values (?,?)", params)
count = self.cursor.execute("select count(*) from t1").fetchone()[0]
self.assertEqual(count, len(params))
self.cursor.execute("select a, b from t1 order by a")
rows = self.cursor.fetchall()
self.assertEqual(count, len(rows))
for param, row in zip(params, rows):
self.assertEqual(param[0], row[0])
self.assertEqual(param[1], row[1])
def test_executemany_failure(self):
"""
Ensure that an exception is raised if one query in an executemany fails.
"""
self.cursor.execute("create table t1(a int, b varchar(10))")
params = [ (1, 'good'),
('error', 'not an int'),
(3, 'good') ]
self.failUnlessRaises(pyodbc.Error, self.cursor.executemany, "insert into t1(a, b) value (?, ?)", params)
def test_row_slicing(self):
self.cursor.execute("create table t1(a int, b int, c int, d int)");
self.cursor.execute("insert into t1 values(1,2,3,4)")
row = self.cursor.execute("select * from t1").fetchone()
result = row[:]
self.failUnless(result is row)
result = row[:-1]
self.assertEqual(result, (1,2,3))
result = row[0:4]
self.failUnless(result is row)
def test_row_repr(self):
self.cursor.execute("create table t1(a int, b int, c int, d int)");
self.cursor.execute("insert into t1 values(1,2,3,4)")
row = self.cursor.execute("select * from t1").fetchone()
result = str(row)
self.assertEqual(result, "(1, 2, 3, 4)")
result = str(row[:-1])
self.assertEqual(result, "(1, 2, 3)")
result = str(row[:1])
self.assertEqual(result, "(1,)")
def test_concatenation(self):
v2 = u'0123456789' * 25
v3 = u'9876543210' * 25
value = v2 + 'x' + v3
self.cursor.execute("create table t1(c2 varchar(250), c3 varchar(250))")
self.cursor.execute("insert into t1(c2, c3) values (?,?)", v2, v3)
row = self.cursor.execute("select c2 + 'x' + c3 from t1").fetchone()
self.assertEqual(row[0], value)
def test_autocommit(self):
self.assertEqual(self.cnxn.autocommit, False)
othercnxn = pyodbc.connect(CNXNSTRING, autocommit=True)
self.assertEqual(othercnxn.autocommit, True)
othercnxn.autocommit = False
self.assertEqual(othercnxn.autocommit, False)
def main():
    """Parse the command line, build CNXNSTRING, and run the Access test suite."""
    from optparse import OptionParser
    parser = OptionParser(usage=usage)
    parser.add_option("-v", "--verbose", action="count", help="Increment test verbosity (can be used multiple times)")
    parser.add_option("-d", "--debug", action="store_true", default=False, help="Print debugging items")
    parser.add_option("-t", "--test", help="Run only the named test")

    options, args = parser.parse_args()
    if len(args) != 1:
        parser.error('dbfile argument required')

    # Newer .accdb databases require a different ODBC driver than .mdb files.
    is_accdb = args[0].endswith('.accdb')
    driver = 'Microsoft Access Driver (*.mdb, *.accdb)' if is_accdb else 'Microsoft Access Driver (*.mdb)'

    global CNXNSTRING
    CNXNSTRING = 'DRIVER={%s};DBQ=%s;ExtendedAnsiSQL=1' % (driver, abspath(args[0]))

    # Open one throwaway connection up front to report library/driver versions.
    probe = pyodbc.connect(CNXNSTRING)
    print_library_info(probe)
    probe.close()

    suite = load_tests(AccessTestCase, options.test)
    runner = unittest.TextTestRunner(verbosity=options.verbose)
    runner.run(suite)
if __name__ == '__main__':

    # Add the build directory to the path so we're testing the latest build, not the installed version.
    add_to_path()

    # Import pyodbc only after the path is adjusted so the freshly built
    # module (if any) wins over an installed one.
    import pyodbc
    main()
| Python |
import os, sys, platform
from os.path import join, dirname, abspath, basename
import unittest
def add_to_path():
    """
    Prepends the build directory to the path so that newly built pyodbc libraries are used, allowing it to be tested
    without installing it.

    Falls through with a warning (and no path change) if no built library is found.
    """
    # Put the build directory into the Python path so we pick up the version we just built.
    #
    # To make this cross platform, we'll search the directories until we find the .pyd file.
    import imp  # Py2-era module; deprecated in favor of importlib on Python 3.
    library_exts  = [ t[0] for t in imp.get_suffixes() if t[-1] == imp.C_EXTENSION ]
    library_names = [ 'pyodbc%s' % ext for ext in library_exts ]

    # Only go into directories that match our version number.
    dir_suffix = '-%s.%s' % (sys.version_info[0], sys.version_info[1])

    build = join(dirname(dirname(abspath(__file__))), 'build')

    for root, dirs, files in os.walk(build):
        # Prune non-matching directories in place (iterate over a copy since
        # we mutate `dirs`); os.walk honors the pruned list.
        for d in dirs[:]:
            if not d.endswith(dir_suffix):
                dirs.remove(d)

        for name in library_names:
            if name in files:
                sys.path.insert(0, root)
                return

    print >>sys.stderr, 'Did not find the pyodbc library in the build directory.  Will use an installed version.'
def print_library_info(cnxn):
    """Print Python/pyodbc/ODBC driver version details for the given connection."""
    import pyodbc
    print 'python:  %s' % sys.version
    print 'pyodbc:  %s %s' % (pyodbc.version, os.path.abspath(pyodbc.__file__))
    print 'odbc:    %s' % cnxn.getinfo(pyodbc.SQL_ODBC_VER)
    print 'driver:  %s %s' % (cnxn.getinfo(pyodbc.SQL_DRIVER_NAME), cnxn.getinfo(pyodbc.SQL_DRIVER_VER))
    print '         supports ODBC version %s' % cnxn.getinfo(pyodbc.SQL_DRIVER_ODBC_VER)
    print 'os:      %s' % platform.system()
    print 'unicode: Py_Unicode=%s SQLWCHAR=%s' % (pyodbc.UNICODE_SIZE, pyodbc.SQLWCHAR_SIZE)

    if platform.system() == 'Windows':
        print '         %s' % ' '.join([s for s in platform.win32_ver() if s])
def load_tests(testclass, name, *args):
"""
Returns a TestSuite for tests in `testclass`.
name
Optional test name if you only want to run 1 test. If not provided all tests in `testclass` will be loaded.
args
Arguments for the test class constructor. These will be passed after the test method name.
"""
if name:
if not name.startswith('test_'):
name = 'test_%s' % name
names = [ name ]
else:
names = [ method for method in dir(testclass) if method.startswith('test_') ]
return unittest.TestSuite([ testclass(name, *args) for name in names ])
def load_setup_connection_string(section):
    """
    Attempts to read the default connection string from the setup.cfg file.

    If the file does not exist or if it exists but does not contain the connection string, None is returned.  If the
    file exists but cannot be parsed, an exception is raised.
    """
    # ConfigParser is the Python 2 module name (configparser on Python 3);
    # SafeConfigParser is likewise the Py2-era class.
    from os.path import exists, join, dirname, splitext, basename
    from ConfigParser import SafeConfigParser

    FILENAME = 'setup.cfg'
    KEY      = 'connection-string'

    # Looks one directory up from this file, in tmp/setup.cfg.
    path = join(dirname(dirname(abspath(__file__))), 'tmp', FILENAME)

    if exists(path):
        try:
            p = SafeConfigParser()
            p.read(path)
        except:
            raise SystemExit('Unable to parse %s: %s' % (path, sys.exc_info()[1]))

        if p.has_option(section, KEY):
            return p.get(section, KEY)

    return None
| Python |
#!/usr/bin/python
# Unit tests for PostgreSQL on Linux (Fedora)
# This is a stripped down copy of the SQL Server tests.
import sys, os, re
import unittest
from decimal import Decimal
from testutils import *
_TESTSTR = '0123456789-abcdefghijklmnopqrstuvwxyz-'
def _generate_test_string(length):
"""
Returns a string of composed of `seed` to make a string `length` characters long.
To enhance performance, there are 3 ways data is read, based on the length of the value, so most data types are
tested with 3 lengths. This function helps us generate the test data.
We use a recognizable data set instead of a single character to make it less likely that "overlap" errors will
be hidden and to help us manually identify where a break occurs.
"""
if length <= len(_TESTSTR):
return _TESTSTR[:length]
c = (length + len(_TESTSTR)-1) / len(_TESTSTR)
v = _TESTSTR * c
return v[:length]
class PGTestCase(unittest.TestCase):
    """Round-trip tests for pyodbc against PostgreSQL.

    Unlike the other suites, the constructor takes the connection string and
    the ansi/unicode_results flags explicitly, followed by the test method
    name; see main() below.
    """

    # These are from the C++ code. Keep them up to date.

    # If we are reading a binary, string, or unicode value and do not know how large it is, we'll try reading 2K into a
    # buffer on the stack.  We then copy into a new Python object.
    SMALL_READ = 2048

    # A read guaranteed not to fit in the MAX_STACK_STACK stack buffer, but small enough to be used for varchar (4K max).
    LARGE_READ = 4000

    SMALL_STRING = _generate_test_string(SMALL_READ)
    LARGE_STRING = _generate_test_string(LARGE_READ)

    def __init__(self, connection_string, ansi, unicode_results, method_name):
        unittest.TestCase.__init__(self, method_name)
        self.connection_string = connection_string
        self.ansi = ansi
        self.unicode = unicode_results

    def setUp(self):
        # Fresh connection per test; drop leftovers from any earlier
        # (possibly aborted) run so each test starts with a clean slate.
        self.cnxn = pyodbc.connect(self.connection_string, ansi=self.ansi)
        self.cursor = self.cnxn.cursor()

        for i in range(3):
            try:
                self.cursor.execute("drop table t%d" % i)
                self.cnxn.commit()
            except:
                pass

        self.cnxn.rollback()

    def tearDown(self):
        try:
            self.cursor.close()
            self.cnxn.close()
        except:
            # If we've already closed the cursor or connection, exceptions are thrown.
            pass

    def test_datasources(self):
        p = pyodbc.dataSources()
        self.assert_(isinstance(p, dict))

    def test_getinfo_string(self):
        value = self.cnxn.getinfo(pyodbc.SQL_CATALOG_NAME_SEPARATOR)
        self.assert_(isinstance(value, str))

    def test_getinfo_bool(self):
        value = self.cnxn.getinfo(pyodbc.SQL_ACCESSIBLE_TABLES)
        self.assert_(isinstance(value, bool))

    def test_getinfo_int(self):
        value = self.cnxn.getinfo(pyodbc.SQL_DEFAULT_TXN_ISOLATION)
        self.assert_(isinstance(value, (int, long)))

    def test_getinfo_smallint(self):
        value = self.cnxn.getinfo(pyodbc.SQL_CONCAT_NULL_BEHAVIOR)
        self.assert_(isinstance(value, int))

    def test_negative_float(self):
        value = -200
        self.cursor.execute("create table t1(n float)")
        self.cursor.execute("insert into t1 values (?)", value)
        result = self.cursor.execute("select n from t1").fetchone()[0]
        self.assertEqual(value, result)

    def _test_strtype(self, sqltype, value, colsize=None):
        """
        The implementation for string, Unicode, and binary tests.

        Inserts `value` into a column of type `sqltype` (sized `colsize` if
        given), reads it back, and checks type, length, and equality.  When
        unicode_results was requested, non-NULL results must come back as
        unicode regardless of what was inserted.
        """
        assert colsize is None or (value is None or colsize >= len(value))

        if colsize:
            sql = "create table t1(s %s(%s))" % (sqltype, colsize)
        else:
            sql = "create table t1(s %s)" % sqltype

        self.cursor.execute(sql)
        self.cursor.execute("insert into t1 values(?)", value)
        result = self.cursor.execute("select * from t1").fetchone()[0]

        if self.unicode and value != None:
            self.assertEqual(type(result), unicode)
        else:
            self.assertEqual(type(result), type(value))

        if value is not None:
            self.assertEqual(len(result), len(value))

        self.assertEqual(result, value)

    #
    # varchar
    #

    def test_empty_varchar(self):
        self._test_strtype('varchar', '', self.SMALL_READ)

    def test_null_varchar(self):
        self._test_strtype('varchar', None, self.SMALL_READ)

    def test_large_null_varchar(self):
        # There should not be a difference, but why not find out?
        self._test_strtype('varchar', None, self.LARGE_READ)

    def test_small_varchar(self):
        self._test_strtype('varchar', self.SMALL_STRING, self.SMALL_READ)

    def test_large_varchar(self):
        self._test_strtype('varchar', self.LARGE_STRING, self.LARGE_READ)

    def test_varchar_many(self):
        self.cursor.execute("create table t1(c1 varchar(300), c2 varchar(300), c3 varchar(300))")

        v1 = 'ABCDEFGHIJ' * 30
        v2 = '0123456789' * 30
        v3 = '9876543210' * 30

        self.cursor.execute("insert into t1(c1, c2, c3) values (?,?,?)", v1, v2, v3);
        row = self.cursor.execute("select c1, c2, c3 from t1").fetchone()

        self.assertEqual(v1, row.c1)
        self.assertEqual(v2, row.c2)
        self.assertEqual(v3, row.c3)

    def test_small_decimal(self):
        # value = Decimal('1234567890987654321')
        value = Decimal('100010')       # (I use this because the ODBC docs tell us how the bytes should look in the C struct)
        self.cursor.execute("create table t1(d numeric(19))")
        self.cursor.execute("insert into t1 values(?)", value)
        v = self.cursor.execute("select * from t1").fetchone()[0]
        self.assertEqual(type(v), Decimal)
        self.assertEqual(v, value)

    def test_small_decimal_scale(self):
        # The same as small_decimal, except with a different scale.  This value exactly matches the ODBC documentation
        # example in the C Data Types appendix.
        value = '1000.10'
        value = Decimal(value)
        self.cursor.execute("create table t1(d numeric(20,6))")
        self.cursor.execute("insert into t1 values(?)", value)
        v = self.cursor.execute("select * from t1").fetchone()[0]
        self.assertEqual(type(v), Decimal)
        self.assertEqual(v, value)

    def test_negative_decimal_scale(self):
        value = Decimal('-10.0010')
        self.cursor.execute("create table t1(d numeric(19,4))")
        self.cursor.execute("insert into t1 values(?)", value)
        v = self.cursor.execute("select * from t1").fetchone()[0]
        self.assertEqual(type(v), Decimal)
        self.assertEqual(v, value)

    def _exec(self):
        # Helper used with assertRaises: executes whatever SQL is in self.sql.
        self.cursor.execute(self.sql)

    def test_close_cnxn(self):
        """Make sure using a Cursor after closing its connection doesn't crash."""

        self.cursor.execute("create table t1(id integer, s varchar(20))")
        self.cursor.execute("insert into t1 values (?,?)", 1, 'test')
        self.cursor.execute("select * from t1")

        self.cnxn.close()

        # Now that the connection is closed, we expect an exception.  (If the code attempts to use
        # the HSTMT, we'll get an access violation instead.)
        self.sql = "select * from t1"
        self.assertRaises(pyodbc.ProgrammingError, self._exec)

    def test_empty_string(self):
        self.cursor.execute("create table t1(s varchar(20))")
        self.cursor.execute("insert into t1 values(?)", "")

    def test_fixed_str(self):
        value = "testing"
        self.cursor.execute("create table t1(s char(7))")
        self.cursor.execute("insert into t1 values(?)", "testing")
        v = self.cursor.execute("select * from t1").fetchone()[0]
        self.assertEqual(type(v), self.unicode and unicode or str)
        self.assertEqual(len(v), len(value)) # If we alloc'd wrong, the test below might work because of an embedded NULL
        self.assertEqual(v, value)

    def test_negative_row_index(self):
        self.cursor.execute("create table t1(s varchar(20))")
        self.cursor.execute("insert into t1 values(?)", "1")
        row = self.cursor.execute("select * from t1").fetchone()
        self.assertEquals(row[0], "1")
        self.assertEquals(row[-1], "1")

    def test_version(self):
        self.assertEquals(3, len(pyodbc.version.split('.'))) # 1.3.1 etc.

    def test_rowcount_delete(self):
        self.assertEquals(self.cursor.rowcount, -1)
        self.cursor.execute("create table t1(i int)")
        count = 4
        for i in range(count):
            self.cursor.execute("insert into t1 values (?)", i)
        self.cursor.execute("delete from t1")
        self.assertEquals(self.cursor.rowcount, count)

    def test_rowcount_nodata(self):
        """
        This represents a different code path than a delete that deleted something.

        The return value is SQL_NO_DATA and code after it was causing an error. We could use SQL_NO_DATA to step over
        the code that errors out and drop down to the same SQLRowCount code. On the other hand, we could hardcode a
        zero return value.
        """
        self.cursor.execute("create table t1(i int)")
        # This is a different code path internally.
        self.cursor.execute("delete from t1")
        self.assertEquals(self.cursor.rowcount, 0)

    def test_rowcount_select(self):
        # NOTE(review): unlike the SQL Server/Access suites (which expect -1),
        # the PostgreSQL driver reports the number of selected rows here.
        self.cursor.execute("create table t1(i int)")
        count = 4
        for i in range(count):
            self.cursor.execute("insert into t1 values (?)", i)
        self.cursor.execute("select * from t1")
        self.assertEquals(self.cursor.rowcount, 4)

    # PostgreSQL driver fails here?
    # def test_rowcount_reset(self):
    #     "Ensure rowcount is reset to -1"
    #
    #     self.cursor.execute("create table t1(i int)")
    #     count = 4
    #     for i in range(count):
    #         self.cursor.execute("insert into t1 values (?)", i)
    #     self.assertEquals(self.cursor.rowcount, 1)
    #
    #     self.cursor.execute("create table t2(i int)")
    #     self.assertEquals(self.cursor.rowcount, -1)

    def test_lower_case(self):
        "Ensure pyodbc.lowercase forces returned column names to lowercase."

        # Has to be set before creating the cursor, so we must recreate self.cursor.
        pyodbc.lowercase = True
        self.cursor = self.cnxn.cursor()

        self.cursor.execute("create table t1(Abc int, dEf int)")
        self.cursor.execute("select * from t1")

        names = [ t[0] for t in self.cursor.description ]
        names.sort()

        self.assertEquals(names, [ "abc", "def" ])

        # Put it back so other tests don't fail.
        pyodbc.lowercase = False

    def test_row_description(self):
        """
        Ensure Cursor.description is accessible as Row.cursor_description.
        """
        self.cursor = self.cnxn.cursor()
        self.cursor.execute("create table t1(a int, b char(3))")
        self.cnxn.commit()
        self.cursor.execute("insert into t1 values(1, 'abc')")

        row = self.cursor.execute("select * from t1").fetchone()
        self.assertEquals(self.cursor.description, row.cursor_description)

    def test_executemany(self):
        self.cursor.execute("create table t1(a int, b varchar(10))")

        params = [ (i, str(i)) for i in range(1, 6) ]

        self.cursor.executemany("insert into t1(a, b) values (?,?)", params)

        # REVIEW: Without the cast, we get the following error:
        # [07006] [unixODBC]Received an unsupported type from Postgres.;\nERROR:  table "t2" does not exist (14)

        count = self.cursor.execute("select cast(count(*) as int) from t1").fetchone()[0]
        self.assertEqual(count, len(params))

        self.cursor.execute("select a, b from t1 order by a")
        rows = self.cursor.fetchall()
        self.assertEqual(count, len(rows))

        for param, row in zip(params, rows):
            self.assertEqual(param[0], row[0])
            self.assertEqual(param[1], row[1])

    def test_executemany_failure(self):
        """
        Ensure that an exception is raised if one query in an executemany fails.
        """
        self.cursor.execute("create table t1(a int, b varchar(10))")

        params = [ (1, 'good'),
                   ('error', 'not an int'),
                   (3, 'good') ]

        self.failUnlessRaises(pyodbc.Error, self.cursor.executemany, "insert into t1(a, b) value (?, ?)", params)

    def test_executemany_generator(self):
        self.cursor.execute("create table t1(a int)")
        self.cursor.executemany("insert into t1(a) values (?)", ((i,) for i in range(4)))
        row = self.cursor.execute("select min(a) mina, max(a) maxa from t1").fetchone()
        self.assertEqual(row.mina, 0)
        self.assertEqual(row.maxa, 3)

    def test_executemany_iterator(self):
        self.cursor.execute("create table t1(a int)")
        values = [ (i,) for i in range(4) ]
        self.cursor.executemany("insert into t1(a) values (?)", iter(values))
        row = self.cursor.execute("select min(a) mina, max(a) maxa from t1").fetchone()
        self.assertEqual(row.mina, 0)
        self.assertEqual(row.maxa, 3)

    def test_row_slicing(self):
        self.cursor.execute("create table t1(a int, b int, c int, d int)");
        self.cursor.execute("insert into t1 values(1,2,3,4)")

        row = self.cursor.execute("select * from t1").fetchone()

        # A full slice of a Row returns the Row itself, not a copy.
        result = row[:]
        self.failUnless(result is row)

        result = row[:-1]
        self.assertEqual(result, (1,2,3))

        result = row[0:4]
        self.failUnless(result is row)

    def test_row_repr(self):
        self.cursor.execute("create table t1(a int, b int, c int, d int)");
        self.cursor.execute("insert into t1 values(1,2,3,4)")

        row = self.cursor.execute("select * from t1").fetchone()

        result = str(row)
        self.assertEqual(result, "(1, 2, 3, 4)")

        result = str(row[:-1])
        self.assertEqual(result, "(1, 2, 3)")

        result = str(row[:1])
        self.assertEqual(result, "(1,)")

    def test_pickling(self):
        row = self.cursor.execute("select 1 a, 'two' b").fetchone()
        import pickle
        s = pickle.dumps(row)
        other = pickle.loads(s)
        self.assertEqual(row, other)

    def test_int_limits(self):
        # Exercise the extremes (and a > 32-bit value) through a bigint column.
        values = [ (-sys.maxint - 1), -1, 0, 1, 3230392212, sys.maxint ]

        self.cursor.execute("create table t1(a bigint)")

        for value in values:
            self.cursor.execute("delete from t1")
            self.cursor.execute("insert into t1 values(?)", value)
            v = self.cursor.execute("select a from t1").fetchone()[0]
            self.assertEqual(v, value)
def main():
    """Parse the command line and run the PostgreSQL suite.

    The connection string comes from the single positional argument or, if
    omitted, from setup.cfg via load_setup_connection_string('pgtests').
    """
    from optparse import OptionParser
    parser = OptionParser(usage="usage: %prog [options] connection_string")
    parser.add_option("-v", "--verbose", action="count", help="Increment test verbosity (can be used multiple times)")
    parser.add_option("-d", "--debug", action="store_true", default=False, help="Print debugging items")
    parser.add_option("-t", "--test", help="Run only the named test")
    parser.add_option('-a', '--ansi', help='ANSI only', default=False, action='store_true')
    parser.add_option('-u', '--unicode', help='Expect results in Unicode', default=False, action='store_true')

    (options, args) = parser.parse_args()
    if len(args) > 1:
        parser.error('Only one argument is allowed.  Do you need quotes around the connection string?')

    if not args:
        connection_string = load_setup_connection_string('pgtests')
        if not connection_string:
            parser.print_help()
            raise SystemExit()
    else:
        connection_string = args[0]

    if options.verbose:
        # Report library/driver versions before running anything.
        cnxn = pyodbc.connect(connection_string, ansi=options.ansi)
        print 'library:', os.path.abspath(pyodbc.__file__)
        print 'odbc: %s' % cnxn.getinfo(pyodbc.SQL_ODBC_VER)
        print 'driver: %s %s' % (cnxn.getinfo(pyodbc.SQL_DRIVER_NAME), cnxn.getinfo(pyodbc.SQL_DRIVER_VER))
        print 'driver supports ODBC version %s' % cnxn.getinfo(pyodbc.SQL_DRIVER_ODBC_VER)
        print 'unicode:', pyodbc.UNICODE_SIZE, 'sqlwchar:', pyodbc.SQLWCHAR_SIZE
        cnxn.close()

    if options.test:
        # Run a single test
        if not options.test.startswith('test_'):
            options.test = 'test_%s' % (options.test)

        s = unittest.TestSuite([ PGTestCase(connection_string, options.ansi, options.unicode, options.test) ])
    else:
        # Run all tests in the class
        methods = [ m for m in dir(PGTestCase) if m.startswith('test_') ]
        methods.sort()
        s = unittest.TestSuite([ PGTestCase(connection_string, options.ansi, options.unicode, m) for m in methods ])

    testRunner = unittest.TextTestRunner(verbosity=options.verbose)
    result = testRunner.run(s)
if __name__ == '__main__':
    # Add the build directory to the path so we're testing the latest build, not the installed version.
    # NOTE: pyodbc must be imported *after* add_to_path() so the freshly
    # built extension shadows any installed copy.
    add_to_path()
    import pyodbc
    main()
| Python |
#!/usr/bin/python
# Tests for reading from Excel files.
#
# I have not been able to successfully create or modify Excel files.
import sys, os, re
import unittest
from os.path import abspath
from testutils import *
CNXNSTRING = None
class ExcelTestCase(unittest.TestCase):
    """Tests reading data from an Excel workbook through the ODBC Excel driver.

    Connects using the module-level CNXNSTRING (set by main()); the workbook
    is expected to contain a sheet named Sheet2 and a named range Table1.

    Uses assertTrue/assertEqual instead of the deprecated assert_/assertEquals
    aliases (both modern names exist in Python 2.7's unittest).
    """

    def __init__(self, method_name):
        unittest.TestCase.__init__(self, method_name)

    def setUp(self):
        self.cnxn = pyodbc.connect(CNXNSTRING, autocommit=True)
        self.cursor = self.cnxn.cursor()

        # Best-effort cleanup of tables left over from earlier runs; a drop of
        # a non-existent table raises, so failures are deliberately ignored.
        for i in range(3):
            try:
                self.cursor.execute("drop table t%d" % i)
                self.cnxn.commit()
            except:
                pass

        self.cnxn.rollback()

    def tearDown(self):
        try:
            self.cursor.close()
            self.cnxn.close()
        except:
            # If we've already closed the cursor or connection, exceptions are thrown.
            pass

    def test_getinfo_string(self):
        value = self.cnxn.getinfo(pyodbc.SQL_CATALOG_NAME_SEPARATOR)
        self.assertTrue(isinstance(value, str))

    def test_getinfo_bool(self):
        value = self.cnxn.getinfo(pyodbc.SQL_ACCESSIBLE_TABLES)
        self.assertTrue(isinstance(value, bool))

    def test_getinfo_int(self):
        value = self.cnxn.getinfo(pyodbc.SQL_DEFAULT_TXN_ISOLATION)
        self.assertTrue(isinstance(value, (int, long)))

    def test_getinfo_smallint(self):
        value = self.cnxn.getinfo(pyodbc.SQL_CONCAT_NULL_BEHAVIOR)
        self.assertTrue(isinstance(value, int))

    def test_read_sheet(self):
        # The first method of reading data is to access worksheets by name in this format [name$].
        #
        # Our second sheet is named Sheet2 and has two columns. The first has values 10, 20, 30, etc.
        rows = self.cursor.execute("select * from [Sheet2$]").fetchall()
        self.assertEqual(len(rows), 5)

        for index, row in enumerate(rows):
            self.assertEqual(row.s2num, float(index + 1) * 10)

    def test_read_range(self):
        # The second method of reading data is to assign a name to a range of cells and access that as a table.
        #
        # Our first worksheet has a section named Table1. The first column has values 1, 2, 3, etc.
        rows = self.cursor.execute("select * from Table1").fetchall()
        self.assertEqual(len(rows), 10)

        for index, row in enumerate(rows):
            self.assertEqual(row.num, float(index + 1))
            self.assertEqual(row.val, chr(ord('a') + index))

    def test_tables(self):
        # This is useful for figuring out what is available
        tables = [ row.table_name for row in self.cursor.tables() ]
        assert 'Sheet2$' in tables, 'tables: %s' % ' '.join(tables)

    # Kept for reference: appending to a sheet has never worked (see module
    # docstring -- creating/modifying Excel files was unsuccessful).
    # def test_append(self):
    #     rows = self.cursor.execute("select s2num, s2val from [Sheet2$]").fetchall()
    #
    #     print rows
    #
    #     nextnum = max([ row.s2num for row in rows ]) + 10
    #
    #     self.cursor.execute("insert into [Sheet2$](s2num, s2val) values (?, 'z')", nextnum)
    #
    #     row = self.cursor.execute("select s2num, s2val from [Sheet2$] where s2num=?", nextnum).fetchone()
    #     self.assertTrue(row)
    #
    #     print 'added:', nextnum, len(rows), 'rows'
    #
    #     self.assertEquals(row.s2num, nextnum)
    #     self.assertEquals(row.s2val, 'z')
    #
    #     self.cnxn.commit()
def main():
    """Entry point for the Excel tests.

    Locates test.xls next to this script, builds the Excel ODBC connection
    string into the module-level CNXNSTRING, prints driver details, then
    runs the ExcelTestCase suite.
    """
    from optparse import OptionParser
    parser = OptionParser() #usage=usage)
    parser.add_option("-v", "--verbose", action="count", help="Increment test verbosity (can be used multiple times)")
    parser.add_option("-d", "--debug", action="store_true", default=False, help="Print debugging items")
    parser.add_option("-t", "--test", help="Run only the named test")
    (options, args) = parser.parse_args()

    if args:
        parser.error('no arguments expected')

    global CNXNSTRING

    # The workbook must sit in the same directory as this script.
    # (dirname/join presumably come in via the testutils star import.)
    path = dirname(abspath(__file__))
    filename = join(path, 'test.xls')
    assert os.path.exists(filename)
    CNXNSTRING = 'Driver={Microsoft Excel Driver (*.xls)};DBQ=%s;READONLY=FALSE' % filename

    # Report driver/library details once up front, then run the suite.
    cnxn = pyodbc.connect(CNXNSTRING, autocommit=True)
    print_library_info(cnxn)
    cnxn.close()

    suite = load_tests(ExcelTestCase, options.test)

    testRunner = unittest.TextTestRunner(verbosity=options.verbose)
    result = testRunner.run(suite)
if __name__ == '__main__':
    # Add the build directory to the path so we're testing the latest build, not the installed version.
    # pyodbc must be imported only after add_to_path() for that to work.
    add_to_path()
    import pyodbc
    main()
| Python |
#!/usr/bin/env python
''' Python DB API 2.0 driver compliance unit test suite.
This software is Public Domain and may be used without restrictions.
"Now we have booze and barflies entering the discussion, plus rumours of
DBAs on drugs... and I won't tell you what flashes through my mind each
time I read the subject line with 'Anal Compliance' in it. All around
this is turning out to be a thoroughly unwholesome unit test."
-- Ian Bicking
'''
__rcs_id__ = '$Id: dbapi20.py,v 1.10 2003/10/09 03:14:14 zenzen Exp $'
__version__ = '$Revision: 1.10 $'[11:-2]
__author__ = 'Stuart Bishop <zen@shangri-la.dropbear.id.au>'
import unittest
import time
# $Log: dbapi20.py,v $
# Revision 1.10 2003/10/09 03:14:14 zenzen
# Add test for DB API 2.0 optional extension, where database exceptions
# are exposed as attributes on the Connection object.
#
# Revision 1.9 2003/08/13 01:16:36 zenzen
# Minor tweak from Stefan Fleiter
#
# Revision 1.8 2003/04/10 00:13:25 zenzen
# Changes, as per suggestions by M.-A. Lemburg
# - Add a table prefix, to ensure namespace collisions can always be avoided
#
# Revision 1.7 2003/02/26 23:33:37 zenzen
# Break out DDL into helper functions, as per request by David Rushby
#
# Revision 1.6 2003/02/21 03:04:33 zenzen
# Stuff from Henrik Ekelund:
# added test_None
# added test_nextset & hooks
#
# Revision 1.5 2003/02/17 22:08:43 zenzen
# Implement suggestions and code from Henrik Eklund - test that cursor.arraysize
# defaults to 1 & generic cursor.callproc test added
#
# Revision 1.4 2003/02/15 00:16:33 zenzen
# Changes, as per suggestions and bug reports by M.-A. Lemburg,
# Matthew T. Kromer, Federico Di Gregorio and Daniel Dittmar
# - Class renamed
# - Now a subclass of TestCase, to avoid requiring the driver stub
# to use multiple inheritance
# - Reversed the polarity of buggy test in test_description
# - Test exception heirarchy correctly
# - self.populate is now self._populate(), so if a driver stub
# overrides self.ddl1 this change propogates
# - VARCHAR columns now have a width, which will hopefully make the
# DDL even more portible (this will be reversed if it causes more problems)
# - cursor.rowcount being checked after various execute and fetchXXX methods
# - Check for fetchall and fetchmany returning empty lists after results
# are exhausted (already checking for empty lists if select retrieved
# nothing
# - Fix bugs in test_setoutputsize_basic and test_setinputsizes
#
class DatabaseAPI20Test(unittest.TestCase):
    ''' Test a database self.driver for DB API 2.0 compatibility.
        This implementation tests Gadfly, but the TestCase
        is structured so that other self.drivers can subclass this
        test case to ensure compliance with the DB-API. It is
        expected that this TestCase may be expanded in the future
        if ambiguities or edge conditions are discovered.

        The 'Optional Extensions' are not yet being tested.

        self.drivers should subclass this test, overriding setUp, tearDown,
        self.driver, connect_args and connect_kw_args. Class specification
        should be as follows:

        import dbapi20
        class mytest(dbapi20.DatabaseAPI20Test):
           [...]

        Don't 'import DatabaseAPI20Test from dbapi20', or you will
        confuse the unit tester - just 'import dbapi20'.
    '''

    # The self.driver module. This should be the module where the 'connect'
    # method is to be found
    driver = None
    connect_args = () # List of arguments to pass to connect
    connect_kw_args = {} # Keyword arguments for connect
    table_prefix = 'dbapi20test_' # If you need to specify a prefix for tables

    # DDL used to create the test tables and the matching cleanup DDL.
    ddl1 = 'create table %sbooze (name varchar(20))' % table_prefix
    ddl2 = 'create table %sbarflys (name varchar(20))' % table_prefix
    xddl1 = 'drop table %sbooze' % table_prefix
    xddl2 = 'drop table %sbarflys' % table_prefix

    # NOTE(review): test_callproc below reads self.lower_func (defined just
    # before it); this 'lowerfunc' spelling appears unused -- confirm.
    lowerfunc = 'lower' # Name of stored procedure to convert string->lowercase

    # Some drivers may need to override these helpers, for example adding
    # a 'commit' after the execute.
    def executeDDL1(self,cursor):
        cursor.execute(self.ddl1)

    def executeDDL2(self,cursor):
        cursor.execute(self.ddl2)

    def setUp(self):
        ''' self.drivers should override this method to perform required setup
            if any is necessary, such as creating the database.
        '''
        pass

    def tearDown(self):
        ''' self.drivers should override this method to perform required cleanup
            if any is necessary, such as deleting the test database.
            The default drops the tables that may be created.
        '''
        con = self._connect()
        try:
            cur = con.cursor()
            for i, ddl in enumerate((self.xddl1,self.xddl2)):
                try:
                    cur.execute(ddl)
                    con.commit()
                except self.driver.Error:
                    # Assume table didn't exist. Other tests will check if
                    # execute is busted.
                    pass
        finally:
            con.close()

    def _connect(self):
        # Open a connection using the subclass-supplied args; a missing
        # 'connect' attribute means the driver module was never configured.
        try:
            return self.driver.connect(
                *self.connect_args,**self.connect_kw_args
                )
        except AttributeError:
            self.fail("No connect method found in self.driver module")

    def test_connect(self):
        con = self._connect()
        con.close()

    def test_apilevel(self):
        try:
            # Must exist
            apilevel = self.driver.apilevel
            # Must equal 2.0
            self.assertEqual(apilevel,'2.0')
        except AttributeError:
            self.fail("Driver doesn't define apilevel")

    def test_threadsafety(self):
        try:
            # Must exist
            threadsafety = self.driver.threadsafety
            # Must be a valid value
            self.failUnless(threadsafety in (0,1,2,3))
        except AttributeError:
            self.fail("Driver doesn't define threadsafety")

    def test_paramstyle(self):
        try:
            # Must exist
            paramstyle = self.driver.paramstyle
            # Must be a valid value
            self.failUnless(paramstyle in (
                'qmark','numeric','named','format','pyformat'
                ))
        except AttributeError:
            self.fail("Driver doesn't define paramstyle")

    def test_Exceptions(self):
        # Make sure required exceptions exist, and are in the
        # defined hierarchy.
        self.failUnless(issubclass(self.driver.Warning,StandardError))
        self.failUnless(issubclass(self.driver.Error,StandardError))
        self.failUnless(
            issubclass(self.driver.InterfaceError,self.driver.Error)
            )
        self.failUnless(
            issubclass(self.driver.DatabaseError,self.driver.Error)
            )
        self.failUnless(
            issubclass(self.driver.OperationalError,self.driver.Error)
            )
        self.failUnless(
            issubclass(self.driver.IntegrityError,self.driver.Error)
            )
        self.failUnless(
            issubclass(self.driver.InternalError,self.driver.Error)
            )
        self.failUnless(
            issubclass(self.driver.ProgrammingError,self.driver.Error)
            )
        self.failUnless(
            issubclass(self.driver.NotSupportedError,self.driver.Error)
            )

    def test_ExceptionsAsConnectionAttributes(self):
        # OPTIONAL EXTENSION
        # Test for the optional DB API 2.0 extension, where the exceptions
        # are exposed as attributes on the Connection object
        # I figure this optional extension will be implemented by any
        # driver author who is using this test suite, so it is enabled
        # by default.
        con = self._connect()
        drv = self.driver
        self.failUnless(con.Warning is drv.Warning)
        self.failUnless(con.Error is drv.Error)
        self.failUnless(con.InterfaceError is drv.InterfaceError)
        self.failUnless(con.DatabaseError is drv.DatabaseError)
        self.failUnless(con.OperationalError is drv.OperationalError)
        self.failUnless(con.IntegrityError is drv.IntegrityError)
        self.failUnless(con.InternalError is drv.InternalError)
        self.failUnless(con.ProgrammingError is drv.ProgrammingError)
        self.failUnless(con.NotSupportedError is drv.NotSupportedError)

    def test_commit(self):
        con = self._connect()
        try:
            # Commit must work, even if it doesn't do anything
            con.commit()
        finally:
            con.close()

    def test_rollback(self):
        con = self._connect()
        # If rollback is defined, it should either work or throw
        # the documented exception
        if hasattr(con,'rollback'):
            try:
                con.rollback()
            except self.driver.NotSupportedError:
                pass

    def test_cursor(self):
        con = self._connect()
        try:
            cur = con.cursor()
        finally:
            con.close()

    def test_cursor_isolation(self):
        con = self._connect()
        try:
            # Make sure cursors created from the same connection have
            # the documented transaction isolation level
            cur1 = con.cursor()
            cur2 = con.cursor()
            self.executeDDL1(cur1)
            cur1.execute("insert into %sbooze values ('Victoria Bitter')" % (
                self.table_prefix
                ))
            cur2.execute("select name from %sbooze" % self.table_prefix)
            booze = cur2.fetchall()
            self.assertEqual(len(booze),1)
            self.assertEqual(len(booze[0]),1)
            self.assertEqual(booze[0][0],'Victoria Bitter')
        finally:
            con.close()

    def test_description(self):
        con = self._connect()
        try:
            cur = con.cursor()
            self.executeDDL1(cur)
            self.assertEqual(cur.description,None,
                'cursor.description should be none after executing a '
                'statement that can return no rows (such as DDL)'
                )
            cur.execute('select name from %sbooze' % self.table_prefix)
            self.assertEqual(len(cur.description),1,
                'cursor.description describes too many columns'
                )
            self.assertEqual(len(cur.description[0]),7,
                'cursor.description[x] tuples must have 7 elements'
                )
            self.assertEqual(cur.description[0][0].lower(),'name',
                'cursor.description[x][0] must return column name'
                )
            self.assertEqual(cur.description[0][1],self.driver.STRING,
                'cursor.description[x][1] must return column type. Got %r'
                    % cur.description[0][1]
                )

            # Make sure self.description gets reset
            self.executeDDL2(cur)
            self.assertEqual(cur.description,None,
                'cursor.description not being set to None when executing '
                'no-result statements (eg. DDL)'
                )
        finally:
            con.close()

    def test_rowcount(self):
        con = self._connect()
        try:
            cur = con.cursor()
            self.executeDDL1(cur)
            self.assertEqual(cur.rowcount,-1,
                'cursor.rowcount should be -1 after executing no-result '
                'statements'
                )
            cur.execute("insert into %sbooze values ('Victoria Bitter')" % (
                self.table_prefix
                ))
            self.failUnless(cur.rowcount in (-1,1),
                'cursor.rowcount should == number or rows inserted, or '
                'set to -1 after executing an insert statement'
                )
            cur.execute("select name from %sbooze" % self.table_prefix)
            self.failUnless(cur.rowcount in (-1,1),
                'cursor.rowcount should == number of rows returned, or '
                'set to -1 after executing a select statement'
                )
            self.executeDDL2(cur)
            self.assertEqual(cur.rowcount,-1,
                'cursor.rowcount not being reset to -1 after executing '
                'no-result statements'
                )
        finally:
            con.close()

    lower_func = 'lower'
    def test_callproc(self):
        con = self._connect()
        try:
            cur = con.cursor()
            if self.lower_func and hasattr(cur,'callproc'):
                r = cur.callproc(self.lower_func,('FOO',))
                self.assertEqual(len(r),1)
                self.assertEqual(r[0],'FOO')
                r = cur.fetchall()
                self.assertEqual(len(r),1,'callproc produced no result set')
                self.assertEqual(len(r[0]),1,
                    'callproc produced invalid result set'
                    )
                self.assertEqual(r[0][0],'foo',
                    'callproc produced invalid results'
                    )
        finally:
            con.close()

    def test_close(self):
        con = self._connect()
        try:
            cur = con.cursor()
        finally:
            con.close()

        # cursor.execute should raise an Error if called after connection
        # closed
        self.assertRaises(self.driver.Error,self.executeDDL1,cur)

        # connection.commit should raise an Error if called after connection
        # closed.
        self.assertRaises(self.driver.Error,con.commit)

        # connection.close should raise an Error if called more than once
        self.assertRaises(self.driver.Error,con.close)

    def test_execute(self):
        con = self._connect()
        try:
            cur = con.cursor()
            self._paraminsert(cur)
        finally:
            con.close()

    def _paraminsert(self,cur):
        # Insert one literal row and one parameterized row using whichever
        # paramstyle the driver declares, then verify both round-trip.
        self.executeDDL1(cur)
        cur.execute("insert into %sbooze values ('Victoria Bitter')" % (
            self.table_prefix
            ))
        self.failUnless(cur.rowcount in (-1,1))

        if self.driver.paramstyle == 'qmark':
            cur.execute(
                'insert into %sbooze values (?)' % self.table_prefix,
                ("Cooper's",)
                )
        elif self.driver.paramstyle == 'numeric':
            cur.execute(
                'insert into %sbooze values (:1)' % self.table_prefix,
                ("Cooper's",)
                )
        elif self.driver.paramstyle == 'named':
            cur.execute(
                'insert into %sbooze values (:beer)' % self.table_prefix,
                {'beer':"Cooper's"}
                )
        elif self.driver.paramstyle == 'format':
            cur.execute(
                'insert into %sbooze values (%%s)' % self.table_prefix,
                ("Cooper's",)
                )
        elif self.driver.paramstyle == 'pyformat':
            cur.execute(
                'insert into %sbooze values (%%(beer)s)' % self.table_prefix,
                {'beer':"Cooper's"}
                )
        else:
            self.fail('Invalid paramstyle')
        self.failUnless(cur.rowcount in (-1,1))

        cur.execute('select name from %sbooze' % self.table_prefix)
        res = cur.fetchall()
        self.assertEqual(len(res),2,'cursor.fetchall returned too few rows')
        beers = [res[0][0],res[1][0]]
        beers.sort()
        self.assertEqual(beers[0],"Cooper's",
            'cursor.fetchall retrieved incorrect data, or data inserted '
            'incorrectly'
            )
        self.assertEqual(beers[1],"Victoria Bitter",
            'cursor.fetchall retrieved incorrect data, or data inserted '
            'incorrectly'
            )

    def test_executemany(self):
        con = self._connect()
        try:
            cur = con.cursor()
            self.executeDDL1(cur)
            largs = [ ("Cooper's",) , ("Boag's",) ]
            margs = [ {'beer': "Cooper's"}, {'beer': "Boag's"} ]
            if self.driver.paramstyle == 'qmark':
                cur.executemany(
                    'insert into %sbooze values (?)' % self.table_prefix,
                    largs
                    )
            elif self.driver.paramstyle == 'numeric':
                cur.executemany(
                    'insert into %sbooze values (:1)' % self.table_prefix,
                    largs
                    )
            elif self.driver.paramstyle == 'named':
                cur.executemany(
                    'insert into %sbooze values (:beer)' % self.table_prefix,
                    margs
                    )
            elif self.driver.paramstyle == 'format':
                cur.executemany(
                    'insert into %sbooze values (%%s)' % self.table_prefix,
                    largs
                    )
            elif self.driver.paramstyle == 'pyformat':
                cur.executemany(
                    'insert into %sbooze values (%%(beer)s)' % (
                        self.table_prefix
                        ),
                    margs
                    )
            else:
                self.fail('Unknown paramstyle')
            self.failUnless(cur.rowcount in (-1,2),
                'insert using cursor.executemany set cursor.rowcount to '
                'incorrect value %r' % cur.rowcount
                )
            cur.execute('select name from %sbooze' % self.table_prefix)
            res = cur.fetchall()
            self.assertEqual(len(res),2,
                'cursor.fetchall retrieved incorrect number of rows'
                )
            beers = [res[0][0],res[1][0]]
            beers.sort()
            self.assertEqual(beers[0],"Boag's",'incorrect data retrieved')
            self.assertEqual(beers[1],"Cooper's",'incorrect data retrieved')
        finally:
            con.close()

    def test_fetchone(self):
        con = self._connect()
        try:
            cur = con.cursor()

            # cursor.fetchone should raise an Error if called before
            # executing a select-type query
            self.assertRaises(self.driver.Error,cur.fetchone)

            # cursor.fetchone should raise an Error if called after
            # executing a query that cannot return rows
            self.executeDDL1(cur)
            self.assertRaises(self.driver.Error,cur.fetchone)

            cur.execute('select name from %sbooze' % self.table_prefix)
            self.assertEqual(cur.fetchone(),None,
                'cursor.fetchone should return None if a query retrieves '
                'no rows'
                )
            self.failUnless(cur.rowcount in (-1,0))

            # cursor.fetchone should raise an Error if called after
            # executing a query that cannot return rows
            cur.execute("insert into %sbooze values ('Victoria Bitter')" % (
                self.table_prefix
                ))
            self.assertRaises(self.driver.Error,cur.fetchone)

            cur.execute('select name from %sbooze' % self.table_prefix)
            r = cur.fetchone()
            self.assertEqual(len(r),1,
                'cursor.fetchone should have retrieved a single row'
                )
            self.assertEqual(r[0],'Victoria Bitter',
                'cursor.fetchone retrieved incorrect data'
                )
            self.assertEqual(cur.fetchone(),None,
                'cursor.fetchone should return None if no more rows available'
                )
            self.failUnless(cur.rowcount in (-1,1))
        finally:
            con.close()

    # Fixed data set used by the fetch tests below.
    samples = [
        'Carlton Cold',
        'Carlton Draft',
        'Mountain Goat',
        'Redback',
        'Victoria Bitter',
        'XXXX'
        ]

    def _populate(self):
        ''' Return a list of sql commands to setup the DB for the fetch
            tests.
        '''
        populate = [
            "insert into %sbooze values ('%s')" % (self.table_prefix,s)
                for s in self.samples
            ]
        return populate

    def test_fetchmany(self):
        con = self._connect()
        try:
            cur = con.cursor()

            # cursor.fetchmany should raise an Error if called without
            #issuing a query
            self.assertRaises(self.driver.Error,cur.fetchmany,4)

            self.executeDDL1(cur)
            for sql in self._populate():
                cur.execute(sql)

            cur.execute('select name from %sbooze' % self.table_prefix)
            r = cur.fetchmany()
            self.assertEqual(len(r),1,
                'cursor.fetchmany retrieved incorrect number of rows, '
                'default of arraysize is one.'
                )
            cur.arraysize=10
            r = cur.fetchmany(3) # Should get 3 rows
            self.assertEqual(len(r),3,
                'cursor.fetchmany retrieved incorrect number of rows'
                )
            r = cur.fetchmany(4) # Should get 2 more
            self.assertEqual(len(r),2,
                'cursor.fetchmany retrieved incorrect number of rows'
                )
            r = cur.fetchmany(4) # Should be an empty sequence
            self.assertEqual(len(r),0,
                'cursor.fetchmany should return an empty sequence after '
                'results are exhausted'
            )
            self.failUnless(cur.rowcount in (-1,6))

            # Same as above, using cursor.arraysize
            cur.arraysize=4
            cur.execute('select name from %sbooze' % self.table_prefix)
            r = cur.fetchmany() # Should get 4 rows
            self.assertEqual(len(r),4,
                'cursor.arraysize not being honoured by fetchmany'
                )
            r = cur.fetchmany() # Should get 2 more
            self.assertEqual(len(r),2)
            r = cur.fetchmany() # Should be an empty sequence
            self.assertEqual(len(r),0)
            self.failUnless(cur.rowcount in (-1,6))

            cur.arraysize=6
            cur.execute('select name from %sbooze' % self.table_prefix)
            rows = cur.fetchmany() # Should get all rows
            self.failUnless(cur.rowcount in (-1,6))
            self.assertEqual(len(rows),6)
            self.assertEqual(len(rows),6)
            rows = [r[0] for r in rows]
            rows.sort()

            # Make sure we get the right data back out
            for i in range(0,6):
                self.assertEqual(rows[i],self.samples[i],
                    'incorrect data retrieved by cursor.fetchmany'
                    )

            rows = cur.fetchmany() # Should return an empty list
            self.assertEqual(len(rows),0,
                'cursor.fetchmany should return an empty sequence if '
                'called after the whole result set has been fetched'
                )
            self.failUnless(cur.rowcount in (-1,6))

            self.executeDDL2(cur)
            cur.execute('select name from %sbarflys' % self.table_prefix)
            r = cur.fetchmany() # Should get empty sequence
            self.assertEqual(len(r),0,
                'cursor.fetchmany should return an empty sequence if '
                'query retrieved no rows'
                )
            self.failUnless(cur.rowcount in (-1,0))

        finally:
            con.close()

    def test_fetchall(self):
        con = self._connect()
        try:
            cur = con.cursor()
            # cursor.fetchall should raise an Error if called
            # without executing a query that may return rows (such
            # as a select)
            self.assertRaises(self.driver.Error, cur.fetchall)

            self.executeDDL1(cur)
            for sql in self._populate():
                cur.execute(sql)

            # cursor.fetchall should raise an Error if called
            # after executing a a statement that cannot return rows
            self.assertRaises(self.driver.Error,cur.fetchall)

            cur.execute('select name from %sbooze' % self.table_prefix)
            rows = cur.fetchall()
            self.failUnless(cur.rowcount in (-1,len(self.samples)))
            self.assertEqual(len(rows),len(self.samples),
                'cursor.fetchall did not retrieve all rows'
                )
            rows = [r[0] for r in rows]
            rows.sort()
            for i in range(0,len(self.samples)):
                self.assertEqual(rows[i],self.samples[i],
                'cursor.fetchall retrieved incorrect rows'
                )
            rows = cur.fetchall()
            self.assertEqual(
                len(rows),0,
                'cursor.fetchall should return an empty list if called '
                'after the whole result set has been fetched'
                )
            self.failUnless(cur.rowcount in (-1,len(self.samples)))

            self.executeDDL2(cur)
            cur.execute('select name from %sbarflys' % self.table_prefix)
            rows = cur.fetchall()
            self.failUnless(cur.rowcount in (-1,0))
            self.assertEqual(len(rows),0,
                'cursor.fetchall should return an empty list if '
                'a select query returns no rows'
                )

        finally:
            con.close()

    def test_mixedfetch(self):
        con = self._connect()
        try:
            cur = con.cursor()
            self.executeDDL1(cur)
            for sql in self._populate():
                cur.execute(sql)

            cur.execute('select name from %sbooze' % self.table_prefix)
            rows1  = cur.fetchone()
            rows23 = cur.fetchmany(2)
            rows4  = cur.fetchone()
            rows56 = cur.fetchall()
            self.failUnless(cur.rowcount in (-1,6))
            self.assertEqual(len(rows23),2,
                'fetchmany returned incorrect number of rows'
                )
            self.assertEqual(len(rows56),2,
                'fetchall returned incorrect number of rows'
                )

            rows = [rows1[0]]
            rows.extend([rows23[0][0],rows23[1][0]])
            rows.append(rows4[0])
            rows.extend([rows56[0][0],rows56[1][0]])
            rows.sort()
            for i in range(0,len(self.samples)):
                self.assertEqual(rows[i],self.samples[i],
                    'incorrect data retrieved or inserted'
                    )
        finally:
            con.close()

    def help_nextset_setUp(self,cur):
        ''' Should create a procedure called deleteme
            that returns two result sets, first the
            number of rows in booze then "name from booze"
        '''
        raise NotImplementedError,'Helper not implemented'
        #sql="""
        #    create procedure deleteme as
        #    begin
        #        select count(*) from booze
        #        select name from booze
        #    end
        #"""
        #cur.execute(sql)

    def help_nextset_tearDown(self,cur):
        'If cleaning up is needed after nextSetTest'
        raise NotImplementedError,'Helper not implemented'
        #cur.execute("drop procedure deleteme")

    def test_nextset(self):
        con = self._connect()
        try:
            cur = con.cursor()
            if not hasattr(cur,'nextset'):
                return

            try:
                self.executeDDL1(cur)
                sql=self._populate()
                for sql in self._populate():
                    cur.execute(sql)

                self.help_nextset_setUp(cur)

                cur.callproc('deleteme')
                numberofrows=cur.fetchone()
                assert numberofrows[0]== len(self.samples)
                assert cur.nextset()
                names=cur.fetchall()
                assert len(names) == len(self.samples)
                s=cur.nextset()
                assert s == None,'No more return sets, should return None'
            finally:
                self.help_nextset_tearDown(cur)

        finally:
            con.close()

    # NOTE(review): this second definition deliberately overrides the
    # scaffold above -- driver test subclasses are expected to supply
    # their own test_nextset implementation.
    def test_nextset(self):
        raise NotImplementedError,'Drivers need to override this test'

    def test_arraysize(self):
        # Not much here - rest of the tests for this are in test_fetchmany
        con = self._connect()
        try:
            cur = con.cursor()
            self.failUnless(hasattr(cur,'arraysize'),
                'cursor.arraysize must be defined'
                )
        finally:
            con.close()

    def test_setinputsizes(self):
        con = self._connect()
        try:
            cur = con.cursor()
            cur.setinputsizes( (25,) )
            self._paraminsert(cur) # Make sure cursor still works
        finally:
            con.close()

    def test_setoutputsize_basic(self):
        # Basic test is to make sure setoutputsize doesn't blow up
        con = self._connect()
        try:
            cur = con.cursor()
            cur.setoutputsize(1000)
            cur.setoutputsize(2000,0)
            self._paraminsert(cur) # Make sure the cursor still works
        finally:
            con.close()

    def test_setoutputsize(self):
        # Real test for setoutputsize is driver dependant
        raise NotImplementedError,'Driver need to override this test'

    def test_None(self):
        con = self._connect()
        try:
            cur = con.cursor()
            self.executeDDL1(cur)
            cur.execute('insert into %sbooze values (NULL)' % self.table_prefix)
            cur.execute('select name from %sbooze' % self.table_prefix)
            r = cur.fetchall()
            self.assertEqual(len(r),1)
            self.assertEqual(len(r[0]),1)
            self.assertEqual(r[0][0],None,'NULL value not returned as None')
        finally:
            con.close()

    def test_Date(self):
        d1 = self.driver.Date(2002,12,25)
        d2 = self.driver.DateFromTicks(time.mktime((2002,12,25,0,0,0,0,0,0)))
        # Can we assume this? API doesn't specify, but it seems implied
        # self.assertEqual(str(d1),str(d2))

    def test_Time(self):
        t1 = self.driver.Time(13,45,30)
        t2 = self.driver.TimeFromTicks(time.mktime((2001,1,1,13,45,30,0,0,0)))
        # Can we assume this? API doesn't specify, but it seems implied
        # self.assertEqual(str(t1),str(t2))

    def test_Timestamp(self):
        t1 = self.driver.Timestamp(2002,12,25,13,45,30)
        t2 = self.driver.TimestampFromTicks(
            time.mktime((2002,12,25,13,45,30,0,0,0))
            )
        # Can we assume this? API doesn't specify, but it seems implied
        # self.assertEqual(str(t1),str(t2))

    def test_Binary(self):
        b = self.driver.Binary('Something')
        b = self.driver.Binary('')

    def test_STRING(self):
        self.failUnless(hasattr(self.driver,'STRING'),
            'module.STRING must be defined'
            )

    def test_BINARY(self):
        self.failUnless(hasattr(self.driver,'BINARY'),
            'module.BINARY must be defined.'
            )

    def test_NUMBER(self):
        self.failUnless(hasattr(self.driver,'NUMBER'),
            'module.NUMBER must be defined.'
            )

    def test_DATETIME(self):
        self.failUnless(hasattr(self.driver,'DATETIME'),
            'module.DATETIME must be defined.'
            )

    def test_ROWID(self):
        self.failUnless(hasattr(self.driver,'ROWID'),
            'module.ROWID must be defined.'
            )
| Python |
#!/usr/bin/env python
from testutils import *
add_to_path()
import pyodbc
def main():
    """Connect using the configured connection string and optionally print
    driver/library details -- a minimal connection smoke test."""
    from optparse import OptionParser
    opt_parser = OptionParser()
    opt_parser.add_option("-v", "--verbose", action="count", help="Increment test verbosity (can be used multiple times)")
    opt_parser.add_option("-d", "--debug", action="store_true", default=False, help="Print debugging items")
    opts, positional = opt_parser.parse_args()

    if len(positional) > 1:
        opt_parser.error('Only one argument is allowed. Do you need quotes around the connection string?')

    if positional:
        connection_string = positional[0]
    else:
        connection_string = load_setup_connection_string('test')
        if not connection_string:
            opt_parser.print_help()
            raise SystemExit()

    cnxn = pyodbc.connect(connection_string)
    if opts.verbose:
        print_library_info(cnxn)
if __name__ == '__main__':
    # Script entry point.
    main()
| Python |
import unittest
from testutils import *
import dbapi20
def main():
    """Run the generic DB API 2.0 compliance suite (dbapi20) against pyodbc."""
    add_to_path()
    import pyodbc

    from optparse import OptionParser
    parser = OptionParser(usage="usage: %prog [options] connection_string")
    parser.add_option("-v", "--verbose", action="count", help="Increment test verbosity (can be used multiple times)")
    parser.add_option("-d", "--debug", action="store_true", default=False, help="Print debugging items")
    (options, args) = parser.parse_args()

    if len(args) > 1:
        parser.error('Only one argument is allowed. Do you need quotes around the connection string?')

    if not args:
        connection_string = load_setup_connection_string('dbapitests')

        if not connection_string:
            parser.print_help()
            raise SystemExit()
    else:
        connection_string = args[0]

    # The test class is declared inside main() so it can close over
    # connection_string via connect_args.
    class test_pyodbc(dbapi20.DatabaseAPI20Test):
        driver = pyodbc
        connect_args = [ connection_string ]
        connect_kw_args = {}

        # Suite tests not applicable to pyodbc are disabled here.
        def test_nextset(self): pass
        def test_setoutputsize(self): pass
        def test_ExceptionsAsConnectionAttributes(self): pass

    suite = unittest.makeSuite(test_pyodbc, 'test')
    # Python 2 and/or idiom: verbosity 9 when -v is given more than once,
    # else 0. NOTE(review): options.verbose is None when -v is absent;
    # 'None > 1' evaluates to False on Python 2 but would raise on
    # Python 3 -- confirm this script is Python-2 only.
    testRunner = unittest.TextTestRunner(verbosity=(options.verbose > 1) and 9 or 0)
    result = testRunner.run(suite)
if __name__ == '__main__':
    # Script entry point.
    main()
| Python |
import unittest
# Recognizable seed pattern repeated to build test strings of any length.
_TESTSTR = '0123456789-abcdefghijklmnopqrstuvwxyz-'

def _generate_test_string(length):
    """
    Returns a string of `length` characters, constructed by repeating _TESTSTR as necessary.

    To enhance performance, there are 3 ways data is read, based on the length of the value, so most data types are
    tested with 3 lengths.  This function helps us generate the test data.

    We use a recognizable data set instead of a single character to make it less likely that "overlap" errors will
    be hidden and to help us manually identify where a break occurs.
    """
    if length <= len(_TESTSTR):
        return _TESTSTR[:length]

    # Floor division keeps the repeat count an int: plain '/' is true
    # division under Python 3 and a float count breaks string multiply.
    c = (length + len(_TESTSTR) - 1) // len(_TESTSTR)
    v = _TESTSTR * c
    return v[:length]
class TestBase(unittest.TestCase):
| Python |
#!/usr/bin/python
# Command-line usage/help text; main() passes this to OptionParser(usage=usage).
usage="""\
usage: %prog [options] filename
Unit tests for Microsoft Access
These run using the version from the 'build' directory, not the version
installed into the Python directories. You must run python setup.py build
before running the tests.
To run, pass the filename of an Access database on the command line:
accesstests test.accdb
An empty Access 2000 database (empty.mdb) and an empty Access 2007 database
(empty.accdb), are provided.
To run a single test, use the -t option:
accesstests test.accdb -t unicode_null
If you want to report an error, it would be helpful to include the driver information
by using the verbose flag and redirecting the output to a file:
accesstests test.accdb -v >& results.txt
You can pass the verbose flag twice for more verbose output:
accesstests test.accdb -vv
"""
# Access SQL data types: http://msdn2.microsoft.com/en-us/library/bb208866.aspx
import sys, os, re
import unittest
from decimal import Decimal
from datetime import datetime, date, time
from os.path import abspath
from testutils import *
# Connection string; set by main() from the command-line dbfile argument.
CNXNSTRING = None

_TESTSTR = '0123456789-abcdefghijklmnopqrstuvwxyz-'

def _generate_test_string(length):
    """
    Return a string of `length` characters, constructed by repeating _TESTSTR as necessary.

    To enhance performance, there are 3 ways data is read, based on the length of the value, so most data types are
    tested with 3 lengths. This function helps us generate the test data.

    We use a recognizable data set instead of a single character to make it less likely that "overlap" errors will
    be hidden and to help us manually identify where a break occurs.
    """
    if length <= len(_TESTSTR):
        return _TESTSTR[:length]
    # Floor division: the original `/` yields a float under Python 3, which
    # makes `_TESTSTR * c` raise TypeError.  `//` is identical under Python 2.
    c = (length + len(_TESTSTR) - 1) // len(_TESTSTR)
    v = _TESTSTR * c
    return v[:length]
class AccessTestCase(unittest.TestCase):
    # Fencepost sizes straddle Access storage boundaries: text fields hold at
    # most 255 characters, so the large sizes exercise the memo/image paths.
    SMALL_FENCEPOST_SIZES = [ 0, 1, 254, 255 ] # text fields <= 255
    LARGE_FENCEPOST_SIZES = [ 256, 270, 304, 508, 510, 511, 512, 1023, 1024, 2047, 2048, 4000, 4095, 4096, 4097, 10 * 1024, 20 * 1024 ]

    # Pre-built test payloads (Python 2: `unicode` versions mirror the ANSI ones).
    ANSI_FENCEPOSTS = [ _generate_test_string(size) for size in SMALL_FENCEPOST_SIZES ]
    UNICODE_FENCEPOSTS = [ unicode(s) for s in ANSI_FENCEPOSTS ]
    IMAGE_FENCEPOSTS = ANSI_FENCEPOSTS + [ _generate_test_string(size) for size in LARGE_FENCEPOST_SIZES ]
    def __init__(self, method_name):
        unittest.TestCase.__init__(self, method_name)

    def setUp(self):
        # Fresh connection and cursor per test; CNXNSTRING is set by main().
        self.cnxn = pyodbc.connect(CNXNSTRING)
        self.cursor = self.cnxn.cursor()
        # Best-effort cleanup of scratch tables t0..t2 from a previous run;
        # the bare except deliberately ignores "table does not exist" errors.
        for i in range(3):
            try:
                self.cursor.execute("drop table t%d" % i)
                self.cnxn.commit()
            except:
                pass
        self.cnxn.rollback()

    def tearDown(self):
        try:
            self.cursor.close()
            self.cnxn.close()
        except:
            # If we've already closed the cursor or connection, exceptions are thrown.
            pass
def test_multiple_bindings(self):
"More than one bind and select on a cursor"
self.cursor.execute("create table t1(n int)")
self.cursor.execute("insert into t1 values (?)", 1)
self.cursor.execute("insert into t1 values (?)", 2)
self.cursor.execute("insert into t1 values (?)", 3)
for i in range(3):
self.cursor.execute("select n from t1 where n < ?", 10)
self.cursor.execute("select n from t1 where n < 3")
def test_different_bindings(self):
self.cursor.execute("create table t1(n int)")
self.cursor.execute("create table t2(d datetime)")
self.cursor.execute("insert into t1 values (?)", 1)
self.cursor.execute("insert into t2 values (?)", datetime.now())
def test_datasources(self):
p = pyodbc.dataSources()
self.assert_(isinstance(p, dict))
def test_getinfo_string(self):
value = self.cnxn.getinfo(pyodbc.SQL_CATALOG_NAME_SEPARATOR)
self.assert_(isinstance(value, str))
def test_getinfo_bool(self):
value = self.cnxn.getinfo(pyodbc.SQL_ACCESSIBLE_TABLES)
self.assert_(isinstance(value, bool))
def test_getinfo_int(self):
value = self.cnxn.getinfo(pyodbc.SQL_DEFAULT_TXN_ISOLATION)
self.assert_(isinstance(value, (int, long)))
def test_getinfo_smallint(self):
value = self.cnxn.getinfo(pyodbc.SQL_CONCAT_NULL_BEHAVIOR)
self.assert_(isinstance(value, int))
    def _test_strtype(self, sqltype, value, resulttype=None, colsize=None):
        """
        The implementation for string, Unicode, and binary tests.

        Creates t1 with two `sqltype` columns, inserts `value` into both,
        reads one back, and compares type, length, and value.
        """
        assert colsize is None or (value is None or colsize >= len(value)), 'colsize=%s value=%s' % (colsize, (value is None) and 'none' or len(value))
        if colsize:
            sql = "create table t1(n1 int not null, s1 %s(%s), s2 %s(%s))" % (sqltype, colsize, sqltype, colsize)
        else:
            sql = "create table t1(n1 int not null, s1 %s, s2 %s)" % (sqltype, sqltype)
        if resulttype is None:
            # Access only uses Unicode, but strings might have been passed in to see if they can be written. When we
            # read them back, they'll be unicode, so compare our results to a Unicode version of `value`.
            if type(value) is str:
                resulttype = unicode
            else:
                resulttype = type(value)
        self.cursor.execute(sql)
        self.cursor.execute("insert into t1 values(1, ?, ?)", (value, value))
        # NOTE(review): both s1 and s2 are selected but only s1 ([0]) is ever
        # compared below -- the s2 round-trip is not actually verified.
        v = self.cursor.execute("select s1, s2 from t1").fetchone()[0]
        if type(value) is not resulttype:
            # To allow buffer --> db --> bytearray tests, always convert the input to the expected result type before
            # comparing.
            value = resulttype(value)
        self.assertEqual(type(v), resulttype)
        if value is not None:
            self.assertEqual(len(v), len(value))
        self.assertEqual(v, value)
    #
    # unicode
    #
    def test_unicode_null(self):
        # NULL round-trip through a sized varchar column.
        self._test_strtype('varchar', None, colsize=255)

    # Generate a test for each fencepost size: test_unicode_0, etc.
    # `_maketest` is a factory so every generated test closes over its own
    # `value`; the tests are injected into the class body via locals().
    def _maketest(value):
        def t(self):
            self._test_strtype('varchar', value, colsize=len(value))
        t.__doc__ = 'unicode %s' % len(value)
        return t
    for value in UNICODE_FENCEPOSTS:
        locals()['test_unicode_%s' % len(value)] = _maketest(value)

    #
    # ansi -> varchar
    #
    # Access only stores Unicode text but it should accept ASCII text.
    # Generate a test for each fencepost size: test_ansivarchar_0, etc.
    def _maketest(value):
        def t(self):
            self._test_strtype('varchar', value, colsize=len(value))
        t.__doc__ = 'ansi %s' % len(value)
        return t
    for value in ANSI_FENCEPOSTS:
        locals()['test_ansivarchar_%s' % len(value)] = _maketest(value)
    #
    # binary
    #
    # Generate a test for each fencepost size: test_binary_0, etc.
    def _maketest(value):
        def t(self):
            # buffer() wraps the str so it binds as binary data (Python 2).
            self._test_strtype('varbinary', buffer(value), colsize=len(value), resulttype=pyodbc.BINARY)
        t.__doc__ = 'binary %s' % len(value)
        return t
    for value in ANSI_FENCEPOSTS:
        locals()['test_binary_%s' % len(value)] = _maketest(value)

    #
    # image
    #
    def test_null_image(self):
        self._test_strtype('image', None)

    # Generate a test for each fencepost size: test_image_0, etc.
    def _maketest(value):
        def t(self):
            self._test_strtype('image', buffer(value), resulttype=pyodbc.BINARY)
        t.__doc__ = 'image %s' % len(value)
        return t
    for value in IMAGE_FENCEPOSTS:
        locals()['test_image_%s' % len(value)] = _maketest(value)

    #
    # memo
    #
    def test_null_memo(self):
        self._test_strtype('memo', None)

    # Generate a test for each fencepost size: test_memo_0, etc.
    def _maketest(value):
        def t(self):
            self._test_strtype('memo', unicode(value))
        t.__doc__ = 'Unicode to memo %s' % len(value)
        return t
    for value in IMAGE_FENCEPOSTS:
        locals()['test_memo_%s' % len(value)] = _maketest(value)

    # ansi -> memo
    def _maketest(value):
        def t(self):
            self._test_strtype('memo', value)
        t.__doc__ = 'ANSI to memo %s' % len(value)
        return t
    for value in IMAGE_FENCEPOSTS:
        locals()['test_ansimemo_%s' % len(value)] = _maketest(value)
    def test_subquery_params(self):
        """Ensure parameter markers work in a subquery"""
        self.cursor.execute("create table t1(id integer, s varchar(20))")
        self.cursor.execute("insert into t1 values (?,?)", 1, 'test')
        row = self.cursor.execute("""
select x.id
from (
select id
from t1
where s = ?
and id between ? and ?
) x
""", 'test', 1, 10).fetchone()
        self.assertNotEqual(row, None)
        self.assertEqual(row[0], 1)

    def _exec(self):
        # Helper for assertRaises: re-executes the SQL stashed in self.sql.
        self.cursor.execute(self.sql)

    def test_close_cnxn(self):
        """Make sure using a Cursor after closing its connection doesn't crash."""
        self.cursor.execute("create table t1(id integer, s varchar(20))")
        self.cursor.execute("insert into t1 values (?,?)", 1, 'test')
        self.cursor.execute("select * from t1")
        self.cnxn.close()
        # Now that the connection is closed, we expect an exception. (If the code attempts to use
        # the HSTMT, we'll get an access violation instead.)
        self.sql = "select * from t1"
        self.assertRaises(pyodbc.ProgrammingError, self._exec)
    def test_unicode_query(self):
        # The SQL text itself may be a unicode string, not just the parameters.
        self.cursor.execute(u"select 1")

    def test_negative_row_index(self):
        self.cursor.execute("create table t1(s varchar(20))")
        self.cursor.execute("insert into t1 values(?)", "1")
        row = self.cursor.execute("select * from t1").fetchone()
        # Row objects support negative indexing, like tuples.
        self.assertEquals(row[0], "1")
        self.assertEquals(row[-1], "1")

    def test_version(self):
        # pyodbc.version is expected to be a three-part dotted string.
        self.assertEquals(3, len(pyodbc.version.split('.'))) # 1.3.1 etc.
#
# date, time, datetime
#
def test_datetime(self):
value = datetime(2007, 1, 15, 3, 4, 5)
self.cursor.execute("create table t1(dt datetime)")
self.cursor.execute("insert into t1 values (?)", value)
result = self.cursor.execute("select dt from t1").fetchone()[0]
self.assertEquals(value, result)
#
# ints and floats
#
def test_int(self):
value = 1234
self.cursor.execute("create table t1(n int)")
self.cursor.execute("insert into t1 values (?)", value)
result = self.cursor.execute("select n from t1").fetchone()[0]
self.assertEquals(result, value)
def test_negative_int(self):
value = -1
self.cursor.execute("create table t1(n int)")
self.cursor.execute("insert into t1 values (?)", value)
result = self.cursor.execute("select n from t1").fetchone()[0]
self.assertEquals(result, value)
def test_smallint(self):
value = 32767
self.cursor.execute("create table t1(n smallint)")
self.cursor.execute("insert into t1 values (?)", value)
result = self.cursor.execute("select n from t1").fetchone()[0]
self.assertEquals(result, value)
def test_real(self):
value = 1234.5
self.cursor.execute("create table t1(n real)")
self.cursor.execute("insert into t1 values (?)", value)
result = self.cursor.execute("select n from t1").fetchone()[0]
self.assertEquals(result, value)
def test_negative_real(self):
value = -200.5
self.cursor.execute("create table t1(n real)")
self.cursor.execute("insert into t1 values (?)", value)
result = self.cursor.execute("select n from t1").fetchone()[0]
self.assertEqual(value, result)
def test_float(self):
value = 1234.567
self.cursor.execute("create table t1(n float)")
self.cursor.execute("insert into t1 values (?)", value)
result = self.cursor.execute("select n from t1").fetchone()[0]
self.assertEquals(result, value)
def test_negative_float(self):
value = -200.5
self.cursor.execute("create table t1(n float)")
self.cursor.execute("insert into t1 values (?)", value)
result = self.cursor.execute("select n from t1").fetchone()[0]
self.assertEqual(value, result)
def test_tinyint(self):
self.cursor.execute("create table t1(n tinyint)")
value = 10
self.cursor.execute("insert into t1 values (?)", value)
result = self.cursor.execute("select n from t1").fetchone()[0]
self.assertEqual(type(result), type(value))
self.assertEqual(value, result)
#
# decimal & money
#
def test_decimal(self):
value = Decimal('12345.6789')
self.cursor.execute("create table t1(n numeric(10,4))")
self.cursor.execute("insert into t1 values(?)", value)
v = self.cursor.execute("select n from t1").fetchone()[0]
self.assertEqual(type(v), Decimal)
self.assertEqual(v, value)
def test_money(self):
self.cursor.execute("create table t1(n money)")
value = Decimal('1234.45')
self.cursor.execute("insert into t1 values (?)", value)
result = self.cursor.execute("select n from t1").fetchone()[0]
self.assertEqual(type(result), type(value))
self.assertEqual(value, result)
def test_negative_decimal_scale(self):
value = Decimal('-10.0010')
self.cursor.execute("create table t1(d numeric(19,4))")
self.cursor.execute("insert into t1 values(?)", value)
v = self.cursor.execute("select * from t1").fetchone()[0]
self.assertEqual(type(v), Decimal)
self.assertEqual(v, value)
#
# bit
#
def test_bit(self):
self.cursor.execute("create table t1(b bit)")
value = True
self.cursor.execute("insert into t1 values (?)", value)
result = self.cursor.execute("select b from t1").fetchone()[0]
self.assertEqual(type(result), bool)
self.assertEqual(value, result)
def test_bit_null(self):
self.cursor.execute("create table t1(b bit)")
value = None
self.cursor.execute("insert into t1 values (?)", value)
result = self.cursor.execute("select b from t1").fetchone()[0]
self.assertEqual(type(result), bool)
self.assertEqual(False, result)
    def test_guid(self):
        # REVIEW: Python doesn't (yet) have a UUID type so the value is returned as a string. Access, however, only
        # really supports Unicode. For now, we'll have to live with this difference. All strings in Python 3.x will
        # be Unicode -- pyodbc 3.x will have different defaults.
        value = "de2ac9c6-8676-4b0b-b8a6-217a8580cbee"
        self.cursor.execute("create table t1(g1 uniqueidentifier)")
        self.cursor.execute("insert into t1 values (?)", value)
        v = self.cursor.execute("select * from t1").fetchone()[0]
        # NOTE(review): only type and length are compared, never the value
        # itself -- presumably the driver may reformat the GUID; confirm.
        self.assertEqual(type(v), type(value))
        self.assertEqual(len(v), len(value))
    #
    # rowcount
    #
    def test_rowcount_delete(self):
        # rowcount is -1 before any statement has executed on the cursor.
        self.assertEquals(self.cursor.rowcount, -1)
        self.cursor.execute("create table t1(i int)")
        count = 4
        for i in range(count):
            self.cursor.execute("insert into t1 values (?)", i)
        self.cursor.execute("delete from t1")
        # A delete reports the number of rows removed.
        self.assertEquals(self.cursor.rowcount, count)

    def test_rowcount_nodata(self):
        """
        This represents a different code path than a delete that deleted something.

        The return value is SQL_NO_DATA and code after it was causing an error. We could use SQL_NO_DATA to step over
        the code that errors out and drop down to the same SQLRowCount code. On the other hand, we could hardcode a
        zero return value.
        """
        self.cursor.execute("create table t1(i int)")
        # This is a different code path internally.
        self.cursor.execute("delete from t1")
        self.assertEquals(self.cursor.rowcount, 0)

    def test_rowcount_select(self):
        """
        Ensure Cursor.rowcount is set properly after a select statement.

        pyodbc calls SQLRowCount after each execute and sets Cursor.rowcount, but SQL Server 2005 returns -1 after a
        select statement, so we'll test for that behavior. This is valid behavior according to the DB API
        specification, but people don't seem to like it.
        """
        self.cursor.execute("create table t1(i int)")
        count = 4
        for i in range(count):
            self.cursor.execute("insert into t1 values (?)", i)
        self.cursor.execute("select * from t1")
        self.assertEquals(self.cursor.rowcount, -1)
        rows = self.cursor.fetchall()
        self.assertEquals(len(rows), count)
        # Fetching all rows does not change rowcount for a select.
        self.assertEquals(self.cursor.rowcount, -1)

    def test_rowcount_reset(self):
        "Ensure rowcount is reset to -1"
        self.cursor.execute("create table t1(i int)")
        count = 4
        for i in range(count):
            self.cursor.execute("insert into t1 values (?)", i)
        # The last insert affected a single row...
        self.assertEquals(self.cursor.rowcount, 1)
        # ...and a DDL statement resets rowcount to -1.
        self.cursor.execute("create table t2(i int)")
        self.assertEquals(self.cursor.rowcount, -1)
    #
    # Misc
    #
    def test_lower_case(self):
        "Ensure pyodbc.lowercase forces returned column names to lowercase."
        # Has to be set before creating the cursor, so we must recreate self.cursor.
        pyodbc.lowercase = True
        self.cursor = self.cnxn.cursor()
        self.cursor.execute("create table t1(Abc int, dEf int)")
        self.cursor.execute("select * from t1")
        # description is a sequence of 7-item sequences; item [0] is the name.
        names = [ t[0] for t in self.cursor.description ]
        names.sort()
        self.assertEquals(names, [ "abc", "def" ])
        # Put it back so other tests don't fail.
        pyodbc.lowercase = False

    def test_row_description(self):
        """
        Ensure Cursor.description is accessible as Row.cursor_description.
        """
        self.cursor = self.cnxn.cursor()
        self.cursor.execute("create table t1(a int, b char(3))")
        self.cnxn.commit()
        self.cursor.execute("insert into t1 values(1, 'abc')")
        row = self.cursor.execute("select * from t1").fetchone()
        self.assertEquals(self.cursor.description, row.cursor_description)
    def test_executemany(self):
        # Insert several rows with one executemany call, then verify both the
        # count and the contents of every row.
        self.cursor.execute("create table t1(a int, b varchar(10))")
        params = [ (i, str(i)) for i in range(1, 6) ]
        self.cursor.executemany("insert into t1(a, b) values (?,?)", params)
        count = self.cursor.execute("select count(*) from t1").fetchone()[0]
        self.assertEqual(count, len(params))
        self.cursor.execute("select a, b from t1 order by a")
        rows = self.cursor.fetchall()
        self.assertEqual(count, len(rows))
        for param, row in zip(params, rows):
            self.assertEqual(param[0], row[0])
            self.assertEqual(param[1], row[1])
def test_executemany_failure(self):
"""
Ensure that an exception is raised if one query in an executemany fails.
"""
self.cursor.execute("create table t1(a int, b varchar(10))")
params = [ (1, 'good'),
('error', 'not an int'),
(3, 'good') ]
self.failUnlessRaises(pyodbc.Error, self.cursor.executemany, "insert into t1(a, b) value (?, ?)", params)
def test_row_slicing(self):
self.cursor.execute("create table t1(a int, b int, c int, d int)");
self.cursor.execute("insert into t1 values(1,2,3,4)")
row = self.cursor.execute("select * from t1").fetchone()
result = row[:]
self.failUnless(result is row)
result = row[:-1]
self.assertEqual(result, (1,2,3))
result = row[0:4]
self.failUnless(result is row)
def test_row_repr(self):
self.cursor.execute("create table t1(a int, b int, c int, d int)");
self.cursor.execute("insert into t1 values(1,2,3,4)")
row = self.cursor.execute("select * from t1").fetchone()
result = str(row)
self.assertEqual(result, "(1, 2, 3, 4)")
result = str(row[:-1])
self.assertEqual(result, "(1, 2, 3)")
result = str(row[:1])
self.assertEqual(result, "(1,)")
    def test_concatenation(self):
        # 250-character operands keep each column under the 255-char text limit.
        v2 = u'0123456789' * 25
        v3 = u'9876543210' * 25
        value = v2 + 'x' + v3
        self.cursor.execute("create table t1(c2 varchar(250), c3 varchar(250))")
        self.cursor.execute("insert into t1(c2, c3) values (?,?)", v2, v3)
        # SQL-side string concatenation must match the Python-side result.
        row = self.cursor.execute("select c2 + 'x' + c3 from t1").fetchone()
        self.assertEqual(row[0], value)

    def test_autocommit(self):
        # Connections default to autocommit off; it can be set per-connection
        # at connect time and toggled afterwards.
        self.assertEqual(self.cnxn.autocommit, False)
        othercnxn = pyodbc.connect(CNXNSTRING, autocommit=True)
        self.assertEqual(othercnxn.autocommit, True)
        othercnxn.autocommit = False
        self.assertEqual(othercnxn.autocommit, False)
def main():
    """Parse options, build CNXNSTRING for the given Access file, run the suite."""
    from optparse import OptionParser
    parser = OptionParser(usage=usage)
    parser.add_option("-v", "--verbose", action="count", help="Increment test verbosity (can be used multiple times)")
    parser.add_option("-d", "--debug", action="store_true", default=False, help="Print debugging items")
    parser.add_option("-t", "--test", help="Run only the named test")

    (options, args) = parser.parse_args()
    if len(args) != 1:
        parser.error('dbfile argument required')

    # Pick the ODBC driver by extension: .accdb needs the newer Access driver.
    if args[0].endswith('.accdb'):
        driver = 'Microsoft Access Driver (*.mdb, *.accdb)'
    else:
        driver = 'Microsoft Access Driver (*.mdb)'

    global CNXNSTRING
    CNXNSTRING = 'DRIVER={%s};DBQ=%s;ExtendedAnsiSQL=1' % (driver, abspath(args[0]))

    # Smoke-test the connection (and print driver info) before running tests.
    cnxn = pyodbc.connect(CNXNSTRING)
    print_library_info(cnxn)
    cnxn.close()

    suite = load_tests(AccessTestCase, options.test)
    testRunner = unittest.TextTestRunner(verbosity=options.verbose)
    result = testRunner.run(suite)

if __name__ == '__main__':
    # Add the build directory to the path so we're testing the latest build, not the installed version.
    add_to_path()
    import pyodbc
    main()
| Python |
# Django settings for cms project.
import os
import sys

# Project root and a helper that builds absolute paths relative to it.
ROOT = os.path.abspath(os.path.dirname(__file__))
path = lambda *args: os.path.join(ROOT, *args)

# Make bundled libraries and plugins importable.
sys.path.insert(0, path("lib"))
sys.path.insert(0, path("plugins"))

#USER SETTINGS (LATER TAKE FROM DATABASE)
LANGUAGES = (
    ('en', 'English'),
    ('ru', 'Russian'),
)
# Presumably selects an entry in LANGUAGES -- confirm how it is indexed.
DEFAULT_LANGUAGE = 1

# User-uploaded files.
UPLOADS_DIR = path('uploads')
UPLOADS_URL = '/uploads/'

# Template theme selection; several static/media settings derive from it.
TEMPLATE_DIR = 'templates'
CURRENT_TEMPLATE = 'default'

MEDIA_ROOT = path('uploads')
STATIC_DIR = path(TEMPLATE_DIR+'/'+CURRENT_TEMPLATE)
STATIC_URL = '/media/'
STATIC_ROOT = path(TEMPLATE_DIR+'/'+CURRENT_TEMPLATE)
ADMIN_MEDIA_ROOT = path(TEMPLATE_DIR+'/admin/static/grappelli')
ADMIN_MEDIA_PREFIX = 'http://127.0.0.1:8000/media_admin/'
ADMIN_MEDIA_URL = '/static_admin/'
#END
DEBUG = True
TEMPLATE_DEBUG = DEBUG

ADMINS = (
    #('Aleksandr Hrelyuk', 'hrelyuk.a@gmail.com'),
)

MANAGERS = ADMINS

DATABASES = {
    'default': {
        # Bug fix: the dict-style DATABASES setting (Django 1.2+) requires the
        # full backend path; the bare alias 'mysql' is only valid for the old
        # DATABASE_ENGINE setting and raises ImproperlyConfigured here.
        'ENGINE': 'django.db.backends.mysql', # Or '...postgresql_psycopg2', '...sqlite3', '...oracle'.
        'NAME': 'flying',     # Or path to database file if using sqlite3.
        # NOTE(review): hard-coded root/root credentials and PORT '80' (MySQL
        # normally listens on 3306) look like placeholders -- confirm before use.
        'USER': 'root',       # Not used with sqlite3.
        'PASSWORD': 'root',   # Not used with sqlite3.
        'HOST': 'localhost',  # Set to empty string for localhost. Not used with sqlite3.
        'PORT': '80',         # Set to empty string for default. Not used with sqlite3.
    }
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'Europe/Moscow'

# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'ru-RU'

SITE_ID = 1

# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True

# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True

# NOTE: MEDIA_ROOT/MEDIA_URL and the admin media prefix are defined in the
# "user settings" section near the top of this file.

# Make this unique, and don't share it with anybody.
# SECURITY NOTE(review): this key is committed to source control; regenerate
# it and keep it out of the repository for production deployments.
SECRET_KEY = '4*7s&a0a_xytz+s78a89u7&#f^a3d76u6=u%d1k(xaxsi2yeh#'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
    # 'django.template.loaders.eggs.Loader',
)

MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.locale.LocaleMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
)

ROOT_URLCONF = 'urls'

TEMPLATE_DIRS = (
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
    path(TEMPLATE_DIR),
    path(TEMPLATE_DIR+'/'+CURRENT_TEMPLATE),
)

INSTALLED_APPS = (
    'content',
    'core.language',
    'core.tree',
    'core.setting',
    'functions',
    'mptt',
    'grappelli.dashboard',
    'grappelli',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.admin',
    # Uncomment the next line to enable admin documentation:
    # 'django.contrib.admindocs',
    'multilingual',
)

TEMPLATE_CONTEXT_PROCESSORS = (
    "django.core.context_processors.auth",
    "django.core.context_processors.debug",
    "django.core.context_processors.i18n",
    "django.core.context_processors.media",
    "django.core.context_processors.request",
    # Project-local processor exposing selected settings to templates.
    "context_processors.setting",
    'multilingual.context_processors.multilingual',
)

# Grappelli dashboard class path and mptt admin indentation setting.
GRAPPELLI_INDEX_DASHBOARD = 'dashboard.CustomIndexDashboard'
MPTT_ADMIN_LEVEL_INDENT = 20
| Python |
#!/usr/bin/env python
# Django management entry point; aborts with an explanatory message when the
# project's settings module cannot be imported.
from django.core.management import execute_manager
try:
    import settings # Assumed to be in the same directory.
except ImportError:
    import sys
    sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
    sys.exit(1)

if __name__ == "__main__":
    execute_manager(settings)
| Python |
from django import forms
from django.utils.safestring import mark_safe
class AdminImageWidget(forms.FileInput):
    """
    An ImageField widget for the admin that shows a thumbnail link for the
    current value above the regular file input.
    """
    def __init__(self, attrs=None):
        # Bug fix: the default was a mutable `{}`, shared across every
        # instance created without explicit attrs.  None is the conventional
        # sentinel and is what forms.FileInput expects by default.
        super(AdminImageWidget, self).__init__(attrs)

    def render(self, name, value, attrs=None):
        output = []
        if value and hasattr(value, "url"):
            # Existing image: link to the full-size file with a small preview.
            output.append(('<a target="_blank" href="%s">'
                           '<img src="%s" style="height: 60px;" /></a> '
                           % (value.url, value.url)))
        output.append(super(AdminImageWidget, self).render(name, value, attrs))
        # Bug fix: mark_safe was used without being imported, raising
        # NameError at render time; it is now imported at module level.
        return mark_safe(u''.join(output))
from django.conf.urls.defaults import *
from django.conf import settings
from django.contrib import admin

# Register ModelAdmin classes from every installed app's admin module.
admin.autodiscover()

urlpatterns = patterns('',
    # Example:
    # (r'^cms/', include('cms.foo.urls')),

    # Uncomment the admin/doc line below to enable admin documentation:
    # (r'^admin/doc/', include('django.contrib.admindocs.urls')),

    # Grappelli admin skin and the admin site itself.
    (r'^grappelli/', include('grappelli.urls')),
    (r'^admin/', include(admin.site.urls)),
    # Serve admin media and user uploads via django.views.static.serve.
    (r'^media_admin/(?P<path>.*)$', 'django.views.static.serve', {
        'document_root': settings.ADMIN_MEDIA_ROOT,
        'show_indexes': True}),
    (r'^uploads/(?P<path>.*)$', 'django.views.static.serve',{'document_root': settings.UPLOADS_DIR, 'show_indexes': True}),
)
| Python |
from django.conf import settings
def setting(request):
    """Context processor exposing selected settings values to templates."""
    names = (
        'TEMPLATE_DIR',
        'CURRENT_TEMPLATE',
        'STATIC_DIR',
        'STATIC_URL',
        'MEDIA_ROOT',
        'ADMIN_MEDIA_ROOT',
        'ADMIN_MEDIA_URL',
        'UPLOADS_DIR',
    )
    return dict((name, getattr(settings, name)) for name in names)
#!/usr/bin/env python
import os
import optparse
import subprocess
import sys
here = os.path.dirname(__file__)

def main():
    """Minify the admin JavaScript files with the Google Closure Compiler.

    With no file arguments, compresses the standard admin scripts next to
    this script; each FILE.js produces FILE.min.js alongside it.
    """
    usage = "usage: %prog [file1..fileN]"
    description = """With no file paths given this script will automatically
compress all jQuery-based files of the admin app. Requires the Google Closure
Compiler library and Java version 6 or later."""
    parser = optparse.OptionParser(usage, description=description)
    parser.add_option("-c", dest="compiler", default="~/bin/compiler.jar",
                      help="path to Closure Compiler jar file")
    parser.add_option("-v", "--verbose", action="store_true", dest="verbose")
    parser.add_option("-q", "--quiet", action="store_false", dest="verbose")
    (options, args) = parser.parse_args()

    compiler = os.path.expanduser(options.compiler)
    if not os.path.exists(compiler):
        sys.exit("Google Closure compiler jar file %s not found. Please use the -c option to specify the path." % compiler)

    if not args:
        if options.verbose:
            sys.stdout.write("No filenames given; defaulting to admin scripts\n")
        args = [os.path.join(here, f) for f in [
            "actions.js", "collapse.js", "inlines.js", "prepopulate.js"]]

    for arg in args:
        if not arg.endswith(".js"):
            arg = arg + ".js"
        to_compress = os.path.expanduser(arg)
        if os.path.exists(to_compress):
            # Bug fix: strip only the trailing ".js" -- the old
            # "".join(arg.rsplit(".js")) removed every ".js" occurrence in the
            # name, mangling files like "x.js.y.js".
            to_compress_min = "%s.min.js" % arg[:-len(".js")]
            # Bug fix: pass an argument list instead of splitting a formatted
            # string -- cmd.split() broke on any path containing spaces.
            cmd = ["java", "-jar", compiler,
                   "--js", to_compress, "--js_output_file", to_compress_min]
            if options.verbose:
                sys.stdout.write("Running: %s\n" % " ".join(cmd))
            subprocess.call(cmd)
        else:
            sys.stdout.write("File %s not found. Sure it exists?\n" % to_compress)

if __name__ == '__main__':
    main()
| Python |
Subsets and Splits
SQL Console for ajibawa-2023/Python-Code-Large
Provides a useful breakdown of language distribution in the training data, showing which languages have the most samples and helping identify potential imbalances across different language groups.