code
stringlengths
1
1.49M
vector
listlengths
0
7.38k
snippet
listlengths
0
7.38k
#!/usr/bin/env python ########################## #By:Anatoliy Liberman # #For: Genome Center UCD # #Supervisor: # # Alexander Kozik # # # ########################## import sys import os import fileinput import re opts = None #Returns the number of domain hits found def domain_HITS(fin): hmm_FILE = open(fin, "rb") EVal_FILE = open(fout, "a") for line in hmm_FILE: pos = line.find('Domain search space ') if pos != -1: domainValueString = str(line) domainValue = re.findall('\d+',domainValueString) if domainValue: domainValue = int(domainValue[0]) print str(domainValue) + ' Domain hits found' if domainValue == 0: sys.exit(1) return domainValue #goes through every char in line and seperate by spaces def get_space_array(line): arr = [] in_word = False for c in line: if c == ' ': in_word = False elif in_word == False: arr.append('') in_word = True if in_word: arr[len(arr) - 1] = arr[len(arr) - 1] + c return arr #tab delimiter def tab_separate_from_array(arr): result = '' for word in arr: result = result + word result = result + '\t' result = result[:-1] #chop off the extra tab return result #find line of id and append def seq_get_id(fin, identifier, count): f = open(fin, 'rb') arr = [] found = False for line in f: if not found: if line.find('>> ' + identifier) != -1: next(f) next(f) found = True else: line = re.sub('\.\.','P',line) line = re.sub('\[\]','C',line) line = re.sub('\.\]','R',line) line = re.sub('\[\.','F',line) arr.append(line) count = count - 1 if count == 0: break f.close() return arr #calls id and seq def seq_get_all_ids(fin, lines, opt): result = '' offset = 0 if opts and 'g' in opts: offset = 3 for line in lines: identifier = line[8 + offset] count = int(line[7 + offset]) seqs = seq_get_id(fin, identifier, count) for seq in seqs: result = result + identifier + seq return result def tab_separated_evals(evalues): result = '' for evalue in evalues: result = result + tab_separate_from_array(evalue) return result #writes header to file and return array of 
E-Values which follow it(2nd function) def eval_get_array (ecount, fin, fout, opt): check = True hmm_FILE = open(fin, "rb") EVal_FILE = open(fout, "a") lines = [] for line in hmm_FILE: if check: pos= line.find('E-value score bias E-value score bias exp N Sequence ') if opt and 'g' in opt: line = 'Query Accession Description' + line if pos != -1: check = False if ecount != 0: if opt and 'h' in opt: to_print = tab_separate_from_array(get_space_array(line)) EVal_FILE.write(to_print) next(hmm_FILE) else: ecount = ecount - 1 line_arr = get_space_array(line) line_arr[-4] = line_arr[-4] + ' ' + line_arr[-3] + ' ' + line_arr[-2] + ' ' + line_arr[-1] del line_arr[-3:] lines.append(line_arr) if ecount == 0: break hmm_FILE.close() EVal_FILE.close() return lines def get_quar_assent(fin): hmm_FILE = open(fin, "r") q_str = '' a_str = '' d_str = '' q_tot = '' for line in hmm_FILE: query = re.match('Query:[\s\t]+(\S+)[\s\t]+\[(\w+)\=(\d+)\]',line) if query: q_str= query.group(1) q_tot= query.group(3) acces = re.match("Accession:[\s\t]+(\S+)[\s\t\n]+",line) if acces: a_str = acces.group(1) desc = re.match("Description:[\s\t]+(.*)",line) if desc: d_str = desc.group(1) return q_str, a_str, d_str, q_tot def fout2_headers(fout2, opt): f2 = open(fout2, "a") to_print = '' new_print = [] check = True hmm_FILE = open(fin, "rb") EVal2 = open(fout2, "a") to_print = 'Sequence # type score bias c-Evalue i-Evalue hmmfrom hmm to alifrom alito type envfrom envto type acc' if 'g' in opt: to_print = 'Query Accession Description ' + to_print new_print[:] = to_print.split() new_print = tab_separate_from_array(new_print) + '\n' f2.write(new_print) def tab_fout2(lines): arr_rows = [] arr_row=[] count = 0 for line in lines: to_print = tab_separate_from_array(get_space_array(lines)) arr = to_print.split() for i in arr: if len(arr_row) != 17: arr_row.append(i) elif len(arr_row) == 17: arr_rows.append(arr_row) if i: count = 0 arr_row = [] arr_row.append(i) else: break return arr_rows def dfilter(arr_rows, 
thresh, q_perc,q_tot): arr_hold = [] arr_hold2 = [] perc = '' l_span = '' if thresh !=0: thresh = float(thresh) for i in arr_rows: if float(i[6]) <= thresh: arr_hold.append(i) if q_perc !=0: q_perc = float(q_perc) for i in arr_hold: l_span = int(i[11]) - int(i[10]) l_span = float(l_span) perc = l_span/float(q_tot) if perc >=q_perc: arr_hold2.append(i) if q_perc!=0: arr_hold = arr_hold2 arr_rows = arr_hold return arr_rows def desc2(arr_rows, d_str, a_str, q_str): for i in arr_rows: i.insert(0, d_str) i.insert(0, a_str) i.insert(0, q_str) return arr_rows def arr_row_tabs(arr_rows): i = '' w_tab = '' w_tabs = '' row = '' rows = '' for i in arr_rows: # for word in i: if i: w_tab = '\t'.join(i) + '\n' w_tabs = w_tabs + w_tab # print w_tabs return w_tabs if __name__ == '__main__': if len(sys.argv) < 4 or len(sys.argv) > 9: print len(sys.argv[:]) print "[1] = file_input [2]=evalue_output [3]=id_details_output" print "if [4]=y append header: 'E-value score bias....' to evalue_output to evalue_output" print "g = add 3 columbs for accession, query, and description for file 1" print "h = prints headers for both filesS" print "d -allows for detailed analysis(options e e and l" print "-e = threshold only returns e value over threshold" print "-l = threshhold for query lenght" sys.exit(1) fin = sys.argv[1] fout = sys.argv[2] fout2 = sys.argv[3] #option to print E-Val header line' E-value score bias....Description' in file 1 if len(sys.argv) >= 5: opt = sys.argv[4] #do other tuff else: opt = 0 opts = opt if 'd' in opt: if '-e' in sys.argv[5]: thresh = sys.argv[6] else: thresh = 0 if 'l' in sys.argv[7]: q_perc = sys.argv[8] else: q_perc = 0 hits = domain_HITS(fin) evalues = eval_get_array(hits, fin, fout, opt) q_str, a_str, d_str,q_tot = get_quar_assent(fin) if opt and 'g' in opt: for evalue in evalues: evalue.insert(0, d_str) evalue.insert(0, a_str) evalue.insert(0, q_str) if opt and 'h' in opt: fout2_headers(fout2, opt) evals_string = tab_separated_evals(evalues) id_string = 
seq_get_all_ids(fin, evalues, opt) arr_rows = tab_fout2(id_string) if 'd' in opt: arr_rows = dfilter(arr_rows, thresh, q_perc,q_tot) if opt and 'g' in opt: arr_rows = desc2(arr_rows,d_str,a_str,q_str) arr_rows = arr_row_tabs(arr_rows) f = open(fout, 'a') f2 = open(fout2, 'a') f.write(evals_string) # print evals_string f.write('\n') f2.write(arr_rows) # print id_string f.close() f2.close()
[ [ 1, 0, 0.0401, 0.0033, 0, 0.66, 0, 509, 0, 1, 0, 0, 509, 0, 0 ], [ 1, 0, 0.0435, 0.0033, 0, 0.66, 0.0556, 688, 0, 1, 0, 0, 688, 0, 0 ], [ 1, 0, 0.0468, 0.0033, 0, ...
[ "import sys", "import os", "import fileinput", "import re", "opts = None", "def domain_HITS(fin):\n\thmm_FILE = open(fin, \"rb\")\n\tEVal_FILE = open(fout, \"a\")\n\tfor line in hmm_FILE:\n\t\tpos = line.find('Domain search space ')\n\t\tif pos != -1:\n\t\t\tdomainValueString = str(line)\n\t\t\tdomainV...
#!/usr/bin/env python ########################## #By:Anatoliy Liberman # #For: Genome Center UCD # #Supervisor: # # Alexander Kozik # # # ########################## import sys import os import fileinput import re opts = None #Returns the number of domain hits found def domain_HITS(fin): hmm_FILE = open(fin, "rb") EVal_FILE = open(fout, "a") for line in hmm_FILE: pos = line.find('Domain search space ') if pos != -1: domainValueString = str(line) domainValue = re.findall('\d+',domainValueString) if domainValue: domainValue = int(domainValue[0]) print str(domainValue) + ' Domain hits found' if domainValue == 0: sys.exit(1) return domainValue #goes through every char in line and seperate by spaces def get_space_array(line): arr = [] in_word = False for c in line: if c == ' ': in_word = False elif in_word == False: arr.append('') in_word = True if in_word: arr[len(arr) - 1] = arr[len(arr) - 1] + c return arr #tab delimiter def tab_separate_from_array(arr): result = '' for word in arr: result = result + word result = result + '\t' result = result[:-1] #chop off the extra tab return result #find line of id and append def seq_get_id(fin, identifier, count): f = open(fin, 'rb') arr = [] found = False for line in f: if not found: if line.find('>> ' + identifier) != -1: next(f) next(f) found = True else: line = re.sub('\.\.','P',line) line = re.sub('\[\]','C',line) line = re.sub('\.\]','R',line) line = re.sub('\[\.','F',line) arr.append(line) count = count - 1 if count == 0: break f.close() return arr #calls id and seq def seq_get_all_ids(fin, lines, opt): result = '' offset = 0 if opts and 'g' in opts: offset = 3 for line in lines: identifier = line[8 + offset] count = int(line[7 + offset]) seqs = seq_get_id(fin, identifier, count) for seq in seqs: result = result + identifier + seq return result def tab_separated_evals(evalues): result = '' for evalue in evalues: result = result + tab_separate_from_array(evalue) return result #writes header to file and return array of 
E-Values which follow it(2nd function) def eval_get_array (ecount, fin, fout, opt): check = True hmm_FILE = open(fin, "rb") EVal_FILE = open(fout, "a") lines = [] for line in hmm_FILE: if check: pos= line.find('E-value score bias E-value score bias exp N Sequence ') if opt and 'g' in opt: line = 'Query Accession Description' + line if pos != -1: check = False if ecount != 0: if opt and 'h' in opt: to_print = tab_separate_from_array(get_space_array(line)) EVal_FILE.write(to_print) next(hmm_FILE) else: ecount = ecount - 1 line_arr = get_space_array(line) line_arr[-4] = line_arr[-4] + ' ' + line_arr[-3] + ' ' + line_arr[-2] + ' ' + line_arr[-1] del line_arr[-3:] lines.append(line_arr) if ecount == 0: break hmm_FILE.close() EVal_FILE.close() return lines def get_quar_assent(fin): hmm_FILE = open(fin, "r") q_str = '' a_str = '' d_str = '' q_tot = '' for line in hmm_FILE: query = re.match('Query:[\s\t]+(\S+)[\s\t]+\[(\w+)\=(\d+)\]',line) if query: q_str= query.group(1) q_tot= query.group(3) acces = re.match("Accession:[\s\t]+(\S+)[\s\t\n]+",line) if acces: a_str = acces.group(1) desc = re.match("Description:[\s\t]+(.*)",line) if desc: d_str = desc.group(1) return q_str, a_str, d_str, q_tot def fout2_headers(fout2, opt): f2 = open(fout2, "a") to_print = '' new_print = [] check = True hmm_FILE = open(fin, "rb") EVal2 = open(fout2, "a") to_print = 'Sequence # type score bias c-Evalue i-Evalue hmmfrom hmm to alifrom alito type envfrom envto type acc' if 'g' in opt: to_print = 'Query Accession Description ' + to_print new_print[:] = to_print.split() new_print = tab_separate_from_array(new_print) + '\n' f2.write(new_print) def tab_fout2(lines): arr_rows = [] arr_row=[] count = 0 for line in lines: to_print = tab_separate_from_array(get_space_array(lines)) arr = to_print.split() for i in arr: if len(arr_row) != 17: arr_row.append(i) elif len(arr_row) == 17: arr_rows.append(arr_row) if i: count = 0 arr_row = [] arr_row.append(i) else: break return arr_rows def dfilter(arr_rows, 
thresh, q_perc,q_tot): arr_hold = [] arr_hold2 = [] perc = '' l_span = '' if thresh !=0: thresh = float(thresh) for i in arr_rows: if float(i[6]) <= thresh: arr_hold.append(i) if q_perc !=0: q_perc = float(q_perc) for i in arr_hold: l_span = int(i[11]) - int(i[10]) l_span = float(l_span) perc = l_span/float(q_tot) if perc >=q_perc: arr_hold2.append(i) if q_perc!=0: arr_hold = arr_hold2 arr_rows = arr_hold return arr_rows def desc2(arr_rows, d_str, a_str, q_str): for i in arr_rows: i.insert(0, d_str) i.insert(0, a_str) i.insert(0, q_str) return arr_rows def arr_row_tabs(arr_rows): i = '' w_tab = '' w_tabs = '' row = '' rows = '' for i in arr_rows: # for word in i: if i: w_tab = '\t'.join(i) + '\n' w_tabs = w_tabs + w_tab # print w_tabs return w_tabs if __name__ == '__main__': if len(sys.argv) < 4 or len(sys.argv) > 9: print len(sys.argv[:]) print "[1] = file_input [2]=evalue_output [3]=id_details_output" print "if [4]=y append header: 'E-value score bias....' to evalue_output to evalue_output" print "g = add 3 columbs for accession, query, and description for file 1" print "h = prints headers for both filesS" print "d -allows for detailed analysis(options e e and l" print "-e = threshold only returns e value over threshold" print "-l = threshhold for query lenght" sys.exit(1) fin = sys.argv[1] fout = sys.argv[2] fout2 = sys.argv[3] #option to print E-Val header line' E-value score bias....Description' in file 1 if len(sys.argv) >= 5: opt = sys.argv[4] #do other tuff else: opt = 0 opts = opt if 'd' in opt: if '-e' in sys.argv[5]: thresh = sys.argv[6] else: thresh = 0 if 'l' in sys.argv[7]: q_perc = sys.argv[8] else: q_perc = 0 hits = domain_HITS(fin) evalues = eval_get_array(hits, fin, fout, opt) q_str, a_str, d_str,q_tot = get_quar_assent(fin) if opt and 'g' in opt: for evalue in evalues: evalue.insert(0, d_str) evalue.insert(0, a_str) evalue.insert(0, q_str) if opt and 'h' in opt: fout2_headers(fout2, opt) evals_string = tab_separated_evals(evalues) id_string = 
seq_get_all_ids(fin, evalues, opt) arr_rows = tab_fout2(id_string) if 'd' in opt: arr_rows = dfilter(arr_rows, thresh, q_perc,q_tot) if opt and 'g' in opt: arr_rows = desc2(arr_rows,d_str,a_str,q_str) arr_rows = arr_row_tabs(arr_rows) f = open(fout, 'a') f2 = open(fout2, 'a') f.write(evals_string) # print evals_string f.write('\n') f2.write(arr_rows) # print id_string f.close() f2.close()
[ [ 1, 0, 0.0401, 0.0033, 0, 0.66, 0, 509, 0, 1, 0, 0, 509, 0, 0 ], [ 1, 0, 0.0435, 0.0033, 0, 0.66, 0.0556, 688, 0, 1, 0, 0, 688, 0, 0 ], [ 1, 0, 0.0468, 0.0033, 0, ...
[ "import sys", "import os", "import fileinput", "import re", "opts = None", "def domain_HITS(fin):\n\thmm_FILE = open(fin, \"rb\")\n\tEVal_FILE = open(fout, \"a\")\n\tfor line in hmm_FILE:\n\t\tpos = line.find('Domain search space ')\n\t\tif pos != -1:\n\t\t\tdomainValueString = str(line)\n\t\t\tdomainV...
#!/usr/bin/python # Copyright 2011 Google, Inc. All Rights Reserved. # simple script to walk source tree looking for third-party licenses # dumps resulting html page to stdout import os, re, mimetypes, sys # read source directories to scan from command line SOURCE = sys.argv[1:] # regex to find /* */ style comment blocks COMMENT_BLOCK = re.compile(r"(/\*.+?\*/)", re.MULTILINE | re.DOTALL) # regex used to detect if comment block is a license COMMENT_LICENSE = re.compile(r"(license)", re.IGNORECASE) COMMENT_COPYRIGHT = re.compile(r"(copyright)", re.IGNORECASE) EXCLUDE_TYPES = [ "application/xml", "image/png", ] # list of known licenses; keys are derived by stripping all whitespace and # forcing to lowercase to help combine multiple files that have same license. KNOWN_LICENSES = {} class License: def __init__(self, license_text): self.license_text = license_text self.filenames = [] # add filename to the list of files that have the same license text def add_file(self, filename): if filename not in self.filenames: self.filenames.append(filename) LICENSE_KEY = re.compile(r"[^\w]") def find_license(license_text): # TODO(alice): a lot these licenses are almost identical Apache licenses. # Most of them differ in origin/modifications. Consider combining similar # licenses. 
license_key = LICENSE_KEY.sub("", license_text).lower() if license_key not in KNOWN_LICENSES: KNOWN_LICENSES[license_key] = License(license_text) return KNOWN_LICENSES[license_key] def discover_license(exact_path, filename): # when filename ends with LICENSE, assume applies to filename prefixed if filename.endswith("LICENSE"): with open(exact_path) as file: license_text = file.read() target_filename = filename[:-len("LICENSE")] if target_filename.endswith("."): target_filename = target_filename[:-1] find_license(license_text).add_file(target_filename) return None # try searching for license blocks in raw file mimetype = mimetypes.guess_type(filename) if mimetype in EXCLUDE_TYPES: return None with open(exact_path) as file: raw_file = file.read() # include comments that have both "license" and "copyright" in the text for comment in COMMENT_BLOCK.finditer(raw_file): comment = comment.group(1) if COMMENT_LICENSE.search(comment) is None: continue if COMMENT_COPYRIGHT.search(comment) is None: continue find_license(comment).add_file(filename) for source in SOURCE: for root, dirs, files in os.walk(source): for name in files: discover_license(os.path.join(root, name), name) print "<html><head><style> body { font-family: sans-serif; } pre { background-color: #eeeeee; padding: 1em; white-space: pre-wrap; } </style></head><body>" for license in KNOWN_LICENSES.values(): print "<h3>Notices for files:</h3><ul>" filenames = license.filenames filenames.sort() for filename in filenames: print "<li>%s</li>" % (filename) print "</ul>" print "<pre>%s</pre>" % license.license_text print "</body></html>"
[ [ 1, 0, 0.0816, 0.0102, 0, 0.66, 0, 688, 0, 4, 0, 0, 688, 0, 0 ], [ 14, 0, 0.1224, 0.0102, 0, 0.66, 0.0714, 792, 6, 0, 0, 0, 0, 0, 0 ], [ 14, 0, 0.1531, 0.0102, 0, ...
[ "import os, re, mimetypes, sys", "SOURCE = sys.argv[1:]", "COMMENT_BLOCK = re.compile(r\"(/\\*.+?\\*/)\", re.MULTILINE | re.DOTALL)", "COMMENT_LICENSE = re.compile(r\"(license)\", re.IGNORECASE)", "COMMENT_COPYRIGHT = re.compile(r\"(copyright)\", re.IGNORECASE)", "EXCLUDE_TYPES = [\n \"application/xml\...
#!/usr/bin/env python # -*- coding: utf-8 -*- import bisect #Entrada edificios (izq,alt,der) #Entrada de ejemplo del enunciado a = [ (4,15,10), (2,11,9), (7,7,18), (12,13,16), (20,9,28), (22,20,25), (26,13,29) ] #Entrada con edificios solapados #a = [ #(22,20,25), #(24,13,29), #(25,13,29), #(26,13,29), #(27,13,29), #(28,13,29), #(27,13,33) #] id=0 flancos = [] #Desgloso los edificios en flancos de subida y bajada for p in a: #subida flancos.append((p[0],p[1],True,id)) #bajada flancos.append((p[2],p[1],False,id)) id += 1 flancos.sort() alturas = []#Estructura que contendra las alturas de los edificios(inserción ordenada) bisect.insort_left(alturas,(0,-1))#Inserto el piso, altura=0. solCandidata = (flancos[0][0],flancos[0][1])#Solucion candidata, se usa para poder comparar con edificioActual y evitar problemas de solapamiento, la solución se imprimirá cdo se sepa que no está solapada edificioActual = (flancos[0][1],flancos[0][3])#Edificio que se está "dibujando" actualmente for flanco in flancos[1:]: #Si el flanco que encontre corresponde al edificio actual entonces es un flanco de bajada #Caso 4 if(flanco[3] == edificioActual[1] ): #Para evitar problema de edificios solapados por flanco de bajada #No se agrega a la solución a menos que la altura sea diferente if(edificioActual[0]!=alturas[-1][0]): print(solCandidata) solCandidata = (flanco[0],alturas[-1][0]) edificioActual = alturas.pop() #El flanco encontrado es de subida, por ende, corresponde a otro edificio elif(flanco[2]): #si la altura es mayor a la actual #Se agrega a la solucion #Caso 1 if(flanco[1]>edificioActual[0]): #Para evitar problema de edificios solapados por flanco de subida if(flanco[0] != solCandidata[0]): print(solCandidata) solCandidata = (flanco[0],flanco[1]) bisect.insort_left(alturas,edificioActual) edificioActual = (flanco[1],flanco[3]) #Flanco de subida con altura menor o igual a la del edificio actual #Caso 5 else: bisect.insort_left(alturas,(flanco[1],flanco[3])) #Si el flanco encontrado 
es de bajada y no es del edificio actual #Caso 2 else: alturas.remove((flanco[1],flanco[3])) print solCandidata
[ [ 1, 0, 0.0361, 0.012, 0, 0.66, 0, 325, 0, 1, 0, 0, 325, 0, 0 ], [ 14, 0, 0.1386, 0.1205, 0, 0.66, 0.0909, 475, 0, 0, 0, 0, 0, 5, 0 ], [ 14, 0, 0.3614, 0.012, 0, 0....
[ "import bisect", "a = [\n\n(4,15,10),\n(2,11,9),\n(7,7,18),\n(12,13,16),\n(20,9,28),\n(22,20,25),", "id=0", "flancos = []", "for p in a:\n\t#subida\n\tflancos.append((p[0],p[1],True,id))\n\t#bajada\n\tflancos.append((p[2],p[1],False,id))\n\tid += 1", "\tflancos.append((p[0],p[1],True,id))", "\tflancos.a...
#!/usr/bin/python import random,\ sys MIN_CARGAS = 5 MAX_CARGAS = 5 MIN_CARGAS_PELIGRO = 1 MAX_CARGAS_PELIGRO = 6 cargas_disponibles = range(10) #menor cantidad de cargas utilizadas #hasta ahora en la mejor solucion. #menor_cantidad = -1 # for i in range(len(cargas_disponibles)): # for c in solucion.camiones # #poner i e C # #cantidad = llamar recursivamente (cargas restantes, resultado, menor_cantidad) # if (cantidad < menor_cantidad) # menor_cantidad = cantidad # guardar mejor solucion # retornar mejor_cantidad for t in range(1): cantCargas =random.randint(MIN_CARGAS,MAX_CARGAS) limitePeligro = random.randint(5,30) sys.stdout.write("%s %s\n" % (cantCargas,limitePeligro )) for i in reversed(range(cantCargas-1)): for j in range(i+1): sys.stdout.write("%s " % random.randint(MIN_CARGAS_PELIGRO,MAX_CARGAS_PELIGRO)) sys.stdout.write("\n") sys.stdout.write("0\n")
[ [ 1, 0, 0.1029, 0.0588, 0, 0.66, 0, 715, 0, 2, 0, 0, 715, 0, 0 ], [ 14, 0, 0.1765, 0.0294, 0, 0.66, 0.1667, 98, 1, 0, 0, 0, 0, 1, 0 ], [ 14, 0, 0.2059, 0.0294, 0, 0...
[ "import random,\\\n\tsys", "MIN_CARGAS = 5", "MAX_CARGAS = 5", "MIN_CARGAS_PELIGRO = 1", "MAX_CARGAS_PELIGRO = 6", "cargas_disponibles = range(10)", "for t in range(1):\n\tcantCargas =random.randint(MIN_CARGAS,MAX_CARGAS)\n\tlimitePeligro = random.randint(5,30)\n\tsys.stdout.write(\"%s %s\\n\" % (cantCa...
#!/usr/bin/python import sys #aca se puede ver los lugares a los que se puede saltar desde #la o, las x son los saltos posibles con 1 salto. #| |x| |x| | #|x| | | |x| #| | |o| | | #|x| | | |x| #| |x| |x| | #quiero saltar desde (x,y) a (w,z), el tablero tiene nXx y matriz es la tabla dinamica recorridoCount = 0 def cantidadDeSaltos(x,y,w,z,n,m): global matriz, recorrido, recorridoCount print "|%s|%s|" % (x,y) if recorrido[ (x*n) + y ] == 0: print "aumento %s %s %s " % (x,y,recorridoCount) recorridoCount += 1 recorrido[ (x*n) + y ] = recorridoCount if w > n-1 or z > n-1 or w < 0 or z < 0: print "no encuentro" return -1 vecinos = [(-2,-1),(-2,1),\ (-1,-2),(-1,2),\ (1,-2),(1,2),\ (2,-1),(2,1)] (p,q) = (abs(w-x),abs(z-y)) if (p,q) in matriz: print "encuentro en matriz (%s,%s) -> %s" % (p,q,matriz[(p,q)]) return matriz[(p,q)] if p == 0 and q == 0: print "llege " return 0 if (p,q) in vecinos: print "es un vecino" matriz[(p,q)] = 1 return 1 ret = -1 for t in vecinos: (p,q) = t #verifico que se pueda saltar a ese vecino if p+x >= 0 and q+y >= 0 and p+x < n and q+y < n and recorrido[ ((p+x)*n) + (q+y) ] == 0: print "llamo recursivo desde (%s,%s,%s,%s)" % (p+x,q+y,w,z) saltos = cantidadDeSaltos(p+x,q+y,w,z,n,matriz) if saltos >= 0 and (ret == -1 or saltos+1 < ret ): ret = saltos+1 if ret != -1: matriz[(abs(w-x),abs(z-y))] = ret print "de (%s,%s) a (%s,%s) se llega con %s saltos " % (x,y,w,z,ret) return ret # la matriz la pienso tomando solo 1 cuarto del grafico # que esta arriba con la o y las x. 
# Si estoy en la o con saber como saltar al cuadrante derecho superior # me alcanza para moverme en cualquier direccion matriz = dict([ ((0,0) , 0 )]) n = 5 recorrido = [0] * (n * n) resultado = cantidadDeSaltos(0,4,2,4,5,matriz) print "fin %s" % resultado print "-------------------------------------------------" resultado = cantidadDeSaltos(0,4,0,3,5,matriz) print "fin %s" % resultado print "recorrido" print "-------------------------------------------------" resultado = cantidadDeSaltos(0,0,4,0,5,matriz) print "fin %s" % resultado print "recorrido" for i in range(n): for j in range(n): sys.stdout.write( "%-3s " % recorrido[i*n+j] ) sys.stdout.write( "\n") sys.stdout.write( "\n") for k,v in matriz.iteritems(): print "%s => %s" % (k,v)
[ [ 1, 0, 0.0235, 0.0118, 0, 0.66, 0, 509, 0, 1, 0, 0, 509, 0, 0 ], [ 14, 0, 0.1412, 0.0118, 0, 0.66, 0.0556, 55, 1, 0, 0, 0, 0, 1, 0 ], [ 2, 0, 0.4176, 0.5412, 0, 0....
[ "import sys", "recorridoCount = 0", "def cantidadDeSaltos(x,y,w,z,n,m):\n\n\tglobal matriz, recorrido, recorridoCount\n\n\tprint(\"|%s|%s|\" % (x,y))\n\tif recorrido[ (x*n) + y ] == 0:\n\t\tprint(\"aumento %s %s %s \" % (x,y,recorridoCount))\n\t\trecorridoCount += 1", "\tprint(\"|%s|%s|\" % (x,y))", "\tif r...
#!/usr/bin/python #matriz 4x4, tengo 4 ciudades #esta matriz, lo que indica, es para la posicion i,j #la lista de vuelos que hay. Osea lista de vuelos desde la ciudad i a la ciudad j #entonces por ej en [(0,1),(0,2)] estoy diciendo que hay 2 vuelos uno sale a las #0 y llega a las 2 y el otro sale a las 0 y llega a las 2 ciudades = [[ [] , [(0,1),(0,2)] , [ (1,3)], [ (3,4)] , []],\ [ [] , [] , [(4,6)] , [] ,[] ],\ [ [] , [] , [] , [] , [(8,10)] ],\ [ [] , [] , [] , [] , [(6,8)]],\ [ [] , [] , [] , [] , []] ] #esta lista de abajo, indica la mejor hora de llegada que conozco a una ciudad. #y desde que ciudad llego a esa hora. La idea de la lista #es ir recorriendo el grafico del grafo siempre desde la hora #mas temprana acumulada #https://www.youtube.com/watch?v=VENf0GXRd6E # lo que este primero en la lista o lo que saque primero #sera un vuelo que me permite llegar en el menor #tiempo posible desde un nodo ya recorrido a uno no recorrido # (ciudad origen, hora llegada, ciudad destino) lista = [(0,0,0)] a = lista.pop() # esta es la lista que indica que nodos ya recorri #en busqueda de cual es el menor tiempo en el que puedo llegar. visitados = [None,None,None, None,None] while a : #tomo el menor vuelo posible en cuanto a hora de llegada. #que me llevara de un nodo recorrido a uno aun no recorrido ciudadid = a[2] horallegada = a[1] origen = a[0] for i in range(len(ciudades)): if i == ciudadid : continue #para cada una de las ciudades que tiene vuelos con la ciudad #que estoy evaluando ahora. veo si #con los vuelos desde esta ciudad actual puedo llegar antes #del tiempo en que lo hacia antes for (sale,llega) in ciudades[ciudadid][i]: #de todos los vuelos posibles desde la ciudad actual solo # puedo tener en cuenta los que salen 2 horas adelante de la hora # de llegada a estaa ciudad que estoy evaluando. 
Como # tome de "lista" un vuelo que es el que mas temprano me deja en esta # ciudad, entonces solo puedo evaluar los vuelos 2 horas # despues a esta hora de llegada if horallegada+2 <= sale: #puedo tomar ese vuelo #si a esta ciudad no tenia hora de llegada, obviamente cualquiera #sera mejor que nada if not visitados[i] : visitados[i] = (ciudadid,llega) lista.append( ( ciudadid,llega,i)) print "actualize arriba" #si la hora de llegada que tenia a la ciudad de destino #es mayor a la que puedo conseguir entonces reemplazo #por que ahora desde esta ciudad qu eestoy evaluando a la ciudadde #destino llego antes entonces es una mejora if llega < visitados[i][1]: visitados[i] = (ciudadid,llega) lista.append( ( ciudadid,llega,i)) print "actualize abajo" # print " vuelo de %s a %s, sale a las %s llega a las %s " % (ciudadid,i,sale,llega) #esto en realidad ordena la lista por orden de llegada de los vuelos. #esta hecho a mano por que en realidad es un heap ordenado por hora de llegada. # en donde si hay una ciudad que figura como destino en 2 vuelos entonces #se deja el mejor vuelo en la lista sorted(lista,lambda x,y : cmp(x[1],y[1]) ) a = lista.pop() if len(lista) else False #al final quedara en visitados la informacion con la mejor #hora de llegada a cada ciudad, y desde que otra ciudad se logra #ese tiempo. De esta manera se puede recontruir hacia atras el recorrido print visitados # for i in range(len(ciudades)): # for j in range(len(ciudades)): # print ciudades[i][j] #mejorHorario # lista = (origen, horas) //lista prioridad ordenada por horas. # extraer primero de lista en A # # para cada vuelo partiendo de A hacia un destino X # donde destino X no es alcanzable o si destino X se alcanza despues # que el vuelo que evaluo actualizar la fecha de llegada # desde A a destino X. 
# Si actualize horas de llegada a destino X y destino X # no fue recorrido todabia pusheo a la lista ordenada # (X, hora de llegada) # Si en la lista de destinos esta B # el valor en horas sera el menor.
[ [ 14, 0, 0.1154, 0.0481, 0, 0.66, 0, 350, 0, 0, 0, 0, 0, 5, 0 ], [ 14, 0, 0.2404, 0.0096, 0, 0.66, 0.2, 434, 0, 0, 0, 0, 0, 5, 0 ], [ 14, 0, 0.25, 0.0096, 0, 0.66, ...
[ "ciudades = [[ [] , [(0,1),(0,2)] , [ (1,3)], [ (3,4)] , []],\\\n\t\t [ [] , [] , [(4,6)] , [] ,[] ],\\\n\t\t [ [] , [] , [] , [] , [(8,10)] ],\\\n\t\t [ [] , [] , [] , [] , [(6,8)]],\\\n\t\t [ [] , []...
from random import uniform NUMTEST = 30 for test in range(0,NUMTEST): cantFilas = test for cantCaballos in range(0,int(cantFilas*cantFilas/2)): #cantCaballos = int(uniform(0,cantFilas*cantFilas)) print cantFilas,cantCaballos for c in range(1,cantCaballos+1): print int(uniform(1,cantFilas)),int(uniform(1,cantFilas)) print 0
[ [ 1, 0, 0.0909, 0.0909, 0, 0.66, 0, 715, 0, 1, 0, 0, 715, 0, 0 ], [ 14, 0, 0.1818, 0.0909, 0, 0.66, 0.3333, 404, 1, 0, 0, 0, 0, 1, 0 ], [ 6, 0, 0.5455, 0.6364, 0, 0...
[ "from random import uniform", "NUMTEST = 30", "for test in range(0,NUMTEST):\n\tcantFilas = test\n\tfor cantCaballos in range(0,int(cantFilas*cantFilas/2)):\n\t\t#cantCaballos = int(uniform(0,cantFilas*cantFilas))\n\t\tprint(cantFilas,cantCaballos)\n\t\tfor c in range(1,cantCaballos+1):\n\t\t\tprint(int(uniform...
from random import uniform NUMTEST = 100 for test in range(1,NUMTEST+1): cantNodos = test print cantNodos,(cantNodos*(cantNodos-1))/2 for u in range(1,cantNodos+1): for v in range(u+1,cantNodos+1): print u,v,int(uniform(0,100)) print 0
[ [ 1, 0, 0.0833, 0.0833, 0, 0.66, 0, 715, 0, 1, 0, 0, 715, 0, 0 ], [ 14, 0, 0.1667, 0.0833, 0, 0.66, 0.3333, 404, 1, 0, 0, 0, 0, 1, 0 ], [ 6, 0, 0.4583, 0.5, 0, 0.66...
[ "from random import uniform", "NUMTEST = 100", "for test in range(1,NUMTEST+1):\n\tcantNodos = test\n\tprint(cantNodos,(cantNodos*(cantNodos-1))/2)\n\tfor u in range(1,cantNodos+1):\n\t\tfor v in range(u+1,cantNodos+1):\n\t\t\tprint(u,v,int(uniform(0,100)))", "\tcantNodos = test", "\tprint(cantNodos,(cantNo...
#!/usr/bin/python import random,\ sys MIN_VERTICES = 3 MAX_VERTICES = 10 INSTANCIAS = 50 MAX_EJE_PESO = 5 for i in range(INSTANCIAS): #cantidad de vertices. v = random.randint(MIN_VERTICES,MAX_VERTICES) #cantidad de ejes. #grafo esparso w = random.randint(v, max(int( ( (v*(v-1))* 0.5 ) * 0.5),v) ); #grafo denso #w = random.randint(max(int( ( (v*(v-1))* 0.5 ) * 0.5),v),max(int( ( (v*(v-1))* 0.5 )),v) ) ; #cantidad de particiones. k = random.randint(2,int(v/2)+1) ejes = set([]) pesos = dict([]) for t in range(w): (v1, v2) = (random.randint(0,v-1),random.randint(0,v-1)) while v1 == v2 or (v1,v2) in ejes or (v2,v1) in ejes : (v1, v2) = (random.randint(0,v-1),random.randint(0,v-1)) ejes.add((v1, v2)) pesos["%d-%d" % (v1, v2) ] = random.randint(1,MAX_EJE_PESO) print "%s %s %s" % (v,w,k) for (e1,e2) in ejes: print "%s %s %s" % (e1,e2,pesos["%d-%d" % (e1, e2)]) print "0"
[ [ 1, 0, 0.0714, 0.0571, 0, 0.66, 0, 715, 0, 2, 0, 0, 715, 0, 0 ], [ 14, 0, 0.1429, 0.0286, 0, 0.66, 0.1667, 413, 1, 0, 0, 0, 0, 1, 0 ], [ 14, 0, 0.1714, 0.0286, 0, ...
[ "import random,\\\n sys", "MIN_VERTICES = 3", "MAX_VERTICES = 10", "INSTANCIAS = 50", "MAX_EJE_PESO = 5", "for i in range(INSTANCIAS):\n \n #cantidad de vertices.\n v = random.randint(MIN_VERTICES,MAX_VERTICES)\n #cantidad de ejes.\n\n #grafo esparso\n w = random.randint(v, max(int(...
#!/usr/bin/python import random,\ sys MIN_VERTICES = 3 MAX_VERTICES = 15 INSTANCIAS = 500 MAX_EJE_PESO = 50 for i in range(INSTANCIAS): v = random.randint(MIN_VERTICES,MAX_VERTICES) w = random.randint(v,(v*(v-1))/2); k = random.randint(2,int(v/2)+1) ejes = set([]) pesos = dict([]) for t in range(w): (v1, v2) = (random.randint(0,v-1),random.randint(0,v-1)) while v1 == v2 or (v1,v2) in ejes or (v2,v1) in ejes : (v1, v2) = (random.randint(0,v-1),random.randint(0,v-1)) ejes.add((v1, v2)) pesos["%d-%d" % (v1, v2) ] = random.randint(1,MAX_EJE_PESO) print "%s %s %s" % (v,w,k) for (e1,e2) in ejes: print "%s %s %s" % (e1,e2,pesos["%d-%d" % (e1, e2)]) print "0"
[ [ 1, 0, 0.0926, 0.0741, 0, 0.66, 0, 715, 0, 2, 0, 0, 715, 0, 0 ], [ 14, 0, 0.1852, 0.037, 0, 0.66, 0.1667, 413, 1, 0, 0, 0, 0, 1, 0 ], [ 14, 0, 0.2222, 0.037, 0, 0....
[ "import random,\\\n sys", "MIN_VERTICES = 3", "MAX_VERTICES = 15", "INSTANCIAS = 500", "MAX_EJE_PESO = 50", "for i in range(INSTANCIAS):\n \n v = random.randint(MIN_VERTICES,MAX_VERTICES)\n w = random.randint(v,(v*(v-1))/2);\n k = random.randint(2,int(v/2)+1)\n ejes = set([])\n peso...
#!/usr/bin/python import random,\ sys MIN_VERTICES = 3 MAX_VERTICES = 15 INSTANCIAS = 300 MAX_EJE_PESO = 50 for i in range(INSTANCIAS): v = random.randint(MIN_VERTICES,MAX_VERTICES) w = random.randint(v,int((v*(v-1))/4)+2); k = random.randint(2,int(v/4)+2) ejes = set([]) pesos = dict([]) for t in range(w): (v1, v2) = (random.randint(0,v-1),random.randint(0,v-1)) while v1 == v2 or (v1,v2) in ejes or (v2,v1) in ejes : (v1, v2) = (random.randint(0,v-1),random.randint(0,v-1)) ejes.add((v1, v2)) pesos["%d-%d" % (v1, v2) ] = random.randint(1,MAX_EJE_PESO) print "%s %s %s" % (v,w,k) for (e1,e2) in ejes: print "%s %s %s" % (e1,e2,pesos["%d-%d" % (e1, e2)]) print "0"
[ [ 1, 0, 0.0926, 0.0741, 0, 0.66, 0, 715, 0, 2, 0, 0, 715, 0, 0 ], [ 14, 0, 0.1852, 0.037, 0, 0.66, 0.1667, 413, 1, 0, 0, 0, 0, 1, 0 ], [ 14, 0, 0.2222, 0.037, 0, 0....
[ "import random,\\\n sys", "MIN_VERTICES = 3", "MAX_VERTICES = 15", "INSTANCIAS = 300", "MAX_EJE_PESO = 50", "for i in range(INSTANCIAS):\n \n v = random.randint(MIN_VERTICES,MAX_VERTICES)\n w = random.randint(v,int((v*(v-1))/4)+2);\n k = random.randint(2,int(v/4)+2)\n ejes = set([])\n ...
#!/usr/bin/python import random,\ sys MIN_VERTICES = 3 MAX_VERTICES = 30 INSTANCIAS = 500 MAX_EJE_PESO = 100 for i in range(INSTANCIAS): v = random.randint(MIN_VERTICES,MAX_VERTICES) w = random.randint(((v*(v-1))/4),((v*(v-1))/2)); k = random.randint(2,int(v/2)+1) ejes = set([]) pesos = dict([]) for t in range(w): (v1, v2) = (random.randint(0,v-1),random.randint(0,v-1)) while v1 == v2 or (v1,v2) in ejes or (v2,v1) in ejes : (v1, v2) = (random.randint(0,v-1),random.randint(0,v-1)) ejes.add((v1, v2)) pesos["%d-%d" % (v1, v2) ] = random.randint(1,MAX_EJE_PESO) print "%s %s %s" % (v,w,k) for (e1,e2) in ejes: print "%s %s %s" % (e1,e2,pesos["%d-%d" % (e1, e2)]) print "0"
[ [ 1, 0, 0.0926, 0.0741, 0, 0.66, 0, 715, 0, 2, 0, 0, 715, 0, 0 ], [ 14, 0, 0.1852, 0.037, 0, 0.66, 0.1667, 413, 1, 0, 0, 0, 0, 1, 0 ], [ 14, 0, 0.2222, 0.037, 0, 0....
[ "import random,\\\n sys", "MIN_VERTICES = 3", "MAX_VERTICES = 30", "INSTANCIAS = 500", "MAX_EJE_PESO = 100", "for i in range(INSTANCIAS):\n \n v = random.randint(MIN_VERTICES,MAX_VERTICES)\n w = random.randint(((v*(v-1))/4),((v*(v-1))/2));\n k = random.randint(2,int(v/2)+1)\n ejes = se...
#!/usr/bin/python import random,\ sys MIN_VERTICES = 3 MAX_VERTICES = 50 INSTANCIAS = 2000 MAX_EJE_PESO = 5 kmax = 1 disperso = False if(len(sys.argv) > 2 and sys.argv[2] == "-e"): disperso = True sys.stderr.write("Generando grafos dispersos\n") elif(len(sys.argv) > 1): sys.stderr.write("Generando grafos densos\n") else: sys.stderr.write("USO: %s {k} [-e]\n\tk Cantidad de particiones\n\t-e Genera grafos dispersos, de lo contrario genera densos.\n" % sys.argv[0]) exit() kmax = int(sys.argv[1]) sys.stderr.write("KMAX = %d\n" % kmax) for i in range(INSTANCIAS): #cantidad de vertices. v = random.randint(MIN_VERTICES,MAX_VERTICES) #cantidad de ejes. #grafo esparso if(disperso): w = random.randint(v, max(int( ( (v*(v-1))* 0.5 ) * 0.5),v) ); #grafo denso else: w = random.randint(max(int( ( (v*(v-1))* 0.5 ) * 0.5),v),max(int( ( (v*(v-1))* 0.5 )),v) ) ; #cantidad de particiones. #k = random.randint(2,int(v/2)+1) #k = int(v/2)+1 k= random.randint(2,kmax) ejes = set([]) pesos = dict([]) for t in range(w): (v1, v2) = (random.randint(0,v-1),random.randint(0,v-1)) while v1 == v2 or (v1,v2) in ejes or (v2,v1) in ejes : (v1, v2) = (random.randint(0,v-1),random.randint(0,v-1)) ejes.add((v1, v2)) pesos["%d-%d" % (v1, v2) ] = random.randint(1,MAX_EJE_PESO) print "%s %s %s" % (v,w,k) for (e1,e2) in ejes: print "%s %s %s" % (e1,e2,pesos["%d-%d" % (e1, e2)]) print "0"
[ [ 1, 0, 0.0455, 0.0364, 0, 0.66, 0, 715, 0, 2, 0, 0, 715, 0, 0 ], [ 14, 0, 0.0909, 0.0182, 0, 0.66, 0.0909, 413, 1, 0, 0, 0, 0, 1, 0 ], [ 14, 0, 0.1091, 0.0182, 0, ...
[ "import random,\\\n sys", "MIN_VERTICES = 3", "MAX_VERTICES = 50", "INSTANCIAS = 2000", "MAX_EJE_PESO = 5", "kmax = 1", "disperso = False", "if(len(sys.argv) > 2 and sys.argv[2] == \"-e\"):\n disperso = True\n sys.stderr.write(\"Generando grafos dispersos\\n\")\nelif(len(sys.argv) > 1):\n ...
import differential as p1 import numpy as np import matplotlib.pylab as mp from math import sqrt ## ------------------------- pendule simple ------------------------------## ## equation : ## y'' + W^2sin(y) = 0 si y petit y'' + W^2y = 0 avec W^2=g/l d = np.pi / 2 step = 0.001 N = int (d / step) # tige de longueur 10cm l=0.1 g=9.81 y0 = np.array([1.5, 0.]) t0 = np.array(0) f_expe = lambda y, t: np.array([y[1], (-g/l)*np.sin(y[0])]) f_theo = lambda y, t: y0[0]*np.cos((sqrt(g/l)*t)) # resolution de l'equation differentielle du pendule a l'aide des methodes mises en place dans la partie 1 def pendule_plot (): T_theo = p1.meth_N_step_theo(t0, N, step, f_theo) V = p1.meth_N_step(y0, t0, N, step, f_expe, p1.step_rk4) T_expe = [0 for i in range(0, np.shape(V)[0])] for i in range (0, np.shape(V)[0]): T_expe[i] = V[i][0] plt1, = mp.plot(T_expe) plt2, = mp.plot(T_theo) mp.xlabel("t") mp.ylabel("theta") mp.legend([plt1, plt2], ["courbe experimentale", "courbe theorique"]) # mp.savefig("pendule_simple") mp.show() ## frequence ## # returns the frequency of the function or -1 if the funtion is not periodical def function_freq (T, epsi): i = 0 j = 1 length = len(T)-1 while j < length and (abs(T[j] - T[i]) >= epsi or (T[j+1]-T[j]) * (T[i+1]-T[i]) > 0): j+=1 if j == length: return -1 else : while j < length and abs(T[j] - T[i]) <= 2*epsi: i+=1 j+=1 if j == length: return float(2*np.pi / ((j - i) * step)) else : return -1 def pendule_freq (theta0): V = p1.meth_N_step(theta0, t0, N, step, f_expe, p1.step_rk4) T_expe = [0 for i in range(0, np.shape(V)[0])] for i in range (0, np.shape(V)[0]): T_expe[i] = V[i][0] epsi = 0.001 while(function_freq (T_expe, epsi)) == -1: epsi += 0.0001 return function_freq (T_expe, epsi) def plot_pendule_freq (): theta = np.arange(-np.pi/4., np.pi/4., 0.1) T = [] for i in range(len(theta)): T = T + [pendule_freq([theta[i], 0])] mp.plot(theta, T) mp.xlabel("Angle initial") mp.ylabel("Frequence du pendule") # mp.savefig("pendule_frequence") mp.show() 
pendule_plot () plot_pendule_freq ()
[ [ 1, 0, 0.0106, 0.0106, 0, 0.66, 0, 439, 0, 1, 0, 0, 439, 0, 0 ], [ 1, 0, 0.0213, 0.0106, 0, 0.66, 0.0556, 954, 0, 1, 0, 0, 954, 0, 0 ], [ 1, 0, 0.0319, 0.0106, 0, ...
[ "import differential as p1", "import numpy as np", "import matplotlib.pylab as mp", "from math import sqrt", "d = np.pi / 2", "step = 0.001", "N = int (d / step)", "l=0.1", "g=9.81", "y0 = np.array([1.5, 0.])", "t0 = np.array(0)", "f_expe = lambda y, t: np.array([y[1], (-g/l)*np.sin(y[0])])", ...
import numpy as np import differential as p1 import matplotlib.pyplot as plt def resolution_eq(y0,t0,N,h,meth): """ Resolution des equations de Malthus et Verhuslt """ gamma = 2.9 k = 20e9 Malthus = lambda y,t : y Verhulst = lambda y,t : gamma*y*(1-(y/k)) X = p1.meth_N_step(y0,t0,N,h,Malthus,meth) Y = p1.meth_N_step(y0,t0,N,h,Verhulst,meth) plt1, = plt.plot(X) plt2, = plt.plot(Y) plt.legend([plt1,plt2],["Malthus","Verhulst"]) plt.show() #resolution_eq( 20e9 , 0. , 100 , 1/float(20) , p1.step_rk4 ) #resolution_eq( 40e9 , 0. , 100 , 1/float(20) , p1.step_rk4 ) def resolution_Lotka_Volterra(y0,t0,N,h,meth): """ Resolution du systeme et affichage de solutions autour du point de depart ainsi que des variations """ a = b = c = d = 1 F = lambda y,t: np.array([ y[0]*(a-b*y[1]) , y[1]*(c*y[0]-d) ]) for i in range(0,5): res = p1.meth_N_step(y0+i*0.1, t0, N, h, F, meth) X = [] Y = [] for j in res: X.append(j[0]) Y.append(j[1]) plt3 = plt.plot(X , Y) plt.title("Solution autour du point de depart") plt.show() plt1, = plt.plot(X) plt2, = plt.plot(Y) plt.legend([plt1,plt2],["Proies","Predateurs"]) plt.title("Variations") plt.show() return X,Y #resolution_Lotka_Volterra(np.array([0.5,0.05]), 0, 1000, 1/float(100), p1.step_rk4) #resolution_Lotka_Volterra(np.array([0.5,0.5]), 0, 1000, 1/float(100), p1.step_rk4) #resolution_Lotka_Volterra(np.array([0.5,5]), 0, 1000, 1/float(100), p1.step_rk4) def calcul_periode(X): """ Recherche les deux premiers pics de X et retourne la difference entre leurs indices """ n = len(X) i = 1 while( not( (X[i-1]<= X[i]) and (X[i] >= X[i+1]) ) and (i < n-1) ): i += 1 p1 = i i+=1 while( not( (X[i-1]<= X[i]) and (X[i] >= X[i+1]) ) and (i < n-1) ): i += 1 p2 = i return p2-p1 resolution_eq( 6e9 , 0. , 100 , 1/float(20) , p1.step_rk4 ) print calcul_periode(resolution_Lotka_Volterra(np.array([0.5,0.5]),1,1000,1/float(100),p1.step_rk4)[0])
[ [ 1, 0, 0.0147, 0.0147, 0, 0.66, 0, 954, 0, 1, 0, 0, 954, 0, 0 ], [ 1, 0, 0.0294, 0.0147, 0, 0.66, 0.1429, 439, 0, 1, 0, 0, 439, 0, 0 ], [ 1, 0, 0.0441, 0.0147, 0, ...
[ "import numpy as np", "import differential as p1", "import matplotlib.pyplot as plt", "def resolution_eq(y0,t0,N,h,meth):\n \"\"\" Resolution des equations de Malthus et Verhuslt \"\"\"\n gamma = 2.9\n k = 20e9\n\n Malthus = lambda y,t : y\n Verhulst = lambda y,t : gamma*y*(1-(y/k))\n X = p1...
import differential as p1 import numpy as np import matplotlib.pyplot as mp from math import sqrt d = 3.14 step = 0.001 # tige de longueur 10cm N=3500 l=0.1 g=9.81 ## pendule a deux maillons ## # longueur de le tige 10cm masse de 1g m = 1 # Y=[y1, y2, y1', y2'] F(Y,t)=[y1', y2', y1'', y2''] ou y1'' et y2'' verifient les equations du pendule double def modelisation_pendule_double(theta10, theta20): A0 = np.array([theta10, theta20, 0., 0.]) T0 = 0 pendule_double_expe = lambda y, t: np.array([y[2], y[3], ((-3*g*m*np.sin(y[0])-m*g*np.sin(y[0]-2*y[1])-2*m*l*np.sin(y[0]-y[1])*(y[3]**2+(y[2]**2)*np.cos(y[0]-y[1]))) / (l*m*(3-np.cos(2*(y[0]-y[1]))))) , ((2*np.sin(y[0]-y[1])*((y[2]**2)*2*m*l+2*m*g*np.cos(y[0])+(y[3]**2)*l*m*np.cos(y[0]-y[1]))) / (l*m*(3-np.cos(2*(y[0]-y[1])))))]) V = p1.meth_N_step(A0, T0, N, step, pendule_double_expe, p1.step_rk4) T_expe_1 = [] T_expe_2 = [] for i in range (np.shape(V)[0]): T_expe_1 = T_expe_1 + [V[i][0]] for i in range (0, np.shape(V)[0]): T_expe_2 = T_expe_2 + [V[i][1]] plt1,=mp.plot(T_expe_1) plt2,=mp.plot(T_expe_2) mp.legend([plt1, plt2],["theta1(t) avec theta1(0)=PI/2","theta2(t) avec theta2(0)=0"]) mp.show() def trajectoire_pendule_double(theta10, theta20): A0=np.array([theta10, theta20, 0., 0.]) T0=np.array([0.]) pendule_double_expe = lambda y, t: np.array([y[2], y[3], ((-3*g*m*np.sin(y[0])-m*g*np.sin(y[0]-2*y[1])-2*m*l*np.sin(y[0]-y[1])*(y[3]**2+(y[2]**2)*np.cos(y[0]-y[1]))) / (l*m*(3-np.cos(2*(y[0]-y[1]))))) , ((2*np.sin(y[0]-y[1])*((y[2]**2)*2*m*l+2*m*g*np.cos(y[0])+(y[3]**2)*l*m*np.cos(y[0]-y[1]))) / (l*m*(3-np.cos(2*(y[0]-y[1])))))]) V = p1.meth_N_step(A0, T0, N, step, pendule_double_expe, p1.step_rk4) T_expe_1 = [] T_expe_2 = [] for i in range (np.shape(V)[0]): T_expe_1 = T_expe_1 + [V[i][0]] for i in range (np.shape(V)[0]): T_expe_2 = T_expe_2 + [V[i][1]] a2=[] b2=[] for i in range (np.shape(V)[0]): b2 = b2 + [- l*np.cos(T_expe_1[i]) - l*np.cos(T_expe_2[i]+T_expe_1[i])] a2 = a2 + [l*np.sin(T_expe_1[i]) + 
l*np.sin(T_expe_2[i]+T_expe_1[i])] return (a2, b2) def plot_pendule_double (): (x1, y1) = trajectoire_pendule_double(3.14/2., 0.) plt1, = mp.plot (x1, y1) (x2, y2) = trajectoire_pendule_double(3.14/2. + 0.1, 0.) plt2, = mp.plot (x2, y2) mp.legend([plt1,plt2],["theta1(0)=PI/2, theta2(0)=0","theta1(0)=PI/2+0.1, theta2(0)=0"]) mp.show() def plot_difference (): (x1, y1) = trajectoire_pendule_double(3.14/2., 0.) (x2, y2) = trajectoire_pendule_double(3.14/2. + 0.1, 0.) (x3, y3) = trajectoire_pendule_double(3.14/2. + 0.01, 0.) (x4, y4) = trajectoire_pendule_double(3.14/2. + 0.001, 0.) x = [] y = [] z = [] for i in range (N): x = x + [np.sqrt((x1[i]-x2[i])**2+(y1[i]-y2[i])**2)] for i in range (N): y = y + [10*np.sqrt((x1[i]-x3[i])**2+(y1[i]-y3[i])**2)] for i in range (N): z = z + [100*np.sqrt((x1[i]-x4[i])**2+(y1[i]-y4[i])**2)] plt1,=mp.plot(x) plt2,=mp.plot(y) plt3,=mp.plot(z) mp.legend([plt1,plt2,plt3],[ "distance entre 2 trajectoires pour un diff de CI de 0.1rad", "10 * distance entre 2 trajectoires pour un diff de CI de 0.01rad", "100 * distance entre 2 trajectoires pour un diff de CI de 0.001rad"]) mp.show() plot_pendule_double () modelisation_pendule_double(3.14/2., 0.) plot_difference()
[ [ 1, 0, 0.0099, 0.0099, 0, 0.66, 0, 439, 0, 1, 0, 0, 439, 0, 0 ], [ 1, 0, 0.0198, 0.0099, 0, 0.66, 0.0625, 954, 0, 1, 0, 0, 954, 0, 0 ], [ 1, 0, 0.0297, 0.0099, 0, ...
[ "import differential as p1", "import numpy as np", "import matplotlib.pyplot as mp", "from math import sqrt", "d = 3.14", "step = 0.001", "N=3500", "l=0.1", "g=9.81", "m = 1", "def modelisation_pendule_double(theta10, theta20):\n\tA0 = np.array([theta10, theta20, 0., 0.])\n\tT0 = 0\n\n\tpendule_...
import numpy as np import matplotlib.pylab as mp from math import exp ## CI dans un tableau ## ## METHODE D'EULER def step_euler(y,t,h,F): n=np.shape(y) z=np.zeros(n) z=y+h*F(y,t) return z ## METHODE DU POINT MILIEU def step_pt_milieu(y,t,h,F): n=np.shape(y) y_aux=np.zeros(n) ##y_aux represente y(n+1/2) z=np.zeros(n) y_aux=y+(h/2)*F(y,t) z=y+h*F(y_aux,t+h/2) return z ## METHODE DE HEUN def step_heun(y,t,h,F): n=np.shape(y) z=np.zeros(n) y1=step_euler(y,t,h,F) p1=F(y1,t+h) p2=F(y,t) z=y+(1/2.)*h*(p1+p2) return z ## METHODE DE RUNGE-KUTTA 4 def step_rk4(y,t,h,F): n=np.shape(y) z=np.zeros(n) p1=F(y,t) y2=y+(h/2)*p1 p2=F(y2,t+(h/2)) y3=y+(h/2)*p2 p3=F(y3,t+(h/2)) y4=y+h*p3 p4=F(y4,t+h) z=y+(h/6)*(p1+2*p2+2*p3+p4) return z ## N ETAPES def meth_N_step(y0, t0, N, h, F, meth): T = [y0] y=y0 t=t0 x = t0 for i in range(0, N): x = float(x+h) y = meth(y,t,h,F) T = T + [y] return T def meth_N_step_theo(t0, N, h, f): T = [f(0, t0)] x = t0 for i in range(0, N): x = float(x+h) y = f(0, x) T = T + [y] return T ### Tracer du champ des tangentes def tangent_field(x_max,y_max,fct): """ Trace le champ des tangentes sur le pave [-x_max,x_max]*[-y_max,y_max] pour la fonction """ N = 20 X = np.linspace(-x_max, x_max, N) Y = np.linspace(-y_max, y_max, N) U = np.ones([N,N]) V = np.ones([N,N]) for i in range(N): for j in range(N): V[i,j] = fct([Y[i]],Y[j]) mp.clf() mp.quiver(X,Y,U,V) mp.title('Champ de tangentes pour la fonction f(x,y) ') mp.xlabel('x ') mp.ylabel('y ') mp.savefig("champ-tangentes.png") mp.show() def test_differential(y0, t0, tf, step, f_expe, f_theo, methods_nb = 4): methods = [ step_euler, "Methode d'euler", step_pt_milieu, "Methode du point milieu", step_heun, "Methode de Heun", step_rk4, "Methode de Runge-Kutta"] d = tf - t0 N = int (d / step) T_theo = meth_N_step_theo(t0, N, step, f_theo) for i in range(0, methods_nb): T_expe = meth_N_step(y0, t0, N, step, f_expe, methods[2*i]) if len(np.shape(T_theo)) == 1: plot_differential (T_expe, T_theo, methods[2*i+1]) else : for 
k in range (np.shape(T_theo)[1]): T_expe_f = [] T_theo_f = [] for j in range (len(T_theo)): T_expe_f = T_expe_f + [T_expe[j][k]] T_theo_f = T_theo_f + [T_theo[j][k]] plot_differential (T_expe_f, T_theo_f, ("courbe", k+1, methods[2*i+1])) def plot_differential (T_expe, T_theo, title=""): plt1, = mp.plot(T_expe) plt2, = mp.plot(T_theo) mp.legend([plt1, plt2], ["courbe experimentale", "courbe theorique"]) mp.title (title) # mp.savefig("resolution_intro") mp.show() if __name__ == "__main__": tangent_field(4,4,lambda x,y:np.cos(x)) a = 1 << 0 b = 1 << 1 c = 1 << 2 ## entrer le code du test a effectuer ## test = a methods_nb = 4 ## ----------- ## if test & a: # test personnel y0 = np.array(1) t0 = 0 tf = 5 step = 0.01 f_expe = lambda y, t: y f_theo = lambda y, t: exp(t) test_differential(y0, t0, tf, step, f_expe, f_theo, methods_nb) if test & b: # test 1 y0 = np.array(1) t0 = 0 tf = 2.5 step = 0.001 f_expe = lambda y, t: float(y / (1 + t**2)) f_theo = lambda y, t: exp(np.arctan(t)) test_differential(y0, t0, tf, step, f_expe, f_theo, methods_nb) if test & c: # test 2 y0 = np.array([1, 0]) t0 = 0 tf = 6*np.pi step = 0.1 f_expe = lambda y, t: np.array([-y[1], y[0]]) f_theo = lambda y, t: np.array([np.cos(t), np.sin(t)]) test_differential(y0, t0, tf, step, f_expe, f_theo, methods_nb)
[ [ 1, 0, 0.0056, 0.0056, 0, 0.66, 0, 954, 0, 1, 0, 0, 954, 0, 0 ], [ 1, 0, 0.0111, 0.0056, 0, 0.66, 0.0833, 607, 0, 1, 0, 0, 607, 0, 0 ], [ 1, 0, 0.0167, 0.0056, 0, ...
[ "import numpy as np", "import matplotlib.pylab as mp", "from math import exp", "def step_euler(y,t,h,F):\n n=np.shape(y)\n z=np.zeros(n)\n z=y+h*F(y,t)\n return z", " n=np.shape(y)", " z=np.zeros(n)", " z=y+h*F(y,t)", " return z", "def step_pt_milieu(y,t,h,F):\n n=np.shape...
''' Created on 21-03-2011 @author: maciek ''' from formater import formatString import os class IndexGenerator(object): ''' Generates Index.html for iOS app OTA distribution ''' basePath = os.path.dirname(__file__) templateFile = os.path.join(basePath,"templates/index.tmpl") releaseUrls = "" appName = "" changeLog = "" description = "" version = "" release = "" def __init__(self,appName, releaseUrls, changeLog, description, version, releases): ''' Constructor ''' self.appName = appName self.releaseUrls = releaseUrls self.changeLog = changeLog self.description = description self.version = version self.releases = releases def get(self): ''' returns index.html source code from template file ''' urlList = self.releaseUrls.split(",") releaseList = self.releases.split(",") generatedHtml="" count=0; for release in releaseList: generatedHtml += " <li>\n" generatedHtml += " <h3><a href=\"javascript:load('" + urlList[count] + "')\">" + release + "</a></h3>\n" generatedHtml += " </li>\n" count += 1 template = open(self.templateFile).read() index = formatString(template, downloads=generatedHtml, changeLog=self.changeLog, appName=self.appName, description=self.description, version = self.version); return index
[ [ 8, 0, 0.0526, 0.0877, 0, 0.66, 0, 0, 1, 0, 0, 0, 0, 0, 0 ], [ 1, 0, 0.1053, 0.0175, 0, 0.66, 0.3333, 11, 0, 1, 0, 0, 11, 0, 0 ], [ 1, 0, 0.1228, 0.0175, 0, 0.66, ...
[ "'''\nCreated on 21-03-2011\n\n@author: maciek\n'''", "from formater import formatString", "import os", "class IndexGenerator(object):\n '''\n Generates Index.html for iOS app OTA distribution\n '''\n basePath = os.path.dirname(__file__)\n templateFile = os.path.join(basePath,\"templates/index....
''' Created on 21-03-2011 @author: maciek ''' def formatString(format, **kwargs): ''' ''' if not format: return '' for arg in kwargs.keys(): format = format.replace("{" + arg + "}", "##" + arg + "##") format = format.replace ("{", "{{") format = format.replace("}", "}}") for arg in kwargs.keys(): format = format.replace("##" + arg + "##", "{" + arg + "}") res = format.format(**kwargs) res = res.replace("{{", "{") res = res.replace("}}", "}") return res
[ [ 8, 0, 0.1304, 0.2174, 0, 0.66, 0, 0, 1, 0, 0, 0, 0, 0, 0 ], [ 2, 0, 0.6739, 0.6957, 0, 0.66, 1, 798, 0, 2, 1, 0, 0, 0, 9 ], [ 8, 1, 0.413, 0.087, 1, 0, 0, ...
[ "'''\nCreated on 21-03-2011\n\n@author: maciek\n'''", "def formatString(format, **kwargs):\n '''\n '''\n if not format: return ''\n \n for arg in kwargs.keys():\n format = format.replace(\"{\" + arg + \"}\", \"##\" + arg + \"##\")\n format = format.replace (\"{\", \"{{\")", " '''\n ...
''' Created on 21-03-2011 @author: maciek ''' from IndexGenerator import IndexGenerator from optparse import OptionParser import os import tempfile import shutil import logging logging.basicConfig(level = logging.DEBUG) parser = OptionParser() parser.add_option('-n', '--app-name', action='store', dest='appName', help='aplication name') parser.add_option('-u', '--release-urls', action='store', dest='releaseUrls', help='URLs of download files - as coma separated list of entrires') parser.add_option('-d', '--destination-directory', action='store', dest='otaAppDir', help='Directory where OTA files are created') parser.add_option('-v', '--version', action='store', dest='version', help='Version of the application') parser.add_option('-r', '--releases', action='store', dest='releases', help='Release names of the application') parser.add_option('-R', '--release-notes', action='store', dest='releaseNotes', help='Release notes of the application (in txt2tags format)') parser.add_option('-D', '--description', action='store', dest='description', help='Description of the application (in txt2tags format)') (options, args) = parser.parse_args() if options.appName == None: parser.error("Please specify the appName.") elif options.releaseUrls == None: parser.error("Please specify releaseUrls") elif options.otaAppDir == None: parser.error("Please specify destination directory") elif options.version == None: parser.error("Please specify version") elif options.releases == None: parser.error("Please specify releases") elif options.releaseNotes == None: parser.error("Please specify releaseNotes") elif options.description == None: parser.error("Please specify description") appName = options.appName releaseUrls = options.releaseUrls otaAppDir = options.otaAppDir version = options.version releases = options.releases releaseNotes = options.releaseNotes description = options.description def findIconFilename(): iconPath = "res/drawable-hdpi/icon.png" if not os.path.exists(iconPath): iconPath = 
"res/drawable-mdpi/icon.png" if not os.path.exists(iconPath): iconPath = "res/drawable-ldpi/icon.png" if not os.path.exists(iconPath): iconPath = "res/drawable/icon.png" logging.debug("IconPath: "+iconPath) return iconPath def createOTApackage(): ''' crates all needed files in tmp dir ''' releaseNotesContent = open(releaseNotes).read() descriptionContent = open(description).read() indexGenerator = IndexGenerator(appName, releaseUrls, releaseNotesContent, descriptionContent, version, releases) index = indexGenerator.get(); tempIndexFile = tempfile.TemporaryFile() tempIndexFile.write(index) tempIndexFile.flush() tempIndexFile.seek(0) return tempIndexFile tempIndexFile = createOTApackage() if not os.path.isdir(otaAppDir): logging.debug("creating dir: "+otaAppDir) os.mkdir(otaAppDir) else: logging.warning("dir: "+otaAppDir+" exists") indexFile = open(os.path.join(otaAppDir,"index.html"),'w') shutil.copyfileobj(tempIndexFile, indexFile) srcIconFileName = findIconFilename() disIconFileName = os.path.join(otaAppDir,"Icon.png") shutil.copy(srcIconFileName,disIconFileName)
[ [ 8, 0, 0.0341, 0.0568, 0, 0.66, 0, 0, 1, 0, 0, 0, 0, 0, 0 ], [ 1, 0, 0.0682, 0.0114, 0, 0.66, 0.0303, 933, 0, 1, 0, 0, 933, 0, 0 ], [ 1, 0, 0.0795, 0.0114, 0, 0.66...
[ "'''\nCreated on 21-03-2011\n\n@author: maciek\n'''", "from IndexGenerator import IndexGenerator", "from optparse import OptionParser", "import os", "import tempfile", "import shutil", "import logging", "logging.basicConfig(level = logging.DEBUG)", "parser = OptionParser()", "parser.add_option('-n...
#! /usr/bin/env python # encoding: utf-8 # waf 1.6.2 VERSION='0.3.3' import sys APPNAME='p2t' top = '.' out = 'build' CPP_SOURCES = ['poly2tri/common/shapes.cc', 'poly2tri/sweep/cdt.cc', 'poly2tri/sweep/advancing_front.cc', 'poly2tri/sweep/sweep_context.cc', 'poly2tri/sweep/sweep.cc', 'testbed/main.cc'] from waflib.Tools.compiler_cxx import cxx_compiler cxx_compiler['win32'] = ['g++'] #Platform specific libs if sys.platform == 'win32': # MS Windows sys_libs = ['glfw', 'opengl32'] elif sys.platform == 'darwin': # Apple OSX sys_libs = ['glfw', 'OpenGL'] else: # GNU/Linux, BSD, etc sys_libs = ['glfw', 'GL'] def options(opt): print(' set_options') opt.load('compiler_cxx') def configure(conf): print(' calling the configuration') conf.load('compiler_cxx') conf.env.CXXFLAGS = ['-O3', '-ffast-math'] conf.env.DEFINES_P2T = ['P2T'] conf.env.LIB_P2T = sys_libs def build(bld): print(' building') bld.program(features = 'cxx cxxprogram', source=CPP_SOURCES, target = 'p2t', uselib = 'P2T')
[ [ 14, 0, 0.1111, 0.0222, 0, 0.66, 0, 557, 1, 0, 0, 0, 0, 3, 0 ], [ 1, 0, 0.1333, 0.0222, 0, 0.66, 0.0909, 509, 0, 1, 0, 0, 509, 0, 0 ], [ 14, 0, 0.1556, 0.0222, 0, ...
[ "VERSION='0.3.3'", "import sys", "APPNAME='p2t'", "top = '.'", "out = 'build'", "CPP_SOURCES = ['poly2tri/common/shapes.cc',\n 'poly2tri/sweep/cdt.cc',\n 'poly2tri/sweep/advancing_front.cc',\n 'poly2tri/sweep/sweep_context.cc',\n 'poly2tri/sweep/swee...
#!/usr/bin/env python # This file is part of django_ip2country. # django_ip2country is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # django_ip2country is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with django_ip2country. If not, see <http://www.gnu.org/licenses/>. # Author: Esteban Feldman esteban.feldman@gmail.com import urllib from zipfile import ZipFile TARGET = 'http://ip-to-country.webhosting.info/downloads/ip-to-country.csv.zip' OUT_FILE_NAME = 'ip-to-country.zip' def reporthook(blocknum, bs, size): elapsed = (blocknum * bs) * 100 / size if elapsed > 100: elapsed = 100 print '%s%%' % elapsed def main(): print 'Downloading ip2country database file' urllib.urlretrieve(TARGET, OUT_FILE_NAME, reporthook=reporthook) print 'Extracting data file' zf = ZipFile(OUT_FILE_NAME) zf.extractall() if __name__ == '__main__': main()
[ [ 1, 0, 0.4615, 0.0256, 0, 0.66, 0, 614, 0, 1, 0, 0, 614, 0, 0 ], [ 1, 0, 0.4872, 0.0256, 0, 0.66, 0.1667, 93, 0, 1, 0, 0, 93, 0, 0 ], [ 14, 0, 0.5641, 0.0256, 0, 0...
[ "import urllib", "from zipfile import ZipFile", "TARGET = 'http://ip-to-country.webhosting.info/downloads/ip-to-country.csv.zip'", "OUT_FILE_NAME = 'ip-to-country.zip'", "def reporthook(blocknum, bs, size):\n\telapsed = (blocknum * bs) * 100 / size\n\tif elapsed > 100: elapsed = 100\n\tprint('%s%%' % elapse...
#!/usr/bin/env python # This file is part of django_ip2country. # django_ip2country is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # django_ip2country is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with django_ip2country. If not, see <http://www.gnu.org/licenses/>. # Author: Esteban Feldman esteban.feldman@gmail.com import csv import os import sys from optparse import OptionParser from django.conf import settings from django_ip2country import models def main(): if not 'DJANGO_SETTINGS_MODULE' in os.environ: print 'DJANGO_SETTINGS_MODULE should be set, exiting.' sys.exit(0) usage = "usage: %prog -p PATH_TO_DB | --path=PATH_TO_DB" parser = OptionParser(usage) parser.add_option('-p', '--path', dest='db_path', help="The path to the project directory.") (options, args) = parser.parse_args() if settings.DATABASE_ENGINE is 'sqlite3': if not options.db_path: parser.error("You must specify the project directory path.") project_dir = os.path.abspath(options.db_path) # or path to the dir. that the db should be in. settings.DATABASE_NAME = os.path.join( project_dir, settings.DATABASE_NAME ) CSV_FILE = 'ip-to-country.csv' # delete all objects print 'This will take a while, so sit back and relax.' models.Ip2Country.objects.all().delete() reader = csv.reader(open(CSV_FILE)) count = 0 for ipf, ipt, cc2, cc3, cname in reader: count += 1 object = models.Ip2Country(count,ipf, ipt, cc2, cc3, cname) object.save() if count % 10000 == 0: print count print count , "inserted. :)" del reader if __name__ == '__main__': main()
[ [ 1, 0, 0.2857, 0.0159, 0, 0.66, 0, 312, 0, 1, 0, 0, 312, 0, 0 ], [ 1, 0, 0.3016, 0.0159, 0, 0.66, 0.1429, 688, 0, 1, 0, 0, 688, 0, 0 ], [ 1, 0, 0.3175, 0.0159, 0, ...
[ "import csv", "import os", "import sys", "from optparse import OptionParser", "from django.conf import settings", "from django_ip2country import models", "def main():\n if not 'DJANGO_SETTINGS_MODULE' in os.environ:\n print('DJANGO_SETTINGS_MODULE should be set, exiting.')\n sys.exit(0)...
# This file is part of django_ip2country. # django_ip2country is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # django_ip2country is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with django_ip2country. If not, see <http://www.gnu.org/licenses/>. # Author: Esteban Feldman esteban.feldman@gmail.com import bigint_patch from django.db import models class Ip2Country(models.Model): #ip_from = models.PositiveIntegerField() #ip_to = models.PositiveIntegerField() ip_from = bigint_patch.BigIntegerField() ip_to = bigint_patch.BigIntegerField() country_code2 = models.CharField(max_length = 2) country_code3 = models.CharField(max_length = 3) country_name = models.CharField(max_length = 50) class Meta: ordering = ['ip_from'] def __unicode__(self): return "%s %s %s" % (self.ip_from ,self.ip_to, self.country_name)
[ [ 1, 0, 0.4474, 0.0263, 0, 0.66, 0, 521, 0, 1, 0, 0, 521, 0, 0 ], [ 1, 0, 0.5, 0.0263, 0, 0.66, 0.5, 40, 0, 1, 0, 0, 40, 0, 0 ], [ 3, 0, 0.7632, 0.3947, 0, 0.66, ...
[ "import bigint_patch", "from django.db import models", "class Ip2Country(models.Model):\n #ip_from = models.PositiveIntegerField()\n #ip_to = models.PositiveIntegerField()\n ip_from = bigint_patch.BigIntegerField()\n ip_to = bigint_patch.BigIntegerField()\n \n country_code2 = models.CharField(...
# This file is part of django_ip2country. # django_ip2country is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # django_ip2country is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with django_ip2country. If not, see <http://www.gnu.org/licenses/>. # Author: Esteban Feldman esteban.feldman@gmail.com from django.contrib import admin from django_ip2country.models import Ip2Country admin.site.register(Ip2Country)
[ [ 1, 0, 0.85, 0.05, 0, 0.66, 0, 302, 0, 1, 0, 0, 302, 0, 0 ], [ 1, 0, 0.9, 0.05, 0, 0.66, 0.5, 6, 0, 1, 0, 0, 6, 0, 0 ], [ 8, 0, 1, 0.05, 0, 0.66, 1, 276, ...
[ "from django.contrib import admin", "from django_ip2country.models import Ip2Country", "admin.site.register(Ip2Country)" ]
# This file is part of django_ip2country. # django_ip2country is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # django_ip2country is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with django_ip2country. If not, see <http://www.gnu.org/licenses/>. # Author: Esteban Feldman esteban.feldman@gmail.com from django_ip2country.models import Ip2Country def ip2long(ip): ip_array = ip.split('.') ip_long = int(ip_array[0]) * 16777216 + int(ip_array[1]) * 65536 + int(ip_array[2]) * 256 + int(ip_array[3]) return ip_long def get_country(value): value = ip2long(value) try: # ip of comment has to be in a range of an ip-to-country object IP_FROM and IP_TO iptc = Ip2Country.objects.get(ip_from__lte=value, ip_to__gte=value) except Ip2Country.DoesNotExist: return None return iptc
[ [ 1, 0, 0.5312, 0.0312, 0, 0.66, 0, 6, 0, 1, 0, 0, 6, 0, 0 ], [ 2, 0, 0.6406, 0.125, 0, 0.66, 0.5, 337, 0, 1, 1, 0, 0, 0, 5 ], [ 14, 1, 0.625, 0.0312, 1, 0.04, ...
[ "from django_ip2country.models import Ip2Country", "def ip2long(ip):\n ip_array = ip.split('.')\n ip_long = int(ip_array[0]) * 16777216 + int(ip_array[1]) * 65536 + int(ip_array[2]) * 256 + int(ip_array[3])\n return ip_long", " ip_array = ip.split('.')", " ip_long = int(ip_array[0]) * 16777216 ...
# This file is part of django_ip2country. # django_ip2country is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # django_ip2country is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with django_ip2country. If not, see <http://www.gnu.org/licenses/>. # Author: Esteban Feldman esteban.feldman@gmail.com
[]
[]
# This file is part of django_ip2country. # django_ip2country is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # django_ip2country is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with django_ip2country. If not, see <http://www.gnu.org/licenses/>. # Author: Esteban Feldman esteban.feldman@gmail.com """module mydjangolib.bigint_patch A fix for the rather well-known ticket #399 in the django project. Create and link to auto-incrementing primary keys of type bigint without having to reload the model instance after saving it to get the ID set in the instance. 
""" from django.core import exceptions from django.conf import settings from django.db import connection from django.db.models import fields from django.utils.translation import ugettext as _ __version__ = "1.0" __author__ = "Florian Leitner" class BigIntegerField(fields.IntegerField): def db_type(self): if settings.DATABASE_ENGINE == 'mysql': return "bigint" elif settings.DATABASE_ENGINE == 'oracle': return "NUMBER(19)" elif settings.DATABASE_ENGINE[:8] == 'postgres': return "bigint" elif settings.DATABASE_ENGINE == 'sqlite3': return "bigint" else: raise NotImplemented def get_internal_type(self): return "BigIntegerField" def to_python(self, value): if value is None: return value try: return long(value) except (TypeError, ValueError): raise exceptions.ValidationError( _("This value must be a long integer.")) class BigAutoField(fields.AutoField): def db_type(self): if settings.DATABASE_ENGINE == 'mysql': return "bigint AUTO_INCREMENT" elif settings.DATABASE_ENGINE == 'oracle': return "NUMBER(19)" elif settings.DATABASE_ENGINE[:8] == 'postgres': return "bigserial" else: raise NotImplemented def get_internal_type(self): return "BigAutoField" def to_python(self, value): if value is None: return value try: return long(value) except (TypeError, ValueError): raise exceptions.ValidationError( _("This value must be a long integer.")) class BigForeignKey(fields.related.ForeignKey): def db_type(self): rel_field = self.rel.get_related_field() # next lines are the "bad tooth" in the original code: if (isinstance(rel_field, BigAutoField) or (not connection.features.related_fields_match_type and isinstance(rel_field, BigIntegerField))): # because it continues here in the django code: # return IntegerField().db_type() # thereby fixing any AutoField as IntegerField return BigIntegerField().db_type() return rel_field.db_type()
[ [ 8, 0, 0.2158, 0.0842, 0, 0.66, 0, 0, 1, 0, 0, 0, 0, 0, 0 ], [ 1, 0, 0.2737, 0.0105, 0, 0.66, 0.1, 913, 0, 1, 0, 0, 913, 0, 0 ], [ 1, 0, 0.2842, 0.0105, 0, 0.66, ...
[ "\"\"\"module mydjangolib.bigint_patch\n\nA fix for the rather well-known ticket #399 in the django project.\n\nCreate and link to auto-incrementing primary keys of type bigint without\nhaving to reload the model instance after saving it to get the ID set in\nthe instance.\n\"\"\"", "from django.core import excep...
#==================================================================== # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # ==================================================================== # # This software consists of voluntary contributions made by many # individuals on behalf of the Apache Software Foundation. For more # information on the Apache Software Foundation, please see # <http://www.apache.org/>. 
# import os import re import tempfile import shutil ignore_pattern = re.compile('^(.svn|target|bin|classes)') java_pattern = re.compile('^.*\.java') annot_pattern = re.compile('import org\.apache\.http\.annotation\.') def process_dir(dir): files = os.listdir(dir) for file in files: f = os.path.join(dir, file) if os.path.isdir(f): if not ignore_pattern.match(file): process_dir(f) else: if java_pattern.match(file): process_source(f) def process_source(filename): tmp = tempfile.mkstemp() tmpfd = tmp[0] tmpfile = tmp[1] try: changed = False dst = os.fdopen(tmpfd, 'w') try: src = open(filename) try: for line in src: if annot_pattern.match(line): changed = True line = line.replace('import org.apache.http.annotation.', 'import net.jcip.annotations.') dst.write(line) finally: src.close() finally: dst.close(); if changed: shutil.move(tmpfile, filename) else: os.remove(tmpfile) except: os.remove(tmpfile) process_dir('.')
[ [ 1, 0, 0.3514, 0.0135, 0, 0.66, 0, 688, 0, 1, 0, 0, 688, 0, 0 ], [ 1, 0, 0.3649, 0.0135, 0, 0.66, 0.1111, 540, 0, 1, 0, 0, 540, 0, 0 ], [ 1, 0, 0.3784, 0.0135, 0, ...
[ "import os", "import re", "import tempfile", "import shutil", "ignore_pattern = re.compile('^(.svn|target|bin|classes)')", "java_pattern = re.compile('^.*\\.java')", "annot_pattern = re.compile('import org\\.apache\\.http\\.annotation\\.')", "def process_dir(dir):\n files = os.listdir(dir)\n for...
import numpy as np import householder as hh # fonction auxiliaire : def completeMat(A,n): "fonction qui sert a completer la matrice A avec des '1' sur l'element diagonale des lignes/colonnes deja traitees" #cette fonction ne modifie pas A si taille(A)=n Id=np.identity(n) i=A.shape[0] for j in np.arange(i): for k in np.arange(i): Id[j+n-i,k+n-i]=A[j,k] return Id # fonction principale : def bidiagTransform(A, test = False): "fonction qui transforme la matrice carree A sous la forme bidiagonale ainsi que les changements de base a gauche et a droite" BD=A n=A.shape[0] Id=np.identity(n) Qleft=Id Qright=Id for i in np.arange(n-1): V1=np.squeeze(np.asarray(BD[i:n,i])) #colonne E1=np.linalg.norm(V1)*Id[i,i:n] #vecteur unitaire de norme=norme(V1) Q1=hh.householderOf(V1,E1) Q1=completeMat(Q1,n) Qleft=np.dot(Qleft,Q1) BD=np.dot(Q1,BD) if (i!=n-2): V2=np.squeeze(np.asarray(BD[i,(i+1):n])) #ligne E2=np.linalg.norm(V2)*Id[i,i:n-1] #vecteur unitaire de norme=norme(V2) Q2=hh.householderOf(V2,E2) Q2= completeMat(Q2,n) Qright=np.dot(Q2,Qright) BD=np.dot(BD,Q2) if(test): print "la matrice temoin A-Qleft*BD*Qright= \n", np.around(A-np.dot(Qleft,np.dot(BD,Qright)),3) return (Qleft,BD,Qright) #complexite: O(n^4) en supposant que la complexite de np.dot() est de n^3.
[ [ 1, 0, 0.0213, 0.0213, 0, 0.66, 0, 954, 0, 1, 0, 0, 954, 0, 0 ], [ 1, 0, 0.0426, 0.0213, 0, 0.66, 0.3333, 583, 0, 1, 0, 0, 583, 0, 0 ], [ 2, 0, 0.2234, 0.2128, 0, ...
[ "import numpy as np", "import householder as hh", "def completeMat(A,n):\n \"fonction qui sert a completer la matrice A avec des '1' sur l'element diagonale des lignes/colonnes deja traitees\"\n #cette fonction ne modifie pas A si taille(A)=n\n \n Id=np.identity(n)\n i=A.shape[0]\n for j in np...
# Partie I : Transformations de Householder import numpy as np from numpy import linalg as la # Question I def computeUtU(U): """Calcule le produit d'un vecteur par sa transposee""" n = len(U) RES = np.zeros([n,n]) for i in range(n): for j in range(n): RES[i,j] = U[i]*U[j] return RES def householderOf(X, Y): """Genere la matrice de Householder associee a X et Y""" V = X-Y # X-Y if (la.norm(V)!=0): # U vaut : ------- U = V/la.norm(V) # ||X-Y|| else: U=X-Y return np.eye(len(X), dtype = int) - 2*computeUtU(U) # Question II def generateU(H): """Calcule le vecteur U a partir du vecteur H""" n = len(H) U = np.zeros(n) T = -(H - np.eye(n))/2 U[0] = np.sqrt(T[0, 0]) for i in np.arange(1,n): U[i] = T[0, i]/U[0] return U def productHX(H, X): """Calcule le produit H.X""" U = generateU(H) RES = np.dot(np.transpose(U),X) return X - 2*U*RES def productHXgen(H, M): RES = np.zeros([len(H), len(H)]) for i in range(len(M)): RES[i] = productHX(H, M[i]) return RES
[ [ 1, 0, 0.0577, 0.0192, 0, 0.66, 0, 954, 0, 1, 0, 0, 954, 0, 0 ], [ 1, 0, 0.0769, 0.0192, 0, 0.66, 0.1667, 954, 0, 1, 0, 0, 954, 0, 0 ], [ 2, 0, 0.2212, 0.1538, 0, ...
[ "import numpy as np", "from numpy import linalg as la", "def computeUtU(U):\n \"\"\"Calcule le produit d'un vecteur par sa transposee\"\"\"\n n = len(U)\n RES = np.zeros([n,n])\n for i in range(n):\n for j in range(n):\n RES[i,j] = U[i]*U[j]\n return RES", " \"\"\"Calcule l...
# Fichier pour tester les differentes parties du projet import householder as hh import bidiagonale as bd import decompositionSVD as svd import compression as comp import numpy as np import matplotlib.pyplot as mp print "#############################################" print "# #" print "# Partie I : Transformations de Householder #" print "# #" print "#############################################" print "Remarque:" print "Dans toute la suite des tests nous utiliserons des affichages arrondis" print "a 2 chiffres apres la virgule pour plus de lisibilite ." Y_test = np.array([0, 0, 5]) X_test = np.array([3, 4, 0]) print "\n" print "Question 1" print "\n" print "On pose:" print "X =", X_test print "Y =", Y_test print "\n" print "la matrice de householder qui envoie X sur Y est:" print "H =\n", np.around(hh.householderOf(X_test, Y_test),2) print "On obtient bien la matrice indiquee dans le sujet" print "\n" print "Question 2" print "\n" print "X =", X_test print "H = \n", np.around(hh.householderOf(X_test, Y_test),2) print "Produit de H par X" print "H.X = \n", np.around(hh.productHX(hh.householderOf(X_test, Y_test), X_test),2) print "On obtient bien le vecteur Y" print "\n" print "#############################################" print "# #" print "# Partie II : Mise sous forme bidiagonale #" print "# #" print "#############################################" A=np.mat('1 2 1 4 5; 2 1 1 3 1 ;1 2 5 1 2 ; 5 1 4 8 6 ; 3 10 4 8 9') print "Matrice a bidiagolnaliser" print "A=\n",A print "Deroulement de l'exectution de bidiagTransform:" print "A chaque tour de boucle on a:" (ql,bd,qr)=bd.bidiagTransform(A,True) print "finalement on obtient" print "Qleft :" print np.around(ql,2) print "--" print "Matrice bidiagonnale BD:" print np.around(bd,2) print "--" print "Qright :" print np.around(qr,2) print "Qleft*BD*Qright=" print np.around(ql*bd*qr,2) print "A=\n",A print "On a bien A=Qleft*BD*Qright" print "\n" print "#############################################" print "# #" print "# 
Partie III : Transformations QR et SVD #" print "# #" print "#############################################" print "Question 1 et 2:" print "Decomposition SVD en utilisant linalg.qr" M = np.mat('-1 -2 0 0 0;0 -5 6 0 0;0 0 15 -2 0 ; 0 0 0 10 -9') print"M=\n",M U, S, V = svd.linalgSVD(M, 20, True) print "Decomposition SVD de M:" print "U =\n",np.around(U,1),"\nS =\n",np.around(S,1),"\nV =\n",np.around(V,1) print "U*S*V=\n",np.around(U*S*V,1) print "M=\n", M print "On a bien U*S*V=M" print "\n" print "Question 4:" print "Decomposition SVD de M en utilisant 'QRdecompForBD'" print "et 'ajust' qui trie la matrice S tout en modifiant U." U, S, V = svd.decompSVD(M, 20) print "U =\n",np.around(U,1),"\nS =\n",np.around(S,1),"\nV =\n",np.around(V,1) print "U*S*V=\n",np.around(U*S*V,1) print "M=\n", np.around(M,1) print "On a bien U*S*V=M" print "\n" print "Question 5:" print "Test de la fonction 'ajust' qui modifie les matrices U de S de tel" print "sorte que les elemnts de S soient positifs, ordonnes de maniere" print "decroissante." print "Soient:" S = np.mat('5 0 0 0 0 ; 0 2 0 0 0 ; 0 0 18 0 0 ; 0 0 0 6 0 ; 0 0 0 0 9') U = np.mat('5 -1 2 -1 0 ; 1 2 8 0 9 ; -2 -8 18 10 -2 ; 20 3 2 6 0 ; 0 2 -8 0 9') print "S=" print S print "U=" print U Ubis, Sbis = svd.ajust(U, S) print "Apres l'execution de 'ajust' sur U et S on obtient:" print "Sbis=" print Sbis print "Ubis=" print Ubis print "U*S :\n",U*S,"\nUbis*Sbis =\n", Ubis*Sbis print "on a bien U*S=Ubis*Sbis" print "\n" print "#####################################################" print "# #" print "# Partie IV : Application a la compression d'image #" print "# #" print "#####################################################" print "Compression de l'image ./earth.png au rang k=30 (par exemple)," print "nMax etant fixe a 30 :" mp.figure(2) img_full = mp.imread("./earth.png") img0=comp.application_img(img_full,30,30) mp.imshow(img0) mp.savefig("earth_30") print "\n" print "Les tests sont maintenant termines." 
print "Vous pouvez aussi tester la fonction qui calcule l'efficacite." print "Nous avons mis ce test en commentaire car le temps de calcul" print "est assez consequent." #Tests pour l'efficacite en fonction du rang k #mp.figure(3) #img_full = mp.imread("../doc/earth.png") #comp.efficiency(img_full,20)
[ [ 1, 0, 0.0267, 0.0067, 0, 0.66, 0, 583, 0, 1, 0, 0, 583, 0, 0 ], [ 1, 0, 0.0333, 0.0067, 0, 0.66, 0.0083, 885, 0, 1, 0, 0, 885, 0, 0 ], [ 1, 0, 0.04, 0.0067, 0, 0....
[ "import householder as hh", "import bidiagonale as bd", "import decompositionSVD as svd", "import compression as comp", "import numpy as np", "import matplotlib.pyplot as mp", "print(\"#############################################\")", "print(\"# #\")", "pri...
# Partie III : Transformations QR et decompostion SVD import numpy as np import householder as hh import matplotlib.pyplot as plt # Question I,II def externDiagNorme(A): """Affiche la norme-2 hors diagonale de la matrice""" n = len(A) p = len(A[0]) res = 0. for i in range(n): for j in range(n): if (i != j): res += A[i,j]**2 return np.sqrt(res) def linalgSVD(BD, nMax, plot = False): """Forme la decomposition SVD de la matrice bidiagonale BD en utilisant linalg.qr""" (n,m) = np.shape(BD) U = np.identity(n) V = np.identity(m) S = np.matrix(BD) tab = [] # va contenir les valeur pour le plot for i in range(nMax): (Q1,R1) = np.linalg.qr(np.matrix.transpose(S)) (Q2,R2) = np.linalg.qr(np.matrix.transpose(R1)) Q1 = np.matrix(Q1) Q2 = np.matrix(Q2) R1 = np.matrix(R1) tab.append(externDiagNorme(R2)) S = np.matrix(R2) U = np.matrix(U*Q2) V = np.matrix(np.matrix.transpose(Q1)*V) #print "V :\n", V #print "S=\n",np.around(S,1), "\n" if(plot): plt.xlabel("nMax") plt.ylabel("norme hors diagonale") plt.title("convergence de la matrice R vers une matrice diagonale") plt.plot(range(nMax), tab) plt.show() plt.savefig("figureQR.png") U,S=ajust(U,S) return U, S, V #complexite: O(nMax*max(complexite(linalg.qr),n^3) # Question IV #fonction auxiliaire def insertMat(A,i,n): """insert la matrice A dans la diagonale de la matrice identite de taille n a la i eme position""" Id=np.identity(n) h=A.shape[0] for j in np.arange(h): for k in np.arange(h): Id[j+i,k+i]=A[j,k] return Id def QRdecompForBD(BD): ### Forme la decomposition QR de la matrice bidiagonale inferieure BD ### R=np.mat(BD,dtype=float) n,m=BD.shape mini=min(n,m) I=np.identity(2) Q=np.identity(n) e=np.array([1,0]) for i in np.arange(mini-1*(n<=m)): Mat=R[i:(i+2),i:(i+2)] Vect=np.squeeze(np.asarray(Mat[0:2,0])) #transformer la colonne en array Q1=hh.householderOf(Vect,np.linalg.norm(Vect)*e) I=np.dot(Q1,Mat) R[i:(i+2),i:(i+2)]=I Q1=insertMat(Q1,i,n) Q=np.dot(Q,Q1) if(n>m): #redimentionnement de Q et R si la matrice initial n'est pas 
carree for i in np.arange(n-m): Q=np.delete(Q,n-i-1,1) R=np.delete(R,n-i-1,0) return (Q,R) #compelexite: O(min(n,m)*n^3) def decompSVD(BD, nMax): """Forme la decomposition SVD de la matrice bidiagonale BD en utilisant QRdecompForBD""" n,m=BD.shape U=np.asmatrix(np.eye(n)) V=np.asmatrix(np.eye(m)) S=BD for i in np.arange(nMax): (Q1,R1)=QRdecompForBD ( np.transpose(S) ) (Q2,R2)=QRdecompForBD ( np.transpose(R1)) S=R2 U=U*Q2 V= np.transpose(Q1)*V U,S=ajust(U,S) #fonction de la question 5 return U, S, V #compelexite: O(nMax*min(n,m)*n^3) # Question V def ajust(U,S): """Modifier les matrices U,S de la decomposition SVD afin que les elements de la matrice S soient positifs, ordonnes de maniere decroissante""" n=S.shape[0] m=S.shape[1] S_diag=np.diag(S) S_trie=sorted(np.abs(S_diag), reverse=True) S_diag=S_diag/S_trie for i in np.arange(min(n,m)): S[i,i]=S_trie[i] U[:,i]=U[:,i]*S_diag[i] return U,S #complexite: O(n^2)
[ [ 1, 0, 0.0238, 0.0079, 0, 0.66, 0, 954, 0, 1, 0, 0, 954, 0, 0 ], [ 1, 0, 0.0317, 0.0079, 0, 0.66, 0.125, 583, 0, 1, 0, 0, 583, 0, 0 ], [ 1, 0, 0.0397, 0.0079, 0, 0...
[ "import numpy as np", "import householder as hh", "import matplotlib.pyplot as plt", "def externDiagNorme(A):\n \"\"\"Affiche la norme-2 hors diagonale de la matrice\"\"\"\n n = len(A)\n p = len(A[0])\n res = 0.\n for i in range(n):\n for j in range(n):\n if (i != j):", " ...
# Partie 4: Application a la compression d'image import numpy as np import bidiagonale as bd import decompositionSVD as svd import matplotlib.pyplot as mp #Question 1 def clean(compressed_img): """mise zero tous les elements inferieurs a valeur e""" e=pow(10,-5) img=compressed_img n=img.shape[0] for i in np.arange(n): for j in np.arange(n): if ( img[i,j,0]<e): img[i,j,0]=0 if ( img[i,j,1]<e): img[i,j,0]=0 if ( img[i,j,2]<e): img[i,j,2]=0 return img def compression(A, nMax, k): (Ql,BD,Qr)=bd.bidiagTransform(A) print " bidiagonalisation: ok" (U, S, V) = svd.decompSVD(BD,nMax) print " decomposition SVD: ok" U = U[:,:k]; S = S[:k,:k]; V = V[:k,:] print " compression: ok" return (np.dot(Ql,U), np.dot(np.dot(S, V), Qr)) def application_img(img_full,nMax,k): """compresser l'image au rang k avec nMax fois transformations QR""" img_k=img_full print "etape 0: extraction des composantes RGB de l'image ..." R=np.asmatrix(img_k[:,:,0]) G=np.asmatrix(img_k[:,:,1]) B=np.asmatrix(img_k[:,:,2]) print "etape 1: compression" print " R ..." (R_Ql, R_Qr) = compression(R,nMax,k) print " G ..." (G_Ql, G_Qr) = compression(G,nMax,k) print " B ..." 
(B_Ql, B_Qr) = compression(B,nMax,k) print "etape 2: reconstruction de l'image" R = np.dot(R_Ql,R_Qr) G = np.dot(G_Ql,G_Qr) B = np.dot(B_Ql,B_Qr) img_k[:,:,0]=R img_k[:,:,1]=G img_k[:,:,2]=B print "etape 3: nettoyage des couleurs indesirables" img_k=clean(img_k) return img_k #Question 3 def distance(img_full,img_compressed): "calcule la <<distance>> entre l'image reelle et l'image compressee" norm=0 n=img_full.shape[0] for i in np.arange(n): for j in np.arange(n): for k in np.arange(3): norm += pow(img_full[i,j,k]-img_compressed[i,j,k],2) return norm def efficiency(img_full,k): "Fonction qui trace l'efficacite de la compression en fonction du rang k" tab=[] for i in np.arange(k): img0=np.copy(img_full) tab.append(distance(img_full,application_img(img0,20,i))) mp.xlabel("k") mp.ylabel("Distance entre l'image reelle et l'image k-compressee") mp.title("Efficacite de la compression en fonction du rang k") mp.plot(range(k),tab) mp.show() mp.savefig("efficacite")
[ [ 1, 0, 0.0385, 0.0096, 0, 0.66, 0, 954, 0, 1, 0, 0, 954, 0, 0 ], [ 1, 0, 0.0481, 0.0096, 0, 0.66, 0.125, 885, 0, 1, 0, 0, 885, 0, 0 ], [ 1, 0, 0.0577, 0.0096, 0, 0...
[ "import numpy as np", "import bidiagonale as bd", "import decompositionSVD as svd", "import matplotlib.pyplot as mp", "def clean(compressed_img):\n\t\"\"\"mise zero tous les elements inferieurs a valeur e\"\"\"\n\te=pow(10,-5) \n\timg=compressed_img\n\tn=img.shape[0]\n\t\n\tfor i in np.arange(n):\n\t\tfor j...
from google.appengine.api import urlfetch from django.utils import simplejson as json class Mendeley: '''Mendeley open client for algos''' REQUEST_TOKEN_URL = 'http://www.mendeley.com/oauth/request_token/' ACCESS_TOKEN_URL = 'http://www.mendeley.com/oauth/access_token/' AUTHORIZATION_URL = 'http://www.mendeley.com/oauth/authorize/' CONSUMER_KEY = "consumer_key=291612bf38c356c144e6c2d146125cba04d8c4a71" CONSUMER_SECRET = "614e9d5d29f5c505aadde5223ba7ed94" GROUP_ID = '981951' PUBLIC_GROUP_METHODS = { "details" : 'http://api.mendeley.com/oapi/documents/groups/' + GROUP_ID , "documents" : 'http://api.mendeley.com/oapi/documents/groups/' + GROUP_ID + '/docs/' } def getGroupDocuments (self): ''' JSON object with the following structure: 'current_page': int. 'documents': array of documents. Each document is a JSON object with the following fields (if available): 'abstract', 'authors' (array), 'editors' (array), 'id', 'issue', 'keywords' (array), 'pages', 'publisher', 'tags' (array), 'title', 'type', 'url', 'uuid' (canonical id), 'volume', 'year'. 'items_per_page': number of documents returned per page. 'total_pages': number of total pages 'total_results': total documents in the group. ''' result = None url = self.PUBLIC_GROUP_METHODS['documents'] + '?details=true&' + self.CONSUMER_KEY response = urlfetch.fetch(url) self.last_status_code = response.status_code if response.status_code == 200: result = json.loads(response.content); return result; def processGroupDocumentsToExhibitJson(self): s = {} content = self.getGroupDocuments(); if content: docs_list = content['documents'] s = {"items" : map(lambda item: item.update({"label" : item.get('id')}) or item , docs_list)} return json.dumps(s) def __init__(self): self.last_status_code = None;
[ [ 1, 0, 0.02, 0.02, 0, 0.66, 0, 279, 0, 1, 0, 0, 279, 0, 0 ], [ 1, 0, 0.04, 0.02, 0, 0.66, 0.5, 944, 0, 1, 0, 0, 944, 0, 0 ], [ 3, 0, 0.54, 0.94, 0, 0.66, 1, ...
[ "from google.appengine.api import urlfetch", "from django.utils import simplejson as json", "class Mendeley:\n '''Mendeley open client for algos'''\n \n REQUEST_TOKEN_URL = 'http://www.mendeley.com/oauth/request_token/'\n ACCESS_TOKEN_URL = 'http://www.mendeley.com/oauth/access_token/'\n AUTHORI...
""" The MIT License Copyright (c) 2007 Leah Culver Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ import cgi import urllib import time import random import urlparse import hmac import binascii VERSION = '1.0' # Hi Blaine! 
HTTP_METHOD = 'GET' SIGNATURE_METHOD = 'PLAINTEXT' class OAuthError(RuntimeError): """Generic exception class.""" def __init__(self, message='OAuth error occured.'): self.message = message def build_authenticate_header(realm=''): """Optional WWW-Authenticate header (401 error)""" return {'WWW-Authenticate': 'OAuth realm="%s"' % realm} def escape(s): """Escape a URL including any /.""" return urllib.quote(s, safe='~') def _utf8_str(s): """Convert unicode to utf-8.""" if isinstance(s, unicode): return s.encode("utf-8") else: return str(s) def generate_timestamp(): """Get seconds since epoch (UTC).""" return int(time.time()) def generate_nonce(length=8): """Generate pseudorandom number.""" return ''.join([str(random.randint(0, 9)) for i in range(length)]) def generate_verifier(length=8): """Generate pseudorandom number.""" return ''.join([str(random.randint(0, 9)) for i in range(length)]) class OAuthConsumer(object): """Consumer of OAuth authentication. OAuthConsumer is a data type that represents the identity of the Consumer via its shared secret with the Service Provider. """ key = None secret = None def __init__(self, key, secret): self.key = key self.secret = secret class OAuthToken(object): """OAuthToken is a data type that represents an End User via either an access or request token. key -- the token secret -- the token secret """ key = None secret = None callback = None callback_confirmed = None verifier = None def __init__(self, key, secret): self.key = key self.secret = secret def set_callback(self, callback): self.callback = callback self.callback_confirmed = 'true' def set_verifier(self, verifier=None): if verifier is not None: self.verifier = verifier else: self.verifier = generate_verifier() def get_callback_url(self): if self.callback and self.verifier: # Append the oauth_verifier. 
parts = urlparse.urlparse(self.callback) scheme, netloc, path, params, query, fragment = parts[:6] if query: query = '%s&oauth_verifier=%s' % (query, self.verifier) else: query = 'oauth_verifier=%s' % self.verifier return urlparse.urlunparse((scheme, netloc, path, params, query, fragment)) return self.callback def to_string(self): data = { 'oauth_token': self.key, 'oauth_token_secret': self.secret, } if self.callback_confirmed is not None: data['oauth_callback_confirmed'] = self.callback_confirmed return urllib.urlencode(data) def from_string(s): """ Returns a token from something like: oauth_token_secret=xxx&oauth_token=xxx """ params = cgi.parse_qs(s, keep_blank_values=False) key = params['oauth_token'][0] secret = params['oauth_token_secret'][0] token = OAuthToken(key, secret) try: token.callback_confirmed = params['oauth_callback_confirmed'][0] except KeyError: pass # 1.0, no callback confirmed. return token from_string = staticmethod(from_string) def __str__(self): return self.to_string() class OAuthRequest(object): """OAuthRequest represents the request and can be serialized. OAuth parameters: - oauth_consumer_key - oauth_token - oauth_signature_method - oauth_signature - oauth_timestamp - oauth_nonce - oauth_version - oauth_verifier ... any additional parameters, as defined by the Service Provider. """ parameters = None # OAuth parameters. 
http_method = HTTP_METHOD http_url = None version = VERSION def __init__(self, http_method=HTTP_METHOD, http_url=None, parameters=None): self.http_method = http_method self.http_url = http_url self.parameters = parameters or {} def set_parameter(self, parameter, value): self.parameters[parameter] = value def get_parameter(self, parameter): try: return self.parameters[parameter] except: raise OAuthError('Parameter not found: %s' % parameter) def _get_timestamp_nonce(self): return self.get_parameter('oauth_timestamp'), self.get_parameter( 'oauth_nonce') def get_nonoauth_parameters(self): """Get any non-OAuth parameters.""" parameters = {} for k, v in self.parameters.iteritems(): # Ignore oauth parameters. if k.find('oauth_') < 0: parameters[k] = v return parameters def to_header(self, realm=''): """Serialize as a header for an HTTPAuth request.""" auth_header = 'OAuth realm="%s"' % realm # Add the oauth parameters. if self.parameters: for k, v in self.parameters.iteritems(): if k[:6] == 'oauth_': auth_header += ', %s="%s"' % (k, escape(str(v))) return {'Authorization': auth_header} def to_postdata(self): """Serialize as post data for a POST request.""" return '&'.join(['%s=%s' % (escape(str(k)), escape(str(v))) \ for k, v in self.parameters.iteritems()]) def to_url(self): """Serialize as a URL for a GET request.""" return '%s?%s' % (self.get_normalized_http_url(), self.to_postdata()) def get_normalized_parameters(self): """Return a string that contains the parameters that must be signed.""" params = self.parameters try: # Exclude the signature if it exists. del params['oauth_signature'] except: pass # Escape key values before sorting. key_values = [(escape(_utf8_str(k)), escape(_utf8_str(v))) \ for k,v in params.items()] # Sort lexicographically, first after key, then after value. key_values.sort() # Combine key value pairs into a string. 
return '&'.join(['%s=%s' % (k, v) for k, v in key_values]) def get_normalized_http_method(self): """Uppercases the http method.""" return self.http_method.upper() def get_normalized_http_url(self): """Parses the URL and rebuilds it to be scheme://host/path.""" parts = urlparse.urlparse(self.http_url) scheme, netloc, path = parts[:3] # Exclude default port numbers. if scheme == 'http' and netloc[-3:] == ':80': netloc = netloc[:-3] elif scheme == 'https' and netloc[-4:] == ':443': netloc = netloc[:-4] return '%s://%s%s' % (scheme, netloc, path) def sign_request(self, signature_method, consumer, token): """Set the signature parameter to the result of build_signature.""" # Set the signature method. self.set_parameter('oauth_signature_method', signature_method.get_name()) # Set the signature. self.set_parameter('oauth_signature', self.build_signature(signature_method, consumer, token)) def build_signature(self, signature_method, consumer, token): """Calls the build signature method within the signature method.""" return signature_method.build_signature(self, consumer, token) def from_request(http_method, http_url, headers=None, parameters=None, query_string=None): """Combines multiple parameter sources.""" if parameters is None: parameters = {} # Headers if headers and 'Authorization' in headers: auth_header = headers['Authorization'] # Check that the authorization header is OAuth. if auth_header[:6] == 'OAuth ': auth_header = auth_header[6:] try: # Get the parameters from the header. header_params = OAuthRequest._split_header(auth_header) parameters.update(header_params) except: raise OAuthError('Unable to parse OAuth parameters from ' 'Authorization header.') # GET or POST query string. if query_string: query_params = OAuthRequest._split_url_string(query_string) parameters.update(query_params) # URL parameters. 
param_str = urlparse.urlparse(http_url)[4] # query url_params = OAuthRequest._split_url_string(param_str) parameters.update(url_params) if parameters: return OAuthRequest(http_method, http_url, parameters) return None from_request = staticmethod(from_request) def from_consumer_and_token(oauth_consumer, token=None, callback=None, verifier=None, http_method=HTTP_METHOD, http_url=None, parameters=None): if not parameters: parameters = {} defaults = { 'oauth_consumer_key': oauth_consumer.key, 'oauth_timestamp': generate_timestamp(), 'oauth_nonce': generate_nonce(), 'oauth_version': OAuthRequest.version, } defaults.update(parameters) parameters = defaults if token: parameters['oauth_token'] = token.key if token.callback: parameters['oauth_callback'] = token.callback # 1.0a support for verifier. if verifier: parameters['oauth_verifier'] = verifier elif callback: # 1.0a support for callback in the request token request. parameters['oauth_callback'] = callback return OAuthRequest(http_method, http_url, parameters) from_consumer_and_token = staticmethod(from_consumer_and_token) def from_token_and_callback(token, callback=None, http_method=HTTP_METHOD, http_url=None, parameters=None): if not parameters: parameters = {} parameters['oauth_token'] = token.key if callback: parameters['oauth_callback'] = callback return OAuthRequest(http_method, http_url, parameters) from_token_and_callback = staticmethod(from_token_and_callback) def _split_header(header): """Turn Authorization: header into parameters.""" params = {} parts = header.split(',') for param in parts: # Ignore realm parameter. if param.find('realm') > -1: continue # Remove whitespace. param = param.strip() # Split key-value. param_parts = param.split('=', 1) # Remove quotes and unescape the value. 
params[param_parts[0]] = urllib.unquote(param_parts[1].strip('\"')) return params _split_header = staticmethod(_split_header) def _split_url_string(param_str): """Turn URL string into parameters.""" parameters = cgi.parse_qs(param_str, keep_blank_values=False) for k, v in parameters.iteritems(): parameters[k] = urllib.unquote(v[0]) return parameters _split_url_string = staticmethod(_split_url_string) class OAuthServer(object): """A worker to check the validity of a request against a data store.""" timestamp_threshold = 300 # In seconds, five minutes. version = VERSION signature_methods = None data_store = None def __init__(self, data_store=None, signature_methods=None): self.data_store = data_store self.signature_methods = signature_methods or {} def set_data_store(self, data_store): self.data_store = data_store def get_data_store(self): return self.data_store def add_signature_method(self, signature_method): self.signature_methods[signature_method.get_name()] = signature_method return self.signature_methods def fetch_request_token(self, oauth_request): """Processes a request_token request and returns the request token on success. """ try: # Get the request token for authorization. token = self._get_token(oauth_request, 'request') except OAuthError: # No token required for the initial token request. version = self._get_version(oauth_request) consumer = self._get_consumer(oauth_request) try: callback = self.get_callback(oauth_request) except OAuthError: callback = None # 1.0, no callback specified. self._check_signature(oauth_request, consumer, None) # Fetch a new token. token = self.data_store.fetch_request_token(consumer, callback) return token def fetch_access_token(self, oauth_request): """Processes an access_token request and returns the access token on success. """ version = self._get_version(oauth_request) consumer = self._get_consumer(oauth_request) try: verifier = self._get_verifier(oauth_request) except OAuthError: verifier = None # Get the request token. 
token = self._get_token(oauth_request, 'request') self._check_signature(oauth_request, consumer, token) new_token = self.data_store.fetch_access_token(consumer, token, verifier) return new_token def verify_request(self, oauth_request): """Verifies an api call and checks all the parameters.""" # -> consumer and token version = self._get_version(oauth_request) consumer = self._get_consumer(oauth_request) # Get the access token. token = self._get_token(oauth_request, 'access') self._check_signature(oauth_request, consumer, token) parameters = oauth_request.get_nonoauth_parameters() return consumer, token, parameters def authorize_token(self, token, user): """Authorize a request token.""" return self.data_store.authorize_request_token(token, user) def get_callback(self, oauth_request): """Get the callback URL.""" return oauth_request.get_parameter('oauth_callback') def build_authenticate_header(self, realm=''): """Optional support for the authenticate header.""" return {'WWW-Authenticate': 'OAuth realm="%s"' % realm} def _get_version(self, oauth_request): """Verify the correct version request for this server.""" try: version = oauth_request.get_parameter('oauth_version') except: version = VERSION if version and version != self.version: raise OAuthError('OAuth version %s not supported.' % str(version)) return version def _get_signature_method(self, oauth_request): """Figure out the signature with some defaults.""" try: signature_method = oauth_request.get_parameter( 'oauth_signature_method') except: signature_method = SIGNATURE_METHOD try: # Get the signature method object. 
signature_method = self.signature_methods[signature_method] except: signature_method_names = ', '.join(self.signature_methods.keys()) raise OAuthError('Signature method %s not supported try one of the ' 'following: %s' % (signature_method, signature_method_names)) return signature_method def _get_consumer(self, oauth_request): consumer_key = oauth_request.get_parameter('oauth_consumer_key') consumer = self.data_store.lookup_consumer(consumer_key) if not consumer: raise OAuthError('Invalid consumer.') return consumer def _get_token(self, oauth_request, token_type='access'): """Try to find the token for the provided request token key.""" token_field = oauth_request.get_parameter('oauth_token') token = self.data_store.lookup_token(token_type, token_field) if not token: raise OAuthError('Invalid %s token: %s' % (token_type, token_field)) return token def _get_verifier(self, oauth_request): return oauth_request.get_parameter('oauth_verifier') def _check_signature(self, oauth_request, consumer, token): timestamp, nonce = oauth_request._get_timestamp_nonce() self._check_timestamp(timestamp) self._check_nonce(consumer, token, nonce) signature_method = self._get_signature_method(oauth_request) try: signature = oauth_request.get_parameter('oauth_signature') except: raise OAuthError('Missing signature.') # Validate the signature. valid_sig = signature_method.check_signature(oauth_request, consumer, token, signature) if not valid_sig: key, base = signature_method.build_signature_base_string( oauth_request, consumer, token) raise OAuthError('Invalid signature. 
Expected signature base ' 'string: %s' % base) built = signature_method.build_signature(oauth_request, consumer, token) def _check_timestamp(self, timestamp): """Verify that timestamp is recentish.""" timestamp = int(timestamp) now = int(time.time()) lapsed = abs(now - timestamp) if lapsed > self.timestamp_threshold: raise OAuthError('Expired timestamp: given %d and now %s has a ' 'greater difference than threshold %d' % (timestamp, now, self.timestamp_threshold)) def _check_nonce(self, consumer, token, nonce): """Verify that the nonce is uniqueish.""" nonce = self.data_store.lookup_nonce(consumer, token, nonce) if nonce: raise OAuthError('Nonce already used: %s' % str(nonce)) class OAuthClient(object): """OAuthClient is a worker to attempt to execute a request.""" consumer = None token = None def __init__(self, oauth_consumer, oauth_token): self.consumer = oauth_consumer self.token = oauth_token def get_consumer(self): return self.consumer def get_token(self): return self.token def fetch_request_token(self, oauth_request): """-> OAuthToken.""" raise NotImplementedError def fetch_access_token(self, oauth_request): """-> OAuthToken.""" raise NotImplementedError def access_resource(self, oauth_request): """-> Some protected resource.""" raise NotImplementedError class OAuthDataStore(object): """A database abstraction used to lookup consumers and tokens.""" def lookup_consumer(self, key): """-> OAuthConsumer.""" raise NotImplementedError def lookup_token(self, oauth_consumer, token_type, token_token): """-> OAuthToken.""" raise NotImplementedError def lookup_nonce(self, oauth_consumer, oauth_token, nonce): """-> OAuthToken.""" raise NotImplementedError def fetch_request_token(self, oauth_consumer, oauth_callback): """-> OAuthToken.""" raise NotImplementedError def fetch_access_token(self, oauth_consumer, oauth_token, oauth_verifier): """-> OAuthToken.""" raise NotImplementedError def authorize_request_token(self, oauth_token, user): """-> OAuthToken.""" raise 
NotImplementedError class OAuthSignatureMethod(object): """A strategy class that implements a signature method.""" def get_name(self): """-> str.""" raise NotImplementedError def build_signature_base_string(self, oauth_request, oauth_consumer, oauth_token): """-> str key, str raw.""" raise NotImplementedError def build_signature(self, oauth_request, oauth_consumer, oauth_token): """-> str.""" raise NotImplementedError def check_signature(self, oauth_request, consumer, token, signature): built = self.build_signature(oauth_request, consumer, token) return built == signature class OAuthSignatureMethod_HMAC_SHA1(OAuthSignatureMethod): def get_name(self): return 'HMAC-SHA1' def build_signature_base_string(self, oauth_request, consumer, token): sig = ( escape(oauth_request.get_normalized_http_method()), escape(oauth_request.get_normalized_http_url()), escape(oauth_request.get_normalized_parameters()), ) key = '%s&' % escape(consumer.secret) if token: key += escape(token.secret) raw = '&'.join(sig) return key, raw def build_signature(self, oauth_request, consumer, token): """Builds the base signature string.""" key, raw = self.build_signature_base_string(oauth_request, consumer, token) # HMAC object. try: import hashlib # 2.5 hashed = hmac.new(key, raw, hashlib.sha1) except: import sha # Deprecated hashed = hmac.new(key, raw, sha) # Calculate the digest base 64. return binascii.b2a_base64(hashed.digest())[:-1] class OAuthSignatureMethod_PLAINTEXT(OAuthSignatureMethod): def get_name(self): return 'PLAINTEXT' def build_signature_base_string(self, oauth_request, consumer, token): """Concatenates the consumer key and secret.""" sig = '%s&' % escape(consumer.secret) if token: sig = sig + escape(token.secret) return sig, sig def build_signature(self, oauth_request, consumer, token): key, raw = self.build_signature_base_string(oauth_request, consumer, token) return key
[ [ 8, 0, 0.0183, 0.0351, 0, 0.66, 0, 0, 1, 0, 0, 0, 0, 0, 0 ], [ 1, 0, 0.0382, 0.0015, 0, 0.66, 0.0385, 934, 0, 1, 0, 0, 934, 0, 0 ], [ 1, 0, 0.0397, 0.0015, 0, 0.66...
[ "\"\"\"\nThe MIT License\n\nCopyright (c) 2007 Leah Culver\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights", "import cgi", ...
from django.conf.urls.defaults import * urlpatterns = patterns('articles.views', (r'^$', 'index'), (r'^articles.js$', 'get_articles_json') )
[ [ 1, 0, 0.1667, 0.1667, 0, 0.66, 0, 341, 0, 1, 0, 0, 341, 0, 0 ], [ 14, 0, 0.75, 0.6667, 0, 0.66, 1, 990, 3, 3, 0, 0, 75, 10, 1 ] ]
[ "from django.conf.urls.defaults import *", "urlpatterns = patterns('articles.views',\n (r'^$', 'index'),\n (r'^articles.js$', 'get_articles_json')\n)" ]
from django.core.cache import cache from django import http from django.contrib.auth.forms import UserCreationForm from django.http import HttpResponseRedirect from django.views.generic.simple import direct_to_template from articles.Mendeley import Mendeley def get_articles_json(request): mendeley = Mendeley(); exhibit_data = mendeley.processGroupDocumentsToExhibitJson() return http.HttpResponse(exhibit_data, content_type='application/json') def index(request): title_page = 'Articles' return direct_to_template(request, 'articles/index.html', {'title_page' : title_page} )
[ [ 1, 0, 0.0667, 0.0667, 0, 0.66, 0, 734, 0, 1, 0, 0, 734, 0, 0 ], [ 1, 0, 0.1333, 0.0667, 0, 0.66, 0.1429, 294, 0, 1, 0, 0, 294, 0, 0 ], [ 1, 0, 0.2, 0.0667, 0, 0.6...
[ "from django.core.cache import cache", "from django import http", "from django.contrib.auth.forms import UserCreationForm", "from django.http import HttpResponseRedirect", "from django.views.generic.simple import direct_to_template", "from articles.Mendeley import Mendeley", "def get_articles_json(reque...
from django.conf.urls.defaults import * urlpatterns = patterns('massmedia.views', (r'^$', 'index') )
[ [ 1, 0, 0.2, 0.2, 0, 0.66, 0, 341, 0, 1, 0, 0, 341, 0, 0 ], [ 14, 0, 0.8, 0.6, 0, 0.66, 1, 990, 3, 2, 0, 0, 75, 10, 1 ] ]
[ "from django.conf.urls.defaults import *", "urlpatterns = patterns('massmedia.views',\n (r'^$', 'index')\n)" ]
# -*- encoding: utf-8 -*- from django.core.cache import cache from django.contrib.auth.forms import UserCreationForm from django.views.generic.simple import direct_to_template from django.http import HttpResponseRedirect from django.views.generic.simple import direct_to_template def index(request): title_page = "Mitjans de comunicació" return direct_to_template(request, 'massmedia/index.html', {'title_page' : title_page} )
[ [ 1, 0, 0.2, 0.1, 0, 0.66, 0, 734, 0, 1, 0, 0, 734, 0, 0 ], [ 1, 0, 0.3, 0.1, 0, 0.66, 0.2, 579, 0, 1, 0, 0, 579, 0, 0 ], [ 1, 0, 0.4, 0.1, 0, 0.66, 0.4, 95...
[ "from django.core.cache import cache", "from django.contrib.auth.forms import UserCreationForm", "from django.views.generic.simple import direct_to_template", "from django.http import HttpResponseRedirect", "from django.views.generic.simple import direct_to_template", "def index(request):\n title_page ...
#!/usr/bin/env python from django.core.management import execute_manager try: import settings # Assumed to be in the same directory. except ImportError: import sys sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__) sys.exit(1) if __name__ == "__main__": execute_manager(settings)
[ [ 1, 0, 0.1818, 0.0909, 0, 0.66, 0, 879, 0, 1, 0, 0, 879, 0, 0 ], [ 7, 0, 0.5, 0.5455, 0, 0.66, 0.5, 0, 0, 1, 0, 0, 0, 0, 2 ], [ 1, 1, 0.3636, 0.0909, 1, 0.99, ...
[ "from django.core.management import execute_manager", "try:\n import settings # Assumed to be in the same directory.\nexcept ImportError:\n import sys\n sys.stderr.write(\"Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\\nYou'll have to run dj...
from django.conf.urls.defaults import * urlpatterns = patterns('tractament.views', (r'^$', 'index'), )
[ [ 1, 0, 0.2, 0.2, 0, 0.66, 0, 341, 0, 1, 0, 0, 341, 0, 0 ], [ 14, 0, 0.8, 0.6, 0, 0.66, 1, 990, 3, 2, 0, 0, 75, 10, 1 ] ]
[ "from django.conf.urls.defaults import *", "urlpatterns = patterns('tractament.views',\n (r'^$', 'index'),\n)" ]
from django.views.generic.simple import direct_to_template from contact.contact_form import ContactForm def index(request): title_page = 'Tractament' if request.method == 'GET': # If the form has been submitted... form = ContactForm({'subject' : '[web algos] Formulari de tractament'}) # An unbound form return direct_to_template(request, 'tractament/index.html' , {'title_page' : title_page , 'form' : form, 'form_subject': '[web algos] Formulari de tractament'} )
[ [ 1, 0, 0.0769, 0.0769, 0, 0.66, 0, 956, 0, 1, 0, 0, 956, 0, 0 ], [ 1, 0, 0.1538, 0.0769, 0, 0.66, 0.5, 151, 0, 1, 0, 0, 151, 0, 0 ], [ 2, 0, 0.6538, 0.7692, 0, 0.6...
[ "from django.views.generic.simple import direct_to_template", "from contact.contact_form import ContactForm", "def index(request):\n title_page = 'Tractament'\n \n if request.method == 'GET': # If the form has been submitted...\n form = ContactForm({'subject' : '[web algos] Formulari de tractame...
import os try: from cStringIO import StringIO except ImportError: from StringIO import StringIO from django.conf import settings from django.core.files.base import File from django.core.files.storage import Storage from django.core.files.uploadedfile import UploadedFile from django.core.files.uploadhandler import FileUploadHandler, \ StopFutureHandlers from django.core.exceptions import ImproperlyConfigured from django.http import HttpResponse from django.utils.encoding import smart_str, force_unicode from google.appengine.ext.blobstore import BlobInfo, BlobKey, delete, \ create_upload_url, BLOB_KEY_HEADER, BLOB_RANGE_HEADER, BlobReader def prepare_upload(request, url, **kwargs): return create_upload_url(url), {} def serve_file(request, file, save_as, content_type, **kwargs): if hasattr(file, 'file') and hasattr(file.file, 'blobstore_info'): blobkey = file.file.blobstore_info.key() elif hasattr(file, 'blobstore_info'): blobkey = file.blobstore_info.key() else: raise ValueError("The provided file can't be served via the " "Google App Engine Blobstore.") response = HttpResponse(content_type=content_type) response[BLOB_KEY_HEADER] = str(blobkey) response['Accept-Ranges'] = 'bytes' http_range = request.META.get('HTTP_RANGE') if http_range is not None: response[BLOB_RANGE_HEADER] = http_range if save_as: response['Content-Disposition'] = smart_str(u'attachment; filename=%s' % save_as) if file.size is not None: response['Content-Length'] = file.size return response class BlobstoreStorage(Storage): """Google App Engine Blobstore storage backend""" def _open(self, name, mode='rb'): return BlobstoreFile(name, mode, self) def _save(self, name, content): name = name.replace('\\', '/') if hasattr(content, 'file') and hasattr(content.file, 'blobstore_info'): data = content.file.blobstore_info elif hasattr(content, 'blobstore_info'): data = content.blobstore_info else: raise ValueError("The App Engine storage backend only supports " "BlobstoreFile instances or File instances " 
"whose file attribute is a BlobstoreFile.") if isinstance(data, (BlobInfo, BlobKey)): # We change the file name to the BlobKey's str() value if isinstance(data, BlobInfo): data = data.key() return '%s/%s' % (data, name.lstrip('/')) else: raise ValueError("The App Engine Blobstore only supports " "BlobInfo values. Data can't be uploaded " "directly. You have to use the file upload " "handler.") def delete(self, name): delete(self._get_key(name)) def exists(self, name): return self._get_blobinfo(name) is not None def size(self, name): return self._get_blobinfo(name).size def url(self, name): raise NotImplementedError() def get_valid_name(self, name): return force_unicode(name).strip().replace('\\', '/') def get_available_name(self, name): return name.replace('\\', '/') def _get_key(self, name): return BlobKey(name.split('/', 1)[0]) def _get_blobinfo(self, name): return BlobInfo.get(self._get_key(name)) class BlobstoreFile(File): def __init__(self, name, mode, storage): self.name = name self._storage = storage self._mode = mode self.blobstore_info = storage._get_blobinfo(name) @property def size(self): return self.blobstore_info.size def write(self, content): raise NotImplementedError() @property def file(self): if not hasattr(self, '_file'): self._file = BlobReader(self.blobstore_info.key()) return self._file class BlobstoreFileUploadHandler(FileUploadHandler): """ File upload handler for the Google App Engine Blobstore """ def new_file(self, *args, **kwargs): super(BlobstoreFileUploadHandler, self).new_file(*args, **kwargs) blobkey = self.content_type_extra.get('blob-key') self.active = blobkey is not None if self.active: self.blobkey = BlobKey(blobkey) raise StopFutureHandlers() def receive_data_chunk(self, raw_data, start): """ Add the data to the StringIO file. """ if not self.active: return raw_data def file_complete(self, file_size): """ Return a file object if we're activated. 
""" if not self.active: return return BlobstoreUploadedFile( blobinfo=BlobInfo(self.blobkey), charset=self.charset) class BlobstoreUploadedFile(UploadedFile): """ A file uploaded into memory (i.e. stream-to-memory). """ def __init__(self, blobinfo, charset): super(BlobstoreUploadedFile, self).__init__( BlobReader(blobinfo.key()), blobinfo.filename, blobinfo.content_type, blobinfo.size, charset) self.blobstore_info = blobinfo def open(self, mode=None): pass def chunks(self, chunk_size=1024*128): self.file.seek(0) while True: content = self.read(chunk_size) if not content: break yield content def multiple_chunks(self, chunk_size=1024*128): return True
[ [ 1, 0, 0.0059, 0.0059, 0, 0.66, 0, 688, 0, 1, 0, 0, 688, 0, 0 ], [ 7, 0, 0.0266, 0.0237, 0, 0.66, 0.0625, 0, 0, 1, 0, 0, 0, 0, 0 ], [ 1, 1, 0.0237, 0.0059, 1, 0.95...
[ "import os", "try:\n from cStringIO import StringIO\nexcept ImportError:\n from StringIO import StringIO", " from cStringIO import StringIO", " from StringIO import StringIO", "from django.conf import settings", "from django.core.files.base import File", "from django.core.files.storage impor...
from .testmodels import OrderedModel from django.test import TestCase class OrderTest(TestCase): def create_ordered_model_items(self): pks = [] priorities = [5, 2, 9, 1] for pk, priority in enumerate(priorities): pk += 1 model = OrderedModel(pk=pk, priority=priority) model.save() pks.append(model.pk) return pks, priorities def test_default_order(self): pks, priorities = self.create_ordered_model_items() self.assertEquals([item.priority for item in OrderedModel.objects.all()], sorted(priorities, reverse=True)) def test_override_default_order(self): pks, priorities = self.create_ordered_model_items() self.assertEquals([item.priority for item in OrderedModel.objects.all().order_by('priority')], sorted(priorities)) def test_remove_default_order(self): pks, priorities = self.create_ordered_model_items() self.assertEquals([item.pk for item in OrderedModel.objects.all().order_by()], sorted(pks)) def test_order_with_pk_filter(self): pks, priorities = self.create_ordered_model_items() self.assertEquals([item.priority for item in OrderedModel.objects.filter(pk__in=pks)], sorted(priorities, reverse=True)) # test with id__in self.assertEquals([item.priority for item in OrderedModel.objects.filter(id__in=pks)], sorted(priorities, reverse=True)) # test reverse self.assertEquals([item.priority for item in OrderedModel.objects.filter( pk__in=pks).reverse()], sorted(priorities, reverse=False)) def test_remove_default_order_with_pk_filter(self): pks, priorities = self.create_ordered_model_items() self.assertEquals([item.priority for item in OrderedModel.objects.filter(pk__in=pks).order_by()], priorities) # TODO: test multiple orders
[ [ 1, 0, 0.0175, 0.0175, 0, 0.66, 0, 808, 0, 1, 0, 0, 808, 0, 0 ], [ 1, 0, 0.0351, 0.0175, 0, 0.66, 0.5, 944, 0, 1, 0, 0, 944, 0, 0 ], [ 3, 0, 0.5088, 0.8947, 0, 0.6...
[ "from .testmodels import OrderedModel", "from django.test import TestCase", "class OrderTest(TestCase):\n def create_ordered_model_items(self):\n pks = []\n priorities = [5, 2, 9, 1]\n for pk, priority in enumerate(priorities):\n pk += 1\n model = OrderedModel(pk=pk...
from .testmodels import DecimalModel from django.test import TestCase from decimal import Decimal D = Decimal class DecimalTest(TestCase): DECIMALS = D("12345.6789"), D("5"), D("345.67"), D("45.6"), D("2345.678"), def setUp(self): for d in self.DECIMALS: DecimalModel(decimal=d).save() def test_filter(self): d = DecimalModel.objects.get(decimal=D("5.0")) self.assertTrue(isinstance(d.decimal, Decimal)) self.assertEquals(str(d.decimal), "5.00") d = DecimalModel.objects.get(decimal=D("45.60")) self.assertEquals(str(d.decimal), "45.60") # Filter argument should be converted to Decimal with 2 decimal_places d = DecimalModel.objects.get(decimal="0000345.67333333333333333") self.assertEquals(str(d.decimal), "345.67") def test_order(self): rows = DecimalModel.objects.all().order_by('decimal') values = list(d.decimal for d in rows) self.assertEquals(values, sorted(values))
[ [ 1, 0, 0.0333, 0.0333, 0, 0.66, 0, 808, 0, 1, 0, 0, 808, 0, 0 ], [ 1, 0, 0.0667, 0.0333, 0, 0.66, 0.25, 944, 0, 1, 0, 0, 944, 0, 0 ], [ 1, 0, 0.1333, 0.0333, 0, 0....
[ "from .testmodels import DecimalModel", "from django.test import TestCase", "from decimal import Decimal", "D = Decimal", "class DecimalTest(TestCase):\n DECIMALS = D(\"12345.6789\"), D(\"5\"), D(\"345.67\"), D(\"45.6\"), D(\"2345.678\"),\n\n def setUp(self):\n for d in self.DECIMALS:\n ...
from .testmodels import FieldsWithOptionsModel, EmailModel, DateTimeModel, OrderedModel from ..db.utils import get_cursor import datetime, time from django.test import TestCase from django.db.models import Q from django.db.utils import DatabaseError from djangoappengine.db.utils import set_cursor from djangoappengine.tests.testmodels import BlobModel from google.appengine.api.datastore import Get, Key class FilterTest(TestCase): floats = [5.3, 2.6, 9.1, 1.58] emails = ['app-engine@scholardocs.com', 'sharingan@uchias.com', 'rinnengan@sage.de', 'rasengan@naruto.com'] datetimes = [datetime.datetime(2010, 1, 1, 0, 0, 0, 0), datetime.datetime(2010, 12, 31, 23, 59, 59, 999999), datetime.datetime(2011, 1, 1, 0, 0, 0, 0), datetime.datetime(2013, 7, 28, 22, 30, 20, 50)] def setUp(self): for index, (float, email, datetime_value) in enumerate(zip(FilterTest.floats, FilterTest.emails, FilterTest.datetimes)): # ensure distinct times when saving entities time.sleep(0.01) self.last_save_time = datetime.datetime.now().time() ordered_instance = OrderedModel(priority=index, pk=index + 1) ordered_instance.save() FieldsWithOptionsModel(floating_point=float, integer=int(float), email=email, time=self.last_save_time, foreign_key=ordered_instance).save() EmailModel(email=email).save() DateTimeModel(datetime=datetime_value).save() def test_startswith(self): self.assertEquals([entity.email for entity in FieldsWithOptionsModel.objects.filter( email__startswith='r').order_by('email')], ['rasengan@naruto.com', 'rinnengan@sage.de']) self.assertEquals([entity.email for entity in EmailModel.objects.filter( email__startswith='r').order_by('email')], ['rasengan@naruto.com', 'rinnengan@sage.de']) def test_gt(self): # test gt on float self.assertEquals([entity.floating_point for entity in FieldsWithOptionsModel.objects.filter( floating_point__gt=3.1).order_by('floating_point')], [5.3, 9.1]) # test gt on integer self.assertEquals([entity.integer for entity in FieldsWithOptionsModel.objects.filter( 
integer__gt=3).order_by('integer')], [5, 9]) # test filter on primary_key field self.assertEquals([entity.email for entity in FieldsWithOptionsModel.objects.filter(email__gt='as'). order_by('email')], ['rasengan@naruto.com', 'rinnengan@sage.de', 'sharingan@uchias.com', ]) # test ForeignKeys with id self.assertEquals(sorted([entity.email for entity in FieldsWithOptionsModel.objects.filter( foreign_key__gt=2)]), ['rasengan@naruto.com', 'rinnengan@sage.de', ]) # and with instance ordered_instance = OrderedModel.objects.get(priority=1) self.assertEquals(sorted([entity.email for entity in FieldsWithOptionsModel.objects.filter( foreign_key__gt=ordered_instance)]), ['rasengan@naruto.com', 'rinnengan@sage.de', ]) def test_lt(self): # test lt on float self.assertEquals([entity.floating_point for entity in FieldsWithOptionsModel.objects.filter( floating_point__lt=3.1).order_by('floating_point')], [1.58, 2.6]) # test lt on integer self.assertEquals([entity.integer for entity in FieldsWithOptionsModel.objects.filter( integer__lt=3).order_by('integer')], [1, 2]) # test filter on primary_key field self.assertEquals([entity.email for entity in FieldsWithOptionsModel.objects.filter(email__lt='as'). 
order_by('email')], ['app-engine@scholardocs.com', ]) # filter on datetime self.assertEquals([entity.email for entity in FieldsWithOptionsModel.objects.filter( time__lt=self.last_save_time).order_by('time')], ['app-engine@scholardocs.com', 'sharingan@uchias.com', 'rinnengan@sage.de',]) # test ForeignKeys with id self.assertEquals(sorted([entity.email for entity in FieldsWithOptionsModel.objects.filter( foreign_key__lt=3)]), ['app-engine@scholardocs.com', 'sharingan@uchias.com']) # and with instance ordered_instance = OrderedModel.objects.get(priority=2) self.assertEquals(sorted([entity.email for entity in FieldsWithOptionsModel.objects.filter( foreign_key__lt=ordered_instance)]), ['app-engine@scholardocs.com', 'sharingan@uchias.com']) def test_gte(self): # test gte on float self.assertEquals([entity.floating_point for entity in FieldsWithOptionsModel.objects.filter( floating_point__gte=2.6).order_by('floating_point')], [2.6, 5.3, 9.1]) # test gte on integer self.assertEquals([entity.integer for entity in FieldsWithOptionsModel.objects.filter( integer__gte=2).order_by('integer')], [2, 5, 9]) # test filter on primary_key field self.assertEquals([entity.email for entity in FieldsWithOptionsModel.objects.filter( email__gte='rinnengan@sage.de').order_by('email')], ['rinnengan@sage.de', 'sharingan@uchias.com', ]) def test_lte(self): # test lte on float self.assertEquals([entity.floating_point for entity in FieldsWithOptionsModel.objects.filter( floating_point__lte=5.3).order_by('floating_point')], [1.58, 2.6, 5.3]) # test lte on integer self.assertEquals([entity.integer for entity in FieldsWithOptionsModel.objects.filter( integer__lte=5).order_by('integer')], [1, 2, 5]) # test filter on primary_key field self.assertEquals([entity.email for entity in FieldsWithOptionsModel.objects.filter( email__lte='rinnengan@sage.de').order_by('email')], ['app-engine@scholardocs.com', 'rasengan@naruto.com', 'rinnengan@sage.de']) def test_equals(self): # test equality filter on 
primary_key field self.assertEquals([entity.email for entity in FieldsWithOptionsModel.objects.filter( email='rinnengan@sage.de').order_by('email')], ['rinnengan@sage.de']) def test_is_null(self): self.assertEquals(FieldsWithOptionsModel.objects.filter( floating_point__isnull=True).count(), 0) FieldsWithOptionsModel(integer=5.4, email='shinra.tensai@sixpaths.com', time=datetime.datetime.now().time()).save() self.assertEquals(FieldsWithOptionsModel.objects.filter( floating_point__isnull=True).count(), 1) # XXX: These filters will not work because of a Django bug # self.assertEquals(FieldsWithOptionsModel.objects.filter( # foreign_key=None).count(), 1) # (it uses left outer joins if checked against isnull # self.assertEquals(FieldsWithOptionsModel.objects.filter( # foreign_key__isnull=True).count(), 1) def test_exclude(self): self.assertEquals([entity.email for entity in FieldsWithOptionsModel.objects.all().exclude( floating_point__lt=9.1).order_by('floating_point')], ['rinnengan@sage.de', ]) # test exclude with foreignKey ordered_instance = OrderedModel.objects.get(priority=1) self.assertEquals(sorted([entity.email for entity in FieldsWithOptionsModel.objects.all().exclude( foreign_key__gt=ordered_instance)]), ['app-engine@scholardocs.com', 'sharingan@uchias.com',]) def test_exclude_pk(self): self.assertEquals([entity.pk for entity in OrderedModel.objects.exclude(pk__in=[2, 3]) .order_by('pk')], [1, 4]) def test_chained_filter(self): # additionally tests count :) self.assertEquals(FieldsWithOptionsModel.objects.filter( floating_point__lt=5.3, floating_point__gt=2.6). count(), 0) # test across multiple columns. 
On app engine only one filter is allowed # to be an inequality filter self.assertEquals([(entity.floating_point, entity.integer) for entity in FieldsWithOptionsModel.objects.filter( floating_point__lte=5.3, integer=2).order_by( 'floating_point')], [(2.6, 2), ]) # test multiple filters including the primary_key field self.assertEquals([entity.email for entity in FieldsWithOptionsModel.objects.filter( email__gte='rinnengan@sage.de', integer=2).order_by( 'email')], ['sharingan@uchias.com', ]) # test in filter on primary key with another arbitrary filter self.assertEquals([entity.email for entity in FieldsWithOptionsModel.objects.filter( email__in=['rinnengan@sage.de', 'sharingan@uchias.com'], integer__gt=2).order_by( 'integer')], ['rinnengan@sage.de', ]) # Test exceptions # test multiple filters exception when filtered and not ordered against # the first filter self.assertRaises(DatabaseError, lambda: FieldsWithOptionsModel.objects.filter( email__gte='rinnengan@sage.de', floating_point=5.3).order_by( 'floating_point')[0]) # test exception if filtered across multiple columns with inequality filter self.assertRaises(DatabaseError, FieldsWithOptionsModel.objects.filter( floating_point__lte=5.3, integer__gte=2).order_by( 'floating_point').get) # test exception if filtered across multiple columns with inequality filter # with exclude self.assertRaises(DatabaseError, FieldsWithOptionsModel.objects.filter( email__lte='rinnengan@sage.de').exclude( floating_point__lt=9.1).order_by('email').get) self.assertRaises(DatabaseError, lambda: FieldsWithOptionsModel.objects.all().exclude( floating_point__lt=9.1).order_by('email')[0]) # TODO: Maybe check all possible exceptions def test_slicing(self): # test slicing on filter with primary_key self.assertEquals([entity.email for entity in FieldsWithOptionsModel.objects.filter( email__lte='rinnengan@sage.de').order_by('email')[:2]], ['app-engine@scholardocs.com', 'rasengan@naruto.com', ]) self.assertEquals([entity.email for entity in 
FieldsWithOptionsModel.objects.filter( email__lte='rinnengan@sage.de').order_by('email')[1:2]], ['rasengan@naruto.com', ]) # test on non pk field self.assertEquals([entity.integer for entity in FieldsWithOptionsModel.objects.all().order_by( 'integer')[:2]], [1, 2, ]) self.assertEquals([entity.email for entity in FieldsWithOptionsModel.objects.all().order_by( 'email')[::2]], ['app-engine@scholardocs.com', 'rinnengan@sage.de',]) def test_cursor(self): results = list(FieldsWithOptionsModel.objects.all()) cursor = None for item in results: query = FieldsWithOptionsModel.objects.all()[:1] if cursor is not None: set_cursor(query, cursor) next = query[0] self.assertEqual(next.pk, item.pk) cursor = get_cursor(query) query = FieldsWithOptionsModel.objects.all()[:1] set_cursor(query, cursor) self.assertEqual(list(query), []) def test_Q_objects(self): self.assertEquals([entity.email for entity in FieldsWithOptionsModel.objects.filter( Q(email__lte='rinnengan@sage.de')).order_by('email')][:2], ['app-engine@scholardocs.com', 'rasengan@naruto.com', ]) self.assertEquals([entity.integer for entity in FieldsWithOptionsModel.objects.exclude(Q(integer__lt=5) | Q(integer__gte=9)).order_by('integer')], [5, ]) self.assertRaises(TypeError, FieldsWithOptionsModel.objects.filter( Q(floating_point=9.1), Q(integer=9) | Q(integer=2))) def test_pk_in(self): # test pk__in with field name email self.assertEquals([entity.email for entity in FieldsWithOptionsModel.objects.filter( email__in=['app-engine@scholardocs.com', 'rasengan@naruto.com'])], ['app-engine@scholardocs.com', 'rasengan@naruto.com']) def test_in(self): self.assertEquals([entity.email for entity in FieldsWithOptionsModel.objects.filter( floating_point__in=[5.3, 2.6, 1.58]).filter( integer__in=[1, 5, 9])], ['app-engine@scholardocs.com', 'rasengan@naruto.com']) def test_in_with_pk_in(self): self.assertEquals([entity.email for entity in FieldsWithOptionsModel.objects.filter( floating_point__in=[5.3, 2.6, 1.58]).filter( 
email__in=['app-engine@scholardocs.com', 'rasengan@naruto.com'])], ['app-engine@scholardocs.com', 'rasengan@naruto.com']) def test_inequality(self): self.assertEquals([entity.email for entity in FieldsWithOptionsModel.objects.exclude( floating_point=5.3).filter( integer__in=[1, 5, 9])], ['rasengan@naruto.com', 'rinnengan@sage.de']) def test_values(self): # test values() self.assertEquals([entity['pk'] for entity in FieldsWithOptionsModel.objects.filter(integer__gt=3). order_by('integer').values('pk')], ['app-engine@scholardocs.com', 'rinnengan@sage.de']) self.assertEquals(FieldsWithOptionsModel.objects.filter(integer__gt=3). order_by('integer').values('pk').count(), 2) # these queries first fetch the whole entity and then only return the # desired fields selected in .values self.assertEquals([entity['integer'] for entity in FieldsWithOptionsModel.objects.filter( email__startswith='r').order_by('email').values( 'integer')], [1, 9]) self.assertEquals([entity['floating_point'] for entity in FieldsWithOptionsModel.objects.filter(integer__gt=3). order_by('integer').values('floating_point')], [5.3, 9.1]) # test values_list self.assertEquals([entity[0] for entity in FieldsWithOptionsModel.objects.filter(integer__gt=3). order_by('integer').values_list('pk')], ['app-engine@scholardocs.com', 'rinnengan@sage.de']) def test_range(self): # test range on float self.assertEquals([entity.floating_point for entity in FieldsWithOptionsModel.objects.filter( floating_point__range=(2.6, 9.1)). order_by('floating_point')], [2.6, 5.3, 9.1,]) # test range on pk self.assertEquals([entity.pk for entity in FieldsWithOptionsModel.objects.filter( pk__range=('app-engine@scholardocs.com', 'rinnengan@sage.de')). 
order_by('pk')], ['app-engine@scholardocs.com', 'rasengan@naruto.com', 'rinnengan@sage.de',]) # test range on date/datetime objects start_time = datetime.time(self.last_save_time.hour, self.last_save_time.minute - 1, self.last_save_time.second, self.last_save_time.microsecond) self.assertEquals([entity.email for entity in FieldsWithOptionsModel.objects.filter( time__range=(start_time, self.last_save_time)).order_by('time')], ['app-engine@scholardocs.com', 'sharingan@uchias.com', 'rinnengan@sage.de', 'rasengan@naruto.com',]) def test_date(self): # test year on date range boundaries self.assertEquals([entity.datetime for entity in DateTimeModel.objects.filter( datetime__year=2010).order_by('datetime')], [datetime.datetime(2010, 1, 1, 0, 0, 0, 0), datetime.datetime(2010, 12, 31, 23, 59, 59, 999999),]) # test year on non boundary date self.assertEquals([entity.datetime for entity in DateTimeModel.objects.filter( datetime__year=2013).order_by('datetime')], [datetime.datetime(2013, 7, 28, 22, 30, 20, 50),]) def test_auto_now(self): time.sleep(0.1) entity = DateTimeModel.objects.all()[0] auto_now = entity.datetime_auto_now entity.save() entity = DateTimeModel.objects.get(pk=entity.pk) self.assertNotEqual(auto_now, entity.datetime_auto_now) def test_auto_now_add(self): time.sleep(0.1) entity = DateTimeModel.objects.all()[0] auto_now_add = entity.datetime_auto_now_add entity.save() entity = DateTimeModel.objects.get(pk=entity.pk) self.assertEqual(auto_now_add, entity.datetime_auto_now_add) def test_latest(self): self.assertEquals(FieldsWithOptionsModel.objects.latest('time').floating_point, 1.58) def test_blob(self): x = BlobModel(data='lalala') x.full_clean() x.save() e = Get(Key.from_path(BlobModel._meta.db_table, x.pk)) self.assertEqual(e['data'], x.data) x = BlobModel.objects.all()[0] self.assertEqual(e['data'], x.data)
[ [ 1, 0, 0.0023, 0.0023, 0, 0.66, 0, 808, 0, 4, 0, 0, 808, 0, 0 ], [ 1, 0, 0.0047, 0.0023, 0, 0.66, 0.1111, 158, 0, 1, 0, 0, 158, 0, 0 ], [ 1, 0, 0.007, 0.0023, 0, 0...
[ "from .testmodels import FieldsWithOptionsModel, EmailModel, DateTimeModel, OrderedModel", "from ..db.utils import get_cursor", "import datetime, time", "from django.test import TestCase", "from django.db.models import Q", "from django.db.utils import DatabaseError", "from djangoappengine.db.utils impor...
from django.db import models from django.test import TestCase from django.db.utils import DatabaseError class A(models.Model): value = models.IntegerField() class B(A): other = models.IntegerField() class BackendTest(TestCase): def test_model_forms(self): from django import forms class F(forms.ModelForm): class Meta: model = A F({'value': '3'}).save() def test_multi_table_inheritance(self): B(value=3, other=5).save() self.assertEqual(A.objects.count(), 1) self.assertEqual(A.objects.all()[0].value, 3) self.assertRaises(DatabaseError, B.objects.count) self.assertRaises(DatabaseError, lambda: B.objects.all()[0])
[ [ 1, 0, 0.04, 0.04, 0, 0.66, 0, 40, 0, 1, 0, 0, 40, 0, 0 ], [ 1, 0, 0.08, 0.04, 0, 0.66, 0.2, 944, 0, 1, 0, 0, 944, 0, 0 ], [ 1, 0, 0.12, 0.04, 0, 0.66, 0.4, ...
[ "from django.db import models", "from django.test import TestCase", "from django.db.utils import DatabaseError", "class A(models.Model):\n value = models.IntegerField()", " value = models.IntegerField()", "class B(A):\n other = models.IntegerField()", " other = models.IntegerField()", "cla...
from .testmodels import FieldsWithOptionsModel, OrderedModel, SelfReferenceModel import datetime from django.test import TestCase from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned class NonReturnSetsTest(TestCase): floats = [5.3, 2.6, 9.1, 1.58, 2.4] emails = ['app-engine@scholardocs.com', 'sharingan@uchias.com', 'rinnengan@sage.de', 'rasengan@naruto.com', 'itachi@uchia.com'] def setUp(self): for index, (float, email) in enumerate(zip(NonReturnSetsTest.floats, NonReturnSetsTest.emails)): self.last_save_time = datetime.datetime.now().time() ordered_instance = OrderedModel(priority=index, pk=index + 1) ordered_instance.save() model = FieldsWithOptionsModel(floating_point=float, integer=int(float), email=email, time=self.last_save_time, foreign_key=ordered_instance) model.save() def test_get(self): self.assertEquals(FieldsWithOptionsModel.objects.get( email='itachi@uchia.com') .email, 'itachi@uchia.com') # test exception when matching multiple entities self.assertRaises(MultipleObjectsReturned, FieldsWithOptionsModel.objects .get, integer=2) # test exception when entity does not exist self.assertRaises(ObjectDoesNotExist, FieldsWithOptionsModel.objects .get, floating_point=5.2) # TODO: test create when djangos model.save_base is refactored # TODO: test get_or_create when refactored def test_count(self): self.assertEquals(FieldsWithOptionsModel.objects.filter( integer=2).count(), 2) def test_in_bulk(self): self.assertEquals([key in ['sharingan@uchias.com', 'itachi@uchia.com'] for key in FieldsWithOptionsModel.objects.in_bulk( ['sharingan@uchias.com', 'itachi@uchia.com']).keys()], [True, ]*2) def test_latest(self): self.assertEquals('itachi@uchia.com', FieldsWithOptionsModel.objects .latest('time').email) def test_exists(self): self.assertEquals(True, FieldsWithOptionsModel.objects.exists()) def test_deletion(self): # TODO: ForeignKeys will not be deleted! 
This has to be done via # background tasks self.assertEquals(FieldsWithOptionsModel.objects.count(), 5) FieldsWithOptionsModel.objects.get(email='itachi@uchia.com').delete() self.assertEquals(FieldsWithOptionsModel.objects.count(), 4) FieldsWithOptionsModel.objects.filter(email__in=['sharingan@uchias.com', 'itachi@uchia.com', 'rasengan@naruto.com', ]).delete() self.assertEquals(FieldsWithOptionsModel.objects.count(), 2) def test_selfref_deletion(self): entity = SelfReferenceModel() entity.save() entity.delete() def test_foreign_key_fetch(self): # test fetching the ForeignKey ordered_instance = OrderedModel.objects.get(priority=2) self.assertEquals(FieldsWithOptionsModel.objects.get(integer=9).foreign_key, ordered_instance) def test_foreign_key_backward(self): entity = OrderedModel.objects.all()[0] self.assertEquals(entity.keys.count(), 1) # TODO: add should save the added instance transactional via for example # force_insert new_foreign_key = FieldsWithOptionsModel(floating_point=5.6, integer=3, email='temp@temp.com', time=datetime.datetime.now()) entity.keys.add(new_foreign_key) self.assertEquals(entity.keys.count(), 2) # TODO: add test for create entity.keys.remove(new_foreign_key) self.assertEquals(entity.keys.count(), 1) entity.keys.clear() self.assertTrue(not entity.keys.exists()) entity.keys = [new_foreign_key, new_foreign_key] self.assertEquals(entity.keys.count(), 1) self.assertEquals(entity.keys.all()[0].integer, 3)
[ [ 1, 0, 0.0104, 0.0104, 0, 0.66, 0, 808, 0, 3, 0, 0, 808, 0, 0 ], [ 1, 0, 0.0208, 0.0104, 0, 0.66, 0.25, 426, 0, 1, 0, 0, 426, 0, 0 ], [ 1, 0, 0.0312, 0.0104, 0, 0....
[ "from .testmodels import FieldsWithOptionsModel, OrderedModel, SelfReferenceModel", "import datetime", "from django.test import TestCase", "from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned", "class NonReturnSetsTest(TestCase):\n floats = [5.3, 2.6, 9.1, 1.58, 2.4]\n email...
from django.test import TestCase from django.db.utils import DatabaseError from django.db.models.fields import NOT_PROVIDED from .testmodels import FieldsWithOptionsModel from google.appengine.api.datastore import Get from google.appengine.ext.db import Key from google.appengine.api.datastore_types import Text, Category, Email, Link, \ PhoneNumber, PostalAddress, Text, Blob, ByteString, GeoPt, IM, Key, \ Rating, BlobKey from google.appengine.api import users import datetime class FieldOptionsTest(TestCase): def test_options(self): entity = FieldsWithOptionsModel() # try to save the entity with non-nullable field time set to None, should # raise an exception self.assertRaises(DatabaseError, entity.save) time = datetime.datetime.now().time() entity.time = time entity.save() # check if primary_key=True is set correctly for the saved entity self.assertEquals(entity.pk, u'app-engine@scholardocs.com') gae_entity = Get(Key.from_path(FieldsWithOptionsModel._meta.db_table, entity.pk)) self.assertTrue(gae_entity is not None) self.assertEquals(gae_entity.key().name(), u'app-engine@scholardocs.com') # check if default values are set correctly on the db level, # primary_key field is not stored at the db level for field in FieldsWithOptionsModel._meta.local_fields: if field.default and field.default != NOT_PROVIDED and not \ field.primary_key: self.assertEquals(gae_entity[field.column], field.default) elif field.column == 'time': self.assertEquals(gae_entity[field.column], datetime.datetime( 1970, 1, 1, time.hour, time.minute, time.second, time.microsecond)) elif field.null and field.editable: self.assertEquals(gae_entity[field.column], None) # check if default values are set correct on the model instance level entity = FieldsWithOptionsModel.objects.get() for field in FieldsWithOptionsModel._meta.local_fields: if field.default and field.default != NOT_PROVIDED: self.assertEquals(getattr(entity, field.column), field.default) elif field.column == 'time': 
self.assertEquals(getattr(entity, field.column), time) elif field.null and field.editable: self.assertEquals(getattr(entity, field.column), None) # check if nullable field with default values can be set to None entity.slug = None entity.positiv_small_integer = None try: entity.save() except: self.fail() # check if slug and positiv_small_integer will be retrieved with values # set to None (on db level and model instance level) gae_entity = Get(Key.from_path(FieldsWithOptionsModel._meta.db_table, entity.pk)) self.assertEquals(gae_entity[FieldsWithOptionsModel._meta.get_field_by_name( 'slug')[0].column], None) self.assertEquals(gae_entity[FieldsWithOptionsModel._meta.get_field_by_name( 'positiv_small_integer')[0].column], None) # on the model instance level entity = FieldsWithOptionsModel.objects.get() self.assertEquals(getattr(entity, FieldsWithOptionsModel._meta.get_field_by_name( 'slug')[0].column), None) self.assertEquals(getattr(entity, FieldsWithOptionsModel._meta.get_field_by_name( 'positiv_small_integer')[0].column), None) # TODO: check db_column option # TODO: change the primary key and check if a new instance with the # changed primary key will be saved (not in this test class)
[ [ 1, 0, 0.0125, 0.0125, 0, 0.66, 0, 944, 0, 1, 0, 0, 944, 0, 0 ], [ 1, 0, 0.025, 0.0125, 0, 0.66, 0.1111, 495, 0, 1, 0, 0, 495, 0, 0 ], [ 1, 0, 0.0375, 0.0125, 0, 0...
[ "from django.test import TestCase", "from django.db.utils import DatabaseError", "from django.db.models.fields import NOT_PROVIDED", "from .testmodels import FieldsWithOptionsModel", "from google.appengine.api.datastore import Get", "from google.appengine.ext.db import Key", "from google.appengine.api.d...
from .backend import BackendTest from .field_db_conversion import FieldDBConversionTest from .field_options import FieldOptionsTest from .filter import FilterTest from .order import OrderTest from .not_return_sets import NonReturnSetsTest from .decimals import DecimalTest
[ [ 1, 0, 0.1429, 0.1429, 0, 0.66, 0, 631, 0, 1, 0, 0, 631, 0, 0 ], [ 1, 0, 0.2857, 0.1429, 0, 0.66, 0.1667, 83, 0, 1, 0, 0, 83, 0, 0 ], [ 1, 0, 0.4286, 0.1429, 0, 0....
[ "from .backend import BackendTest", "from .field_db_conversion import FieldDBConversionTest", "from .field_options import FieldOptionsTest", "from .filter import FilterTest", "from .order import OrderTest", "from .not_return_sets import NonReturnSetsTest", "from .decimals import DecimalTest" ]
from django.db import models from ..db.db_settings import get_indexes from djangotoolbox.fields import BlobField class EmailModel(models.Model): email = models.EmailField() class DateTimeModel(models.Model): datetime = models.DateTimeField() datetime_auto_now = models.DateTimeField(auto_now=True) datetime_auto_now_add = models.DateTimeField(auto_now_add=True) class FieldsWithoutOptionsModel(models.Model): datetime = models.DateTimeField() date = models.DateField() time = models.TimeField() floating_point = models.FloatField() boolean = models.BooleanField() null_boolean = models.NullBooleanField() text = models.CharField(max_length=3) email = models.EmailField() comma_seperated_integer = models.CommaSeparatedIntegerField(max_length=10) ip_address = models.IPAddressField() slug = models.SlugField() url = models.URLField() # file = models.FileField() # file_path = models.FilePathField() long_text = models.TextField() indexed_text = models.TextField() xml = models.XMLField() integer = models.IntegerField() small_integer = models.SmallIntegerField() positiv_integer = models.PositiveIntegerField() positiv_small_integer = models.PositiveSmallIntegerField() # foreign_key = models.ForeignKey('FieldsWithOptionsModel') # foreign_key = models.ForeignKey('OrderedModel') # one_to_one = models.OneToOneField() # decimal = models.DecimalField() # can be None # image = models.ImageField() get_indexes()[FieldsWithoutOptionsModel] = {'indexed': ('indexed_text',)} class FieldsWithOptionsModel(models.Model): # any type of unique (unique_data, ...) is not supported on GAE, instead you # can use primary_key=True for some special cases. But be carefull: changing # the primary_key of an entity will not result in an updated entity, # instead a new entity will be putted into the datastore. 
The old one will # not be deleted and all references pointing to the old entitiy will not # point to the new one either datetime = models.DateTimeField(auto_now=True, db_column="birthday") date = models.DateField(auto_now_add=True) time = models.TimeField() floating_point = models.FloatField(null=True) boolean = models.BooleanField() # default is False null_boolean = models.NullBooleanField(default=True) text = models.CharField(default='Hallo', max_length=10) email = models.EmailField(default='app-engine@scholardocs.com', primary_key=True) comma_seperated_integer = models.CommaSeparatedIntegerField(max_length=10) ip_address = models.IPAddressField(default="192.168.0.2") slug = models.SlugField(default="GAGAA", null=True) url = models.URLField(default='http://www.scholardocs.com') # file = FileField() # file_path = FilePathField() long_text = models.TextField(default=1000*'A') xml = models.XMLField(default=2000*'B') integer = models.IntegerField(default=100) small_integer = models.SmallIntegerField(default=-5) positiv_integer = models.PositiveIntegerField(default=80) positiv_small_integer = models.PositiveSmallIntegerField(default=3, null=True) foreign_key = models.ForeignKey('OrderedModel', null=True, related_name='keys') # one_to_one = OneToOneField() # decimal = DecimalField() # image = ImageField() class OrderedModel(models.Model): id = models.IntegerField(primary_key=True) priority = models.IntegerField() class Meta: ordering = ('-priority',) class BlobModel(models.Model): data = BlobField() class DecimalModel(models.Model): decimal = models.DecimalField(max_digits=9, decimal_places=2) class SelfReferenceModel(models.Model): ref = models.ForeignKey('self', null=True)
[ [ 1, 0, 0.0112, 0.0112, 0, 0.66, 0, 40, 0, 1, 0, 0, 40, 0, 0 ], [ 1, 0, 0.0225, 0.0112, 0, 0.66, 0.0909, 87, 0, 1, 0, 0, 87, 0, 0 ], [ 1, 0, 0.0337, 0.0112, 0, 0.66...
[ "from django.db import models", "from ..db.db_settings import get_indexes", "from djangotoolbox.fields import BlobField", "class EmailModel(models.Model):\n email = models.EmailField()", " email = models.EmailField()", "class DateTimeModel(models.Model):\n datetime = models.DateTimeField()\n d...
from .testmodels import FieldsWithoutOptionsModel from django.test import TestCase from google.appengine.api.datastore import Get from google.appengine.ext.db import Key from google.appengine.api.datastore_types import Text, Category, Email, Link, \ PhoneNumber, PostalAddress, Text, Blob, ByteString, GeoPt, IM, Key, \ Rating, BlobKey from google.appengine.api import users import datetime class FieldDBConversionTest(TestCase): def test_db_conversion(self): actual_datetime = datetime.datetime.now() entity = FieldsWithoutOptionsModel( datetime=actual_datetime, date=actual_datetime.date(), time=actual_datetime.time(), floating_point=5.97, boolean=True, null_boolean=False, text='Hallo', email='hallo@hallo.com', comma_seperated_integer="5,4,3,2", ip_address='194.167.1.1', slug='you slugy slut :)', url='http://www.scholardocs.com', long_text=1000*'A', indexed_text='hello', xml=2000*'B', integer=-400, small_integer=-4, positiv_integer=400, positiv_small_integer=4) entity.save() # get the gae entity (not the django model instance) and test if the # fields have been converted right to the corresponding gae database types gae_entity = Get(Key.from_path(FieldsWithoutOptionsModel._meta.db_table, entity.pk)) for name, gae_db_type in [('long_text', Text), ('indexed_text', unicode), ('xml', Text), ('text', unicode), ('ip_address', unicode), ('slug', unicode), ('email', unicode),('comma_seperated_integer', unicode), ('url', unicode), ('time', datetime.datetime), ('datetime', datetime.datetime), ('date', datetime.datetime), ('floating_point', float), ('boolean', bool), ('null_boolean', bool), ('integer', (int, long)), ('small_integer', (int, long)), ('positiv_integer', (int, long)), ('positiv_small_integer', (int, long))]: self.assertTrue(type(gae_entity[ FieldsWithoutOptionsModel._meta.get_field_by_name( name)[0].column]) in (isinstance(gae_db_type, (list, tuple)) and \ gae_db_type or (gae_db_type, ))) # get the model instance and check if the fields convert back to the # right 
types entity = FieldsWithoutOptionsModel.objects.get() for name, expected_type in [('long_text', unicode), ('indexed_text', unicode), ('xml', unicode), ('text', unicode), ('ip_address', unicode), ('slug', unicode), ('email', unicode), ('comma_seperated_integer', unicode), ('url', unicode), ('datetime', datetime.datetime), ('date', datetime.date), ('time', datetime.time), ('floating_point', float), ('boolean', bool), ('null_boolean', bool), ('integer', (int, long)), ('small_integer', (int, long)), ('positiv_integer', (int, long)), ('positiv_small_integer', (int, long))]: self.assertTrue(type(getattr(entity, name)) in (isinstance( expected_type, (list, tuple)) and expected_type or (expected_type, ))) # TODO: Add field conversions for ForeignKeys?
[ [ 1, 0, 0.0159, 0.0159, 0, 0.66, 0, 808, 0, 1, 0, 0, 808, 0, 0 ], [ 1, 0, 0.0317, 0.0159, 0, 0.66, 0.1429, 944, 0, 1, 0, 0, 944, 0, 0 ], [ 1, 0, 0.0476, 0.0159, 0, ...
[ "from .testmodels import FieldsWithoutOptionsModel", "from django.test import TestCase", "from google.appengine.api.datastore import Get", "from google.appengine.ext.db import Key", "from google.appengine.api.datastore_types import Text, Category, Email, Link, \\\n PhoneNumber, PostalAddress, Text, Blob,...
# Initialize App Engine SDK if necessary try: from google.appengine.api import api_proxy_stub_map except ImportError: from .boot import setup_env setup_env() from djangoappengine.utils import on_production_server, have_appserver DEBUG = not on_production_server TEMPLATE_DEBUG = DEBUG ROOT_URLCONF = 'urls' DATABASES = { 'default': { 'ENGINE': 'djangoappengine.db', }, } if on_production_server: EMAIL_BACKEND = 'djangoappengine.mail.AsyncEmailBackend' else: EMAIL_BACKEND = 'djangoappengine.mail.EmailBackend' PREPARE_UPLOAD_BACKEND = 'djangoappengine.storage.prepare_upload' SERVE_FILE_BACKEND = 'djangoappengine.storage.serve_file' DEFAULT_FILE_STORAGE = 'djangoappengine.storage.BlobstoreStorage' FILE_UPLOAD_MAX_MEMORY_SIZE = 1024 * 1024 FILE_UPLOAD_HANDLERS = ( 'djangoappengine.storage.BlobstoreFileUploadHandler', 'django.core.files.uploadhandler.MemoryFileUploadHandler', ) CACHE_BACKEND = 'memcached://?timeout=0' SESSION_ENGINE = 'django.contrib.sessions.backends.cached_db' if not on_production_server: INTERNAL_IPS = ('127.0.0.1',)
[ [ 7, 0, 0.1026, 0.1282, 0, 0.66, 0, 0, 0, 1, 0, 0, 0, 0, 1 ], [ 1, 1, 0.0769, 0.0256, 1, 0.86, 0, 279, 0, 1, 0, 0, 279, 0, 0 ], [ 1, 1, 0.1282, 0.0256, 1, 0.86, ...
[ "try:\n from google.appengine.api import api_proxy_stub_map\nexcept ImportError:\n from .boot import setup_env\n setup_env()", " from google.appengine.api import api_proxy_stub_map", " from .boot import setup_env", " setup_env()", "from djangoappengine.utils import on_production_server, ha...
import os import sys # Add parent folder to sys.path, so we can import boot. # App Engine causes main.py to be reloaded if an exception gets raised # on the first request of a main.py instance, so don't add project_dir multiple # times. project_dir = os.path.abspath(os.path.dirname(os.path.dirname(os.path.dirname(__file__)))) if project_dir not in sys.path or sys.path.index(project_dir) > 0: while project_dir in sys.path: sys.path.remove(project_dir) sys.path.insert(0, project_dir) for path in sys.path[:]: if path != project_dir and os.path.isdir(os.path.join(path, 'django')): sys.path.remove(path) break # Remove the standard version of Django. if 'django' in sys.modules and sys.modules['django'].VERSION < (1, 2): for k in [k for k in sys.modules if k.startswith('django\.') or k == 'django']: del sys.modules[k] from djangoappengine.boot import setup_env, setup_logging, env_ext setup_env() from django.core.handlers.wsgi import WSGIHandler from google.appengine.ext.webapp.util import run_wsgi_app from django.conf import settings def log_traceback(*args, **kwargs): import logging logging.exception('Exception in request:') from django.core import signals signals.got_request_exception.connect(log_traceback) def real_main(): # Reset path and environment variables global path_backup try: sys.path = path_backup[:] except: path_backup = sys.path[:] os.environ.update(env_ext) setup_logging() # Create a Django application for WSGI. application = WSGIHandler() # Run the WSGI CGI handler with that application. 
run_wsgi_app(application) def profile_main(): import logging, cProfile, pstats, random, StringIO only_forced_profile = getattr(settings, 'ONLY_FORCED_PROFILE', False) profile_percentage = getattr(settings, 'PROFILE_PERCENTAGE', None) if (only_forced_profile and 'profile=forced' not in os.environ.get('QUERY_STRING')) or \ (not only_forced_profile and profile_percentage and float(profile_percentage) / 100.0 <= random.random()): return real_main() prof = cProfile.Profile() prof = prof.runctx('real_main()', globals(), locals()) stream = StringIO.StringIO() stats = pstats.Stats(prof, stream=stream) sort_by = getattr(settings, 'SORT_PROFILE_RESULTS_BY', 'time') if not isinstance(sort_by, (list, tuple)): sort_by = (sort_by,) stats.sort_stats(*sort_by) restrictions = [] profile_pattern = getattr(settings, 'PROFILE_PATTERN', None) if profile_pattern: restrictions.append(profile_pattern) max_results = getattr(settings, 'MAX_PROFILE_RESULTS', 80) if max_results and max_results != 'all': restrictions.append(max_results) stats.print_stats(*restrictions) extra_output = getattr(settings, 'EXTRA_PROFILE_OUTPUT', None) or () if not isinstance(sort_by, (list, tuple)): extra_output = (extra_output,) if 'callees' in extra_output: stats.print_callees() if 'callers' in extra_output: stats.print_callers() logging.info('Profile data:\n%s', stream.getvalue()) main = getattr(settings, 'ENABLE_PROFILER', False) and profile_main or real_main if __name__ == '__main__': main()
[ [ 1, 0, 0.0106, 0.0106, 0, 0.66, 0, 688, 0, 1, 0, 0, 688, 0, 0 ], [ 1, 0, 0.0213, 0.0106, 0, 0.66, 0.0588, 509, 0, 1, 0, 0, 509, 0, 0 ], [ 14, 0, 0.0851, 0.0106, 0, ...
[ "import os", "import sys", "project_dir = os.path.abspath(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))", "if project_dir not in sys.path or sys.path.index(project_dir) > 0:\n while project_dir in sys.path:\n sys.path.remove(project_dir)\n sys.path.insert(0, project_dir)", " w...
from django.core.management import execute_from_command_line from django.core.management.base import BaseCommand class Command(BaseCommand): help = 'Runs a command with access to the remote App Engine production ' \ 'server (e.g. manage.py remote shell)' args = 'remotecommand' def run_from_argv(self, argv): from django.db import connections for connection in connections.all(): if hasattr(connection, 'setup_remote'): connection.setup_remote() argv = argv[:1] + argv[2:] execute_from_command_line(argv)
[ [ 1, 0, 0.0667, 0.0667, 0, 0.66, 0, 879, 0, 1, 0, 0, 879, 0, 0 ], [ 1, 0, 0.1333, 0.0667, 0, 0.66, 0.5, 931, 0, 1, 0, 0, 931, 0, 0 ], [ 3, 0, 0.6333, 0.8, 0, 0.66, ...
[ "from django.core.management import execute_from_command_line", "from django.core.management.base import BaseCommand", "class Command(BaseCommand):\n help = 'Runs a command with access to the remote App Engine production ' \\\n 'server (e.g. manage.py remote shell)'\n args = 'remotecommand'\n\n ...
#!/usr/bin/python2.4 # # Copyright 2008 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # CHANGED: show warning if profiler is enabled, so you don't mistakenly upload # with non-production settings. Also, added --nosyncdb switch. from django.conf import settings from django.core.management import call_command from django.core.management.base import BaseCommand import logging import sys import time def run_appcfg(argv): # We don't really want to use that one though, it just executes this one from google.appengine.tools import appcfg # Reset the logging level to WARN as appcfg will spew tons of logs on INFO logging.getLogger().setLevel(logging.WARN) new_args = argv[:] new_args[1] = 'update' new_args.append('.') syncdb = True if '--nosyncdb' in new_args: syncdb = False new_args.remove('--nosyncdb') appcfg.main(new_args) if syncdb: print 'Running syncdb.' # Wait a little bit for deployment to finish for countdown in range(9, 0, -1): sys.stdout.write('%s\r' % countdown) time.sleep(1) from django.db import connections for connection in connections.all(): if hasattr(connection, 'setup_remote'): connection.setup_remote() call_command('syncdb', remote=True, interactive=True) if getattr(settings, 'ENABLE_PROFILER', False): print '--------------------------\n' \ 'WARNING: PROFILER ENABLED!\n' \ '--------------------------' class Command(BaseCommand): """Deploys the website to the production server. 
Any additional arguments are passed directly to appcfg.py update """ help = 'Calls appcfg.py update for the current project.' args = '[any appcfg.py options]' def run_from_argv(self, argv): if 'mediagenerator' in settings.INSTALLED_APPS: call_command('generatemedia') run_appcfg(argv)
[ [ 1, 0, 0.2917, 0.0139, 0, 0.66, 0, 128, 0, 1, 0, 0, 128, 0, 0 ], [ 1, 0, 0.3056, 0.0139, 0, 0.66, 0.1429, 879, 0, 1, 0, 0, 879, 0, 0 ], [ 1, 0, 0.3194, 0.0139, 0, ...
[ "from django.conf import settings", "from django.core.management import call_command", "from django.core.management.base import BaseCommand", "import logging", "import sys", "import time", "def run_appcfg(argv):\n # We don't really want to use that one though, it just executes this one\n from goog...
#!/usr/bin/python2.4 # # Copyright 2008 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging import os import sys from django.db import connections from ...db.base import DatabaseWrapper from django.core.management.base import BaseCommand from django.core.exceptions import ImproperlyConfigured def start_dev_appserver(argv): """Starts the App Engine dev_appserver program for the Django project. The appserver is run with default parameters. If you need to pass any special parameters to the dev_appserver you will have to invoke it manually. """ from google.appengine.tools import dev_appserver_main progname = argv[0] args = [] # hack __main__ so --help in dev_appserver_main works OK. sys.modules['__main__'] = dev_appserver_main # Set bind ip/port if specified. addr, port = None, '8000' if len(argv) > 2: if not argv[2].startswith('-'): addrport = argv[2] try: addr, port = addrport.split(":") except ValueError: addr = addrport else: args.append(argv[2]) args.extend(argv[3:]) if addr: args.extend(["--address", addr]) if port: args.extend(["--port", port]) # Add email settings from django.conf import settings if '--smtp_host' not in args and '--enable_sendmail' not in args: args.extend(['--smtp_host', settings.EMAIL_HOST, '--smtp_port', str(settings.EMAIL_PORT), '--smtp_user', settings.EMAIL_HOST_USER, '--smtp_password', settings.EMAIL_HOST_PASSWORD]) # Pass the application specific datastore location to the server. 
for name in connections: connection = connections[name] if isinstance(connection, DatabaseWrapper): p = connection._get_paths() if '--datastore_path' not in args: args.extend(['--datastore_path', p[0]]) if '--blobstore_path' not in args: args.extend(['--blobstore_path', p[1]]) if '--history_path' not in args: args.extend(['--history_path', p[2]]) break # Reset logging level to INFO as dev_appserver will spew tons of debug logs logging.getLogger().setLevel(logging.INFO) # Append the current working directory to the arguments. dev_appserver_main.main([progname] + args + [os.getcwdu()]) class Command(BaseCommand): """Overrides the default Django runserver command. Instead of starting the default Django development server this command fires up a copy of the full fledged App Engine dev_appserver that emulates the live environment your application will be deployed to. """ help = 'Runs a copy of the App Engine development server.' args = '[optional port number, or ipaddr:port]' def run_from_argv(self, argv): start_dev_appserver(argv)
[ [ 1, 0, 0.1935, 0.0108, 0, 0.66, 0, 715, 0, 1, 0, 0, 715, 0, 0 ], [ 1, 0, 0.2043, 0.0108, 0, 0.66, 0.125, 688, 0, 1, 0, 0, 688, 0, 0 ], [ 1, 0, 0.2151, 0.0108, 0, 0...
[ "import logging", "import os", "import sys", "from django.db import connections", "from ...db.base import DatabaseWrapper", "from django.core.management.base import BaseCommand", "from django.core.exceptions import ImproperlyConfigured", "def start_dev_appserver(argv):\n \"\"\"Starts the App Engine...
#!/usr/bin/python2.4 # # Copyright 2008 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # CHANGED: show warning if profiler is enabled, so you don't mistakenly upload # with non-production settings. Also, added --nosyncdb switch. from django.conf import settings from django.core.management import call_command from django.core.management.base import BaseCommand import logging import sys import time def run_appcfg(argv): # We don't really want to use that one though, it just executes this one from google.appengine.tools import appcfg # Reset the logging level to WARN as appcfg will spew tons of logs on INFO logging.getLogger().setLevel(logging.WARN) new_args = argv[:] new_args[1] = 'update' new_args.append('.') syncdb = True if '--nosyncdb' in new_args: syncdb = False new_args.remove('--nosyncdb') appcfg.main(new_args) if syncdb: print 'Running syncdb.' # Wait a little bit for deployment to finish for countdown in range(9, 0, -1): sys.stdout.write('%s\r' % countdown) time.sleep(1) from django.db import connections for connection in connections.all(): if hasattr(connection, 'setup_remote'): connection.setup_remote() call_command('syncdb', remote=True, interactive=True) if getattr(settings, 'ENABLE_PROFILER', False): print '--------------------------\n' \ 'WARNING: PROFILER ENABLED!\n' \ '--------------------------' class Command(BaseCommand): """Deploys the website to the production server. 
Any additional arguments are passed directly to appcfg.py update """ help = 'Calls appcfg.py update for the current project.' args = '[any appcfg.py options]' def run_from_argv(self, argv): if 'mediagenerator' in settings.INSTALLED_APPS: call_command('generatemedia') run_appcfg(argv)
[ [ 1, 0, 0.2917, 0.0139, 0, 0.66, 0, 128, 0, 1, 0, 0, 128, 0, 0 ], [ 1, 0, 0.3056, 0.0139, 0, 0.66, 0.1429, 879, 0, 1, 0, 0, 879, 0, 0 ], [ 1, 0, 0.3194, 0.0139, 0, ...
[ "from django.conf import settings", "from django.core.management import call_command", "from django.core.management.base import BaseCommand", "import logging", "import sys", "import time", "def run_appcfg(argv):\n # We don't really want to use that one though, it just executes this one\n from goog...
#!/usr/bin/python2.4 # # Copyright 2008 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import sys from .runserver import start_dev_appserver from django.core.management.base import BaseCommand from djangoappengine.db.base import destroy_datastore, get_test_datastore_paths class Command(BaseCommand): """Overrides the default Django testserver command. Instead of starting the default Django development server this command fires up a copy of the full fledged App Engine dev_appserver. The appserver is always initialised with a blank datastore with the specified fixtures loaded into it. """ help = 'Runs the development server with data from the given fixtures.' def run_from_argv(self, argv): fixtures = [] for arg in argv[2:]: if arg.startswith('-'): break fixtures.append(arg) argv.remove(arg) try: index = argv.index('--addrport') addrport = argv[index + 1] del argv[index:index+2] argv = argv[:2] + [addrport] + argv[2:index] + argv[index+1:] except: pass # Ensure an on-disk test datastore is used. from django.db import connection connection.use_test_datastore = True connection.test_datastore_inmemory = False # Flush any existing test datastore. connection.flush() # Load the fixtures. from django.core.management import call_command call_command('loaddata', 'initial_data') if fixtures: call_command('loaddata', *fixtures) # Build new arguments for dev_appserver. 
argv[1] = 'runserver' datastore_path, history_path = get_test_datastore_paths(False) argv.extend(['--datastore_path', datastore_path]) argv.extend(['--history_path', history_path]) start_dev_appserver(argv)
[ [ 1, 0, 0.2466, 0.0137, 0, 0.66, 0, 688, 0, 1, 0, 0, 688, 0, 0 ], [ 1, 0, 0.2603, 0.0137, 0, 0.66, 0.2, 509, 0, 1, 0, 0, 509, 0, 0 ], [ 1, 0, 0.2877, 0.0137, 0, 0.6...
[ "import os", "import sys", "from .runserver import start_dev_appserver", "from django.core.management.base import BaseCommand", "from djangoappengine.db.base import destroy_datastore, get_test_datastore_paths", "class Command(BaseCommand):\n \"\"\"Overrides the default Django testserver command.\n\n ...
#!/usr/bin/python2.4 # # Copyright 2008 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import sys from .runserver import start_dev_appserver from django.core.management.base import BaseCommand from djangoappengine.db.base import destroy_datastore, get_test_datastore_paths class Command(BaseCommand): """Overrides the default Django testserver command. Instead of starting the default Django development server this command fires up a copy of the full fledged App Engine dev_appserver. The appserver is always initialised with a blank datastore with the specified fixtures loaded into it. """ help = 'Runs the development server with data from the given fixtures.' def run_from_argv(self, argv): fixtures = [] for arg in argv[2:]: if arg.startswith('-'): break fixtures.append(arg) argv.remove(arg) try: index = argv.index('--addrport') addrport = argv[index + 1] del argv[index:index+2] argv = argv[:2] + [addrport] + argv[2:index] + argv[index+1:] except: pass # Ensure an on-disk test datastore is used. from django.db import connection connection.use_test_datastore = True connection.test_datastore_inmemory = False # Flush any existing test datastore. connection.flush() # Load the fixtures. from django.core.management import call_command call_command('loaddata', 'initial_data') if fixtures: call_command('loaddata', *fixtures) # Build new arguments for dev_appserver. 
argv[1] = 'runserver' datastore_path, history_path = get_test_datastore_paths(False) argv.extend(['--datastore_path', datastore_path]) argv.extend(['--history_path', history_path]) start_dev_appserver(argv)
[ [ 1, 0, 0.2466, 0.0137, 0, 0.66, 0, 688, 0, 1, 0, 0, 688, 0, 0 ], [ 1, 0, 0.2603, 0.0137, 0, 0.66, 0.2, 509, 0, 1, 0, 0, 509, 0, 0 ], [ 1, 0, 0.2877, 0.0137, 0, 0.6...
[ "import os", "import sys", "from .runserver import start_dev_appserver", "from django.core.management.base import BaseCommand", "from djangoappengine.db.base import destroy_datastore, get_test_datastore_paths", "class Command(BaseCommand):\n \"\"\"Overrides the default Django testserver command.\n\n ...
#!/usr/bin/python2.4 # # Copyright 2008 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging import os import sys from django.db import connections from ...db.base import DatabaseWrapper from django.core.management.base import BaseCommand from django.core.exceptions import ImproperlyConfigured def start_dev_appserver(argv): """Starts the App Engine dev_appserver program for the Django project. The appserver is run with default parameters. If you need to pass any special parameters to the dev_appserver you will have to invoke it manually. """ from google.appengine.tools import dev_appserver_main progname = argv[0] args = [] # hack __main__ so --help in dev_appserver_main works OK. sys.modules['__main__'] = dev_appserver_main # Set bind ip/port if specified. addr, port = None, '8000' if len(argv) > 2: if not argv[2].startswith('-'): addrport = argv[2] try: addr, port = addrport.split(":") except ValueError: addr = addrport else: args.append(argv[2]) args.extend(argv[3:]) if addr: args.extend(["--address", addr]) if port: args.extend(["--port", port]) # Add email settings from django.conf import settings if '--smtp_host' not in args and '--enable_sendmail' not in args: args.extend(['--smtp_host', settings.EMAIL_HOST, '--smtp_port', str(settings.EMAIL_PORT), '--smtp_user', settings.EMAIL_HOST_USER, '--smtp_password', settings.EMAIL_HOST_PASSWORD]) # Pass the application specific datastore location to the server. 
for name in connections: connection = connections[name] if isinstance(connection, DatabaseWrapper): p = connection._get_paths() if '--datastore_path' not in args: args.extend(['--datastore_path', p[0]]) if '--blobstore_path' not in args: args.extend(['--blobstore_path', p[1]]) if '--history_path' not in args: args.extend(['--history_path', p[2]]) break # Reset logging level to INFO as dev_appserver will spew tons of debug logs logging.getLogger().setLevel(logging.INFO) # Append the current working directory to the arguments. dev_appserver_main.main([progname] + args + [os.getcwdu()]) class Command(BaseCommand): """Overrides the default Django runserver command. Instead of starting the default Django development server this command fires up a copy of the full fledged App Engine dev_appserver that emulates the live environment your application will be deployed to. """ help = 'Runs a copy of the App Engine development server.' args = '[optional port number, or ipaddr:port]' def run_from_argv(self, argv): start_dev_appserver(argv)
[ [ 1, 0, 0.1935, 0.0108, 0, 0.66, 0, 715, 0, 1, 0, 0, 715, 0, 0 ], [ 1, 0, 0.2043, 0.0108, 0, 0.66, 0.125, 688, 0, 1, 0, 0, 688, 0, 0 ], [ 1, 0, 0.2151, 0.0108, 0, 0...
[ "import logging", "import os", "import sys", "from django.db import connections", "from ...db.base import DatabaseWrapper", "from django.core.management.base import BaseCommand", "from django.core.exceptions import ImproperlyConfigured", "def start_dev_appserver(argv):\n \"\"\"Starts the App Engine...
from .db_settings import get_indexes from djangotoolbox.db.creation import NonrelDatabaseCreation class StringType(object): def __init__(self, internal_type): self.internal_type = internal_type def __mod__(self, field): indexes = get_indexes().get(field['model'], {}) if field['name'] in indexes.get('indexed', ()): return 'text' elif field['name'] in indexes.get('unindexed', ()): return 'longtext' return self.internal_type def get_data_types(): # TODO: Add GAEKeyField and a corresponding db_type string_types = ('text', 'longtext') data_types = NonrelDatabaseCreation.data_types.copy() for name, field_type in data_types.items(): if field_type in string_types: data_types[name] = StringType(field_type) return data_types class DatabaseCreation(NonrelDatabaseCreation): # This dictionary maps Field objects to their associated GAE column # types, as strings. Column-type strings can contain format strings; they'll # be interpolated against the values of Field.__dict__ before being output. # If a column type is set to None, it won't be included in the output. data_types = get_data_types() def create_test_db(self, *args, **kw): """Destroys the test datastore. A new store will be recreated on demand""" self.destroy_test_db() self.connection.use_test_datastore = True self.connection.flush() def destroy_test_db(self, *args, **kw): """Destroys the test datastore files.""" from .base import destroy_datastore, get_test_datastore_paths destroy_datastore(*get_test_datastore_paths())
[ [ 1, 0, 0.0238, 0.0238, 0, 0.66, 0, 236, 0, 1, 0, 0, 236, 0, 0 ], [ 1, 0, 0.0476, 0.0238, 0, 0.66, 0.25, 682, 0, 1, 0, 0, 682, 0, 0 ], [ 3, 0, 0.2143, 0.2619, 0, 0....
[ "from .db_settings import get_indexes", "from djangotoolbox.db.creation import NonrelDatabaseCreation", "class StringType(object):\n def __init__(self, internal_type):\n self.internal_type = internal_type\n\n def __mod__(self, field):\n indexes = get_indexes().get(field['model'], {})\n ...
from google.appengine.datastore.datastore_pb import CompiledCursor import base64 def get_cursor(queryset): # Evaluate QuerySet len(queryset) cursor = getattr(queryset.query, '_gae_cursor', None) return base64.urlsafe_b64encode(cursor.Encode()) def set_cursor(queryset, start=None, end=None): if start is not None: start = base64.urlsafe_b64decode(str(start)) start = CompiledCursor(start) queryset.query._gae_start_cursor = start if end is not None: end = base64.urlsafe_b64decode(str(end)) end = CompiledCursor(end) queryset.query._gae_end_cursor = end # Evaluate QuerySet len(queryset)
[ [ 1, 0, 0.05, 0.05, 0, 0.66, 0, 334, 0, 1, 0, 0, 334, 0, 0 ], [ 1, 0, 0.1, 0.05, 0, 0.66, 0.3333, 177, 0, 1, 0, 0, 177, 0, 0 ], [ 2, 0, 0.3, 0.25, 0, 0.66, 0.66...
[ "from google.appengine.datastore.datastore_pb import CompiledCursor", "import base64", "def get_cursor(queryset):\n # Evaluate QuerySet\n len(queryset)\n cursor = getattr(queryset.query, '_gae_cursor', None)\n return base64.urlsafe_b64encode(cursor.Encode())", " len(queryset)", " cursor = ...
from django.conf import settings from django.utils.importlib import import_module _MODULE_NAMES = getattr(settings, 'GAE_SETTINGS_MODULES', ()) FIELD_INDEXES = None # TODO: add support for eventual consistency setting on specific models def get_indexes(): global FIELD_INDEXES if FIELD_INDEXES is None: field_indexes = {} for name in _MODULE_NAMES: try: field_indexes.update(import_module(name).FIELD_INDEXES) except (ImportError, AttributeError): pass FIELD_INDEXES = field_indexes return FIELD_INDEXES
[ [ 1, 0, 0.05, 0.05, 0, 0.66, 0, 128, 0, 1, 0, 0, 128, 0, 0 ], [ 1, 0, 0.1, 0.05, 0, 0.66, 0.25, 118, 0, 1, 0, 0, 118, 0, 0 ], [ 14, 0, 0.2, 0.05, 0, 0.66, 0.5, ...
[ "from django.conf import settings", "from django.utils.importlib import import_module", "_MODULE_NAMES = getattr(settings, 'GAE_SETTINGS_MODULES', ())", "FIELD_INDEXES = None", "def get_indexes():\n global FIELD_INDEXES\n if FIELD_INDEXES is None:\n field_indexes = {}\n for name in _MODU...
from google.appengine.api.memcache import *
[ [ 1, 0, 1, 1, 0, 0.66, 0, 901, 0, 1, 0, 0, 901, 0, 0 ] ]
[ "from google.appengine.api.memcache import *" ]
import os, sys parent_dir = os.path.abspath(os.path.dirname(os.path.dirname(os.path.dirname(__file__)))) if parent_dir not in sys.path: sys.path.insert(0, parent_dir) # Initialize Django from djangoappengine.main import main as gaemain # Import and run the actual handler from google.appengine.ext.deferred.handler import main if __name__ == '__main__': main()
[ [ 1, 0, 0.0769, 0.0769, 0, 0.66, 0, 688, 0, 2, 0, 0, 688, 0, 0 ], [ 14, 0, 0.2308, 0.0769, 0, 0.66, 0.2, 129, 3, 1, 0, 0, 142, 10, 4 ], [ 4, 0, 0.3462, 0.1538, 0, 0...
[ "import os, sys", "parent_dir = os.path.abspath(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))", "if parent_dir not in sys.path:\n sys.path.insert(0, parent_dir)", " sys.path.insert(0, parent_dir)", "from djangoappengine.main import main as gaemain", "from google.appengine.ext.deferred...
#!/usr/bin/env python from django.core.management import execute_manager try: import settings # Assumed to be in the same directory. except ImportError: import sys sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__) sys.exit(1) if __name__ == "__main__": execute_manager(settings)
[ [ 1, 0, 0.1818, 0.0909, 0, 0.66, 0, 879, 0, 1, 0, 0, 879, 0, 0 ], [ 7, 0, 0.5, 0.5455, 0, 0.66, 0.5, 0, 0, 1, 0, 0, 0, 0, 2 ], [ 1, 1, 0.3636, 0.0909, 1, 0.21, ...
[ "from django.core.management import execute_manager", "try:\n import settings # Assumed to be in the same directory.\nexcept ImportError:\n import sys\n sys.stderr.write(\"Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\\nYou'll have to run dj...
from django import forms class ContactForm(forms.Form): message = forms.CharField(widget=forms.Textarea) subject = forms.CharField(widget=forms.HiddenInput) sender = forms.EmailField() cc_myself = forms.BooleanField(required=False)
[ [ 1, 0, 0.1429, 0.1429, 0, 0.66, 0, 294, 0, 1, 0, 0, 294, 0, 0 ], [ 3, 0, 0.7143, 0.7143, 0, 0.66, 1, 340, 0, 0, 0, 0, 953, 0, 4 ], [ 14, 1, 0.5714, 0.1429, 1, 0.88...
[ "from django import forms", "class ContactForm(forms.Form):\n message = forms.CharField(widget=forms.Textarea)\n subject = forms.CharField(widget=forms.HiddenInput)\n sender = forms.EmailField()\n cc_myself = forms.BooleanField(required=False)", " message = forms.CharField(widget=forms.T...
from django.conf.urls.defaults import * urlpatterns = patterns('contact.views', (r'^contactForm', 'contactForm') )
[ [ 1, 0, 0.2, 0.2, 0, 0.66, 0, 341, 0, 1, 0, 0, 341, 0, 0 ], [ 14, 0, 0.8, 0.6, 0, 0.66, 1, 990, 3, 2, 0, 0, 75, 10, 1 ] ]
[ "from django.conf.urls.defaults import *", "urlpatterns = patterns('contact.views',\n (r'^contactForm', 'contactForm')\n)" ]
from django.conf.urls.defaults import * from django.contrib.auth.forms import AuthenticationForm from django.views.generic.simple import direct_to_template from django.views.generic.simple import redirect_to handler500 = 'djangotoolbox.errorviews.server_error' urlpatterns = patterns('', (r'^$', direct_to_template, {'template': 'frontpage.html'}), (r'^home$', direct_to_template, {'template': 'frontpage.html'}), (r'^toolbar_page?', direct_to_template, {'template': 'toolbar_page.html'}), (r'^favicon\.ico$', redirect_to, {'url': '/media/pics/favicon.ico'}), (r'^algos/' , include('algos.urls')), (r'^projects/' , include('projects.urls')), (r'^articles/' , include('articles.urls')), (r'^tractament/' , include('tractament.urls')), (r'^linies/' , include('linies.urls')), (r'^massmedia/' , include('massmedia.urls')), (r'^contact/' , include('contact.urls')) )
[ [ 1, 0, 0.0455, 0.0455, 0, 0.66, 0, 341, 0, 1, 0, 0, 341, 0, 0 ], [ 1, 0, 0.0909, 0.0455, 0, 0.66, 0.2, 579, 0, 1, 0, 0, 579, 0, 0 ], [ 1, 0, 0.1364, 0.0455, 0, 0.6...
[ "from django.conf.urls.defaults import *", "from django.contrib.auth.forms import AuthenticationForm", "from django.views.generic.simple import direct_to_template", "from django.views.generic.simple import redirect_to", "handler500 = 'djangotoolbox.errorviews.server_error'", "urlpatterns = patterns('',\n ...
from .fields import ListField, SetField, DictField, EmbeddedModelField from django.db import models, connections from django.db.models import Q from django.db.utils import DatabaseError from django.test import TestCase from django.utils import unittest class ListModel(models.Model): floating_point = models.FloatField() names = ListField(models.CharField(max_length=500)) names_with_default = ListField(models.CharField(max_length=500), default=[]) names_nullable = ListField(models.CharField(max_length=500), null=True) class OrderedListModel(models.Model): ordered_ints = ListField(models.IntegerField(max_length=500), default=[], ordering=lambda x: x, null=True) ordered_nullable = ListField(ordering=lambda x:x, null=True) class SetModel(models.Model): setfield = SetField(models.IntegerField()) supports_dicts = getattr(connections['default'].features, 'supports_dicts', False) if supports_dicts: class DictModel(models.Model): dictfield = DictField(models.IntegerField()) dictfield_nullable = DictField(null=True) auto_now = DictField(models.DateTimeField(auto_now=True)) class EmbeddedModel(models.Model): someint = models.IntegerField() auto_now = models.DateTimeField(auto_now=True) auto_now_add = models.DateTimeField(auto_now_add=True) class EmbeddedModelFieldModel(models.Model): simple = EmbeddedModelField(EmbeddedModel, null=True) typed_list = ListField(EmbeddedModelField(SetModel)) untyped_list = ListField(EmbeddedModelField()) untyped_dict = DictField(EmbeddedModelField()) class FilterTest(TestCase): floats = [5.3, 2.6, 9.1, 1.58] names = [u'Kakashi', u'Naruto', u'Sasuke', u'Sakura',] unordered_ints = [4, 2, 6, 1] def setUp(self): for i, float in enumerate(FilterTest.floats): ListModel(floating_point=float, names=FilterTest.names[:i+1]).save() def test_startswith(self): self.assertEquals([entity.names for entity in ListModel.objects.filter(names__startswith='Sa')], [['Kakashi', 'Naruto', 'Sasuke',], ['Kakashi', 'Naruto', 'Sasuke', 'Sakura',]]) def test_options(self): 
self.assertEqual([entity.names_with_default for entity in ListModel.objects.filter(names__startswith='Sa')], [[], []]) self.assertEqual([entity.names_nullable for entity in ListModel.objects.filter(names__startswith='Sa')], [None, None]) def test_default_value(self): # Make sure default value is copied ListModel().names_with_default.append(2) self.assertEqual(ListModel().names_with_default, []) def test_ordering(self): OrderedListModel(ordered_ints=self.unordered_ints).save() self.assertEqual(OrderedListModel.objects.get().ordered_ints, sorted(self.unordered_ints)) def test_gt(self): # test gt on list self.assertEquals([entity.names for entity in ListModel.objects.filter(names__gt='Kakashi')], [[u'Kakashi', u'Naruto',], [u'Kakashi', u'Naruto', u'Sasuke',], [u'Kakashi', u'Naruto', u'Sasuke', u'Sakura',]]) def test_lt(self): # test lt on list self.assertEquals([entity.names for entity in ListModel.objects.filter(names__lt='Naruto')], [[u'Kakashi',], [u'Kakashi', u'Naruto',], [u'Kakashi', u'Naruto', u'Sasuke',], [u'Kakashi', u'Naruto', u'Sasuke', u'Sakura',]]) def test_gte(self): # test gte on list self.assertEquals([entity.names for entity in ListModel.objects.filter(names__gte='Sakura')], [[u'Kakashi', u'Naruto', u'Sasuke',], [u'Kakashi', u'Naruto', u'Sasuke', u'Sakura',]]) def test_lte(self): # test lte on list self.assertEquals([entity.names for entity in ListModel.objects.filter(names__lte='Kakashi')], [[u'Kakashi',], [u'Kakashi', u'Naruto',], [u'Kakashi', u'Naruto', u'Sasuke',], [u'Kakashi', u'Naruto', u'Sasuke', u'Sakura',]]) def test_equals(self): # test equality filter on list self.assertEquals([entity.names for entity in ListModel.objects.filter(names='Sakura')], [[u'Kakashi', u'Naruto', u'Sasuke', u'Sakura',]]) # test with additonal pk filter (for DBs that have special pk queries) query = ListModel.objects.filter(names='Sakura') self.assertEquals(query.get(pk=query[0].pk).names, [u'Kakashi', u'Naruto', u'Sasuke', u'Sakura',]) def test_is_null(self): 
self.assertEquals(ListModel.objects.filter( names__isnull=True).count(), 0) def test_exclude(self): self.assertEquals([entity.names for entity in ListModel.objects.all().exclude( names__lt='Sakura')], [[u'Kakashi', u'Naruto', u'Sasuke',], [u'Kakashi', u'Naruto', u'Sasuke', u'Sakura',]]) def test_chained_filter(self): self.assertEquals([entity.names for entity in ListModel.objects.filter(names='Sasuke').filter( names='Sakura')], [['Kakashi', 'Naruto', 'Sasuke', 'Sakura'],]) self.assertEquals([entity.names for entity in ListModel.objects.filter(names__startswith='Sa').filter( names='Sakura')], [['Kakashi', 'Naruto', 'Sasuke', 'Sakura']]) # test across multiple columns. On app engine only one filter is allowed # to be an inequality filter self.assertEquals([entity.names for entity in ListModel.objects.filter(floating_point=9.1).filter( names__startswith='Sa')], [['Kakashi', 'Naruto', 'Sasuke',],]) def test_setfield(self): setdata = [1, 2, 3, 2, 1] # At the same time test value conversion SetModel(setfield=map(str, setdata)).save() item = SetModel.objects.filter(setfield=3)[0] self.assertEqual(item.setfield, set(setdata)) # This shouldn't raise an error because the default value is # an empty list SetModel().save() @unittest.skipIf(not supports_dicts, "Backend doesn't support dicts") def test_dictfield(self): DictModel(dictfield=dict(a=1, b='55', foo=3.14), auto_now={'a' : None}).save() item = DictModel.objects.get() self.assertEqual(item.dictfield, {u'a' : 1, u'b' : 55, u'foo' : 3}) dt = item.auto_now['a'] self.assertNotEqual(dt, None) item.save() self.assertGreater(DictModel.objects.get().auto_now['a'], dt) # This shouldn't raise an error becaues the default value is # an empty dict DictModel().save() @unittest.skip('Fails with GAE SDK, but passes on production') def test_Q_objects(self): self.assertEquals([entity.names for entity in ListModel.objects.exclude(Q(names__lt='Sakura') | Q(names__gte='Sasuke'))], [['Kakashi', 'Naruto', 'Sasuke', 'Sakura']]) class 
BaseModel(models.Model): pass class ExtendedModel(BaseModel): name = models.CharField(max_length=20) class BaseModelProxy(BaseModel): class Meta: proxy = True class ExtendedModelProxy(ExtendedModel): class Meta: proxy = True class ProxyTest(TestCase): def test_proxy(self): list(BaseModelProxy.objects.all()) def test_proxy_with_inheritance(self): self.assertRaises(DatabaseError, lambda: list(ExtendedModelProxy.objects.all())) class EmbeddedModelFieldTest(TestCase): def _simple_instance(self): EmbeddedModelFieldModel.objects.create(simple=EmbeddedModel(someint='5')) return EmbeddedModelFieldModel.objects.get() def test_simple(self): instance = self._simple_instance() self.assertIsInstance(instance.simple, EmbeddedModel) # Make sure get_prep_value is called: self.assertEqual(instance.simple.someint, 5) # AutoFields' values should not be populated: self.assertEqual(instance.simple.id, None) def test_pre_save(self): # Make sure field.pre_save is called instance = self._simple_instance() self.assertNotEqual(instance.simple.auto_now, None) self.assertNotEqual(instance.simple.auto_now_add, None) auto_now = instance.simple.auto_now auto_now_add = instance.simple.auto_now_add instance.save() instance = EmbeddedModelFieldModel.objects.get() # auto_now_add shouldn't have changed now, but auto_now should. 
self.assertEqual(instance.simple.auto_now_add, auto_now_add) self.assertGreater(instance.simple.auto_now, auto_now) def test_typed_listfield(self): EmbeddedModelFieldModel.objects.create( typed_list=[SetModel(setfield=range(3)), SetModel(setfield=range(9))] ) self.assertIn(5, EmbeddedModelFieldModel.objects.get().typed_list[1].setfield) def test_untyped_listfield(self): EmbeddedModelFieldModel.objects.create(untyped_list=[ EmbeddedModel(someint=7), OrderedListModel(ordered_ints=range(5, 0, -1)), SetModel(setfield=[1, 2, 2, 3]) ]) instances = EmbeddedModelFieldModel.objects.get().untyped_list for instance, cls in zip(instances, [EmbeddedModel, OrderedListModel, SetModel]): self.assertIsInstance(instance, cls) self.assertNotEqual(instances[0].auto_now, None) self.assertEqual(instances[1].ordered_ints, range(1, 6)) def test_untyped_dict(self): EmbeddedModelFieldModel.objects.create(untyped_dict={ 'a' : SetModel(setfield=range(3)), 'b' : DictModel(dictfield={'a' : 1, 'b' : 2}), 'c' : DictModel(dictfield={}, auto_now={'y' : 1}) }) data = EmbeddedModelFieldModel.objects.get().untyped_dict self.assertIsInstance(data['a'], SetModel) self.assertNotEqual(data['c'].auto_now['y'], None) EmbeddedModelFieldTest = unittest.skipIf( not supports_dicts, "Backend doesn't support dicts")( EmbeddedModelFieldTest)
[ [ 1, 0, 0.004, 0.004, 0, 0.66, 0, 358, 0, 4, 0, 0, 358, 0, 0 ], [ 1, 0, 0.0079, 0.004, 0, 0.66, 0.0556, 40, 0, 2, 0, 0, 40, 0, 0 ], [ 1, 0, 0.0119, 0.004, 0, 0.66, ...
[ "from .fields import ListField, SetField, DictField, EmbeddedModelField", "from django.db import models, connections", "from django.db.models import Q", "from django.db.utils import DatabaseError", "from django.test import TestCase", "from django.utils import unittest", "class ListModel(models.Model):\n...
from django.conf import settings
from django.core.serializers.json import DjangoJSONEncoder
from django.http import HttpResponse
from django.utils import simplejson
from django.utils.encoding import force_unicode
from django.utils.functional import Promise


class LazyEncoder(DjangoJSONEncoder):
    """JSON encoder that also knows how to serialize lazy translation
    strings (``Promise`` objects) by forcing them to unicode first."""

    def default(self, obj):
        if not isinstance(obj, Promise):
            return super(LazyEncoder, self).default(obj)
        return force_unicode(obj)


class JSONResponse(HttpResponse):
    """An ``HttpResponse`` that serializes ``pyobj`` to JSON and sets the
    matching content type (charset taken from the project settings)."""

    def __init__(self, pyobj, **kwargs):
        payload = simplejson.dumps(pyobj, cls=LazyEncoder)
        mime = 'application/json; charset=%s' % settings.DEFAULT_CHARSET
        super(JSONResponse, self).__init__(payload, content_type=mime,
                                           **kwargs)


class TextResponse(HttpResponse):
    """An ``HttpResponse`` with a plain-text content type."""

    def __init__(self, string='', **kwargs):
        mime = 'text/plain; charset=%s' % settings.DEFAULT_CHARSET
        super(TextResponse, self).__init__(string, content_type=mime,
                                           **kwargs)
[ [ 1, 0, 0.0385, 0.0385, 0, 0.66, 0, 128, 0, 1, 0, 0, 128, 0, 0 ], [ 1, 0, 0.0769, 0.0385, 0, 0.66, 0.125, 521, 0, 1, 0, 0, 521, 0, 0 ], [ 1, 0, 0.1154, 0.0385, 0, 0...
[ "from django.conf import settings", "from django.core.serializers.json import DjangoJSONEncoder", "from django.http import HttpResponse", "from django.utils import simplejson", "from django.utils.encoding import force_unicode", "from django.utils.functional import Promise", "class LazyEncoder(DjangoJSON...
# All fields except for BlobField written by Jonas Haag <jonas@lophus.org>
from django.db import models
from django.core.exceptions import ValidationError
from django.utils.importlib import import_module

__all__ = ('RawField', 'ListField', 'DictField', 'SetField',
           'BlobField', 'EmbeddedModelField')


class _HandleAssignment(object):
    """
    A placeholder class that provides a way to set the attribute on the model.

    Mirrors what ``models.SubfieldBase`` does: every assignment to the
    attribute is piped through the field's ``to_python``.
    """
    def __init__(self, field):
        self.field = field

    def __get__(self, obj, type=None):
        if obj is None:
            raise AttributeError('Can only be accessed via an instance.')
        return obj.__dict__[self.field.name]

    def __set__(self, obj, value):
        obj.__dict__[self.field.name] = self.field.to_python(value)


class RawField(models.Field):
    """ Generic field to store anything your database backend allows you to. """
    def get_internal_type(self):
        return 'RawField'


class AbstractIterableField(models.Field):
    """
    Abstract field for fields for storing iterable data type like ``list``,
    ``set`` and ``dict``.

    You can pass an instance of a field as the first argument.
    If you do, the iterable items will be piped through the passed field's
    validation and conversion routines, converting the items to the
    appropriate data type.
    """
    def __init__(self, item_field=None, *args, **kwargs):
        if item_field is None:
            item_field = RawField()
        self.item_field = item_field
        default = kwargs.get('default', None if kwargs.get('null') else ())
        if default is not None and not callable(default):
            # ensure a new object is created every time the default is accessed
            kwargs['default'] = lambda: self._type(default)
        super(AbstractIterableField, self).__init__(*args, **kwargs)

    def contribute_to_class(self, cls, name):
        self.item_field.model = cls
        self.item_field.name = name
        super(AbstractIterableField, self).contribute_to_class(cls, name)
        metaclass = getattr(self.item_field, '__metaclass__', None)
        # FIX: fields without a __metaclass__ attribute yield None here, and
        # issubclass(None, ...) raises TypeError -- guard against it so plain
        # item fields (e.g. the default RawField) don't crash model creation.
        if metaclass is not None and issubclass(metaclass, models.SubfieldBase):
            setattr(cls, self.name, _HandleAssignment(self))

    def db_type(self, connection):
        item_db_type = self.item_field.db_type(connection=connection)
        return '%s:%s' % (self.__class__.__name__, item_db_type)

    def _convert(self, func, values, *args, **kwargs):
        # Apply func to each item, rebuilding the container with self._type;
        # non-iterable values (e.g. None) pass through untouched.
        if isinstance(values, (list, tuple, set)):
            return self._type(func(value, *args, **kwargs) for value in values)
        return values

    def to_python(self, value):
        return self._convert(self.item_field.to_python, value)

    def pre_save(self, model_instance, add):
        # item_field.pre_save expects a model instance carrying the value
        # under `attname`; fake one up per item so fields like auto_now
        # DateTimeField still work inside the iterable.
        class _FakeInstance(object):
            pass
        fake_instance = _FakeInstance()

        def wrapper(value):
            assert not hasattr(self.item_field, 'attname')
            fake_instance.value = value
            self.item_field.attname = 'value'
            try:
                return self.item_field.pre_save(fake_instance, add)
            finally:
                del self.item_field.attname

        return self._convert(wrapper, getattr(model_instance, self.attname))

    def get_db_prep_value(self, value, connection, prepared=False):
        return self._convert(self.item_field.get_db_prep_value, value,
                             connection=connection, prepared=prepared)

    def get_db_prep_save(self, value, connection):
        return self._convert(self.item_field.get_db_prep_save, value,
                             connection=connection)

    def validate(self, values, model_instance):
        try:
            iter(values)
        except TypeError:
            raise ValidationError('Value of type %r is not iterable' %
                                  type(values))

    def formfield(self, **kwargs):
        raise NotImplementedError('No form field implemented for %r' %
                                  type(self))


class ListField(AbstractIterableField):
    """
    Field representing a Python ``list``.

    If the optional keyword argument `ordering` is given, it must be a
    callable that is passed to :meth:`list.sort` as `key` argument. If
    `ordering` is given, the items in the list will be sorted before sending
    them to the database.
    """
    _type = list

    def __init__(self, *args, **kwargs):
        self.ordering = kwargs.pop('ordering', None)
        if self.ordering is not None and not callable(self.ordering):
            raise TypeError("'ordering' has to be a callable or None, "
                            "not of type %r" % type(self.ordering))
        super(ListField, self).__init__(*args, **kwargs)

    def _convert(self, func, values, *args, **kwargs):
        values = super(ListField, self)._convert(func, values, *args, **kwargs)
        if values is not None and self.ordering is not None:
            values.sort(key=self.ordering)
        return values


class SetField(AbstractIterableField):
    """
    Field representing a Python ``set``.
    """
    _type = set


class DictField(AbstractIterableField):
    """
    Field representing a Python ``dict``.

    The field type conversions described in :class:`AbstractIterableField`
    only affect values of the dictionary, not keys.

    Depending on the backend, keys that aren't strings might not be allowed.
    """
    _type = dict

    def _convert(self, func, values, *args, **kwargs):
        if values is None:
            return None
        return dict((key, func(value, *args, **kwargs))
                    for key, value in values.iteritems())

    def validate(self, values, model_instance):
        if not isinstance(values, dict):
            raise ValidationError('Value is of type %r. Should be a dict.' %
                                  type(values))


class BlobField(models.Field):
    """
    A field for storing blobs of binary data.

    The value might either be a string (or something that can be converted to
    a string), or a file-like object.

    In the latter case, the object has to provide a ``read`` method from which
    the blob is read.
    """
    def get_internal_type(self):
        return 'BlobField'

    def formfield(self, **kwargs):
        # A file widget is provided, but use model FileField or ImageField
        # for storing specific files most of the time
        from .widgets import BlobWidget
        from django.forms import FileField
        defaults = {'form_class': FileField, 'widget': BlobWidget}
        defaults.update(kwargs)
        return super(BlobField, self).formfield(**defaults)

    def get_db_prep_value(self, value, connection, prepared=False):
        if hasattr(value, 'read'):
            return value.read()
        else:
            return str(value)

    def get_db_prep_lookup(self, lookup_type, value, connection,
                           prepared=False):
        raise TypeError("BlobFields do not support lookups")

    def value_to_string(self, obj):
        return str(self._get_val_from_obj(obj))


class EmbeddedModelField(models.Field):
    """
    Field that allows you to embed a model instance.

    :param embedded_model: The (optional) model class that shall be embedded;
        if omitted, the concrete class is recorded alongside the data so it
        can be reconstructed in :meth:`to_python`.
    """
    __metaclass__ = models.SubfieldBase

    def __init__(self, embedded_model=None, *args, **kwargs):
        self.embedded_model = embedded_model
        kwargs.setdefault('default', None)
        super(EmbeddedModelField, self).__init__(*args, **kwargs)

    def db_type(self, connection):
        return 'DictField:RawField'

    def pre_save(self, model_instance, add):
        # Returns an (instance, field-data dict) pair; get_db_prep_value
        # needs the instance to resolve each field for value conversion.
        embedded_instance = super(EmbeddedModelField, self).pre_save(model_instance, add)
        if embedded_instance is None:
            return None, None
        if self.embedded_model is not None and \
                not isinstance(embedded_instance, self.embedded_model):
            # FIX: report the expected model class itself, not its metaclass
            # (the old message passed type(self.embedded_model)).
            raise TypeError("Expected instance of type %r, not %r" %
                            (self.embedded_model, type(embedded_instance)))
        data = dict((field.name, field.pre_save(embedded_instance, add))
                    for field in embedded_instance._meta.fields)
        return embedded_instance, data

    def get_db_prep_value(self, value, **kwargs):
        # ``value`` is the (instance, data dict) pair built by pre_save().
        # (Unpacked in the body rather than in the signature -- the Py2-only
        # tuple-parameter syntax is invalid in later Python versions.)
        embedded_instance, embedded_dict = value
        if embedded_dict is None:
            return None
        values = dict()
        for name, value in embedded_dict.iteritems():
            field = embedded_instance._meta.get_field(name)
            values[name] = field.get_db_prep_value(value, **kwargs)
        if self.embedded_model is None:
            # untyped field: record the concrete class so to_python can
            # rebuild the right model
            values.update({'_module': embedded_instance.__class__.__module__,
                           '_model': embedded_instance.__class__.__name__})
        return values

    def to_python(self, values):
        if not isinstance(values, dict):
            return values
        module, model = values.pop('_module', None), values.pop('_model', None)
        if module is not None:
            return getattr(import_module(module), model)(**values)
        return self.embedded_model(**values)
[ [ 1, 0, 0.0128, 0.0043, 0, 0.66, 0, 40, 0, 1, 0, 0, 40, 0, 0 ], [ 1, 0, 0.0171, 0.0043, 0, 0.66, 0.0909, 160, 0, 1, 0, 0, 160, 0, 0 ], [ 1, 0, 0.0214, 0.0043, 0, 0....
[ "from django.db import models", "from django.core.exceptions import ValidationError", "from django.utils.importlib import import_module", "__all__ = ('RawField', 'ListField', 'DictField', 'SetField',\n 'BlobField', 'EmbeddedModelField')", "class _HandleAssignment(object):\n \"\"\"\n A placeh...
from django.conf import settings
from django.http import HttpResponseRedirect
from django.utils.cache import patch_cache_control

# Path prefixes, read once from the project settings at import time.
LOGIN_REQUIRED_PREFIXES = getattr(settings, 'LOGIN_REQUIRED_PREFIXES', ())
NO_LOGIN_REQUIRED_PREFIXES = getattr(settings, 'NO_LOGIN_REQUIRED_PREFIXES', ())


class LoginRequiredMiddleware(object):
    """
    Redirects to login page if request path begins with a
    LOGIN_REQUIRED_PREFIXES prefix. You can also specify
    NO_LOGIN_REQUIRED_PREFIXES which take precedence.
    """
    def process_request(self, request):
        path = request.path
        # Whitelisted prefixes win outright.
        if any(path.startswith(prefix)
               for prefix in NO_LOGIN_REQUIRED_PREFIXES):
            return None
        protected = any(path.startswith(prefix)
                        for prefix in LOGIN_REQUIRED_PREFIXES)
        if protected and not request.user.is_authenticated():
            from django.contrib.auth.views import redirect_to_login
            return redirect_to_login(request.get_full_path())
        return None


class RedirectMiddleware(object):
    """
    A static redirect middleware. Mostly useful for hosting providers that
    automatically setup an alternative domain for your website. You might
    not want anyone to access the site via those possibly well-known URLs.
    """
    def process_request(self, request):
        host = request.get_host().split(':')[0]
        # Turn off redirects when in debug mode, running unit tests, or
        # when handling an App Engine cron job.
        skip = (settings.DEBUG or
                host == 'testserver' or
                not getattr(settings, 'ALLOWED_DOMAINS', None) or
                request.META.get('HTTP_X_APPENGINE_CRON') == 'true')
        if skip:
            return
        if host not in settings.ALLOWED_DOMAINS:
            return HttpResponseRedirect('http://' + settings.ALLOWED_DOMAINS[0])


class NoHistoryCacheMiddleware(object):
    """
    If user is authenticated we disable browser caching of pages in history.
    """
    def process_response(self, request, response):
        # Only touch responses that don't already carry caching headers.
        if 'Expires' in response or 'Cache-Control' in response:
            return response
        if hasattr(request, 'session') and request.user.is_authenticated():
            patch_cache_control(response, no_store=True, no_cache=True,
                                must_revalidate=True, max_age=0)
        return response
[ [ 1, 0, 0.0189, 0.0189, 0, 0.66, 0, 128, 0, 1, 0, 0, 128, 0, 0 ], [ 1, 0, 0.0377, 0.0189, 0, 0.66, 0.1429, 779, 0, 1, 0, 0, 779, 0, 0 ], [ 1, 0, 0.0566, 0.0189, 0, ...
[ "from django.conf import settings", "from django.http import HttpResponseRedirect", "from django.utils.cache import patch_cache_control", "LOGIN_REQUIRED_PREFIXES = getattr(settings, 'LOGIN_REQUIRED_PREFIXES', ())", "NO_LOGIN_REQUIRED_PREFIXES = getattr(settings, 'NO_LOGIN_REQUIRED_PREFIXES', ())", "class...
from django.conf import settings
from django.core.cache import cache
from django.contrib.sites.models import Site
from djangotoolbox.utils import make_tls_property

_default_site_id = getattr(settings, 'SITE_ID', None)
# Replace settings.SITE_ID with a thread-local property so each request
# thread can see its own site id.
SITE_ID = settings.__class__.SITE_ID = make_tls_property()


class DynamicSiteIDMiddleware(object):
    """Sets settings.SITE_ID based on request's domain"""

    def process_request(self, request):
        host = request.get_host()
        # Ignore port if it's 80 or 443; keep it for non-standard ports.
        if ':' in host:
            name, port = host.split(':')
            domain = host if int(port) not in (80, 443) else name
        else:
            domain = host.split(':')[0]
        # Domains are case insensitive
        domain = domain.lower()

        # The resolved site id is cached per domain.
        cache_key = 'Site:domain:%s' % domain
        cached = cache.get(cache_key)
        if cached:
            SITE_ID.value = cached
            return

        site = self._site_for_domain(domain)
        # Add site if it doesn't exist
        if not site and getattr(settings, 'CREATE_SITES_AUTOMATICALLY', True):
            site = Site(domain=domain, name=domain)
            site.save()
        # Set SITE_ID for this thread/request
        SITE_ID.value = site.pk if site else _default_site_id
        cache.set(cache_key, SITE_ID.value, 5 * 60)

    def _site_for_domain(self, domain):
        """Return the Site for domain, falling back to the with/without
        'www.' variant; None if neither exists."""
        try:
            return Site.objects.get(domain=domain)
        except Site.DoesNotExist:
            pass
        if domain.startswith('www.'):
            fallback_domain = domain[4:]
        else:
            fallback_domain = 'www.' + domain
        try:
            return Site.objects.get(domain=fallback_domain)
        except Site.DoesNotExist:
            return None
[ [ 1, 0, 0.0172, 0.0172, 0, 0.66, 0, 128, 0, 1, 0, 0, 128, 0, 0 ], [ 1, 0, 0.0345, 0.0172, 0, 0.66, 0.1667, 734, 0, 1, 0, 0, 734, 0, 0 ], [ 1, 0, 0.0517, 0.0172, 0, ...
[ "from django.conf import settings", "from django.core.cache import cache", "from django.contrib.sites.models import Site", "from djangotoolbox.utils import make_tls_property", "_default_site_id = getattr(settings, 'SITE_ID', None)", "SITE_ID = settings.__class__.SITE_ID = make_tls_property()", "class Dy...
from .utils import object_list_to_table, equal_lists
from django.test import TestCase
from django.test.simple import DjangoTestSuiteRunner, DjangoTestRunner
import sys
try:
    from StringIO import StringIO
except ImportError:
    from cStringIO import StringIO


class ModelTestCase(TestCase):
    """
    A test case for models that provides an easy way to validate the DB
    contents against a given list of row-values.

    You have to specify the model to validate using the 'model' attribute:

    class MyTestCase(ModelTestCase):
        model = MyModel
    """
    def validate_state(self, columns, *state_table):
        """
        Validates that the DB contains exactly the values given in the state
        table. The list of columns is given in the columns tuple.

        Example:
        self.validate_state(
            ('a', 'b', 'c'),
            (1, 2, 3),
            (11, 12, 13),
        )
        validates that the table contains exactly two rows and that their
        'a', 'b', and 'c' attributes are 1, 2, 3 for one row and 11, 12, 13
        for the other row. The order of the rows doesn't matter.
        """
        # NOTE(review): self.model.all() looks like an App-Engine-style
        # manager; for plain Django models this would be objects.all() --
        # left as-is to preserve behavior.
        current_state = object_list_to_table(columns, self.model.all())[1:]
        if equal_lists(current_state, state_table):
            return
        # Dump both states to help diagnose the mismatch, then fail.
        print('DB state not valid:')
        print('Current state:')
        print(columns)
        for state in current_state:
            print(state)
        print('Should be:')
        for state in state_table:
            print(state)
        self.fail('DB state not valid')


class CapturingTestRunner(DjangoTestRunner):
    """Test runner that captures stdout/stderr during each test and appends
    the captured output to the failure/error message."""

    def _makeResult(self):
        result = super(CapturingTestRunner, self)._makeResult()
        real_stdout = sys.stdout
        real_stderr = sys.stderr

        def extend_error(errors):
            # getvalue() only exists on the replacement StringIO buffers;
            # if the streams were already restored there's nothing captured.
            try:
                captured_stdout = sys.stdout.getvalue()
                captured_stderr = sys.stderr.getvalue()
            except AttributeError:
                captured_stdout = captured_stderr = ''
            sys.stdout = real_stdout
            sys.stderr = real_stderr
            failed_test, message = errors[-1]
            if captured_stdout:
                message += '\n--------------- Captured stdout: ---------------\n'
                message += captured_stdout
            if captured_stderr:
                message += '\n--------------- Captured stderr: ---------------\n'
                message += captured_stderr
            if captured_stdout or captured_stderr:
                message += '\n--------------- End captured output ---------------\n\n'
            errors[-1] = (failed_test, message)

        def override(func):
            # Monkey-patch `result`, keeping the original method reachable
            # as an attribute on the replacement.
            func.orig = getattr(result, func.__name__)
            setattr(result, func.__name__, func)
            return func

        @override
        def startTest(test):
            startTest.orig(test)
            sys.stdout = StringIO()
            sys.stderr = StringIO()

        @override
        def addSuccess(test):
            addSuccess.orig(test)
            sys.stdout = real_stdout
            sys.stderr = real_stderr

        @override
        def addError(test, err):
            addError.orig(test, err)
            extend_error(result.errors)

        @override
        def addFailure(test, err):
            addFailure.orig(test, err)
            extend_error(result.failures)

        return result


class CapturingTestSuiteRunner(DjangoTestSuiteRunner):
    """Suite runner wiring in :class:`CapturingTestRunner`."""

    def run_suite(self, suite, **kwargs):
        runner = CapturingTestRunner(verbosity=self.verbosity,
                                     failfast=self.failfast)
        return runner.run(suite)
[ [ 1, 0, 0.0096, 0.0096, 0, 0.66, 0, 970, 0, 2, 0, 0, 970, 0, 0 ], [ 1, 0, 0.0192, 0.0096, 0, 0.66, 0.1429, 944, 0, 1, 0, 0, 944, 0, 0 ], [ 1, 0, 0.0288, 0.0096, 0, ...
[ "from .utils import object_list_to_table, equal_lists", "from django.test import TestCase", "from django.test.simple import DjangoTestSuiteRunner, DjangoTestRunner", "import sys", "try:\n from StringIO import StringIO\nexcept ImportError:\n from cStringIO import StringIO", " from StringIO import ...
def make_tls_property(default=None):
    """Creates a class-wide instance property with a thread-specific value."""
    class TLSProperty(object):
        def __init__(self):
            from threading import local
            self.local = local()

        def __get__(self, instance, cls):
            # NOTE: truthiness test, so falsy instances also get the
            # descriptor itself back -- preserved from the original.
            if not instance:
                return self
            return self.value

        def __set__(self, instance, value):
            self.value = value

        def _get_value(self):
            return getattr(self.local, 'value', default)

        def _set_value(self, value):
            self.local.value = value

        # The per-thread payload lives on the descriptor's thread-local.
        value = property(_get_value, _set_value)

    return TLSProperty()


def getattr_by_path(obj, attr, *default):
    """Like getattr(), but can go down a hierarchy like 'attr.subattr'"""
    node = obj
    for name in attr.split('.'):
        # With a default supplied, a missing attribute short-circuits;
        # without one, getattr raises AttributeError as usual.
        if default and not hasattr(node, name):
            return default[0]
        node = getattr(node, name)
        # Callables along the path are invoked (e.g. methods, managers).
        if callable(node):
            node = node()
    return node


def subdict(data, *attrs):
    """Returns a subset of the keys of a dictionary."""
    return dict((key, data[key]) for key in attrs)


def equal_lists(left, right):
    """
    Compares two lists and returns True if they contain the same elements,
    but doesn't require that they have the same order.
    """
    remaining = list(right)
    if len(left) != len(remaining):
        return False
    for item in left:
        try:
            remaining.remove(item)
        except ValueError:
            return False
    return True


def object_list_to_table(headings, dict_list):
    """
    Converts objects to table-style list of rows with heading:
    Example:
    x.a = 1
    x.b = 2
    x.c = 3
    y.a = 11
    y.b = 12
    y.c = 13
    object_list_to_table(('a', 'b', 'c'), [x, y])
    results in the following (dict keys reordered for better readability):
    [
        ('a', 'b', 'c'),
        (1, 2, 3),
        (11, 12, 13),
    ]
    """
    rows = [tuple(getattr_by_path(obj, heading, None)
                  for heading in headings)
            for obj in dict_list]
    return [headings] + rows


def dict_list_to_table(headings, dict_list):
    """
    Converts dict to table-style list of rows with heading:
    Example:
    dict_list_to_table(('a', 'b', 'c'),
        [{'a': 1, 'b': 2, 'c': 3}, {'a': 11, 'b': 12, 'c': 13}])
    results in the following (dict keys reordered for better readability):
    [
        ('a', 'b', 'c'),
        (1, 2, 3),
        (11, 12, 13),
    ]
    """
    rows = [tuple(row[heading] for heading in headings)
            for row in dict_list]
    return [headings] + rows
[ [ 2, 0, 0.1223, 0.234, 0, 0.66, 0, 317, 0, 1, 1, 0, 0, 0, 4 ], [ 8, 1, 0.0213, 0.0106, 1, 0.92, 0, 0, 1, 0, 0, 0, 0, 0, 0 ], [ 3, 1, 0.1223, 0.1915, 1, 0.92, 0....
[ "def make_tls_property(default=None):\n \"\"\"Creates a class-wide instance property with a thread-specific value.\"\"\"\n class TLSProperty(object):\n def __init__(self):\n from threading import local\n self.local = local()\n\n def __get__(self, instance, cls):", " \"...
from django import forms
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User, Group


class UserForm(forms.ModelForm):
    """Admin form exposing a reduced set of User fields."""
    class Meta:
        model = User
        fields = ('username', 'email', 'first_name', 'last_name',
                  'is_active', 'is_staff', 'is_superuser')


class CustomUserAdmin(UserAdmin):
    # Drop the stock fieldsets (they reference fields the reduced form
    # doesn't expose) and let the custom form drive the layout.
    fieldsets = None
    form = UserForm


# Swap the default admin registrations for the customized User admin.
for existing in (User, Group):
    admin.site.unregister(existing)
admin.site.register(User, CustomUserAdmin)
[ [ 1, 0, 0.0556, 0.0556, 0, 0.66, 0, 294, 0, 1, 0, 0, 294, 0, 0 ], [ 1, 0, 0.1111, 0.0556, 0, 0.66, 0.125, 302, 0, 1, 0, 0, 302, 0, 0 ], [ 1, 0, 0.1667, 0.0556, 0, 0...
[ "from django import forms", "from django.contrib import admin", "from django.contrib.auth.admin import UserAdmin", "from django.contrib.auth.models import User, Group", "class UserForm(forms.ModelForm):\n class Meta:\n model = User\n fields = ('username', 'email', 'first_name', 'last_name',...
from django.forms import widgets
from django.template.defaultfilters import filesizeformat
from django.utils.safestring import mark_safe


class BlobWidget(widgets.FileInput):
    """File-upload widget that also displays the current blob's size."""

    def render(self, name, value, attrs=None):
        # FIX: was a bare `except:` which swallowed every exception;
        # only len() failures (no value / unsized value) should fall
        # back to a size of 0.
        try:
            blob_size = len(value)
        except (TypeError, AttributeError):
            blob_size = 0
        blob_size = filesizeformat(blob_size)
        # FIX: forward the caller-supplied attrs instead of the
        # hard-coded attrs=None that discarded them.
        original = super(BlobWidget, self).render(name, value, attrs=attrs)
        return mark_safe('%s<p>Current size: %s</p>' % (original, blob_size))
[ [ 1, 0, 0.0714, 0.0714, 0, 0.66, 0, 666, 0, 1, 0, 0, 666, 0, 0 ], [ 1, 0, 0.1429, 0.0714, 0, 0.66, 0.3333, 913, 0, 1, 0, 0, 913, 0, 0 ], [ 1, 0, 0.2143, 0.0714, 0, ...
[ "from django.forms import widgets", "from django.template.defaultfilters import filesizeformat", "from django.utils.safestring import mark_safe", "class BlobWidget(widgets.FileInput):\n def render(self, name, value, attrs=None):\n try:\n blob_size = len(value)\n except:\n ...
from django.db.backends.creation import BaseDatabaseCreation


class NonrelDatabaseCreation(BaseDatabaseCreation):
    """Maps Django field internal types to the abstract nonrel column
    types understood by the non-relational backends."""

    data_types = {
        # keys / numbers
        'AutoField': 'integer',
        'OneToOneField': 'integer',
        'IntegerField': 'integer',
        'PositiveIntegerField': 'integer',
        'PositiveSmallIntegerField': 'integer',
        'SmallIntegerField': 'integer',
        'BigIntegerField': 'long',
        'FloatField': 'float',
        'DecimalField': 'decimal:%(max_digits)s,%(decimal_places)s',
        # booleans
        'BooleanField': 'bool',
        'NullBooleanField': 'bool',
        # short text
        'CharField': 'text',
        'CommaSeparatedIntegerField': 'text',
        'EmailField': 'text',
        'FileField': 'text',
        'FilePathField': 'text',
        'ImageField': 'text',
        'IPAddressField': 'text',
        'SlugField': 'text',
        'URLField': 'text',
        # long text
        'TextField': 'longtext',
        'XMLField': 'longtext',
        # dates and times
        'DateField': 'date',
        'DateTimeField': 'datetime',
        'TimeField': 'time',
        # binary / passthrough
        'BlobField': 'blob',
        'RawField': 'raw',
    }
[ [ 1, 0, 0.0303, 0.0303, 0, 0.66, 0, 340, 0, 1, 0, 0, 340, 0, 0 ], [ 3, 0, 0.5455, 0.9394, 0, 0.66, 1, 2, 0, 0, 0, 0, 484, 0, 0 ], [ 14, 1, 0.5606, 0.9091, 1, 0.65, ...
[ "from django.db.backends.creation import BaseDatabaseCreation", "class NonrelDatabaseCreation(BaseDatabaseCreation):\n data_types = {\n 'AutoField': 'integer',\n 'BigIntegerField': 'long',\n 'BooleanField': 'bool',\n 'CharField': 'text',\n 'CommaSeparat...
from django.conf import settings
from django.db.models.sql import aggregates as sqlaggregates
from django.db.models.sql.compiler import SQLCompiler
from django.db.models.sql.constants import LOOKUP_SEP, MULTI, SINGLE
from django.db.models.sql.where import AND, OR, Constraint
from django.db.utils import DatabaseError, IntegrityError
from django.utils.tree import Node
import random

# In-memory implementations of the Django field lookups, used to emulate
# filters the backend can't push down. 'exact' special-cases list/tuple
# column values (membership test) because iterable fields store several
# values per column.
EMULATED_OPS = {
    'exact': lambda x, y: y in x if isinstance(x, (list,tuple)) else x == y,
    'iexact': lambda x, y: x.lower() == y.lower(),
    'startswith': lambda x, y: x.startswith(y),
    'istartswith': lambda x, y: x.lower().startswith(y.lower()),
    'isnull': lambda x, y: x is None if y else x is not None,
    'in': lambda x, y: x in y,
    'lt': lambda x, y: x < y,
    'lte': lambda x, y: x <= y,
    'gt': lambda x, y: x > y,
    'gte': lambda x, y: x >= y,
}

class NonrelQuery(object):
    """
    Base class for backend-specific queries. Subclasses implement the raw
    fetch/count/delete/order_by/add_filter primitives; this class supplies
    the shared Where-tree decoding and in-memory matching/ordering helpers.
    """
    # ----------------------------------------------
    # Public API
    # ----------------------------------------------
    def __init__(self, compiler, fields):
        self.fields = fields
        self.compiler = compiler
        self.connection = compiler.connection
        self.query = self.compiler.query
        # Toggled while descending into negated subtrees of the Where tree.
        self._negated = False

    def fetch(self, low_mark=0, high_mark=None):
        # Yield result entities in the [low_mark, high_mark) slice.
        raise NotImplementedError('Not implemented')

    def count(self, limit=None):
        raise NotImplementedError('Not implemented')

    def delete(self):
        raise NotImplementedError('Not implemented')

    def order_by(self, ordering):
        raise NotImplementedError('Not implemented')

    # Used by add_filters()
    def add_filter(self, column, lookup_type, negated, db_type, value):
        raise NotImplementedError('Not implemented')

    # This is just a default implementation. You might want to override this
    # in case your backend supports OR queries
    def add_filters(self, filters):
        """Traverses the given Where tree and adds the filters to this query"""
        if filters.negated:
            self._negated = not self._negated

        if not self._negated and filters.connector != AND:
            raise DatabaseError('Only AND filters are supported')

        # Remove unneeded children from tree
        children = self._get_children(filters.children)

        if self._negated and filters.connector != OR and len(children) > 1:
            raise DatabaseError("When negating a whole filter subgroup "
                                "(e.g., a Q object) the subgroup filters must "
                                "be connected via OR, so the non-relational "
                                "backend can convert them like this: "
                                '"not (a OR b) => (not a) AND (not b)".')

        for child in children:
            if isinstance(child, Node):
                # Subtree: recurse with the current negation state.
                self.add_filters(child)
                continue

            column, lookup_type, db_type, value = self._decode_child(child)
            self.add_filter(column, lookup_type, self._negated, db_type, value)

        if filters.negated:
            self._negated = not self._negated

    # ----------------------------------------------
    # Internal API for reuse by subclasses
    # ----------------------------------------------
    def _decode_child(self, child):
        # Unpack one Where-tree leaf into (column, lookup, db_type, value),
        # rejecting anything that would require a JOIN.
        constraint, lookup_type, annotation, value = child
        packed, value = constraint.process(lookup_type, value, self.connection)
        alias, column, db_type = packed
        if alias and alias != self.query.model._meta.db_table:
            raise DatabaseError("This database doesn't support JOINs "
                                "and multi-table inheritance.")
        value = self._normalize_lookup_value(value, annotation, lookup_type)
        return column, lookup_type, db_type, value

    def _normalize_lookup_value(self, value, annotation, lookup_type):
        # Django fields always return a list (see Field.get_db_prep_lookup)
        # except if get_db_prep_lookup got overridden by a subclass
        if lookup_type not in ('in', 'range', 'year') and \
                isinstance(value, (tuple, list)):
            if len(value) > 1:
                raise DatabaseError('Filter lookup type was: %s. Expected the '
                                    'filters value not to be a list. Only "in"-filters '
                                    'can be used with lists.'
                                    % lookup_type)
            elif lookup_type == 'isnull':
                value = annotation
            else:
                value = value[0]

        # Normalize string subclasses (e.g. SafeString) to plain str/unicode.
        if isinstance(value, unicode):
            value = unicode(value)
        elif isinstance(value, str):
            value = str(value)

        # Strip the %/_ anchors Django adds for LIKE-style lookups.
        if lookup_type in ('startswith', 'istartswith'):
            value = value[:-1]
        elif lookup_type in ('endswith', 'iendswith'):
            value = value[1:]
        elif lookup_type in ('contains', 'icontains'):
            value = value[1:-1]

        return value

    def _get_children(self, children):
        # Filter out nodes that were automatically added by sql.Query, but are
        # not necessary with emulated negation handling code
        result = []
        for child in children:
            if isinstance(child, tuple):
                constraint = child[0]
                lookup_type = child[1]
                if lookup_type == 'isnull' and constraint.field is None:
                    continue
            result.append(child)
        return result

    def _matches_filters(self, entity, filters):
        # In-memory evaluation of a Where tree against one result entity.
        # Filters without rules match everything
        if not filters.children:
            return True

        result = filters.connector == AND
        for child in filters.children:
            if isinstance(child, Node):
                submatch = self._matches_filters(entity, child)
            else:
                constraint, lookup_type, annotation, value = child
                packed, value = constraint.process(lookup_type, value,
                                                   self.connection)
                alias, column, db_type = packed
                if alias != self.query.model._meta.db_table:
                    raise DatabaseError("This database doesn't support JOINs "
                                        "and multi-table inheritance.")

                # Django fields always return a list (see Field.get_db_prep_lookup)
                # except if get_db_prep_lookup got overridden by a subclass
                if lookup_type != 'in' and isinstance(value, (tuple, list)):
                    if len(value) > 1:
                        raise DatabaseError('Filter lookup type was: %s. '
                                            'Expected the filters value not to be a list. '
                                            'Only "in"-filters can be used with lists.'
                                            % lookup_type)
                    elif lookup_type == 'isnull':
                        value = annotation
                    else:
                        value = value[0]

                submatch = EMULATED_OPS[lookup_type](entity[column], value)

            # Short-circuit as soon as the connector's outcome is decided.
            if filters.connector == OR and submatch:
                result = True
                break
            elif filters.connector == AND and not submatch:
                result = False
                break

        if filters.negated:
            return not result
        return result

    def _order_in_memory(self, lhs, rhs):
        # cmp()-style comparator applying the query's ordering to two
        # entities; '?' yields a random ordering.
        for order in self.compiler._get_ordering():
            if LOOKUP_SEP in order:
                raise DatabaseError("JOINs in ordering not supported (%s)" %
                                    order)
            if order == '?':
                result = random.choice([1, 0, -1])
            else:
                column = order.lstrip('-')
                result = cmp(lhs.get(column), rhs.get(column))
                if order.startswith('-'):
                    result *= -1
            if result != 0:
                return result
        return 0

    def convert_value_from_db(self, db_type, value):
        return self.compiler.convert_value_from_db(db_type, value)

    def convert_value_for_db(self, db_type, value):
        return self.compiler.convert_value_for_db(db_type, value)

class NonrelCompiler(SQLCompiler):
    """
    Base class for non-relational compilers. Provides in-memory filter matching
    and ordering. Entities are assumed to be dictionaries where the keys are
    column names.
    """
    # ----------------------------------------------
    # Public API
    # ----------------------------------------------
    def results_iter(self):
        """
        Returns an iterator over the results from executing this query.
        """
        self.check_query()
        fields = self.get_fields()
        low_mark = self.query.low_mark
        high_mark = self.query.high_mark
        for entity in self.build_query(fields).fetch(low_mark, high_mark):
            yield self._make_result(entity, fields)

    def has_results(self):
        return self.get_count(check_exists=True)

    def execute_sql(self, result_type=MULTI):
        """
        Handles aggregate/count queries
        """
        aggregates = self.query.aggregate_select.values()
        # Simulate a count()
        if aggregates:
            assert len(aggregates) == 1
            aggregate = aggregates[0]
            assert isinstance(aggregate, sqlaggregates.Count)
            meta = self.query.get_meta()
            assert aggregate.col == '*' or \
                   aggregate.col == (meta.db_table, meta.pk.column)
            count = self.get_count()
            if result_type is SINGLE:
                return [count]
            elif result_type is MULTI:
                return [[count]]
        raise NotImplementedError('The database backend only supports count() queries')

    # ----------------------------------------------
    # Additional NonrelCompiler API
    # ----------------------------------------------
    def _make_result(self, entity, fields):
        # Convert one backend entity (dict) into an ordered row of field
        # values, applying per-field db->python conversion.
        result = []
        for field in fields:
            if not field.null and entity.get(field.column,
                                             field.get_default()) is None:
                raise DatabaseError("Non-nullable field %s can't be None!" %
                                    field.name)
            result.append(self.convert_value_from_db(field.db_type(
                connection=self.connection),
                entity.get(field.column, field.get_default())))
        return result

    def check_query(self):
        # Reject SQL-only features: multiple aliases (JOINs), DISTINCT,
        # extra(), HAVING.
        if (len([a for a in self.query.alias_map
                 if self.query.alias_refcount[a]]) > 1 or
                self.query.distinct or self.query.extra or self.query.having):
            raise DatabaseError('This query is not supported by the database.')

    def get_count(self, check_exists=False):
        """
        Counts matches using the current filter constraints.
        """
        # For an existence check a single match suffices.
        if check_exists:
            high_mark = 1
        else:
            high_mark = self.query.high_mark
        return self.build_query().count(high_mark)

    def build_query(self, fields=None):
        # Assemble the backend-specific NonrelQuery (filters + ordering).
        if fields is None:
            fields = self.get_fields()
        query = self.query_class(self, fields)
        query.add_filters(self.query.where)
        query.order_by(self._get_ordering())

        # This at least satisfies the most basic unit tests
        if settings.DEBUG:
            self.connection.queries.append({'sql': repr(query)})
        return query

    def get_fields(self):
        """
        Returns the fields which should get loaded from the backend by self.query
        """
        # We only set this up here because
        # related_select_fields isn't populated until
        # execute_sql() has been called.
        if self.query.select_fields:
            fields = self.query.select_fields + self.query.related_select_fields
        else:
            fields = self.query.model._meta.fields
        # If the field was deferred, exclude it from being passed
        # into `resolve_columns` because it wasn't selected.
        only_load = self.deferred_to_columns()
        if only_load:
            db_table = self.query.model._meta.db_table
            fields = [f for f in fields if db_table in only_load and
                      f.column in only_load[db_table]]

        # Resolve proxies to the concrete model before the inheritance check.
        query_model = self.query.model
        if query_model._meta.proxy:
            query_model = query_model._meta.proxy_for_model

        for field in fields:
            if field.model._meta != query_model._meta:
                raise DatabaseError('Multi-table inheritance is not supported '
                                    'by non-relational DBs.')
        return fields

    def _get_ordering(self):
        # Normalize the query's ordering into a flat list of column names,
        # '-'-prefixed for descending, honoring standard_ordering.
        if not self.query.default_ordering:
            ordering = self.query.order_by
        else:
            ordering = self.query.order_by or self.query.get_meta().ordering
        result = []
        for order in ordering:
            if LOOKUP_SEP in order:
                raise DatabaseError("Ordering can't span tables on non-relational backends (%s)" % order)
            if order == '?':
                raise DatabaseError("Randomized ordering isn't supported by the backend")

            order = order.lstrip('+')

            descending = order.startswith('-')
            name = order.lstrip('-')
            if name == 'pk':
                name = self.query.get_meta().pk.name
                order = '-' + name if descending else name

            if self.query.standard_ordering:
                result.append(order)
            else:
                # Reversed ordering: flip the direction of every term.
                if descending:
                    result.append(name)
                else:
                    result.append('-' + name)
        return result

class NonrelInsertCompiler(object):
    """Mixin implementing execute_sql for INSERTs via a backend insert()."""
    def execute_sql(self, return_id=False):
        data = {}
        for (field, value), column in zip(self.query.values,
                                          self.query.columns):
            if field is not None:
                if not field.null and value is None:
                    raise DatabaseError("You can't set %s (a non-nullable "
                                        "field) to None!" % field.name)
                value = self.convert_value_for_db(field.db_type(
                    connection=self.connection), value)
            data[column] = value
        return self.insert(data, return_id=return_id)

class NonrelUpdateCompiler(object):
    def execute_sql(self, result_type=MULTI):
        # TODO: We don't yet support QuerySet.update() in Django-nonrel
        raise NotImplementedError('No updates')

class NonrelDeleteCompiler(object):
    """Mixin implementing DELETE by fetching pks and deleting the batch."""
    def execute_sql(self, result_type=MULTI):
        self.build_query([self.query.get_meta().pk]).delete()
[ [ 1, 0, 0.0028, 0.0028, 0, 0.66, 0, 128, 0, 1, 0, 0, 128, 0, 0 ], [ 1, 0, 0.0055, 0.0028, 0, 0.66, 0.0769, 841, 0, 1, 0, 0, 841, 0, 0 ], [ 1, 0, 0.0083, 0.0028, 0, ...
[ "from django.conf import settings", "from django.db.models.sql import aggregates as sqlaggregates", "from django.db.models.sql.compiler import SQLCompiler", "from django.db.models.sql.constants import LOOKUP_SEP, MULTI, SINGLE", "from django.db.models.sql.where import AND, OR, Constraint", "from django.db...
import datetime from django.db.backends import BaseDatabaseFeatures, BaseDatabaseOperations, \ BaseDatabaseWrapper, BaseDatabaseClient, BaseDatabaseValidation, \ BaseDatabaseIntrospection from .creation import NonrelDatabaseCreation class NonrelDatabaseFeatures(BaseDatabaseFeatures): can_return_id_from_insert = True supports_unspecified_pk = False supports_regex_backreferencing = True supports_date_lookup_using_string = False supports_timezones = False distinguishes_insert_from_update = False supports_deleting_related_objects = False string_based_auto_field = False supports_dicts = False def _supports_transactions(self): return False class NonrelDatabaseOperations(BaseDatabaseOperations): def __init__(self, connection): self.connection = connection super(NonrelDatabaseOperations, self).__init__() def quote_name(self, name): return name def value_to_db_date(self, value): # value is a date here, no need to check it return value def value_to_db_datetime(self, value): # value is a datetime here, no need to check it return value def value_to_db_time(self, value): # value is a time here, no need to check it return value def prep_for_like_query(self, value): return value def prep_for_iexact_query(self, value): return value def check_aggregate_support(self, aggregate): from django.db.models.sql.aggregates import Count if not isinstance(aggregate, Count): raise NotImplementedError("This database does not support %r " "aggregates" % type(aggregate)) def year_lookup_bounds(self, value): return [datetime.datetime(value, 1, 1, 0, 0, 0, 0), datetime.datetime(value+1, 1, 1, 0, 0, 0, 0)] def value_to_db_auto(self, value): """ Transform a value to an object compatible with the AutoField required by the backend driver for auto columns. 
""" if self.connection.features.string_based_auto_field: if value is None: return None return unicode(value) return super(NonrelDatabaseOperations, self).value_to_db_auto(value) class NonrelDatabaseClient(BaseDatabaseClient): pass class NonrelDatabaseValidation(BaseDatabaseValidation): pass class NonrelDatabaseIntrospection(BaseDatabaseIntrospection): def table_names(self): """Returns a list of names of all tables that exist in the database.""" return self.django_table_names() class FakeCursor(object): def __getattribute__(self, name): raise NotImplementedError('Cursors not supported') def __setattr__(self, name, value): raise NotImplementedError('Cursors not supported') class NonrelDatabaseWrapper(BaseDatabaseWrapper): def _cursor(self): return FakeCursor()
[ [ 1, 0, 0.0111, 0.0111, 0, 0.66, 0, 426, 0, 1, 0, 0, 426, 0, 0 ], [ 1, 0, 0.0333, 0.0333, 0, 0.66, 0.1111, 981, 0, 6, 0, 0, 981, 0, 0 ], [ 1, 0, 0.0667, 0.0111, 0, ...
[ "import datetime", "from django.db.backends import BaseDatabaseFeatures, BaseDatabaseOperations, \\\n BaseDatabaseWrapper, BaseDatabaseClient, BaseDatabaseValidation, \\\n BaseDatabaseIntrospection", "from .creation import NonrelDatabaseCreation", "class NonrelDatabaseFeatures(BaseDatabaseFeatures):\n ...
from django import http from django.template import Context, RequestContext, loader def server_error(request, template_name='500.html'): """ 500 error handler. Templates: `500.html` Context: request_path The path of the requested URL (e.g., '/app/pages/bad_page/') """ t = loader.get_template(template_name) # You need to create a 500.html template. return http.HttpResponseServerError(t.render(RequestContext(request, {'request_path': request.path})))
[ [ 1, 0, 0.0714, 0.0714, 0, 0.66, 0, 294, 0, 1, 0, 0, 294, 0, 0 ], [ 1, 0, 0.1429, 0.0714, 0, 0.66, 0.5, 213, 0, 3, 0, 0, 213, 0, 0 ], [ 2, 0, 0.6429, 0.7857, 0, 0.6...
[ "from django import http", "from django.template import Context, RequestContext, loader", "def server_error(request, template_name='500.html'):\n \"\"\"\n 500 error handler.\n\n Templates: `500.html`\n Context:\n request_path\n The path of the requested URL (e.g., '/app/pages/bad_p...
from djangoappengine.settings_base import * import os SECRET_KEY = '=r-$b*8hglm+858&9t043hlm6-&6-3d3vfc4((7yd0dbrakhvi' LANGUAGE_CODE = 'ca' INSTALLED_APPS = ( 'djangoappengine', 'djangotoolbox', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'projects', 'articles', 'algos', 'tractament', 'linies', 'massmedia', 'contact' ) MIDDLEWARE_CLASSES = ( 'django.contrib.sessions.middleware.SessionMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', ) TEMPLATE_CONTEXT_PROCESSORS = ( 'django.contrib.auth.context_processors.auth', 'django.core.context_processors.request', ) LOGIN_REDIRECT_URL = '/' ADMIN_MEDIA_PREFIX = '/media/admin/' MEDIA_ROOT = os.path.join(os.path.dirname(__file__), 'media') TEMPLATE_DIRS = (os.path.join(os.path.dirname(__file__), 'templates'),) ROOT_URLCONF = 'urls'
[ [ 1, 0, 0.025, 0.025, 0, 0.66, 0, 423, 0, 1, 0, 0, 423, 0, 0 ], [ 1, 0, 0.075, 0.025, 0, 0.66, 0.0909, 688, 0, 1, 0, 0, 688, 0, 0 ], [ 14, 0, 0.125, 0.025, 0, 0.66,...
[ "from djangoappengine.settings_base import *", "import os", "SECRET_KEY = '=r-$b*8hglm+858&9t043hlm6-&6-3d3vfc4((7yd0dbrakhvi'", "LANGUAGE_CODE = 'ca'", "INSTALLED_APPS = (\n 'djangoappengine',\n 'djangotoolbox',\n 'django.contrib.auth',\n 'django.contrib.contenttypes',\n 'django.contrib.sess...
""" Mendeley Open API Example Client Copyright (c) 2010, Mendeley Ltd. <copyright@mendeley.com> Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. For details of the Mendeley Open API see http://dev.mendeley.com/ Example usage: >>> from pprint import pprint >>> from mendeley_client import MendeleyClient >>> mendeley = MendeleyClient('<consumer_key>', '<secret_key>') >>> try: >>> mendeley.load_keys() >>> except IOError: >>> mendeley.get_required_keys() >>> mendeley.save_keys() >>> results = mendeley.search('science') >>> pprint(results['documents'][0]) {u'authors': None, u'doi': None, u'id': u'8c18bd50-6f07-11df-b8f0-001e688e2dcb', u'mendeley_url': u'http://localhost/research//', u'publication_outlet': None, u'title': None, u'year': None} >>> documents = mendeley.library() >>> pprint(documents) {u'current_page': 0, u'document_ids': [u'86175', u'86176', u'86174', u'86177'], u'items_per_page': 20, u'total_pages': 1, u'total_results': 4} >>> details = mendeley.document_details(documents['document_ids'][0]) >>> pprint(details) {u'authors': [u'Ben Dowling'], u'discipline': {u'discipline': u'Computer and Information Science', u'subdiscipline': None}, u'tags': ['nosql'], u'title': u'NoSQL(EU) Write Up', u'year': 2010} """ from pprint import pprint import oauth2 as oauth import pickle import httplib import json import urllib class 
OAuthClient(object): """General purpose OAuth client""" def __init__(self, consumer_key, consumer_secret, options = {}): # Set values based on provided options, or revert to defaults self.host = options.get('host', 'www.mendeley.com') self.port = options.get('port', 80) self.access_token_url = options.get('access_token_url', '/oauth/access_token/') self.request_token_url = options.get('access_token_url', '/oauth/request_token/') self.authorize_url = options.get('access_token_url', '/oauth/authorize/') if self.port == 80: self.authority = self.host else: self.authority = "%s:%d" % (self.host, self.port) self.consumer = oauth.Consumer(consumer_key, consumer_secret) def get(self, path, token=None): url = "http://%s%s" % (self.host, path) request = oauth.Request.from_consumer_and_token( self.consumer, token, http_method='GET', http_url=url, ) return self._send_request(request, token) def post(self, path, post_params, token=None): url = "http://%s%s" % (self.host, path) request = oauth.Request.from_consumer_and_token( self.consumer, token, http_method='POST', http_url=url, parameters=post_params ) return self._send_request(request, token) def delete(self, path, token=None): url = "http://%s%s" % (self.host, path) request = oauth.Request.from_consumer_and_token( self.consumer, token, http_method='DELETE', http_url=url, ) return self._send_request(request, token) def put(self, path, token=None, body=None, body_hash=None, headers=None): url = "http://%s%s" % (self.host, path) request = oauth.Request.from_consumer_and_token( self.consumer, token, http_method='PUT', http_url=url, parameters={'oauth_body_hash': body_hash} ) return self._send_request(request, token, body, headers) def request_token(self): response = self.get(self.request_token_url).read() token = oauth.Token.from_string(response) return token def authorize(self, token, callback_url = "oob"): http_url='http://%s%s' % (self.authority, self.authorize_url) request = 
oauth.Request.from_token_and_callback(token=token, callback=callback_url, http_url='http://%s%s' % (self.authority, self.authorize_url)) return request.to_url() def access_token(self, request_token): response = self.get(self.access_token_url, request_token).read() return oauth.Token.from_string(response) def _send_request(self, request, token=None): request.sign_request(oauth.SignatureMethod_HMAC_SHA1(), self.consumer, token) conn = self._get_conn() if request.method == 'POST': body=request.to_postdata() conn.request('POST', request.url, body=request.to_postdata(), headers={"Content-type": "application/x-www-form-urlencoded"}) elif request.method == 'PUT': final_headers = request.to_header() if extra_headers is not None: final_headers.update(extra_headers) conn.request('PUT', request.url, body, headers=final_headers) elif request.method == 'DELETE': conn.request('DELETE', request.url, headers=request.to_header()) else: conn.request('GET', request.url, headers=request.to_header()) return conn.getresponse() def _get_conn(self): return httplib.HTTPConnection("%s:%d" % (self.host, self.port)) class MendeleyRemoteMethod(object): """Call a Mendeley OpenAPI method and parse and handle the response""" def __init__(self, details, callback): self.details = details # Argument, URL and additional details. 
self.callback = callback # Callback to actually do the remote call def __call__(self, *args, **kwargs): url = self.details['url'] # Get the required arguments if self.details.get('required'): required_args = dict(zip(self.details.get('required'), args)) if len(required_args) < len(self.details.get('required')): raise ValueError('Missing required args') for (key, value) in required_args.items(): required_args[key] = urllib.quote_plus(str(value)) url = url % required_args # Optional arguments must be provided as keyword args optional_args = {} for optional in self.details.get('optional', []): if kwargs.has_key(optional): optional_args[optional] = kwargs[optional] # Do the callback - will return a HTTPResponse object response = self.callback(url, self.details.get('access_token_required', False), self.details.get('method', 'get'), optional_args) status = response.status body = response.read() if status == 500: raise Exception(body) if status != 204: data = json.loads(body) return data class MendeleyClient(object): # API method definitions. 
Used to create MendeleyRemoteMethod instances methods = { ######## Public Resources ######## 'details': { 'required': ['id'], 'optional': ['type'], 'url': '/oapi/documents/details/%(id)s/', }, 'categories': { 'url': '/oapi/documents/categories/', }, 'subcategories': { 'url': '/oapi/documents/subcategories/%(id)s/', 'required': ['id'], }, 'search': { 'url': '/oapi/documents/search/%(query)s/', 'required': ['query'], 'optional': ['page', 'items'], }, 'tagged': { 'url': '/oapi/documents/tagged/%(tag)s/', 'required': ['tag'], 'optional': ['cat', 'subcat', 'page', 'items'], }, 'related': { 'url': '/oapi/documents/related/%(id)s/', 'required': ['id'], 'optional': ['page', 'items'], }, 'authored': { 'url': '/oapi/documents/authored/%(author)s/', 'required': ['author'], 'optional': ['page', 'items'], }, 'public_groups': { 'url': '/oapi/documents/groups/', 'optional': ['page', 'items', 'cat'] }, 'public_group_details': { 'url': '/oapi/documents/groups/%(id)s/', 'required': ['id'], }, 'public_group_docs': { 'url': '/oapi/documents/groups/%(id)s/docs/', 'required': ['id'], 'optional': ['details', 'page', 'items'], }, 'public_group_people': { 'url': '/oapi/documents/groups/%(id)s/people/', 'required': ['id'], }, 'author_stats': { 'url': '/oapi/stats/authors/', 'optional': ['discipline', 'upandcoming'], }, 'paper_stats': { 'url': '/oapi/stats/papers/', 'optional': ['discipline', 'upandcoming'], }, 'publication_stats': { 'url': '/oapi/stats/publications/', 'optional': ['discipline', 'upandcoming'], }, 'tag_stats': { 'url': '/oapi/stats/tags/%(discipline)s/', 'required': ['discipline'], 'optional': ['upandcoming'], }, ######## User Specific Resources ######## 'library_author_stats': { 'url': '/oapi/library/authors/', 'access_token_required': True, }, 'library_tag_stats': { 'url': '/oapi/library/tags/', 'access_token_required': True, }, 'library_publication_stats': { 'url': '/oapi/library/publications/', 'access_token_required': True, }, 'library': { 'url': '/oapi/library/', 
'optional': ['page', 'items'], 'access_token_required': True, }, 'create_document': { 'url': '/oapi/library/documents/', # HACK: 'document' is required, but by making it optional here it'll get POSTed # Unfortunately that means it needs to be a named param when calling this method 'optional': ['document'], 'access_token_required': True, 'method': 'post', }, 'document_details': { 'url': '/oapi/library/documents/%(id)s/', 'required': ['id'], 'access_token_required': True, }, 'documents_authored': { 'url': '/oapi/library/documents/authored/', 'access_token_required': True, }, 'delete_library_document': { 'url': '/oapi/library/documents/%(id)s/', 'required': ['id'], 'access_token_required': True, 'method': 'delete', }, 'contacts': { 'url': '/oapi/profiles/contacts/', 'access_token_required': True, 'method': 'get', }, 'contacts_of_contact': { 'url': '/oapi/profiles/contacts/%(id)s/', 'required': ['id'], 'access_token_required': True, 'method': 'get', }, 'add_contact': { 'url': '/oapi/profiles/contacts/%(id)s/', 'required': ['id'], 'access_token_required': True, 'method': 'post', }, # Folders methods # 'folders': { 'url': '/oapi/library/folders/', 'access_token_required': True, }, 'folder_documents': { 'url': '/oapi/library/folders/%(id)s/', 'required': ['id'], 'optional': ['page', 'items'], 'access_token_required': True, }, 'create_folder': { 'url': '/oapi/library/folders/', # HACK: 'collection' is required, but by making it optional here it'll get POSTed # Unfortunately that means it needs to be a named param when calling this method 'optional': ['folder'], 'access_token_required': True, 'method': 'post', }, 'delete_folder': { 'url': '/oapi/library/folders/%(id)s/', 'required': ['id'], 'access_token_required': True, 'method': 'delete', }, 'add_document_to_folder': { 'url': '/oapi/library/folders/%(folder_id)s/%(document_id)s/', 'required': ['folder_id', 'document_id'], 'access_token_required': True, 'method': 'post', }, 'delete_document_from_folder': { 'url': 
'/oapi/library/folders/%(folder_id)s/%(document_id)s/', 'required': ['folder_id', 'document_id'], 'access_token_required': True, 'method': 'delete', }, # Groups methods # 'groups': { 'url': '/oapi/library/groups/', 'access_token_required': True, }, 'group_documents': { 'url': '/oapi/library/groups/%(id)s/', 'required': ['id'], 'optional': ['page', 'items'], 'access_token_required': True, }, 'group_doc_details': { 'url': '/oapi/library/groups/%(group_id)s/%(doc_id)s/', 'required': ['group_id', 'doc_id'], 'access_token_required': True, }, 'group_people': { 'url': '/oapi/library/groups/%(id)s/people/', 'required': ['id'], 'access_token_required': True, }, 'create_group': { 'url': '/oapi/library/groups/', 'optional': ['group'], 'access_token_required': True, 'method': 'post', }, 'delete_group': { 'url': '/oapi/library/groups/%(id)s/', 'required': ['id'], 'access_token_required': True, 'method': 'delete', }, 'leave_group': { 'url': '/oapi/library/groups/%(id)s/leave/', 'required': ['id'], 'access_token_required': True, 'method': 'delete', }, 'unfollow_group': { 'url': '/oapi/library/groups/%(id)s/unfollow/', 'required': ['id'], 'access_token_required': True, 'method': 'delete', }, 'delete_group_document': { 'url': '/oapi/library/groups/%(group_id)s/%(document_id)s/', 'required': ['group_id', 'document_id'], 'access_token_required': True, 'method': 'delete', }, ######## DEPRECATED METHODS ######## # Deprecated 'collections': { 'url': '/oapi/library/collections/', 'access_token_required': True, }, # Deprecated 'sharedcollections': { 'url': '/oapi/library/sharedcollections/', 'access_token_required': True, }, # Deprecated 'collection_documents': { 'url': '/oapi/library/collections/%(id)s/', 'required': ['id'], 'optional': ['page', 'items'], 'access_token_required': True, }, # Deprecated 'sharedcollection_documents': { 'url': '/oapi/library/sharedcollections/%(id)s/', 'required': ['id'], 'optional': ['page', 'items'], 'access_token_required': True, }, # Deprecated 
'sharedcollection_members': { 'url': '/oapi/library/sharedcollections/%(id)s/members/', 'required': ['id'], 'access_token_required': True, }, # Deprecated 'delete_collection': { 'url': '/oapi/library/collections/%(id)s/', 'required': ['id'], 'access_token_required': True, 'method': 'delete', }, # Deprecated 'delete_sharedcollection': { 'url': '/oapi/library/sharedcollections/%(id)s/', 'required': ['id'], 'access_token_required': True, 'method': 'delete', }, # Deprecated 'create_collection': { 'url': '/oapi/library/collections/', # HACK: 'collection' is required, but by making it optional here it'll get POSTed # Unfortunately that means it needs to be a named param when calling this method 'optional': ['collection'], 'access_token_required': True, 'method': 'post', }, # Deprecated 'create_sharedcollection': { 'url': '/oapi/library/sharedcollections/', 'optional': ['sharedcollection'], 'access_token_required': True, 'method': 'post', }, # Deprecated 'add_document_to_collection': { 'url': '/oapi/library/collections/add/%(collection_id)s/%(document_id)s/', 'required': ['collection_id', 'document_id'], 'access_token_required': True, 'method': 'post', }, # Deprecated 'remove_document_from_collection': { 'url': '/oapi/library/collections/%(collection_id)s/%(document_id)s/', 'required': ['collection_id', 'document_id'], 'access_token_required': True, 'method': 'delete', }, # Deprecated 'delete_sharedcollection_document': { 'url': '/oapi/library/sharedcollections/%(collection_id)s/%(document_id)s/', 'required': ['collection_id', 'document_id'], 'access_token_required': True, 'method': 'delete', } } def __init__(self, consumer_key, consumer_secret): self.mendeley = OAuthClient(consumer_key, consumer_secret) # Create methods for all of the API calls for method, details in self.methods.items(): setattr(self, method, MendeleyRemoteMethod(details, self.api_request)) def api_request(self, url, access_token_required = False, method = 'get', params = {}): if access_token_required: 
access_token = self.access_token else: access_token = None if method == 'get': if len(params) > 0: url += "?%s" % urllib.urlencode(params) response = self.mendeley.get(url, access_token) elif method == 'delete': response = self.mendeley.delete(url, access_token) elif method == 'put': headers = {'Content-disposition': 'attachment; filename="%s"' % params.get('file_name')} response = self.mendeley.put(url, access_token, params.get('data'), params.get('oauth_body_hash'), headers) else: response = self.mendeley.post(url, params, access_token) return response def get_required_keys(self): self.request_token = self.mendeley.request_token() auth_url = self.mendeley.authorize(self.request_token) print 'Go to the following url to auth the token:\n%s' % (auth_url,) verifier = raw_input('Enter verification code: ') self.request_token.set_verifier(verifier) self.access_token = self.mendeley.access_token(self.request_token) def load_keys(self): data = pickle.load(open('mendeley_api_keys.pkl', 'r')) self.request_token = data['request_token'] self.access_token = data['access_token'] def save_keys(self): data = {'request_token': self.request_token, 'access_token': self.access_token} pickle.dump(data, open('mendeley_api_keys.pkl', 'w'))
[ [ 8, 0, 0.0523, 0.1027, 0, 0.66, 0, 0, 1, 0, 0, 0, 0, 0, 0 ], [ 1, 0, 0.1046, 0.0019, 0, 0.66, 0.1111, 276, 0, 1, 0, 0, 276, 0, 0 ], [ 1, 0, 0.1065, 0.0019, 0, 0.66...
[ "\"\"\"\nMendeley Open API Example Client\n\nCopyright (c) 2010, Mendeley Ltd. <copyright@mendeley.com>\n\nPermission to use, copy, modify, and/or distribute this software for any\npurpose with or without fee is hereby granted, provided that the above\ncopyright notice and this permission notice appear in all copie...
""" The MIT License Copyright (c) 2007-2010 Leah Culver, Joe Stump, Mark Paschal, Vic Fryzel Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ import urllib import time import random import urlparse import hmac import binascii import httplib2 try: from urlparse import parse_qs, parse_qsl except ImportError: from cgi import parse_qs, parse_qsl VERSION = '1.0' # Hi Blaine! 
HTTP_METHOD = 'GET' SIGNATURE_METHOD = 'PLAINTEXT' class Error(RuntimeError): """Generic exception class.""" def __init__(self, message='OAuth error occurred.'): self._message = message @property def message(self): """A hack to get around the deprecation errors in 2.6.""" return self._message def __str__(self): return self._message class MissingSignature(Error): pass def build_authenticate_header(realm=''): """Optional WWW-Authenticate header (401 error)""" return {'WWW-Authenticate': 'OAuth realm="%s"' % realm} def build_xoauth_string(url, consumer, token=None): """Build an XOAUTH string for use in SMTP/IMPA authentication.""" request = Request.from_consumer_and_token(consumer, token, "GET", url) signing_method = SignatureMethod_HMAC_SHA1() request.sign_request(signing_method, consumer, token) params = [] for k, v in sorted(request.iteritems()): if v is not None: params.append('%s="%s"' % (k, escape(v))) return "%s %s %s" % ("GET", url, ','.join(params)) def escape(s): """Escape a URL including any /.""" return urllib.quote(s, safe='~') def generate_timestamp(): """Get seconds since epoch (UTC).""" return int(time.time()) def generate_nonce(length=8): """Generate pseudorandom number.""" return ''.join([str(random.randint(0, 9)) for i in range(length)]) def generate_verifier(length=8): """Generate pseudorandom number.""" return ''.join([str(random.randint(0, 9)) for i in range(length)]) class Consumer(object): """A consumer of OAuth-protected services. The OAuth consumer is a "third-party" service that wants to access protected resources from an OAuth service provider on behalf of an end user. It's kind of the OAuth client. Usually a consumer must be registered with the service provider by the developer of the consumer software. As part of that process, the service provider gives the consumer a *key* and a *secret* with which the consumer software can identify itself to the service. 
The consumer will include its key in each request to identify itself, but will use its secret only when signing requests, to prove that the request is from that particular registered consumer. Once registered, the consumer can then use its consumer credentials to ask the service provider for a request token, kicking off the OAuth authorization process. """ key = None secret = None def __init__(self, key, secret): self.key = key self.secret = secret if self.key is None or self.secret is None: raise ValueError("Key and secret must be set.") def __str__(self): data = {'oauth_consumer_key': self.key, 'oauth_consumer_secret': self.secret} return urllib.urlencode(data) class Token(object): """An OAuth credential used to request authorization or a protected resource. Tokens in OAuth comprise a *key* and a *secret*. The key is included in requests to identify the token being used, but the secret is used only in the signature, to prove that the requester is who the server gave the token to. When first negotiating the authorization, the consumer asks for a *request token* that the live user authorizes with the service provider. The consumer then exchanges the request token for an *access token* that can be used to access protected resources. """ key = None secret = None callback = None callback_confirmed = None verifier = None def __init__(self, key, secret): self.key = key self.secret = secret if self.key is None or self.secret is None: raise ValueError("Key and secret must be set.") def set_callback(self, callback): self.callback = callback self.callback_confirmed = 'true' def set_verifier(self, verifier=None): if verifier is not None: self.verifier = verifier else: self.verifier = generate_verifier() def get_callback_url(self): if self.callback and self.verifier: # Append the oauth_verifier. 
parts = urlparse.urlparse(self.callback) scheme, netloc, path, params, query, fragment = parts[:6] if query: query = '%s&oauth_verifier=%s' % (query, self.verifier) else: query = 'oauth_verifier=%s' % self.verifier return urlparse.urlunparse((scheme, netloc, path, params, query, fragment)) return self.callback def to_string(self): """Returns this token as a plain string, suitable for storage. The resulting string includes the token's secret, so you should never send or store this string where a third party can read it. """ data = { 'oauth_token': self.key, 'oauth_token_secret': self.secret, } if self.callback_confirmed is not None: data['oauth_callback_confirmed'] = self.callback_confirmed return urllib.urlencode(data) @staticmethod def from_string(s): """Deserializes a token from a string like one returned by `to_string()`.""" if not len(s): raise ValueError("Invalid parameter string.") params = parse_qs(s, keep_blank_values=False) if not len(params): raise ValueError("Invalid parameter string.") try: key = params['oauth_token'][0] except Exception: raise ValueError("'oauth_token' not found in OAuth request.") try: secret = params['oauth_token_secret'][0] except Exception: raise ValueError("'oauth_token_secret' not found in " "OAuth request.") token = Token(key, secret) try: token.callback_confirmed = params['oauth_callback_confirmed'][0] except KeyError: pass # 1.0, no callback confirmed. return token def __str__(self): return self.to_string() def setter(attr): name = attr.__name__ def getter(self): try: return self.__dict__[name] except KeyError: raise AttributeError(name) def deleter(self): del self.__dict__[name] return property(getter, attr, deleter) class Request(dict): """The parameters and information for an HTTP request, suitable for authorizing with OAuth credentials. 
When a consumer wants to access a service's protected resources, it does so using a signed HTTP request identifying itself (the consumer) with its key, and providing an access token authorized by the end user to access those resources. """ version = VERSION def __init__(self, method=HTTP_METHOD, url=None, parameters=None): self.method = method self.url = url if parameters is not None: self.update(parameters) @setter def url(self, value): self.__dict__['url'] = value if value is not None: scheme, netloc, path, params, query, fragment = urlparse.urlparse(value) # Exclude default port numbers. if scheme == 'http' and netloc[-3:] == ':80': netloc = netloc[:-3] elif scheme == 'https' and netloc[-4:] == ':443': netloc = netloc[:-4] if scheme not in ('http', 'https'): raise ValueError("Unsupported URL %s (%s)." % (value, scheme)) # Normalized URL excludes params, query, and fragment. self.normalized_url = urlparse.urlunparse((scheme, netloc, path, None, None, None)) else: self.normalized_url = None self.__dict__['url'] = None @setter def method(self, value): self.__dict__['method'] = value.upper() def _get_timestamp_nonce(self): return self['oauth_timestamp'], self['oauth_nonce'] def get_nonoauth_parameters(self): """Get any non-OAuth parameters.""" return dict([(k, v) for k, v in self.iteritems() if not k.startswith('oauth_')]) def to_header(self, realm=''): """Serialize as a header for an HTTPAuth request.""" oauth_params = ((k, v) for k, v in self.items() if k.startswith('oauth_')) stringy_params = ((k, escape(str(v))) for k, v in oauth_params) header_params = ('%s="%s"' % (k, v) for k, v in stringy_params) params_header = ', '.join(header_params) auth_header = 'OAuth realm="%s"' % realm if params_header: auth_header = "%s, %s" % (auth_header, params_header) return {'Authorization': auth_header} def to_postdata(self): """Serialize as post data for a POST request.""" # tell urlencode to deal with sequence values and map them correctly # to resulting querystring. 
for example self["k"] = ["v1", "v2"] will # result in 'k=v1&k=v2' and not k=%5B%27v1%27%2C+%27v2%27%5D return urllib.urlencode(self, True).replace('+', '%20') def to_url(self): """Serialize as a URL for a GET request.""" base_url = urlparse.urlparse(self.url) try: query = base_url.query except AttributeError: # must be python <2.5 query = base_url[4] query = parse_qs(query) for k, v in self.items(): query.setdefault(k, []).append(v) try: scheme = base_url.scheme netloc = base_url.netloc path = base_url.path params = base_url.params fragment = base_url.fragment except AttributeError: # must be python <2.5 scheme = base_url[0] netloc = base_url[1] path = base_url[2] params = base_url[3] fragment = base_url[5] url = (scheme, netloc, path, params, urllib.urlencode(query, True), fragment) return urlparse.urlunparse(url) def get_parameter(self, parameter): ret = self.get(parameter) if ret is None: raise Error('Parameter not found: %s' % parameter) return ret def get_normalized_parameters(self): """Return a string that contains the parameters that must be signed.""" items = [] for key, value in self.iteritems(): if key == 'oauth_signature': continue # 1.0a/9.1.1 states that kvp must be sorted by key, then by value, # so we unpack sequence values into multiple items for sorting. 
if hasattr(value, '__iter__'): items.extend((key, item) for item in value) else: items.append((key, value)) # Include any query string parameters from the provided URL query = urlparse.urlparse(self.url)[4] url_items = self._split_url_string(query).items() non_oauth_url_items = list([(k, v) for k, v in url_items if not k.startswith('oauth_')]) items.extend(non_oauth_url_items) encoded_str = urllib.urlencode(sorted(items)) # Encode signature parameters per Oauth Core 1.0 protocol # spec draft 7, section 3.6 # (http://tools.ietf.org/html/draft-hammer-oauth-07#section-3.6) # Spaces must be encoded with "%20" instead of "+" return encoded_str.replace('+', '%20').replace('%7E', '~') def sign_request(self, signature_method, consumer, token): """Set the signature parameter to the result of sign.""" if 'oauth_consumer_key' not in self: self['oauth_consumer_key'] = consumer.key if token and 'oauth_token' not in self: self['oauth_token'] = token.key self['oauth_signature_method'] = signature_method.name self['oauth_signature'] = signature_method.sign(self, consumer, token) @classmethod def make_timestamp(cls): """Get seconds since epoch (UTC).""" return str(int(time.time())) @classmethod def make_nonce(cls): """Generate pseudorandom number.""" return str(random.randint(0, 100000000)) @classmethod def from_request(cls, http_method, http_url, headers=None, parameters=None, query_string=None): """Combines multiple parameter sources.""" if parameters is None: parameters = {} # Headers if headers and 'Authorization' in headers: auth_header = headers['Authorization'] # Check that the authorization header is OAuth. if auth_header[:6] == 'OAuth ': auth_header = auth_header[6:] try: # Get the parameters from the header. header_params = cls._split_header(auth_header) parameters.update(header_params) except: raise Error('Unable to parse OAuth parameters from ' 'Authorization header.') # GET or POST query string. 
if query_string: query_params = cls._split_url_string(query_string) parameters.update(query_params) # URL parameters. param_str = urlparse.urlparse(http_url)[4] # query url_params = cls._split_url_string(param_str) parameters.update(url_params) if parameters: return cls(http_method, http_url, parameters) return None @classmethod def from_consumer_and_token(cls, consumer, token=None, http_method=HTTP_METHOD, http_url=None, parameters=None): if not parameters: parameters = {} defaults = { 'oauth_consumer_key': consumer.key, 'oauth_timestamp': cls.make_timestamp(), 'oauth_nonce': cls.make_nonce(), 'oauth_version': cls.version, } defaults.update(parameters) parameters = defaults if token: parameters['oauth_token'] = token.key if token.verifier: parameters['oauth_verifier'] = token.verifier return Request(http_method, http_url, parameters) @classmethod def from_token_and_callback(cls, token, callback=None, http_method=HTTP_METHOD, http_url=None, parameters=None): if not parameters: parameters = {} parameters['oauth_token'] = token.key if callback: parameters['oauth_callback'] = callback return cls(http_method, http_url, parameters) @staticmethod def _split_header(header): """Turn Authorization: header into parameters.""" params = {} parts = header.split(',') for param in parts: # Ignore realm parameter. if param.find('realm') > -1: continue # Remove whitespace. param = param.strip() # Split key-value. param_parts = param.split('=', 1) # Remove quotes and unescape the value. 
params[param_parts[0]] = urllib.unquote(param_parts[1].strip('\"')) return params @staticmethod def _split_url_string(param_str): """Turn URL string into parameters.""" parameters = parse_qs(param_str, keep_blank_values=False) for k, v in parameters.iteritems(): parameters[k] = urllib.unquote(v[0]) return parameters class Client(httplib2.Http): """OAuthClient is a worker to attempt to execute a request.""" def __init__(self, consumer, token=None, cache=None, timeout=None, proxy_info=None): if consumer is not None and not isinstance(consumer, Consumer): raise ValueError("Invalid consumer.") if token is not None and not isinstance(token, Token): raise ValueError("Invalid token.") self.consumer = consumer self.token = token self.method = SignatureMethod_HMAC_SHA1() httplib2.Http.__init__(self, cache=cache, timeout=timeout, proxy_info=proxy_info) def set_signature_method(self, method): if not isinstance(method, SignatureMethod): raise ValueError("Invalid signature method.") self.method = method def request(self, uri, method="GET", body=None, headers=None, redirections=httplib2.DEFAULT_MAX_REDIRECTS, connection_type=None): DEFAULT_CONTENT_TYPE = 'application/x-www-form-urlencoded' if not isinstance(headers, dict): headers = {} is_multipart = method == 'POST' and headers.get('Content-Type', DEFAULT_CONTENT_TYPE) != DEFAULT_CONTENT_TYPE if body and method == "POST" and not is_multipart: parameters = dict(parse_qsl(body)) else: parameters = None req = Request.from_consumer_and_token(self.consumer, token=self.token, http_method=method, http_url=uri, parameters=parameters) req.sign_request(self.method, self.consumer, self.token) if method == "POST": headers['Content-Type'] = headers.get('Content-Type', DEFAULT_CONTENT_TYPE) if is_multipart: headers.update(req.to_header()) else: body = req.to_postdata() elif method == "GET": uri = req.to_url() else: headers.update(req.to_header()) return httplib2.Http.request(self, uri, method=method, body=body, headers=headers, 
redirections=redirections, connection_type=connection_type) class Server(object): """A skeletal implementation of a service provider, providing protected resources to requests from authorized consumers. This class implements the logic to check requests for authorization. You can use it with your web server or web framework to protect certain resources with OAuth. """ timestamp_threshold = 300 # In seconds, five minutes. version = VERSION signature_methods = None def __init__(self, signature_methods=None): self.signature_methods = signature_methods or {} def add_signature_method(self, signature_method): self.signature_methods[signature_method.name] = signature_method return self.signature_methods def verify_request(self, request, consumer, token): """Verifies an api call and checks all the parameters.""" version = self._get_version(request) self._check_signature(request, consumer, token) parameters = request.get_nonoauth_parameters() return parameters def build_authenticate_header(self, realm=''): """Optional support for the authenticate header.""" return {'WWW-Authenticate': 'OAuth realm="%s"' % realm} def _get_version(self, request): """Verify the correct version request for this server.""" try: version = request.get_parameter('oauth_version') except: version = VERSION if version and version != self.version: raise Error('OAuth version %s not supported.' % str(version)) return version def _get_signature_method(self, request): """Figure out the signature with some defaults.""" try: signature_method = request.get_parameter('oauth_signature_method') except: signature_method = SIGNATURE_METHOD try: # Get the signature method object. 
signature_method = self.signature_methods[signature_method] except: signature_method_names = ', '.join(self.signature_methods.keys()) raise Error('Signature method %s not supported try one of the following: %s' % (signature_method, signature_method_names)) return signature_method def _get_verifier(self, request): return request.get_parameter('oauth_verifier') def _check_signature(self, request, consumer, token): timestamp, nonce = request._get_timestamp_nonce() self._check_timestamp(timestamp) signature_method = self._get_signature_method(request) try: signature = request.get_parameter('oauth_signature') except: raise MissingSignature('Missing oauth_signature.') # Validate the signature. valid = signature_method.check(request, consumer, token, signature) if not valid: key, base = signature_method.signing_base(request, consumer, token) raise Error('Invalid signature. Expected signature base ' 'string: %s' % base) built = signature_method.sign(request, consumer, token) def _check_timestamp(self, timestamp): """Verify that timestamp is recentish.""" timestamp = int(timestamp) now = int(time.time()) lapsed = now - timestamp if lapsed > self.timestamp_threshold: raise Error('Expired timestamp: given %d and now %s has a ' 'greater difference than threshold %d' % (timestamp, now, self.timestamp_threshold)) class SignatureMethod(object): """A way of signing requests. The OAuth protocol lets consumers and service providers pick a way to sign requests. This interface shows the methods expected by the other `oauth` modules for signing requests. Subclass it and implement its methods to provide a new way to sign requests. """ def signing_base(self, request, consumer, token): """Calculates the string that needs to be signed. This method returns a 2-tuple containing the starting key for the signing and the message to be signed. The latter may be used in error messages to help clients debug their software. 
""" raise NotImplementedError def sign(self, request, consumer, token): """Returns the signature for the given request, based on the consumer and token also provided. You should use your implementation of `signing_base()` to build the message to sign. Otherwise it may be less useful for debugging. """ raise NotImplementedError def check(self, request, consumer, token, signature): """Returns whether the given signature is the correct signature for the given consumer and token signing the given request.""" built = self.sign(request, consumer, token) return built == signature class SignatureMethod_HMAC_SHA1(SignatureMethod): name = 'HMAC-SHA1' def signing_base(self, request, consumer, token): if request.normalized_url is None: raise ValueError("Base URL for request is not set.") sig = ( escape(request.method), escape(request.normalized_url), escape(request.get_normalized_parameters()), ) key = '%s&' % escape(consumer.secret) if token: key += escape(token.secret) raw = '&'.join(sig) return key, raw def sign(self, request, consumer, token): """Builds the base signature string.""" key, raw = self.signing_base(request, consumer, token) # HMAC object. try: from hashlib import sha1 as sha except ImportError: import sha # Deprecated hashed = hmac.new(key, raw, sha) # Calculate the digest base 64. return binascii.b2a_base64(hashed.digest())[:-1] class SignatureMethod_PLAINTEXT(SignatureMethod): name = 'PLAINTEXT' def signing_base(self, request, consumer, token): """Concatenates the consumer key and secret with the token's secret.""" sig = '%s&' % escape(consumer.secret) if token: sig = sig + escape(token.secret) return sig, sig def sign(self, request, consumer, token): key, raw = self.signing_base(request, consumer, token) return raw
[ [ 8, 0, 0.0158, 0.0303, 0, 0.66, 0, 0, 1, 0, 0, 0, 0, 0, 0 ], [ 1, 0, 0.033, 0.0013, 0, 0.66, 0.0357, 614, 0, 1, 0, 0, 614, 0, 0 ], [ 1, 0, 0.0343, 0.0013, 0, 0.66,...
[ "\"\"\"\nThe MIT License\n\nCopyright (c) 2007-2010 Leah Culver, Joe Stump, Mark Paschal, Vic Fryzel\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including withou...
""" The MIT License Copyright (c) 2007-2010 Leah Culver, Joe Stump, Mark Paschal, Vic Fryzel Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ import oauth2 import imaplib class IMAP4_SSL(imaplib.IMAP4_SSL): """IMAP wrapper for imaplib.IMAP4_SSL that implements XOAUTH.""" def authenticate(self, url, consumer, token): if consumer is not None and not isinstance(consumer, oauth2.Consumer): raise ValueError("Invalid consumer.") if token is not None and not isinstance(token, oauth2.Token): raise ValueError("Invalid token.") imaplib.IMAP4_SSL.authenticate(self, 'XOAUTH', lambda x: oauth2.build_xoauth_string(url, consumer, token))
[ [ 8, 0, 0.3, 0.575, 0, 0.66, 0, 0, 1, 0, 0, 0, 0, 0, 0 ], [ 1, 0, 0.625, 0.025, 0, 0.66, 0.3333, 311, 0, 1, 0, 0, 311, 0, 0 ], [ 1, 0, 0.65, 0.025, 0, 0.66, 0.6...
[ "\"\"\"\nThe MIT License\n\nCopyright (c) 2007-2010 Leah Culver, Joe Stump, Mark Paschal, Vic Fryzel\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including withou...
""" The MIT License Copyright (c) 2007-2010 Leah Culver, Joe Stump, Mark Paschal, Vic Fryzel Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ import oauth2 import smtplib import base64 class SMTP(smtplib.SMTP): """SMTP wrapper for smtplib.SMTP that implements XOAUTH.""" def authenticate(self, url, consumer, token): if consumer is not None and not isinstance(consumer, oauth2.Consumer): raise ValueError("Invalid consumer.") if token is not None and not isinstance(token, oauth2.Token): raise ValueError("Invalid token.") self.docmd('AUTH', 'XOAUTH %s' % \ base64.b64encode(oauth2.build_xoauth_string(url, consumer, token)))
[ [ 8, 0, 0.2927, 0.561, 0, 0.66, 0, 0, 1, 0, 0, 0, 0, 0, 0 ], [ 1, 0, 0.6098, 0.0244, 0, 0.66, 0.25, 311, 0, 1, 0, 0, 311, 0, 0 ], [ 1, 0, 0.6341, 0.0244, 0, 0.66, ...
[ "\"\"\"\nThe MIT License\n\nCopyright (c) 2007-2010 Leah Culver, Joe Stump, Mark Paschal, Vic Fryzel\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including withou...
""" Mendeley Open API Example Client Copyright (c) 2010, Mendeley Ltd. <copyright@mendeley.com> Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. For details of the Mendeley Open API see http://dev.mendeley.com/ Example usage: python test.py """ from pprint import pprint from mendeley_client import MendeleyClient import json import os mendeley = MendeleyClient('<insert_consumer_key_here>', '<insert_secret_key_here>') try: mendeley.load_keys() except IOError: mendeley.get_required_keys() mendeley.save_keys() ######################################## ######## Public Resources Tests ######## ######################################## print """ ----------------------------------------------------- Canonical document details -----------------------------------------------------""" response = mendeley.details('cbcca150-6cff-11df-a2b2-0026b95e3eb7') pprint(response) print """ ----------------------------------------------------- Canonical document details DOI look up -----------------------------------------------------""" response = mendeley.details('10.1371%2Fjournal.ppat.1000281', type='doi') pprint(response) print """ ----------------------------------------------------- Canonical document details PubMed Id look up -----------------------------------------------------""" response = mendeley.details('19910365', type='pmid') 
pprint(response) print """ ----------------------------------------------------- Categories -----------------------------------------------------""" response = mendeley.categories() pprint(response) print """ ----------------------------------------------------- Subcategories -----------------------------------------------------""" response = mendeley.subcategories(3) pprint(response) print """ ----------------------------------------------------- Search -----------------------------------------------------""" response = mendeley.search('phiC31', items=10) pprint(response) print """ ----------------------------------------------------- Tagged 'modularity' -----------------------------------------------------""" response = mendeley.tagged('modularity', items=5) pprint(response) print """ ----------------------------------------------------- Tagged 'test' in category 14 -----------------------------------------------------""" response = mendeley.tagged('test', cat=14) pprint(response) print """ ----------------------------------------------------- Tagged 'modularity' in subcategory 'Bioinformatics' -----------------------------------------------------""" response = mendeley.tagged('modularity', subcat=455) pprint(response) print """ ----------------------------------------------------- Related -----------------------------------------------------""" response = mendeley.related('91df2740-6d01-11df-a2b2-0026b95e3eb7') pprint(response) print """ ----------------------------------------------------- Authored by 'Ann Cowan' -----------------------------------------------------""" response = mendeley.authored('Ann Cowan', items=5) pprint(response) print """ ----------------------------------------------------- Public groups -----------------------------------------------------""" response = mendeley.public_groups() pprint(response) groupId = '536181' print """ ----------------------------------------------------- Public group details 
-----------------------------------------------------""" response = mendeley.public_group_details(groupId) pprint(response) print """ ----------------------------------------------------- Public group documents -----------------------------------------------------""" response = mendeley.public_group_docs(groupId) pprint(response) print """ ----------------------------------------------------- Public group people -----------------------------------------------------""" response = mendeley.public_group_people(groupId) pprint(response) print """ ----------------------------------------------------- Author statistics -----------------------------------------------------""" response = mendeley.author_stats() pprint(response) print """ ----------------------------------------------------- Papers statistics -----------------------------------------------------""" response = mendeley.paper_stats() pprint(response) print """ ----------------------------------------------------- Publications outlets statistics -----------------------------------------------------""" response = mendeley.publication_stats() pprint(response) ############################################### ######## User Specific Resources Tests ######## ############################################### print """ ----------------------------------------------------- My Library authors statistics -----------------------------------------------------""" response = mendeley.library_author_stats() pprint(response) print """ ----------------------------------------------------- My Library tag statistics -----------------------------------------------------""" response = mendeley.library_tag_stats() pprint(response) print """ ----------------------------------------------------- My Library publication statistics -----------------------------------------------------""" response = mendeley.library_publication_stats() pprint(response) ### Library ### print 'Library' print """ 
----------------------------------------------------- My Library documents -----------------------------------------------------""" documents = mendeley.library() pprint(documents) print """ ----------------------------------------------------- Create a new library document -----------------------------------------------------""" response = mendeley.create_document(document=json.dumps({'type' : 'Book','title': 'Document creation test', 'year': 2008})) pprint(response) documentId = response['document_id'] print """ ----------------------------------------------------- Document details -----------------------------------------------------""" response = mendeley.document_details(documentId) pprint(response) print """ ----------------------------------------------------- Delete library document -----------------------------------------------------""" response = mendeley.delete_library_document(documentId) pprint(response) print """ ----------------------------------------------------- Documents authored -----------------------------------------------------""" response = mendeley.documents_authored() pprint(response) print """ ----------------------------------------------------- Create new folder -----------------------------------------------------""" response = mendeley.create_folder(folder=json.dumps({'name': 'Test folder creation'})) pprint(response) folderId = response['folder_id'] print """ ----------------------------------------------------- List folders -----------------------------------------------------""" folders = mendeley.folders() pprint(folders) print """ ----------------------------------------------------- Delete folder -----------------------------------------------------""" response = mendeley.delete_folder(folderId) pprint(response) print """ ----------------------------------------------------- Create public invite only group -----------------------------------------------------""" response = mendeley.create_group(group=json.dumps({'name':'Public 
invite only group', 'type': 'invite'})) pprint(response) print """ ----------------------------------------------------- Create public open group -----------------------------------------------------""" response = mendeley.create_group(group=json.dumps({'name':'My awesome public group', 'type': 'open'})) pprint(response) print """ ----------------------------------------------------- Create private group -----------------------------------------------------""" response = mendeley.create_group(group=json.dumps({'name':'Private group test', 'type': 'private'})) pprint(response) print """ ----------------------------------------------------- Current user's contacts -----------------------------------------------------""" response = mendeley.contacts() pprint(response)
[ [ 1, 0, 0.2, 0.2, 0, 0.66, 0, 276, 0, 1, 0, 0, 276, 0, 0 ], [ 1, 0, 0.4, 0.2, 0, 0.66, 0.3333, 493, 0, 1, 0, 0, 493, 0, 0 ], [ 1, 0, 0.6, 0.2, 0, 0.66, 0.6667, ...
[ "from pprint import pprint", "from mendeley_client import MendeleyClient", "import json", "import os" ]
class AlgosSection: titlePage = "under construction"
[ [ 3, 0, 0.75, 1, 0, 0.66, 0, 167, 0, 0, 0, 0, 0, 0, 0 ], [ 14, 1, 1, 0.5, 1, 0.36, 0, 54, 1, 0, 0, 0, 0, 3, 0 ] ]
[ "class AlgosSection:\n titlePage = \"under construction\"", " titlePage = \"under construction\"" ]
from django.conf.urls.defaults import * urlpatterns = patterns('algos.views', (r'^$', 'index'), (r'^sobrealgos$', 'index'), (r'^quisom$', 'quisom'), (r'^contactar$', 'contactar') )
[ [ 1, 0, 0.125, 0.125, 0, 0.66, 0, 341, 0, 1, 0, 0, 341, 0, 0 ], [ 14, 0, 0.6875, 0.75, 0, 0.66, 1, 990, 3, 5, 0, 0, 75, 10, 1 ] ]
[ "from django.conf.urls.defaults import *", "urlpatterns = patterns('algos.views',\n (r'^$', 'index'),\n (r'^sobrealgos$', 'index'),\n (r'^quisom$', 'quisom'),\n (r'^contactar$', 'contactar')\n)" ]
from django.core.cache import cache from django.contrib.auth.forms import UserCreationForm from django.views.generic.simple import direct_to_template from django.http import HttpResponseRedirect from django.views.generic.simple import direct_to_template from algos.models import AlgosSection def index(request): title_page = 'Sobre el grup' return direct_to_template(request, 'algos/index.html', {'title_page' : title_page} ) def quisom(request): title_page = 'Qui som' return direct_to_template(request, 'algos/quisom.html', {'title_page' : title_page} ) def contactar(request): title_page = 'Contactar' return direct_to_template(request, 'algos/contactar.html', {'title_page' : title_page} )
[ [ 1, 0, 0.0526, 0.0526, 0, 0.66, 0, 734, 0, 1, 0, 0, 734, 0, 0 ], [ 1, 0, 0.1053, 0.0526, 0, 0.66, 0.125, 579, 0, 1, 0, 0, 579, 0, 0 ], [ 1, 0, 0.1579, 0.0526, 0, 0...
[ "from django.core.cache import cache", "from django.contrib.auth.forms import UserCreationForm", "from django.views.generic.simple import direct_to_template", "from django.http import HttpResponseRedirect", "from django.views.generic.simple import direct_to_template", "from algos.models import AlgosSectio...
from django.conf.urls.defaults import * urlpatterns = patterns('projects.views', (r'^$', 'index') )
[ [ 1, 0, 0.2, 0.2, 0, 0.66, 0, 341, 0, 1, 0, 0, 341, 0, 0 ], [ 14, 0, 0.8, 0.6, 0, 0.66, 1, 990, 3, 2, 0, 0, 75, 10, 1 ] ]
[ "from django.conf.urls.defaults import *", "urlpatterns = patterns('projects.views',\n (r'^$', 'index')\n)" ]
from django.core.cache import cache from django.contrib.auth.forms import UserCreationForm from django.views.generic.simple import direct_to_template from django.http import HttpResponseRedirect from django.views.generic.simple import direct_to_template from algos.models import AlgosSection def index(request): title_page = 'Projectes' return direct_to_template(request, 'projects/index.html', {'title_page' : title_page} )
[ [ 1, 0, 0.1, 0.1, 0, 0.66, 0, 734, 0, 1, 0, 0, 734, 0, 0 ], [ 1, 0, 0.2, 0.1, 0, 0.66, 0.1667, 579, 0, 1, 0, 0, 579, 0, 0 ], [ 1, 0, 0.3, 0.1, 0, 0.66, 0.3333, ...
[ "from django.core.cache import cache", "from django.contrib.auth.forms import UserCreationForm", "from django.views.generic.simple import direct_to_template", "from django.http import HttpResponseRedirect", "from django.views.generic.simple import direct_to_template", "from algos.models import AlgosSectio...
from django.conf.urls.defaults import * urlpatterns = patterns('linies.views', (r'^$', 'index') )
[ [ 1, 0, 0.2, 0.2, 0, 0.66, 0, 341, 0, 1, 0, 0, 341, 0, 0 ], [ 14, 0, 0.8, 0.6, 0, 0.66, 1, 990, 3, 2, 0, 0, 75, 10, 1 ] ]
[ "from django.conf.urls.defaults import *", "urlpatterns = patterns('linies.views',\n (r'^$', 'index')\n)" ]
from django.core.cache import cache from django.contrib.auth.forms import UserCreationForm from django.views.generic.simple import direct_to_template from django.http import HttpResponseRedirect from django.views.generic.simple import direct_to_template def index(request): title_page = "Recerca" return direct_to_template(request, 'linies/index.html', {'title_page' : title_page} )
[ [ 1, 0, 0.1111, 0.1111, 0, 0.66, 0, 734, 0, 1, 0, 0, 734, 0, 0 ], [ 1, 0, 0.2222, 0.1111, 0, 0.66, 0.2, 579, 0, 1, 0, 0, 579, 0, 0 ], [ 1, 0, 0.3333, 0.1111, 0, 0.6...
[ "from django.core.cache import cache", "from django.contrib.auth.forms import UserCreationForm", "from django.views.generic.simple import direct_to_template", "from django.http import HttpResponseRedirect", "from django.views.generic.simple import direct_to_template", "def index(request):\n title_page ...
import datetime import os import re import sys from django.conf import settings from django.http import HttpResponse, HttpResponseServerError, HttpResponseNotFound from django.template import (Template, Context, TemplateDoesNotExist, TemplateSyntaxError) from django.utils.html import escape from django.utils.importlib import import_module from django.utils.encoding import smart_unicode, smart_str HIDDEN_SETTINGS = re.compile('SECRET|PASSWORD|PROFANITIES_LIST|SIGNATURE') def linebreak_iter(template_source): yield 0 p = template_source.find('\n') while p >= 0: yield p+1 p = template_source.find('\n', p+1) yield len(template_source) + 1 def cleanse_setting(key, value): """Cleanse an individual setting key/value of sensitive content. If the value is a dictionary, recursively cleanse the keys in that dictionary. """ try: if HIDDEN_SETTINGS.search(key): cleansed = '********************' else: if isinstance(value, dict): cleansed = dict((k, cleanse_setting(k, v)) for k,v in value.items()) else: cleansed = value except TypeError: # If the key isn't regex-able, just return as-is. cleansed = value return cleansed def get_safe_settings(): "Returns a dictionary of the settings module, with sensitive settings blurred out." settings_dict = {} for k in dir(settings): if k.isupper(): settings_dict[k] = cleanse_setting(k, getattr(settings, k)) return settings_dict def technical_500_response(request, exc_type, exc_value, tb): """ Create a technical server error response. The last three arguments are the values returned from sys.exc_info() and friends. """ reporter = ExceptionReporter(request, exc_type, exc_value, tb) html = reporter.get_traceback_html() return HttpResponseServerError(html, mimetype='text/html') class ExceptionReporter: """ A class to organize and coordinate reporting on exceptions. 
""" def __init__(self, request, exc_type, exc_value, tb): self.request = request self.exc_type = exc_type self.exc_value = exc_value self.tb = tb self.template_info = None self.template_does_not_exist = False self.loader_debug_info = None # Handle deprecated string exceptions if isinstance(self.exc_type, basestring): self.exc_value = Exception('Deprecated String Exception: %r' % self.exc_type) self.exc_type = type(self.exc_value) def get_traceback_html(self): "Return HTML code for traceback." if issubclass(self.exc_type, TemplateDoesNotExist): from django.template.loader import template_source_loaders self.template_does_not_exist = True self.loader_debug_info = [] for loader in template_source_loaders: try: module = import_module(loader.__module__) source_list_func = module.get_template_sources # NOTE: This assumes exc_value is the name of the template that # the loader attempted to load. template_list = [{'name': t, 'exists': os.path.exists(t)} \ for t in source_list_func(str(self.exc_value))] except (ImportError, AttributeError): template_list = [] if hasattr(loader, '__class__'): loader_name = loader.__module__ + '.' + loader.__class__.__name__ else: loader_name = loader.__module__ + '.' 
+ loader.__name__ self.loader_debug_info.append({ 'loader': loader_name, 'templates': template_list, }) if (settings.TEMPLATE_DEBUG and hasattr(self.exc_value, 'source') and isinstance(self.exc_value, TemplateSyntaxError)): self.get_template_exception_info() frames = self.get_traceback_frames() unicode_hint = '' if issubclass(self.exc_type, UnicodeError): start = getattr(self.exc_value, 'start', None) end = getattr(self.exc_value, 'end', None) if start is not None and end is not None: unicode_str = self.exc_value.args[1] unicode_hint = smart_unicode(unicode_str[max(start-5, 0):min(end+5, len(unicode_str))], 'ascii', errors='replace') from django import get_version t = Template(TECHNICAL_500_TEMPLATE, name='Technical 500 template') c = Context({ 'exception_type': self.exc_type.__name__, 'exception_value': smart_unicode(self.exc_value, errors='replace'), 'unicode_hint': unicode_hint, 'frames': frames, 'lastframe': frames[-1], 'request': self.request, 'settings': get_safe_settings(), 'sys_executable': sys.executable, 'sys_version_info': '%d.%d.%d' % sys.version_info[0:3], 'server_time': datetime.datetime.now(), 'django_version_info': get_version(), 'sys_path' : sys.path, 'template_info': self.template_info, 'template_does_not_exist': self.template_does_not_exist, 'loader_debug_info': self.loader_debug_info, }) return t.render(c) def get_template_exception_info(self): origin, (start, end) = self.exc_value.source template_source = origin.reload() context_lines = 10 line = 0 upto = 0 source_lines = [] before = during = after = "" for num, next in enumerate(linebreak_iter(template_source)): if start >= upto and end <= next: line = num before = escape(template_source[upto:start]) during = escape(template_source[start:end]) after = escape(template_source[end:next]) source_lines.append( (num, escape(template_source[upto:next])) ) upto = next total = len(source_lines) top = max(1, line - context_lines) bottom = min(total, line + 1 + context_lines) self.template_info = { 
'message': self.exc_value.args[0], 'source_lines': source_lines[top:bottom], 'before': before, 'during': during, 'after': after, 'top': top, 'bottom': bottom, 'total': total, 'line': line, 'name': origin.name, } def _get_lines_from_file(self, filename, lineno, context_lines, loader=None, module_name=None): """ Returns context_lines before and after lineno from file. Returns (pre_context_lineno, pre_context, context_line, post_context). """ source = None if loader is not None and hasattr(loader, "get_source"): source = loader.get_source(module_name) if source is not None: source = source.splitlines() if source is None: try: f = open(filename) try: source = f.readlines() finally: f.close() except (OSError, IOError): pass if source is None: return None, [], None, [] encoding = 'ascii' for line in source[:2]: # File coding may be specified. Match pattern from PEP-263 # (http://www.python.org/dev/peps/pep-0263/) match = re.search(r'coding[:=]\s*([-\w.]+)', line) if match: encoding = match.group(1) break source = [unicode(sline, encoding, 'replace') for sline in source] lower_bound = max(0, lineno - context_lines) upper_bound = lineno + context_lines pre_context = [line.strip('\n') for line in source[lower_bound:lineno]] context_line = source[lineno].strip('\n') post_context = [line.strip('\n') for line in source[lineno+1:upper_bound]] return lower_bound, pre_context, context_line, post_context def get_traceback_frames(self): frames = [] tb = self.tb while tb is not None: # support for __traceback_hide__ which is used by a few libraries # to hide internal frames. 
if tb.tb_frame.f_locals.get('__traceback_hide__'): tb = tb.tb_next continue filename = tb.tb_frame.f_code.co_filename function = tb.tb_frame.f_code.co_name lineno = tb.tb_lineno - 1 loader = tb.tb_frame.f_globals.get('__loader__') module_name = tb.tb_frame.f_globals.get('__name__') pre_context_lineno, pre_context, context_line, post_context = self._get_lines_from_file(filename, lineno, 7, loader, module_name) if pre_context_lineno is not None: frames.append({ 'tb': tb, 'filename': filename, 'function': function, 'lineno': lineno + 1, 'vars': tb.tb_frame.f_locals.items(), 'id': id(tb), 'pre_context': pre_context, 'context_line': context_line, 'post_context': post_context, 'pre_context_lineno': pre_context_lineno + 1, }) tb = tb.tb_next if not frames: frames = [{ 'filename': '&lt;unknown&gt;', 'function': '?', 'lineno': '?', 'context_line': '???', }] return frames def format_exception(self): """ Return the same data as from traceback.format_exception. """ import traceback frames = self.get_traceback_frames() tb = [ (f['filename'], f['lineno'], f['function'], f['context_line']) for f in frames ] list = ['Traceback (most recent call last):\n'] list += traceback.format_list(tb) list += traceback.format_exception_only(self.exc_type, self.exc_value) return list def technical_404_response(request, exception): "Create a technical 404 error response. The exception should be the Http404." try: tried = exception.args[0]['tried'] except (IndexError, TypeError, KeyError): tried = [] else: if not tried: # tried exists but is an empty list. The URLconf must've been empty. 
return empty_urlconf(request) t = Template(TECHNICAL_404_TEMPLATE, name='Technical 404 template') c = Context({ 'root_urlconf': settings.ROOT_URLCONF, 'request_path': request.path_info[1:], # Trim leading slash 'urlpatterns': tried, 'reason': smart_str(exception, errors='replace'), 'request': request, 'settings': get_safe_settings(), }) return HttpResponseNotFound(t.render(c), mimetype='text/html') def empty_urlconf(request): "Create an empty URLconf 404 error response." t = Template(EMPTY_URLCONF_TEMPLATE, name='Empty URLConf template') c = Context({ 'project_name': settings.SETTINGS_MODULE.split('.')[0] }) return HttpResponse(t.render(c), mimetype='text/html') # # Templates are embedded in the file so that we know the error handler will # always work even if the template loader is broken. # TECHNICAL_500_TEMPLATE = """ <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <html lang="en"> <head> <meta http-equiv="content-type" content="text/html; charset=utf-8"> <meta name="robots" content="NONE,NOARCHIVE"> <title>{{ exception_type }} at {{ request.path_info|escape }}</title> <style type="text/css"> html * { padding:0; margin:0; } body * { padding:10px 20px; } body * * { padding:0; } body { font:small sans-serif; } body>div { border-bottom:1px solid #ddd; } h1 { font-weight:normal; } h2 { margin-bottom:.8em; } h2 span { font-size:80%; color:#666; font-weight:normal; } h3 { margin:1em 0 .5em 0; } h4 { margin:0 0 .5em 0; font-weight: normal; } table { border:1px solid #ccc; border-collapse: collapse; width:100%; background:white; } tbody td, tbody th { vertical-align:top; padding:2px 3px; } thead th { padding:1px 6px 1px 3px; background:#fefefe; text-align:left; font-weight:normal; font-size:11px; border:1px solid #ddd; } tbody th { width:12em; text-align:right; color:#666; padding-right:.5em; } table.vars { margin:5px 0 2px 40px; } table.vars td, table.req td { font-family:monospace; } table td.code { width:100%; } 
table td.code div { overflow:hidden; } table.source th { color:#666; } table.source td { font-family:monospace; white-space:pre; border-bottom:1px solid #eee; } ul.traceback { list-style-type:none; } ul.traceback li.frame { margin-bottom:1em; } div.context { margin: 10px 0; } div.context ol { padding-left:30px; margin:0 10px; list-style-position: inside; } div.context ol li { font-family:monospace; white-space:pre; color:#666; cursor:pointer; } div.context ol.context-line li { color:black; background-color:#ccc; } div.context ol.context-line li span { float: right; } div.commands { margin-left: 40px; } div.commands a { color:black; text-decoration:none; } #summary { background: #ffc; } #summary h2 { font-weight: normal; color: #666; } #explanation { background:#eee; } #template, #template-not-exist { background:#f6f6f6; } #template-not-exist ul { margin: 0 0 0 20px; } #unicode-hint { background:#eee; } #traceback { background:#eee; } #requestinfo { background:#f6f6f6; padding-left:120px; } #summary table { border:none; background:transparent; } #requestinfo h2, #requestinfo h3 { position:relative; margin-left:-100px; } #requestinfo h3 { margin-bottom:-1em; } .error { background: #ffc; } .specific { color:#cc3300; font-weight:bold; } h2 span.commands { font-size:.7em;} span.commands a:link {color:#5E5694;} pre.exception_value { font-family: sans-serif; color: #666; font-size: 1.5em; margin: 10px 0 10px 0; } </style> <script type="text/javascript"> //<!-- function getElementsByClassName(oElm, strTagName, strClassName){ // Written by Jonathan Snook, http://www.snook.ca/jon; Add-ons by Robert Nyman, http://www.robertnyman.com var arrElements = (strTagName == "*" && document.all)? 
document.all : oElm.getElementsByTagName(strTagName); var arrReturnElements = new Array(); strClassName = strClassName.replace(/\-/g, "\\-"); var oRegExp = new RegExp("(^|\\s)" + strClassName + "(\\s|$)"); var oElement; for(var i=0; i<arrElements.length; i++){ oElement = arrElements[i]; if(oRegExp.test(oElement.className)){ arrReturnElements.push(oElement); } } return (arrReturnElements) } function hideAll(elems) { for (var e = 0; e < elems.length; e++) { elems[e].style.display = 'none'; } } window.onload = function() { hideAll(getElementsByClassName(document, 'table', 'vars')); hideAll(getElementsByClassName(document, 'ol', 'pre-context')); hideAll(getElementsByClassName(document, 'ol', 'post-context')); hideAll(getElementsByClassName(document, 'div', 'pastebin')); } function toggle() { for (var i = 0; i < arguments.length; i++) { var e = document.getElementById(arguments[i]); if (e) { e.style.display = e.style.display == 'none' ? 'block' : 'none'; } } return false; } function varToggle(link, id) { toggle('v' + id); var s = link.getElementsByTagName('span')[0]; var uarr = String.fromCharCode(0x25b6); var darr = String.fromCharCode(0x25bc); s.innerHTML = s.innerHTML == uarr ? darr : uarr; return false; } function switchPastebinFriendly(link) { s1 = "Switch to copy-and-paste view"; s2 = "Switch back to interactive view"; link.innerHTML = link.innerHTML == s1 ? 
s2 : s1; toggle('browserTraceback', 'pastebinTraceback'); return false; } //--> </script> </head> <body> <div id="summary"> <h1>{{ exception_type }} at {{ request.path_info|escape }}</h1> <pre class="exception_value">{{ exception_value|force_escape }}</pre> <table class="meta"> <tr> <th>Request Method:</th> <td>{{ request.META.REQUEST_METHOD }}</td> </tr> <tr> <th>Request URL:</th> <td>{{ request.build_absolute_uri|escape }}</td> </tr> <tr> <th>Django Version:</th> <td>{{ django_version_info }}</td> </tr> <tr> <th>Exception Type:</th> <td>{{ exception_type }}</td> </tr> <tr> <th>Exception Value:</th> <td><pre>{{ exception_value|force_escape }}</pre></td> </tr> <tr> <th>Exception Location:</th> <td>{{ lastframe.filename|escape }} in {{ lastframe.function|escape }}, line {{ lastframe.lineno }}</td> </tr> <tr> <th>Python Executable:</th> <td>{{ sys_executable|escape }}</td> </tr> <tr> <th>Python Version:</th> <td>{{ sys_version_info }}</td> </tr> <tr> <th>Python Path:</th> <td>{{ sys_path }}</td> </tr> <tr> <th>Server time:</th> <td>{{server_time|date:"r"}}</td> </tr> </table> </div> {% if unicode_hint %} <div id="unicode-hint"> <h2>Unicode error hint</h2> <p>The string that could not be encoded/decoded was: <strong>{{ unicode_hint|force_escape }}</strong></p> </div> {% endif %} {% if template_does_not_exist %} <div id="template-not-exist"> <h2>Template-loader postmortem</h2> {% if loader_debug_info %} <p>Django tried loading these templates, in this order:</p> <ul> {% for loader in loader_debug_info %} <li>Using loader <code>{{ loader.loader }}</code>: <ul>{% for t in loader.templates %}<li><code>{{ t.name }}</code> (File {% if t.exists %}exists{% else %}does not exist{% endif %})</li>{% endfor %}</ul> </li> {% endfor %} </ul> {% else %} <p>Django couldn't find any templates because your <code>TEMPLATE_LOADERS</code> setting is empty!</p> {% endif %} </div> {% endif %} {% if template_info %} <div id="template"> <h2>Template error</h2> <p>In template <code>{{ 
template_info.name }}</code>, error at line <strong>{{ template_info.line }}</strong></p> <h3>{{ template_info.message }}</h3> <table class="source{% if template_info.top %} cut-top{% endif %}{% ifnotequal template_info.bottom template_info.total %} cut-bottom{% endifnotequal %}"> {% for source_line in template_info.source_lines %} {% ifequal source_line.0 template_info.line %} <tr class="error"><th>{{ source_line.0 }}</th> <td>{{ template_info.before }}<span class="specific">{{ template_info.during }}</span>{{ template_info.after }}</td></tr> {% else %} <tr><th>{{ source_line.0 }}</th> <td>{{ source_line.1 }}</td></tr> {% endifequal %} {% endfor %} </table> </div> {% endif %} <div id="traceback"> <h2>Traceback <span class="commands"><a href="#" onclick="return switchPastebinFriendly(this);">Switch to copy-and-paste view</a></span></h2> {% autoescape off %} <div id="browserTraceback"> <ul class="traceback"> {% for frame in frames %} <li class="frame"> <code>{{ frame.filename|escape }}</code> in <code>{{ frame.function|escape }}</code> {% if frame.context_line %} <div class="context" id="c{{ frame.id }}"> {% if frame.pre_context %} <ol start="{{ frame.pre_context_lineno }}" class="pre-context" id="pre{{ frame.id }}">{% for line in frame.pre_context %}<li onclick="toggle('pre{{ frame.id }}', 'post{{ frame.id }}')">{{ line|escape }}</li>{% endfor %}</ol> {% endif %} <ol start="{{ frame.lineno }}" class="context-line"><li onclick="toggle('pre{{ frame.id }}', 'post{{ frame.id }}')">{{ frame.context_line|escape }} <span>...</span></li></ol> {% if frame.post_context %} <ol start='{{ frame.lineno|add:"1" }}' class="post-context" id="post{{ frame.id }}">{% for line in frame.post_context %}<li onclick="toggle('pre{{ frame.id }}', 'post{{ frame.id }}')">{{ line|escape }}</li>{% endfor %}</ol> {% endif %} </div> {% endif %} {% if frame.vars %} <div class="commands"> <a href="#" onclick="return varToggle(this, '{{ frame.id }}')"><span>&#x25b6;</span> Local vars</a> </div> 
<table class="vars" id="v{{ frame.id }}"> <thead> <tr> <th>Variable</th> <th>Value</th> </tr> </thead> <tbody> {% for var in frame.vars|dictsort:"0" %} <tr> <td>{{ var.0|force_escape }}</td> <td class="code"><div>{{ var.1|pprint|force_escape }}</div></td> </tr> {% endfor %} </tbody> </table> {% endif %} </li> {% endfor %} </ul> </div> {% endautoescape %} <form action="http://dpaste.com/" name="pasteform" id="pasteform" method="post"> <div id="pastebinTraceback" class="pastebin"> <input type="hidden" name="language" value="PythonConsole"> <input type="hidden" name="title" value="{{ exception_type|escape }} at {{ request.path_info|escape }}"> <input type="hidden" name="source" value="Django Dpaste Agent"> <input type="hidden" name="poster" value="Django"> <textarea name="content" id="traceback_area" cols="140" rows="25"> Environment: Request Method: {{ request.META.REQUEST_METHOD }} Request URL: {{ request.build_absolute_uri|escape }} Django Version: {{ django_version_info }} Python Version: {{ sys_version_info }} Installed Applications: {{ settings.INSTALLED_APPS|pprint }} Installed Middleware: {{ settings.MIDDLEWARE_CLASSES|pprint }} {% if template_does_not_exist %}Template Loader Error: {% if loader_debug_info %}Django tried loading these templates, in this order: {% for loader in loader_debug_info %}Using loader {{ loader.loader }}: {% for t in loader.templates %}{{ t.name }} (File {% if t.exists %}exists{% else %}does not exist{% endif %}) {% endfor %}{% endfor %} {% else %}Django couldn't find any templates because your TEMPLATE_LOADERS setting is empty! 
{% endif %} {% endif %}{% if template_info %} Template error: In template {{ template_info.name }}, error at line {{ template_info.line }} {{ template_info.message }}{% for source_line in template_info.source_lines %}{% ifequal source_line.0 template_info.line %} {{ source_line.0 }} : {{ template_info.before }} {{ template_info.during }} {{ template_info.after }} {% else %} {{ source_line.0 }} : {{ source_line.1 }} {% endifequal %}{% endfor %}{% endif %} Traceback: {% for frame in frames %}File "{{ frame.filename|escape }}" in {{ frame.function|escape }} {% if frame.context_line %} {{ frame.lineno }}. {{ frame.context_line|escape }}{% endif %} {% endfor %} Exception Type: {{ exception_type|escape }} at {{ request.path_info|escape }} Exception Value: {{ exception_value|force_escape }} </textarea> <br><br> <input type="submit" value="Share this traceback on a public Web site"> </div> </form> </div> <div id="requestinfo"> <h2>Request information</h2> <h3 id="get-info">GET</h3> {% if request.GET %} <table class="req"> <thead> <tr> <th>Variable</th> <th>Value</th> </tr> </thead> <tbody> {% for var in request.GET.items %} <tr> <td>{{ var.0 }}</td> <td class="code"><div>{{ var.1|pprint }}</div></td> </tr> {% endfor %} </tbody> </table> {% else %} <p>No GET data</p> {% endif %} <h3 id="post-info">POST</h3> {% if request.POST %} <table class="req"> <thead> <tr> <th>Variable</th> <th>Value</th> </tr> </thead> <tbody> {% for var in request.POST.items %} <tr> <td>{{ var.0 }}</td> <td class="code"><div>{{ var.1|pprint }}</div></td> </tr> {% endfor %} </tbody> </table> {% else %} <p>No POST data</p> {% endif %} <h3 id="files-info">FILES</h3> {% if request.FILES %} <table class="req"> <thead> <tr> <th>Variable</th> <th>Value</th> </tr> </thead> <tbody> {% for var in request.FILES.items %} <tr> <td>{{ var.0 }}</td> <td class="code"><div>{{ var.1|pprint }}</div></td> </tr> {% endfor %} </tbody> </table> {% else %} <p>No FILES data</p> {% endif %} <h3 id="cookie-info">COOKIES</h3> 
{% if request.COOKIES %} <table class="req"> <thead> <tr> <th>Variable</th> <th>Value</th> </tr> </thead> <tbody> {% for var in request.COOKIES.items %} <tr> <td>{{ var.0 }}</td> <td class="code"><div>{{ var.1|pprint }}</div></td> </tr> {% endfor %} </tbody> </table> {% else %} <p>No cookie data</p> {% endif %} <h3 id="meta-info">META</h3> <table class="req"> <thead> <tr> <th>Variable</th> <th>Value</th> </tr> </thead> <tbody> {% for var in request.META.items|dictsort:"0" %} <tr> <td>{{ var.0 }}</td> <td class="code"><div>{{ var.1|pprint }}</div></td> </tr> {% endfor %} </tbody> </table> <h3 id="settings-info">Settings</h3> <h4>Using settings module <code>{{ settings.SETTINGS_MODULE }}</code></h4> <table class="req"> <thead> <tr> <th>Setting</th> <th>Value</th> </tr> </thead> <tbody> {% for var in settings.items|dictsort:"0" %} <tr> <td>{{ var.0 }}</td> <td class="code"><div>{{ var.1|pprint }}</div></td> </tr> {% endfor %} </tbody> </table> </div> <div id="explanation"> <p> You're seeing this error because you have <code>DEBUG = True</code> in your Django settings file. Change that to <code>False</code>, and Django will display a standard 500 page. 
</p> </div> </body> </html> """ TECHNICAL_404_TEMPLATE = """ <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <html lang="en"> <head> <meta http-equiv="content-type" content="text/html; charset=utf-8"> <title>Page not found at {{ request.path_info|escape }}</title> <meta name="robots" content="NONE,NOARCHIVE"> <style type="text/css"> html * { padding:0; margin:0; } body * { padding:10px 20px; } body * * { padding:0; } body { font:small sans-serif; background:#eee; } body>div { border-bottom:1px solid #ddd; } h1 { font-weight:normal; margin-bottom:.4em; } h1 span { font-size:60%; color:#666; font-weight:normal; } table { border:none; border-collapse: collapse; width:100%; } td, th { vertical-align:top; padding:2px 3px; } th { width:12em; text-align:right; color:#666; padding-right:.5em; } #info { background:#f6f6f6; } #info ol { margin: 0.5em 4em; } #info ol li { font-family: monospace; } #summary { background: #ffc; } #explanation { background:#eee; border-bottom: 0px none; } </style> </head> <body> <div id="summary"> <h1>Page not found <span>(404)</span></h1> <table class="meta"> <tr> <th>Request Method:</th> <td>{{ request.META.REQUEST_METHOD }}</td> </tr> <tr> <th>Request URL:</th> <td>{{ request.build_absolute_uri|escape }}</td> </tr> </table> </div> <div id="info"> {% if urlpatterns %} <p> Using the URLconf defined in <code>{{ settings.ROOT_URLCONF }}</code>, Django tried these URL patterns, in this order: </p> <ol> {% for pattern in urlpatterns %} <li> {% for pat in pattern %} {{ pat.regex.pattern }} {% if forloop.last and pat.name %}[name='{{ pat.name }}']{% endif %} {% endfor %} </li> {% endfor %} </ol> <p>The current URL, <code>{{ request_path|escape }}</code>, didn't match any of these.</p> {% else %} <p>{{ reason }}</p> {% endif %} </div> <div id="explanation"> <p> You're seeing this error because you have <code>DEBUG = True</code> in your Django settings file. 
Change that to <code>False</code>, and Django will display a standard 404 page. </p> </div> </body> </html> """ EMPTY_URLCONF_TEMPLATE = """ <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <html lang="en"><head> <meta http-equiv="content-type" content="text/html; charset=utf-8"> <meta name="robots" content="NONE,NOARCHIVE"><title>Welcome to Django</title> <style type="text/css"> html * { padding:0; margin:0; } body * { padding:10px 20px; } body * * { padding:0; } body { font:small sans-serif; } body>div { border-bottom:1px solid #ddd; } h1 { font-weight:normal; } h2 { margin-bottom:.8em; } h2 span { font-size:80%; color:#666; font-weight:normal; } h3 { margin:1em 0 .5em 0; } h4 { margin:0 0 .5em 0; font-weight: normal; } table { border:1px solid #ccc; border-collapse: collapse; width:100%; background:white; } tbody td, tbody th { vertical-align:top; padding:2px 3px; } thead th { padding:1px 6px 1px 3px; background:#fefefe; text-align:left; font-weight:normal; font-size:11px; border:1px solid #ddd; } tbody th { width:12em; text-align:right; color:#666; padding-right:.5em; } ul { margin-left: 2em; margin-top: 1em; } #summary { background: #e0ebff; } #summary h2 { font-weight: normal; color: #666; } #explanation { background:#eee; } #instructions { background:#f6f6f6; } #summary table { border:none; background:transparent; } </style> </head> <body> <div id="summary"> <h1>It worked!</h1> <h2>Congratulations on your first Django-powered page.</h2> </div> <div id="instructions"> <p>Of course, you haven't actually done any work yet. 
Here's what to do next:</p> <ul> <li>If you plan to use a database, edit the <code>DATABASES</code> setting in <code>{{ project_name }}/settings.py</code>.</li> <li>Start your first app by running <code>python {{ project_name }}/manage.py startapp [appname]</code>.</li> </ul> </div> <div id="explanation"> <p> You're seeing this message because you have <code>DEBUG = True</code> in your Django settings file and you haven't configured any URLs. Get to work! </p> </div> </body></html> """
[ [ 1, 0, 0.0012, 0.0012, 0, 0.66, 0, 426, 0, 1, 0, 0, 426, 0, 0 ], [ 1, 0, 0.0023, 0.0012, 0, 0.66, 0.05, 688, 0, 1, 0, 0, 688, 0, 0 ], [ 1, 0, 0.0035, 0.0012, 0, 0....
[ "import datetime", "import os", "import re", "import sys", "from django.conf import settings", "from django.http import HttpResponse, HttpResponseServerError, HttpResponseNotFound", "from django.template import (Template, Context, TemplateDoesNotExist,\n TemplateSyntaxError)", "from django.utils.ht...
""" Decorators for views based on HTTP headers. """ try: from functools import wraps except ImportError: from django.utils.functional import wraps # Python 2.4 fallback. from calendar import timegm from datetime import timedelta from email.Utils import formatdate from django.utils.decorators import decorator_from_middleware, available_attrs from django.utils.http import parse_etags, quote_etag from django.utils.log import getLogger from django.middleware.http import ConditionalGetMiddleware from django.http import HttpResponseNotAllowed, HttpResponseNotModified, HttpResponse conditional_page = decorator_from_middleware(ConditionalGetMiddleware) logger = getLogger('django.request') def require_http_methods(request_method_list): """ Decorator to make a view only accept particular request methods. Usage:: @require_http_methods(["GET", "POST"]) def my_view(request): # I can assume now that only GET or POST requests make it this far # ... Note that request methods should be in uppercase. """ def decorator(func): def inner(request, *args, **kwargs): if request.method not in request_method_list: logger.warning('Method Not Allowed (%s): %s' % (request.method, request.path), extra={ 'status_code': 405, 'request': request } ) return HttpResponseNotAllowed(request_method_list) return func(request, *args, **kwargs) return wraps(func, assigned=available_attrs(func))(inner) return decorator require_GET = require_http_methods(["GET"]) require_GET.__doc__ = "Decorator to require that a view only accept the GET method." require_POST = require_http_methods(["POST"]) require_POST.__doc__ = "Decorator to require that a view only accept the POST method." def condition(etag_func=None, last_modified_func=None): """ Decorator to support conditional retrieval (or change) for a view function. The parameters are callables to compute the ETag and last modified time for the requested resource, respectively. The callables are passed the same parameters as the view itself. 
The Etag function should return a string (or None if the resource doesn't exist), whilst the last_modified function should return a datetime object (or None if the resource doesn't exist). If both parameters are provided, all the preconditions must be met before the view is processed. This decorator will either pass control to the wrapped view function or return an HTTP 304 response (unmodified) or 412 response (preconditions failed), depending upon the request method. Any behavior marked as "undefined" in the HTTP spec (e.g. If-none-match plus If-modified-since headers) will result in the view function being called. """ def decorator(func): def inner(request, *args, **kwargs): # Get HTTP request headers if_modified_since = request.META.get("HTTP_IF_MODIFIED_SINCE") if_none_match = request.META.get("HTTP_IF_NONE_MATCH") if_match = request.META.get("HTTP_IF_MATCH") if if_none_match or if_match: # There can be more than one ETag in the request, so we # consider the list of values. try: etags = parse_etags(if_none_match or if_match) except ValueError: # In case of invalid etag ignore all ETag headers. # Apparently Opera sends invalidly quoted headers at times # (we should be returning a 400 response, but that's a # little extreme) -- this is Django bug #10681. if_none_match = None if_match = None # Compute values (if any) for the requested resource. if etag_func: res_etag = etag_func(request, *args, **kwargs) else: res_etag = None if last_modified_func: dt = last_modified_func(request, *args, **kwargs) if dt: res_last_modified = formatdate(timegm(dt.utctimetuple()))[:26] + 'GMT' else: res_last_modified = None else: res_last_modified = None response = None if not ((if_match and (if_modified_since or if_none_match)) or (if_match and if_none_match)): # We only get here if no undefined combinations of headers are # specified. 
if ((if_none_match and (res_etag in etags or "*" in etags and res_etag)) and (not if_modified_since or res_last_modified == if_modified_since)): if request.method in ("GET", "HEAD"): response = HttpResponseNotModified() else: logger.warning('Precondition Failed: %s' % request.path, extra={ 'status_code': 412, 'request': request } ) response = HttpResponse(status=412) elif if_match and ((not res_etag and "*" in etags) or (res_etag and res_etag not in etags)): logger.warning('Precondition Failed: %s' % request.path, extra={ 'status_code': 412, 'request': request } ) response = HttpResponse(status=412) elif (not if_none_match and if_modified_since and request.method == "GET" and res_last_modified == if_modified_since): response = HttpResponseNotModified() if response is None: response = func(request, *args, **kwargs) # Set relevant headers on the response if they don't already exist. if res_last_modified and not response.has_header('Last-Modified'): response['Last-Modified'] = res_last_modified if res_etag and not response.has_header('ETag'): response['ETag'] = quote_etag(res_etag) return response return inner return decorator # Shortcut decorators for common cases based on ETag or Last-Modified only def etag(etag_func): return condition(etag_func=etag_func) def last_modified(last_modified_func): return condition(last_modified_func=last_modified_func)
[ [ 8, 0, 0.0122, 0.0183, 0, 0.66, 0, 0, 1, 0, 0, 0, 0, 0, 0 ], [ 7, 0, 0.0396, 0.0244, 0, 0.66, 0.0526, 0, 0, 1, 0, 0, 0, 0, 0 ], [ 1, 1, 0.0366, 0.0061, 1, 0.9, ...
[ "\"\"\"\nDecorators for views based on HTTP headers.\n\"\"\"", "try:\n from functools import wraps\nexcept ImportError:\n from django.utils.functional import wraps # Python 2.4 fallback.", " from functools import wraps", " from django.utils.functional import wraps # Python 2.4 fallback.", "fro...