index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
6,877
|
calvinwhealton/SALib
|
refs/heads/master
|
/SALib/sample/saltelli.py
|
from __future__ import division
import numpy as np
from . import common_args
from . import sobol_sequence
from ..util import scale_samples, nonuniform_scale_samples, read_param_file
def sample(problem, N, calc_second_order=True):
"""Generates model inputs using Saltelli's extension of the Sobol sequence.
Returns a NumPy matrix containing the model inputs using Saltelli's sampling
scheme. Saltelli's scheme extends the Sobol sequence in a way to reduce
the error rates in the resulting sensitivity index calculations. If
calc_second_order is False, the resulting matrix has N * (D + 2)
rows, where D is the number of parameters. If calc_second_order is True,
the resulting matrix has N * (2D + 2) rows. These model inputs are
intended to be used with :func:`SALib.analyze.sobol.analyze`.
Parameters
----------
problem : dict
The problem definition
N : int
The number of samples to generate
calc_second_order : bool
Calculate second-order sensitivities (default True)
"""
D = problem['num_vars']
if not problem.get('groups'):
groups = False
Dg = problem['num_vars']
else:
groups = True
# condition for when problem was defined from parameter file
# can access the 'groups' tuple (matrix, list of unique group names)
# to determine the number of groups
# if problem defined as a dictionary in the code, find the number
# of unique group names
# also make matrix to account for group names
if len(problem['groups']) == 2:
Dg = len(problem['groups'][1])
else:
Dg = len(np.unique(problem['groups']))
gp_mat = np.zeros([D, Dg])
for i in range(Dg):
# group name to check for equivalency
groupNameIt = np.unique(problem['groups'])[i]
for j in range(D):
if problem['groups'][j] == groupNameIt:
gp_mat[j,i] = 1
# making a tuple similar to the one made by the read_param_file
# for use later in the code
problem['groups'] = (gp_mat,np.unique(problem['groups']))
# How many values of the Sobol sequence to skip
skip_values = 1000
# Create base sequence - could be any type of sampling
base_sequence = sobol_sequence.sample(N + skip_values, 2 * D)
if calc_second_order:
saltelli_sequence = np.empty([(2 * Dg + 2) * N, D])
else:
saltelli_sequence = np.empty([(Dg + 2) * N, D])
index = 0
for i in range(skip_values, N + skip_values):
# Copy matrix "A"
for j in range(D):
saltelli_sequence[index, j] = base_sequence[i, j]
index += 1
# Cross-sample elements of "B" into "A"
# condition for group sampling (groups is True)
if groups:
# method of cross-sampling "B" into "A" for groups
# groups that are "off-diagional" (l != m) will be form "A"
# groups that are "on-diagional" (l = m) will be from "B"
for l in range(Dg):
for m in range(D):
if problem['groups'][0][m,l] == 1:
saltelli_sequence[index, m] = base_sequence[i, m + D]
else:
saltelli_sequence[index, m] = base_sequence[i, m]
index += 1
else:
for k in range(D):
for j in range(D):
if j == k:
saltelli_sequence[index, j] = base_sequence[i, j + D]
else:
saltelli_sequence[index, j] = base_sequence[i, j]
index += 1
# Cross-sample elements of "A" into "B"
# Only needed if you're doing second-order indices (true by default)
if calc_second_order:
# condition for group sampling (groups is True)
if groups:
# method of cross-sampling "A" into "B" for groups
# groups that are "off-diagional" (l != m) will be form "B"
# groups that are "on-diagional" (l = m) will be from "A"
for l in range(Dg):
for m in range(D):
if problem['groups'][0][m,l] == 1:
saltelli_sequence[index, m] = base_sequence[i, m]
else:
saltelli_sequence[index, m] = base_sequence[i, m + D]
index += 1
else:
for k in range(D):
for j in range(D):
if j == k:
saltelli_sequence[index, j] = base_sequence[i, j]
else:
saltelli_sequence[index, j] = base_sequence[i, j + D]
index += 1
# Copy matrix "B"
for j in range(D):
saltelli_sequence[index, j] = base_sequence[i, j + D]
index += 1
if not problem.get('dists'):
# scaling values out of 0-1 range with uniform distributions
scale_samples(saltelli_sequence,problem['bounds'])
return saltelli_sequence
else:
# scaling values to other distributions based on inverse CDFs
scaled_saltelli = nonuniform_scale_samples(saltelli_sequence,problem['bounds'],problem['dists'])
return scaled_saltelli
if __name__ == "__main__":
parser = common_args.create()
parser.add_argument(
'-n', '--samples', type=int, required=True, help='Number of Samples')
parser.add_argument('--max-order', type=int, required=False, default=2,
choices=[1, 2], help='Maximum order of sensitivity indices to calculate')
args = parser.parse_args()
np.random.seed(args.seed)
problem = read_param_file(args.paramfile)
param_values = sample(problem, args.samples, calc_second_order=(args.max_order == 2))
np.savetxt(args.output, param_values, delimiter=args.delimiter,
fmt='%.' + str(args.precision) + 'e')
|
{"/SALib/sample/saltelli.py": ["/SALib/util/__init__.py"]}
|
6,878
|
calvinwhealton/SALib
|
refs/heads/master
|
/SALib/util/__init__.py
|
__all__ = ["scale_samples", "read_param_file"]
from collections import OrderedDict
import csv
from warnings import warn
import numpy as np
import scipy as sp
def scale_samples(params, bounds):
'''
Rescales samples in 0-to-1 range to arbitrary bounds.
Arguments:
bounds - list of lists of dimensions num_params-by-2
params - numpy array of dimensions num_params-by-N,
where N is the number of samples
'''
# Check bounds are legal (upper bound is greater than lower bound)
b = np.array(bounds)
lower_bounds = b[:, 0]
upper_bounds = b[:, 1]
if np.any(lower_bounds >= upper_bounds):
raise ValueError("Bounds are not legal")
# This scales the samples in-place, by using the optional output
# argument for the numpy ufunctions
# The calculation is equivalent to:
# sample * (upper_bound - lower_bound) + lower_bound
np.add(np.multiply(params,
(upper_bounds - lower_bounds),
out=params),
lower_bounds,
out=params)
def unscale_samples(params, bounds):
'''
Rescales samples from arbitrary bounds back to [0,1] range.
Arguments:
bounds - list of lists of dimensions num_params-by-2
params - numpy array of dimensions num_params-by-N,
where N is the number of samples
'''
# Check bounds are legal (upper bound is greater than lower bound)
b = np.array(bounds)
lower_bounds = b[:, 0]
upper_bounds = b[:, 1]
if np.any(lower_bounds >= upper_bounds):
raise ValueError("Bounds are not legal")
# This scales the samples in-place, by using the optional output
# argument for the numpy ufunctions
# The calculation is equivalent to:
# (sample - lower_bound) / (upper_bound - lower_bound)
np.divide(np.subtract(params, lower_bounds, out=params),
np.subtract(upper_bounds, lower_bounds),
out=params)
def nonuniform_scale_samples(params, bounds, dists):
'''
Rescales samples in 0-to-1 range to other distributions.
Arguments:
problem - problem definition including bounds
params - numpy array of dimensions num_params-by-N,
where N is the number of samples
dists-list of distributions, one for each parameter
unif: uniform with lower and upper bounds
triang: triangular with width (scale) and location of peak
location of peak is in percentage of width
lower bound assumed to be zero
norm: normal distribution with mean and standard deviation
lognorm: lognormal with ln-space mean and standard deviation
'''
b = np.array(bounds)
if len(params[0]) != len(dists):
print('Incorrect number of distributions specified')
print('Original parameters returned')
return params
else:
# initializing matrix for converted values
conv_params = np.empty([len(params),len(params[0])])
# loop over the parameters
for i in range(len(conv_params[0])):
# setting first and second arguments for distributions
arg1 = b[i][0]
arg2 = b[i][1]
# triangular distribution
# paramters are width (scale) and location of peak
# location of peak is relative to scale
# e.g., 0.25 means peak is 25% of the width distance from zero
if dists[i] == 'triang':
# checking for correct parameters
if arg1 <= 0:
print('Scale must be greater than zero')
print('Parameter not converted')
conv_params[:,i] = params[:,i]
elif (arg2 <= 0) or (arg2 >= 1):
print('Peak must be on interval [0,1]')
print('Parameter not converted')
conv_params[:,i] = params[:,i]
else:
conv_params[:,i] = sp.stats.triang.ppf(params[:,i],c=arg2,scale=arg1,loc=0)
# uniform distribution
# parameters are lower and upper bounds
elif dists[i] == 'unif':
# checking that upper bound is greater than lower bound
if arg1 >= arg2:
print('Lower bound greater than upper bound')
print('Parameter not converted')
conv_params[:,i] = params[:,i]
else:
conv_params[:,i] = params[:,i]*(arg2-arg1) + arg1
# normal distribution
# paramters are mean and standard deviation
elif dists[i] == 'norm':
# checking for valid parameters
if arg2 <= 0:
print('Scale must be greater than zero')
print('Parameter not converted')
conv_params[:,i] = params[:,i]
else:
conv_params[:,i] = sp.stats.norm.ppf(params[:,i],loc=arg1,scale=arg2)
# lognormal distribution (ln-space, not base-10)
# paramters are ln-space mean and standard deviation
elif dists[i] == 'lognorm':
# checking for valid parameters
if arg2 <= 0:
print('Scale must be greater than zero')
print('Parameter not converted')
conv_params[:,i] = params[:,i]
else:
conv_params[:,i] = np.exp(sp.stats.norm.ppf(params[:,i],loc=arg1,scale=arg2))
else:
print('No valid distribution selected')
return(conv_params)
def read_param_file(filename, delimiter=None):
'''
Reads a parameter file of format:
Param1,0,1,Group1,dist1
Param2,0,1,Group2,dist2
Param3,0,1,Group3,dist3
And returns a dictionary containing:
- names - the names of the parameters
- bounds - a list of lists of lower and upper bounds
- num_vars - a scalar indicating the number of variables
(the length of names)
- groups - a tuple containing i) a group matrix assigning parameters to
groups
ii) a list of unique group names
- dists - a list of distributions for the problem,
None if not specified or all uniform
'''
names = []
bounds = []
group_list = []
dist_list = []
num_vars = 0
fieldnames = ['name', 'lower_bound', 'upper_bound', 'group', 'dist']
dist_none_count = 0 # used when evaluating if non-uniform distributions are specified
with open(filename, 'rU') as csvfile:
dialect = csv.Sniffer().sniff(csvfile.read(1024), delimiters=delimiter)
csvfile.seek(0)
reader = csv.DictReader(
csvfile, fieldnames=fieldnames, dialect=dialect)
for row in reader:
if row['name'].strip().startswith('#'):
pass
else:
num_vars += 1
names.append(row['name'])
bounds.append(
[float(row['lower_bound']), float(row['upper_bound'])])
# If the fourth column does not contain a group name, use
# the parameter name
if row['group'] is None:
group_list.append(row['name'])
elif row['group'] is 'NA':
group_list.append(row['name'])
else:
group_list.append(row['group'])
# If the fifth column does not contain a distribution
# use uniform
if row['dist'] is None:
dist_list.append('unif')
dist_none_count += 1
else:
dist_list.append(row['dist'])
group_matrix, group_names = compute_groups_from_parameter_file(
group_list, num_vars)
# setting group_tuple to zero if no groups are defined
# or all groups are 'NA'
if np.all(group_matrix == np.eye(num_vars)):
group_tuple = None
elif len(np.unique(group_list)) == 1:
group_tuple = None
else:
group_tuple = (group_matrix, group_names)
# setting dist list to none if all are uniform
# because non-uniform scaling is not needed
if dist_none_count == num_vars:
dist_list = None
return {'names': names, 'bounds': bounds, 'num_vars': num_vars,
'groups': group_tuple, 'dists': dist_list}
def compute_groups_from_parameter_file(group_list, num_vars):
'''
Computes a k-by-g matrix which notes factor membership of groups
where:
k is the number of variables (factors)
g is the number of groups
Also returns a g-length list of unique group_names whose positions
correspond to the order of groups in the k-by-g matrix
'''
# Get a unique set of the group names
unique_group_names = list(OrderedDict.fromkeys(group_list))
number_of_groups = len(unique_group_names)
indices = dict([(x, i) for (i, x) in enumerate(unique_group_names)])
output = np.zeros((num_vars, number_of_groups), dtype=np.int)
for parameter_row, group_membership in enumerate(group_list):
group_index = indices[group_membership]
output[parameter_row, group_index] = 1
return np.matrix(output), unique_group_names
def requires_gurobipy(_has_gurobi):
'''
Decorator function which takes a boolean _has_gurobi as an argument.
Use decorate any functions which require gurobi.
Raises an import error at runtime if gurobi is not present.
Note that all runtime errors should be avoided in the working code,
using brute force options as preference.
'''
def _outer_wrapper(wrapped_function):
def _wrapper(*args, **kwargs):
if _has_gurobi:
result = wrapped_function(*args, **kwargs)
else:
warn("Gurobi not available", ImportWarning)
result = None
return result
return _wrapper
return _outer_wrapper
|
{"/SALib/sample/saltelli.py": ["/SALib/util/__init__.py"]}
|
6,879
|
yrttiahoS/py_gazedat
|
refs/heads/master
|
/py_test.py
|
from datetime import datetime, date
import os
import csv
import sys
#import math
from collections import OrderedDict
import routine # a script with functions
import numpy as np
from my_classes import GazeReader
input_folder = "C:\\Users\\Public\\Documents\\Tampereen yliopisto\\Eye tracker\\TRE Cohort 2\\gazeAnalysisLib analyses\\7mo,trec2"
#input_folder = "C:\\Users\\Public\\Documents\\Tampereen yliopisto\\Eye tracker\\TRE Cohort 2\\gazeAnalysisLib analyses\\testing 7mo,trec2"
#print(os.path.isdir(input_folder))
#os.mkdir("C:\\Users\\Public\\Documents\\Tampereen yliopisto\\Eye tracker\\TRE Cohort 2\\gazeAnalysisLib analyses\\50-years visit\\")
allFIlenames =os.listdir(input_folder)
print(len(allFIlenames))
fileNames = [fileName for fileName in os.listdir(input_folder) if fileName.endswith(".gazedata")]
print(len(fileNames))
a = "stringi1"
b = "stringi2"
if a == b:
print("samat")
else:
print("eri")
##folder_path = os.path.split("C:/lasayr/file.txt")
##folder_tail = folder_path[1]
##print(folder_tail)
##
##i = folder_tail.find('.')
##print(folder_tail[0:i])
##print(folder_tail[i])
##
##a=1
##while a <= 3:
## print(a)
## a += 1
##
##
##
##
##
##with open(os.path.join(os.getcwd(), "tiste_file.txt"), "wt", newline = "\n") as outputfile:
## writer = csv.writer(outputfile, delimiter="\t")
## writer.writerow( ["guuguu", "geegee"] )
## writer.writerow( ["guuguu 2",] )
##
#args_pro = 'D:\\lasayr\\Aaltonen\\mi', '353_4.gazedata', None
# make new GazeReader object for reading and processing input file
##f_processor = GazeReader(args_pro) #40 is optional limit for rows
##print(f_processor.get_filename())
##hh = f_processor.get_headers()
##
##print(hh)
##
##hh_num=[]
##tets=[]
##print(len(tets))
##
##for i in range(10):
## print("duck")
##for header in hh:
## print(header)
## hh_num.append(routine.string_or_number(header))
## tets.append(isinstance(hh_num[-1], str)) #[-1] is the last element...!
##
##
##
##
##print("is strin:" + str(tets))#isinstance(hh[0], str))
##
##print("all headers are string:" + str(all(tets)))
##a = [1, 2, 3]
##
##print(a)
##print(np.percentile(a , 50))
##
##
##start_time = date.today()
##print(start_time )
#returns the elapsed milliseconds since the start of the program
##def millis(start_time):
## dtn = datetime.now()
## print(dtn)
## dt = dtn - start_time
##
## print(dt)
## mus =(dt.days * 24 *60 *60 + dt.seconds) * 1000 + dt.microseconds
## ms = mus / 1000
## s = mus / (1000*1000/1)
## minutes = mus /(1000*1000*60)
##
## return ms, s, minutes
##
##for i in range(1,10^9990):
## 10^i
##
##print( str(millis(start_time)))
#from my_classes import MyClass
#print(sys.version)
#foo = routine.string_or_number('neutral2.bmp')
#stim = ['control.bmp', 'neutral2.bmp', 'control.bmp', 'neutral2.bmp']
##stim = ['control.bmp', 1, 'control.bmp', 'neutral2.bmp',
## 'control.bmp', 'neutral2.bmp', 'control.bmp', 'neutral2.bmp',
## 'control.bmp', 'aaneutral2.bmp']
##
##
##print(sorted(list(stim)))
##print()
##
##ab = ["a", 1, "c", "c", "a"]
##bb= []
##for el in ab:
## if isinstance(el, str): bb.append(1)
## else: bb.append(0)
##
##
##print(all(bb))
##
#print(any (isinstance(ab,str)))
#print(isinstance(foo, str))
#headers = rderedDict("a": None, "b": None, "c": None)
#headers = ["a", "b", "c", "c", "a"]
#headers2 = ["Q", "W"]
##print(sorted(headers))
##print(set(headers))
##
##od = OrderedDict.fromkeys(headers)
##
##fod = "a" in od.keys()
##
##print (fod)
##print("aa" == "ab")
##
##od = OrderedDict()
##od['a'] = [1,2,3]
##od['b'] = None
##print(od['a'][len(od['a'])-1])
##od['a'].append(2)
##
##
##print(not od['a'])
##
##aa = list([1,1])
##for el in [2,3]: aa.append(el)
###aa.append([2,3])
##
##a = [10,10]
###aa.append(10,10)
##print(aa)
##
##print(max(aa))
##print(len(aa))
##print(isinstance(1,str))
#print(os.getcwd() )
##
##
##aa = not isinstance(headers[0], str)
##print(aa)
##
##a = {'numbero': [1,2], "wordolo": "nuppi"}
###a = {'numberot': 1, 2, 3, 4, "wordolot": "nuppi", "nappi", "noppi"}
#intti = float('a')
##print( min(a['numbero']) )
##
##
##print(range(0,10))
##
##a = []
##
##for i in range(9):
## a.append(i)
## print(str(a))
#from os.path import join, getsize
##for root, dirs, files in os.walk('C:\\Users\\'):
## if 'testing 7mo,trec2' in root:
## print(root, " ", end=" FOUND! ")
## print("")
##
##
##def funny(argue):
## if not argue:
## print("yell more")
## else:
## print("i agrue")
##
##funny("s")
##
##funny(None)
##
##
##default_input_folder = "C:\\Users\\Public\\Documents\\Tampereen yliopisto\\Eye tracker\\TRE Cohort 2\\gazeAnalysisLib analyses\\testing 7mo,trec2"
##print(os.path.isdir(default_input_folder))
#C:\\Users\\
#x = MyClass("gillo")
#print(x.f())
#print(x.__doc__)
#round(k*3.14)
#
#y = []
#k = 25
#for i, item in enumerate(range(k+1)):
# print (str(i))
## yx = (i/k)*math.pi
# y.append (math.sin((i/k)*2*math.pi)) #*math.pi
#
#print(x.f())
#print(x.get_i())
##input_file = "headers_tre_5mo_to_7mo.txt"
##input_file = "headers_txt_data.txt"
##
##input_file_delimiter = "\t"
##
##aa = ["q","w","e","r"]
##
##aa = [aa, aa, aa]
#print(list(enumerate(aa)))
##bb = zip( range(20000,20001,1), aa)
#print(list(range(3)))
#print(list(bb))
##for index, element in bb:
## print(element)
#a = routine.wonder(("amo", "rati", "her"))
##d = dict()
##od = OrderedDict()
##
##od["yks"] = 1
##od["kaks"] = 2
##od["kolme"] = 3
##
##d["yks"] = 1
##d["kaks"] = 2
##d["kolme"] = 3
#print(od)
# print(d)
##if ("a" is "a"):
## print("a is a")
##
##b = "abc"
#print("aaaa" + chr(9) + "aaaa") #tab
#print("aaaa" + chr(13) + "aaaa") #CR is a bytecode for carriage return (from the days of typewriters)
#print("aaaa" + chr(10) + "aaaa") #LF similarly, for line feed
# scan through files in a directory
# diritems = os.listdir(os.getcwd())
#table for header pairs
# maptable = {}#dict([('KEY','value')])
#read dictionary key-value pairs from file
#maptable = routine.get_headers(os.getcwd(), input_file)
#print (maptable)
#print (len(maptable.keys()))
#print (maptable.keys())
##print (maptable.values())
#testing a function in imported code
#routine.miracle(5)
#list_my = [1, 2, 3, 4]
#print(len(list_my))
##def file_handle(file):
## print (file)
## print (file.split("."))
##
##for filenum, file in enumerate(diritems):
## file_handle(file)
##
##
##for i, a in enumerate(["uu","jee"]):
## print(i)
## print(a)
##
##
##
##print ("Directory contains "
##+ str(len(diritems)) + " files.")
|
{"/py_test.py": ["/routine.py", "/my_classes.py"], "/standardizeGazedata_0.2.py": ["/routine.py", "/my_classes.py"], "/gazedata2gazedata_sy2 - copy.py": ["/routine.py"], "/check_dataOO_0.1.py": ["/my_classes.py"], "/gazedata2gazedata_sy.py": ["/routine.py"], "/standardizeGazedata_0.1.py": ["/routine.py", "/my_classes.py"], "/gazedata2gazedata_sy3.py": ["/routine.py"], "/check_headersOO_0.1.py": ["/my_classes.py"], "/check_data_multi.py": ["/my_classes.py"], "/my_classes.py": ["/routine.py"], "/py_test_2.py": ["/routine.py", "/my_classes.py"], "/standardizeGazedataAnon.py": ["/my_classes.py"], "/check_headers_2.py": ["/my_classes.py"]}
|
6,880
|
yrttiahoS/py_gazedat
|
refs/heads/master
|
/standardizeGazedata_0.2.py
|
##Script for reading, modifying and re-writing gazedata
##*this will have some algorithms from
##gazedata2gazedata_sy3.py and the like, but should use classes in my_classes.py
##*use DataFolder class to access all gazefiles in folder
## **write new method for writing new data into folder
## *method will loop through files, i.e., GazeReader objects
## *method will read (gettinf from GazeReader),
## change (by accessing separate textfile),
## and write headers to new file (use writer object?)
## *method will read, change, and write datarow one-by-one into new file,
## same logic as with headers, changing might be more tricky?
## *i.e., GazeReader will never have to give up entire data!
## first things 1st
import os
import routine
from my_classes import DataFolder
from my_classes import HeaderReader
#read header conversion map
folder = "C:/Users/lasayr/Documents/GitHub/py_gazedat"
file = "header map 3D.txt"
hr = HeaderReader(folder, file)
### Set folder and data for header conversion map
##folder = "C:/Users/infant/Documents/GitHub/py_gazedat"
##fileModelHM = "header map.txt"
##fileModelCur = "??????.txt"
##
### Read old-new conversion map, for headers
##hmModel = routine.get_headers(folder,fileModelHM)
##hmCurrent = routine.get_headers(folder,fileModelCur)
#vals = list(hm.values()) #list(d.values())
#vals.remove('OBSOLETE')
#print(vals)
home = 'C:\\Users\\lasayr\\Documents\\'
## then do some business
# Source folder:
#input_folder = "C:\\Users\\Public\\Documents\\Tampereen yliopisto\\Eye tracker\\TRE Cohort 2\\gazeAnalysisLib analyses\\7mo,trec2"
#input_folder = home + "D\\Aaltonen\\ct\\ct_6mo"
#input_folder = home + "D\\Aaltonen\\ct\\ct_18mo_temp"
input_folder = home + "D\\Aaltonen\\ct\\ct_48mo"
#input_folder = "D:\\lasayr\\Aaltonen\\mi"
#input_folder = "D:\\lasayr\\Aaltonen\\24mo,trec2"
inFolderUnique = os.path.split(input_folder)[1]
#output_folder = "D:\\lasayr\\Aaltonen\\TREc2_7mo_std TESTING" #output_folder = "D:\\lasayr\\Aaltonen\\ct\\6mo_TESTING"#output_folder = "D:\\lasayr\\Aaltonen\\mi\\testing"#output_folder = "D:\\lasayr\\Aaltonen\\24mo\\testing"
#output_folder = input_folder + "\\testing"
output_folder = home + "D\\Aaltonen\\" + inFolderUnique + "_std"
# Init DataFolder
data_folder = DataFolder(input_folder, map_header = hr, date_limit = "01 Jan 00",
limit_files = (0,None), limit_rows = None)#, fileModelCur)#, limit_files = (0,3))#, limit_rows = 20, limit_files = (1,3))
# Print header map, conversion table
data_folder.print_header_map()
print("\nFiles selected: " + str(data_folder.get_filelist()))
# Change output folder, default is: C:\Users\Public\Documents\Tampereen yliopisto\Eye tracker\TRE Cohort 2\gazeAnalysisLib analyses\testing data
#data_folder.set_output_folder(output_folder)
data_folder.rewrite_data(output_folder)
|
{"/py_test.py": ["/routine.py", "/my_classes.py"], "/standardizeGazedata_0.2.py": ["/routine.py", "/my_classes.py"], "/gazedata2gazedata_sy2 - copy.py": ["/routine.py"], "/check_dataOO_0.1.py": ["/my_classes.py"], "/gazedata2gazedata_sy.py": ["/routine.py"], "/standardizeGazedata_0.1.py": ["/routine.py", "/my_classes.py"], "/gazedata2gazedata_sy3.py": ["/routine.py"], "/check_headersOO_0.1.py": ["/my_classes.py"], "/check_data_multi.py": ["/my_classes.py"], "/my_classes.py": ["/routine.py"], "/py_test_2.py": ["/routine.py", "/my_classes.py"], "/standardizeGazedataAnon.py": ["/my_classes.py"], "/check_headers_2.py": ["/my_classes.py"]}
|
6,881
|
yrttiahoS/py_gazedat
|
refs/heads/master
|
/gazedata2gazedata_sy2 - copy.py
|
import os
import csv
import routine
from collections import OrderedDict
##
# This Jussi's script converts eyetracking data in txt-format to gazedata-format
# It also converts X- and Y- coordinates to relative values for screen size.
# Input folder needs to be relative to the script location in the folder tree.
# In this case the folder where this script is located needs to have a folder
# named "files_to_change" where the files are located.
input_folder = folder = "C:\\Users\\Public\\Documents\\Tampereen yliopisto\\Eye tracker\\TRE Cohort 2\\gazeAnalysisLib analyses\\testing 7mo,trec2"
#input_folder = folder = "C:\Users\Public\Documents\Tampereen yliopisto\Eye tracker\TRE Cohort 2\gazeAnalysisLib analyses\7mo,trec2"
output_folder = "C:\\Users\\Public\\Documents\\Tampereen yliopisto\\Eye tracker\\TRE Cohort 2\\gazeAnalysisLib analyses\\testing data"
file_ext = ".gazedata" #input file extension, .txt
output_file_ext = ".gazedata"
input_file_delimiter = "\t"
output_file_delimiter = input_file_delimiter
##null_values = [".", ""] # two possible kinds values for missing samples
##
##null_values_new = "-999999" #
screen_x_dim = 1920.0 #screen width, adjust for differnt sites?
screen_y_dim = 1020.0 #screen height, adjust for differnt sites?
headers_folder = os.getcwd() #path for headers inputfile
headers_inputfile = "headers_tre_5mo_to_7mo.txt"
##
#map for one type of "gazedata" (or txt) headers, values may not apply to all gazedata
map_header = routine.get_headers(headers_folder, headers_inputfile)
# list files in a directory
diritems = os.listdir(input_folder)
##
#subroutine for processing one file
##def file_process(file, maptable):
##
## print (" Filename matches with the specified file_ext -> processing..")
##
## #self.liststore_exp.append([file])
##
## input_file = file
##
##
## #re-initialize file-specific vars
##
## newrows = [] #processed data, process in function
##
## indexed_maptable = {} #processin mpad
##
## imkeys = [] #processing dict_keys
##
##
##
## # input file reading
##
## with open(os.path.join(input_folder, input_file), "rt") as inputfile:
##
## reader = csv.reader(inputfile, delimiter = input_file_delimiter)
##
## # grab header information, into a list
##
## headers = next(reader) #reader.__next__()
##
##
## # calculate list index numbers for map-keys
##
## indexed_maptable = OrderedDict()
##
## for key in maptable:
## #print("key: " + key)
## #print("index of header: " + str(headers.index(key)))
##
## indexed_maptable[key] = headers.index(key)
##
##
## # loop file rows and cols,
##
## imkeys = indexed_maptable.keys()
##
## for r, row in enumerate(reader):
##
## newrow = []
##
## for k, key in enumerate(imkeys):
##
## ncol = k #indexed_maptable[key]
## if r < 20: break#print(ncol)
##
## #If loop goes out of index, print cell
##
## try:
##
## foo = row[ncol]
##
## except(IndexError):
##
## foo = newrows[r-1]
##
## foo = foo[k]
##
#### print("key: " + key)
#### print("index of header: " + str(headers.index(key)))
#### print ("foo: " + str(foo))
#### print ("row: " + str(r) + " col: " + str(ncol))
#### print( str(len(newrows)))
##
##
## # take away the null-values if they exist
##
## if foo not in null_values: #if row[ncol] not in null_values:
##
## if key in ['LEFT_GAZE_X', 'RIGHT_GAZE_X']:
##
## newrow.append(float(foo) / 1920.0) #newrow.append(float(row[ncol]) / 1920.0)
##
## elif key in ['LEFT_GAZE_Y', 'RIGHT_GAZE_Y']:
##
## newrow.append(float(foo) / 1020.0) #newrow.append(float(row[ncol]) / 1020.0)
##
## else:
##
## newrow.append(foo) #newrow.append(row[ncol])
##
## else:
##
## newrow.append(null_values_new)
##
## newrows.append(newrow)
##
## return newrows, list(imkeys)
##
##
##
##
##
## [subcode]
# "main-like" code
print ("Directory contains " + str(len(diritems)) + " files.")
for filenum, file in zip(range(2716), diritems):
#for filenum, file in enumerate(diritems): #diritems
#print ("Checking file " + str(filenum + 1) + '/' + str(len(diritems)))
if file.endswith(file_ext):
print ("Process file " + str(filenum + 1) + '/' + str(len(diritems)))
print(file)
#read in data, process, and strore in newrows
args_pro = input_folder, file, map_header
newrows, newheaders = routine.file_process(args_pro)
#print (str(newrows))
print("newrows length: " + str(len(newrows)))
# output file formation
# resolve the output file name
input_filename_parts = file.split(".") #input_file.split(".")
output_file = input_filename_parts[0] + output_file_ext
# open file
with open(os.path.join(output_folder, output_file), "wt") as outputfile:
writer = csv.writer(outputfile, delimiter=output_file_delimiter)
# write header row
writer.writerow(newheaders)
# write datarows
for newrow in newrows:
writer.writerow(newrow)
print (" File processed.")
# else:
#print (" Filename did not match the file_ext -> did nothing.")
|
{"/py_test.py": ["/routine.py", "/my_classes.py"], "/standardizeGazedata_0.2.py": ["/routine.py", "/my_classes.py"], "/gazedata2gazedata_sy2 - copy.py": ["/routine.py"], "/check_dataOO_0.1.py": ["/my_classes.py"], "/gazedata2gazedata_sy.py": ["/routine.py"], "/standardizeGazedata_0.1.py": ["/routine.py", "/my_classes.py"], "/gazedata2gazedata_sy3.py": ["/routine.py"], "/check_headersOO_0.1.py": ["/my_classes.py"], "/check_data_multi.py": ["/my_classes.py"], "/my_classes.py": ["/routine.py"], "/py_test_2.py": ["/routine.py", "/my_classes.py"], "/standardizeGazedataAnon.py": ["/my_classes.py"], "/check_headers_2.py": ["/my_classes.py"]}
|
6,882
|
yrttiahoS/py_gazedat
|
refs/heads/master
|
/check_dataOO_0.1.py
|
import os
import csv
from my_classes import DataFolder
from datetime import datetime
# script for writing headers and data stats from different gazedata to file(s)
# path to data testing eg., 7mo,trec2
tre5mo_old = "C:\\Users\\Public\\Documents\\Tampereen yliopisto\\Eye tracker\\TRE Cohort 2\\gazeAnalysisLib analyses\\5mo,trec2"
tre7mo_testin = "C:\\Users\\Public\\Documents\\Tampereen yliopisto\\Eye tracker\\TRE Cohort 2\\gazeAnalysisLib analyses\\testing 7mo,trec2"
tre7mo_old = "C:\\Users\\Public\\Documents\\Tampereen yliopisto\\Eye tracker\\TRE Cohort 2\\gazeAnalysisLib analyses\\7mo,trec2"
mi = ("D:\\lasayr\\Aaltonen\\mi") #+ folder_name)
folder_path = tre7mo_old
# create new DataFolder to be inspected/processed, limit_files = (19,20) or (0, None)
data_folder = DataFolder(folder_path, limit_rows = 100, limit_files = (2600,2600+10))
# target output to current working directory, cwd
data_folder.set_output_folder(os.getcwd())
##
# list statistics from differt files to output "log"
start_time = datetime.now()
#percentiles parameter for numerical stats
percentiles = (0.1,99.9)
data_folder.write_stats_to_file(percentiles)
print(datetime.now() - start_time) #print time elapsed
headers = data_folder.get_headers()
for header in headers:
print(header + ": " + str(data_folder.get_stats(header)))
|
{"/py_test.py": ["/routine.py", "/my_classes.py"], "/standardizeGazedata_0.2.py": ["/routine.py", "/my_classes.py"], "/gazedata2gazedata_sy2 - copy.py": ["/routine.py"], "/check_dataOO_0.1.py": ["/my_classes.py"], "/gazedata2gazedata_sy.py": ["/routine.py"], "/standardizeGazedata_0.1.py": ["/routine.py", "/my_classes.py"], "/gazedata2gazedata_sy3.py": ["/routine.py"], "/check_headersOO_0.1.py": ["/my_classes.py"], "/check_data_multi.py": ["/my_classes.py"], "/my_classes.py": ["/routine.py"], "/py_test_2.py": ["/routine.py", "/my_classes.py"], "/standardizeGazedataAnon.py": ["/my_classes.py"], "/check_headers_2.py": ["/my_classes.py"]}
|
6,883
|
yrttiahoS/py_gazedat
|
refs/heads/master
|
/gazedataOO_0.2.py
|
# Script: convert eyetracking txt data to .gazedata format using the
# object-oriented GazeReader class (first OO version of these scripts).
import os
import csv
import routine
#from collections import OrderedDict
from itertools import islice
from my_classes import GazeReader
# This Jussi's script converts eyetracking data in txt-format to gazedata-format
# It also converts X- and Y- coordinates to relative values for screen size.
# Input folder needs to be relative to the script location in the folder tree.
# In this case the computer(!) where this script is located needs to have a folder
# named 'testing 7mo,trec2' where the files are located.
# VERSION IS 1st TO ADOPT ObjectOriented programming of our gazedata scripts
input_folder = 'testing 7mo,trec2' #"C:\\Users\\Public\\Documents\\Tampereen yliopisto\\Eye tracker\\TRE Cohort 2\\gazeAnalysisLib analyses\\testing 7mo,trec2"
#input_folder = folder = "C:\Users\Public\Documents\Tampereen yliopisto\Eye tracker\TRE Cohort 2\gazeAnalysisLib analyses\7mo,trec2"
n_files = 1 # set limit for files to be processed, None if no limit desired
output_folder = "C:\\Users\\Public\\Documents\\Tampereen yliopisto\\Eye tracker\\TRE Cohort 2\\gazeAnalysisLib analyses\\testing data"
os.makedirs(output_folder,exist_ok=True)
file_ext = ".gazedata" #input file extension, .txt
output_file_ext = ".gazedata"
input_file_delimiter = "\t"
output_file_delimiter = input_file_delimiter
headers_folder = os.getcwd() #path for headers inputfile
headers_inputfile = "headers_tre_5mo_to_7mo.txt"
#map for one type of "gazedata" (or txt) headers, values may not apply to all gazedata
map_header = routine.get_headers(headers_folder, headers_inputfile)
# find directory by "walking" through the system
# NOTE(review): walking all of C:\Users can be slow, and the LAST matching
# folder wins because input_folder is reassigned on every match.
for root, dirs, files in os.walk('C:\\Users\\'):
    if input_folder in root:
        print(root, " ", end=" FOUND! ")
        print("")
        input_folder = root
print (input_folder)
##
# list files in a directory,
diritems = os.listdir(input_folder)
print ("Directory contains " + str(len(diritems)) + " files.")
#loop through files, limit loop by isslice(items, start, stop), can be None
for filenum, file in islice(enumerate(diritems), 0, n_files):
    #print ("Checking file " + str(filenum + 1) + '/' + str(len(diritems)))
    if file.endswith(file_ext):
        print ("Process file " + str(filenum + 1) + '/' + str(len(diritems)))
        print(file)
        #read in data, process, and strore in newrows
        args_pro = input_folder, file, map_header
        # make new GazeReader object for reading and processing input file
        f_processor = GazeReader(args_pro, 40) #40 is optional limit for rows
        # NOTE(review): the row limit is already 40 from the constructor;
        # this call repeats it (useful only as a debugging override).
        f_processor.set_row_limit(40) # limit rows, good for debugging
        print("Newrows length: " + str(f_processor.get_row_count()))
        # output file formation
        # resolve the output file name
        input_filename_parts = file.split(".") #input_file.split(".")
        output_file = input_filename_parts[0] + output_file_ext
        # open output file
        with open(os.path.join(output_folder, output_file), "wt") as outputfile:
            writer = csv.writer(outputfile, delimiter=output_file_delimiter)
            # write header row
            writer.writerow(f_processor.get_headers())
            # write datarows, until there are none left
            newrow = True
            while newrow:
                newrow = f_processor.get_new_row()
                if (newrow):
                    writer.writerow(newrow)
        print (" File processed.")
# else:
    #print (" Filename did not match the file_ext -> did nothing.")
|
{"/py_test.py": ["/routine.py", "/my_classes.py"], "/standardizeGazedata_0.2.py": ["/routine.py", "/my_classes.py"], "/gazedata2gazedata_sy2 - copy.py": ["/routine.py"], "/check_dataOO_0.1.py": ["/my_classes.py"], "/gazedata2gazedata_sy.py": ["/routine.py"], "/standardizeGazedata_0.1.py": ["/routine.py", "/my_classes.py"], "/gazedata2gazedata_sy3.py": ["/routine.py"], "/check_headersOO_0.1.py": ["/my_classes.py"], "/check_data_multi.py": ["/my_classes.py"], "/my_classes.py": ["/routine.py"], "/py_test_2.py": ["/routine.py", "/my_classes.py"], "/standardizeGazedataAnon.py": ["/my_classes.py"], "/check_headers_2.py": ["/my_classes.py"]}
|
6,884
|
yrttiahoS/py_gazedat
|
refs/heads/master
|
/gazedata2gazedata_sy.py
|
# Script: convert eyetracking txt data to .gazedata, scaling X/Y gaze
# coordinates to screen-relative values (procedural version).
import os
import csv
import routine
# This Jussi's script converts eyetracking data in txt-format to gazedata-format
# It also converts X- and Y- coordinates to relative values for screen size.
# Input folder needs to be relative to the script location in the folder tree.
# In this case the folder where this script is located needs to have a folder
# named "files_to_change" where the files are located.
input_folder = folder = "C:\\Users\\Public\\Documents\\Tampereen yliopisto\\Eye tracker\\TRE Cohort 2\\gazeAnalysisLib analyses\\7mo,trec2"
#input_folder = folder = "C:\Users\Public\Documents\Tampereen yliopisto\Eye tracker\TRE Cohort 2\gazeAnalysisLib analyses\7mo,trec2"
output_folder = "C:\\Users\\Public\\Documents\\Tampereen yliopisto\\Eye tracker\\TRE Cohort 2\\gazeAnalysisLib analyses\\testing data"
ending = ".gazedata" #input file extension, .txt
output_file_ending = ".gazedata"
input_file_delimiter = "\t"
null_values = [".", ""] # two possible kinds values for missing samples
replace_null_values = "-999999" # value written in place of missing samples
screen_x_dim = 1920.0 #screen width, adjust for differnt sites?
screen_y_dim = 1020.0 #screen height, adjust for differnt sites?
headers_folder = os.getcwd() #path for headers inputfile
headers_inputfile = "headers_tre_5mo_to_7mo.txt"
#map for one type of "gazedata" (or txt) headers, values may not apply to all gazedata
maptable = routine.get_headers(headers_folder, headers_inputfile)
#subroutine for processing one file
def file_process(file):
    """Read one gazedata file, remap its columns per ``maptable`` and
    scale gaze coordinates to screen-relative values.

    Uses module-level configuration (input_folder, maptable, null_values,
    replace_null_values, input_file_delimiter). Returns the processed
    rows and the keys of the indexed map table.
    """
    print (" Filename matches with the specified ending -> processing..")
    input_file = file
    # input file reading
    newrows = []
    with open(os.path.join(input_folder, input_file), "rt") as inputfile:
        reader = csv.reader(inputfile, delimiter = input_file_delimiter)
        # grab header information, into a list
        headers = next(reader)
        # calculate list index numbers for map-keys
        indexed_maptable = {}
        for key in maptable:
            print("key: " + key)
            # NOTE(review): raises ValueError if "Subject" is not a header
            print("index of header: " + str(headers.index("Subject")))
            # BUG FIX: original concatenated str + int here, which raised
            # TypeError on the first iteration; wrap the index in str().
            print("headers index key: " + str(headers.index(key)))
            indexed_maptable[key] = headers.index(key)
        # loop file rows and cols,
        imkeys = indexed_maptable.keys()
        for row in reader:
            newrow = []
            for key in imkeys:
                ncol = indexed_maptable[key]
                # take away the null-values if they exist
                if row[ncol] not in null_values:
                    if key in ['LEFT_GAZE_X', 'RIGHT_GAZE_X']:
                        newrow.append(float(row[ncol]) / 1920.0)
                    elif key in ['LEFT_GAZE_Y', 'RIGHT_GAZE_Y']:
                        newrow.append(float(row[ncol]) / 1020.0)
                    else:
                        newrow.append(row[ncol])
                else:
                    newrow.append(replace_null_values)
            newrows.append(newrow)
    # return results so callers no longer depend on leaking locals
    return newrows, imkeys
# scan through files in a directory
diritems = os.listdir(input_folder)
print ("Directory contains " + str(len(diritems)) + " files.")
# NOTE(review): zip(range(10), diritems) caps processing at 10 files.
for filenum, file in zip(range(10), diritems):
    #for filenum, file in enumerate(diritems): #diritems
    #print ("Checking file " + str(filenum + 1) + '/' + str(len(diritems)))
    if file.endswith(ending):
        print ("Process file " + str(filenum + 1) + '/' + str(len(diritems)))
        file_process(file)
        # output file formation
        # resolve the output file name
        input_filename_parts = file.split(".") #input_file.split(".")
        output_file = input_filename_parts[0] + output_file_ending
        # open file
        # NOTE(review): "wb" (binary mode) does not work with csv.writer in
        # Python 3 -- it needs text mode, e.g. open(..., "wt", newline="").
        with open(os.path.join(output_folder, output_file), "wb") as outputfile:
            # NOTE(review): '\\t' is a two-character string (backslash + t);
            # csv requires a one-character delimiter such as '\t'.
            writer = csv.writer(outputfile, delimiter='\\t')
            # form header row
            # NOTE(review): newrows and imkeys are locals of file_process and
            # are not visible here -- this block raises NameError as written.
            newheaders = []
            for key in imkeys:
                newheaders.append(maptable[key])
            # write header row
            writer.writerow(newheaders)
            # write datarows
            for newrow in newrows:
                writer.writerow(newrow)
        print (" File processed.")
# else:
    #print (" Filename did not match the ending -> did nothing.")
#map for one type of "gazedata" (or txt) headers, values may not apply to all gazedata
##maptable = {"TIMESTAMP":"TETTime",
##
## "RECORDING_SESSION_LABEL":"Subject",
##
## "LEFT_GAZE_X":"XGazePosLeftEye",
##
## "LEFT_GAZE_Y":"YGazePosLeftEye",
##
## "RIGHT_GAZE_X":"XGazePosRightEye",
##
## "RIGHT_GAZE_Y":"YGazePosRightEye",
##
## "TRIAL_INDEX":"TrialId",
##
## "SAMPLE_MESSAGE":"UserDefined_1",
##
## "RIGHT_PUPIL_SIZE":"DiameterPupilRightEye",
##
## "stimulus_right_2":"Stim",
##
## "__target_x__1":"Target"}
|
{"/py_test.py": ["/routine.py", "/my_classes.py"], "/standardizeGazedata_0.2.py": ["/routine.py", "/my_classes.py"], "/gazedata2gazedata_sy2 - copy.py": ["/routine.py"], "/check_dataOO_0.1.py": ["/my_classes.py"], "/gazedata2gazedata_sy.py": ["/routine.py"], "/standardizeGazedata_0.1.py": ["/routine.py", "/my_classes.py"], "/gazedata2gazedata_sy3.py": ["/routine.py"], "/check_headersOO_0.1.py": ["/my_classes.py"], "/check_data_multi.py": ["/my_classes.py"], "/my_classes.py": ["/routine.py"], "/py_test_2.py": ["/routine.py", "/my_classes.py"], "/standardizeGazedataAnon.py": ["/my_classes.py"], "/check_headers_2.py": ["/my_classes.py"]}
|
6,885
|
yrttiahoS/py_gazedat
|
refs/heads/master
|
/standardizeGazedata_0.1.py
|
##Script for reading, modifying and re-writing gazedata
##*this will have some algorithms from
##gazedata2gazedata_sy3.py and the like, but should use classes in my_classes.py
##*use DataFolder class to access all gazefiles in folder
## **write new method for writing new data into folder
## *method will loop through files, i.e., GazeReader objects
## *method will read (gettinf from GazeReader),
## change (by accessing separate textfile),
## and write headers to new file (use writer object?)
## *method will read, change, and write datarow one-by-one into new file,
## same logic as with headers, changing might be more tricky?
## *i.e., GazeReader will never have to give up entire data!
## first things 1st
import routine
from my_classes import DataFolder
# Set folder and data for header conversion map
folder = "C:/Users/infant/Documents/GitHub/py_gazedat"
file = "header map.txt"
# Read old-new conversion map, for headers
hm = routine.get_headers(folder,file)
#vals = list(hm.values()) #list(d.values())
#vals.remove('OBSOLETE')
#print(vals)
## then do some business
# Source folder:
input_folder = "C:\\Users\\Public\\Documents\\Tampereen yliopisto\\Eye tracker\\TRE Cohort 2\\gazeAnalysisLib analyses\\7mo,trec2"
output_folder = "D:\\lasayr\\Aaltonen\\TREc2_7mo_std"
# Init DataFolder; the commented keyword limits (limit_rows/limit_files)
# can be re-enabled for quick debugging runs.
data_folder = DataFolder(input_folder, map_header = hm)#, limit_files = (0,3))#, limit_rows = 20, limit_files = (1,3))
# Print header map, conversion table
data_folder.print_header_map()
# Change output folder, default is: C:\Users\Public\Documents\Tampereen yliopisto\Eye tracker\TRE Cohort 2\gazeAnalysisLib analyses\testing data
#data_folder.set_output_folder(output_folder)
# Read every gazedata file, convert headers via the map, and write the
# standardized copies into output_folder.
data_folder.rewrite_data(output_folder)
|
{"/py_test.py": ["/routine.py", "/my_classes.py"], "/standardizeGazedata_0.2.py": ["/routine.py", "/my_classes.py"], "/gazedata2gazedata_sy2 - copy.py": ["/routine.py"], "/check_dataOO_0.1.py": ["/my_classes.py"], "/gazedata2gazedata_sy.py": ["/routine.py"], "/standardizeGazedata_0.1.py": ["/routine.py", "/my_classes.py"], "/gazedata2gazedata_sy3.py": ["/routine.py"], "/check_headersOO_0.1.py": ["/my_classes.py"], "/check_data_multi.py": ["/my_classes.py"], "/my_classes.py": ["/routine.py"], "/py_test_2.py": ["/routine.py", "/my_classes.py"], "/standardizeGazedataAnon.py": ["/my_classes.py"], "/check_headers_2.py": ["/my_classes.py"]}
|
6,886
|
yrttiahoS/py_gazedat
|
refs/heads/master
|
/gazedata2gazedata_sy3.py
|
# Script: convert eyetracking txt data to .gazedata using the shared
# helpers in routine.py (procedural version 3).
import os
import csv
import routine
#from collections import OrderedDict
from itertools import islice
# This Jussi's script converts eyetracking data in txt-format to gazedata-format
# It also converts X- and Y- coordinates to relative values for screen size.
# Input folder needs to be relative to the script location in the folder tree.
# In this case the folder where this script is located needs to have a folder
# named "files_to_change" where the files are located.
input_folder = folder = "testing 7mo,trec2" #"C:\\Users\\Public\\Documents\\Tampereen yliopisto\\Eye tracker\\TRE Cohort 2\\gazeAnalysisLib analyses\\testing 7mo,trec2"
#input_folder = folder = "C:\Users\Public\Documents\Tampereen yliopisto\Eye tracker\TRE Cohort 2\gazeAnalysisLib analyses\7mo,trec2"
output_folder = "C:\\Users\\Public\\Documents\\Tampereen yliopisto\\Eye tracker\\TRE Cohort 2\\gazeAnalysisLib analyses\\testing data"
os.makedirs(output_folder,exist_ok=True)
#if ~os.path.isdir(output_folder):
file_ext = ".gazedata" #input file extension, .txt
output_file_ext = ".gazedata"
input_file_delimiter = "\t"
output_file_delimiter = input_file_delimiter
headers_folder = os.getcwd() #path for headers inputfile
headers_inputfile = "headers_tre_5mo_to_7mo.txt"
#map for one type of "gazedata" (or txt) headers, values may not apply to all gazedata
map_header = routine.get_headers(headers_folder, headers_inputfile)
# find directory by "walking" through the system
# NOTE(review): the LAST matching folder wins, since input_folder is
# reassigned on every match while walking C:\Users.
for root, dirs, files in os.walk('C:\\Users\\'):
    if input_folder in root:
        print(root, " ", end=" FOUND! ")
        print("")
        input_folder = root
print (input_folder)
# list files in a directory
diritems = os.listdir(input_folder)
print ("Directory contains " + str(len(diritems)) + " files.")
#loop through files, limit loop by isslice(items, start, stop)
for filenum, file in islice(enumerate(diritems), 0, 1):
    #print ("Checking file " + str(filenum + 1) + '/' + str(len(diritems)))
    if file.endswith(file_ext):
        print ("Process file " + str(filenum + 1) + '/' + str(len(diritems)))
        print(file)
        #read in data, process, and strore in newrows
        args_pro = input_folder, file, map_header
        newrows, newheaders = routine.file_process(args_pro)
        #print (str(newrows))
        print("newrows length: " + str(len(newrows)))
        # output file formation
        # resolve the output file name
        input_filename_parts = file.split(".") #input_file.split(".")
        output_file = input_filename_parts[0] + output_file_ext
        # open file
        with open(os.path.join(output_folder, output_file), "wt") as outputfile:
            writer = csv.writer(outputfile, delimiter=output_file_delimiter)
            # write header row
            writer.writerow(newheaders)
            # write datarows
            for newrow in newrows:
                writer.writerow(newrow)
        print (" File processed.")
# else:
    #print (" Filename did not match the file_ext -> did nothing.")
|
{"/py_test.py": ["/routine.py", "/my_classes.py"], "/standardizeGazedata_0.2.py": ["/routine.py", "/my_classes.py"], "/gazedata2gazedata_sy2 - copy.py": ["/routine.py"], "/check_dataOO_0.1.py": ["/my_classes.py"], "/gazedata2gazedata_sy.py": ["/routine.py"], "/standardizeGazedata_0.1.py": ["/routine.py", "/my_classes.py"], "/gazedata2gazedata_sy3.py": ["/routine.py"], "/check_headersOO_0.1.py": ["/my_classes.py"], "/check_data_multi.py": ["/my_classes.py"], "/my_classes.py": ["/routine.py"], "/py_test_2.py": ["/routine.py", "/my_classes.py"], "/standardizeGazedataAnon.py": ["/my_classes.py"], "/check_headers_2.py": ["/my_classes.py"]}
|
6,887
|
yrttiahoS/py_gazedat
|
refs/heads/master
|
/check_headersOO_0.1.py
|
# Script: write the header rows found in each gazedata file of a folder
# to a "log" file in the current working directory.
import os
import csv
from my_classes import DataFolder
from datetime import datetime
from collections import OrderedDict
# script for writing headers from different gazedata to file(s)
# path to data
folder_path = ("D:\\lasayr\\Aaltonen\\mi") #+ folder_name)
# create new DataFolder to be inspected/processed
data_folder = DataFolder(folder_path)# limit_files = (1,None))#(1,100))
# target output to current working directory, cwd
data_folder.set_output_folder(os.getcwd())
#print(os.getcwd() )
##
# list headers from differt files to output "log"
start_time = datetime.now()
data_folder.write_headers_to_file()
print(datetime.now() - start_time) #print time elapsed
|
{"/py_test.py": ["/routine.py", "/my_classes.py"], "/standardizeGazedata_0.2.py": ["/routine.py", "/my_classes.py"], "/gazedata2gazedata_sy2 - copy.py": ["/routine.py"], "/check_dataOO_0.1.py": ["/my_classes.py"], "/gazedata2gazedata_sy.py": ["/routine.py"], "/standardizeGazedata_0.1.py": ["/routine.py", "/my_classes.py"], "/gazedata2gazedata_sy3.py": ["/routine.py"], "/check_headersOO_0.1.py": ["/my_classes.py"], "/check_data_multi.py": ["/my_classes.py"], "/my_classes.py": ["/routine.py"], "/py_test_2.py": ["/routine.py", "/my_classes.py"], "/standardizeGazedataAnon.py": ["/my_classes.py"], "/check_headers_2.py": ["/my_classes.py"]}
|
6,888
|
yrttiahoS/py_gazedat
|
refs/heads/master
|
/routine.py
|
# routine.py -- shared helpers and configuration for the gazedata scripts.
import os
import csv
from collections import OrderedDict
from itertools import islice
input_file_delimiter = "\t"
null_values = [".", ""] # two possible kinds values for missing samples
null_values_new = "-999999" # replacement value written for missing samples
x_coord = ['LEFT_GAZE_X', 'RIGHT_GAZE_X']
y_coord = ['LEFT_GAZE_Y', 'RIGHT_GAZE_Y']
screen_x_dim = 1920.0 #screen width, adjust for differnt sites?
screen_y_dim = 1020.0 #screen height, adjust for differnt sites?
##
def miracle(number):
    """Print *number* on its own line, *number* times (demo helper)."""
    for _ in range(number):
        print(number)
#miracle(3)
##
def wonder(args):
    """Print each element of *args* on its own line (demo helper)."""
    for element in args:
        print(element)
#miracle(3)
## [subcode]
def get_headers(dir_path, input_file):
    """Read header-mapping pairs from a delimited text file.

    Each row's first column is the key and its last column the value.
    Returns an OrderedDict preserving the file's row order.
    """
    mapping = OrderedDict()
    file_path = os.path.join(dir_path, input_file)
    with open(file_path, "rt") as handle:
        for row in csv.reader(handle, delimiter = input_file_delimiter):
            mapping[row[0]] = row[-1]
    return mapping
##
#routine for processing gazedata, read, manipulate, return
#takes a list of argument including
#input folder
#input file
#maptable, mapping old data headers (in "file") to new data headers
def file_process(t_args):
    """Read one gazedata file and return (processed rows, new header names).

    t_args: tuple of (input folder, input file name, maptable), where
    maptable is an OrderedDict mapping old header names to new ones.
    Each cell is passed through manipulate(); rows are read up to a
    fixed debugging limit of 40.
    """
    input_folder = (t_args[0])
    input_file = (t_args[1])
    maptable = (t_args[2]) #OrderedDict where key is old and value new header
    print (" Filename matches with the specified file_ext -> processing..")
    newrows = [] #processed data, process in function
    # input file reading
    with open(os.path.join(input_folder, input_file), "rt") as inputfile:
        reader = csv.reader(inputfile, delimiter = input_file_delimiter)
        # grab header information (also advances the reader past row 0)
        data_headers = next(reader)
        # loop file rows and cols (row count capped at 40 for debugging)
        for r, row in islice(enumerate(reader), 0, 40):
            newrow = []
            for h, header in enumerate(maptable.keys()):
                ncol = h
                try: #try to accces data element
                    foo = row[ncol]
                except(IndexError):
                    # Row shorter than expected: reuse the value from the
                    # previous processed row.
                    # BUG FIX: original indexed with the undefined name `k`
                    # (NameError); the intended column index is `h`.
                    # NOTE(review): still fails if the very first row is
                    # short (newrows is empty then).
                    foo = newrows[r-1][h]
                foo = manipulate(foo, header)
                newrow.append(foo)
            newrows.append(newrow)
    return newrows, maptable.values()
##
#routine for processing gazedata, read, manipulate, return
def manipulate(data, header):
    """Normalize one data cell.

    Missing samples (values in null_values) become null_values_new;
    X/Y gaze coordinates are scaled to screen-relative values; every
    other value passes through unchanged.
    """
    if data in null_values:
        return null_values_new
    if header in x_coord:
        return float(data) / screen_x_dim
    if header in y_coord:
        return float(data) / screen_y_dim
    return data
##
def string_or_number(s):
    """Convert *s* to int if possible, else float, else return it as-is."""
    for converter in (int, float):
        try:
            return converter(s)
        except ValueError:
            pass
    return s
##
from datetime import datetime
from datetime import timedelta
start_time = datetime.now()
#returns the elapsed milliseconds since the start of the program
def millis(start_time):
    """Return the milliseconds elapsed since *start_time* (a datetime)."""
    elapsed = datetime.now() - start_time
    # days + seconds cover the whole-second part; microseconds the rest
    total_ms = (elapsed.days * 86400 + elapsed.seconds) * 1000 + elapsed.microseconds / 1000
    return total_ms
|
{"/py_test.py": ["/routine.py", "/my_classes.py"], "/standardizeGazedata_0.2.py": ["/routine.py", "/my_classes.py"], "/gazedata2gazedata_sy2 - copy.py": ["/routine.py"], "/check_dataOO_0.1.py": ["/my_classes.py"], "/gazedata2gazedata_sy.py": ["/routine.py"], "/standardizeGazedata_0.1.py": ["/routine.py", "/my_classes.py"], "/gazedata2gazedata_sy3.py": ["/routine.py"], "/check_headersOO_0.1.py": ["/my_classes.py"], "/check_data_multi.py": ["/my_classes.py"], "/my_classes.py": ["/routine.py"], "/py_test_2.py": ["/routine.py", "/my_classes.py"], "/standardizeGazedataAnon.py": ["/my_classes.py"], "/check_headers_2.py": ["/my_classes.py"]}
|
6,889
|
yrttiahoS/py_gazedat
|
refs/heads/master
|
/check_data_multi.py
|
# Script: collect header/data statistics from several gazedata folders
# into one tab-separated "log" file (plus per-folder stats files).
import os
import csv
from my_classes import DataFolder
from datetime import datetime, date
from collections import OrderedDict
# script for writing headers and data stats from different gazedata to file(s)
##
#parameter setting
#set output file
output_file_name = ("multi_folder_data_" + str(date.today()) + ".txt")
output_folder = os.getcwd()
#limits data analysis for quick peek...
limit_last_row = None
limit_last_file = None
# percentiles parameter for numerical stats
percentiles = (0.1,99.9)
# paths to data eg., 7mo,trec2
folders = OrderedDict()
folders['tre5mo_old'] = "C:\\Users\\Public\\Documents\\Tampereen yliopisto\\Eye tracker\\TRE Cohort 2\\gazeAnalysisLib analyses\\5mo,trec2"
folders['tre5mo_new'] = "D:\\lasayr\\Aaltonen\\5mo"
folders['tre7mo_old'] = "C:\\Users\\Public\\Documents\\Tampereen yliopisto\\Eye tracker\\TRE Cohort 2\\gazeAnalysisLib analyses\\7mo,trec2"
folders['tre24mo_old'] = "C:\\Users\\Public\\Documents\\Tampereen yliopisto\\Eye tracker\\TRE Cohort 2\\gazeAnalysisLib analyses\\24mo, trec2"
folders['tre24mo_new'] = "D:\\lasayr\\Aaltonen\\24mo"
folders['ct_6mo'] = "D:\\lasayr\\Aaltonen\\ct\\6mo"
folders['ct_18mo'] = "D:\\lasayr\\Aaltonen\\ct\\18mo"
folders['hki'] = "D:\\lasayr\\Aaltonen\\hki"
folders['mi'] = "D:\\lasayr\\Aaltonen\\mi"
#testing with only some folders
# NOTE(review): this reassignment deliberately discards the full folder
# list configured above; remove it to process every folder.
folders = OrderedDict()
folders['tre24mo_old'] = "C:\\Users\\Public\\Documents\\Tampereen yliopisto\\Eye tracker\\TRE Cohort 2\\gazeAnalysisLib analyses\\24mo, trec2"
folders['tre24mo_new'] = "D:\\lasayr\\Aaltonen\\24mo"
folders['ct_6mo'] = "D:\\lasayr\\Aaltonen\\ct\\6mo"
folders['ct_18mo'] = "D:\\lasayr\\Aaltonen\\ct\\18mo"
##
#write "logfile" output to list statistics from differt files
with open(os.path.join(output_folder, output_file_name),
          "wt", newline = "\n") as outputfile:
    #construct csv.writer based on outputfile
    writer = csv.writer(outputfile, delimiter="\t")
    #loop through folders containing gazedata files
    for folder in folders:
        # use timer to time
        start_time = datetime.now()
        #print folder at hand for tracking process
        print("\n" + "process folder: " + folders[folder])
        # create new DataFolder to be inspected/processed,
        data_folder = DataFolder(folders[folder],
                                 limit_rows = limit_last_row,
                                 limit_files = (0,limit_last_file))
        data_folder.set_output_folder(output_folder)
        #write stats directly with DataFolder class (to seprate files)
        data_folder.write_stats_to_file(percentiles)
        #"logfile", based on data headers
        writer.writerow( [folder]) #print time elapsed )
        writer.writerow( [str(datetime.now() - start_time)]) #print time elapsed )
        headers = data_folder.get_headers()
        # NOTE(review): `output` is never used below.
        output = []
        for header in headers:
            writer.writerow([header] + [str(data_folder.get_stats(header))])
        writer.writerow([])
|
{"/py_test.py": ["/routine.py", "/my_classes.py"], "/standardizeGazedata_0.2.py": ["/routine.py", "/my_classes.py"], "/gazedata2gazedata_sy2 - copy.py": ["/routine.py"], "/check_dataOO_0.1.py": ["/my_classes.py"], "/gazedata2gazedata_sy.py": ["/routine.py"], "/standardizeGazedata_0.1.py": ["/routine.py", "/my_classes.py"], "/gazedata2gazedata_sy3.py": ["/routine.py"], "/check_headersOO_0.1.py": ["/my_classes.py"], "/check_data_multi.py": ["/my_classes.py"], "/my_classes.py": ["/routine.py"], "/py_test_2.py": ["/routine.py", "/my_classes.py"], "/standardizeGazedataAnon.py": ["/my_classes.py"], "/check_headers_2.py": ["/my_classes.py"]}
|
6,890
|
yrttiahoS/py_gazedat
|
refs/heads/master
|
/my_classes.py
|
# my_classes.py -- classes for reading and batch-processing gazedata files.
import os
import glob
import csv
import time
from collections import OrderedDict
from collections import namedtuple
import routine
from itertools import islice
import numpy as np
from datetime import date
import random
# placeholder written into output cells that have no source value
EMPTY_STRING = " "
# fallback file holding header names when a data file has none
HEADERFILENAME = 'headers.txt'
# header-map value marking a column that should be dropped
OBSOLETE_HEADER = 'OBSOLETE'
NULL_VALUES = [".", "", "1.#INF", "-1.#INF", "1.#IND", "-1.#IND",
               "-1.#QNAN" , "1.#QNAN", "-"] # possible kinds values for missing samples
NULL_VALUES_NEW = "-999999" # replacement value for missing samples
INPUT_DELIMITER_DEFAULT = '\t'
X_COORD_HEADERS = ['LEFT_GAZE_X', 'RIGHT_GAZE_X']
Y_COORD_HEADERS = ['LEFT_GAZE_Y', 'RIGHT_GAZE_Y']
SUBJECT_HEADER = 'Filename'#'Subject'
STIM_HEADER = 'Stim'
TIME_HEADER = 'TETTime'
LATERAL_STIM_HEADER = 'LateralStimPos'
SCREEN_X_DIM = 1920.0 #screen width, adjust for differnt sites?
SCREEN_Y_DIM = 1020.0 #screen height, adjust for differnt sites?
class MyClass:
    """A simple example class."""

    def __init__(self, arku = "qwerty"):
        # fixed demo value and the word supplied by the caller
        self.i = 12345
        self.word = arku

    def f(self):
        """Return the stored word, or 'hello world' if it is empty."""
        return self.word if self.word else 'hello world'

    def get_i(self):
        """Return the demo integer attribute."""
        return self.i
class GazeReader:
"""A class for Reading and processing gazedata"""
def __init__(self, t_args, limit = None, percentiles = (1,99), anonymize = False ):
    """Read and pre-process one gazedata file.

    t_args: tuple of (input folder, input file name, header map table).
    limit: optional cap on the number of data rows read (None = all).
    percentiles: (low, high) percentile points used by get_data_stats.
    anonymize: when True, the output file name is replaced with a
        generated anonymous name and the first timestamp is recorded
        (used later to offset TETTime values).
    """
    self.input_folder = (t_args[0])
    self.input_file = (t_args[1])
    self.maptable = (t_args[2]) #OrderedDict where key is old and value new header
    #self.mapCurrent = (t_args[3]) #OrderedDict where key is old and value new header
    # self.newrows = [] #processed data, process in function
    self._limit_row = limit# (t_args[3]) # limit for rows to process, None if no limit
    self._input_file_delimiter = INPUT_DELIMITER_DEFAULT
    self.output_file = self.input_file
    # extension of the file extracted
    self.file_ext = os.path.splitext(self.input_file)[-1]
    # anonymize if needed
    # NOTE(review): self._timestamp is only defined when anonymize=True;
    # _manipulate's TIME_HEADER branch relies on it existing.
    if anonymize:
        self._anon, self._timestamp, = self._anonymize_fun() #finds 1st timevalue
    else:
        self._anon = False
    print('Read data to GazeReader')
    datrows, datheaders = self._read_data() #reads in data
    self.data_rows = datrows
    self.data_headers = datheaders
    # per-header value lists; filled lazily by _odictionarize_data
    self.data_od = {}#OrderedDict()
    # percentile points for extracting nearly min-max range of gazedata values
    print(percentiles[0])
    self.lo_perc = percentiles[0]
    self.hi_perc = percentiles[1]
    # cursor used when handing out rows one-by-one
    self.r_ind = -1
def _anonymize_fun(self):
    """Switch this reader to anonymized output.

    Renames the output file to a generated 'Anon-...' name and returns
    (True, first timestamp in the data) so the caller can store both.
    """
    first_timevalue = self._find_timestart()
    # time-of-day plus current milliseconds makes the name effectively unique
    millisecond_part = '%d' % (time.time() % 1 * 1000)
    anon_name = 'Anon-' + time.strftime("%H%M%S", time.localtime()) + millisecond_part
    self.set_filename_out(anon_name)
    return True, first_timevalue
def _read_data(self):
    """Read the input file into (rows, headers), remapping columns.

    Headers come from the file's first row; if they all look numeric
    (i.e. the file has no header row) they are loaded from a separate
    headers file and the reader is rewound. When no maptable is set,
    the raw headers are returned with an empty row list. Otherwise each
    standard header in the maptable is located among the file's headers
    and its values are copied into the mapped output column; headers
    mapped to OBSOLETE are dropped.

    NOTE(review): self.maptable is accessed here via getKeys() /
    get_header_newName() / get_header_colNum() / get_n_of_uniqueCols(),
    i.e. a custom mapping object, not a plain OrderedDict as the
    constructor comment suggests -- confirm which type callers pass.
    """
    with open(os.path.join(self.input_folder, self.input_file), "rt") as inputfile:
        reader = csv.reader(inputfile, delimiter = self._input_file_delimiter)
        # Grab header information, into a list
        data_headers = next(reader) #reader.__next__()
        # Check if headers are numerical (or strings that can
        # be converted to numbers).
        headers_numform=[]
        truly_strings=[]
        for header in data_headers:
            # Convert header to num if possible.
            headers_numform.append(routine.string_or_number(header))
            # Check if header remained a string.
            truly_strings.append(isinstance(headers_numform[-1], str)) #[-1] is the last element...!
        # If no good headers (i.e., strings) make headers empty.
        if not all(truly_strings):
            data_headers = self._read_headers()
            # "Restart" the reader, so that no data will be missed afterwards.
            inputfile.seek(0)
        # Data rows storage for this function
        newrows = []
        if not self.maptable:
            newheaders = data_headers
            return newrows, data_headers
        hdrKeys = self.maptable.getKeys()
        # pre-fill new headers with placeholders, one per mapped column
        newheaders = [" " for i in range(self.maptable.get_n_of_uniqueCols())]
        #make new headers
        for header in enumerate(hdrKeys):
            if not self.maptable.get_header_newName(header[1]) == OBSOLETE_HEADER:
                # column numbers in the map are 1-based; lists are 0-based
                newCol = self.maptable.get_header_colNum(header[1])-1
                newheaders.pop(newCol)
                newheaders.insert(newCol,self.maptable.get_header_newName(header[1]))
        # Loop file rows and cols,
        for r, row in islice(enumerate(reader), 0, self._limit_row): #None
            # initialize newrow as list with elements upto deisred cols
            newrow = [EMPTY_STRING for i in range(len(newheaders))]
            #Loop Headers
            for header in enumerate(hdrKeys):
                # Find column number of Standard Header
                # from current input file's headers
                try:
                    col = data_headers.index(header[1])
                except(ValueError):
                    # header not present in this particular input file
                    col = -1
                # NOTE(review): `col > 0` also skips column 0 even when the
                # header WAS found there -- possibly should be `col >= 0`.
                if col < len(row) and col > 0:
                    dataCell = row[col]
                else:
                    dataCell = EMPTY_STRING
                a = self._anon
                dataCell = self._manipulate(dataCell, self.maptable.get_header_newName(header[1]), a)
                if not self.maptable.get_header_newName(header[1]) == OBSOLETE_HEADER:
                    newCol = self.maptable.get_header_colNum(header[1])-1
                    # only fill a cell that is still a placeholder
                    # (the first header mapped to this column wins)
                    if newrow[newCol] == EMPTY_STRING:
                        newrow.pop(newCol)
                        newrow.insert(newCol, dataCell)
            newrows.append(newrow)
    return newrows, newheaders
##
def get_data_stats(self, header_key):
    """Return summary statistics for one variable (column) of the file.

    Numeric variables yield [low percentile, high percentile] of their
    values; variables containing any string yield the list of unique
    values. Returns [] for a falsy key or an empty variable.
    """
    if not header_key:
        return []
    # lazily build the per-header data dictionary on first use
    if not self.data_od:
        self._odictionarize_data()
    values = self.data_od[header_key]
    if not values:
        print("no data for: " + header_key)
        return []
    # the variable counts as numeric only if no element is a string
    has_strings = any(isinstance(value, str) for value in values)
    if has_strings:
        # unique string values (duplicates removed, order not preserved)
        return list(set(values))
    low = np.percentile(values, self.lo_perc)
    high = np.percentile(values, self.hi_perc)
    return [low, high]
##
def _find_timestart(self):
    """Return the timestamp of the first eyetracking frame.

    Temporarily reads a few rows into the data dictionary, grabs the
    first TETTime value, then restores the reader to its default state.
    (Used for anonymization, to remove the unique absolute timestamps.)
    """
    saved_limit = self._limit_row
    # only the first rows are needed to find the starting timestamp
    self._limit_row = 10
    self._odictionarize_data(anonymize = False)
    first_timevalue = self.data_od[TIME_HEADER][0]
    # reset state so later reads are unaffected
    self.data_od = {}
    self._limit_row = saved_limit
    return first_timevalue
##
def _odictionarize_data(self, anonymize = False):
    """Read the whole input file into self.data_od (header -> value list).

    Each cell is passed through _manipulate and converted to int/float
    when possible. Used by get_data_stats and _find_timestart.
    """
    with open(os.path.join(self.input_folder, self.input_file), "rt") as inputfile:
        reader = csv.reader(inputfile, delimiter = self._input_file_delimiter)
        # grab header information, into a list
        headers = next(reader)
        # return if no good headers
        # NOTE(review): csv.reader always yields strings, so this branch is
        # unreachable as written; and `headers[el]` indexes the list with a
        # string (presumably `headers[i]` was intended), which would raise
        # TypeError if it ever ran. Confirm intent before relying on the
        # generated "Header_i" names.
        if not isinstance(headers[0], str):
            for i, el in enumerate(headers):
                headers[el] = "Header_" + str(i)
            return "No string headers"
        #initialize od with headers as keys
        self.data_od = OrderedDict.fromkeys(headers)
        # loop file rows and cols,
        for r, row in islice(enumerate(reader), 0, self._limit_row):
            # loop cols
            for h, header in enumerate(self.data_od.keys()):
                try: # try to accces data element
                    foo = row[h]
                except(IndexError):
                    # index out of bounds: substitute an empty value
                    # (the falsy [] becomes NULL_VALUES_NEW in _manipulate)
                    foo = []
                # process data value
                foo = self._manipulate(foo, header, anonymize)
                # convert to number if possible
                foo = routine.string_or_number(foo)
                # initialize variable or append new value
                if not self.data_od[header]:
                    self.data_od[header] = [foo]
                else:
                    self.data_od[header].append(foo)
##
def set_row_limit(self, number):
    """Limit how many data rows are read from the input file."""
    self._limit_row = number
def set_percentiles(self, lo_percentile, hi_percentile):
    """Set the low/high percentile points used for data value ranges."""
    self.lo_perc = lo_percentile
    self.hi_perc = hi_percentile
def _read_headers(self):
    """Read header names for this file from the folder's headers file."""
    print("Writing headers from a separate file for " + self.input_file + ".")
    header_path = os.path.join(self.input_folder, HEADERFILENAME)
    with open(header_path, "rt") as inputfile:
        header_reader = csv.reader(inputfile, delimiter = self._input_file_delimiter)
        return next(header_reader)
def _manipulate(self, dataIn, header, anonymize = False):
    # Transform a single raw cell value according to its column header.
    # Falsy / null cells are replaced by NULL_VALUES_NEW; Subject, Stim,
    # LateralStimPos and time columns get header-specific rewrites.
    # "Private" helper of GazeReader; more manipulations could be included...
    # NOTE(review): indentation was reconstructed from a flattened source;
    # the nesting of the anonymize branch under SUBJECT_HEADER should be
    # confirmed against the original file.
    if not dataIn: return NULL_VALUES_NEW
    else: dataOut = dataIn
    #truth = isinstance("1", int)
    #routine.string_or_number(123)
    # take away the null-values if they exist
    if dataOut not in NULL_VALUES:
        if header == SUBJECT_HEADER: #eg. "Subject"
            # empty subject cells fall back to the input filename
            if dataIn == EMPTY_STRING:
                dataOut = self.input_file
            if anonymize: # replace the subject with the anonymous output name
                dataOut = self.output_file
        elif header == STIM_HEADER: #eg. "Stim"
            dataIn = routine.string_or_number(dataIn)
            if isinstance(dataIn, str):
                dataOut = dataIn
            else:
                # numeric stim codes map pairwise onto stimulus image names
                dataIn = int(dataIn)
                if 1 <= dataIn <= 2:
                    dataOut = "fearful.bmp"
                elif 3 <= dataIn <= 4:
                    dataOut = "control.bmp"
                elif 5 <= dataIn <= 6:
                    dataOut = "happy.bmp"
                elif 7 <= dataIn <= 8:
                    dataOut = "neutral.bmp"
        elif header == LATERAL_STIM_HEADER: #eg. "LateralStimPos"
            #print(dataIn)
            # 1/2 codes become side labels
            if routine.string_or_number(dataIn) == 1:
                dataOut = "left"
            elif routine.string_or_number(dataIn) == 2:
                dataOut = "right"
        elif header == TIME_HEADER:
            # NOTE(review): original comment said "LateralStimPos" here,
            # but this branch handles the time column. When anonymizing,
            # timestamps are re-based to self._timestamp -- presumably set
            # elsewhere in GazeReader; confirm it exists before this runs.
            #print(dataIn)
            #try: #Possible error caused by non-existing var: self._anon
            if anonymize:
                dataOut = str(float(dataIn) - self._timestamp)
            #except AttributeError:
            #    pass
            #print('variable \"self._anon\" not found')
            #print("dataOut = right")
        # Currently no need for scaling gaze coordinates...
        #elif header in X_COORD_HEADERS: #['LEFT_GAZE_X', 'RIGHT_GAZE_X']:
        #    dataOut = float(dataOut) / SCREEN_X_DIM
        #elif header in Y_COORD_HEADERS: #['LEFT_GAZE_Y', 'RIGHT_GAZE_Y']:
        #    dataOut = float(dataOut) / SCREEN_Y_DIM
        else:
            dataOut = dataOut
        return dataOut
    else:
        #print(header)
        return NULL_VALUES_NEW
def get_filename(self, no_ext = True, in_out = 'out'):
    """Return the filename being read/processed.

    in_out: 'out' -> self.output_file, 'in' -> self.input_file.
    no_ext: strip everything from the first '.' onward.

    Raises ValueError for any other in_out value (the original printed a
    warning and then crashed with UnboundLocalError on the unset variable).
    """
    if in_out == 'out':
        fn = self.output_file
    elif in_out == 'in':
        fn = self.input_file
    else:
        raise ValueError("Only in/out are acceptable parameter values!")
    if no_ext:
        # return filename without extension
        i_ext = fn.find('.')
        # FIX: find() returns -1 when there is no dot; fn[0:-1] would then
        # silently drop the last character of an extension-less name.
        return fn if i_ext == -1 else fn[0:i_ext]
    else:
        return fn
def get_row_count(self):
    """Return the number of data rows read and stored."""
    return len(self.data_rows)
def get_headers(self):
    """Return the list of data headers, mapped through self.maptable if set.

    FIX: both branches now return a *copy* of self.data_headers. The
    original returned (and mutated) the internal list itself; callers
    insert the filename into the returned list (see check_headers_2.py),
    which corrupted the reader's stored headers, and removing
    OBSOLETE_HEADER permanently altered internal state on first call.

    Returns None when maptable is set but is not a HeaderReader
    (preserving the original implicit behaviour).
    """
    if not self.maptable:
        return list(self.data_headers)
    # if conversion map for new headers available, return new headers
    else:
        if isinstance(self.maptable, HeaderReader):
            newheaders = list(self.data_headers)
            # remove obsolete headers
            if OBSOLETE_HEADER in newheaders:
                newheaders.remove(OBSOLETE_HEADER)
            return newheaders
def get_new_row(self):
    """Advance the row cursor and return the next stored data row,
    or False once all rows have been consumed."""
    self.r_ind += 1
    if self.r_ind >= self.get_row_count():
        return False
    return self.data_rows[self.r_ind]
def set_filename_out(self, filename_new):
    """Set a new output filename, appending self.file_ext when missing."""
    name = str(filename_new)
    if not name.endswith(self.file_ext):
        name = name + self.file_ext
    self.output_file = name
def restart(self):
    """Reset the row cursor so get_new_row starts from the first row again."""
    self.r_ind = -1
##
class DataFolder:
    """A class for accessing gazedata in a specific folder """
    """We have many folders with vairiable gazedata. The headers, """
    """datavalue scales, tags, and structure may all be variable. """
    """With DataFolder, it is possible to output these things for comparison"""
    # NOTE(review): only the first string literal above is the class
    # docstring; the remaining bare strings are discarded no-op expressions.
    ##
    def __init__(self,
                 path,
                 limit_rows = None,
                 limit_files = (0, None),
                 file_ext = ".gazedata",
                 input_file_delimiter = '\t',
                 map_header = None,
                 date_limit = "1 Jan 00",
                 date_limit_type = "c", #c=created, m=modified
                 #map_header_current = None,
                 output_folder = "C:\\Users\\Public\\Documents\\Tampereen yliopisto\\Eye tracker\\TRE Cohort 2\\gazeAnalysisLib analyses\\testing data",
                 ): #t_args,
        # Collect and date-filter the folder's gazedata files at construction.
        self.dirpath = path # input folder path
        self.limit_rows = limit_rows # limit data rows per file processed
        self.limit_files = limit_files # (start, stop) slice of the file list
        self.file_ext = file_ext # input file type
        self.file_delimiter = input_file_delimiter # delimiter e.g., tab "\t"
        # first header map is for the exemplary data
        self.map_header = map_header # map for old and new headers,
        # second header map is for current data folder idiosyncracies
        #self.map_header_current = map_header_current # map for old and new headers
        self.date_limit = time.strptime(date_limit, "%d %b %y")
        self.output_folder = output_folder # output folder
        self.headers_folder = os.getcwd() # folder of header transform map
        self.folder_level_data = OrderedDict() # data from all files in folder
        self.out_stats = OrderedDict() # extracted descriptive stats
        # for investigating file headers:
        # if no header map is provided, the aim is to read headers
        # "bottom-up" and store them to the output file
        if not self.map_header:
            folder_path = os.path.split(self.dirpath)
            folder_tail = folder_path[1]
            self.output_file = ( "headers in " + folder_tail + "_" +
                                 str(date.today()) + ".txt")
        else:
            self.output_file = ( "output_" + str(date.today()) + ".txt")
        # get list of files, slice by limit_files, then drop too-old files
        #self.diritems = os.listdir(path)
        self.diritems = [fileName for fileName in os.listdir(path) if fileName.endswith(file_ext)]
        self.diritems = self.diritems[self.limit_files[0]:self.limit_files[1]]
        self.diritems = self.timethreshold_items(self.diritems,date_limit_type)
        print("-------------------------")
        print("Files selected: " + str(self.diritems))
        print("-------------------------")
        #self.diritems = glob.glob(os.path.join(path,"*" ,file_ext)) //not work!
        #print ("Directory contains " + str(len(self.diritems)) + " files.")
    ##
    def set_output_folder(self, folder):
        # change the output folder, creating it if it does not exist
        print ("Output to: " + folder)
        if not os.path.isdir(folder):
            os.mkdir(folder)
        self.output_folder = folder
    ##
    def write_headers_to_file(self):
        # store into the output file one row per input file: the filename
        # followed by that file's headers
        with open(os.path.join(self.output_folder, self.output_file),
                  "wt", newline = "\n") as outputfile:
            writer = csv.writer(outputfile, delimiter=self.file_delimiter)
            # NOTE(review): limit_files was already applied to diritems in
            # __init__; slicing again here narrows the selection twice.
            for filenum, file in islice(enumerate(self.diritems), self.limit_files[0], self.limit_files[1]):
                #print ("Checking file " + str(filenum + 1) + '/' + str(len(diritems)))
                if file.endswith(self.file_ext):
                    #print(os.path.join(self.output_folder, self.output_file))
                    print ("Process file " + str(filenum + 1) + '/' + str(len(self.diritems)))
                    print(file)
                    # read in data via a fresh GazeReader and write its headers
                    args_pro = self.dirpath, file, self.map_header
                    f_processor = GazeReader(args_pro, self.limit_rows) #40 is optional limit for rows
                    #f_processor.set_row_limit(40) # limit rows, good for debugging
                    row_list_to_write = f_processor.get_headers()
                    row_list_to_write.insert(0, file)
                    writer.writerow( row_list_to_write )
    ##
    def write_stats_to_file(self, percentiles):
        # summarize variable scales across all files: (min, max) for numeric
        # variables, unique tags for string variables; write one header row
        # and one stats row to a dedicated output file
        _output_file = "daata stats and " + self.output_file
        # collect statistics from all files in folder
        for filenum, file in islice(enumerate(self.diritems), self.limit_files[0], self.limit_files[1]):
            #print ("Checking file " + str(filenum + 1) + '/' + str(len(self.diritems)))
            if file.endswith(self.file_ext):
                #print(os.path.join(self.output_folder, self.output_file))
                print ("Process file " + str(filenum + 1) + '/' + str(len(self.diritems)))
                print(file)
                # read in data, process, and accumulate per-header stats
                args_pro = self.dirpath, file, self.map_header
                f_processor = GazeReader(args_pro, self.limit_rows, percentiles) #40 is optional limit for rows
                for header in f_processor.get_headers():
                    #print("header: " + header)
                    stats = f_processor.get_data_stats(header)
                    if header not in self.folder_level_data.keys():
                        self.folder_level_data[header] = stats
                    else:
                        for el in stats:
                            self.folder_level_data[header].append(el)
                        #if isinstance(el, str):
                        #print(header + " has strings")
                        #!!assign list instead!!!1
        # reduce statistical data for outputting
        # self.out_stats already defined at __init__()
        ##
        # loop through variables/headers
        for header in self.folder_level_data.keys():
            stats_folder = self.folder_level_data[header]
            if not stats_folder: continue
            # check if variable includes string values (1 = string, 0 = not)
            stats_include_str = []
            for stat in stats_folder:
                if isinstance(stat, str): stats_include_str.append(1)
                else: stats_include_str.append(0)
            # extract min, max of numerical values
            if not any(stats_include_str):
                min_value = min(self.folder_level_data[header])
                max_value = max(self.folder_level_data[header])
                self.out_stats[header] = min_value, max_value
            # extract unique strings (sorted only when all values are strings)
            else:
                if all(stats_include_str):
                    self.out_stats[header] = sorted(list(set(self.folder_level_data[header])))
                else:
                    self.out_stats[header] = list(set(self.folder_level_data[header]))
                #print(header + " has strings")
        # do the writing
        with open(os.path.join(self.output_folder, _output_file),
                  "wt", newline = "\n") as outputfile:
            writer = csv.writer(outputfile, delimiter=self.file_delimiter)
            writer.writerow( self.out_stats.keys() )
            writer.writerow( self.out_stats.values() )
    ##
    def rewrite_data(self, outputfolderIn = None, anonymize = False):
        # rewrite each input file in the standardized format; with
        # anonymize=True the file order is shuffled and GazeReader scrubs
        # identifying fields
        if outputfolderIn:
            self.set_output_folder(outputfolderIn)
        if anonymize:
            random.shuffle(self.diritems)
        # access data from all files in folder
        for filenum, file in islice(enumerate(self.diritems), self.limit_files[0], self.limit_files[1]):
            #print ("Checking file " + str(filenum + 1) + '/' + str(len(diritems)))
            if file.endswith(self.file_ext):
                #print(os.path.join(self.output_folder, self.output_file))
                print ("\nProcess file " + str(filenum + 1) + '/' + str(len(self.diritems)))
                print(file + '\n')
                print("Write new file to: " + self.output_folder)
                # read in data and process it via a fresh GazeReader
                args_pro = self.dirpath, file, self.map_header#,
                #self.map_header_current #None#self.map_header
                f_processor = GazeReader(args_pro, self.limit_rows, anonymize = anonymize)#, percentiles = percentiles) #40 is optional limit for rows
                # make name for new gazedata file
                _output_file = (f_processor.get_filename(no_ext=True) + "_std.gazedata")
                print(_output_file)
                # output/gazedatafile opening
                with open(os.path.join(self.output_folder, _output_file),
                          "wt", newline = "\n") as outputfile:
                    writer = csv.writer(outputfile, delimiter=self.file_delimiter)
                    # write headers to new file
                    headers = f_processor.get_headers()
                    # NEW HEADERS ARE ALREADY IN USE FOR GazeReader, if initialized with a header map!!!
                    writer.writerow( headers )
                    # write data rows to new file, one at a time
                    found_new_row = f_processor.get_new_row()
                    while found_new_row:
                        #print(found_new_row)
                        writer.writerow( found_new_row )
                        found_new_row = f_processor.get_new_row()
    ##
    def get_headers(self):
        # returns list of headers: from computed stats when available,
        # otherwise by peeking at one file
        if len(self.out_stats.keys()) > 0:
            return self.out_stats.keys()
        else:
            # read in data from one file to get its headers
            # NOTE(review): diritems[1] is the *second* file and raises
            # IndexError for a single-file folder; index 0 was likely meant.
            args_pro = self.dirpath, self.diritems[1], self.map_header
            limit_rows = 1
            f_processor = GazeReader(args_pro, limit_rows)
            return f_processor.get_headers()
    def get_stats(self, header):
        # return the reduced stats of a specific variable
        return self.out_stats[header]
    def timethreshold_items(self, items, type):
        # filter *items*, keeping only files newer than self.date_limit
        # NOTE(review): parameter 'type' shadows the builtin; "c"=created,
        # "m"=modified -- any other value leaves itemModified unbound and
        # raises NameError on the comparison below.
        items_v2 = []
        timeThreshold = time.mktime(self.date_limit)
        print("Number of files in folder: " + str(len(items)))
        for itemNum, item in (enumerate(items)):
            if type == "m":
                itemModified = os.path.getmtime(self.dirpath + '\\' + item)
            if type == "c":
                itemModified = os.path.getctime(self.dirpath + '\\' + item)
            if itemModified < timeThreshold:
                print(item + " is too old")
            else:
                filedate = time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(itemModified))
                items_v2.append(item)
                print(item + " has good date: " + filedate)
        print("Number of files selected for processing: " + str(len(items_v2)))
        return items_v2
    def get_filelist(self):
        # return the (filtered) list of files in the folder
        return self.diritems
    def print_header_map(self):
        # print the old->new header mapping; supports both a plain dict
        # and a HeaderReader (detected via the AttributeError fallback)
        print("List header map, folder: "+ self.dirpath +"; old: new")
        # try if the self.map_header is an (ordered) dictionary
        try:
            for k in self.map_header.keys():
                print (k + ": " + self.map_header[k])
        except(AttributeError): # if it is HeaderReader
            for k in self.map_header.getKeys():
                #print(k)
                print (k + ": " + self.map_header.get_header_newName(k) +
                       ", col: " + str(self.map_header.get_header_colNum(k)))
class HeaderReader:
    """Reads a header-conversion map file into an OrderedDict.

    Each row of the map file holds: old_name <delim> new_name <delim> column_number.
    The mapping preserves file order and is exposed through small accessors.
    """

    def __init__(self, path, file ):
        self.file = file    # map file name
        self.path = path    # folder containing the map file
        self._input_file_delimiter = INPUT_DELIMITER_DEFAULT
        self.od = self._read_headers()

    def _read_headers(self):
        """Parse the map file into OrderedDict[old_name] -> (newName, colNum)."""
        headerInfo = namedtuple('headerInfo', 'newName colNum')
        mapping = OrderedDict()
        with open(os.path.join(self.path, self.file), "rt", ) as inputfile:
            for entry in csv.reader(inputfile, delimiter = self._input_file_delimiter):
                mapping[entry[0]] = headerInfo(entry[1], entry[2])
        return mapping

    def getKeys(self):
        """Return the original (old) header names, in file order."""
        return self.od.keys()

    def get_n_of_uniqueCols(self):
        """Count headers whose column number is positive and differs from
        the immediately preceding row's column number."""
        count = 0
        previous = -1
        for key in self.od.keys():
            current = self.get_header_colNum(key)
            if current != previous and current > 0:
                count += 1
            previous = current
        return count

    def get_header_colNum(self, header):
        """Column number (as int) mapped to *header*."""
        return int(self.od[header].colNum)

    def get_header_newName(self, header):
        """New header name mapped to *header*."""
        return self.od[header].newName
|
{"/py_test.py": ["/routine.py", "/my_classes.py"], "/standardizeGazedata_0.2.py": ["/routine.py", "/my_classes.py"], "/gazedata2gazedata_sy2 - copy.py": ["/routine.py"], "/check_dataOO_0.1.py": ["/my_classes.py"], "/gazedata2gazedata_sy.py": ["/routine.py"], "/standardizeGazedata_0.1.py": ["/routine.py", "/my_classes.py"], "/gazedata2gazedata_sy3.py": ["/routine.py"], "/check_headersOO_0.1.py": ["/my_classes.py"], "/check_data_multi.py": ["/my_classes.py"], "/my_classes.py": ["/routine.py"], "/py_test_2.py": ["/routine.py", "/my_classes.py"], "/standardizeGazedataAnon.py": ["/my_classes.py"], "/check_headers_2.py": ["/my_classes.py"]}
|
6,891
|
yrttiahoS/py_gazedat
|
refs/heads/master
|
/py_test_2.py
|
# Scratch / experimentation script -- not part of the processing pipeline.
from datetime import datetime, date
import os
import time
import csv
import sys
import calendar
#import math
from collections import OrderedDict
from collections import namedtuple
import routine # a script with functions
import numpy as np
import my_classes
#from my_classes import GazeReader
#from my_classes import HeaderReader
##from itertools import islice
import random
# NOTE(review): 'files' is never defined in this script -- the next two
# statements raise NameError when the file is executed as-is.
random.shuffle(files)
files
print('heillo')
print('acsascscaheillo')
##
##input_folder = "D:\\lasayr\\Aaltonen\\\ct\\6mo"
###"C:\\Users\\Public\\Documents\\Tampereen yliopisto\\Eye tracker\\TRE Cohort 2\\gazeAnalysisLib analyses\\7mo,trec2"
##
##diritems = [fileName for fileName in os.listdir(input_folder) if fileName.endswith('.gazedata')]
##
###write date
##tt = time.strptime("19 Oct 17", "%d %b %y")
###convert date to epoch time, seconds
##seconds = time.mktime(tt)
##
##print(diritems[len(diritems)-1])
##print(os.path.getctime(input_folder + '\\' + diritems[1]))
##print(time.time())
##print(tt)
##print(seconds)
#a = os.path.split(input_folder)
#print(os.path.split(input_folder)[1])
##truth = isinstance("1", int)
##routine.string_or_number(123)
##print("truth:" + str(truth))
##
##
##
##input_folder = "D:\\lasayr\\Aaltonen\\24mo"
##print (input_folder + "\\testing")
##
##a=3
##if 1 <= a <= 2:
## print(str(a))
##else:
## print("a not in range")
##
##with open(os.path.join('header map 3D.txt'), "rt") as inputfile:
## reader = csv.reader(inputfile, delimiter = '\t')
## for r, row in islice(enumerate(reader), 0, 20):
## print(str(r) + ": " + str(row))
## inputfile.seek(0)
##folder = "C:/Users/infant/Documents/GitHub/py_gazedat"
##file = "header map 3D.txt"
##hr = HeaderReader(folder, file)
##
##print(hr.get_header_colNum('TETTime'))
##print(hr.get_header_newName('r_cam_y'))
##
##hKeys = hr.getKeys()
##
####for header in enumerate(hKeys):
#### #print(header)
#### print(hr.get_header_newName(header[1]))
##
##headersList = ['' for i in range(4)]#['cl1', 'cl2', 'cl3']
###ind = headersList.index('cl4')
##
##newheaders = []
##
##for header in enumerate(hKeys):#enumerate(header_keys):
## if not hr.get_header_newName(header[1]) == my_classes.OBSOLETE_HEADER:
## newheaders.append(hr.get_header_newName(header[1]))
##
##
##
##print('Rubject' in newheaders)
##print(type(hr))
##val = isinstance(hr, HeaderReader)
##print(val)
##
##n = hr.get_n_of_uniqueCols()
##print(n)
##
##l= ["x" for i in range(9)]
##target = 3
##print(l)
##l.pop(target)
##l.insert(target, "X")
##print(l)
# OrderedDict experiments (scratch code).
od = OrderedDict([('sape', 4139), ('guido', 4127), ('jack', 4098)])
# NOTE(review): 'headers' is undefined at this point -- this line raises
# NameError when executed.
od = OrderedDict.fromkeys(headers)
header = '6'
#od = OrderedDict.fromkeys(headers)
# tuple('name') splits the string into characters: ('n', 'a', 'm', 'e')
l = tuple('name')
od['Thing'] = l
print('l:' + str(l))
print('od:' + str(od))
print('od(Thing):' + str(od['Thing']))
print('num in od(Thing):' + str(od['Thing'][1]))
#od['Thing'].append(13)
#od['guido'].append(31)
#od['sape'].append(13)
#od['guido'].append(31)
#di = dict([('sape', 4139), ('guido', 4127), ('jack', 4098)])
#
#folder = os.getcwd() + '\\'
#with open((folder + 'foo.gazedata'),'w') as data: data.write(str(od))
#with open((folder + 'food.gazedata'),'w') as data: data.write(str(di))
##
##Strong = namedtuple('Strong', 'name num')
##
##strong1 = Strong('name', 123)
##od['StrongThing'] = strong1
##od['TitanicThing'] = Strong('FamousName', 88)
##
##print(strong1.name)
##print(od['StrongThing'].num)
##print(od['TitanicThing'].num)
##
|
{"/py_test.py": ["/routine.py", "/my_classes.py"], "/standardizeGazedata_0.2.py": ["/routine.py", "/my_classes.py"], "/gazedata2gazedata_sy2 - copy.py": ["/routine.py"], "/check_dataOO_0.1.py": ["/my_classes.py"], "/gazedata2gazedata_sy.py": ["/routine.py"], "/standardizeGazedata_0.1.py": ["/routine.py", "/my_classes.py"], "/gazedata2gazedata_sy3.py": ["/routine.py"], "/check_headersOO_0.1.py": ["/my_classes.py"], "/check_data_multi.py": ["/my_classes.py"], "/my_classes.py": ["/routine.py"], "/py_test_2.py": ["/routine.py", "/my_classes.py"], "/standardizeGazedataAnon.py": ["/my_classes.py"], "/check_headers_2.py": ["/my_classes.py"]}
|
6,892
|
yrttiahoS/py_gazedat
|
refs/heads/master
|
/standardizeGazedataAnon.py
|
## Script for reading, modifying and re-writing gazedata in anonymized,
## standardized form. Driven entirely by hard-coded local paths.
#%reset -f
import os
#import routine
from my_classes import DataFolder
from my_classes import HeaderReader
# read header conversion map (old header -> new header + column)
folder = "C:/Users/lasayr/Documents/GitHub/py_gazedat"
file = "header map 3D_std.txt"
hr = HeaderReader(folder, file)
# Source folder:
input_folder = "C:\\Users\\lasayr\\Documents\\Tampereen yliopisto\\Eye tracker\\TRE Cohort 2\\gazeAnalysisLib analyses\\7mo,trec2"
#input_folder = "D:\\lasayr\\Aaltonen\\\ct\\ct_18mo"
#input_folder = "D:\\lasayr\\Aaltonen\\mi"
#input_folder = "D:\\lasayr\\Aaltonen\\24mo,trec2"
inFolderUnique = os.path.split(input_folder)[1]
output_folder = "C:\\Users\\lasayr\\Documents\\D\\Aaltonen\\" + inFolderUnique + "_std\\anon"
# NOTE(review): the two assignments below overwrite input_folder and
# output_folder, making the computation above dead code.
folder_an = 'C:\\Users\\lasayr\Documents\\D\\Aaltonen\\7mo,trec2_std\\anon'
input_folder = folder_an
output_folder = folder_an + '\\moreAnon'
# Init DataFolder (only files newer than dl; only the first 2 files)
dl = "01 Jan 00"
data_folder = DataFolder(input_folder, map_header = hr, date_limit = dl ,limit_files = (0,2), limit_rows = None)#, fileModelCur)#, limit_files = (0,3))#, limit_rows = 20, limit_files = (1,3))
# get files
files = data_folder.get_filelist()
# Print header map, conversion table
data_folder.print_header_map()
headers = data_folder.get_headers()
#data_folder.write_stats_to_file(percentiles = (1,99))
print("\nFiles selected: " + str(data_folder.get_filelist()))
# Change output folder, default is: C:\Users\Public\Documents\Tampereen yliopisto\Eye tracker\TRE Cohort 2\gazeAnalysisLib analyses\testing data
#data_folder.set_output_folder(output_folder)
# rewrite all selected files, anonymized, into output_folder
data_folder.rewrite_data(output_folder, anonymize = True)
|
{"/py_test.py": ["/routine.py", "/my_classes.py"], "/standardizeGazedata_0.2.py": ["/routine.py", "/my_classes.py"], "/gazedata2gazedata_sy2 - copy.py": ["/routine.py"], "/check_dataOO_0.1.py": ["/my_classes.py"], "/gazedata2gazedata_sy.py": ["/routine.py"], "/standardizeGazedata_0.1.py": ["/routine.py", "/my_classes.py"], "/gazedata2gazedata_sy3.py": ["/routine.py"], "/check_headersOO_0.1.py": ["/my_classes.py"], "/check_data_multi.py": ["/my_classes.py"], "/my_classes.py": ["/routine.py"], "/py_test_2.py": ["/routine.py", "/my_classes.py"], "/standardizeGazedataAnon.py": ["/my_classes.py"], "/check_headers_2.py": ["/my_classes.py"]}
|
6,893
|
yrttiahoS/py_gazedat
|
refs/heads/master
|
/check_headers_2.py
|
# Script: locate the gazedata input folder on disk, then write each
# file's header row (prefixed with the filename) into one summary file.
import os
import csv
#import routine
#from collections import OrderedDict
from itertools import islice
from my_classes import GazeReader
default_input_folder = "D:\lasayr\Aaltonen\mi" #"C:\\Users\\Public\\Documents\\Tampereen yliopisto\\Eye tracker\\TRE Cohort 2\\gazeAnalysisLib analyses\\testing 7mo,trec2"
input_folder = "mi" #'testing 7mo,trec2' #"C:\\Users\\Public\\Documents\\Tampereen yliopisto\\Eye tracker\\TRE Cohort 2\\gazeAnalysisLib analyses\\testing 7mo,trec2"
root_folder = "D\\" #"C:\\Users\\"
n_files = None # set limit for files to be processed, None if no limit desired
output_folder = os.getcwd()#"C:\\Users\\Public\\Documents\\Tampereen yliopisto\\Eye tracker\\TRE Cohort 2\\gazeAnalysisLib analyses\\testing data"
output_file = ( "headers in " + input_folder + ".txt")
file_ext = ".gazedata" #input file extension, .txt
input_file_delimiter = "\t"
output_file_delimiter = input_file_delimiter
headers_folder = os.getcwd() #path for headers inputfile
headers_inputfile = "headers_tre_5mo_to_7mo.txt"
# no headers are specified, instead find them in files
map_header = None
# find directory by "walking" through the file system
if os.path.isdir(default_input_folder):
    start_folder = default_input_folder
else:
    start_folder = root_folder
# NOTE(review): input_folder is reassigned inside the loop, so after the
# first match later iterations test against the matched path; a break
# after the match was presumably intended -- confirm.
for root, dirs, files in os.walk(start_folder):
    if input_folder in root:
        print(root, " ", end=" FOUND! ")
        print("")
        input_folder = root
    else:
        print(root)
print (input_folder)
# list files in a directory,
diritems = os.listdir(input_folder)
print ("Directory contains " + str(len(diritems)) + " files.")
# open output file and write one header row per matching input file
with open(os.path.join(output_folder, output_file), "wt") as outputfile:
    writer = csv.writer(outputfile, delimiter=output_file_delimiter)
    #headers_in_files = []
    # loop through files, limit loop by islice(items, start, stop), can be None
    for filenum, file in islice(enumerate(diritems), 0, n_files):
        #print ("Checking file " + str(filenum + 1) + '/' + str(len(diritems)))
        if file.endswith(file_ext):
            print ("Process file " + str(filenum + 1) + '/' + str(len(diritems)))
            print(file)
            # read in data and fetch the processed header list
            args_pro = input_folder, file, map_header
            # make new GazeReader object for reading and processing input file
            f_processor = GazeReader(args_pro, 40) #40 is optional limit for rows
            #f_processor.set_row_limit(40) # limit rows, good for debugging
            row_list_to_write = f_processor.get_headers()
            row_list_to_write.insert(0, file)
            writer.writerow( row_list_to_write )
|
{"/py_test.py": ["/routine.py", "/my_classes.py"], "/standardizeGazedata_0.2.py": ["/routine.py", "/my_classes.py"], "/gazedata2gazedata_sy2 - copy.py": ["/routine.py"], "/check_dataOO_0.1.py": ["/my_classes.py"], "/gazedata2gazedata_sy.py": ["/routine.py"], "/standardizeGazedata_0.1.py": ["/routine.py", "/my_classes.py"], "/gazedata2gazedata_sy3.py": ["/routine.py"], "/check_headersOO_0.1.py": ["/my_classes.py"], "/check_data_multi.py": ["/my_classes.py"], "/my_classes.py": ["/routine.py"], "/py_test_2.py": ["/routine.py", "/my_classes.py"], "/standardizeGazedataAnon.py": ["/my_classes.py"], "/check_headers_2.py": ["/my_classes.py"]}
|
6,894
|
yrttiahoS/py_gazedat
|
refs/heads/master
|
/csv2gazedata-2b-6.py
|
import os
import csv
# Input folder needs to be relative to the script location in the folder tree.
# In this case the folder where this script is located needs to have a folder
# named "files_to_change" where the files are located.
input_folder = "files_to_change"
ending = ".txt"
output_file_ending = ".gazedata"
input_file_delimiter = "\t"
null_values = [".", ""]
replace_null_values = "-999999"
maptable = {"TIMESTAMP":"TETTime",
"RECORDING_SESSION_LABEL":"Subject",
"LEFT_GAZE_X":"XGazePosLeftEye",
"LEFT_GAZE_Y":"YGazePosLeftEye",
"RIGHT_GAZE_X":"XGazePosRightEye",
"RIGHT_GAZE_Y":"YGazePosRightEye",
"TRIAL_INDEX":"TrialId",
"SAMPLE_MESSAGE":"UserDefined_1",
"RIGHT_PUPIL_SIZE":"DiameterPupilRightEye",
"stimulus_right_2":"Stim",
"__target_x__1":"Target"
# scan through files in a directory
diritems = os.listdir(input_folder)
print "Directory contains " + str(len(diritems)) + " files."
for filenum, file in enumerate(diritems):
print "Checking file " + str(filenum + 1) + '/' + str(len(diritems))
if file.endswith(ending):
print " Filename matches with the specified ending -> processing.."
#self.liststore_exp.append([file])
input_file = file
# input file reading
newrows = []
with open(os.path.join(input_folder, input_file), "rb") as inputfile:
reader = csv.reader(inputfile, delimiter='\t')
# grab header information, into a list
headers = reader.next()
# calculate list index numbers for map-keys
indexed_maptable = {}
for key in maptable:
indexed_maptable[key] = headers.index(key)
# loop file rows
imkeys = indexed_maptable.keys()
for row in reader:
newrow = []
for key in imkeys:
ncol = indexed_maptable[key]
# take away the null-values if they exist
if row[ncol] not in null_values:
if key in ['LEFT_GAZE_X', 'RIGHT_GAZE_X']:
newrow.append(float(row[ncol]) / 1920.0)
elif key in ['LEFT_GAZE_Y', 'RIGHT_GAZE_Y']:
newrow.append(float(row[ncol]) / 1020.0)
else:
newrow.append(row[ncol])
else:
newrow.append(replace_null_values)
newrows.append(newrow)
# output file formation
# resolve the output file name
input_filename_parts = input_file.split(".")
output_file = input_filename_parts[0] + output_file_ending
# open file
with open(os.path.join(input_folder, output_file), "wb") as outputfile:
writer = csv.writer(outputfile, delimiter='\t')
# form header row
newheaders = []
for key in imkeys:
newheaders.append(maptable[key])
# write header row
writer.writerow(newheaders)
# write datarows
for newrow in newrows:
writer.writerow(newrow)
print " File processed."
else:
print " Filename did not match the ending -> did nothing."
|
{"/py_test.py": ["/routine.py", "/my_classes.py"], "/standardizeGazedata_0.2.py": ["/routine.py", "/my_classes.py"], "/gazedata2gazedata_sy2 - copy.py": ["/routine.py"], "/check_dataOO_0.1.py": ["/my_classes.py"], "/gazedata2gazedata_sy.py": ["/routine.py"], "/standardizeGazedata_0.1.py": ["/routine.py", "/my_classes.py"], "/gazedata2gazedata_sy3.py": ["/routine.py"], "/check_headersOO_0.1.py": ["/my_classes.py"], "/check_data_multi.py": ["/my_classes.py"], "/my_classes.py": ["/routine.py"], "/py_test_2.py": ["/routine.py", "/my_classes.py"], "/standardizeGazedataAnon.py": ["/my_classes.py"], "/check_headers_2.py": ["/my_classes.py"]}
|
6,895
|
meantheory/pyinfra-certbot
|
refs/heads/master
|
/pyinfra_certbot/__init__.py
|
from .certbot import provision, certonly, delete
|
{"/pyinfra_certbot/__init__.py": ["/pyinfra_certbot/certbot.py"], "/example/deploy.py": ["/pyinfra_certbot/__init__.py"]}
|
6,896
|
meantheory/pyinfra-certbot
|
refs/heads/master
|
/example/deploy.py
|
# Example pyinfra deploy: install certbot and request certificates.
import pyinfra_certbot as certbot
# presumably the pyinfra global data flag to run operations with sudo -- confirm
SUDO = True
# install certbot on the target host
certbot.provision()
# get tls certificates only
certbot.certonly(
    cert_name="example.com", domains="example.com,*.example.com", dns_provider="google"
)
|
{"/pyinfra_certbot/__init__.py": ["/pyinfra_certbot/certbot.py"], "/example/deploy.py": ["/pyinfra_certbot/__init__.py"]}
|
6,897
|
meantheory/pyinfra-certbot
|
refs/heads/master
|
/pyinfra_certbot/certbot.py
|
import json
from pyinfra.api import FactBase, MaskString, QuoteString, operation, StringCommand
from pyinfra.api.deploy import deploy
from pyinfra.api.exceptions import DeployError
from pyinfra.api.util import get_arg_value, make_hash
from pyinfra.operations import apt, files, yum
class CertBot:
    """Builds certbot CLI invocations (as pyinfra StringCommand objects)."""

    def __init__(
        self, cert_name=None, domains=None, dns_provider=None,
    ):
        # domains may be an already comma-separated string (see the example
        # deploy) or an iterable of domain names
        self._domains = domains
        self.dns_provider = dns_provider
        self.cert_name = cert_name

    def __call__(self, command):
        return self.command(command)

    @property
    def domains(self):
        """Comma-separated domain list.

        FIX: callers pass *domains* as an already comma-separated string;
        ",".join(<str>) would interleave a comma between every character.
        Strings now pass through unchanged; iterables are joined as before.
        """
        if isinstance(self._domains, str):
            return self._domains
        return ",".join(self._domains)

    def command(self, command):
        """Return a StringCommand: certbot <command> [--cert-name N] [--dns-P] [-d D]."""
        bits = ["certbot", command]
        if self.cert_name:
            bits.append("--cert-name {0}".format(self.cert_name))
        if self.dns_provider:
            # create dns flag like, --dns-google
            bits.append("--dns-{0}".format(self.dns_provider))
        if self._domains:
            bits.append("-d {0}".format(self.domains))
        return StringCommand(*bits)
class CertBotFactBase(FactBase):
    """Common base for certbot facts; marked abstract so pyinfra does not
    treat it as a concrete fact itself."""
    abstract = True
class CertBotCertificates(CertBotFactBase):
    # Fact: parses `certbot certificates` output into
    # {cert_name: {"name": ..., "fullchain": ..., "private": ...}}.
    def command(self):
        # shell command whose stdout lines feed process()
        cb = CertBot()
        return cb("certificates")

    def process(self, output):
        certificates = {}
        this = dict(name=None, fullchain=None, private=None)
        for line in output:
            try:
                # value after the first ":" on the line
                # NOTE(review): split(":")[1] truncates values that contain
                # another ":" -- fine for typical Unix paths, verify otherwise.
                rhs = line.split(":")[1].strip()
            except IndexError:
                continue
            if line.startswith("Certificate Name:"):
                this["name"] = rhs
            elif line.startswith("Certificate Path:"):
                this["fullchain"] = rhs
            elif line.startswith("Private Key Path:"):
                this["private"] = rhs
                # the private-key line closes a certificate block:
                # commit the entry and start collecting a fresh one
                certificates[this["name"]] = this
                this = dict(name=None, fullchain=None, private=None)
        return certificates
def _apt_install_certbot(state, host):
    # only tested in ubuntu 20.04, may need to add a repo for support elsewhere
    # NOTE(review): the set literal is presumably the pyinfra v1 op
    # name/description argument -- confirm against the pyinfra version in use.
    apt.packages(
        state, host, {"Install certbot via apt"}, "certbot", present=True,
    )
def _yum_install_certbot(state, host):
raise NotImplemented("yum implementation needed. pull requests desired.")
def _install_certbot(state, host):
    """Install certbot using whichever package manager the host exposes.

    Dispatches on the host's package facts (deb -> apt, rpm -> yum) and
    raises DeployError when neither is available.
    """
    facts = host.fact
    if facts.deb_packages:
        _apt_install_certbot(state, host)
        return
    if facts.rpm_packages:
        _yum_install_certbot(state, host)
        return
    raise DeployError(("no install method found", "can not install certbot"))
@deploy("deploy certbot")
def provision(state, host, config=None):
    """Pyinfra deploy: ensure certbot is installed on the host."""
    _install_certbot(state, host)
    # TODO: configure host for existing letsencrypt account?
@operation
def certonly(state, host, cert_name, domains, dns_provider=None):
    """Pyinfra operation: obtain a certificate (certbot certonly) unless a
    certificate named *cert_name* already exists on the host."""
    current_certificates = host.fact.cert_bot_certificates()
    present = cert_name in current_certificates
    if not present:
        cb = CertBot(cert_name=cert_name, domains=domains, dns_provider=dns_provider,)
        yield cb("certonly")
@operation
def delete(state, host, cert_name):
    """Pyinfra operation: delete the certificate named *cert_name*.

    Note: unconditional -- no existence check before running certbot delete.
    """
    cb = CertBot(cert_name=cert_name,)
    yield cb("delete")
|
{"/pyinfra_certbot/__init__.py": ["/pyinfra_certbot/certbot.py"], "/example/deploy.py": ["/pyinfra_certbot/__init__.py"]}
|
6,898
|
meantheory/pyinfra-certbot
|
refs/heads/master
|
/setup.py
|
from setuptools import find_packages, setup

if __name__ == "__main__":
    # minimal packaging config for the pyinfra-certbot deploy package
    setup(
        version="0.1",
        name="pyinfra-certbot",
        packages=find_packages(),
        install_requires=("pyinfra>=0.5",),
    )
|
{"/pyinfra_certbot/__init__.py": ["/pyinfra_certbot/certbot.py"], "/example/deploy.py": ["/pyinfra_certbot/__init__.py"]}
|
6,924
|
DasAnish/DeFi-arbitrage
|
refs/heads/main
|
/data/__init__.py
|
from .dataIO import DataIO
from .dataObjects import PriceVolume, BookEntry, ArbitrageEntry, read_orders, read_orders_generator
|
{"/data/__init__.py": ["/data/dataIO.py", "/data/dataObjects.py"], "/src/backtesting.py": ["/data/__init__.py", "/src/__init__.py"], "/src/__init__.py": ["/src/arbitrageAlgorithm.py", "/src/backtesting.py"], "/main.py": ["/data/__init__.py", "/src/__init__.py"], "/data/dataObjects.py": ["/data/dataIO.py"], "/data/dataIO.py": ["/data/dataObjects.py"], "/src/arbitrageAlgorithm.py": ["/data/__init__.py"]}
|
6,925
|
DasAnish/DeFi-arbitrage
|
refs/heads/main
|
/src/backtesting.py
|
from data import *
from csv import reader
from src import *
import matplotlib.pyplot as plt
import numpy as np
def conversion(data1, timestamp, exchange):
    """Convert a flat row of 40 floats into a BookEntry.

    Assumed column layout (TODO confirm against the CSV writer): indices
    0-9 bid prices, 10-19 bid volumes, 20-29 ask prices, 30-39 ask volumes.
    """
    entry = BookEntry([], [], '', '')
    entry.timestamp = timestamp
    entry.exchange_id = exchange
    for idx in range(10):
        entry.bids.append(PriceVolume(data1[idx], data1[idx + 10]))
    for idx in range(20, 30):
        entry.asks.append(PriceVolume(data1[idx], data1[idx + 10]))
    return entry
def backtest(path1, path2, tradebook):
    """Replay two recorded order-book CSVs and collect arbitrage orders.

    Parameters
    ----------
    path1, path2 : str
        CSV files with one snapshot per row: timestamp, exchange id, then
        40 price/volume columns (see ``conversion``).
    tradebook : list
        Mutated in place; any arbitrage orders found are appended.
    """
    # Context managers: the original leaked both file handles.
    with open(path1) as f1, open(path2) as f2:
        book1 = reader(f1)
        book2 = reader(f2)
        # Skip each file's header row.
        next(book1)
        next(book2)
        for row1, row2 in zip(book1, book2):
            data1 = conversion([float(v) for v in row1[2:]], row1[0], row1[1])
            data2 = conversion([float(v) for v in row2[2:]], row2[0], row2[1])
            # BUG FIX: the original first call was arbitrage(..., data1, data1, ...),
            # which compared exchange 1's book against itself; cross-exchange
            # pairs (1 vs 2, then 2 vs 1) are clearly intended, mirroring the
            # second call.
            arbitrage(tradebook, None, data1, data2, True)
            arbitrage(tradebook, None, data2, data1, True)
def backtesting_main():
    """Summarise recorded arbitrage orders.

    Prints each order's details and the final total, and returns a pair
    (timestamps, cumulative_profits) suitable for plotting.
    """
    running_total = 0
    timestamps = []
    cumulative = []
    for order in read_orders_generator():
        ask, bid = order.ask, order.bid
        volume = min(ask.volume, bid.volume)
        profit = volume * (bid.price - ask.price)
        running_total += profit
        print(f"bid: {bid.price:.5f} \t ask: {ask.price:.5f} \t volume: {volume:.5f} \t profit: {profit:.5f}")
        timestamps.append(order.timestamp)
        cumulative.append(running_total)
    print(running_total)
    return timestamps, cumulative
if __name__ == '__main__':
    # Replay both recorded order books, persist any arbitrage orders found,
    # then plot cumulative profit over time.
    tradebook = []
    path1 = '../backtest/orderbook_bnc.csv'
    path2 = '../backtest/orderbook_hb.csv'
    backtest(path1, path2, tradebook)
    # One repr per line; read back with eval by data.dataObjects helpers.
    with open('../data/tradeorder.txt', 'w') as f:
        for item in tradebook:
            f.write("%s\n" % item)
    x,y = backtesting_main()
    plt.plot(x,y)
    plt.fill_between(x,y)
    # Keep only ~5 evenly spaced timestamps so the x axis stays readable.
    temp = ([i for i in x[::len(x)//4]] + [x[-1]])
    # temp = [i[-8:] for i in temp]
    plt.xticks(temp)
    plt.yticks(np.arange(0, 26, step=5))
    plt.show()
|
{"/data/__init__.py": ["/data/dataIO.py", "/data/dataObjects.py"], "/src/backtesting.py": ["/data/__init__.py", "/src/__init__.py"], "/src/__init__.py": ["/src/arbitrageAlgorithm.py", "/src/backtesting.py"], "/main.py": ["/data/__init__.py", "/src/__init__.py"], "/data/dataObjects.py": ["/data/dataIO.py"], "/data/dataIO.py": ["/data/dataObjects.py"], "/src/arbitrageAlgorithm.py": ["/data/__init__.py"]}
|
6,926
|
DasAnish/DeFi-arbitrage
|
refs/heads/main
|
/src/__init__.py
|
from .arbitrageAlgorithm import arbitrage
from .backtesting import backtesting_main
|
{"/data/__init__.py": ["/data/dataIO.py", "/data/dataObjects.py"], "/src/backtesting.py": ["/data/__init__.py", "/src/__init__.py"], "/src/__init__.py": ["/src/arbitrageAlgorithm.py", "/src/backtesting.py"], "/main.py": ["/data/__init__.py", "/src/__init__.py"], "/data/dataObjects.py": ["/data/dataIO.py"], "/data/dataIO.py": ["/data/dataObjects.py"], "/src/arbitrageAlgorithm.py": ["/data/__init__.py"]}
|
6,927
|
DasAnish/DeFi-arbitrage
|
refs/heads/main
|
/main.py
|
from data import *
from src import arbitrage, backtesting_main
from asyncio import gather, get_event_loop
if __name__ == '__main__':
    # Currently only runs the offline backtest summary.
    backtesting_main()
    # Live-trading loop, disabled for now: poll both exchanges and run the
    # arbitrage pass in each direction.
    # tradeBook = []
    # event_loop = get_event_loop()
    # dataIO = DataIO(event_loop)
    # while True:
    #     output = event_loop.run_until_complete(dataIO.get_next_entries())
    #     arbitrage(tradeBook, dataIO, output[0], output[1])
    #     arbitrage(tradeBook, dataIO, output[1], output[0])
|
{"/data/__init__.py": ["/data/dataIO.py", "/data/dataObjects.py"], "/src/backtesting.py": ["/data/__init__.py", "/src/__init__.py"], "/src/__init__.py": ["/src/arbitrageAlgorithm.py", "/src/backtesting.py"], "/main.py": ["/data/__init__.py", "/src/__init__.py"], "/data/dataObjects.py": ["/data/dataIO.py"], "/data/dataIO.py": ["/data/dataObjects.py"], "/src/arbitrageAlgorithm.py": ["/data/__init__.py"]}
|
6,928
|
DasAnish/DeFi-arbitrage
|
refs/heads/main
|
/data/dataObjects.py
|
from typing import List, Generator
from .dataIO import *
from dataclasses import dataclass
import os
@dataclass
class PriceVolume:
    """One order-book price level: a price and the volume quoted at it.

    FIX: the annotation was ``price: int`` but every producer in this
    package (CSV floats in backtesting, ccxt order books in DataIO)
    passes floats, so the annotation is corrected to float.
    """
    price: float
    volume: float
@dataclass()
class BookEntry:
    """Snapshot of one exchange's order book (top 10 levels per side)."""
    bids: List[PriceVolume]  # top-of-book bid levels, as supplied by the feed
    asks: List[PriceVolume]  # top-of-book ask levels, as supplied by the feed
    timestamp: str  # ISO-8601 string (see DataIO.get_orderbook)
    exchange_id: str
@dataclass()
class ArbitrageEntry:
    """A matched cross-exchange opportunity: buy at ``ask``, sell at ``bid``."""
    bid: PriceVolume
    ask: PriceVolume
    bidExchangeID: str
    askExchangeID: str
    timestamp: str
def read_orders():
    """Read all recorded arbitrage orders from data/tradeorder.txt.

    Returns a list with one object per line.  SECURITY NOTE: lines are
    rebuilt with ``eval`` — only ever point this at files written by this
    program itself, never at untrusted input.
    """
    orders = []
    with open('data/tradeorder.txt', 'r') as f:
        for line in f:  # stream lines instead of materialising readlines()
            orders.append(eval(line))
    return orders
def read_orders_generator():
    """Yield recorded arbitrage orders from ../data/tradeorder.txt one by one.

    FIX: removed a leftover debug ``print(os.getcwd())`` and the
    unnecessary ``readlines()`` materialisation.  SECURITY NOTE: each line
    is rebuilt with ``eval`` — only use on files this program wrote itself.
    """
    with open('../data/tradeorder.txt', 'r') as f:
        for line in f:
            yield eval(line)
|
{"/data/__init__.py": ["/data/dataIO.py", "/data/dataObjects.py"], "/src/backtesting.py": ["/data/__init__.py", "/src/__init__.py"], "/src/__init__.py": ["/src/arbitrageAlgorithm.py", "/src/backtesting.py"], "/main.py": ["/data/__init__.py", "/src/__init__.py"], "/data/dataObjects.py": ["/data/dataIO.py"], "/data/dataIO.py": ["/data/dataObjects.py"], "/src/arbitrageAlgorithm.py": ["/data/__init__.py"]}
|
6,929
|
DasAnish/DeFi-arbitrage
|
refs/heads/main
|
/data/dataIO.py
|
from .dataObjects import BookEntry, PriceVolume
from typing import Generator, Tuple
import os
import sys
from asyncio import gather, get_event_loop
from pprint import pprint
from dataclasses import dataclass
root = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.append(root + '/python')
import ccxt.async_support as ccxt # noqa: E402
class DataIO:
    """Fetches live order books from a fixed pair of exchanges via ccxt and
    appends arbitrage orders to data/tradeorder.txt."""

    def __init__(self, asyncio_loop):
        exchanges_symbols = (
            ('okex', 'BTC/USDT'),
            ('binance', 'BTC/USDT'),
        )
        self.exchanges = []
        # BUG FIX: fileObj was never initialised, so send_order()/close()
        # raised AttributeError before the first write.
        self.fileObj = None
        args = {'enableRateLimit': True,
                'asyncio_loop': asyncio_loop}
        for exchange_id, symbol in exchanges_symbols:
            exchange_class = getattr(ccxt, exchange_id)
            exchange = exchange_class(args)
            self.exchanges.append((exchange_id, exchange, symbol))

    @staticmethod
    async def get_orderbook(exchange_id, exchange, symbol):
        """Fetch the top 10 bid/ask levels for symbol and wrap as a BookEntry."""
        _orderbook = await exchange.fetch_order_book(symbol)
        bids = [PriceVolume(price, volume)
                for price, volume in _orderbook['bids'][:10]]
        asks = [PriceVolume(price, volume)
                for price, volume in _orderbook['asks'][:10]]
        timestamp = exchange.iso8601(exchange.milliseconds())
        return BookEntry(bids=bids,
                         asks=asks,
                         timestamp=timestamp,
                         exchange_id=exchange_id)

    async def get_next_entries(self):
        """Fetch all configured order books concurrently; returns a list of BookEntry."""
        routines = [self.get_orderbook(*args) for args in self.exchanges]
        return await gather(*routines)

    def close(self):
        """Release exchange connections and the trade-order file."""
        for _, exchange, _ in self.exchanges:
            # NOTE(review): ccxt.async_support close() is a coroutine; this
            # call only creates it — it likely needs to be awaited. TODO confirm.
            exchange.close()
        # BUG FIX: guard so close() works even if send_order was never called.
        if self.fileObj is not None:
            self.fileObj.close()

    def send_order(self, order):
        """Append one order (its repr) to data/tradeorder.txt, opening the file lazily."""
        if self.fileObj is None:
            self.fileObj = open('data/tradeorder.txt', 'a')
        self.fileObj.write(str(order) + '\n')
        # with open('data/tradeorder.txt', 'a') as f:
        #     f.write(str(order) + "\n")
|
{"/data/__init__.py": ["/data/dataIO.py", "/data/dataObjects.py"], "/src/backtesting.py": ["/data/__init__.py", "/src/__init__.py"], "/src/__init__.py": ["/src/arbitrageAlgorithm.py", "/src/backtesting.py"], "/main.py": ["/data/__init__.py", "/src/__init__.py"], "/data/dataObjects.py": ["/data/dataIO.py"], "/data/dataIO.py": ["/data/dataObjects.py"], "/src/arbitrageAlgorithm.py": ["/data/__init__.py"]}
|
6,930
|
DasAnish/DeFi-arbitrage
|
refs/heads/main
|
/src/arbitrageAlgorithm.py
|
from data import ArbitrageEntry, PriceVolume
def arbitrage(tradebook, dataIO, entry1, entry2, backtesting=False):
idx_bid = 0
idx_ask = 0
order = ArbitrageEntry(*([None]*5))
order.bidExchangeID = entry1.exchange_id
order.askExchangeID = entry2.exchange_id
order.timestamp = entry1.timestamp
# buying entry2 selling entry1
while (idx_bid < 10) and (idx_ask < 10):
if entry1.bids[idx_bid].price > entry2.asks[idx_ask].price:
volume = min(entry1.bids[idx_bid].volume, entry2.asks[idx_ask].volume)
order.bid = PriceVolume(entry1.bids[idx_bid].price, volume)
order.ask = PriceVolume(entry2.asks[idx_ask].price, volume)
if not backtesting:
dataIO.send_order(order)
else:
tradebook.append(order)
print(order)
entry1.bids[idx_bid].volume -= volume
entry2.asks[idx_ask].volume -= volume
if entry1.bids[idx_bid].volume == 0:
idx_bid += 1
if entry2.asks[idx_ask].volume == 0:
idx_ask += 1
# if no more arbitrage opportunities
elif entry1.bids[idx_bid].price <= entry2.asks[idx_ask].price:
break
|
{"/data/__init__.py": ["/data/dataIO.py", "/data/dataObjects.py"], "/src/backtesting.py": ["/data/__init__.py", "/src/__init__.py"], "/src/__init__.py": ["/src/arbitrageAlgorithm.py", "/src/backtesting.py"], "/main.py": ["/data/__init__.py", "/src/__init__.py"], "/data/dataObjects.py": ["/data/dataIO.py"], "/data/dataIO.py": ["/data/dataObjects.py"], "/src/arbitrageAlgorithm.py": ["/data/__init__.py"]}
|
6,969
|
karthikgvsk/remo
|
refs/heads/master
|
/remo/voting/views.py
|
from datetime import datetime
from django.shortcuts import get_object_or_404, render
from remo.base.decorators import permission_check
from remo.voting.models import Poll
@permission_check()
def list_votings(request):
    """List votings view: split visible polls into past/current/future."""
    user = request.user
    now = datetime.now()
    # Admins see every poll; everyone else only polls for their groups.
    if user.groups.filter(name='Admin').exists():
        polls = Poll.objects.all()
    else:
        polls = Poll.objects.filter(valid_groups__in=user.groups.all())
    context = {
        'user': user,
        'past_polls': polls.filter(end__lt=now),
        'current_polls': polls.filter(start__lt=now, end__gt=now),
        'future_polls': polls.filter(start__gt=now),
    }
    return render(request, 'list_votings.html', context)
@permission_check(group='Admin')
def edit_voting(request, slug=None):
    """Create/Edit voting view."""
    # NOTE(review): `slug` is currently unused and no form handling is
    # visible here — the view only renders the template.
    return render(request, 'edit_voting.html')
@permission_check()
def view_voting(request, slug):
    """View voting view."""
    # 404 when no poll matches the slug.
    voting = get_object_or_404(Poll, slug=slug)
    return render(request, 'view_voting.html', {'voting': voting})
|
{"/remo/voting/views.py": ["/remo/voting/models.py"]}
|
6,970
|
karthikgvsk/remo
|
refs/heads/master
|
/remo/voting/admin.py
|
from django.contrib import admin
from models import Poll, PollRange, PollRangeVotes, PollRadio, PollChoices
# Poll Range
class PollRangeVotesInline(admin.StackedInline):
    """Poll Range Votes Inline."""
    model = PollRangeVotes
    extra = 2
class PollRangeInline(admin.StackedInline):
    """Poll Range Inline."""
    model = PollRange
    extra = 1
# Radio votes
class PollChoicesInline(admin.StackedInline):
    """Poll Choices Inline."""
    model = PollChoices
    extra = 1
class PollRadioInline(admin.StackedInline):
    """Poll Radio Inline."""
    model = PollRadio
    extra = 1
class PollRadioAdmin(admin.ModelAdmin):
    # Edit a radio poll together with its answer choices.
    inlines = [PollChoicesInline]
class PollRangeAdmin(admin.ModelAdmin):
    # Edit a range poll together with its per-nominee vote rows.
    inlines = [PollRangeVotesInline]
class PollAdmin(admin.ModelAdmin):
    """Voting Admin."""
    inlines = [PollRangeInline, PollRadioInline]
    search_fields = ['name']
    date_hierarchy = 'created_on'
admin.site.register(PollRange, PollRangeAdmin)
admin.site.register(PollRadio, PollRadioAdmin)
admin.site.register(Poll, PollAdmin)
|
{"/remo/voting/views.py": ["/remo/voting/models.py"]}
|
6,971
|
karthikgvsk/remo
|
refs/heads/master
|
/remo/voting/models.py
|
from django.core.validators import MaxLengthValidator, MinLengthValidator
from django.contrib.auth.models import Group, User
from django.db import models
from uuslug import uuslug as slugify
class Poll(models.Model):
    """Poll Abstract Model."""
    # NOTE(review): docstring says "Abstract" but there is no
    # Meta.abstract = True here — this defines a concrete table. Verify.
    name = models.CharField(max_length=100)
    slug = models.SlugField(blank=True, max_length=100)  # set on first save()
    start = models.DateTimeField()
    end = models.DateTimeField()
    valid_groups = models.ForeignKey(Group, related_name='valid_polls')
    created_on = models.DateField(auto_now_add=True)
    description = models.TextField(validators=[MaxLengthValidator(500),
                                   MinLengthValidator(20)])
    created_by = models.ForeignKey(User, related_name='polls_range_created')
    users_voted = models.ManyToManyField(User, related_name='polls_voted',
                                         through='Vote')
    def __unicode__(self):
        return self.name
    class Meta:
        ordering = ['-created_on']
    def save(self, *args, **kwargs):
        # Slugify only on first save (no pk yet) so the slug stays stable
        # even if the poll is later renamed.
        if not self.pk:
            self.slug = slugify(self.name)
        super(Poll, self).save(*args, **kwargs)
class Vote(models.Model):
    """Vote model."""
    # Through-table for Poll.users_voted: records who voted and when.
    user = models.ForeignKey(User)
    ranged_poll = models.ForeignKey('Poll')
    date_voted = models.DateField(auto_now_add=True)
    def __unicode__(self):
        return '%s %s' % (self.user, self.ranged_poll)
class PollRange(models.Model):
    """Poll Model for range voting."""
    name = models.CharField(max_length=500, default='')
    poll = models.ForeignKey('Poll')
    def __unicode__(self):
        return self.name
class PollRangeVotes(models.Model):
    """ Poll Range model to count votes."""
    # One row per (range, nominee) pair with its running vote count.
    votes = models.IntegerField(default=0)
    poll_range = models.ForeignKey('PollRange')
    nominee = models.ForeignKey(User)
class PollRadio(models.Model):
    """Poll Model for radio (Boolean) voting."""
    title = models.CharField(max_length=500)
    poll = models.ForeignKey('Poll')
    def __unicode__(self):
        return self.title
class PollChoices(models.Model):
    """Poll Model with available choices for radio voting."""
    answer = models.CharField(max_length=500)
    votes = models.IntegerField(default=0)
    radio_poll = models.ForeignKey('PollRadio')
    def __unicode__(self):
        return self.answer
|
{"/remo/voting/views.py": ["/remo/voting/models.py"]}
|
6,972
|
karthikgvsk/remo
|
refs/heads/master
|
/remo/voting/helpers.py
|
from jingo import register
@register.filter
def get_users_voted(poll):
    """Returns the number of users voted to the specific poll."""
    # The count is computed in the database; .all() adds nothing here.
    return poll.users_voted.all().count()
|
{"/remo/voting/views.py": ["/remo/voting/models.py"]}
|
6,978
|
n0rrt/cs-172
|
refs/heads/master
|
/lab3.py
|
class Fraction:
    """An exact rational number kept in lowest terms.

    Arithmetic operators return new Fraction instances; after construction
    an instance is never mutated.
    """

    def __init__(self, num, den):
        self.num = num
        self.den = den
        self.simplify()

    def __str__(self):
        # Whole numbers print without a denominator.
        if self.den == 1:
            return str(self.num)
        else:
            return str(self.num)+"/"+str(self.den)

    def getNum(self):
        return self.num

    def getDen(self):
        return self.den

    def approximate(self):
        """Return the float value num/den."""
        return self.num/self.den

    def simplify(self):
        """Reduce num/den by their greatest common divisor (called from __init__)."""
        x = self.gcd(self.num, self.den)
        self.num = self.num//x
        self.den = self.den//x

    def gcd(self, a, b):
        """Euclid's algorithm."""
        if b == 0:
            return a
        else:
            return self.gcd(b, a % b)

    def __add__(self, other):
        return Fraction(((self.num * other.den) + (self.den * other.num)), (self.den * other.den))

    def __sub__(self, other):
        return Fraction(self.num, self.den) + Fraction(-1*other.num, other.den)

    def __mul__(self, other):
        return Fraction((self.num * other.num), (self.den * other.den))

    def __truediv__(self, other):
        return Fraction((self.num * other.den), (self.den * other.num))

    def __pow__(self, exp):
        """Integer power; supports negative exponents.

        BUG FIX: the original mutated self (swapped num/den in place) for
        negative exponents and multiplied in one extra factor, so
        (a/b) ** -n returned (b/a) ** (n + 1) and corrupted the operand.
        """
        if exp == 0:
            return Fraction(1, 1)
        elif exp < 0:
            # Power of the reciprocal, without touching self.
            return Fraction(self.den, self.num) ** (-exp)
        else:
            return self * (self ** (exp - 1))
if __name__ == "__main__":
    # H(n): harmonic series; T(n): sum of (1/2)**i; Z(n): 2 - T(n);
    # R(n, b): partial Riemann zeta sum — all exact, via Fraction.
    def H(n):
        total = Fraction(0,1)
        for i in range(1, n+1):
            total += Fraction(1, i)
        return total

    def T(n):
        total = Fraction(0,1)
        half = Fraction(1,2)
        for i in range(n+1):
            total += half ** i
        return total

    def Z(n):
        return (Fraction(2,1)-T(n))

    def R(n, b):
        total = Fraction(0,1)
        for ii in range(1, n+1):
            total += (Fraction(1, ii) ** b)
        return total

    riemannNums = [2,3,4,5,6,7,8]
    while True:
        try:
            userIn = int(input('Enter number of iterations (integer > 0):\n'))
        # BUG FIX: int() raises ValueError on bad input, not TypeError —
        # the original except clause never fired and the program crashed.
        except ValueError:
            print('Bad Input')
            continue
        print('H({})={}'.format(userIn, H(userIn)))
        print('H({})~={:0.8f}'.format(userIn, H(userIn).approximate()))
        print('T({})={}'.format(userIn, T(userIn)))
        print('T({})~={:0.8f}'.format(userIn, T(userIn).approximate()))
        print('Z({})={}'.format(userIn, Z(userIn)))
        print('Z({})~={:0.8f}'.format(userIn, Z(userIn).approximate()))
        for i in riemannNums:
            print('R({},{})={}'.format(userIn, i, R(userIn, i)))
            print('R({},{})~={:0.8f}'.format(userIn, i, R(userIn, i).approximate()))
        break
|
{"/lab7.py": ["/room.py", "/maze.py"], "/homework2.py": ["/media.py"], "/homework1_main.py": ["/homework1.py"], "/lab8.py": ["/BST.py"], "/maze.py": ["/room.py"], "/lab4_monsters.py": ["/monster.py"], "/lab4.py": ["/lab4_monsters.py"]}
|
6,979
|
n0rrt/cs-172
|
refs/heads/master
|
/Homework3/Ball.py
|
from Drawable import Drawable
import pygame
class Ball(Drawable):
    """A filled circle; position is the centre point."""

    def __init__(self, position, visible, radius=0, color=(0, 0, 0)):
        self.radius = radius
        self.color = color
        super().__init__(position, visible)

    def draw(self, surface):
        pygame.draw.circle(surface, self.getColor(), self.getPos(), self.getRadius())

    def get_rect(self):
        # Bounding square centred on the position.
        r = self.getRadius()
        x, y = self.getPos()
        return pygame.Rect((x - r, y - r), (2 * r, 2 * r))

    def getRadius(self):
        return self.radius
|
{"/lab7.py": ["/room.py", "/maze.py"], "/homework2.py": ["/media.py"], "/homework1_main.py": ["/homework1.py"], "/lab8.py": ["/BST.py"], "/maze.py": ["/room.py"], "/lab4_monsters.py": ["/monster.py"], "/lab4.py": ["/lab4_monsters.py"]}
|
6,980
|
n0rrt/cs-172
|
refs/heads/master
|
/lab7.py
|
from room import *
from maze import *
'''
0123456
SRRRRRR
7 8 9
R###R#R
10
R######
11/12
RR#####
13/14
#RR####
3x south
1x east
1x south
1x east
'''
# Build the 15 rooms of the maze (layout sketched in the header comment).
rooms = []
rooms.append(Room("This room is the entrance."))
rooms.append(Room("This room has a table. Maybe a dining room?"))
rooms.append(Room('This room has several toilets in a circle. Weird.'))
rooms.append(Room('This room is empty, save for a nickle on the floor. Neat!'))
rooms.append(Room('This room has a bookshelf. Perhaps it\'s a personal libary'))
rooms.append(Room('This room has a prehistoric computer lining the walls. How vintage.'))
rooms.append(Room('This room contains a squirrel. We\'re not quite sure why either.'))
rooms.append(Room('This room has a table with a key in the shape of a skull on top. It might not be useful in the future but God it looks cool.'))
rooms.append(Room('This room has another squirrel. Don\'t worry, it\'s not the same room.'))
rooms.append(Room('This room has a cash register and a few weapons on a table. Perhaps a shop? But where\'s the shopkeep...'))
rooms.append(Room('This room is very much different when compared to the others, much taller. It\'s still just an ordinary room, though.'))
rooms.append(Room('This room contains a squirrel-wedding ceremony. Perhaps the prevoius squirrels were ushers?'))
rooms.append(Room('This room is empty, no nickle this time. What was up with that squirrel wedding though?'))
rooms.append(Room('This room contains a heavy door with a skull on it. Now would be a great time for that-- oh it\'s already unlocked.'))
rooms.append(Room("This room is the exit. Good Job."))
start = rooms[0]
# top row of rooms
for x in range(6):
    rooms[x].setEast(rooms[x+1])
    rooms[x+1].setWest(rooms[x])
# Vertical connections down the left/right columns of the sketch.
rooms[0].setSouth(rooms[7])
rooms[7].setNorth(rooms[0])
rooms[4].setSouth(rooms[8])
rooms[8].setNorth(rooms[4])
rooms[6].setSouth(rooms[9])
rooms[9].setNorth(rooms[6])
rooms[7].setSouth(rooms[10])
rooms[10].setNorth(rooms[7])
rooms[10].setSouth(rooms[11])
rooms[11].setNorth(rooms[10])
rooms[11].setEast(rooms[12])
rooms[12].setWest(rooms[11])
rooms[12].setSouth(rooms[13])
rooms[13].setNorth(rooms[12])
rooms[13].setSouth(rooms[14])
rooms[14].setNorth(rooms[13])
maze = Maze(rooms[0], rooms[14])
# Interactive loop: print the current room, read a direction, and move.
# NOTE(review): each branch calls maze.moveX() AND then maze.setCurrent()
# with the neighbouring room — this assumes moveX() only validates while
# setCurrent() performs the move. Confirm against maze.py; if moveX()
# already moves, the setCurrent() call would move twice.
while True:
    print(maze.getCurrent())
    userIn = input("Enter direction to move north south east west restart\n")
    if userIn.lower() == 'north':
        if maze.moveNorth():
            print('You went north')
            maze.setCurrent(rooms[rooms.index(maze.getCurrent().getNorth())])
            if maze.atExit():
                print('You found the exit')
                break
        else:
            print("Direction invalid")
    elif userIn.lower() == 'south':
        if maze.moveSouth():
            print('You went south')
            maze.setCurrent(rooms[rooms.index(maze.getCurrent().getSouth())])
            if maze.atExit():
                print('You found the exit')
                break
        else:
            print('Direction invalid')
    elif userIn.lower() == 'east':
        if maze.moveEast():
            print('You went east')
            maze.setCurrent(rooms[rooms.index(maze.getCurrent().getEast())])
            if maze.atExit():
                print('You found the exit')
                break
        else:
            print('Direction invalid')
    elif userIn.lower() == 'west':
        if maze.moveWest():
            print('You went west')
            maze.setCurrent(rooms[rooms.index(maze.getCurrent().getWest())])
            if maze.atExit():
                print('You found the exit')
                break
        else:
            print('Direction invalid')
    elif userIn.lower() == 'restart':
        maze.reset()
    #debug
    elif userIn.lower() == 'index':
        print(rooms.index(maze.getCurrent()))
    else:
        print('invalid input try again')
|
{"/lab7.py": ["/room.py", "/maze.py"], "/homework2.py": ["/media.py"], "/homework1_main.py": ["/homework1.py"], "/lab8.py": ["/BST.py"], "/maze.py": ["/room.py"], "/lab4_monsters.py": ["/monster.py"], "/lab4.py": ["/lab4_monsters.py"]}
|
6,981
|
n0rrt/cs-172
|
refs/heads/master
|
/Homework3/Drawable.py
|
import pygame
import abc
class Drawable(metaclass = abc.ABCMeta):
    """Abstract base for anything that can be drawn on a pygame surface."""
    def __init__(self, position = (0,0), visible = True):
        self.position = position
        self.visible = visible
    @abc.abstractmethod
    def draw(self, surface):
        # Render this object onto `surface`.
        pass
    @abc.abstractmethod
    def get_rect(self):
        # Bounding rectangle of the object.
        pass
    def getPos(self):
        return self.position
    def getVisible(self):
        return self.visible
    def getColor(self):
        # NOTE(review): self.color is never assigned by this class — every
        # concrete subclass must set it before getColor is called.
        return self.color
    def setPos(self, x, y):
        self.position = (x, y)
|
{"/lab7.py": ["/room.py", "/maze.py"], "/homework2.py": ["/media.py"], "/homework1_main.py": ["/homework1.py"], "/lab8.py": ["/BST.py"], "/maze.py": ["/room.py"], "/lab4_monsters.py": ["/monster.py"], "/lab4.py": ["/lab4_monsters.py"]}
|
6,982
|
n0rrt/cs-172
|
refs/heads/master
|
/monster.py
|
#Mark Boady and Matthew Burlick
#Drexel University 2018
#CS 172
#This class defines a generic monster
#It doesn't actually DO anything.
#It just gives you a template for how a monster works.
#We can make any number of monsters and have them fight
#they just all need to INHERIT from this one so that work the same way
#Since this class is not intended to be used
#none of the methods do anything
#This class is cannot be used by itself.
import abc
class monster(metaclass=abc.ABCMeta):
    """Template for a fight-capable monster.

    Concrete monsters inherit from this class and implement every abstract
    method so that any two monsters can battle through the same interface.
    This base class does nothing on its own and cannot be instantiated.
    """

    def __init__(self):
        return

    def __str__(self):
        return "Generic Monster Class"

    @abc.abstractmethod
    def getName(self):
        """Name of the monster being fought."""
        pass

    @abc.abstractmethod
    def getDescription(self):
        """Extra details printed at the start of a fight."""
        pass

    @abc.abstractmethod
    def basicAttack(self, enemy):
        """Most common attack; `enemy` is the opposing monster."""
        pass

    @abc.abstractmethod
    def basicName(self):
        """Name of the basic attack, for display."""
        pass

    @abc.abstractmethod
    def defenseAttack(self, enemy):
        """Less frequent move that lets the monster defend itself."""
        pass

    @abc.abstractmethod
    def defenseName(self):
        """Name of the defensive move, for display."""
        pass

    @abc.abstractmethod
    def specialAttack(self, enemy):
        """Rare but most powerful move."""
        pass

    @abc.abstractmethod
    def specialName(self):
        """Name of the special attack, for display."""
        pass

    @abc.abstractmethod
    def getHealth(self):
        """Current health level; health <= 0 means unconscious."""
        pass

    @abc.abstractmethod
    def doDamage(self, damage):
        """Apply damage (positive int) or healing (negative int)."""
        pass

    @abc.abstractmethod
    def resetHealth(self):
        """Restore health for the next match."""
        pass
|
{"/lab7.py": ["/room.py", "/maze.py"], "/homework2.py": ["/media.py"], "/homework1_main.py": ["/homework1.py"], "/lab8.py": ["/BST.py"], "/maze.py": ["/room.py"], "/lab4_monsters.py": ["/monster.py"], "/lab4.py": ["/lab4_monsters.py"]}
|
6,983
|
n0rrt/cs-172
|
refs/heads/master
|
/BST.py
|
class Node():
    """A single BST node holding a value and left/right child links."""

    def __init__(self, val):
        self.__value = val
        self.__right = None
        self.__left = None

    def setLeft(self, n):
        self.__left = n

    def setRight(self, n):
        self.__right = n

    def getLeft(self):
        return self.__left

    def getRight(self):
        return self.__right

    def getValue(self):
        return self.__value


class BST():
    """Unbalanced binary search tree; duplicates descend to the left."""

    def __init__(self):
        self.__root = None

    def append(self, val):
        """Insert val, going left on <= and right on >."""
        node = Node(val)
        if self.__root is None:  # `is None` instead of `== None`
            self.__root = node
            return
        current = self.__root
        while True:
            if val <= current.getValue():
                if current.getLeft() is None:
                    current.setLeft(node)
                    return
                current = current.getLeft()
            else:
                if current.getRight() is None:
                    current.setRight(node)
                    return
                current = current.getRight()

    def isin(self, val):
        """Return True if val is in the tree, else False.

        BUG FIX: the original fell off the end of the loop and implicitly
        returned None when a branch was exhausted; it now always returns
        a bool.
        """
        current = self.__root
        while current is not None:
            if current.getValue() == val:
                return True
            elif val < current.getValue():
                current = current.getLeft()
            else:
                current = current.getRight()
        return False
|
{"/lab7.py": ["/room.py", "/maze.py"], "/homework2.py": ["/media.py"], "/homework1_main.py": ["/homework1.py"], "/lab8.py": ["/BST.py"], "/maze.py": ["/room.py"], "/lab4_monsters.py": ["/monster.py"], "/lab4.py": ["/lab4_monsters.py"]}
|
6,984
|
n0rrt/cs-172
|
refs/heads/master
|
/Homework3/Text.py
|
from Drawable import Drawable
import pygame
class Text(Drawable):
    """A drawable text label rendered with pygame's default font."""
    def __init__(self, position, visible, color=(0,0,0)):
        # Stores the text colour used by getColor() during draw().
        self.color = color
        super().__init__(position, visible)
    def draw(self, surface, message):
        # NOTE(review): this signature adds `message`, diverging from the
        # abstract Drawable.draw(surface) — callers must pass the text.
        font = pygame.font.Font('freesansbold.ttf', 12)
        text = font.render(message, True, super().getColor())
        surface.blit(text, self.get_rect())
    def get_rect(self):
        # NOTE(review): width/height are set from the position tuple, which
        # looks wrong — the size should presumably come from the rendered
        # text surface. Verify before relying on this rect.
        return pygame.Rect(self.getPos(), self.getPos())
|
{"/lab7.py": ["/room.py", "/maze.py"], "/homework2.py": ["/media.py"], "/homework1_main.py": ["/homework1.py"], "/lab8.py": ["/BST.py"], "/maze.py": ["/room.py"], "/lab4_monsters.py": ["/monster.py"], "/lab4.py": ["/lab4_monsters.py"]}
|
6,985
|
n0rrt/cs-172
|
refs/heads/master
|
/Homework3/Block.py
|
from Drawable import Drawable
import pygame
class Block(Drawable):
    """A filled square; position is the top-left corner."""

    def __init__(self, position, visible, size=0, color=(0, 0, 0)):
        self.color = color
        self.size = size
        super().__init__(position, visible)

    def draw(self, surface):
        pygame.draw.rect(surface, self.getColor(), self.get_rect(), 0)

    def get_rect(self):
        side = self.getSize()
        return pygame.Rect(self.getPos(), (side, side))

    def getSize(self):
        return self.size

    def setPos(self, pos):
        self.position = pos

    def setVis(self, bool):
        self.visible = bool
|
{"/lab7.py": ["/room.py", "/maze.py"], "/homework2.py": ["/media.py"], "/homework1_main.py": ["/homework1.py"], "/lab8.py": ["/BST.py"], "/maze.py": ["/room.py"], "/lab4_monsters.py": ["/monster.py"], "/lab4.py": ["/lab4_monsters.py"]}
|
6,986
|
n0rrt/cs-172
|
refs/heads/master
|
/homework2.py
|
from media import *
# Catalogue fixtures — each constructor takes (media_type, name, rating, ...)
# plus subclass-specific fields (artist/album, director/runtime, resolution).
song1 = Song("song", "Waiting Room", "*****", "Fugazi", "13 Songs")
song2 = Song("song", "HEAT", "*****", "BROCKHAMPTON", "SATURATION")
song3 = Song("song", "Alright","*****", "Kendrick Lamar", "To Pimp A Butterfly")
song4 = Song("song", "Bound 2","*****", "Kanye West", "Yeezus")
movie1 = Movie("movie", "Pulp Fiction", "8.9/10", "Quentin Tarantino", "2 hours 34 minutes")
movie2 = Movie("movie", "The Matrix", "8.7/10", "The Wachowski Brothers", "2 hours 16 minutes")
movie3 = Movie("movie", "Inglorious Basterds", "8.3/10", "Quentin Tarantino", "2 hours 33 minutes")
movie4 = Movie("movie", "Spirited Away", "8.6/10", "Hayao Miyazaki", "2 hours 5 minutes")
picture1 = Picture("picture", "Mountain", "****", "1920x1080")
picture2 = Picture("picture", "Lake", "****", "1920x1080")
picture3 = Picture("picture", "Tree", "*****", "1920x1080")
picture4 = Picture("picture", "Dog", "*****", "1920x1080")
# Flat list iterated by the display/search helpers below.
mediaList = [song1, song2, song3, song4, movie1, movie2, movie3, movie4, picture1, picture2, picture3, picture4]
def displayAll():
    """Print every media item's details."""
    for item in mediaList:
        (item.getAll())
    print("\n")


def displaySongs():
    """Print details of every Song in the catalogue."""
    for item in mediaList:
        if isinstance(item, Song):
            (item.getAll())
    print("\n")


def displayMovies():
    """Print details of every Movie in the catalogue."""
    for item in mediaList:
        if isinstance(item, Movie):
            (item.getAll())
    print("\n")


def displayPictures():
    """Print details of every Picture in the catalogue."""
    for item in mediaList:
        if isinstance(item, Picture):
            (item.getAll())
    print("\n")
if __name__ == "__main__":
    # Simple REPL over the catalogue: display/play/quit commands.
    inMedia = False
    try:
        userIn = input("What do you want to display?\n")
    except:
        print("Enter valid input")
    while not(userIn.lower()==("quit")):
        if userIn.lower()==("display all"):
            displayAll()
        elif userIn.lower()==("display songs"):
            displaySongs()
        elif userIn.lower()==("display movies"):
            displayMovies()
        elif userIn.lower()==("display pictures"):
            displayPictures()
        elif userIn.lower().split()[0] == "play":
            # Match a two-word title first; on IndexError (only one word
            # after "play") fall back to a one-word substring match.
            for i in range(len(mediaList)):
                try:
                    if (str(userIn.lower().split()[1] + " " + userIn.lower().split()[2])) in mediaList[i].getName().lower():
                        mediaList[i].play()
                        inMedia = True
                        break
                except IndexError:
                    if (str(userIn.lower().split()[1]) in mediaList[i].getName().lower()):
                        mediaList[i].play()
                        inMedia=True
                        break
            if inMedia == False:
                print("Unable to find the requested media")
            inMedia = False
        else:
            # Bare title typed: show that item's details.
            inMedia = False
            for i in range(len(mediaList)):
                if userIn.lower()==(mediaList[i].getName().lower()):
                    mediaList[i].getAll()
                    inMedia = True
                    break
            if inMedia == False:
                print("Unable to find the requested media or command")
        userIn = input("What do you want to display?\n")
        inMedia = False
|
{"/lab7.py": ["/room.py", "/maze.py"], "/homework2.py": ["/media.py"], "/homework1_main.py": ["/homework1.py"], "/lab8.py": ["/BST.py"], "/maze.py": ["/room.py"], "/lab4_monsters.py": ["/monster.py"], "/lab4.py": ["/lab4_monsters.py"]}
|
6,987
|
n0rrt/cs-172
|
refs/heads/master
|
/media.py
|
class Media:
    """Base class for a catalogue item: a type tag, a name and a rating."""

    def __init__(self, media_type, name, rating):
        self.__media_type = media_type
        self.__name = name
        self.__rating = rating

    def __str__(self):
        # BUG FIX: the original was str(a, b, c), which raises TypeError —
        # str()'s extra positional args are encoding/errors, not fields.
        return "{} {} {}".format(self.getType(), self.getName(), self.getRating())

    def getType(self):
        return self.__media_type

    def getName(self):
        return self.__name

    def getRating(self):
        return self.__rating

    def getAll(self):
        """Print this item's description (delegates to __str__)."""
        print(self)


class Movie(Media):
    """A movie with a director and a runtime."""

    def __init__(self, media_type, name, rating, director, runtime):
        self.__director = director
        self.__runtime = runtime
        super().__init__(media_type, name, rating)

    def __str__(self):
        return self.getName() + "\n" + self.getDirector() + "\n" + self.getRating()

    def play(self):
        print("{}, playing now".format(self.getName()))

    def getDirector(self):
        return self.__director

    def getRuntime(self):
        return self.__runtime


class Song(Media):
    """A song with an artist and an album."""

    def __init__(self, media_type, name, rating, artist, album):
        self.__artist = artist
        self.__album = album
        super().__init__(media_type, name, rating)

    def __str__(self):
        return self.getName() + "\n" + self.getArtist() + "\n" + self.getAlbum() + "\n" + self.getRating()

    def play(self):
        print("{} by {}, now playing".format(self.getName(), self.getArtist()))

    def getArtist(self):
        return self.__artist

    def getAlbum(self):
        return self.__album


class Picture(Media):
    """A picture with a resolution string."""

    def __init__(self, media_type, name, rating, resolution):
        self.__resolution = resolution
        super().__init__(media_type, name, rating)

    def __str__(self):
        return self.getName() + "\n" + self.getResolution() + "\n" + self.getRating()

    def play(self):
        print("Showing {}".format(str(self.getName())))

    def getResolution(self):
        return self.__resolution
|
{"/lab7.py": ["/room.py", "/maze.py"], "/homework2.py": ["/media.py"], "/homework1_main.py": ["/homework1.py"], "/lab8.py": ["/BST.py"], "/maze.py": ["/room.py"], "/lab4_monsters.py": ["/monster.py"], "/lab4.py": ["/lab4_monsters.py"]}
|
6,988
|
n0rrt/cs-172
|
refs/heads/master
|
/Homework4/main.py
|
#Tim Harris tlh339 - Main script
from Node import Node
from LinkedList import LinkedList
from Employee import Employee
employeeList = LinkedList()
#custom error handler
class invalidError(Exception):
    """Raised by validate() when an employee record is inconsistent."""
    pass
#creates new employee
def makeNewEmp(idNum, rate):
    # NOTE(review): validate() is called BEFORE appending, so it checks the
    # list without the new employee; the caller re-validates afterwards.
    # Confirm this two-step flow is intentional.
    if validate():
        newEmp = Employee(idNum, rate)
        employeeList.append(newEmp)
#validates that all values are acceptable
def validate():
    """Check every employee in the module-level employeeList.

    Rules: ids must be unique, hourly rate must be at least $6, and hours
    may not be negative.  Raises invalidError on the first violation and
    returns True when everything passes.

    Bug fixes: the old duplicate scan used ``range(index, len - x)`` which
    skipped some pairs (e.g. it never compared employee 1 against the last
    employee); the ``return False`` statements after each ``raise`` were
    unreachable; and two error messages were placeholders ('bla bla bla').
    """
    n = len(employeeList)
    for x in range(n):
        # Compare this employee's id against every *later* one exactly once.
        for y in range(x + 1, n):
            if employeeList[x].getId() == employeeList[y].getId():
                raise invalidError('Duplicate employee')
        if int(employeeList[x].getRate()) < 6:
            raise invalidError('Hourly rate must be at least $6')
        if int(employeeList[x].getHours()) < 0:
            raise invalidError('Hours cannot be negative')
    return True
'''
a. new employee
b. set hours
c. display payroll
d. update rate
e. remove employee
f. quit
'''
if __name__ == "__main__":
    # Menu letters accepted from the user.  A set is used (instead of the
    # old substring test against the string 'abcdef') so that '' or
    # multi-letter input such as 'ab' is rejected rather than accepted.
    options = set('abcdef')
    options_string = "a. new employee\nb. set hours\nc. display payroll\nd. update rate\ne. remove employee\nf. quit\n"

    def promptOption():
        """Prompt repeatedly until a valid single menu letter is entered.

        Bug fix: the old code re-prompted only once and then proceeded
        with whatever came next, even if it was still invalid.
        """
        choice = input(options_string)
        while choice.lower() not in options:
            print('invalid option')
            choice = input(options_string)
        return choice

    userIn = promptOption()
    while userIn.lower() != 'f':
        if userIn.lower() == 'a':
            print('add employee')
            try:
                idNum = input('Enter ID num: ')
                rate = input("Enter hourly rate: ")
                makeNewEmp(idNum, rate)
                validate()
            except(invalidError, ValueError) as e:
                print(str(e))
        elif userIn.lower() == 'b':
            print("set hours")
            for emp in range(len(employeeList)):
                try:
                    newHours = int(input("Enter hours for {}: ".format(employeeList[emp].getId())))
                    employeeList[emp].setHours(newHours)
                    employeeList[emp].setWage(int(employeeList[emp].getHours()) * int(employeeList[emp].getRate()))
                    validate()
                except(invalidError, ValueError):
                    print("invalid input")
        elif userIn.lower() == 'c':
            print('display payroll')
            for emp in range(len(employeeList)):
                print("ID: {}\nHours: {}\nRate: ${:.2f}\nWages: ${:.2f}\n".format(employeeList[emp].getId(), employeeList[emp].getHours(), int(employeeList[emp].getRate()), employeeList[emp].getWage()))
        elif userIn.lower() == 'd':
            print('change rate')
            newid = input('Enter the id: ')
            found = False
            for emp in range(len(employeeList)):
                if employeeList[emp].getId() == str(newid):
                    try:
                        newrate = input("Enter the new rate: $")
                        employeeList[emp].setRate(newrate)
                        validate()
                    except(invalidError, ValueError):
                        print('invalid input')
                    found = True
                    break
            if not found:
                print('ID not found')
        elif userIn.lower() == 'e':
            print('remove employee')
            newid = input('Enter the id: ')
            found = False
            for emp in range(len(employeeList)):
                if employeeList[emp].getId() == str(newid):
                    employeeList.remove(employeeList[emp])
                    found = True
                    break
            if not found:
                print('ID not found')
        userIn = promptOption()
|
{"/lab7.py": ["/room.py", "/maze.py"], "/homework2.py": ["/media.py"], "/homework1_main.py": ["/homework1.py"], "/lab8.py": ["/BST.py"], "/maze.py": ["/room.py"], "/lab4_monsters.py": ["/monster.py"], "/lab4.py": ["/lab4_monsters.py"]}
|
6,989
|
n0rrt/cs-172
|
refs/heads/master
|
/homework1.py
|
class Question:
    """One multiple-choice question: a prompt, four answers, and the
    number (1-4) of the correct answer."""

    def __init__(self, prompt='', ans1='', ans2='', ans3='', ans4='', ansCorrect=0):
        self.__prompt = prompt
        # The four answer choices, in display order.
        self.__answers = [ans1, ans2, ans3, ans4]
        self.__ansCorrect = ansCorrect

    def __str__(self):
        return str(self.__prompt)

    def getPrompt(self):
        return self.__prompt

    def getAns1(self):
        return self.__answers[0]

    def getAns2(self):
        return self.__answers[1]

    def getAns3(self):
        return self.__answers[2]

    def getAns4(self):
        return self.__answers[3]

    def getAnsCorrect(self):
        return self.__ansCorrect

    def setPrompt(self, question):
        self.__prompt = question

    def setAns1(self, ans):
        self.__answers[0] = ans

    def setAns2(self, ans):
        self.__answers[1] = ans

    def setAns3(self, ans):
        self.__answers[2] = ans

    def setAns4(self, ans):
        self.__answers[3] = ans

    def setAnsCorrect(self, ans):
        self.__ansCorrect = ans

    def askQuestion(self):
        """Return the prompt followed by the four numbered answer lines."""
        lines = ['{}'.format(self.__prompt)]
        for number, answer in enumerate(self.__answers, start=1):
            lines.append('{}. {}'.format(number, answer))
        return '\n'.join(lines)
|
{"/lab7.py": ["/room.py", "/maze.py"], "/homework2.py": ["/media.py"], "/homework1_main.py": ["/homework1.py"], "/lab8.py": ["/BST.py"], "/maze.py": ["/room.py"], "/lab4_monsters.py": ["/monster.py"], "/lab4.py": ["/lab4_monsters.py"]}
|
6,990
|
n0rrt/cs-172
|
refs/heads/master
|
/homework1_main.py
|
from homework1 import Question
class invalidAnswerError(Exception):
    """Raised when the player's answer is not an integer in the range 1-4."""
    pass
def getAns():
    """Prompt until the user types an integer between 1 and 4; return it.

    Non-numeric input (ValueError) and out-of-range numbers
    (invalidAnswerError) both print an error and re-prompt.
    """
    while True:
        try:
            choice = int(input('Enter your answer: '))
            if choice not in range(1, 5):
                raise invalidAnswerError
            return choice
        except (invalidAnswerError, ValueError):
            print('Error: enter a valid answer')
if __name__ == "__main__":
    print('Welcome to the Adventure Time Episode Quiz')
    # Bug fix: the old dash loop used print('-', end='') with no trailing
    # newline, so the next line of text was glued onto the divider.
    print('-' * 20)
    # (also fixed the 'epsiode' typo in the displayed instructions)
    print('Match the episode description to the title of the episode.')
    # (prompt, answer1, answer2, answer3, answer4, number of correct answer)
    # Building the bank from data replaces ten near-identical runs of
    # setter boilerplate in the original.
    question_data = [
        ('Princess Bubblegum accidentally made a potion thing that was supposes to bring people back alive, but turned them into zombies instead.',
         'Wizard', 'What is Life?', 'Slumber Party Panic', 'Dungeon', 3),
        ("Marceline takes Finn and Jake's treehouse",
         'Evicted!', 'Gut Grinder', 'Rainy Day Daydream', 'What Have You Done?', 1),
        ("Marceline and her ghost friends tricks Finn and Jake that they made them into vampires, but her ghost friends try to kill Finn and Jake.",
         'The Real You', 'Death in Blossom', 'The Limit', 'Heat Signature', 4),
        ("A deer rampages in the candy kingdom, breaks Finn's Legs, and hits Jake's Head. Finn wakes up and Jake has gone crazy acting like it's his birthday.",
         'No One Can Hear You', 'Wizard Battle', 'From Bad to Worse', 'The New Frontier', 1),
        ('Lady Ranicorn and Princess Bubblegum save Finn and Jake from Iceking',
         'Return to the Nightosphere', 'In Your Footsteps', 'Lady and Peebles', 'Too Young', 3),
        ("BMO tries to solve the mystery of Finn's missing sock",
         'Burning Low', 'Gotcha!', 'You Made Me', 'BMO Noire', 4),
        ("Ice King takes princess's body parts and Finn and Jake investigate",
         'The Hard Easy', 'I Remember You', 'Princess Monster Wife', 'Two Sword', 3),
        ('Finn and Jake try to wake up Marceline by going into her memories, but after they do, her ex-bf tricked them into Marceline liking him again',
         'Memory of a Memory', 'The Creeps', 'Orb', 'What Was Missing', 1),
        ('Finn tells lies cause to trouble between Ice King and Flame Princess.',
         'Shh!', 'Frost and Fire', 'All Your Fault', 'Bad Little Boy', 2),
        ('Fern takes Finn to a dungeon to trap Finn, so Fern can become the real Finn',
         'Three Buckets', 'The Wild Hunt', 'Son of Rap Bear', 'Come Along with Me', 1),
    ]
    questionList = [Question(prompt, a1, a2, a3, a4, correct)
                    for (prompt, a1, a2, a3, a4, correct) in question_data]
    # Players 1 and 2 alternate questions; with ten questions each player
    # answers five.
    playerNum = 1
    player1Score = 0
    player2Score = 0
    for question in questionList:
        print('Player {}, here is your question: '.format(playerNum))
        print(question.askQuestion())
        if getAns() == question.getAnsCorrect():
            print('Correct')
            if playerNum == 1:
                player1Score += 1
            else:
                player2Score += 1
        else:
            print('Incorrect')
        currentScore = player1Score if playerNum == 1 else player2Score
        print('Player {} score: {}'.format(playerNum, currentScore))
        # Switch to the other player for the next question.
        playerNum = 2 if playerNum == 1 else 1
    print('Final score:\nPlayer 1: {}\nPlayer 2: {}'.format(player1Score, player2Score))
    if player1Score == player2Score:
        print('Tie')
    elif player1Score > player2Score:
        print('Player 1 wins')
    elif player2Score > player1Score:
        print('Player 2 wins')
|
{"/lab7.py": ["/room.py", "/maze.py"], "/homework2.py": ["/media.py"], "/homework1_main.py": ["/homework1.py"], "/lab8.py": ["/BST.py"], "/maze.py": ["/room.py"], "/lab4_monsters.py": ["/monster.py"], "/lab4.py": ["/lab4_monsters.py"]}
|
6,991
|
n0rrt/cs-172
|
refs/heads/master
|
/lab8.py
|
from BST import *
import random
import matplotlib.pyplot as plt
import time
def populate(n):
    """Fill a plain list and a BST with the same n random ints in [0, n].

    Values are drawn independently, so duplicates are possible.
    Returns the (list, BST) pair.
    """
    values = []
    tree = BST()
    for _ in range(n):
        draw = random.randrange(0, n + 1)
        values.append(draw)
        tree.append(draw)
    return (values, tree)
def isIn(myList, num):
    """Linear-scan membership test: True if num equals any element of myList.

    Kept as an explicit O(n) scan (via any) because the lab compares this
    list search against BST lookup; the hand-rolled loop-and-return is
    replaced with the idiomatic generator form.
    """
    return any(x == num for x in myList)
def timing(myList, myBST):
    """Time looking up every element of myList in both structures.

    Uses isIn for the plain list and myBST.isin for the tree; prints both
    wall-clock durations and returns them as (list_seconds, bst_seconds).
    """
    start = time.time()
    for value in myList:
        isIn(myList, value)
    elapsed_list = time.time() - start

    start = time.time()
    for value in myList:
        myBST.isin(value)
    elapsed_bst = time.time() - start

    print("List time: {}\nBST time: {}".format(elapsed_list, elapsed_bst))
    return (elapsed_list, elapsed_bst)
if __name__ == "__main__":
    # Sanity check: every value placed into the list must be found by isIn.
    tup=populate(100)
    newList=tup[0]
    newBST=tup[1]
    count=0
    for val in newList:
        if isIn(newList, val):
            count += 1
    print(count)  # should print 100
    # Benchmark list vs BST lookups over growing input sizes.
    bstTimes = []
    listTimes = []
    xVals = []
    for n in range(1, 10000, 1000):
        newTup = populate(n)
        newList = newTup[0]
        newBST = newTup[1]
        times = timing(newList, newBST)
        listTime = times[0]
        bstTime = times[1]
        listTimes.append(listTime)
        bstTimes.append(bstTime)
        xVals.append(n)
    # Cumulative (running) averages of both timing series, used to smooth
    # out noise in the plotted curves.
    listTotal = 0
    bstTotal = 0
    listCount = 0
    bstCount = 0
    listAverage = []
    bstAverage = []
    for x in listTimes:
        listTotal += x
        listCount += 1
        listAverage.append(listTotal/listCount)
    for y in bstTimes:
        bstTotal += y
        bstCount += 1
        bstAverage.append(bstTotal/bstCount)
    # Plot the raw and averaged curves for both data structures.
    plt.plot(xVals, listTimes, label="List times")
    plt.plot(xVals, bstTimes, label="BST times")
    plt.plot(xVals, listAverage, label ="list average")
    plt.plot(xVals, bstAverage, label = "bst average")
    plt.legend()
    plt.show()
|
{"/lab7.py": ["/room.py", "/maze.py"], "/homework2.py": ["/media.py"], "/homework1_main.py": ["/homework1.py"], "/lab8.py": ["/BST.py"], "/maze.py": ["/room.py"], "/lab4_monsters.py": ["/monster.py"], "/lab4.py": ["/lab4_monsters.py"]}
|
6,992
|
n0rrt/cs-172
|
refs/heads/master
|
/maze.py
|
from room import *
class Maze:
    """Tracks the player's position in a graph of Room objects.

    A maze has a start room, an exit room, and the player's current room.
    The move* methods follow the matching door when one exists.
    """

    def __init__(self, st=None, ex=None):
        # Room the player starts in.
        self.__start_room = st
        # If the player reaches this room they win.
        self.__exit_room = ex
        # Room the player is currently in.
        self.__current = st

    def getCurrent(self):
        """Return the room the player is currently in."""
        return self.__current

    def setCurrent(self, room):
        """Place the player directly in the given room."""
        self.__current = room

    # Each move method checks the matching door of the current room.  If
    # the door is None the move is impossible and False is returned; the
    # player stays put.  Otherwise the player is moved into the next room
    # and True is returned.  (Bug fix: the originals returned True without
    # ever updating the current room, despite their comments saying the
    # player should be moved.)

    def moveNorth(self):
        nxt = self.getCurrent().getNorth()
        if nxt is None:
            return False
        self.__current = nxt
        return True

    def moveSouth(self):
        nxt = self.getCurrent().getSouth()
        if nxt is None:
            return False
        self.__current = nxt
        return True

    def moveEast(self):
        nxt = self.getCurrent().getEast()
        if nxt is None:
            return False
        self.__current = nxt
        return True

    def moveWest(self):
        nxt = self.getCurrent().getWest()
        if nxt is None:
            return False
        self.__current = nxt
        return True

    def atExit(self):
        """True when the player has reached the exit room."""
        return self.__current == self.__exit_room

    def reset(self):
        """Send the player back to the start room.

        Bug fix: the original used '==' (a comparison with a discarded
        result) instead of '=' — reset() silently did nothing.
        """
        self.__current = self.__start_room
|
{"/lab7.py": ["/room.py", "/maze.py"], "/homework2.py": ["/media.py"], "/homework1_main.py": ["/homework1.py"], "/lab8.py": ["/BST.py"], "/maze.py": ["/room.py"], "/lab4_monsters.py": ["/monster.py"], "/lab4.py": ["/lab4_monsters.py"]}
|
6,993
|
n0rrt/cs-172
|
refs/heads/master
|
/Homework4/LinkedList.py
|
#Tim Harris tlh339 - LinkedList class
from Node import Node
class LinkedList:
    """Minimal singly linked list (used to hold Employee records)."""

    def __init__(self):
        self.__head = None

    def isEmpty(self):
        """True when the list holds no nodes."""
        return self.__head == None

    def append(self, data):
        """Add data in a new node at the tail of the list."""
        newNode = Node(data)
        if self.isEmpty():
            self.__head = newNode
        else:
            current = self.__head
            while current.getNext() != None:
                current = current.getNext()
            current.setNext(newNode)

    def remove(self, item):
        """Unlink the first node whose data equals item.

        Bug fix: the search loop used ``found == True`` (a comparison whose
        result was discarded) instead of ``found = True``, so once the item
        was located the loop never terminated.  As before, removing an item
        that is not present runs off the end of the list and raises
        AttributeError.
        """
        current = self.__head
        previous = None
        found = False
        while not found:
            if current.getData() == item:
                found = True
            else:
                previous = current
                current = current.getNext()
        if previous == None:
            # The match was the head node.
            self.__head = current.getNext()
        else:
            previous.setNext(current.getNext())

    def __len__(self):
        """Number of nodes in the list."""
        if self.__head == None:
            return 0
        current = self.__head
        counter = 1
        while current.getNext() != None:
            counter += 1
            current = current.getNext()
        return counter

    def __str__(self):
        """'a->b->c->' style rendering of the node data."""
        string = ""
        current = self.__head
        while current != None:
            string += str(current.getData()) + '->'
            current = current.getNext()
        return string

    def __getitem__(self, index):
        """Return the data at position index (no bounds checking)."""
        current = self.__head
        for i in range(index):
            current = current.getNext()
        return current.getData()
|
{"/lab7.py": ["/room.py", "/maze.py"], "/homework2.py": ["/media.py"], "/homework1_main.py": ["/homework1.py"], "/lab8.py": ["/BST.py"], "/maze.py": ["/room.py"], "/lab4_monsters.py": ["/monster.py"], "/lab4.py": ["/lab4_monsters.py"]}
|
6,994
|
n0rrt/cs-172
|
refs/heads/master
|
/lab4_monsters.py
|
from monster import monster
class bear(monster):
    """Bear-pack monster (default 40 HP).

    All four monster subclasses expose the interface monster_battle
    consumes: getName/getDescription, basic/defense/special attacks and
    their *Name accessors, getHealth, doDamage and resetHealth.
    """

    def __init__(self, name="8 F***ing bears", description="They're 8 bears", basicName="Claw", defenseName="Block", specialName="Hybernate", health = 40):
        self.__name = name
        self.__description = description
        self.__basicName = basicName
        self.__defenseName = defenseName
        self.__specialName = specialName
        self.__health = health
        # Remember the starting value so resetHealth() restores whatever
        # health the caller chose rather than a hard-coded constant
        # (identical behaviour for the default of 40).
        self.__startHealth = health

    def __str__(self):
        return self.__name

    def getName(self):
        return self.__name

    def getDescription(self):
        return self.__description

    def basicAttack(self, enemy):
        enemy.doDamage(10)

    def basicName(self):
        return self.__basicName

    def defenseAttack(self, enemy):
        # Defensive move: heal self by 10 (negative damage).
        self.doDamage(-10)

    def defenseName(self):
        return self.__defenseName

    def specialAttack(self, enemy):
        # Hybernate: restore full health.
        self.resetHealth()

    def specialName(self):
        return self.__specialName

    def getHealth(self):
        return self.__health

    def doDamage(self, damage):
        self.__health -= damage

    def resetHealth(self):
        self.__health = self.__startHealth
class lapras(monster):
    # NOTE(review): the constructor default is 190 HP but resetHealth()
    # restores only 160, so after monster_battle's initial reset the
    # effective maximum is 160 — confirm which value was intended.
    def __init__(self, name="Lapras", description="A water type pokemon", basicName="Blizzard Burn", defenseName="Block", specialName="Ice Beam", health = 190):
        self.__name = name
        self.__description = description
        self.__basicName = basicName
        self.__defenseName = defenseName
        self.__specialName = specialName
        self.__health = health
    def __str__(self):
        return str(self.__name)
    def getName(self):
        return self.__name
    def getDescription(self):
        return self.__description
    def basicAttack(self, enemy):
        # Heavy basic hit: 160 damage.
        enemy.doDamage(160)
    def basicName(self):
        return self.__basicName
    def defenseAttack(self, enemy):
        # Defensive move: heal self by 10 (negative damage).
        self.doDamage(-10)
    def defenseName(self):
        return self.__defenseName
    def specialAttack(self, enemy):
        # Ice Beam damages the enemy and restores own health.
        enemy.doDamage(100)
        self.resetHealth()
    def specialName(self):
        return self.__specialName
    def getHealth(self):
        return self.__health
    def doDamage(self, damage):
        self.__health -= damage
    def resetHealth(self):
        self.__health = 160
class pile(monster):
    """Pile-of-filth monster (default 45 HP); same interface as the other
    monster subclasses consumed by monster_battle."""

    def __init__(self, name='Pile of Sh*t', description='It\'s a literal pile of sh*t', basicName='Splatter', defenseName='Steam', specialName='Waft', health=45):
        self.__name = name
        self.__description = description
        self.__basicName = basicName
        self.__defenseName = defenseName
        self.__specialName = specialName
        self.__health = health
        # Remember the starting value so resetHealth() restores whatever
        # health the caller chose rather than a hard-coded constant
        # (identical behaviour for the default of 45).
        self.__startHealth = health

    def __str__(self):
        return str(self.__name)

    def getName(self):
        return self.__name

    def getDescription(self):
        return self.__description

    def basicAttack(self, enemy):
        enemy.doDamage(10)

    def basicName(self):
        return self.__basicName

    def defenseAttack(self, enemy):
        # Defensive move: heal self by 10 (negative damage).
        self.doDamage(-10)

    def defenseName(self):
        return self.__defenseName

    def specialAttack(self, enemy):
        enemy.doDamage(20)

    def specialName(self):
        return self.__specialName

    def getHealth(self):
        return self.__health

    def doDamage(self, damage):
        self.__health -= damage

    def resetHealth(self):
        self.__health = self.__startHealth
class tree(monster):
    """Tree monster (default 200 HP); same interface as the other monster
    subclasses consumed by monster_battle."""

    def __init__(self, name='Oaker', description='Make like a Tree and have a kid with an Ogre.', basicName='Willow Womp', defenseName='Re-Root', specialName='Photosynthesis', health=200):
        self.__name = name
        self.__description = description
        self.__basicName = basicName
        self.__defenseName = defenseName
        self.__specialName = specialName
        self.__health = health
        # Remember the starting value so resetHealth() restores whatever
        # health the caller chose rather than a hard-coded constant
        # (identical behaviour for the default of 200).
        self.__startHealth = health

    def __str__(self):
        return str(self.__name)

    def getName(self):
        return self.__name

    def getDescription(self):
        return self.__description

    def basicAttack(self, enemy):
        enemy.doDamage(40)

    def basicName(self):
        return self.__basicName

    def defenseAttack(self, enemy):
        # Defensive move: heal self by 40 (negative damage).
        self.doDamage(-40)

    def defenseName(self):
        return self.__defenseName

    def specialAttack(self, enemy):
        # Photosynthesis: restore full health.
        # Bug fix: the original said ``self.resetHealth`` without parentheses,
        # so the method was never called and the special move did nothing.
        self.resetHealth()

    def specialName(self):
        return self.__specialName

    def getHealth(self):
        return self.__health

    def doDamage(self, damage):
        self.__health -= damage

    def resetHealth(self):
        self.__health = self.__startHealth
|
{"/lab7.py": ["/room.py", "/maze.py"], "/homework2.py": ["/media.py"], "/homework1_main.py": ["/homework1.py"], "/lab8.py": ["/BST.py"], "/maze.py": ["/room.py"], "/lab4_monsters.py": ["/monster.py"], "/lab4.py": ["/lab4_monsters.py"]}
|
6,995
|
n0rrt/cs-172
|
refs/heads/master
|
/room.py
|
class Room:
    """One location in the maze.

    A room has a unique description (shown to the player) and four doors;
    a door holds the neighbouring Room, or None when that direction is
    blocked.
    """

    def __init__(self, descr):
        # Description shown to the player so they know where they are.
        self.__descr = descr
        # Neighbouring rooms keyed by direction; None means no door.
        self.__doors = {'north': None, 'south': None, 'east': None, 'west': None}

    def __str__(self):
        return str(self.__descr)

    # --- accessors -------------------------------------------------
    def getNorth(self):
        return self.__doors['north']

    def getSouth(self):
        return self.__doors['south']

    def getEast(self):
        return self.__doors['east']

    def getWest(self):
        return self.__doors['west']

    # --- mutators --------------------------------------------------
    def setDescription(self, d):
        self.__descr = d

    def setNorth(self, n):
        self.__doors['north'] = n

    def setSouth(self, s):
        self.__doors['south'] = s

    def setEast(self, e):
        self.__doors['east'] = e

    def setWest(self, w):
        self.__doors['west'] = w
|
{"/lab7.py": ["/room.py", "/maze.py"], "/homework2.py": ["/media.py"], "/homework1_main.py": ["/homework1.py"], "/lab8.py": ["/BST.py"], "/maze.py": ["/room.py"], "/lab4_monsters.py": ["/monster.py"], "/lab4.py": ["/lab4_monsters.py"]}
|
6,996
|
n0rrt/cs-172
|
refs/heads/master
|
/lab4.py
|
#Mark Boady and Matthew Burlick
#Drexel University 2018
#CS 172
from lab4_monsters import *
import random
#This function has two monsters fight and returns the winner
def monster_battle(m1, m2):
#first reset everyone's health!
#####TODO######
m1.resetHealth()
m2.resetHealth()
#next print out who is battling
print("Starting Battle Between")
print(m1.getName()+": "+m1.getDescription())
print(m2.getName()+": "+m2.getDescription())
#Whose turn is it?
attacker = None
defender = None
draw = random.random()
if (draw >= 0.5):
attacker = m1
defender = m2
else:
attacker = m2
defender = m1
#Select Randomly whether m1 or m2 is the initial attacker
#to other is the initial definder
######TODO######
print(attacker.getName()+" goes first.")
#Loop until either 1 is unconscious or timeout
while( m1.getHealth() > 0 and m2.getHealth() > 0):
#Determine what move the monster makes
#Probabilities:
# 60% chance of standard attack
# 20% chance of defense move
# 20% chance of special attack move
#Pick a number between 1 and 100
move = random.randint(1,100)
#It will be nice for output to record the damage done
before_health=defender.getHealth()
#for each of these options, apply the appropriate attack and
#print out who did what attack on whom
if(move >=1 and move <= 60):
attacker.basicAttack(defender)
print("{} used {} on {}".format(attacker.getName(), attacker.basicName(), defender.getName()))
elif(move>=61 and move <= 80):
#Defend!
attacker.defenseAttack(defender)
print("{} used {} on {}".format(attacker.getName(), attacker.defenseName(), defender.getName()))
else:
#Special Attack!
attacker.specialAttack(defender)
print("{} used {} on {}".format(attacker.getName(), attacker.specialName(), defender.getName()))
#Swap attacker and defender
######TODO######
temp = attacker
attacker = defender
defender = temp
#Print the names and healths after this round
######TODO######
print("{} health: {}\n{} health: {}".format(attacker.getName(), attacker.getHealth(), defender.getName(), defender.getHealth()))
#Return who won
######TODO######
if m1.getHealth() <= 0:
return m2
else:
return m1
#----------------------------------------------------
if __name__=="__main__":
    # NOTE(review): the original comment claimed the generator is seeded
    # "randomly", but seed(0) makes every run reproduce the exact same
    # battles; call random.seed() with no argument for varied battles.
    random.seed(0)
    # Round 1: bears vs Lapras.
    first = bear()
    second = lapras()
    winner1 = monster_battle(first,second)
    print("{} wins".format(winner1))
    # Round 2: pile vs tree.
    third = pile()
    fourth = tree()
    winner2 = monster_battle(third, fourth)
    print("{} wins".format(winner2))
    # Championship round between the two winners.
    winner3 = monster_battle(winner1, winner2)
    print("{} wins".format(winner3))
|
{"/lab7.py": ["/room.py", "/maze.py"], "/homework2.py": ["/media.py"], "/homework1_main.py": ["/homework1.py"], "/lab8.py": ["/BST.py"], "/maze.py": ["/room.py"], "/lab4_monsters.py": ["/monster.py"], "/lab4.py": ["/lab4_monsters.py"]}
|
6,997
|
n0rrt/cs-172
|
refs/heads/master
|
/lab5.py
|
import time
import random
from Drawable import *
surface = pygame.display.set_mode((400,300))  # pygame is in scope via Drawable's star-import
drawables = []
GREEN = (0, 153, 51)
BLUE = (0, 0, 255)
WHITE = (255, 255, 255)
# Static scene: a green ground strip under a blue sky.
ground = Rectangle(0, 200, 400, 100, GREEN)
sky = Rectangle(0, 0, 400, 200, BLUE)
drawables.append(ground)
drawables.append(sky)
clock = pygame.time.Clock()
# Toggled by the space bar: while True, new snowflakes spawn each frame.
display_rect = False
while True:
    for event in pygame.event.get():
        # Quit on window close or the 'q' key.
        if (event.type == pygame.QUIT or (event.type == pygame.KEYDOWN and event.__dict__['key'] == pygame.K_q)):
            pygame.quit()
            exit()
        if event.type == pygame.KEYDOWN and event.__dict__['key'] == pygame.K_SPACE:
            display_rect = not display_rect
    if display_rect:
        # ~30% chance per frame of spawning a flake at a random x position.
        if (random.randrange(0, 10) in range(0, 3)):
            newSnow = Snowflake(random.randrange(0, 400), 0, WHITE)
            drawables.append(newSnow)
    for drawable in drawables:
        if (isinstance(drawable, Snowflake)):
            # Each flake falls one pixel per frame.
            # NOTE(review): flakes are never removed once off-screen, so
            # the drawables list grows without bound — confirm intended.
            drawable.setLoc([drawable.getX(), drawable.getY() + 1])
        drawable.draw(surface)
    pygame.display.update()
    clock.tick(30)  # cap at 30 FPS
|
{"/lab7.py": ["/room.py", "/maze.py"], "/homework2.py": ["/media.py"], "/homework1_main.py": ["/homework1.py"], "/lab8.py": ["/BST.py"], "/maze.py": ["/room.py"], "/lab4_monsters.py": ["/monster.py"], "/lab4.py": ["/lab4_monsters.py"]}
|
6,998
|
n0rrt/cs-172
|
refs/heads/master
|
/Homework4/Employee.py
|
#Tim Harris tlh339 - Employee class
class Employee:
    """Payroll record: employee id, hourly rate, hours worked and wages."""

    def __init__(self, idNum, rate, hours=0, wages=0):
        self.__idNum = idNum
        self.__hours = hours
        self.__rate = rate
        self.__wages = wages

    # --- accessors -------------------------------------------------
    def getId(self):
        return self.__idNum

    def getHours(self):
        return self.__hours

    def getRate(self):
        return self.__rate

    def getWage(self):
        return self.__wages

    # --- mutators --------------------------------------------------
    def setId(self, newId):
        self.__idNum = newId

    def setHours(self, newHours):
        self.__hours = newHours

    def setRate(self, newRate):
        self.__rate = newRate

    def setWage(self, newWage):
        self.__wages = newWage

    def __str__(self):
        # Space-separated id, hours, rate and wages.
        return "{} {} {} {}".format(self.__idNum, self.__hours, self.__rate, self.__wages)
|
{"/lab7.py": ["/room.py", "/maze.py"], "/homework2.py": ["/media.py"], "/homework1_main.py": ["/homework1.py"], "/lab8.py": ["/BST.py"], "/maze.py": ["/room.py"], "/lab4_monsters.py": ["/monster.py"], "/lab4.py": ["/lab4_monsters.py"]}
|
6,999
|
n0rrt/cs-172
|
refs/heads/master
|
/Homework4/Node.py
|
#Tim Harris tlh339 - Node class
class Node:
    """Singly-linked-list node holding a data payload and a next pointer."""

    def __init__(self, data, next = None):
        self.__data = data
        self.__next = next

    def getData(self):
        return self.__data

    def getNext(self):
        return self.__next

    def setData(self, newData):
        self.__data = newData

    def setNext(self, newNext):
        self.__next = newNext

    def __str__(self):
        # Bug fix: the original returned str(self.__data, self.__next);
        # two-argument str() is str(bytes, encoding) and raised TypeError
        # for ordinary payloads.  Render the data only.
        return str(self.__data)
|
{"/lab7.py": ["/room.py", "/maze.py"], "/homework2.py": ["/media.py"], "/homework1_main.py": ["/homework1.py"], "/lab8.py": ["/BST.py"], "/maze.py": ["/room.py"], "/lab4_monsters.py": ["/monster.py"], "/lab4.py": ["/lab4_monsters.py"]}
|
7,000
|
n0rrt/cs-172
|
refs/heads/master
|
/lab2.py
|
import datetime
class item():
    """A single purchase line (name, price, taxable flag).

    NOTE: the attributes are deliberately stored under the ``_receipt__``
    prefix so that receipt methods written as ``self.getItem(x).__name``
    (which Python mangles to ``_receipt__name`` inside the receipt class)
    resolve against these objects.
    """

    def __init__(self, _receipt__name, _receipt__price, _receipt__taxable):
        self._receipt__name = _receipt__name
        self._receipt__price = _receipt__price
        self._receipt__taxable = _receipt__taxable

    def __str__(self):
        return str(self._receipt__name)

    def getPrice(self):
        """Return the item's price.

        Bug fix: ``self.__price`` mangled to ``_item__price``, an attribute
        that is never set, so this always raised AttributeError; read the
        attribute __init__ actually stores.
        """
        return self._receipt__price

    def getTax(self, __tax_rate):
        """Return the price including tax at the given rate.

        Bug fix: the original read ``receipt.__tax_rate`` (mangled to the
        nonexistent ``receipt._item__tax_rate``); use the rate passed in.
        """
        return self._receipt__price + (self._receipt__price * __tax_rate)
class receipt():
    """Collects item objects and prints a formatted, dated receipt."""

    def __init__(self, __tax_rate = 0.07, __purchases = None):
        self.__tax_rate = __tax_rate
        # Bug fix: the original used a mutable default list, which was
        # shared by every receipt instance; create a fresh list instead.
        self.__purchases = [] if __purchases is None else __purchases

    def __str__(self):
        return str(self.__purchases)

    def addItem(self, item):
        """Append one item to the purchase list."""
        self.__purchases.append(item)

    def getItem(self, index):
        """Return the purchase at the given (int-coerced) index."""
        return self.__purchases[int(index)]

    def createReceipt(self, subtotal = 0, taxTotal = 0, total = 0):
        """Print a dated receipt: one line per item, then the totals.

        (A leftover debug print of the first item before the header was
        removed.)
        """
        print(("-----Receipt " + str(datetime.datetime.now())+"-----"))
        print()
        for x in range(len(self.__purchases)):
            # ``__name``/``__price``/``__taxable`` mangle to ``_receipt__*``,
            # which is exactly how the item class stores its attributes.
            print("{:_<20}{:_>20.2f}".format(self.getItem(x).__name, self.getItem(x).__price))
            subtotal += self.getItem(x).__price
            if (self.getItem(x).__taxable):
                taxTotal += self.getItem(x).__price * self.__tax_rate
        total = subtotal + taxTotal
        print()
        print("{:_<20}{:_>20.2f}".format("Sub Total", subtotal))
        print("{:_<20}{:_>20.2f}".format("Tax", taxTotal))
        print("{:_<20}{:_>20.2f}".format("Total", total))
print('Welcome to receipt creator')
newReceipt = receipt()
# Loop flag: stays True until the user answers 'no' to "add another item".
end = True
while (end):
    try:
        itemName = input('Enter Item Name: ')
        itemPrice = float(input('Enter Item Price: '))
        itemTaxable = input('Is the item taxable (yes/no): ')
        stop = input('Add another item (yes/no): ')
    except:
        # NOTE(review): this bare except also swallows KeyboardInterrupt,
        # and on the very first pass a failed price input leaves 'stop'
        # unbound for later iterations — confirm acceptable.
        print('Enter a valid item')
        continue
    # Only the literal answer 'yes' marks the item taxable.
    if (itemTaxable == 'yes'):
        itemTaxable = True
    else:
        itemTaxable = False
    newItem = item(itemName, itemPrice, itemTaxable)
    newReceipt.addItem(newItem)
    if (stop == 'no'):
        end = False
newReceipt.createReceipt()
|
{"/lab7.py": ["/room.py", "/maze.py"], "/homework2.py": ["/media.py"], "/homework1_main.py": ["/homework1.py"], "/lab8.py": ["/BST.py"], "/maze.py": ["/room.py"], "/lab4_monsters.py": ["/monster.py"], "/lab4.py": ["/lab4_monsters.py"]}
|
7,001
|
n0rrt/cs-172
|
refs/heads/master
|
/Homework3/homework3.py
|
from Drawable import *
from Ball import *
from Block import *
from Text import *
import pygame
import random
pygame.init()
surface = pygame.display.set_mode((400, 300))
# Named colour palette used throughout the game.
colors = {"blue": (51, 119, 255), "red": (255, 42, 0), "green": (34, 204, 0), "white" : (255, 255, 255), "black" : (0, 0, 0)}
drawables = []
blocks = []
rects = []
# The launchable ball, one fixed target block, and the score readout.
ball = Ball((30,200), True, 15, colors["blue"])
initBlock = Block((300, 200), True, 15, colors["red"])
text = Text((0, 0), True, colors["green"])
# Ball velocity components, set later from the mouse drag vector.
yv = 0
xv = 0
drawables.append(ball)
drawables.append(initBlock)
drawables.append(text)
# Seed the collision list with an empty rect so collidelist has a target
# on the first pass (a zero-size rect collides with nothing).
rects.append(pygame.Rect(0,0,0,0))
# Scatter 6 mutually non-overlapping target blocks along the ground.
while len(blocks) < 6:
    newblock = Block((random.randrange(200, 300), 200), True, 15, colors["red"])
    newrect = newblock.get_rect()
    if newrect.collidelist(rects) == -1:
        blocks.append(newblock)
        rects.append(newrect)
def intersect(rect1, rect2):
    """Axis-aligned bounding-box overlap test for rect-like objects.

    Strict inequalities: rectangles that merely touch along an edge do
    not count as intersecting.
    """
    overlap_x = rect1.x < rect2.x + rect2.width and rect2.x < rect1.x + rect1.width
    overlap_y = rect1.y < rect2.y + rect2.height and rect2.y < rect1.y + rect1.height
    return overlap_x and overlap_y
clock = pygame.time.Clock()
score = 0
# Simple physics constants: timestep, gravity, restitution, friction.
dt = 0.1
g = 6.67
R = 0.7
eta = 0.5
# Ball position tracked as floats; the drawable is updated from these.
y = ball.getPos()[1]
x = ball.getPos()[0]
while True:
    surface.fill(colors["white"])
    pygame.draw.line(surface, colors["black"], (400, 215), (0, 215))
    text.draw(surface, "score: " + str(score))
    ball.draw(surface)
    for event in pygame.event.get():
        # Quit on window close or the 'q' key.
        if (event.type == pygame.QUIT or (event.type == pygame.KEYDOWN and event.__dict__['key'] == pygame.K_q)):
            pygame.quit()
            exit()
        # Drag-to-launch: velocity is the press-to-release mouse vector.
        if event.type == pygame.MOUSEBUTTONDOWN:
            initX = pygame.mouse.get_pos()[0]
            initY = pygame.mouse.get_pos()[1]
        if event.type == pygame.MOUSEBUTTONUP:
            # NOTE(review): if a button-up ever arrives without a prior
            # button-down, initX/initY are unbound and this raises
            # NameError — confirm pygame always delivers the pair in order.
            finX = pygame.mouse.get_pos()[0]
            finY = pygame.mouse.get_pos()[1]
            xv = finX - initX
            yv = -1 * (finY - initY)
    if abs(yv) > 0.0001:
        # Integrate motion while the ball still has vertical speed.
        x += (dt * xv)
        y -= (dt * yv)
        ball.setPos(int(x), int(y))
        if y > 200:
            # Bounce off the ground with energy loss.
            yv = -1 * R * yv
            xv = eta * xv
        if x > 300:
            # Bounce back from the right-hand region.
            xv = -1 * R * xv
        else:
            # NOTE(review): gravity is applied only in the else of the
            # x > 300 test, i.e. never while the ball is past x=300 —
            # confirm this is the intended physics.
            yv = yv - (g * dt)
    ball.draw(surface)
    #pygame.draw.rect(surface, colors["black"], ball.get_rect())
    for block in blocks:
        # NOTE(review): a hit block is only hidden, not removed, so the
        # score keeps incrementing every frame the ball overlaps it —
        # confirm intended.
        if intersect(ball.get_rect(), block.get_rect()):
            block.setVis(False)
            score += 1
        block.draw(surface)
    # pygame.draw.rect(surface, colors["green"], rects[z])
    pygame.display.update()
    clock.tick(30)
|
{"/lab7.py": ["/room.py", "/maze.py"], "/homework2.py": ["/media.py"], "/homework1_main.py": ["/homework1.py"], "/lab8.py": ["/BST.py"], "/maze.py": ["/room.py"], "/lab4_monsters.py": ["/monster.py"], "/lab4.py": ["/lab4_monsters.py"]}
|
7,002
|
n0rrt/cs-172
|
refs/heads/master
|
/lab6.py
|
#Interface Class for a Stack
#Only allows access to the
#Stack commands of the built in list
class Stack:
    """Thin LIFO wrapper around a Python list, exposing only stack ops."""

    def __init__(self):
        # Backing storage; the top of the stack is the end of the list.
        self.__items = []

    def __str__(self):
        return str(self.__items)

    def push(self, x):
        """Place x on top of the stack."""
        self.__items.append(x)

    def pop(self):
        """Remove and return the top element."""
        return self.__items.pop()

    def top(self):
        """Return the top element without removing it."""
        return self.__items[-1]
#postfix
def postfix(exp):
    """Evaluate a space-separated postfix expression; return a float.

    Bug fixes vs. the original:
    * subtraction popped the operands in the wrong order ("5 3 -" gave
      -2.0 instead of 2.0; division was already handled correctly);
    * a bare-operand expression like "7" raised UnboundLocalError because
      nothing was ever computed — the result is now taken from the stack;
    * the bare ``except: pass`` is narrowed to ValueError so only
      non-numeric stray tokens are skipped (preserving the old tolerant
      behaviour for malformed input).
    """
    stack = []
    operators = {'+', '-', '*', '/'}
    for token in exp.split(' '):
        if token in operators:
            # Top of stack is the right-hand operand.
            right = float(stack.pop())
            left = float(stack.pop())
            if token == '+':
                result = left + right
            elif token == '-':
                result = left - right
            elif token == '*':
                result = left * right
            else:
                result = left / right
            stack.append(result)
        else:
            try:
                stack.append(float(token))
            except ValueError:
                # Skip tokens that are neither numbers nor operators.
                pass
    return stack.pop()
#Main program
if __name__ == "__main__":
    # Read-evaluate-print loop: each line is a postfix expression; the
    # literal word 'exit' (any case) quits.
    print("Welcome to Postfix Calculator")
    print("Enter exit to quit.")
    while True:
        userInput = input("Enter expression:\n")
        if userInput.lower() == 'exit':
            break
        print(postfix(userInput))
|
{"/lab7.py": ["/room.py", "/maze.py"], "/homework2.py": ["/media.py"], "/homework1_main.py": ["/homework1.py"], "/lab8.py": ["/BST.py"], "/maze.py": ["/room.py"], "/lab4_monsters.py": ["/monster.py"], "/lab4.py": ["/lab4_monsters.py"]}
|
7,015
|
CSTARS/ckanext-ecosis
|
refs/heads/master
|
/ckanext/ecosis/lib/data_package_importer.py
|
from urllib.parse import urlparse
import urllib.request
import re, shutil, shortuuid, zipfile, json
from werkzeug.datastructures import FileStorage
from ckan.common import config
import ckan.logic as logic
from ckanext.ecosis.controller.resource import _process as resource_process
from os import path, makedirs, walk
from .aws import s3
'''
package.json file is required. Should contain:
{
title: "",
resources : {
"[filename]" : {
"layout" : "column|row",
"metadata" : Boolean,
"separator" : "empty (comma),tab or character"
}
}
}
'''
class DataPackageImporter():
    """
    Import a dataset from a S3 bucket or HTTP endpoint

    Workflow (see run()): download the package zip, unzip it, validate the
    package.json descriptor, then create the CKAN package plus one resource
    per entry in the descriptor's "resources" map.  All scratch files live
    under <ecosis.package_import.root>/<random id> and are removed when the
    import finishes, successfully or not.
    """
    # class-level defaults; the path-like ones are made absolute in __init__
    root_dir = ''
    uri = ''
    uri_parts = None
    id = ''
    zipfile = ''
    zipfile_name = 'package.zip'
    package_dir = 'package'
    package = {}
    # "resources" map from package.json; None when the descriptor omits it.
    # (Previously this attribute was never set in that case, which made
    # create() raise AttributeError on `self.resources`.)
    resources = None

    def __init__(self, uri):
        self.uri = uri
        # random id keeps concurrent imports from clobbering each other
        self.id = shortuuid.ShortUUID().random(length=8)
        # todo, make this better
        self.root_dir = path.join(config.get('ecosis.package_import.root'), self.id)
        self.package_dir = path.join(self.root_dir, self.package_dir)

    def run(self, context):
        """
        Run the full import and return the newly created CKAN package dict.

        Scratch files are removed on success AND failure; on failure the
        original exception propagates with its traceback intact.
        """
        self.context = context
        try:
            self.download()
            self.unzip()
            self.validate()
            self.create()
        finally:
            self.cleanup()
        return self.newPkg

    def download(self):
        """
        Download the uri, currently supports S3 or HTTP uri
        """
        makedirs(self.root_dir)
        self.uri_parts = urlparse(self.uri)
        self.zipfile = path.join(self.root_dir, self.zipfile_name)
        if self.uri_parts.scheme == 's3':
            self.downloadS3()
        elif re.match(r'^http(s)?', self.uri_parts.scheme):
            self.downloadHttp()

    def downloadS3(self):
        """
        Download file from S3 bucket
        """
        # uri is s3://<bucket>/<key>: netloc is the bucket, path the object key
        obj_key = re.sub(r'^/', '', self.uri_parts.path)
        print("Attempting S3 download: %s %s %s" % (self.uri_parts.netloc, obj_key, self.zipfile))
        s3.download_file(self.uri_parts.netloc, obj_key, self.zipfile)

    def downloadHttp(self):
        """
        Download file from http endpoint
        """
        urllib.request.urlretrieve(self.uri, self.zipfile)

    def unzip(self):
        """
        unzip package contents
        """
        makedirs(self.package_dir)
        with zipfile.ZipFile(self.zipfile, 'r') as zip_ref:
            zip_ref.extractall(self.package_dir)
        # check one level deep for package.json folder (zips often wrap
        # everything in a single top-level directory)
        if not self.package_file_exits('package.json', throw_error=False):
            for root, dirs, files in walk(self.package_dir):
                for dir in dirs:
                    new_pkg_dir = path.join(self.package_dir, dir)
                    if path.exists(path.join(new_pkg_dir, 'package.json')):
                        self.package_dir = new_pkg_dir
                        break
                break

    def validate(self):
        """
        Validate the package.json descriptor and normalize it for CKAN.

        Raises when package.json is missing, lacks a title or owner_org,
        or names an organization CKAN does not know.
        """
        self.package_file_exits('package.json')
        # `with` closes the descriptor promptly (the old code leaked the handle)
        with open(path.join(self.package_dir, 'package.json')) as f:
            self.package = json.load(f)
        # split the resource map off; it is handled separately in create()
        if self.package.get("resources") is not None:
            self.resources = self.package.get("resources")
            del self.package["resources"]
        if self.package.get('title') is None:
            raise Exception('No package title provided')
        # set name based on title
        self.package['name'] = self.package.get('title').strip().lower().replace(' ', '-')
        if self.package.get('owner_org') is None:
            raise Exception('No package owner_org provided')
        org = logic.get_action('organization_show')(self.context, {'id': self.package.get('owner_org')})
        if org is None:
            raise Exception('No organization provided')
        # set a default license id if none provided
        if not self.package.get('license_id'):
            self.package['license_id'] = 'cc-by'
        # if extras provided as dict, convert to the list of {key, value}
        # objects CKAN expects
        if self.package.get('extras') is not None:
            extras = self.package.get('extras')
            if type(extras) is dict:
                arr = []
                for key, value in extras.items():
                    arr.append({
                        'key' : key,
                        'value' : value
                    })
                self.package['extras'] = arr
        # plain-string tags become CKAN tag dicts
        if self.package.get('tags') is not None:
            tags = self.package.get('tags')
            for i in range(len(tags)):
                if type(tags[i]) is str:
                    tags[i] = {'display_name': tags[i], 'name': tags[i]}

    def create(self):
        """
        Create the CKAN package, then upload and process each declared resource.
        """
        # For debugging
        # try:
        #     logic.get_action('package_delete')(self.context, {'id': self.package.get('name')})
        # except Exception as err:
        #     print(err)
        #     pass
        # TODO: does this raise error on badness?
        self.newPkg = logic.get_action('package_create')(self.context, self.package)
        # TODO: list files
        # TODO: add all non-package.json files as resources
        # TODO: set all self.resources config using EcoSIS API
        # Reminder: any file not defined in self.resource should have `ignore` flag set
        if self.resources is not None:
            for filename, properties in self.resources.items():
                # `with` closes each upload handle (the old code leaked them)
                with open(path.join(self.package_dir, filename), mode='rb') as fp:
                    upload = FileStorage(fp)
                    resource = {
                        'package_id' : self.newPkg.get('id'),
                        'name' : filename,
                        'mimetype' : properties.get('mimetype'),
                        'upload': upload
                    }
                    resource_create = logic.get_action('resource_create')
                    resource = resource_create(self.context, resource)
                resource_process(resource['package_id'], None, resource['id'], None, properties)

    def package_file_exits(self, file, throw_error=True):
        """Return True if *file* exists in the package dir; raise (default) or return False otherwise."""
        if not path.exists(path.join(self.package_dir, file)):
            if throw_error:
                raise Exception('Missing package file: %s' % file)
            else:
                return False
        return True

    def cleanup(self, remove_pkg=False):
        """
        Remove all scratch files; also delete the CKAN package if the flag
        is set and the package has been created
        """
        # NOTE(review): self.package comes from package.json and may never
        # gain an 'id'; the created id lives on self.newPkg — confirm which
        # one this flag path is meant to use
        if remove_pkg == True and self.package.get('id') is not None:
            logic.get_action('package_delete')(self.context, {'id': self.package.get('id')})
        shutil.rmtree(self.root_dir)
|
{"/ckanext/ecosis/lib/data_package_importer.py": ["/ckanext/ecosis/controller/resource/__init__.py", "/ckanext/ecosis/lib/aws.py"], "/ckanext/ecosis/datastore/vocab/controlled.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/push/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/lookup.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py", "/ckanext/ecosis/controller/admin/upgrade.py"], "/ckanext/ecosis/controller/user/__init__.py": ["/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py"], "/ckanext/ecosis/datastore/files/excel.py": ["/ckanext/ecosis/datastore/files/__init__.py"], "/ckanext/ecosis/datastore/workspace/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/files/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/delete/__init__.py": ["/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/mapreduce/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/upgrade.py": ["/ckanext/ecosis/datastore/ckan/package.py", "/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/resource/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/delete/__init__.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/controller/package/doi.py", 
"/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/controller/organization/__init__.py": ["/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/spectra/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/user_data/paster.py": ["/ckanext/ecosis/user_data/model.py"], "/ckanext/ecosis/controller/package/doi.py": ["/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/plugin.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/controller/organization/__init__.py", "/ckanext/ecosis/controller/package/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/controller/__init__.py", "/ckanext/ecosis/user_data/paster.py"], "/ckanext/ecosis/controller/workspace/__init__.py": ["/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/datastore/query/workspace.py", "/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/datastore/files/utils.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/controller/package/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/lib/data_package_importer.py"], "/ckanext/ecosis/controller/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/workspace.py": 
["/ckanext/ecosis/datastore/ckan/__init__.py"]}
|
7,016
|
CSTARS/ckanext-ecosis
|
refs/heads/master
|
/ckanext/ecosis/datastore/vocab/controlled.py
|
import re
from . import utils
schema = None
# inject global dependencies
def init(schemaDef):
    """Store the EcoSIS schema definition for later vocabulary lookups."""
    global schema
    schema = schemaDef
# map a metadata key to its canonical EcoSIS schema name
def getEcoSISName(key):
    entry = schema.get(utils.flatten(key))
    if entry is not None:
        return entry.get('name')
    # not in the schema: strip the '.' and '$' characters instead
    return re.sub(r'(\.|\$)', '', key)
# enforce controlled vocab on spectra attributes
def enforce(spectra):
    """Normalize controlled-vocabulary attributes of *spectra* in place."""
    additions = {}
    removals = []
    # additions/removals are deferred until after the loop so the dict
    # size never changes while we iterate it
    for key, value in spectra.items():
        # skip the datapoints attribute and empty/missing values
        if key == 'datapoints' or value == '' or value == None:
            continue
        # look the attribute up in the EcoSIS schema by its flat
        # (lower case, no spaces) name
        item = schema.get(utils.flatten(key))
        if item == None:
            continue
        # only attributes of input type 'controlled' with a vocabulary apply.
        # NOTE(review): this compares against the *string* 'None', not the
        # None singleton, mirroring the original code — confirm the schema
        # really stores the literal string
        if item['input'] != 'controlled' or item.get('vocabulary') == 'None':
            continue
        lower = value.lower().strip()
        matched = False
        # normalize the value to the canonical vocabulary spelling
        for name in item.get('vocabulary'):
            if lower == name.lower():
                spectra[key] = name
                matched = True
                break
        if not matched:
            if item.get('allowOther') == True:
                # vocabulary allows free text: stash the raw value under the
                # "<name> Other" attribute and mark the controlled field 'Other'
                additions["%s Other" % item.get("name")] = value
                spectra[key] = 'Other'
            else:
                # value not allowed and no 'other' escape hatch: drop it
                removals.append(key)
    # apply the deferred mutations
    for key, value in additions.items():
        spectra[key] = value
    for key in removals:
        del spectra[key]
|
{"/ckanext/ecosis/lib/data_package_importer.py": ["/ckanext/ecosis/controller/resource/__init__.py", "/ckanext/ecosis/lib/aws.py"], "/ckanext/ecosis/datastore/vocab/controlled.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/push/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/lookup.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py", "/ckanext/ecosis/controller/admin/upgrade.py"], "/ckanext/ecosis/controller/user/__init__.py": ["/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py"], "/ckanext/ecosis/datastore/files/excel.py": ["/ckanext/ecosis/datastore/files/__init__.py"], "/ckanext/ecosis/datastore/workspace/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/files/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/delete/__init__.py": ["/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/mapreduce/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/upgrade.py": ["/ckanext/ecosis/datastore/ckan/package.py", "/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/resource/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/delete/__init__.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/controller/package/doi.py", 
"/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/controller/organization/__init__.py": ["/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/spectra/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/user_data/paster.py": ["/ckanext/ecosis/user_data/model.py"], "/ckanext/ecosis/controller/package/doi.py": ["/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/plugin.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/controller/organization/__init__.py", "/ckanext/ecosis/controller/package/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/controller/__init__.py", "/ckanext/ecosis/user_data/paster.py"], "/ckanext/ecosis/controller/workspace/__init__.py": ["/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/datastore/query/workspace.py", "/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/datastore/files/utils.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/controller/package/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/lib/data_package_importer.py"], "/ckanext/ecosis/controller/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/workspace.py": 
["/ckanext/ecosis/datastore/ckan/__init__.py"]}
|
7,017
|
CSTARS/ckanext-ecosis
|
refs/heads/master
|
/ckanext/ecosis/lib/auth.py
|
from ckan.logic import check_access
from ckan.common import c
import ckan.model as model
'''
Helper methods for verifying user login state and access
Since we override a lot of endpoints as well as add our own in
EcoSIS, these are used a lot.
'''
# Does the requesting user have access to the package
def hasAccess(package_id):
    # delegate to CKAN's auth system: check the 'package_update'
    # permission for the current request user
    context = {
        'model': model,
        'session': model.Session,
        'api_version': 3,
        'for_edit': True,
        'user': c.user or c.author,
        'auth_user_obj': c.userobj
    }
    check_access('package_update', context, {"id": package_id})
# Does the requesting user have access to the organization
def hasOrgAccess(package_id):
    # NOTE(review): despite the parameter name, this id is passed to the
    # 'organization_update' check, so callers appear to supply an
    # organization id — confirm against call sites
    context = {
        'model': model,
        'session': model.Session,
        'api_version': 3,
        'for_edit': True,
        'user': c.user or c.author,
        'auth_user_obj': c.userobj
    }
    check_access('organization_update', context, {"id": package_id})
# is the user a site admin
def isAdmin():
    """Return True when the logged-in user is a CKAN sysadmin."""
    # `is None` rather than `== None` (PEP 8); no user object means not admin
    if c.userobj is None:
        return False
    # coerce the sysadmin flag so the function always returns a bool
    return bool(c.userobj.sysadmin)
|
{"/ckanext/ecosis/lib/data_package_importer.py": ["/ckanext/ecosis/controller/resource/__init__.py", "/ckanext/ecosis/lib/aws.py"], "/ckanext/ecosis/datastore/vocab/controlled.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/push/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/lookup.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py", "/ckanext/ecosis/controller/admin/upgrade.py"], "/ckanext/ecosis/controller/user/__init__.py": ["/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py"], "/ckanext/ecosis/datastore/files/excel.py": ["/ckanext/ecosis/datastore/files/__init__.py"], "/ckanext/ecosis/datastore/workspace/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/files/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/delete/__init__.py": ["/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/mapreduce/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/upgrade.py": ["/ckanext/ecosis/datastore/ckan/package.py", "/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/resource/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/delete/__init__.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/controller/package/doi.py", 
"/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/controller/organization/__init__.py": ["/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/spectra/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/user_data/paster.py": ["/ckanext/ecosis/user_data/model.py"], "/ckanext/ecosis/controller/package/doi.py": ["/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/plugin.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/controller/organization/__init__.py", "/ckanext/ecosis/controller/package/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/controller/__init__.py", "/ckanext/ecosis/user_data/paster.py"], "/ckanext/ecosis/controller/workspace/__init__.py": ["/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/datastore/query/workspace.py", "/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/datastore/files/utils.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/controller/package/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/lib/data_package_importer.py"], "/ckanext/ecosis/controller/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/workspace.py": 
["/ckanext/ecosis/datastore/ckan/__init__.py"]}
|
7,018
|
CSTARS/ckanext-ecosis
|
refs/heads/master
|
/ckanext/ecosis/datastore/vocab/utils.py
|
import re
# a 'flat' name is when the name is in lower case with no spaces
def flatten(name):
    # lower-case first, then strip every whitespace character
    return re.sub(r'\s', '', name.lower())
|
{"/ckanext/ecosis/lib/data_package_importer.py": ["/ckanext/ecosis/controller/resource/__init__.py", "/ckanext/ecosis/lib/aws.py"], "/ckanext/ecosis/datastore/vocab/controlled.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/push/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/lookup.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py", "/ckanext/ecosis/controller/admin/upgrade.py"], "/ckanext/ecosis/controller/user/__init__.py": ["/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py"], "/ckanext/ecosis/datastore/files/excel.py": ["/ckanext/ecosis/datastore/files/__init__.py"], "/ckanext/ecosis/datastore/workspace/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/files/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/delete/__init__.py": ["/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/mapreduce/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/upgrade.py": ["/ckanext/ecosis/datastore/ckan/package.py", "/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/resource/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/delete/__init__.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/controller/package/doi.py", 
"/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/controller/organization/__init__.py": ["/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/spectra/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/user_data/paster.py": ["/ckanext/ecosis/user_data/model.py"], "/ckanext/ecosis/controller/package/doi.py": ["/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/plugin.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/controller/organization/__init__.py", "/ckanext/ecosis/controller/package/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/controller/__init__.py", "/ckanext/ecosis/user_data/paster.py"], "/ckanext/ecosis/controller/workspace/__init__.py": ["/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/datastore/query/workspace.py", "/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/datastore/files/utils.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/controller/package/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/lib/data_package_importer.py"], "/ckanext/ecosis/controller/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/workspace.py": 
["/ckanext/ecosis/datastore/ckan/__init__.py"]}
|
7,019
|
CSTARS/ckanext-ecosis
|
refs/heads/master
|
/ckanext/ecosis/datastore/vocab/__init__.py
|
from . import controlled, usda, top, gcmd
# inject global dependencies
def init(schema, collections):
    """Fan the shared schema/collection handles out to every vocab submodule."""
    controlled.init(schema)
    for module in (usda, top, gcmd):
        module.init(collections)
|
{"/ckanext/ecosis/lib/data_package_importer.py": ["/ckanext/ecosis/controller/resource/__init__.py", "/ckanext/ecosis/lib/aws.py"], "/ckanext/ecosis/datastore/vocab/controlled.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/push/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/lookup.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py", "/ckanext/ecosis/controller/admin/upgrade.py"], "/ckanext/ecosis/controller/user/__init__.py": ["/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py"], "/ckanext/ecosis/datastore/files/excel.py": ["/ckanext/ecosis/datastore/files/__init__.py"], "/ckanext/ecosis/datastore/workspace/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/files/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/delete/__init__.py": ["/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/mapreduce/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/upgrade.py": ["/ckanext/ecosis/datastore/ckan/package.py", "/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/resource/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/delete/__init__.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/controller/package/doi.py", 
"/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/controller/organization/__init__.py": ["/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/spectra/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/user_data/paster.py": ["/ckanext/ecosis/user_data/model.py"], "/ckanext/ecosis/controller/package/doi.py": ["/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/plugin.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/controller/organization/__init__.py", "/ckanext/ecosis/controller/package/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/controller/__init__.py", "/ckanext/ecosis/user_data/paster.py"], "/ckanext/ecosis/controller/workspace/__init__.py": ["/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/datastore/query/workspace.py", "/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/datastore/files/utils.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/controller/package/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/lib/data_package_importer.py"], "/ckanext/ecosis/controller/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/workspace.py": 
["/ckanext/ecosis/datastore/ckan/__init__.py"]}
|
7,020
|
CSTARS/ckanext-ecosis
|
refs/heads/master
|
/ckanext/ecosis/datastore/push/__init__.py
|
from multiprocessing import Process, Queue
from ckan.lib.email_notifications import send_notification
from ckan.common import config
import ckan.logic as logic
from ckan.lib.base import c, model
import traceback
from ckanext.ecosis.datastore import mapreduce
from ckanext.ecosis.datastore import query
from ckanext.ecosis.datastore import delete as deleteUtils
from ckanext.ecosis.datastore.mapreduce.lookup import update as updateLookup
from ckanext.ecosis.lib.utils import getPackageExtra, setPackageExtra
spectraCollection = None
'''
This module handles 'pushing' or 'publishing' a dataset from the dataset administration (CKAN)
to search (ecosis.org)
'''
# inject global dependencies
def init(collections):
    """Keep a handle to the public search spectra collection."""
    global spectraCollection
    spectraCollection = collections.get('search_spectra')
# this will run the main worker in a separate process so we can send a HTTP response, then email
# user when push is complete
class Push:
    """Publish a CKAN dataset from the admin instance to public EcoSIS search."""
    def run(self, ckanPackage, emailOnComplete=False, emailAddress="", username=""):
        """Validate, re-save and asynchronously index *ckanPackage*.

        Returns a status dict immediately; the spectra copy itself happens
        in a child process running sub_run().
        """
        # first clean out any previously indexed data for this package
        deleteUtils.cleanFromSearch(ckanPackage.get('id'))
        # we don't want to push private datasets to public search
        if ckanPackage.get('private') == True:
            raise Exception('This dataset is private')
        # set the citation field, then persist it back to CKAN
        setCitation(ckanPackage)
        context = {'model': model, 'user': c.user}
        ckanPackage = logic.get_action('package_update')(context, ckanPackage)
        # start the worker (a multiprocessing Process; the Queue is created
        # and passed but never consumed by sub_run)
        q = Queue()
        p = Process(target=sub_run, args=(q, ckanPackage, emailOnComplete, emailAddress, username))
        p.start()
        return {'success': True, 'emailing': emailOnComplete, 'email': emailAddress}
# run the new push (executed inside the child process started by Push.run;
# the q argument is accepted but never used)
def sub_run(q, ckanPackage, emailOnComplete, emailAddress, username):
    """Copy every valid spectra of the package into the public search
    collection, compute its geographic bounding box, mapreduce the package
    metadata, and optionally email the user on success or failure."""
    try:
        # calculate bounding box from spectra (lat/lng was provided);
        # +/-9999 sentinels mean "no coordinate seen yet"
        total = query.total(ckanPackage.get('id')).get('total')
        bbox = {
            "maxlat" : -9999,
            "minlat" : 9999,
            "maxlng" : -9999,
            "minlng" : 9999,
            "use" : False
        }
        # grab each spectra and insert into public EcoSIS search
        for i in range(0, total):
            spectra = query.get(ckanPackage.get('id'), index=i, must_be_valid=True, clean_wavelengths=False)
            # skip spectra without any datapoints
            if not 'datapoints' in spectra:
                continue
            if len(spectra['datapoints']) == 0:
                continue
            # TODO: make sure species attributes are lower case
            # update search
            spectraCollection.insert(spectra)
            # update the bounding box if the spectra has a lat/lng
            updateBbox(spectra, bbox)
        # see if we found a bounding box from the spectra (all four
        # sentinels must have been replaced)
        if bbox["maxlat"] != -9999 and bbox["maxlng"] != -9999 and bbox["minlng"] != 9999 and bbox["minlat"] != -9999:
            bbox["use"] = True
            # make sure all of the spectra points were not in the same position:
            # a zero-area box breaks mongo's geojson index, so pad it slightly
            if bbox["maxlat"] == bbox["minlat"]:
                bbox["maxlat"] += 0.00001
            if bbox["maxlng"] == bbox["minlng"]:
                bbox["maxlng"] += 0.00001
        # mapreduce the dataset package data
        mapreduce.mapreducePackage(ckanPackage, bbox)
        # alert (email user) or quit
        if not emailOnComplete:
            updateLookup()
            return
        try:
            send_notification(
                {
                    "email" : emailAddress,
                    "display_name" : username
                },
                {
                    "subject" : "EcoSIS Push Successful",
                    "body" : ("Your dataset '%s' has been pushed to EcoSIS Search. "
                              "You can view your dataset here: %s#result/%s"
                              "\n\n-The EcoSIS Team") %
                             (ckanPackage.get('title'), config.get('ecosis.search_url'), ckanPackage.get("id"))
                }
            )
        except Exception as e:
            # email failure is non-fatal; the push itself succeeded
            print("Failed to send email: %s" % emailAddress)
        updateLookup()
    except Exception as e:
        try:
            print('ERROR pushing to search: %s' % ckanPackage.get('id'))
            # if badness, remove from search so no partial dataset is visible
            deleteUtils.cleanFromSearch(ckanPackage.get('id'))
            print(e)
            traceback.print_exc()
            if not emailOnComplete:
                return
            send_notification(
                {
                    "email" : emailAddress,
                    "display_name" : username
                },
                {
                    "subject" : "EcoSIS Push Failed",
                    "body" : ("Your recent push to search for '%s' has failed. "
                              "You can try again or contact help@ecosis.org. "
                              "We apologize for the inconvenience\n\n-The EcoSIS Team") % (ckanPackage["title"])
                }
            )
        except Exception as e:
            # last resort: never let the worker process die with an
            # unreported exception
            print(e)
            traceback.print_exc()
# update bounding box built from spectra given either a lat/lng coordinate or geojson
def updateBbox(spectra, bbox):
    """Grow *bbox* in place so it covers this spectra's GeoJSON Point.

    Spectra lacking a well-formed ``ecosis.geojson`` Point (with at least
    [lng, lat] coordinates) are silently ignored.
    """
    if 'ecosis' not in spectra:
        return
    if 'geojson' not in spectra['ecosis']:
        return
    geojson = spectra['ecosis']['geojson']
    if geojson.get('type') != 'Point':
        return
    if 'coordinates' not in geojson:
        return
    coords = geojson['coordinates']
    if len(coords) < 2:
        return
    # GeoJSON order is [lng, lat]
    lng, lat = coords[0], coords[1]
    # widen each bound only when the point falls outside it
    bbox['maxlat'] = max(bbox['maxlat'], lat)
    bbox['minlat'] = min(bbox['minlat'], lat)
    bbox['maxlng'] = max(bbox['maxlng'], lng)
    bbox['minlng'] = min(bbox['minlng'], lng)
# TODO: this needs to stay in sync with the Importer UI :/
# Auto build the citiation field when data is pushed
def setCitation(pkg):
    """Auto-build the 'Citation' extra field for a package.

    Assembles author(s), year, title, a fixed EcoSIS availability sentence
    and a DOI (EcoSIS DOI, falling back to the user-supplied Citation DOI)
    into a period-joined citation string, then stores it via
    setPackageExtra. Mirrors the Importer UI logic.
    """
    parts = []
    title = pkg.get('title')
    authors = pkg.get('author')
    year = getPackageExtra('Year', pkg)
    doi = getPackageExtra('EcoSIS DOI', pkg)
    # fall back to the user-supplied citation DOI when no EcoSIS DOI exists
    if doi is None or doi == '':
        doi = getPackageExtra('Citation DOI', pkg)

    if authors is not None:
        # strip non-ascii characters, then split the comma-separated list
        names = authors.encode('ascii', 'ignore').decode("utf-8").split(',')
        # authors = map(unicode.strip, authors)
        count = len(names)
        if count == 1:
            parts.append(names[0])
        elif count == 2:
            parts.append(' and '.join(names))
        elif count > 2:
            final = names.pop()
            parts.append('%s and %s' % (', '.join(names), final))

    if year is not None:
        parts.append(year)
    if title is not None:
        parts.append(title)

    parts.append('Data set. Available on-line [http://ecosis.org] from the Ecological Spectral Information System (EcoSIS)')
    if doi is not None:
        parts.append(doi)

    setPackageExtra('Citation', '. '.join(parts), pkg)
|
{"/ckanext/ecosis/lib/data_package_importer.py": ["/ckanext/ecosis/controller/resource/__init__.py", "/ckanext/ecosis/lib/aws.py"], "/ckanext/ecosis/datastore/vocab/controlled.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/push/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/lookup.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py", "/ckanext/ecosis/controller/admin/upgrade.py"], "/ckanext/ecosis/controller/user/__init__.py": ["/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py"], "/ckanext/ecosis/datastore/files/excel.py": ["/ckanext/ecosis/datastore/files/__init__.py"], "/ckanext/ecosis/datastore/workspace/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/files/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/delete/__init__.py": ["/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/mapreduce/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/upgrade.py": ["/ckanext/ecosis/datastore/ckan/package.py", "/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/resource/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/delete/__init__.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/controller/package/doi.py", 
"/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/controller/organization/__init__.py": ["/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/spectra/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/user_data/paster.py": ["/ckanext/ecosis/user_data/model.py"], "/ckanext/ecosis/controller/package/doi.py": ["/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/plugin.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/controller/organization/__init__.py", "/ckanext/ecosis/controller/package/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/controller/__init__.py", "/ckanext/ecosis/user_data/paster.py"], "/ckanext/ecosis/controller/workspace/__init__.py": ["/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/datastore/query/workspace.py", "/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/datastore/files/utils.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/controller/package/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/lib/data_package_importer.py"], "/ckanext/ecosis/controller/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/workspace.py": 
["/ckanext/ecosis/datastore/ckan/__init__.py"]}
|
7,021
|
CSTARS/ckanext-ecosis
|
refs/heads/master
|
/ckanext/ecosis/lib/utils.py
|
import urllib, json, datetime, re
import ckan.lib.helpers as h
from flask import make_response
import traceback, sys
# replicating default param parsing in ckan... really python... really...
# TODO: see if this is really needed
def get_request_data(request):
    """Extract and JSON-decode the body of a CKAN/WebOb-style POST request.

    Replicates CKAN's own parameter parsing: when the whole JSON payload was
    swallowed into a single form key (its mapped value is u'1' or u''), that
    key itself is the data; otherwise the raw request body is URL-unquoted.

    Raises ValueError when the POST data cannot be located or is not valid
    JSON.
    """
    try:
        keys = request.POST.keys()
        # Parsing breaks if there is a = in the value, so for now
        # we will check if the data is actually all in a single key
        if keys and request.POST[keys[0]] in [u'1', u'']:
            request_data = keys[0]
        else:
            # NOTE(review): urllib.unquote is Python 2 only — this module
            # appears to target py2; confirm before porting to py3
            request_data = urllib.unquote(request.body)
    except Exception as inst:
        msg = "Could not find the POST data: %r : %s" % \
            (request.POST, inst)
        raise ValueError(msg)
    try:
        # h.json is CKAN's bundled json helper; 'encoding' kwarg is the
        # legacy simplejson signature
        request_data = h.json.loads(request_data, encoding='utf8')
    except ValueError as e:
        raise ValueError('Error decoding JSON data. '
                         'Error: %r '
                         'JSON data extracted from the request: %r' %
                         (e, request_data))
    return request_data
# We don't want to send HTML templated errors when things go wrong (CKAN default).
# Instead we catch errors and respond with this.
def handleError(e):
    """Serialize an exception as a JSON HTTP 500 response.

    CKAN's default is an HTML-templated error page; EcoSIS clients expect
    JSON, so exceptions are caught upstream and rendered through this helper.
    Prefers e.message, then e.error_summary, then str(e) for the message.
    """
    headers = {"Content-Type": "application/json"}
    stack = ""
    try:
        stack = traceback.format_exc()
    # fix: the original `except e:` treats the exception *instance* as a
    # class pattern, which is itself a TypeError; a missing traceback is
    # best-effort and should never mask the real error
    except Exception:
        pass
    if hasattr(e, 'message') and e.message is not None:
        return make_response((json.dumps({
            "error": True,
            "message": "%s:%s" % (type(e).__name__, e.message),
            "stack" : stack
        }), 500, headers))
    if hasattr(e, 'error_summary') and e.error_summary is not None:
        return make_response((json.dumps({
            "error": True,
            "message": "%s:%s" % (type(e).__name__, e.error_summary),
            # fix: the original sent the literal string "stack" here
            "stack" : stack
        }), 500, headers))
    return make_response(json.dumps({
        "error": True,
        "message": "%s:%s" % (type(e).__name__, str(e)),
        "stack" : stack
    }), 500, headers)
# helper for sending json, mostly adds ability to encode dates in ISO format.
def jsonStringify(obj, formatted=False):
    """JSON-encode *obj*, ISO-formatting any date/datetime values.

    When *formatted* is True the output is pretty-printed with a 4-space
    indent; otherwise it is compact.
    """
    if formatted:
        return json.dumps(obj, default=jsondefault, indent=4, separators=(',', ': '))
    return json.dumps(obj, default=jsondefault)


def jsondefault(obj):
    """json.dumps fallback: dates/datetimes become ISO strings, anything
    else is serialized as null."""
    if isinstance(obj, (datetime.datetime, datetime.date)):
        return obj.isoformat()
    return None
def setPackageExtra(attr, value, pkg):
    """Set (or add) the CKAN 'extras' entry named *attr* on *pkg* in place.

    Creates the extras list when missing; overwrites an existing entry's
    value when the key is already present.
    """
    if pkg.get('extras') is None:
        pkg['extras'] = []
    extras = pkg['extras']
    # overwrite in place when the key already exists
    for entry in extras:
        if entry.get('key') == attr:
            entry['value'] = value
            return
    extras.append({'key': attr, 'value': value})
def getPackageExtra(attr, pkg):
    """Return the value of the CKAN 'extras' entry named *attr*, or None
    when the key (or the extras list itself) is absent."""
    extras = pkg.get('extras')
    if extras is None:
        return None
    return next((entry.get('value') for entry in extras if entry.get('key') == attr), None)
# a 'flat' name is when the name is in lower case with no spaces
def flatten(name):
    """Return *name* lower-cased with ALL whitespace removed (a 'flat' name)."""
    return "".join(name.split()).lower()
|
{"/ckanext/ecosis/lib/data_package_importer.py": ["/ckanext/ecosis/controller/resource/__init__.py", "/ckanext/ecosis/lib/aws.py"], "/ckanext/ecosis/datastore/vocab/controlled.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/push/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/lookup.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py", "/ckanext/ecosis/controller/admin/upgrade.py"], "/ckanext/ecosis/controller/user/__init__.py": ["/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py"], "/ckanext/ecosis/datastore/files/excel.py": ["/ckanext/ecosis/datastore/files/__init__.py"], "/ckanext/ecosis/datastore/workspace/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/files/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/delete/__init__.py": ["/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/mapreduce/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/upgrade.py": ["/ckanext/ecosis/datastore/ckan/package.py", "/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/resource/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/delete/__init__.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/controller/package/doi.py", 
"/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/controller/organization/__init__.py": ["/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/spectra/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/user_data/paster.py": ["/ckanext/ecosis/user_data/model.py"], "/ckanext/ecosis/controller/package/doi.py": ["/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/plugin.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/controller/organization/__init__.py", "/ckanext/ecosis/controller/package/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/controller/__init__.py", "/ckanext/ecosis/user_data/paster.py"], "/ckanext/ecosis/controller/workspace/__init__.py": ["/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/datastore/query/workspace.py", "/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/datastore/files/utils.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/controller/package/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/lib/data_package_importer.py"], "/ckanext/ecosis/controller/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/workspace.py": 
["/ckanext/ecosis/datastore/ckan/__init__.py"]}
|
7,022
|
CSTARS/ckanext-ecosis
|
refs/heads/master
|
/ckanext/ecosis/datastore/ckan/__init__.py
|
from . import package as ckanPackageQuery
from . import resource as ckanResourceQuery
'''
This module clones so CKAN functionality, allow access of CKAN-like objects
w/o the requirement of authenticated HTTP requests. This helps with seperation of concerns
'''
# inject global dependencies
def init(pgConn, schema):
    """Wire the shared Postgres connection (and CKAN schema name) into the
    package/resource query submodules at startup."""
    ckanPackageQuery.init(pgConn, schema)
    ckanResourceQuery.init(pgConn)
|
{"/ckanext/ecosis/lib/data_package_importer.py": ["/ckanext/ecosis/controller/resource/__init__.py", "/ckanext/ecosis/lib/aws.py"], "/ckanext/ecosis/datastore/vocab/controlled.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/push/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/lookup.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py", "/ckanext/ecosis/controller/admin/upgrade.py"], "/ckanext/ecosis/controller/user/__init__.py": ["/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py"], "/ckanext/ecosis/datastore/files/excel.py": ["/ckanext/ecosis/datastore/files/__init__.py"], "/ckanext/ecosis/datastore/workspace/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/files/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/delete/__init__.py": ["/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/mapreduce/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/upgrade.py": ["/ckanext/ecosis/datastore/ckan/package.py", "/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/resource/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/delete/__init__.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/controller/package/doi.py", 
"/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/controller/organization/__init__.py": ["/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/spectra/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/user_data/paster.py": ["/ckanext/ecosis/user_data/model.py"], "/ckanext/ecosis/controller/package/doi.py": ["/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/plugin.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/controller/organization/__init__.py", "/ckanext/ecosis/controller/package/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/controller/__init__.py", "/ckanext/ecosis/user_data/paster.py"], "/ckanext/ecosis/controller/workspace/__init__.py": ["/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/datastore/query/workspace.py", "/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/datastore/files/utils.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/controller/package/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/lib/data_package_importer.py"], "/ckanext/ecosis/controller/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/workspace.py": 
["/ckanext/ecosis/datastore/ckan/__init__.py"]}
|
7,023
|
CSTARS/ckanext-ecosis
|
refs/heads/master
|
/ckanext/ecosis/datastore/ckan/utils.py
|
# mostly for debug
# was having issue with dropped db connection
def verifyConnection(conn):
    """Debug helper: log the Postgres connection state and reset it when
    closed (psycopg2 reports ``closed == 0`` for an open connection)."""
    print("Connect status:%s, closed:%s" % (conn.status, conn.closed))
    if conn.closed == 0:
        print("Postgres connection is OPEN")
        return
    print("Postgres connection is CLOSED, reseting...")
    conn.reset()
|
{"/ckanext/ecosis/lib/data_package_importer.py": ["/ckanext/ecosis/controller/resource/__init__.py", "/ckanext/ecosis/lib/aws.py"], "/ckanext/ecosis/datastore/vocab/controlled.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/push/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/lookup.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py", "/ckanext/ecosis/controller/admin/upgrade.py"], "/ckanext/ecosis/controller/user/__init__.py": ["/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py"], "/ckanext/ecosis/datastore/files/excel.py": ["/ckanext/ecosis/datastore/files/__init__.py"], "/ckanext/ecosis/datastore/workspace/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/files/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/delete/__init__.py": ["/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/mapreduce/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/upgrade.py": ["/ckanext/ecosis/datastore/ckan/package.py", "/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/resource/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/delete/__init__.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/controller/package/doi.py", 
"/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/controller/organization/__init__.py": ["/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/spectra/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/user_data/paster.py": ["/ckanext/ecosis/user_data/model.py"], "/ckanext/ecosis/controller/package/doi.py": ["/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/plugin.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/controller/organization/__init__.py", "/ckanext/ecosis/controller/package/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/controller/__init__.py", "/ckanext/ecosis/user_data/paster.py"], "/ckanext/ecosis/controller/workspace/__init__.py": ["/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/datastore/query/workspace.py", "/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/datastore/files/utils.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/controller/package/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/lib/data_package_importer.py"], "/ckanext/ecosis/controller/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/workspace.py": 
["/ckanext/ecosis/datastore/ckan/__init__.py"]}
|
7,024
|
CSTARS/ckanext-ecosis
|
refs/heads/master
|
/ckanext/ecosis/controller/admin/__init__.py
|
from ckan.lib.base import c, model
import ckan.logic as logic
import ckan.lib.uploader as uploader
import json, subprocess, os, urllib, re
from ckanext.ecosis.datastore import delete as deleteUtil
from ckanext.ecosis.datastore.mapreduce import mapreducePackage
from ckanext.ecosis.lib.utils import jsonStringify
from ckanext.ecosis.datastore.mongo import get_package_spectra_collection
from .upgrade import run as runUpgrade
from .upgrade import fixUnits as runFixUnits
from .upgrade import fixCitationText as runFixCitationText
# rebuild entire search index
def rebuildIndex(collections):
    """Admin only: wipe the search package collection, then re-mapreduce
    every CKAN package back into the search index.

    Returns a JSON string with the number of packages reindexed.
    Raises Exception('Nope.') for non-sysadmin users.
    """
    context = {'model': model, 'user': c.user}
    if not isAdmin():
        raise Exception('Nope.')
    # renamed from `list`, which shadowed the builtin
    pkgIds = logic.get_action('package_list')(context, {})
    # clear the current collection
    collections.get("search_package").remove({})
    for pkgId in pkgIds:
        context = {'model': model, 'user': c.user}
        # fix: the original passed {id: pkgId}, using the BUILTIN id()
        # function as the dict key, so package_show never received an 'id'
        ckanPackage = logic.get_action('package_show')(context, {'id': pkgId})
        # NOTE(review): "package_search" here vs the "search_package"
        # collection cleared above — confirm which collection name is correct
        mapreducePackage(ckanPackage, collections.get("search_spectra"), collections.get("package_search"))
    return json.dumps({'success': True, 'rebuildCount': len(pkgIds)})
# Remove all testing data flagged with _testing_
def cleanTests():
    """Admin only: delete every package flagged ``_testing_:true``.

    Each matching package is soft-deleted in CKAN, removed from the EcoSIS
    mongo store, and then its revisions are purged from the CKAN model
    (mirrors the purge logic in CKAN's own admin.py).

    Returns a JSON string listing removed packages and any purge errors.
    Raises Exception('Nope.') for non-sysadmin users.
    """
    context = {'model': model, 'user': c.user}
    # NOTE(review): `path` is never used in this function — confirm before removing
    path = os.path.dirname(__file__)
    if not isAdmin():
        raise Exception('Nope.')
    result = logic.get_action('package_search')(context, {'q' : '_testing_:true'})
    packages = []
    msgs = []
    for package in result.get('results'):
        packages.append({
            'id': package.get('id'),
            'name' : package.get('name')
        })
        # soft-delete in CKAN, then remove the EcoSIS-side data
        logic.get_action('package_delete')(context, {'id' : package.get('id')})
        deleteUtil.package(package.get('id'))
        # from ckan's admin.py, run a 'purge' on the dataset
        pkgs = model.Session.query(model.Package).filter_by(id=package.get('id'))
        for pkg in pkgs:
            revisions = [x[0] for x in pkg.all_related_revisions]
            revs_to_purge = []
            revs_to_purge += [r.id for r in revisions]
        model.Session.remove()
        # purge each collected revision; failures are reported, not fatal
        for id in revs_to_purge:
            revision = model.Session.query(model.Revision).get(id)
            try:
                model.repo.purge_revision(revision, leave_record=False)
            except Exception as inst:
                msgs.append('Problem purging revision %s: %s' % (id, inst))
    return json.dumps({
        "packages" : packages,
        "messages" : msgs,
        "success" : True
    })
# dump everything (data)!
# this will not work on the master branch
def clean(collections):
    """Admin only: delete ALL packages, their uploaded resource files on
    disk, and every EcoSIS mongo collection.

    Refuses to run when this checkout is on the git master branch, as a
    safety guard against wiping production data.

    Returns a JSON summary string. Raises Exception('Nope.') for
    non-sysadmin users.
    """
    context = {'model': model, 'user': c.user}
    path = os.path.dirname(__file__)
    if not isAdmin():
        raise Exception('Nope.')
    # determine the active git branch of this checkout ('*' marks it)
    cmd = "git branch"
    process = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE, cwd=path)
    branches = process.communicate()[0].decode("utf-8").split("\n")
    for branch in branches:
        if "*" in branch:
            branch = branch.replace("* ","")
            if branch == 'master':
                return json.dumps({'error':True, 'message':'operation can\'t be preformed on branch master'})
    packages = logic.get_action('package_list')(context, {})
    for package in packages:
        package = logic.get_action('package_show')(context, {'id': package})
        # make sure all resources are removed from disk
        if 'resources' in package:
            for r in package['resources']:
                if r.get('url_type') == "upload":
                    upload = uploader.ResourceUpload(r)
                    path = upload.get_path(r['id'])
                    if os.path.exists(path):
                        os.remove(path)
        logic.get_action('package_delete')(context, {'id': package['id']})
        # drop the per-package spectra collection in mongo
        get_package_spectra_collection(package['id']).remove({})
    # clear mongo
    collections.get('resource').remove({})
    collections.get('package').remove({})
    collections.get('search_package').remove({})
    collections.get('search_spectra').remove({})
    collections.get('lookup').remove({})
    return json.dumps({
        'removed': packages,
        'message' : 'Go to /ckan-admin/trash to finish cleanup'
    })
# rebuild the USDA MongoDB collection
def rebuildUSDACollection(collections, usdaApiUrl):
    """Admin only: re-download the USDA plant-list CSV from *usdaApiUrl*
    and rebuild the 'usda' mongo collection from it.

    Returns a JSON string with the number of imported rows.
    Raises Exception('Nope.') for non-sysadmin users.
    """
    if not isAdmin():
        raise Exception('Nope.')
    usdaCollection = collections.get('usda')
    usdaCollection.remove({})
    # fix: the original called urllib2.urlopen, but this module only imports
    # urllib, so the call raised NameError at runtime (the module's other
    # urllib usage is py2-style; confirm when porting to py3)
    resp = urllib.urlopen(usdaApiUrl)
    # strip CRs, then split rows; the trailing '' after the final newline
    # is skipped by the range below
    rows = re.sub(r'\r', '', resp.read()).split('\n')
    header = re.sub(r'"', '', rows[0]).split(',')
    for i in range(1, len(rows)-1):
        row = re.sub(r'"', '', rows[i]).split(',')
        item = {}
        # NOTE(review): range stops at len(header)-1, so the final CSV
        # column is never stored — confirm whether that is intentional
        for j in range(0, len(header)-1):
            item[header[j]] = row[j]
        usdaCollection.insert(item)
    return json.dumps({'success':True, 'count': len(rows)-2})
# check workspace collections for badness
def verifyWorkspace(collections):
    """Admin only: sanity-check the workspace mongo collections.

    Reports duplicate package entries, duplicate (resourceId, sheetId)
    resource entries, and per-package spectra counts, as a JSON string.
    Raises Exception('Nope.') for non-sysadmin users.
    """
    if not isAdmin():
        raise Exception('Nope.')
    packages = collections.get('package').find({},{"packageId":1,"prepared":1,"lastTouched":1})
    packageInfo = {}
    ids = []
    repeatPackages = []
    pCount = 0
    for package in packages:
        packageInfo[package.get("packageId")] = {
            "prepared" : package.get("prepared"),
            "lastTouched" : package.get("lastTouched"),
            "workspaceSpectra" : get_package_spectra_collection(package.get("packageId")).count()
        }
        # a packageId seen twice indicates a corrupt workspace entry
        if package.get("packageId") in ids:
            repeatPackages.append(package.get("packageId"))
        else:
            pCount += 1
            ids.append(package.get("packageId"))
    resources = collections.get('resource').find({},{"resourceId":1,"sheetId": 1})
    ids = []
    repeatResources = []
    rCount = 0
    for resource in resources:
        # a resource is identified by the (resourceId, sheetId) pair
        id = "%s %s" % (resource.get("resourceId"), resource.get("sheetId"))
        if id in ids:
            repeatResources.append(id)
        else:
            rCount += 1
            ids.append(id)
    # NOTE(review): 'package' below is whatever the loop left behind, so
    # spectraCount/metadataCount cover only the LAST package (and this
    # raises NameError when there are no packages) — confirm whether a
    # total across packages was intended
    return jsonStringify({
        "packageCount" : pCount,
        "resourceCount" : rCount,
        "spectraCount" : get_package_spectra_collection(package.get("packageId")).count({"type": "data"}),
        "metadataCount" : get_package_spectra_collection(package.get("packageId")).count({"type": "metadata"}),
        "repeats" : {
            "resources" : repeatResources,
            "packages" : repeatPackages
        },
        "packageInfo" : packageInfo
    })
def isAdmin():
    """Return True when the current request's user object exists and has
    the CKAN sysadmin flag set."""
    user = c.userobj
    return user is not None and bool(user.sysadmin)
def upgrade():
    """Admin only: run the datastore upgrade routine; returns its result
    as a JSON string. Raises Exception('Nope.') for non-sysadmins."""
    if not isAdmin():
        raise Exception('Nope.')
    return jsonStringify(runUpgrade())
def fixUnits():
    """Admin only: run the units-repair upgrade routine; returns its result
    as a JSON string. Raises Exception('Nope.') for non-sysadmins."""
    if not isAdmin():
        raise Exception('Nope.')
    return jsonStringify(runFixUnits())
def fixCitationText():
    """Admin only: run the citation-text repair routine; returns its result
    as a JSON string. Raises Exception('Nope.') for non-sysadmins."""
    if not isAdmin():
        raise Exception('Nope.')
    return jsonStringify(runFixCitationText())
|
{"/ckanext/ecosis/lib/data_package_importer.py": ["/ckanext/ecosis/controller/resource/__init__.py", "/ckanext/ecosis/lib/aws.py"], "/ckanext/ecosis/datastore/vocab/controlled.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/push/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/lookup.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py", "/ckanext/ecosis/controller/admin/upgrade.py"], "/ckanext/ecosis/controller/user/__init__.py": ["/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py"], "/ckanext/ecosis/datastore/files/excel.py": ["/ckanext/ecosis/datastore/files/__init__.py"], "/ckanext/ecosis/datastore/workspace/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/files/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/delete/__init__.py": ["/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/mapreduce/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/upgrade.py": ["/ckanext/ecosis/datastore/ckan/package.py", "/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/resource/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/delete/__init__.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/controller/package/doi.py", 
"/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/controller/organization/__init__.py": ["/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/spectra/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/user_data/paster.py": ["/ckanext/ecosis/user_data/model.py"], "/ckanext/ecosis/controller/package/doi.py": ["/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/plugin.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/controller/organization/__init__.py", "/ckanext/ecosis/controller/package/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/controller/__init__.py", "/ckanext/ecosis/user_data/paster.py"], "/ckanext/ecosis/controller/workspace/__init__.py": ["/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/datastore/query/workspace.py", "/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/datastore/files/utils.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/controller/package/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/lib/data_package_importer.py"], "/ckanext/ecosis/controller/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/workspace.py": 
["/ckanext/ecosis/datastore/ckan/__init__.py"]}
|
7,025
|
CSTARS/ckanext-ecosis
|
refs/heads/master
|
/ckanext/ecosis/controller/user/__init__.py
|
# from ckan.common import response, request
from ckan.common import request
from ckan.lib.base import c, model
import ckan.logic as logic
import json, jwt, re
from ckan.common import config
import ckan.lib.authenticator as authenticator
import ckanext.ecosis.user_data.model as githubInfoModel
import ckanext.ecosis.lib.utils as utils
from ckanext.ecosis.lib.auth import isAdmin
secret = config.get('ecosis.jwt.secret')
# get information about logged in user, including if they are logged in
def info():
    """Describe the currently logged-in user.

    Returns a dict with loggedIn=False when no user is in the session;
    otherwise the username, the organizations the user can create
    datasets in, a linked github username (if any), and an admin flag.
    """
    if len(c.user) == 0:
        return {"loggedIn": False}

    context = {'model': model, 'user': c.user}
    # see line 604 of ckan/logic/action/get about params for this method
    org_list = logic.get_action('organization_list_for_user')(
        context, {"permission": "create_dataset"})

    result = {
        "loggedIn": True,
        "username": c.user,
        "organizations" : org_list
    }

    gh_record = githubInfoModel.get(c.user)
    if gh_record is not None:
        result['githubUsername'] = gh_record.github_username
        # result['githubAccessToken'] = gh_record.github_access_token

    if isAdmin():
        result['isAdmin'] = True

    return result
def remote_login():
    """Authenticate a user from a JWT-wrapped username/password pair.

    The request form field 'token' carries an HS256 JWT containing
    'username' and 'password'.  On success returns the payload built by
    create_remote_login_response(); on failure returns a JSON string
    with loggedIn=False.
    """
    token = request.form.get('token')
    # PyJWT's decode() takes the *list*-valued 'algorithms' kwarg; the
    # old singular 'algorithm=' kwarg was silently ignored by PyJWT 1.x
    # (so the algorithm was never pinned) and is rejected by PyJWT 2.x.
    token = jwt.decode(token, secret, algorithms=['HS256'])
    username = token.get('username')
    password = token.get('password')

    if username is None or password is None:
        return json.dumps({"loggedIn": False})

    identity = {
        'login' : username,
        'password' : password
    }

    auth = authenticator.UsernamePasswordAuthenticator()
    user = auth.authenticate(request.environ, identity)

    if user is None:
        return json.dumps({
            "loggedIn": False,
            "message": "invalid username or password",
        })

    return create_remote_login_response(user)
def create_remote_login_response(user):
    """Build the remote-login payload (user details plus a signed JWT)."""
    context = {'model': model, 'user': user}
    # see line 604 of ckan/logic/action/get about params for this method
    # orgs = logic.get_action('organization_list_for_user')(context,{"permission": "create_dataset"})
    record = logic.get_action('user_show')(context, {'id': user})

    is_admin = record.get('sysadmin')

    payload = {
        "loggedIn" : True,
        "username": record['name'],
        "fullname": record['fullname'],
        "email" : record['email'],
        "id" : record['id'],
        "state" : record['state'],
        "github" : {}
        #"organizations": orgs
    }
    if is_admin:
        payload['admin'] = True

    gh_record = githubInfoModel.get(payload['username'])
    if gh_record is not None:
        payload['github']['username'] = gh_record.github_username
        payload['github']['accessToken'] = gh_record.github_access_token
        if gh_record.github_data is not None:
            payload['github']['data'] = json.loads(gh_record.github_data)

    # sign a token the client can present on later requests
    payload['token'] = jwt.encode({
        'username': payload['username'],
        'admin' : is_admin
    }, secret, algorithm='HS256')

    return payload
# TODO: implementing JWT support is kinda a can of worms.
# will work as a workaround hack for now...
def set_github_info():
    """Store github account info for the user identified by the request JWT.

    Reads the Bearer token from the 'authorization' header, verifies it,
    and upserts the github username / access token / data blob from the
    JSON request body.  Raises if the token is missing or names no user.
    """
    params = request.get_json()
    token = request.headers.get('authorization')
    if not token:
        raise Exception('No jwt token provided')

    token = re.sub(r"Bearer ", "", token)
    # 'algorithms' (a list) is the verification kwarg in PyJWT; the old
    # 'algorithm=' kwarg was ignored on decode, leaving the algorithm unpinned.
    token = jwt.decode(token, secret, algorithms=['HS256'])

    user_id = token.get("username")
    if not user_id:
        raise Exception('Jwt token did not provide user id')

    github_username = params.get('username')
    github_access_token = params.get('accessToken')
    github_data = params.get('data')

    githubInfoModel.update(user_id, github_username, github_access_token, github_data)

    return info()
def get_all_github_info():
    """Return every stored github record as a JSON array string.

    The request must carry a valid Bearer JWT in the 'authorization'
    header; the token is verified but its claims are otherwise unused.
    NOTE(review): any holder of a valid token can read all users' github
    access tokens here -- consider requiring the 'admin' claim.
    """
    token = request.headers.get('authorization')
    if not token:
        raise Exception('No jwt token provided')

    token = re.sub(r"Bearer ", "", token)
    # verify the signature; PyJWT's decode requires the list-valued
    # 'algorithms' kwarg ('algorithm=' was silently ignored in 1.x).
    jwt.decode(token, secret, algorithms=['HS256'])

    githubInfo = githubInfoModel.getAll()
    results = [user.as_dict() for user in githubInfo]
    return json.dumps(results)
|
{"/ckanext/ecosis/lib/data_package_importer.py": ["/ckanext/ecosis/controller/resource/__init__.py", "/ckanext/ecosis/lib/aws.py"], "/ckanext/ecosis/datastore/vocab/controlled.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/push/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/lookup.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py", "/ckanext/ecosis/controller/admin/upgrade.py"], "/ckanext/ecosis/controller/user/__init__.py": ["/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py"], "/ckanext/ecosis/datastore/files/excel.py": ["/ckanext/ecosis/datastore/files/__init__.py"], "/ckanext/ecosis/datastore/workspace/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/files/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/delete/__init__.py": ["/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/mapreduce/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/upgrade.py": ["/ckanext/ecosis/datastore/ckan/package.py", "/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/resource/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/delete/__init__.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/controller/package/doi.py", 
"/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/controller/organization/__init__.py": ["/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/spectra/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/user_data/paster.py": ["/ckanext/ecosis/user_data/model.py"], "/ckanext/ecosis/controller/package/doi.py": ["/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/plugin.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/controller/organization/__init__.py", "/ckanext/ecosis/controller/package/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/controller/__init__.py", "/ckanext/ecosis/user_data/paster.py"], "/ckanext/ecosis/controller/workspace/__init__.py": ["/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/datastore/query/workspace.py", "/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/datastore/files/utils.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/controller/package/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/lib/data_package_importer.py"], "/ckanext/ecosis/controller/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/workspace.py": 
["/ckanext/ecosis/datastore/ckan/__init__.py"]}
|
7,026
|
CSTARS/ckanext-ecosis
|
refs/heads/master
|
/ckanext/ecosis/user_data/model.py
|
import logging
import datetime
from sqlalchemy import Table, Column, types
from ckan.model.meta import mapper, metadata, Session
from ckan.model.domain_object import DomainObject
from ckan import model
from ckan.model.types import make_uuid
log = logging.getLogger(__name__)
# good references for editing tables
# https://github.com/ckan/ckanext-harvest/blob/master/ckanext/harvest/model/__init__.py
# https://github.com/stadt-karlsruhe/ckanext-extractor/blob/master/ckanext/extractor/model.py
# module-level handle to the user_github_info Table; populated lazily by define_table()
source_table = None
def setup():
    """Define and (when possible) create the user_github_info table.

    Safe to call repeatedly: the table is defined at most once and only
    created if it does not already exist.
    """
    if source_table is None:
        define_table()
        log.debug('User github table defined in memory')
    # NOTE(review): creation is skipped until CKAN's core tables exist
    # (e.g. during initial `db init`); presumably setup() is run again later.
    if not model.package_table.exists():
        log.debug('User github table creation deferred')
        return
    if not source_table.exists():
        source_table.create()
        log.debug('User github table created')
    else:
        log.debug('Github table already exist')
# Check if existing tables need to be updated
# for migration
def get(user_id):
    """Return the UserGithubInfo row for *user_id*, or None if absent."""
    query = Session.query(UserGithubInfo).filter(
        UserGithubInfo.user_id == user_id)
    return query.first()
def getAll():
    """Return every stored UserGithubInfo row."""
    return Session.query(UserGithubInfo).all()
def update(user_id, github_username, github_access_token, github_data):
    """Create or update the github record for *user_id* and persist it."""
    record = get(user_id)
    if record is None:
        # first time we see this user: start a fresh row
        record = UserGithubInfo()
        record.user_id = user_id

    record.github_username = github_username
    record.github_access_token = github_access_token
    record.github_data = github_data # string encoded JSON

    record.save()
def define_table():
    """Define the user_github_info table and map UserGithubInfo onto it.

    Idempotent: the table is only defined (and the ORM mapper registered)
    on the first call.
    """
    global source_table
    if source_table is None:
        source_table = Table('user_github_info', metadata,
            # surrogate primary key
            Column('id', types.UnicodeText, primary_key=True, default=make_uuid),
            Column('created', types.DateTime, default=datetime.datetime.utcnow),
            # CKAN user this record belongs to
            Column('user_id', types.UnicodeText, default=u''),
            Column('github_username', types.UnicodeText, default=u''),
            Column('github_access_token', types.UnicodeText, default=u''),
            # string encoded JSON blob of additional github data
            Column('github_data', types.UnicodeText, default=u'')
        )
        mapper(UserGithubInfo, source_table)
class UserGithubInfo(DomainObject):
    '''A user's linked github account (username, access token, data blob)
    for EcoSML, mapped onto the user_github_info table by define_table().
    '''
    pass
|
{"/ckanext/ecosis/lib/data_package_importer.py": ["/ckanext/ecosis/controller/resource/__init__.py", "/ckanext/ecosis/lib/aws.py"], "/ckanext/ecosis/datastore/vocab/controlled.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/push/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/lookup.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py", "/ckanext/ecosis/controller/admin/upgrade.py"], "/ckanext/ecosis/controller/user/__init__.py": ["/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py"], "/ckanext/ecosis/datastore/files/excel.py": ["/ckanext/ecosis/datastore/files/__init__.py"], "/ckanext/ecosis/datastore/workspace/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/files/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/delete/__init__.py": ["/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/mapreduce/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/upgrade.py": ["/ckanext/ecosis/datastore/ckan/package.py", "/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/resource/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/delete/__init__.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/controller/package/doi.py", 
"/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/controller/organization/__init__.py": ["/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/spectra/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/user_data/paster.py": ["/ckanext/ecosis/user_data/model.py"], "/ckanext/ecosis/controller/package/doi.py": ["/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/plugin.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/controller/organization/__init__.py", "/ckanext/ecosis/controller/package/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/controller/__init__.py", "/ckanext/ecosis/user_data/paster.py"], "/ckanext/ecosis/controller/workspace/__init__.py": ["/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/datastore/query/workspace.py", "/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/datastore/files/utils.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/controller/package/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/lib/data_package_importer.py"], "/ckanext/ecosis/controller/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/workspace.py": 
["/ckanext/ecosis/datastore/ckan/__init__.py"]}
|
7,027
|
CSTARS/ckanext-ecosis
|
refs/heads/master
|
/ckanext/ecosis/datastore/vocab/usda.py
|
# handle to the USDA plants Mongo collection; injected via init()
usdaCollection = None

# inject global dependencies
def init(collections):
    """Grab the 'usda' collection out of the injected collection map."""
    global usdaCollection
    usdaCollection = collections.get('usda')

# Given a spectra object with attribute 'USDA Symbol', lookup USDA plant information
def setCodes(spectra, info=None):
    """Augment *spectra* in place from the USDA plants lookup table.

    If spectra['USDA Symbol'] matches an 'Accepted Symbol' in the usda
    collection, copies over 'Common Name' and splits 'Scientific Name'
    into 'Latin Genus' / 'Latin Species'.  When *info* (a list) is
    given, a provenance entry is appended for every attribute set so
    the UI can show where the value came from.
    """
    if 'USDA Symbol' not in spectra:
        return

    item = usdaCollection.find_one(
        {'Accepted Symbol': spectra['USDA Symbol'].upper()}, {'_id': 0})
    if item is None:
        return

    # see if we have a common name
    common = item.get('Common Name')
    if common is not None and common != "":
        spectra['Common Name'] = common
        # let the UI know where this attribute came from
        if info is not None:
            info.append({
                "type" : "usda lookup",
                "key" : "Common Name"
            })

    # see if we have genus and species
    sciName = item.get('Scientific Name')
    if sciName is not None and sciName != "":
        # first word is the genus, the remainder is the species epithet (and authority)
        parts = sciName.split(' ')
        spectra['Latin Genus'] = parts.pop(0)
        spectra['Latin Species'] = " ".join(parts)
        # let the UI know where these attributes came from
        if info is not None:
            info.append({
                "type" : "usda lookup",
                "key" : "Latin Genus"
            })
            info.append({
                "type" : "usda lookup",
                "key" : "Latin Species"
            })
|
{"/ckanext/ecosis/lib/data_package_importer.py": ["/ckanext/ecosis/controller/resource/__init__.py", "/ckanext/ecosis/lib/aws.py"], "/ckanext/ecosis/datastore/vocab/controlled.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/push/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/lookup.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py", "/ckanext/ecosis/controller/admin/upgrade.py"], "/ckanext/ecosis/controller/user/__init__.py": ["/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py"], "/ckanext/ecosis/datastore/files/excel.py": ["/ckanext/ecosis/datastore/files/__init__.py"], "/ckanext/ecosis/datastore/workspace/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/files/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/delete/__init__.py": ["/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/mapreduce/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/upgrade.py": ["/ckanext/ecosis/datastore/ckan/package.py", "/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/resource/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/delete/__init__.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/controller/package/doi.py", 
"/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/controller/organization/__init__.py": ["/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/spectra/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/user_data/paster.py": ["/ckanext/ecosis/user_data/model.py"], "/ckanext/ecosis/controller/package/doi.py": ["/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/plugin.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/controller/organization/__init__.py", "/ckanext/ecosis/controller/package/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/controller/__init__.py", "/ckanext/ecosis/user_data/paster.py"], "/ckanext/ecosis/controller/workspace/__init__.py": ["/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/datastore/query/workspace.py", "/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/datastore/files/utils.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/controller/package/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/lib/data_package_importer.py"], "/ckanext/ecosis/controller/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/workspace.py": 
["/ckanext/ecosis/datastore/ckan/__init__.py"]}
|
7,028
|
CSTARS/ckanext-ecosis
|
refs/heads/master
|
/ckanext/ecosis/datastore/utils/__init__.py
|
from . import storage
# inject global dependencies
def init(config):
    """Pass the CKAN config down to the storage helper module."""
    storage.init(config)
|
{"/ckanext/ecosis/lib/data_package_importer.py": ["/ckanext/ecosis/controller/resource/__init__.py", "/ckanext/ecosis/lib/aws.py"], "/ckanext/ecosis/datastore/vocab/controlled.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/push/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/lookup.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py", "/ckanext/ecosis/controller/admin/upgrade.py"], "/ckanext/ecosis/controller/user/__init__.py": ["/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py"], "/ckanext/ecosis/datastore/files/excel.py": ["/ckanext/ecosis/datastore/files/__init__.py"], "/ckanext/ecosis/datastore/workspace/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/files/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/delete/__init__.py": ["/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/mapreduce/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/upgrade.py": ["/ckanext/ecosis/datastore/ckan/package.py", "/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/resource/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/delete/__init__.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/controller/package/doi.py", 
"/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/controller/organization/__init__.py": ["/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/spectra/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/user_data/paster.py": ["/ckanext/ecosis/user_data/model.py"], "/ckanext/ecosis/controller/package/doi.py": ["/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/plugin.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/controller/organization/__init__.py", "/ckanext/ecosis/controller/package/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/controller/__init__.py", "/ckanext/ecosis/user_data/paster.py"], "/ckanext/ecosis/controller/workspace/__init__.py": ["/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/datastore/query/workspace.py", "/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/datastore/files/utils.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/controller/package/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/lib/data_package_importer.py"], "/ckanext/ecosis/controller/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/workspace.py": 
["/ckanext/ecosis/datastore/ckan/__init__.py"]}
|
7,029
|
CSTARS/ckanext-ecosis
|
refs/heads/master
|
/ckanext/ecosis/datastore/files/csvReader.py
|
import csv, re
# parse a csv file
def read(file, separator):
    """Parse a csv file into a list of rows (lists of cell strings).

    file      -- path to the csv file on disk
    separator -- single-character field delimiter

    Cells that are not valid UTF-8 are scrubbed of non-ASCII bytes; if
    even that fails the cell is replaced with a placeholder marker.
    """
    data = []
    # 'rU' (universal newlines) was removed in Python 3.11 and raises
    # ValueError there; plain 'r' already does universal newlines on py3.
    try:
        csvfile = open(file, 'rU')
    except ValueError:
        csvfile = open(file, 'r')
    with csvfile:
        reader = csv.reader(csvfile, delimiter=separator, quotechar='"')
        for row in reader:
            # strip non utf-8 characters: http://stackoverflow.com/questions/26541968/delete-every-non-utf-8-symbols-froms-string
            # TODO: is there a better way todo this?
            for i in range(0, len(row)):
                try:
                    row[i] = unicode(row[i], 'utf-8').encode("utf-8", "ignore")
                except NameError:
                    # Python 3: cells are already text -- nothing to scrub.
                    # (Previously this fell into the scrub path below and
                    # silently turned every cell into bytes.)
                    pass
                except Exception:
                    # HACK, remove bad characters
                    try:
                        row[i] = re.sub(r'[^\x00-\x7F]+',' ', row[i]).encode("utf-8", "ignore")
                    except Exception:
                        row[i] = '__invalid_utf-8_characters__'
            data.append(row)
    # the with-statement closes the file; the old explicit close() was redundant
    return data
|
{"/ckanext/ecosis/lib/data_package_importer.py": ["/ckanext/ecosis/controller/resource/__init__.py", "/ckanext/ecosis/lib/aws.py"], "/ckanext/ecosis/datastore/vocab/controlled.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/push/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/lookup.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py", "/ckanext/ecosis/controller/admin/upgrade.py"], "/ckanext/ecosis/controller/user/__init__.py": ["/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py"], "/ckanext/ecosis/datastore/files/excel.py": ["/ckanext/ecosis/datastore/files/__init__.py"], "/ckanext/ecosis/datastore/workspace/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/files/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/delete/__init__.py": ["/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/mapreduce/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/upgrade.py": ["/ckanext/ecosis/datastore/ckan/package.py", "/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/resource/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/delete/__init__.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/controller/package/doi.py", 
"/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/controller/organization/__init__.py": ["/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/spectra/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/user_data/paster.py": ["/ckanext/ecosis/user_data/model.py"], "/ckanext/ecosis/controller/package/doi.py": ["/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/plugin.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/controller/organization/__init__.py", "/ckanext/ecosis/controller/package/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/controller/__init__.py", "/ckanext/ecosis/user_data/paster.py"], "/ckanext/ecosis/controller/workspace/__init__.py": ["/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/datastore/query/workspace.py", "/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/datastore/files/utils.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/controller/package/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/lib/data_package_importer.py"], "/ckanext/ecosis/controller/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/workspace.py": 
["/ckanext/ecosis/datastore/ckan/__init__.py"]}
|
7,030
|
CSTARS/ckanext-ecosis
|
refs/heads/master
|
/ckanext/ecosis/datastore/mongo.py
|
from pymongo import MongoClient
from ckan.common import config
# one shared Mongo client/database handle for the whole extension;
# connection details come from the CKAN config
client = MongoClient(config.get("ecosis.mongo.url"))
db = client[config.get("ecosis.mongo.db")]
# named handles to the extension's collections; each collection name can
# be overridden in the CKAN config (second argument is the default name)
collections = {
    # "spectra" : db[config.get("ecosis.mongo.workspace_spectra_collection", "workspace_spectra")],
    "resource" : db[config.get("ecosis.mongo.workspace_resource_collection", "workspace_resources")],
    "package" : db[config.get("ecosis.mongo.workspace_package_collection", "workspace_packages")],
    "usda" : db[config.get("ecosis.mongo.usda_collection", "usda")],
    "top" : db[config.get("ecosis.mongo.top_collection", "top")],
    "gcmd" : db[config.get("ecosis.mongo.gcmd_collection", "gcmd")],
    "search_package" : db[config.get("ecosis.mongo.search_collection", "search")],
    "search_spectra" : db[config.get("ecosis.mongo.spectra_collection", "spectra")],
    # no config override for the lookup collection
    "lookup" : db["lookup"]
}
def get_package_spectra_collection(pkgid):
    """Return the per-package workspace spectra collection for `pkgid`."""
    collection_name = "workspace_spectra_{0}".format(pkgid)
    return db[collection_name]
|
{"/ckanext/ecosis/lib/data_package_importer.py": ["/ckanext/ecosis/controller/resource/__init__.py", "/ckanext/ecosis/lib/aws.py"], "/ckanext/ecosis/datastore/vocab/controlled.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/push/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/lookup.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py", "/ckanext/ecosis/controller/admin/upgrade.py"], "/ckanext/ecosis/controller/user/__init__.py": ["/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py"], "/ckanext/ecosis/datastore/files/excel.py": ["/ckanext/ecosis/datastore/files/__init__.py"], "/ckanext/ecosis/datastore/workspace/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/files/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/delete/__init__.py": ["/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/mapreduce/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/upgrade.py": ["/ckanext/ecosis/datastore/ckan/package.py", "/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/resource/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/delete/__init__.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/controller/package/doi.py", 
"/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/controller/organization/__init__.py": ["/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/spectra/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/user_data/paster.py": ["/ckanext/ecosis/user_data/model.py"], "/ckanext/ecosis/controller/package/doi.py": ["/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/plugin.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/controller/organization/__init__.py", "/ckanext/ecosis/controller/package/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/controller/__init__.py", "/ckanext/ecosis/user_data/paster.py"], "/ckanext/ecosis/controller/workspace/__init__.py": ["/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/datastore/query/workspace.py", "/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/datastore/files/utils.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/controller/package/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/lib/data_package_importer.py"], "/ckanext/ecosis/controller/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/workspace.py": 
["/ckanext/ecosis/datastore/ckan/__init__.py"]}
|
7,031
|
CSTARS/ckanext-ecosis
|
refs/heads/master
|
/ckanext/ecosis/datastore/files/excel.py
|
import xlrd, os, shutil, datetime, json, csv, re
from ckanext.ecosis.datastore.files import csvReader
# TODO: document the workspace object and what attributes actually mean
# Directory where excel sheets are cached as csv files; set by init().
workspaceDir = None
# inject global dependencies
def init(workspaceDirectory):
    """Store the workspace directory used for the excel->csv sheet cache."""
    global workspaceDir
    workspaceDir = workspaceDirectory
# src:
# https://github.com/python-excel/xlrd
# help:
# http://www.youlikeprogramming.com/2012/03/examples-reading-excel-xls-documents-using-pythons-xlrd/
# https://secure.simplistix.co.uk/svn/xlrd/trunk/xlrd/doc/xlrd.html?p=4966
# Known Issues:
# Looks like some versions of officelibre and badness that xlrd doesn't like...
def process(collection, sheetConfig, hash):
    """Return the parsed data for the sheets of a cached excel resource.

    collection  : mongo collection holding per-sheet configs
    sheetConfig : stored config for the resource (or for a single sheet of it)
    hash        : current hash of the excel file on disk

    Returns a list of {"data": rows, "config": dict} entries, one per sheet
    that matched sheetConfig's sheetId (all sheets when sheetId is None).
    """
    # remove the place holder, the sheets will be the actual 'files'
    datasheets = []

    # check if the excel file's sheets have been cached as csv files;
    # this is the default way we want to read the excel files, cause
    # parsing excel is slooooow
    sheetIds = []
    # hash mismatch: the excel file changed, rewrite the csv cache
    if sheetConfig.get('hash') != hash:
        sheetIds = cacheWrite(collection, sheetConfig, hash)
    # we are cached, read in sheet ids
    else:
        workspacePath = os.path.join(workspaceDir, sheetConfig.get('packageId'), sheetConfig.get('resourceId'))
        fullPath = os.path.join(workspacePath,'sheets.json')
        # `with` guarantees the handle is closed even if json.load raises
        with open(fullPath, 'r') as f:
            sheetIds = json.load(f)

    for sheetId in sheetIds:
        configSheetId = sheetConfig.get('sheetId')
        config = None
        # we are processing a single sheet
        if configSheetId == sheetId:
            config = sheetConfig
        # we are processing everything
        elif configSheetId is None:
            config = collection.find_one({
                "packageId" : sheetConfig.get('packageId'),
                "resourceId" : sheetConfig.get('resourceId'),
                "sheetId" : sheetId
            })
        # no config prepared
        if config is None:
            config = {
                "packageId" : sheetConfig.get('packageId'),
                "resourceId" : sheetConfig.get('resourceId'),
                "layout" : sheetConfig.get('layout'),
                "sheetId": sheetId
            }
        if configSheetId == sheetId or configSheetId is None:
            config['hash'] = hash
            # tack on zip stuff
            if sheetConfig.get("fromZip") == True:
                config["fromZip"] = True
                config["name"] = sheetConfig.get("name")
                config["file"] = sheetConfig.get("file")
                config["zip"] = sheetConfig.get("zip")
            # finally let's read the csv file for this sheet
            data = cacheRead(config)
            # append the sheet data and config to the response
            datasheets.append({
                "data" : data,
                "config" : config
            })
    return datasheets
# read an individual excel sheet
def getWorksheetData(sheet, workbook):
    """Return the sheet's contents as a list of rows (lists of strings)."""
    rows = []
    for r in range(sheet.nrows):
        cells = []
        for c in range(sheet.ncols):
            cell = ""
            # let's try and parse out some values
            try:
                # dates are read in as iso formatted strings
                if sheet.cell_type(r, c) == xlrd.XL_CELL_DATE:
                    raw = sheet.cell_value(r, c)
                    cell = datetime.datetime(*xlrd.xldate_as_tuple(raw, workbook.datemode)).isoformat()
                # everything else is read as a plain string value
                else:
                    cell = sheet.cell_value(r, c)
                    if isinstance(cell, unicode):
                        cell = cell.encode("utf-8")
                    else:
                        cell = str(cell)  # if this fails, we have badness
            except Exception:
                # maybe it failed because of bad utf-8 characters;
                # try and scrub them out of the raw value
                try:
                    cell = re.sub(r'[^\x00-\x7F]+',' ', sheet.cell_value(r, c))
                # just give up.
                except Exception:
                    cell = '__invalid_utf-8_characters__'
            cells.append(cell)
        rows.append(cells)
    return rows
# read a single sheet
def cacheRead(sheetConfig):
    """Read one cached csv sheet back from the workspace directory.

    sheetIds look like "<index>-<sheet name>"; the cache file on disk is
    named only by the index ("<index>.csv").
    """
    # renamed from `id`, which shadowed the builtin
    sheet_index = sheetConfig.get('sheetId').split('-')[0]
    filename = '%s.csv' % sheet_index
    workspacePath = os.path.join(workspaceDir, sheetConfig.get('packageId'), sheetConfig.get('resourceId'), filename)
    return csvReader.read(workspacePath, ",")
# write excel files to disk as csv for faster read time
# excel read is unreal slow in python.
def cacheWrite(collection, sheetConfig, hash):
    """Expand an excel file into per-sheet csv cache files on disk.

    Also upserts the excel-level config (sheetId None) into `collection`
    with the new hash, and writes the sheet id list to sheets.json.
    Returns the list of sheet ids ("<index>-<sheet name>").
    """
    # we need to update the csv file cache
    workspacePath = os.path.join(workspaceDir, sheetConfig.get('packageId'), sheetConfig.get('resourceId'))
    # clean out any existing extraction
    if os.path.exists(workspacePath):
        shutil.rmtree(workspacePath)
    # create workspace (cache) path
    os.makedirs(workspacePath)
    sheetNames = []
    try:
        # open up the excel file
        workbook = xlrd.open_workbook(sheetConfig.get('file'))
        # grab sheet information
        sheets = workbook.sheet_names()
        for i, sheet in enumerate(sheets):
            sheetNames.append('%s-%s' % (i, sheet))
            # read in the sheet
            data = getWorksheetData(workbook.sheet_by_name(sheet), workbook)
            fullPath = os.path.join(workspacePath,'%s.csv' % i)
            # write the csv file for this sheet; `with` closes the handle
            # even if a row fails to serialize
            with open(fullPath, 'wb') as csvfile:
                wr = csv.writer(csvfile, quoting=csv.QUOTE_ALL)
                for row in data:
                    wr.writerow(row)
    except Exception as e:
        # best-effort: an unreadable workbook should not abort the cache
        # update (some officelibre-produced files upset xlrd)
        print(e)
    # make sure we save the hash
    excelConfig = collection.find_one({
        "packageId" : sheetConfig.get('packageId'),
        "resourceId" : sheetConfig.get('resourceId'),
        "sheetId" : None
    })
    # this is the first time we are reading the file
    if excelConfig is None:
        excelConfig = {
            "file" : sheetConfig.get('file'),
            "packageId" : sheetConfig.get('packageId'),
            "resourceId" : sheetConfig.get('resourceId'),
            "sheetId" : None,
            "hash" : hash,
            "cached" : datetime.datetime.utcnow(),
            "excel" : True
        }
    else:
        excelConfig['excel'] = True
        excelConfig['cached'] = datetime.datetime.utcnow()
        excelConfig['hash'] = hash
    # if we have a 'fake' resource, it's from a zipfile, make sure we save the name
    if sheetConfig.get('fromZip') and 'name' in sheetConfig:
        excelConfig['name'] = sheetConfig['name']
    excelConfig['file'] = sheetConfig.get('file')
    # update the workspace collection
    collection.update({
        "resourceId" : sheetConfig.get('resourceId'),
        "packageId" : sheetConfig.get('packageId'),
        "sheetId" : None
    }, excelConfig, upsert=True)
    # save ids
    fullPath = os.path.join(workspacePath,'sheets.json')
    with open(fullPath, 'w') as f:
        json.dump(sheetNames, f)
    return sheetNames
|
{"/ckanext/ecosis/lib/data_package_importer.py": ["/ckanext/ecosis/controller/resource/__init__.py", "/ckanext/ecosis/lib/aws.py"], "/ckanext/ecosis/datastore/vocab/controlled.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/push/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/lookup.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py", "/ckanext/ecosis/controller/admin/upgrade.py"], "/ckanext/ecosis/controller/user/__init__.py": ["/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py"], "/ckanext/ecosis/datastore/files/excel.py": ["/ckanext/ecosis/datastore/files/__init__.py"], "/ckanext/ecosis/datastore/workspace/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/files/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/delete/__init__.py": ["/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/mapreduce/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/upgrade.py": ["/ckanext/ecosis/datastore/ckan/package.py", "/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/resource/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/delete/__init__.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/controller/package/doi.py", 
"/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/controller/organization/__init__.py": ["/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/spectra/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/user_data/paster.py": ["/ckanext/ecosis/user_data/model.py"], "/ckanext/ecosis/controller/package/doi.py": ["/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/plugin.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/controller/organization/__init__.py", "/ckanext/ecosis/controller/package/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/controller/__init__.py", "/ckanext/ecosis/user_data/paster.py"], "/ckanext/ecosis/controller/workspace/__init__.py": ["/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/datastore/query/workspace.py", "/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/datastore/files/utils.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/controller/package/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/lib/data_package_importer.py"], "/ckanext/ecosis/controller/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/workspace.py": 
["/ckanext/ecosis/datastore/ckan/__init__.py"]}
|
7,032
|
CSTARS/ckanext-ecosis
|
refs/heads/master
|
/ckanext/ecosis/datastore/workspace/__init__.py
|
from datetime import datetime, timedelta
import re
import os
import shutil
import zipfile
import hashlib
from ckanext.ecosis.datastore.ckan import resource as ckanResourceQuery
from ckanext.ecosis.datastore.ckan import package as ckanPackageQuery
from ckanext.ecosis.datastore.files import process as importer
from ckanext.ecosis.datastore.mongo import get_package_spectra_collection
dataExtension = ["xlsx","xls","spectra","csv","tsv"]
allowedOptions = ['map', 'sort']
packageExpireTime = timedelta(days=1)
workspaceDir = ""
'''
The workspace is the core of the EcoSIS ckan extension. It is composed of both collections in MongoDB
and files on disk. The workspace spectra, resource and dataset collections contain information about
the dataset including last process time and resource (csv, excel, etc) parsing information. The spectra
collection contains an entry for every row/column in every spectra resource file.
Workspaces will remain 'open', ie files on disk and data in the workspace package and spectra collections
for one day. If they are not 'touched' in 24 hours, they will be removed.
The resources workspace collection is persistent. It must always remember the file parsing configuration
in case the user comes back, while the other two workspace collections (spectra, package) are volatile.
Finally, the filesystem stores all spectra resources as individual CSV sheets for fast lookups. That means
valid file types are extracted from zip files and excel files are expanded into multiple csv sheets. This
allows efficient responses to file parsing changes by the user (Excel is SLOW, nested inside zip is complicated,
zipped excel is both).
'''
# inject global resources
def init(co, resourceUtils, workDir):
    """Wire in shared state: mongo collections, resource path util, workspace dir."""
    global collections, resourceUtil, workspaceDir
    collections = co
    resourceUtil = resourceUtils
    workspaceDir = workDir
    # fix the historical 'seperator' misspelling -> 'separator'
    # TODO: remove this when patch applied
    collections.get("resource").update_many(
        {'seperator': {'$exists': True}},
        { '$rename': { 'seperator': 'separator'}}
    )
# cleanup unused workspaces
def clean(current_package_id=None):
    """Remove expired packages from the workspace.

    A package expires when it has no lastTouched timestamp or was last
    touched more than packageExpireTime ago. The currently active package
    is never removed.
    """
    expired = datetime.utcnow() - packageExpireTime
    # find all packages that were last touched before the expiry cutoff
    packages = collections.get("package").find({
        '$and' : [
            {'$or' :[
                {'lastTouched': {'$exists' : False} },
                {'lastTouched': {'$lt' : expired } },
            ]},
            {'packageId' : {'$ne' : current_package_id}}
        ]
    })
    # remove those packages from workspace
    for package in packages:
        # defensive re-check. Documents store the id under 'packageId';
        # the original read 'package_id', which never exists, so the
        # guard was a no-op.
        if package.get("packageId") == current_package_id:
            continue
        cleanPackage(package.get('packageId'))
# remove a package from the workspace
def cleanPackage(package_id):
    """Drop all workspace state (mongo documents + files on disk) for one package."""
    # drop every cached spectra chunk for the package
    spectra = get_package_spectra_collection(package_id)
    spectra.remove({})
    spectra.drop()
    # flag the package as no longer prepared
    collections.get("package").update(
        {"packageId": package_id},
        {"$set": {
            "prepared": False,
            "runInfo": None
        }}
    )
    # forget all file hashes; the next process run must assume everything changed
    collections.get("resource").update(
        {"packageId": package_id},
        {"$set": {
            "hash": None
        }},
        multi=True
    )
    # wipe anything placed on the filesystem workspace for this package
    workspacePath = os.path.join(workspaceDir, package_id)
    if os.path.exists(workspacePath):
        shutil.rmtree(workspacePath)
# update the last touched field for a package
def touch(package_id):
    """Mark a package's workspace as recently used.

    clean() reads lastTouched to decide which workspaces have expired.
    """
    collections.get("package").update(
        {"packageId":package_id},
        {"$set" :{
            "lastTouched" : datetime.utcnow()
        }}
    )
# prepare a package, that means expand it out, make sure the workspace package and
# spectra collections are populated for this package. Make sure all disk resources
# are properly created in workspace folder
def prepare(package_id, force=False):
    """Expand a package into the workspace (mongo collections + files on disk).

    Skips work when the package is already prepared unless `force` is set.
    Returns the updated workspace package document.
    """
    packageInfo = collections.get("package").find_one({
        "packageId" : package_id,
    },{"_id" : 0})
    # first time we see this package: start a fresh workspace object
    if packageInfo is None:
        packageInfo = {
            "packageId" : package_id
        }
    # nothing to do unless forced
    if not force and packageInfo.get("prepared") == True:
        return {
            "success" : True,
            "message" : "already prepared, use force flag to force prepare"
        }
    # never prepare a package that has been deleted
    ckanPackage = ckanPackageQuery.get(package_id)
    if ckanPackage.get('state') == 'deleted':
        raise Exception('Package has been deleted')
    # walk every active resource, expanding zips and importing data files
    runStatus = []
    for res in ckanResourceQuery.active(package_id):
        # path on disk plus file extension drive how we process
        filepath = resourceUtil.get_path(res.get('id'))
        ext = _getFileExtension(res.get('name'))
        if ext == "zip":
            # TODO: we should be checking a zip hash before we go unzipping every time
            for item in extractZip(package_id, res.get('id'), filepath, res.get('name')):
                runStatus.append(item)
        else:
            runStatus.append(importer.processFile(filepath, package_id, res.get('id'), resource=res))
    # record what we did (or did not) do and mark the package prepared
    packageInfo["runInfo"] = runStatus
    packageInfo["lastTouched"] = datetime.utcnow()
    packageInfo["prepared"] = True
    collections.get("package").update({"packageId":package_id}, packageInfo, upsert=True)
    return packageInfo
# helper for single file process, handles zip condition
def prepareFile(package_id, resource_id, sheet_id=None, options=None):
    """Process a single resource file, expanding it first when it is a zip.

    options defaults to None (a fresh dict per call) instead of the original
    mutable-default `options={}`, which was shared across calls.
    """
    if options is None:
        options = {}
    sheetInfo = collections.get("resource").find_one({
        "resourceId" : resource_id,
        "sheetId" : sheet_id
    })
    if sheetInfo is None:
        sheetInfo = {}
    # get the name of the resource
    if 'name' in sheetInfo:
        resource = sheetInfo
    else: # fallback on querying PG for the name
        resource = ckanResourceQuery.get(resource_id)
    # see if we have the path, otherwise look it up
    if 'file' in sheetInfo:
        filepath = sheetInfo.get('file')
    else:
        filepath = resourceUtil.get_path(resource_id)
    ext = _getFileExtension(resource.get('name'))
    # much like in the prepare() method aboves resource loop
    if ext == "zip":
        extractZip(package_id, resource.get('id'), filepath, resource.get('name'), options=options)
    else:
        importer.processFile(filepath, package_id, resource_id, sheetId=sheet_id, options=options, resource=resource)
# extract zip file and set resources
def extractZip(package_id, resource_id, zipPath, zipName, options=None):
    """Expand a zip resource into individual workspace resources.

    Each data file inside the zip becomes its own 'virtual' resource with a
    deterministic id, is extracted to the workspace directory, and is run
    through the normal importer. Returns a list of per-file status dicts.

    Fixes vs. original: `options` no longer uses a shared mutable default,
    the ZipFile handle is closed via `with`, and locals no longer shadow
    the builtins `hash` and `id`.
    """
    if options is None:
        options = {}
    status = []
    # check to see if there are any changes
    zipFileInfo = collections.get("resource").find_one({
        "packageId" : package_id,
        "resourceId" : resource_id
    })
    if zipFileInfo is None:
        zipFileInfo = {}
    fileHash = importer.hashfile(zipPath)
    # if hashes are equal, nothing has changed since the last extraction
    if zipFileInfo.get("hash") == fileHash:
        status.append({
            "resourceId" : resource_id,
            "name" : zipName,
            "unzipped" : False,
            "message" : "nothing todo, hash is equal"
        })
        return status
    # Send info back about what was processed
    zipFileInfo['hash'] = fileHash
    zipFileInfo['resourceId'] = resource_id
    zipFileInfo['packageId'] = package_id
    zipFileInfo['file'] = zipPath
    zipFileInfo['isZip'] = True
    # update resource collection
    collections.get("resource").update({
        "packageId" : package_id,
        "resourceId" : resource_id
    }, zipFileInfo, upsert=True)
    status.append({
        "resourceId" : resource_id,
        "name" : zipName,
        "unzipped" : True
    })
    # get the workspace path on disk
    workspacePath = os.path.join(workspaceDir, package_id, resource_id)
    # clean out any existing extraction
    if os.path.exists(workspacePath):
        shutil.rmtree(workspacePath)
    zipPackageIds = []
    # `with` closes the zip handle even if extraction/import raises
    with zipfile.ZipFile(zipPath, "r") as z:
        for info in z.infolist():
            if _isDataFile(info.filename):
                # create id for individual file
                name = re.sub(r".*/", "", info.filename)
                if re.match(r"^\..*", name): # ignore .dot files
                    continue
                zipFileId = _getZipResourceId(resource_id, info.filename)
                # extract individual file
                z.extract(info, workspacePath)
                # check for existing config
                resource = collections.get("resource").find_one({
                    "packageId" : package_id,
                    "resourceId" : zipFileId
                })
                # create new config if one doesn't exist
                if resource is None:
                    resource = {
                        "packageId" : package_id,
                        "resourceId" : zipFileId,
                        "name" : name,
                        "file" : os.path.join(workspacePath, info.filename),
                        "zip" : {
                            "name" : zipName,
                            "resourceId" : resource_id
                        },
                        "fromZip" : True
                    }
                collections.get("resource").update({
                    "packageId" : package_id,
                    "resourceId" : zipFileId
                }, resource, upsert=True)
                zipPackageIds.append(zipFileId)
                # now we pass with new resource id, but path to file
                result = importer.processFile(resource.get('file'), package_id, zipFileId, resource=resource, options=options)
                status.append(result)
    # TODO: implement .ecosis file
    # drop resource configs for files no longer present in the zip
    collections.get("resource").remove({
        "packageId" : package_id,
        "zip.resourceId" : resource_id,
        "resourceId" : {
            "$nin" : zipPackageIds
        }
    })
    # and their spectra
    collections.get("spectra").remove({
        "packageId" : package_id,
        "zip.resourceId" : resource_id,
        "resourceId" : {
            "$nin" : zipPackageIds
        }
    })
    return status
def _getZipResourceId(rid, name):
m = hashlib.md5()
m.update("%s%s" % (rid, name))
return m.hexdigest()
def _getFileExtension(filename):
return re.sub(r".*\.", "", filename)
def _isDataFile(filename):
if _getFileExtension(filename) in dataExtension:
return True
return False
|
{"/ckanext/ecosis/lib/data_package_importer.py": ["/ckanext/ecosis/controller/resource/__init__.py", "/ckanext/ecosis/lib/aws.py"], "/ckanext/ecosis/datastore/vocab/controlled.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/push/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/lookup.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py", "/ckanext/ecosis/controller/admin/upgrade.py"], "/ckanext/ecosis/controller/user/__init__.py": ["/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py"], "/ckanext/ecosis/datastore/files/excel.py": ["/ckanext/ecosis/datastore/files/__init__.py"], "/ckanext/ecosis/datastore/workspace/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/files/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/delete/__init__.py": ["/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/mapreduce/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/upgrade.py": ["/ckanext/ecosis/datastore/ckan/package.py", "/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/resource/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/delete/__init__.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/controller/package/doi.py", 
"/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/controller/organization/__init__.py": ["/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/spectra/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/user_data/paster.py": ["/ckanext/ecosis/user_data/model.py"], "/ckanext/ecosis/controller/package/doi.py": ["/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/plugin.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/controller/organization/__init__.py", "/ckanext/ecosis/controller/package/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/controller/__init__.py", "/ckanext/ecosis/user_data/paster.py"], "/ckanext/ecosis/controller/workspace/__init__.py": ["/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/datastore/query/workspace.py", "/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/datastore/files/utils.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/controller/package/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/lib/data_package_importer.py"], "/ckanext/ecosis/controller/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/workspace.py": 
["/ckanext/ecosis/datastore/ckan/__init__.py"]}
|
7,033
|
CSTARS/ckanext-ecosis
|
refs/heads/master
|
/ckanext/ecosis/datastore/utils/storage.py
|
import os
storage_path = None
'''
Helpers for resolving on-disk storage paths when reading resources from disk
'''
def init(config):
    """Resolve and cache the ckan resource storage root from config.

    Sets the module-level `storage_path` to "<ckan.storage_path>/resources",
    or to None when the option is missing/empty (callers must not use
    get_directory/get_path in that case).

    The original routed this through a `_storage_path` local whose
    `if _storage_path is None:` guard was always true; the logic is
    flattened here with identical behavior.
    """
    global storage_path
    # value of 'ckan.storage_path' from the [app:main] section
    configured = config.get("app:main", 'ckan.storage_path')
    if configured:
        storage_path = os.path.join(configured, 'resources')
    else:
        # not in config
        storage_path = None
def get_directory(id):
    """Return the two-level fan-out directory for a resource id."""
    # ckan shards resource files as <root>/<id[0:3]>/<id[3:6]>
    return os.path.join(storage_path, id[0:3], id[3:6])
def get_path(resource_id):
    """Return the full on-disk path of a resource's file.

    The first six characters of the id select the sharded directory
    (see get_directory); the remainder is the filename.
    """
    shard_dir = get_directory(resource_id)
    return os.path.join(shard_dir, resource_id[6:])
|
{"/ckanext/ecosis/lib/data_package_importer.py": ["/ckanext/ecosis/controller/resource/__init__.py", "/ckanext/ecosis/lib/aws.py"], "/ckanext/ecosis/datastore/vocab/controlled.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/push/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/lookup.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py", "/ckanext/ecosis/controller/admin/upgrade.py"], "/ckanext/ecosis/controller/user/__init__.py": ["/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py"], "/ckanext/ecosis/datastore/files/excel.py": ["/ckanext/ecosis/datastore/files/__init__.py"], "/ckanext/ecosis/datastore/workspace/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/files/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/delete/__init__.py": ["/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/mapreduce/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/upgrade.py": ["/ckanext/ecosis/datastore/ckan/package.py", "/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/resource/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/delete/__init__.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/controller/package/doi.py", 
"/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/controller/organization/__init__.py": ["/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/spectra/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/user_data/paster.py": ["/ckanext/ecosis/user_data/model.py"], "/ckanext/ecosis/controller/package/doi.py": ["/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/plugin.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/controller/organization/__init__.py", "/ckanext/ecosis/controller/package/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/controller/__init__.py", "/ckanext/ecosis/user_data/paster.py"], "/ckanext/ecosis/controller/workspace/__init__.py": ["/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/datastore/query/workspace.py", "/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/datastore/files/utils.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/controller/package/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/lib/data_package_importer.py"], "/ckanext/ecosis/controller/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/workspace.py": 
["/ckanext/ecosis/datastore/ckan/__init__.py"]}
|
7,034
|
CSTARS/ckanext-ecosis
|
refs/heads/master
|
/ckanext/ecosis/controller/git/__init__.py
|
import subprocess, json, re, os
# directory containing this module; the git commands below run with this as
# their working directory so they report on this checkout's repository
path = os.path.dirname(os.path.abspath(__file__))
# grab git info using command line git commands
def info():
    """Collect git information for this checkout.

    Shells out to git in this module's directory and returns a dict with
    'version' (output of `git describe --tags`), 'branch' (the checked-out
    branch name) and 'commit' (hash line of the most recent commit).
    """
    def run(cmd):
        # run one git command, capturing decoded stdout
        proc = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE, cwd=path)
        return proc.communicate()[0].decode("utf-8")
    resp = {}
    resp["version"] = run("git describe --tags")
    resp["branch"] = run("git branch").split("\n")
    # `git branch` marks the current branch with a leading '*'
    for line in resp["branch"]:
        if "*" in line:
            resp["branch"] = line.replace("* ", "")
            break
    # keep only the first line ("commit <hash>") and strip the prefix
    resp["commit"] = re.sub(r'\n.*', '', run("git log -1")).replace("commit ", "")
    return resp
|
{"/ckanext/ecosis/lib/data_package_importer.py": ["/ckanext/ecosis/controller/resource/__init__.py", "/ckanext/ecosis/lib/aws.py"], "/ckanext/ecosis/datastore/vocab/controlled.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/push/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/lookup.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py", "/ckanext/ecosis/controller/admin/upgrade.py"], "/ckanext/ecosis/controller/user/__init__.py": ["/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py"], "/ckanext/ecosis/datastore/files/excel.py": ["/ckanext/ecosis/datastore/files/__init__.py"], "/ckanext/ecosis/datastore/workspace/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/files/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/delete/__init__.py": ["/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/mapreduce/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/upgrade.py": ["/ckanext/ecosis/datastore/ckan/package.py", "/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/resource/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/delete/__init__.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/controller/package/doi.py", 
"/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/controller/organization/__init__.py": ["/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/spectra/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/user_data/paster.py": ["/ckanext/ecosis/user_data/model.py"], "/ckanext/ecosis/controller/package/doi.py": ["/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/plugin.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/controller/organization/__init__.py", "/ckanext/ecosis/controller/package/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/controller/__init__.py", "/ckanext/ecosis/user_data/paster.py"], "/ckanext/ecosis/controller/workspace/__init__.py": ["/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/datastore/query/workspace.py", "/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/datastore/files/utils.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/controller/package/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/lib/data_package_importer.py"], "/ckanext/ecosis/controller/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/workspace.py": 
["/ckanext/ecosis/datastore/ckan/__init__.py"]}
|
7,035
|
CSTARS/ckanext-ecosis
|
refs/heads/master
|
/ckanext/ecosis/datastore/query/__init__.py
|
import re, pymongo, json, dateutil
from ckan.common import config
from ckanext.ecosis.datastore.ckan import package as ckanPackageQuery
from ckanext.ecosis.datastore.ckan import resource as ckanResourceQuery
from ckanext.ecosis.datastore.vocab import usda
from ckanext.ecosis.datastore.vocab import controlled as controlledVocab
from ckanext.ecosis.datastore.utils import mongo
from ckanext.ecosis.datastore.mongo import get_package_spectra_collection
from ckanext.ecosis.lib import utils
from . import workspace
'''
Query helpers for the EcoSIS workspace collections.

Builds fully-joined spectra/metadata documents from the per-package MongoDB
workspace collections, resolving metadata joins, attribute aliases,
vocabularies, sort values, locations and photo URLs along the way.
'''
# MongoDB collection registry; injected by init().
collections = None
# Base URL of this EcoSIS instance; injected by init() and used by
# addEcosisNamespace() to build dataset links.
host = ""
# inject global dependencies
def init(co, hostUrl):
    """Inject the MongoDB collections and site host URL, then wire the
    workspace sub-module up with this module's lookup callbacks."""
    global collections, host
    collections, host = co, hostUrl
    workspace.init(co, getResource, isPushed)
# get a spectra at a specific index.
def get(packageId="", resourceId=None, sheetId=None, index=0, showProcessInfo=False, must_be_valid=False, clean_wavelengths=True):
    """Return one fully-joined spectra dict for a package.

    Finds the spectra at position `index` (ordered by the stored 'index'
    field, optionally narrowed to a resource/sheet), then layers on:
    wavelength normalization, metadata joins, attribute aliases, USDA
    codes, controlled-vocabulary enforcement, the 'ecosis' provenance
    namespace, sort value, GeoJSON location and photo URL.

    Returns {} when must_be_valid is True and the spectra has no
    datapoints.  Raises when no spectra exists at the given index.
    """
    # build out query
    query = {
        "type" : "data",
        "packageId" : packageId
    }
    # you can limit by resource and sheet id if you want
    if resourceId is not None:
        query["resourceId"] = resourceId
    if sheetId is not None:
        query["sheetId"] = sheetId
    # get spectra at index
    main = get_package_spectra_collection(packageId).find_one(query, skip=index, sort=[("index", pymongo.ASCENDING)])
    if main == None:
        raise Exception('Unabled to get spectra from package_id: %s at index %s' % (packageId, index))
    # the collection also contains config information about the spectra, just grab to spectra attribute
    spectra = main.get('spectra')
    # moves measurement wavelength keys into the 'datapoints' object;
    # when clean_wavelengths is set this also replaces ',' with '.'
    moveWavelengths(spectra, clean_wavelengths)
    if must_be_valid:
        # caller only wants spectra that actually carry measurements
        if 'datapoints' not in spectra:
            return {}
        if len(spectra['datapoints']) == 0:
            return {}
    # get information for the sheet this spectra came from
    sheetInfo = collections.get('resource').find_one({
        "packageId": packageId,
        "resourceId": main.get("resourceId"),
        "sheetId" : main.get("sheetId")
    })
    # get package information for the package this spectra came from
    package = ckanPackageQuery.get(packageId)
    attributeProcessInfo = []
    # join together metadata to this spectra
    join(packageId, spectra, attributeProcessInfo)
    # NOTE: local 'config' shadows the module-level ckan.common config import
    config = collections.get('package').find_one({"packageId": packageId})
    if config == None:
        config = {}
    # set the spectra attribute aliases
    mapNames(spectra, config, attributeProcessInfo, package)
    # lookup any usda code given
    usda.setCodes(spectra, info=attributeProcessInfo)
    # strip controlled vocab fields. Remove any values that are not part of the controlled
    # vocabulary
    controlledVocab.enforce(spectra)
    # add 'spectra.ecosis' attribute with package and sheet info
    if showProcessInfo:
        addEcosisNamespace(spectra, package, main, sheetInfo, processInfo=attributeProcessInfo)
    else:
        addEcosisNamespace(spectra, package, main, sheetInfo)
    # set the sort information. This data needs to be of the correct type (string, number, date) for
    # proper sorting in mongodb
    setSort(spectra, config, package)
    # set the location information. Needs to be proper geojson if it's going to be used
    setLocation(spectra)
    # set photo
    setPhoto(packageId, spectra)
    return spectra
# just like get spectra, but retrieves a row or column of metadata
def getMetadataChunk(packageId, resourceId=None, sheetId=None, index=0):
    """Return one metadata row/column plus its join information.

    Like get(), but for 'metadata' chunks.  Finds the chunk at position
    `index` (ordered by the stored 'index' field), then reports every data
    spectra whose join attribute matches this chunk's value, so the UI can
    show which spectra the metadata attaches to.

    Raises when no metadata chunk exists for the given ids/index.
    """
    query = {
        "type" : "metadata",
        "packageId" : packageId
    }
    # optionally narrow to one resource / sheet
    if resourceId is not None:
        query['resourceId'] = resourceId
    if sheetId is not None:
        query['sheetId'] = sheetId
    # grab metadata chunk at given index
    chunk = get_package_spectra_collection(packageId).find_one(query, skip=index, sort=[("index", pymongo.ASCENDING)])
    if chunk is None:
        raise Exception('Invalid resource ids given')
    # the same query minus 'type' identifies the sheet this chunk came from
    del query['type']
    sheetInfo = collections.get('resource').find_one(query)
    joinedNames = []
    # BUGFIX: read joinOn only after the None check.  Previously
    # sheetInfo.get("joinOn") ran before `sheetInfo is not None`, so a
    # missing sheet record raised AttributeError instead of being skipped.
    joinOn = sheetInfo.get("joinOn") if sheetInfo is not None else None
    if joinOn is not None and joinOn != "" and chunk.get('spectra') is not None:
        # find all data spectra whose joinOn attribute equals this
        # metadata chunk's value
        joinQuery = {
            "type" : "data",
            "packageId" : packageId
        }
        joinQuery['spectra.%s' % joinOn] = chunk.get('spectra')[joinOn]
        # run query
        joined = get_package_spectra_collection(packageId).find(joinQuery)
        # for all results, append sheet information to the 'joinedNames' resources array.
        for r in joined:
            # TODO: is there a better way to get the actual 'name' of a resource?
            joinedInfo = collections.get('resource').find_one(
                {
                    'resourceId': r.get('resourceId'),
                    'sheetId': r.get('sheetId')
                },
                {"layout": 1,"name": 1})
            if joinedInfo is None: # no workspace record at all
                joinedName = {}
                joinedInfo = {}
            elif 'name' in joinedInfo:
                joinedName = joinedInfo
            else: # if no name is provided in workspace, fallback to postgres
                try:
                    joinedName = ckanResourceQuery.get(r.get('resourceId'))
                except:
                    joinedName = {}
            # add information about which spectra this chunk joins to
            if joinedName is not None:
                joinedNames.append({
                    "resourceId" : r.get('resourceId'),
                    "sheetId" : r.get('sheetId'),
                    "name" : joinedName.get('name'),
                    "layout" : joinedInfo.get('layout'),
                    "index" : r.get("index")
                })
    # resolve any photo attribute to a download URL
    setPhoto(packageId, chunk.get('spectra'))
    # return metadata and join information
    return {
        "metadata" : chunk.get('spectra'),
        "joinedResources" : joinedNames,
        "joinKey" : joinOn
    }
# get all metadata information for a sheet
# get number of chunks and number of joined chunks
def getMetadataInfo(packageId, resourceId=None, sheetId=None):
    """Summarize how a metadata sheet joins to a package's spectra.

    Returns a dict with 'total' (number of metadata rows/cols in the
    sheet) and 'joinCount' (number of data spectra whose join attribute
    matches one of this sheet's values).  Raises when no matching
    resource sheet exists.
    """
    query = {
        "packageId" : packageId
    }
    # add additional query parameters
    if resourceId is not None:
        query['resourceId'] = resourceId
    if sheetId is not None:
        query['sheetId'] = sheetId
    sheetInfo = collections.get('resource').find_one(query)
    if sheetInfo is None:
        raise Exception('No resource found')
    query['type'] = "metadata"
    # get all distinct join values
    attrs = mongo.distinct(get_package_spectra_collection(packageId), 'spectra.%s' % sheetInfo.get('joinOn'), query)
    # get total number of metadata rows or columns
    # NOTE: local 'total' shadows the module-level total() helper
    total = mongo.count(get_package_spectra_collection(packageId), query)
    query = {
        "packageId" : packageId,
        "type" : "data"
    }
    query['spectra.%s' % sheetInfo.get('joinOn')] = {
        "$in" : attrs
    }
    # get the number of spectra that match to this sheet
    return {
        "joinCount": mongo.count(get_package_spectra_collection(packageId), query),
        "total" : total
    }
# get total number of rows/cols for a sheet
def total(packageId, resourceId=None, sheetId=None):
    """Count the data rows/cols stored for a package, optionally narrowed
    to a single resource and/or sheet.  Returns {"total": n}."""
    query = {"type": "data", "packageId": packageId}
    for field, value in (("resourceId", resourceId), ("sheetId", sheetId)):
        if value is not None:
            query[field] = value
    # counting goes through the mongo utils wrapper for driver compatibility
    count = mongo.count(get_package_spectra_collection(packageId), query)
    return {"total": count}
# if the spectra has a 'photo' attribute see if it's a name of a resource file,
# if so, set the download URL as the value
def setPhoto(packageId, spectra):
    """Resolve a spectra's 'photo' attribute to a download URL.

    Finds any key that flattens to 'photo', normalizes it to the literal
    key 'photo', and, unless the value already starts with http(s),
    rewrites it to this site's resource-by-name download endpoint.
    """
    if spectra is None:
        return
    photoKey = next((k for k in spectra if utils.flatten(k) == 'photo'), None)
    if photoKey is None:
        return
    if photoKey != 'photo':
        # move the value under the canonical 'photo' key
        spectra['photo'] = spectra.pop(photoKey)
    if re.match(r'^https?', spectra['photo'], re.I):
        return  # already an absolute URL
    spectra['photo'] = "%s/ecosis/resource/byname/%s/%s" % (config.get('ckan.site_url'), packageId, spectra['photo'])
# make sure location information for spectra is valid geojson
# if this is not valid, mongodb will not allow it to be inserted (geoindex)
def setLocation(spectra):
    """Normalize spectra location info into valid GeoJSON geometry.

    Prefers an explicit 'geojson' attribute (unwrapping Feature /
    FeatureCollection down to bare geometry), otherwise falls back to
    Latitude/Longitude or latitude/longitude point attributes.  The
    result is stored at spectra['ecosis']['geojson']; unparseable
    lat/lng values are silently ignored.
    """
    raw = spectra.get('geojson')
    if raw is not None:
        js = json.loads(raw)
        kind = js.get("type")
        if kind == "Feature":
            # unwrap a single feature down to its geometry
            spectra['ecosis']['geojson'] = js.get('geometry')
        elif kind == "FeatureCollection":
            # collapse every feature's geometry into one GeometryCollection
            spectra['ecosis']['geojson'] = {
                "type": "GeometryCollection",
                "geometries": [f.get("geometry") for f in js.get("features")]
            }
        else:
            # already a bare geometry, keep as-is
            spectra['ecosis']['geojson'] = js
        del spectra['geojson']
        return
    # no geojson attribute: try explicit point coordinates
    for latKey, lngKey in (('Latitude', 'Longitude'), ('latitude', 'longitude')):
        if spectra.get(latKey) is not None and spectra.get(lngKey) is not None:
            try:
                spectra['ecosis']['geojson'] = {
                    "type": "Point",
                    "coordinates": [
                        float(spectra.get(lngKey)),
                        float(spectra.get(latKey))
                    ]
                }
            except:
                # bad numeric value: leave location unset
                pass
            return
# set sort value as correct type (string, number, date)
# dates need to be ISO strings
def setSort(spectra, config, package):
    """Attach a properly-typed sort value at spectra['ecosis']['sort'].

    The sort definition ({'on': attr, 'type': 'datetime'|'numeric'|other})
    comes from package extras ('sort', JSON-encoded) or, for backward
    compatibility, from the workspace config object.  MongoDB needs the
    value as a real datetime/float/string for correct ordering.
    """
    sort = None
    # backward compatibility. But moving away from config object;
    # all 'advanced data' should be stored in package extras
    extras = package.get('extras')
    if extras != None and extras.get('sort') != None:
        sort = json.loads(extras.get('sort'))
    elif config.get("sort") != None:
        sort = config.get("sort")
    if sort == None:
        return
    on = sort.get('on')
    sortType = sort.get('type')
    if on is None:
        return
    if on not in spectra:
        return
    if sortType == 'datetime':
        try:
            # BUGFIX: import the submodule explicitly. `import dateutil`
            # alone does not bind dateutil.parser, so the old
            # `dateutil.parser.parse` raised AttributeError that the bare
            # except silently swallowed -- datetime sorts never applied.
            from dateutil import parser as dateutil_parser
            spectra['ecosis']['sort'] = dateutil_parser.parse(spectra[on])
        except:
            # unparseable date: leave the sort value unset
            pass
    elif sortType == 'numeric':
        try:
            spectra['ecosis']['sort'] = float(spectra[on])
        except:
            # non-numeric value: leave the sort value unset
            pass
    else:
        spectra['ecosis']['sort'] = spectra[on]
# append the .ecosis attribute to a spectra given sheet and dataset information
def addEcosisNamespace(spectra, package, main, sheetInfo, processInfo=None):
    """Attach the 'ecosis' provenance attribute (package/sheet info) to a spectra."""
    filename = sheetInfo.get('name')
    # no name stored in the workspace: fall back to the postgres resource
    # record (zip-sourced sheets have no postgres record to consult)
    if filename is None and sheetInfo.get('fromZip') != True:
        filename = ckanResourceQuery.get(sheetInfo.get('resourceId')).get('name')
    packageId = sheetInfo.get('packageId')
    # sheet and package provenance
    ecosis = {
        'package_id': sheetInfo.get("packageId"),
        'package_title': package.get('title'),
        'resource_id': main.get('resourceId'),
        'filename': filename,
        'sheet_id': main.get('sheetId'),
        'layout': sheetInfo.get('layout'),
        'index': main.get('index'),
        'dataset_link': '%s#result/%s' % (host, packageId),
        'dataset_api_link': '%spackage/get?id=%s' % (host, packageId),
    }
    if 'zip' in sheetInfo:
        # spectra came from inside a zip resource; record the container
        zipInfo = sheetInfo.get('zip')
        ecosis['zip_package'] = {
            "id": zipInfo.get('resourceId'),
            "name": zipInfo.get('name')
        }
    if processInfo is not None:
        # latest processing information (when the sheet was last parsed)
        ecosis['processInfo'] = processInfo
    if package.get('organization') != None:
        ecosis['organization'] = package['organization']['title']
    spectra['ecosis'] = ecosis
# set attribute aliases if they exists
def mapNames(spectra, config, processInfo, package):
    """Apply attribute aliases: copy spectra[source] under spectra[alias].

    Aliases come from package extras ('aliases', JSON-encoded) or, for
    backward compatibility, from the config object's 'map'.  Each applied
    alias is recorded in processInfo for UI reporting.
    """
    extras = package.get('extras')
    if extras != None and extras.get('aliases') != None:
        aliases = json.loads(extras.get('aliases'))
    elif config.get("map") != None:
        aliases = config.get("map")
    else:
        aliases = None
    # None (or any non-dict) means there is nothing to apply
    if not isinstance(aliases, dict):
        return
    for alias, source in aliases.items():
        if source not in spectra:
            continue
        spectra[alias] = spectra[source]
        processInfo.append({
            "type" : "mapped",
            "key" : alias,
            "from" : source
        })
# move wavelengths from first class citizen in spectra to 'datapoints' object
def moveWavelengths(spectra, clean):
    """Relocate numeric wavelength keys into spectra['datapoints'].

    Keys that look like (possibly negative, comma-decimal) numbers are
    treated as wavelength measurements; their string values are stripped
    of surrounding whitespace.  When `clean` is True the comma decimal
    marker (the MongoDB-safe form) is converted back to '.'.
    """
    def looksLikeWavelength(key):
        return re.match(r"^-?\d+\,?\d*", key) or re.match(r"^-?\d*\,\d+", key)
    datapoints = {}
    # snapshot the matching keys first so we can delete while iterating
    for key in [k for k in spectra if looksLikeWavelength(k)]:
        outKey = uncleanKey(key) if clean else key
        datapoints[outKey] = spectra[key].strip()
        del spectra[key]
    spectra['datapoints'] = datapoints
# given a list of joinable metadata sheets, see if any of the sheets have matches
# to the 'joinOn' value. If so merge metadata information to spectra.
def join(packageId, spectra, processInfo):
    """Merge matching metadata-sheet attributes into a spectra dict.

    For every metadata sheet in the package, looks for a metadata chunk
    whose joinOn value equals this spectra's value; matching attributes
    are copied in (never overwriting existing keys) and each copy is
    recorded in processInfo for UI reporting.
    """
    # get all the metadata sheets
    joinableSheets = collections.get('resource').find({"metadata": True, "packageId": packageId})
    # for each sheet config
    for sheetConfig in joinableSheets:
        # grab the metadata sheets join variable
        joinOn = sheetConfig.get('joinOn')
        if joinOn == None:
            continue
        # see if we have a join variable in the spectra
        joinVar = spectra.get(joinOn)
        if joinVar != None:
            query = {
                "type" : "metadata",
                "packageId" : packageId,
                "resourceId" : sheetConfig["resourceId"]
            }
            query["spectra.%s" % sheetConfig.get('joinOn')] = joinVar
            if sheetConfig.get('sheetId') != None:
                query["sheetId"] = sheetConfig.get('sheetId')
            # query for matches to spectras value
            joinData = get_package_spectra_collection(packageId).find_one(query)
            if joinData != None:
                # for each match, append all attributes
                for key in joinData.get("spectra"):
                    if key not in spectra:
                        spectra[key] = joinData.get("spectra").get(key)
                        # keep track of where attribute came from, this mostly for reporting in the UI
                        # lets the user know which attributes have properly join and which sheets
                        # those attributes came from
                        processInfo.append({
                            "type" : "join",
                            "key" : key,
                            "resourceId" : joinData.get("resourceId"),
                            "sheetId" : joinData.get("sheetId"),
                        })
# switch from MongoDB version to normal (only for wavelengths)
def uncleanKey(key):
    """Convert a MongoDB-safe wavelength key back to dotted form (',' -> '.')."""
    return key.replace(',', '.')
# get workspace resource
def getResource(resource_id, sheet_id=None):
    """Return workspace sheet info for a resource (or zip member).

    Matches either a direct resourceId or a resource contained in a zip
    package.  Each returned sheet has its 'attributes' reduced to the
    list of metadata attribute names, a 'units' name->unit dict, and --
    when the same attribute name appears twice (probably a wrong layout)
    -- 'repeatAttributes'/'repeats' flags.
    """
    # query needs to check if is part of zip package
    query = {
        "$or" : [
            {"resourceId" : resource_id},
            {"zip.resourceId" : resource_id}
        ]
    }
    # see if a sheet id was provided, sheets of excel files are
    # considered individual resources in this context
    if sheet_id is not None:
        query['sheetId'] = sheet_id
    # grab the sheet information from the resource workspace collection
    # (projection drops bulky/internal fields)
    sheets = collections.get("resource").find(query,{
        "localRange" : 0,
        "hash" : 0,
        "file" : 0,
        "_id" : 0
    })
    response = []
    for sheet in sheets:
        # only send metadata attributes
        metadata = []
        repeats = []
        units = {}
        attributeRepeatFlag = False # probably have wrong layout
        # read in all of the attribute data including attribute names,
        # units and possible repeats if they exist
        if sheet.get('attributes') is not None:
            for attr in sheet.get('attributes'):
                if attr.get("type") != "metadata":
                    continue
                if attr.get("name") in metadata:
                    # duplicate attribute name: record it once and flag the sheet
                    if attr.get("name") not in repeats:
                        repeats.append(attr.get("name"))
                    attributeRepeatFlag = True
                    continue
                metadata.append(attr.get("name"))
                if attr.get("units") is not None:
                    units[attr.get("name")] = attr.get("units")
        sheet['attributes'] = metadata
        sheet['units'] = units
        if attributeRepeatFlag:
            sheet['repeatAttributes'] = True
            sheet['repeats'] = repeats
        response.append(sheet)
    return response
# get the dict of attribute name to units type
def allUnits(package_id):
    """Build a dict of attribute name -> units across every sheet of a package."""
    # query workspace resources for the package, dropping bulky fields
    sheets = collections.get("resource").find(
        {"packageId": package_id},
        {"localRange": 0, "hash": 0, "file": 0, "_id": 0}
    )
    units = {}
    # walk every sheet's attribute list, keeping any non-empty units value
    for sheet in sheets:
        attrs = sheet.get('attributes')
        if attrs is None:
            continue
        for attr in attrs:
            unit = attr.get("units")
            if unit is not None and unit != "":
                units[attr.get("name")] = unit
    return units
# get last pushed time
def isPushed(package_id):
    """Return a package's last-push timestamp from the search index.

    Returns None when the package has never been indexed (or the record
    has no ecosis data); otherwise the value.ecosis.pushed value, which
    may itself be None.
    """
    doc = collections.get("search_package").find_one(
        {"value.ecosis.package_id": package_id},
        {"value.ecosis.pushed": 1}
    )
    if doc is None:
        return None
    ecosis = doc.get("value").get("ecosis")
    if ecosis is None:
        return None
    return ecosis.get("pushed")
|
{"/ckanext/ecosis/lib/data_package_importer.py": ["/ckanext/ecosis/controller/resource/__init__.py", "/ckanext/ecosis/lib/aws.py"], "/ckanext/ecosis/datastore/vocab/controlled.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/push/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/lookup.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py", "/ckanext/ecosis/controller/admin/upgrade.py"], "/ckanext/ecosis/controller/user/__init__.py": ["/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py"], "/ckanext/ecosis/datastore/files/excel.py": ["/ckanext/ecosis/datastore/files/__init__.py"], "/ckanext/ecosis/datastore/workspace/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/files/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/delete/__init__.py": ["/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/mapreduce/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/upgrade.py": ["/ckanext/ecosis/datastore/ckan/package.py", "/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/resource/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/delete/__init__.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/controller/package/doi.py", 
"/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/controller/organization/__init__.py": ["/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/spectra/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/user_data/paster.py": ["/ckanext/ecosis/user_data/model.py"], "/ckanext/ecosis/controller/package/doi.py": ["/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/plugin.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/controller/organization/__init__.py", "/ckanext/ecosis/controller/package/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/controller/__init__.py", "/ckanext/ecosis/user_data/paster.py"], "/ckanext/ecosis/controller/workspace/__init__.py": ["/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/datastore/query/workspace.py", "/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/datastore/files/utils.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/controller/package/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/lib/data_package_importer.py"], "/ckanext/ecosis/controller/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/workspace.py": 
["/ckanext/ecosis/datastore/ckan/__init__.py"]}
|
7,036
|
CSTARS/ckanext-ecosis
|
refs/heads/master
|
/ckanext/ecosis/datastore/vocab/top.py
|
import re, copy
# MongoDB 'top' vocabulary collection; injected by init().
topCollection = None
# MongoDB query projection: the term fields returned to callers
projection = {
    'definition' : 1,
    '_id' : 0,
    'prefUnit' : 1,
    'label' : 1,
    'alternativeLabel' : 1,
    'relatedTerm' : 1,
    'preferredLabel' : 1,
    'relatedMatch' : 1,
    'broaderTransitive' : 1,
    'subClassOf' : 1,
    'id' : 1,
}
# same projection plus the full-text relevance score, for $text searches
searchProjection = copy.copy(projection)
searchProjection['score'] = {
    '$meta': "textScore"
}
# inject global dependencies
def init(collections):
    """Inject the MongoDB 'top' vocabulary collection dependency."""
    global topCollection
    topCollection = collections.get('top')
# get list of suggested top names
def overview(list):
    """Classify each attribute name against the 'top' vocabulary.

    For each name, records the strongest kind of match found:
    'match' (exact, case-insensitive preferredLabel), 'related'
    (alternative label / abbreviation / related term), 'textMatch'
    (full-text hit) or 'nomatch'.

    NOTE(review): the parameter shadows the builtin `list`; renaming it
    would change the keyword interface, so it is left unchanged.
    """
    result = {}
    for name in list:
        # exact (case-insensitive) preferred-label match first
        regx = re.compile("^%s$" % name, re.IGNORECASE)
        match = topCollection.find_one({'preferredLabel': regx}, {'preferredLabel': 1})
        if match is not None:
            result[name] = {
                'match' : match,
                'type' : 'match'
            }
            continue
        # then any related field match
        match = topCollection.find_one(
            {
                '$or' : [
                    {'alternativeLabel' : regx},
                    {'abbreviation': regx},
                    {'relatedTerm': regx}
                ]
            },
            projection
        )
        if match is not None:
            result[name] = {
                'match' : match,
                'type' : 'related'
            }
            continue
        # finally a full-text search
        # NOTE(review): cursor.count() was removed in pymongo 4.x --
        # this assumes an older driver; verify before upgrading
        count = topCollection.find(
            {
                '$text': {
                    '$search' : name
                },
            },
            {'_id':1}
        ).count()
        if count > 0:
            result[name] = {
                'type' : 'textMatch'
            }
            continue
        result[name] = {
            'type' : 'nomatch'
        }
    return result
# get a list of top names that match the given name
def suggest(name):
    """Suggest 'top' vocabulary terms for a given attribute name.

    Tries, in order: an exact (case-insensitive) preferredLabel match, a
    related-field match (alternative label / abbreviation / related
    term), then a full-text search limited to the 30 most relevant
    terms.  Returns {'type': ..., 'result': [terms]}.
    """
    result = []
    regx = re.compile("^%s$" % name, re.IGNORECASE)
    # exact preferred-label match
    match = topCollection.find_one({'preferredLabel': regx}, projection)
    if match is not None:
        return {
            'type' : 'match',
            'result' : [match]
        }
    # related-field matches
    cur = topCollection.find(
        {
            '$or' : [
                {'alternativeLabel' : regx},
                {'abbreviation': regx},
                {'relatedTerm': regx}
            ]
        },
        projection
    )
    for doc in cur:
        result.append(doc)
    if len(result) > 0:
        return {
            'type' : 'related',
            'result' : result
        }
    # fall back to full-text search, best 30 by relevance score
    cur = topCollection.find(
        {
            '$text': {
                '$search' : name
            },
        },
        searchProjection
    ).sort( [('score', { '$meta': "textScore" })] ).limit(30)
    for doc in cur:
        result.append(doc)
    return {
        'type' : 'textMatch',
        'result' : result
    }
|
{"/ckanext/ecosis/lib/data_package_importer.py": ["/ckanext/ecosis/controller/resource/__init__.py", "/ckanext/ecosis/lib/aws.py"], "/ckanext/ecosis/datastore/vocab/controlled.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/push/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/lookup.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py", "/ckanext/ecosis/controller/admin/upgrade.py"], "/ckanext/ecosis/controller/user/__init__.py": ["/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py"], "/ckanext/ecosis/datastore/files/excel.py": ["/ckanext/ecosis/datastore/files/__init__.py"], "/ckanext/ecosis/datastore/workspace/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/files/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/delete/__init__.py": ["/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/mapreduce/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/upgrade.py": ["/ckanext/ecosis/datastore/ckan/package.py", "/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/resource/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/delete/__init__.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/controller/package/doi.py", 
"/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/controller/organization/__init__.py": ["/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/spectra/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/user_data/paster.py": ["/ckanext/ecosis/user_data/model.py"], "/ckanext/ecosis/controller/package/doi.py": ["/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/plugin.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/controller/organization/__init__.py", "/ckanext/ecosis/controller/package/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/controller/__init__.py", "/ckanext/ecosis/user_data/paster.py"], "/ckanext/ecosis/controller/workspace/__init__.py": ["/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/datastore/query/workspace.py", "/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/datastore/files/utils.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/controller/package/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/lib/data_package_importer.py"], "/ckanext/ecosis/controller/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/workspace.py": 
["/ckanext/ecosis/datastore/ckan/__init__.py"]}
|
7,037
|
CSTARS/ckanext-ecosis
|
refs/heads/master
|
/ckanext/ecosis/datastore/delete/__init__.py
|
import os, shutil
from ckanext.ecosis.datastore.mongo import get_package_spectra_collection
collections = None
workspaceDir = None
'''
Helper methods for removing packages from the EcoSIS extension. There are two groups
of MongoDB collections that are maintained: the workspace collections, used by the CKAN
extension, and the search collections, used by the search application. Finally, the
extension stores files on disk while a workspace is 'open'; these files need
to be cleaned up as well.
'''
# inject global dependencies
def init(mongoCollections, workspaceDirectory):
    """Inject the MongoDB collection map and workspace directory this module uses."""
    global collections, workspaceDir
    workspaceDir = workspaceDirectory
    collections = mongoCollections
# remove a package from EcoSIS
# Note: this does not handle removing package for CKAN, the EcoSIS extension
def package(package_id):
    """Remove every trace of a dataset from the EcoSIS extension.

    Clears the package/spectra/resource workspace collections, the public
    search collections, and any cached files on disk for this package.
    Note: this does not remove the package from CKAN itself.
    """
    # drop the package's workspace document
    collections.get('package').remove({"packageId": package_id})
    # drop every spectra row parsed for this package
    get_package_spectra_collection(package_id).remove({})
    # drop the resource workspace documents
    collections.get('resource').remove({"packageId": package_id})
    # pull the package out of the public search collections as well
    cleanFromSearch(package_id)
    # clear the filesystem workspace: this is where cache files live
    # (e.g. Excel sheets converted to CSV files)
    workspacePath = os.path.join(workspaceDir, package_id)
    if os.path.exists(workspacePath):
        shutil.rmtree(workspacePath)
# The really important bit, remove package for primary MongoDB search collections
# this will pull all references of package from ecosis.org
def cleanFromSearch(package_id):
    """Remove a package from the primary MongoDB search collections.

    This pulls all references to the package from the search application.
    """
    # package-level search document (keyed by package id)
    collections.get('search_package').remove({"_id": package_id})
    # every spectra search document belonging to the package
    collections.get('search_spectra').remove({"ecosis.package_id": package_id})
# remove a resource from Mongo collections as well as disk
# Note: this does not handle removing resource for CKAN, the EcoSIS extension
def resource(package_id, resource_id):
    """Remove a single resource (and any zip children) from Mongo and disk.

    Note: this does not remove the resource from CKAN itself, only from the
    EcoSIS extension's collections and filesystem workspace.
    """
    spectraCollection = get_package_spectra_collection(package_id)
    resourceCollection = collections.get('resource')
    # spectra and workspace documents for the resource itself
    spectraCollection.remove({"resourceId": resource_id})
    resourceCollection.remove({"resourceId": resource_id})
    # if this was a zip file, remove all of its child resources as well
    for childResource in resourceCollection.find({"zip.resourceId": resource_id}):
        spectraCollection.remove({"resourceId": childResource.get('resourceId')})
    resourceCollection.remove({"zip.resourceId": resource_id})
    # finally clear any cached files from the filesystem workspace
    cachePath = os.path.join(workspaceDir, package_id, resource_id)
    if os.path.exists(cachePath):
        shutil.rmtree(cachePath)
# when a excel sheet is processed, keep track of it's sheet id's
# this will remove all spectra sheets that are not in this list
def removeDeletedExcelSheets(resource_id, current_sheet_id_list):
    """Remove workspace/spectra entries for Excel sheets that no longer exist.

    When an Excel resource is (re)processed we know the full list of its
    current sheet ids; anything stored under this resource with a sheetId
    outside that list is stale and gets removed.

    Parameters
    ----------
    resource_id : str
        CKAN resource id of the Excel file.
    current_sheet_id_list : list
        Sheet ids that still exist; entries with other sheet ids are removed.
    """
    # None identifies entries with no sheet id (non-sheet data); always keep those
    if None not in current_sheet_id_list:
        current_sheet_id_list.append(None)
    # BUG FIX: the original referenced an undefined name `package_id` below,
    # which raised NameError at runtime. Resolve the owning package from the
    # resource workspace document *before* anything is deleted.
    resourceInfo = collections.get('resource').find_one({'resourceId': resource_id})
    package_id = resourceInfo.get('packageId') if resourceInfo is not None else None
    collections.get('resource').remove({
        'resourceId' : resource_id,
        'sheetId' : {
            '$nin' : current_sheet_id_list
        }
    })
    if package_id is not None:
        get_package_spectra_collection(package_id).remove({
            'resourceId' : resource_id,
            'sheetId' : {
                '$nin' : current_sheet_id_list
            }
        })
def resources(package_id, resources):
    """Remove each resource in *resources* from the given package (see resource())."""
    for rid in resources:
        resource(package_id, rid)
|
{"/ckanext/ecosis/lib/data_package_importer.py": ["/ckanext/ecosis/controller/resource/__init__.py", "/ckanext/ecosis/lib/aws.py"], "/ckanext/ecosis/datastore/vocab/controlled.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/push/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/lookup.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py", "/ckanext/ecosis/controller/admin/upgrade.py"], "/ckanext/ecosis/controller/user/__init__.py": ["/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py"], "/ckanext/ecosis/datastore/files/excel.py": ["/ckanext/ecosis/datastore/files/__init__.py"], "/ckanext/ecosis/datastore/workspace/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/files/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/delete/__init__.py": ["/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/mapreduce/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/upgrade.py": ["/ckanext/ecosis/datastore/ckan/package.py", "/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/resource/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/delete/__init__.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/controller/package/doi.py", 
"/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/controller/organization/__init__.py": ["/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/spectra/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/user_data/paster.py": ["/ckanext/ecosis/user_data/model.py"], "/ckanext/ecosis/controller/package/doi.py": ["/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/plugin.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/controller/organization/__init__.py", "/ckanext/ecosis/controller/package/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/controller/__init__.py", "/ckanext/ecosis/user_data/paster.py"], "/ckanext/ecosis/controller/workspace/__init__.py": ["/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/datastore/query/workspace.py", "/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/datastore/files/utils.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/controller/package/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/lib/data_package_importer.py"], "/ckanext/ecosis/controller/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/workspace.py": 
["/ckanext/ecosis/datastore/ckan/__init__.py"]}
|
7,038
|
CSTARS/ckanext-ecosis
|
refs/heads/master
|
/ckanext/ecosis/datastore/mapreduce/__init__.py
|
import os, json, re, traceback
import datetime
from bson.code import Code
from bson.son import SON
from ckan.common import config
from . import lookup
import dateutil.parser as dateparser
from ckanext.ecosis.datastore import query
from ckanext.ecosis.lib.utils import getPackageExtra
path = os.path.dirname(os.path.abspath(__file__))
# read in mapreduce strings. These javascript files are (obviously) stored locally
# (loaded once at import time; bson.Code wraps them for MongoDB's map_reduce)
f = open('%s/../mapreduce/map.js' % path, 'r')
mapJs = Code(f.read())
f.close()
f = open('%s/../mapreduce/reduce.js' % path, 'r')
reduceJs = Code(f.read())
f.close()
f = open('%s/../mapreduce/finalize.js' % path, 'r')
finalizeJs = Code(f.read())
f.close()
# injected via init(): MongoDB collection map and the EcoSIS attribute schema
collections = None
schema = None
# map from CKAN attribute name to EcoSIS
schemaMap = {
    'Keywords' : 'tags',
    'Author' : 'author',
    'Author Email' : 'author_email',
    'Maintainer' : 'maintainer',
    'Maintainer Email' : 'maintainer_email'
}
# initialized in init()
mapReduceAttribute = []
# inject global dependencies
def init(mongoCollections, jsonSchema):
    """Inject global dependencies and derive the mapreduce attribute list.

    Parameters
    ----------
    mongoCollections : dict-like
        Map of MongoDB collections used by this module.
    jsonSchema : dict
        EcoSIS attribute schema: {category: [attribute item, ...]}.
    """
    global collections, schema, mapReduceAttribute
    collections = mongoCollections
    schema = jsonSchema
    lookup.init(collections)
    # BUG FIX: rebuild (rather than append to) the attribute list so calling
    # init() more than once does not duplicate entries.
    # Latitude/Longitude/geojson are handled separately and excluded here.
    skip = ('Latitude', 'Longitude', 'geojson')
    mapReduceAttribute = [
        item.get('name')
        for arr in schema.values()
        for item in arr
        if item.get('name') not in skip
    ]
# pkg should be a ckan pkg
# collection should be the search collection
def mapreducePackage(ckanPackage, bboxInfo):
    """Run the spectra map-reduce for one CKAN package and refresh its search doc.

    ckanPackage should be a CKAN package dict; mapreduce output is merged into
    the configured search collection, then updateEcosisNs() post-processes it.
    """
    # expose the attribute list to the JS map/reduce functions
    scope = {"mapReduceAttribute": mapReduceAttribute}
    spectra = collections.get("search_spectra")
    # run the MongoDB mapreduce, merging results into the search collection
    spectra.map_reduce(
        mapJs, reduceJs,
        finalize=finalizeJs,
        scope=scope,
        out=SON([("merge", config.get("ecosis.mongo.search_collection"))]),
        query={"ecosis.package_id": ckanPackage['id']}
    )
    # count the spectra belonging to this package
    spectra_count = spectra.find({"ecosis.package_id": ckanPackage['id']}).count()
    # now that we have our mapreduce spectra collection, process it
    updateEcosisNs(ckanPackage, spectra_count, bboxInfo)
# process dataset after mapreduce. add the ecosis namespace with additional ecosis/ckan information
def updateEcosisNs(pkg, spectra_count, bboxInfo):
    """Post-process a dataset after mapreduce: attach the 'ecosis' namespace.

    Builds the ecosis metadata object (organization, groups, resources,
    spectra schema, keywords, geojson, sort info, ...) from the CKAN package
    dict and merges it into the package's search-collection document.

    Parameters
    ----------
    pkg : dict
        CKAN package dict.
    spectra_count : int
        Number of spectra found for this package after the mapreduce.
    bboxInfo : dict
        Bounding-box accumulator ('use', 'minlat', 'maxlat', 'minlng',
        'maxlng') collected while parsing spectra.
    """
    # get the package workspace object, contains config.
    # NOTE: renamed from `config` (original), which shadowed the
    # ckan.common.config import; the value is currently unused past the
    # None check below.
    pkgWorkspace = collections.get("package").find_one({"packageId": pkg.get("id")})
    if pkgWorkspace is None:
        pkgWorkspace = {}
    collection = collections.get('search_package')
    # optional sort configuration stored as a JSON package extra
    sort = getPackageExtra("sort", pkg)
    if sort is None:
        sort = {}
    else:
        sort = json.loads(sort)
    # store these as dates; parsing is best-effort
    created = None
    modified = None
    try:
        created = dateparser.parse(pkg.get("metadata_created"))
    except Exception:
        pass
    try:
        modified = dateparser.parse(pkg.get("metadata_modified"))
    except Exception:
        pass
    # default ecosis information we are going to add to the package
    ecosis = {
        # TODO: change to ISO string, check this doesn't break 'updated since last push check'
        "pushed" : datetime.datetime.utcnow(),
        "organization" : "",
        "organization_id" : "",
        "description" : pkg.get('notes'),
        "groups" : [],
        "package_id" : pkg.get("id"),
        "package_name" : pkg.get("name"),
        "package_title" : pkg.get("title"),
        "created" : created,
        "modified" : modified,
        "version" : pkg.get("version"),
        "license" : pkg.get("license_title"),
        "spectra_count" : spectra_count,
        "spectra_metadata_schema" : {
            "wavelengths" : [],
            "metadata" : [],
            "units" : {},
            "aliases" : None,
        },
        "resources" : [],
        "linked_data" : [],
        "geojson" : None,
        "sort_on" : sort.get("on"),
        "sort_type": sort.get("type"),
        "sort_description" : sort.get("description")
    }
    # append the units
    units = query.allUnits(pkg.get("id"))
    if units is not None:
        ecosis["spectra_metadata_schema"]["units"] = units
    # append the linked data
    linkeddata = getPackageExtra('LinkedData', pkg)
    if linkeddata is not None:
        ecosis["linked_data"] = json.loads(linkeddata)
    # append the EcoSIS DOI
    doi = getPackageExtra('EcoSIS DOI', pkg)
    if doi is not None:
        ecosis["doi"] = doi
    # append the list of active resources
    for item in pkg['resources']:
        if item.get("state") != "active":
            continue
        ecosis["resources"].append({
            "type" : item.get('url_type'),
            "mimetype" : item.get("mimetype"),
            "name" : item.get("name"),
            "url" : item.get("url")
        })
    # append the list of keywords
    keywords = []
    for item in pkg['tags']:
        keywords.append(item['display_name'])
    # add metadata aliases
    aliases = getPackageExtra('aliases', pkg)
    if aliases is not None:
        try:
            ecosis["spectra_metadata_schema"]["aliases"] = json.loads(aliases)
            # map units for aliases.
            # BUG FIX: was dict.iteritems(), which does not exist in Python 3
            # and silently fell into the except handler below, so the alias
            # unit mapping never happened.
            for key, value in ecosis["spectra_metadata_schema"]["aliases"].items():
                unit = ecosis["spectra_metadata_schema"]["units"].get(value)
                if unit is not None:
                    ecosis["spectra_metadata_schema"]["units"][key] = unit
        except Exception as e:
            print(e)
            traceback.print_exc()
    # append the data groups
    for item in pkg['groups']:
        ecosis["groups"].append(item['display_name'])
    # append the organization, when one is set
    if 'organization' in pkg and pkg['organization'] is not None:
        ecosis["organization"] = pkg['organization']['title']
        ecosis["organization_id"] = pkg['organization']['id']
    else:
        ecosis['organization'] = 'None'
    # make sure the map reduce did not create a null collection, if so, remove.
    # This means there is no spectra.
    item = collection.find_one({'_id': pkg['id']})
    if item is None:
        pass
    elif item.get('value') is None:
        collection.remove({'_id': pkg['id']})
    else:
        # (the original re-fetched `item` here; that redundant query was removed)
        setValues = {'$set' : { 'value.ecosis': ecosis }, '$unset' : {}}
        # grab the mapreduce value
        mrValue = item.get('value')
        # process ecosis schema: bubble attributes from mapreduce
        names = []
        for category, items in schema.items():
            for item in items:
                name = item.get('name')
                input = item.get('input')
                # ignore these attributes, they should not be processed.
                # TODO: make this a global list
                if name == 'Latitude' or name == 'Longitude' or name == 'geojson' or name == 'NASA GCMD Keywords':
                    continue
                # processAttribute does all sorts-o-stuff, see function definition below
                processAttribute(name, input, pkg, mrValue, setValues, keywords)
                names.append(name)
                if item.get('allowOther') == True:
                    processAttribute(name+" Other", "split-text", pkg, mrValue, setValues, keywords)
                    names.append(name+" Other")
        # set the known data attributes.
        # The mapreduce function created these objects, storing all known
        # wavelengths and metadata attributes; transform those dicts into
        # arrays (only the keys matter). MongoDB does not allow '.' in
        # attribute names, so names were stored with commas instead;
        # transpose ',' back to '.'.
        for key in mrValue['tmp__schema__']['wavelengths']:
            ecosis['spectra_metadata_schema']['wavelengths'].append(re.sub(r',', '.', key))
        for key in mrValue['tmp__schema__']['metadata']:
            ecosis['spectra_metadata_schema']['metadata'].append(re.sub(r',', '.', key))
        # tell MongoDB to remove the object storing the schema info processed above
        setValues['$unset']['value.tmp__schema__'] = ''
        # append the gcmd keywords
        gcmd = getPackageExtra('NASA GCMD Keywords', pkg)
        if gcmd is not None and gcmd != '':
            arr = json.loads(gcmd)
            setValues['$set']['value.ecosis']['nasa_gcmd_keywords'] = arr
            keywords = []
            # create unique array of all gcmd keywords to be searched on
            for item in arr:
                parts = item.get('label').encode('ascii', 'ignore').decode('utf-8').split('>')
                for key in parts:
                    if key not in keywords:
                        keywords.append(key)
            setValues['$set']['value.NASA GCMD Keywords'] = keywords
        # finally, let's handle geojson
        geojson = processGeoJson(bboxInfo, pkg)
        if len(geojson.get('geometries')) == 0:
            setValues['$set']['value.ecosis']['geojson'] = None
        else:
            setValues['$set']['value.ecosis']['geojson'] = geojson
        # really, finally, update the collection with the 'setValues' dict we built
        collection.update(
            {'_id': pkg['id']},
            setValues
        )
# handle the various ways we are given a bounding box
def processGeoJson(bboxInfo, pkg):
    """Build a GeometryCollection from spectra bbox info plus any package geojson.

    Handles the various ways a bounding box can be provided: computed from
    the spectra themselves (bboxInfo) and/or stored as a 'geojson' package
    extra.
    """
    geometries = []
    # bounding box derived from the spectra, when one was found
    if bboxInfo['use']:
        ring = [
            [bboxInfo["maxlng"], bboxInfo["maxlat"]],
            [bboxInfo["minlng"], bboxInfo["maxlat"]],
            [bboxInfo["minlng"], bboxInfo["minlat"]],
            [bboxInfo["maxlng"], bboxInfo["minlat"]],
            [bboxInfo["maxlng"], bboxInfo["maxlat"]]
        ]
        geometries.append({
            "type": "Polygon",
            "coordinates": [ring]
        })
    # user-supplied geojson from the package extras (best-effort parse)
    geojson = getPackageExtra("geojson", pkg)
    if geojson is not None:
        try:
            # TODO: add checks for valid geojson
            geometries.append(json.loads(geojson))
        except Exception:
            pass
    return {
        "type": "GeometryCollection",
        "geometries": geometries
    }
# make sure value is not none, strip string and set to lower case
def cleanValue(value):
    """Normalize a metadata value: None becomes "", otherwise lower-case and strip."""
    return "" if value is None else value.lower().strip()
# all sorts of magics here.
def processAttribute(name, input, pkg, mrValue, setValues, keywords):
    """Resolve one schema attribute's value(s) and stage them in setValues.

    Values come from (in priority order) the keywords list, a mapped CKAN
    field (schemaMap), or the package extras. Dataset values are merged with
    any spectra-provided values from the mapreduce output, cleaned, and
    stored under 'value.<name>'.
    """
    # locate the raw value for this attribute
    if name == 'Keywords':
        # special case: keyword list was assembled by the caller
        val = keywords
    elif schemaMap.get(name) is not None:
        # alias set: read a first-class CKAN field
        val = pkg.get(schemaMap.get(name))
    else:
        # otherwise just use the provided attribute name
        val = getPackageExtra(name, pkg)
    # nothing to process, do nothing
    if val is None or val == '':
        return
    # normalize to a list of values; 'controlled' / 'split-text' attributes
    # (and Author) are comma-separated strings
    if name == 'Keywords':
        pass  # already a list
    elif input == "controlled" or input == "split-text" or name == 'Author':
        val = val.split(",")
    else:
        # everything is stored as an array, easier to handle on the other end
        val = [val]
    # Merge with spectra-provided values. The mapreduce folded spectra
    # attributes into mrValue; if the name appears there, the spectra
    # supplied it and the dataset values get merged in.
    spValues = mrValue.get(name)
    if spValues is not None:
        for v in val:
            if v not in spValues:
                spValues.append(v)
        val = spValues
    # finally, clean all values (strip and lower-case), except raw text fields
    if name != 'geojson' and name != 'Citation':
        # TODO: fix this
        val = [cleanValue(v) for v in val]
    setValues['$set']['value.'+name] = val
|
{"/ckanext/ecosis/lib/data_package_importer.py": ["/ckanext/ecosis/controller/resource/__init__.py", "/ckanext/ecosis/lib/aws.py"], "/ckanext/ecosis/datastore/vocab/controlled.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/push/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/lookup.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py", "/ckanext/ecosis/controller/admin/upgrade.py"], "/ckanext/ecosis/controller/user/__init__.py": ["/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py"], "/ckanext/ecosis/datastore/files/excel.py": ["/ckanext/ecosis/datastore/files/__init__.py"], "/ckanext/ecosis/datastore/workspace/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/files/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/delete/__init__.py": ["/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/mapreduce/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/upgrade.py": ["/ckanext/ecosis/datastore/ckan/package.py", "/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/resource/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/delete/__init__.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/controller/package/doi.py", 
"/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/controller/organization/__init__.py": ["/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/spectra/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/user_data/paster.py": ["/ckanext/ecosis/user_data/model.py"], "/ckanext/ecosis/controller/package/doi.py": ["/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/plugin.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/controller/organization/__init__.py", "/ckanext/ecosis/controller/package/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/controller/__init__.py", "/ckanext/ecosis/user_data/paster.py"], "/ckanext/ecosis/controller/workspace/__init__.py": ["/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/datastore/query/workspace.py", "/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/datastore/files/utils.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/controller/package/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/lib/data_package_importer.py"], "/ckanext/ecosis/controller/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/workspace.py": 
["/ckanext/ecosis/datastore/ckan/__init__.py"]}
|
7,039
|
CSTARS/ckanext-ecosis
|
refs/heads/master
|
/ckanext/ecosis/datastore/ckan/package.py
|
import psycopg2.extras, json
connStr = None
schema = None
# inject global dependencies
def init(pgConn, s):
    """Inject the PostgreSQL connection string and the EcoSIS attribute schema."""
    global connStr, schema
    schema = s
    connStr = pgConn
# helper for returning EcoSIS attribute schema
def getSchema():
    """Return the injected EcoSIS attribute schema (see init())."""
    return schema
def getPgConn():
    """Open and return a new PostgreSQL connection using the injected connStr."""
    return psycopg2.connect(connStr)
# get a dataset by id
def get(package_id):
    """Fetch a dataset by id directly from the CKAN PostgreSQL database.

    Returns the package row augmented with 'extras' (non-deleted package
    extras), 'organization' (when an owner org is set) and 'tags' (active
    keywords).

    Raises
    ------
    Exception
        If the package id does not exist.
    """
    conn = psycopg2.connect(connStr)
    # BUG FIX: the original leaked the connection when the package id was
    # invalid (raise before close); ensure it is always released.
    try:
        # query pg for dataset
        cur = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
        cur.execute("select * from package where id = %s", (package_id,))
        package = cur.fetchall()
        cur.close()
        if len(package) == 0:
            raise Exception('Invalid package ID')
        package = package[0]
        # grab all extra fields where status is not deleted
        package['extras'] = {}
        cur = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
        cur.execute("select * from package_extra where package_id = %s and state != 'deleted'", (package_id,))
        for row in cur.fetchall():
            package['extras'][row['key']] = row['value']
        cur.close()
        # append organization information
        ownerOrg = package.get('owner_org')
        if ownerOrg is not None and ownerOrg != "":
            cur = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
            cur.execute("select * from public.group where id = %s", (ownerOrg,))
            rows = cur.fetchall()
            cur.close()
            if len(rows) > 0:
                package['organization'] = rows[0]
        # append tag (keywords) information
        package['tags'] = []
        cur = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
        cur.execute("select t.name from tag t, package_tag pt where t.id = pt.tag_id and pt.package_id = %s and pt.state = 'active'", (package_id,))
        rows = cur.fetchall()
        cur.close()
        for row in rows:
            package['tags'].append(row.get('name'))
    finally:
        conn.close()
    return package
# query package by EcoSIS DOI status. Uses pg-json functionality
# TODO: build index on doi status -> value field
def doiQuery(status="", query="", limit=10, offset=0):
    """Query packages by EcoSIS DOI status. Uses pg-json functionality.

    Parameters
    ----------
    status : str
        Comma-separated status values; defaults to "Pending Approval".
    query : str
        Case-insensitive substring match on the package title.
    limit, offset : int
        Paging controls; None falls back to 10 / 0.

    TODO: build index on doi status -> value field
    """
    # normalize arguments (callers may pass None straight through)
    if status == "" or status is None:
        status = "Pending Approval"
    if query is None:
        query = ""
    if limit is None:
        limit = 10
    if offset is None:
        offset = 0
    query = "%%%s%%" % query.lower()
    conn = psycopg2.connect(connStr)
    # BUG FIX: the original never closed the connection
    try:
        cur = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
        cur.execute(
            ("select p.title as title, p.id, pe.value as status, pev.value as doi "
             "from package_extra pe join package p on pe.package_id = p.id "
             "left join package_extra pev on pev.package_id = p.id and pev.key = 'EcoSIS DOI' "
             "where pe.key = 'EcoSIS DOI Status' and pe.state != 'deleted' "
             "and pe.value::json->>'value' = ANY(%s) and lower(p.title) like %s order by title limit %s offset %s;"),
            (status.split(','), query, limit, offset)
        )
        packages = cur.fetchall()
        cur.close()
    finally:
        conn.close()
    # the status column is stored as JSON text; decode for the caller
    for package in packages:
        if package.get('status') is not None:
            package['status'] = json.loads(package['status'])
    return packages
|
{"/ckanext/ecosis/lib/data_package_importer.py": ["/ckanext/ecosis/controller/resource/__init__.py", "/ckanext/ecosis/lib/aws.py"], "/ckanext/ecosis/datastore/vocab/controlled.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/push/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/lookup.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py", "/ckanext/ecosis/controller/admin/upgrade.py"], "/ckanext/ecosis/controller/user/__init__.py": ["/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py"], "/ckanext/ecosis/datastore/files/excel.py": ["/ckanext/ecosis/datastore/files/__init__.py"], "/ckanext/ecosis/datastore/workspace/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/files/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/delete/__init__.py": ["/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/mapreduce/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/upgrade.py": ["/ckanext/ecosis/datastore/ckan/package.py", "/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/resource/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/delete/__init__.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/controller/package/doi.py", 
"/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/controller/organization/__init__.py": ["/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/spectra/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/user_data/paster.py": ["/ckanext/ecosis/user_data/model.py"], "/ckanext/ecosis/controller/package/doi.py": ["/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/plugin.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/controller/organization/__init__.py", "/ckanext/ecosis/controller/package/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/controller/__init__.py", "/ckanext/ecosis/user_data/paster.py"], "/ckanext/ecosis/controller/workspace/__init__.py": ["/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/datastore/query/workspace.py", "/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/datastore/files/utils.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/controller/package/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/lib/data_package_importer.py"], "/ckanext/ecosis/controller/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/workspace.py": 
["/ckanext/ecosis/datastore/ckan/__init__.py"]}
|
7,040
|
CSTARS/ckanext-ecosis
|
refs/heads/master
|
/ckanext/ecosis/controller/admin/upgrade.py
|
import json
import re

import psycopg2
import psycopg2.extras

from ckanext.ecosis.datastore.ckan.package import getPgConn
from ckanext.ecosis.datastore import getCollections
# append the aliases to the search results
def run():
    """Append stored metadata aliases to each package's search document.

    Reads the 'aliases' package extra for every active package from
    PostgreSQL and writes it into the Mongo search collection; returns the
    package rows annotated with the update results.
    """
    conn = getPgConn()
    # BUG FIX: the original leaked the PostgreSQL connection
    try:
        cur = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
        cur.execute(("select name, p.id, pe.value as aliases from package p join package_extra pe on "
                     "p.id = pe.package_id where pe.key = 'aliases' and pe.state = 'active'"))
        packages = cur.fetchall()
        cur.close()
    finally:
        conn.close()
    collection = getCollections().get('search_package')
    for pkg in packages:
        aliases = json.loads(pkg.get('aliases'))
        pkg['result'] = collection.update(
            {'_id': pkg.get('id')},
            {
                '$set' : {
                    'value.ecosis.spectra_metadata_schema.aliases' : aliases
                }
            }
        )
    return packages
# Fix unit bug
def fixUnits():
    """One-off migration: rebuild each package's unit map in the search index.

    For every package, gathers attribute units from the resource workspace
    sheets and writes the consolidated map into the search document; returns
    the package rows annotated with the update results.
    """
    conn = getPgConn()
    # BUG FIX: the original leaked the PostgreSQL connection
    try:
        cur = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
        cur.execute(("select id from package"))
        packages = cur.fetchall()
        cur.close()
    finally:
        conn.close()
    collection = getCollections().get('resource')
    searchCollection = getCollections().get('search_package')
    for pkg in packages:
        query = {
            "packageId" : pkg.get('id')
        }
        # query workspace resources for package (drop the heavy fields)
        sheets = collection.find(query, {
            "localRange" : 0,
            "hash" : 0,
            "file" : 0,
            "_id" : 0
        })
        units = {}
        # loop through all sheets, all attributes, collecting non-empty units
        for sheet in sheets:
            if sheet.get('attributes') is not None:
                for attr in sheet.get('attributes'):
                    if attr.get("units") is not None and attr.get("units") != "":
                        units[attr.get("name")] = attr.get("units")
        pkg['result'] = searchCollection.update(
            {'_id': pkg.get('id')},
            {
                '$set' : {
                    'value.ecosis.spectra_metadata_schema.units' : units
                }
            }
        )
    return packages
def fixCitationText():
    """One-off migration: replace 'Ecosystem' with 'Ecological' in citations.

    Rewrites the 'Citation' package extra in PostgreSQL and the cached
    Citation value in the Mongo search documents; returns update counts.
    """
    conn = getPgConn()
    try:
        cur = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
        cur.execute(("select * from package_extra where key = 'Citation';"))
        rows = cur.fetchall()
        cur.close()
        pgCount = 0
        for row in rows:
            citation = re.sub(r'Ecosystem', 'Ecological', row.get('value'), flags=re.I)
            cur = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
            # BUG FIX: the original interpolated the citation into the SQL
            # string with %-formatting, which breaks on apostrophes in the
            # citation text and is SQL-injectable; use a parameterized query.
            cur.execute("update package_extra set value = %s where id = %s;",
                        (citation, row.get('id')))
            conn.commit()
            cur.close()
            pgCount += 1
    finally:
        # BUG FIX: the original never closed the connection
        conn.close()
    mongoCount = 0
    searchCollection = getCollections().get('search_package')
    results = searchCollection.find({'value.Citation': {'$exists': True}}, {'value.Citation': 1})
    for item in results:
        citation = re.sub(r'Ecosystem', 'Ecological', item.get('value').get('Citation')[0], flags=re.I)
        searchCollection.update(
            {'_id': item.get('_id')},
            {
                '$set' : {
                    'value.Citation' : [citation]
                }
            }
        )
        mongoCount += 1
    return {
        'success' : True,
        'stats' : {
            'MongoUpdates' : mongoCount,
            'PGUpdates' : pgCount
        }
    }
|
{"/ckanext/ecosis/lib/data_package_importer.py": ["/ckanext/ecosis/controller/resource/__init__.py", "/ckanext/ecosis/lib/aws.py"], "/ckanext/ecosis/datastore/vocab/controlled.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/push/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/lookup.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py", "/ckanext/ecosis/controller/admin/upgrade.py"], "/ckanext/ecosis/controller/user/__init__.py": ["/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py"], "/ckanext/ecosis/datastore/files/excel.py": ["/ckanext/ecosis/datastore/files/__init__.py"], "/ckanext/ecosis/datastore/workspace/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/files/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/delete/__init__.py": ["/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/mapreduce/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/upgrade.py": ["/ckanext/ecosis/datastore/ckan/package.py", "/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/resource/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/delete/__init__.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/controller/package/doi.py", 
"/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/controller/organization/__init__.py": ["/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/spectra/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/user_data/paster.py": ["/ckanext/ecosis/user_data/model.py"], "/ckanext/ecosis/controller/package/doi.py": ["/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/plugin.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/controller/organization/__init__.py", "/ckanext/ecosis/controller/package/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/controller/__init__.py", "/ckanext/ecosis/user_data/paster.py"], "/ckanext/ecosis/controller/workspace/__init__.py": ["/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/datastore/query/workspace.py", "/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/datastore/files/utils.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/controller/package/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/lib/data_package_importer.py"], "/ckanext/ecosis/controller/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/workspace.py": 
["/ckanext/ecosis/datastore/ckan/__init__.py"]}
|
7,041
|
CSTARS/ckanext-ecosis
|
refs/heads/master
|
/ckanext/ecosis/datastore/mapreduce/lookup.py
|
import os
from bson.code import Code
from bson.son import SON
'''
Create the keywords typeahead collection
Will run the lookup mapreduce js functions in MongoDB
'''
# Read in local js mapreduce files
path = os.path.dirname(os.path.abspath(__file__))

# injected MongoDB collection map (set via init) and re-entrancy guard for update()
collections = None
running = False

# `with` guarantees the file handles are released even if the read fails
with open('%s/../mapreduce/lookup_map.js' % path, 'r') as f:
    mapJs = Code(f.read())
with open('%s/../mapreduce/lookup_reduce.js' % path, 'r') as f:
    reduceJs = Code(f.read())
# inject global dependencies
def init(mongoCollections):
    """Store the injected MongoDB collection map for later use by update()."""
    global collections
    collections = mongoCollections
def update():
    """Rebuild the keyword-lookup collection, unless a rebuild is already in flight.

    A module-level `running` flag acts as a crude re-entrancy guard;
    failures are logged and swallowed (best-effort index build).
    """
    global running
    # check if we are already running
    if running:
        return
    running = True
    try:
        _update()
    except Exception:
        print("Error creating lookup index")
    finally:
        # reset the guard even if something other than Exception propagates,
        # otherwise update() would be locked out forever
        running = False
# update the filter lookup collection
def _update():
    """Run the map/reduce job, replacing the 'lookup' collection wholesale."""
    search = collections.get('search_package')
    try:
        search.map_reduce(mapJs, reduceJs, out=SON([("replace", "lookup")]))
    except Exception:
        # best-effort: a failed rebuild leaves the previous lookup in place
        pass
|
{"/ckanext/ecosis/lib/data_package_importer.py": ["/ckanext/ecosis/controller/resource/__init__.py", "/ckanext/ecosis/lib/aws.py"], "/ckanext/ecosis/datastore/vocab/controlled.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/push/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/lookup.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py", "/ckanext/ecosis/controller/admin/upgrade.py"], "/ckanext/ecosis/controller/user/__init__.py": ["/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py"], "/ckanext/ecosis/datastore/files/excel.py": ["/ckanext/ecosis/datastore/files/__init__.py"], "/ckanext/ecosis/datastore/workspace/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/files/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/delete/__init__.py": ["/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/mapreduce/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/upgrade.py": ["/ckanext/ecosis/datastore/ckan/package.py", "/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/resource/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/delete/__init__.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/controller/package/doi.py", 
"/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/controller/organization/__init__.py": ["/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/spectra/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/user_data/paster.py": ["/ckanext/ecosis/user_data/model.py"], "/ckanext/ecosis/controller/package/doi.py": ["/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/plugin.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/controller/organization/__init__.py", "/ckanext/ecosis/controller/package/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/controller/__init__.py", "/ckanext/ecosis/user_data/paster.py"], "/ckanext/ecosis/controller/workspace/__init__.py": ["/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/datastore/query/workspace.py", "/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/datastore/files/utils.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/controller/package/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/lib/data_package_importer.py"], "/ckanext/ecosis/controller/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/workspace.py": 
["/ckanext/ecosis/datastore/ckan/__init__.py"]}
|
7,042
|
CSTARS/ckanext-ecosis
|
refs/heads/master
|
/ckanext/ecosis/lib/aws.py
|
# from boto3.session import Session
# from ckan.common import config
# ACCESS_KEY = config.get('aws.access_key')
# SECRET_KEY = config.get('aws.secret_key')
# session = Session(
# aws_access_key_id=ACCESS_KEY,
# aws_secret_access_key=SECRET_KEY
# )
# s3 = session.resource('s3')
# boto3 resolves credentials from the environment / instance profile here
# (the explicit access-key Session variant is kept commented above as reference)
import boto3
# module-level S3 client shared by importers (e.g. data_package_importer)
s3 = boto3.client('s3')
|
{"/ckanext/ecosis/lib/data_package_importer.py": ["/ckanext/ecosis/controller/resource/__init__.py", "/ckanext/ecosis/lib/aws.py"], "/ckanext/ecosis/datastore/vocab/controlled.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/push/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/lookup.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py", "/ckanext/ecosis/controller/admin/upgrade.py"], "/ckanext/ecosis/controller/user/__init__.py": ["/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py"], "/ckanext/ecosis/datastore/files/excel.py": ["/ckanext/ecosis/datastore/files/__init__.py"], "/ckanext/ecosis/datastore/workspace/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/files/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/delete/__init__.py": ["/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/mapreduce/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/upgrade.py": ["/ckanext/ecosis/datastore/ckan/package.py", "/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/resource/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/delete/__init__.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/controller/package/doi.py", 
"/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/controller/organization/__init__.py": ["/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/spectra/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/user_data/paster.py": ["/ckanext/ecosis/user_data/model.py"], "/ckanext/ecosis/controller/package/doi.py": ["/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/plugin.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/controller/organization/__init__.py", "/ckanext/ecosis/controller/package/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/controller/__init__.py", "/ckanext/ecosis/user_data/paster.py"], "/ckanext/ecosis/controller/workspace/__init__.py": ["/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/datastore/query/workspace.py", "/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/datastore/files/utils.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/controller/package/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/lib/data_package_importer.py"], "/ckanext/ecosis/controller/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/workspace.py": 
["/ckanext/ecosis/datastore/ckan/__init__.py"]}
|
7,043
|
CSTARS/ckanext-ecosis
|
refs/heads/master
|
/ckanext/ecosis/controller/resource/__init__.py
|
import os, json, re
# from ckan.common import request, response
from ckan.common import request, config
from ckan.lib.base import c, model
import ckan.logic as logic
import ckan.lib.uploader as uploader
import ckanext.ecosis.lib.utils as utils
from ckanext.ecosis.lib.auth import hasAccess
import ckanext.ecosis.datastore.delete as deleteUtil
import ckanext.ecosis.datastore.query as query
import ckanext.ecosis.datastore.workspace as workspace
from ckanext.ecosis.lib.utils import jsonStringify
from ckanext.ecosis.controller.package.doi import hasAppliedDoi
from ckanext.ecosis.datastore.ckan import resource as ckanResourceQuery
from flask import make_response
parseOptions = ["ignore", "layout", "metadata", "joinOn", "separator"]
# delete a resource
# By default, CKAN keeps resources on disk after they are deleted. EcoSIS does not.
def delete():
    """HTTP entry point: delete a single resource identified by the request body."""
    return _delete(utils.get_request_data(request))
# Single HTTP call for deleting multiple resources
def deleteMany():
    """Delete several resources at once; request JSON must contain an 'ids' list.

    Returns the per-resource result dicts from _delete, in order.
    """
    ids = request.get_json().get('ids')
    # `resource_id` instead of `id` — the original shadowed the builtin
    return [_delete({'id': resource_id}) for resource_id in ids]
# Actually delete a resource
def _delete(params):
    """Delete one resource: CKAN action, on-disk upload, and EcoSIS datastore entry.

    Returns `params` with 'success': True, or an error dict when the
    package has an applied DOI (DOI'd packages are immutable).
    """
    context = {'model': model, 'user': c.user}
    resource = logic.get_action('resource_show')(context, params)

    # if package has DOI applied, resources cannot be modified
    if hasAppliedDoi(resource.get('package_id')):
        return {'error': True, 'message': 'Cannot delete resource of package with applied DOI'}

    # this will fire error if user does not have access
    logic.get_action('resource_delete')(context, params)

    # `resource_id` instead of `id` — the original shadowed the builtin
    resource_id = params.get('id')

    # if the resource is a file upload, remove from disk —
    # normally CKAN keeps the file; EcoSIS does not
    if resource.get('url_type') == "upload":
        upload = uploader.ResourceUpload(resource)
        path = upload.get_path(resource['id'])
        if os.path.exists(path):
            os.remove(path)

    # remove resource from EcoSIS
    deleteUtil.resource(resource.get("package_id"), resource_id)

    params['success'] = True
    return params
# create new resource for dataset
def create():
    """Create a CKAN resource from the posted form data; returns a JSON string.

    Refused (error dict) when the target package has an applied DOI.
    """
    # NOTE(review): the original set `response.headers[...]`, but `response` is
    # not imported (the pylons-era `from ckan.common import ... response` line is
    # commented out), so that line raised NameError; likewise `request.POST` is
    # the pylons API — under flask the posted fields live on `request.form`.
    request_data = dict(request.form)

    # if the dataset has a DOI applied, you cannot add new resources
    if hasAppliedDoi(request_data.get('package_id')):
        return {'error': True, 'message': 'Cannot add resources to package with applied DOI'}

    # run the default CKAN create resource logic
    context = {'model': model, 'user': c.user}
    resource_create = logic.get_action('resource_create')
    resp = resource_create(context, request_data)

    return json.dumps({
        'result': resp,
        'success': True
    })
# process a resource
# this will be given a set of options, then parse the measurement data or metadata out of
# the resource based on this options
def process():
    """HTTP entry point: parse a resource with user-supplied options via _process."""
    package_id = request.form.get('package_id')
    hasAccess(package_id)
    if hasAppliedDoi(package_id):
        return {'error':True, 'message':'Cannot edit resource of package with applied DOI'}

    resource_id = request.form.get('resource_id')
    ids = request.form.get('resource_ids')
    # empty string means "no sheet selected"
    sheet_id = request.form.get('sheet_id') or None

    try:
        options = json.loads(request.form.get('options'))
    except:
        options = {}

    return _process(package_id, sheet_id, resource_id, ids, options)
def _process(package_id, sheet_id, resource_id, ids, options):
    """Parse one or many resources with whitelisted options, then poke the package.

    If `ids` (a JSON-encoded list) is given, every listed resource is
    processed and a list of results is returned inside the result dict;
    otherwise the single `resource_id`/`sheet_id` pair is processed.
    The package's 'resourceUpdateCount' extra is incremented so its
    metadata_modified timestamp changes (drives the
    'updated since last pushed' UI).
    """
    # option, if a resource id and a datasheet id are passed, then the full 'merged' view will be return
    # only allow specified options (whitelist against parseOptions)
    safeOptions = {}
    for option in parseOptions:
        if option in options:
            safeOptions[option] = options[option]

    # see if we are editing multiple files or just one
    result = []
    if ids is not None:
        ids = json.loads(ids)
        for resource_id in ids:
            workspace.prepareFile(package_id, resource_id, sheet_id, safeOptions)
            result.append(query.getResource(resource_id))
    else:
        workspace.prepareFile(package_id, resource_id, sheet_id, safeOptions)
        result = query.getResource(resource_id, sheet_id)

    # update the dataset, so the metadata timestamp changes
    context = {'model': model, 'user': c.user}
    pkg = logic.get_action('package_show')(context, {'id': package_id})

    # use this counter to poke the dataset. This will update the last modified timestamps
    # required for 'updated since last pushed UI'
    resourceUpdateCount = utils.getPackageExtra('resourceUpdateCount', pkg)
    if resourceUpdateCount is None:
        resourceUpdateCount = 1
    else:
        resourceUpdateCount = int(resourceUpdateCount) + 1
    utils.setPackageExtra('resourceUpdateCount', resourceUpdateCount, pkg)
    pkg = logic.get_action('package_update')(context, pkg)

    result = {
        'metadata_modified' : pkg.get('metadata_modified'),
        'result' : result
    }
    return result
# get a specific resource
# optional sheet id, it the resource has multiple sheets (excel file)
def get():
    """Return one parsed resource; the optional sheet_id selects an excel sheet."""
    package_id = request.params.get('package_id')
    resource_id = request.params.get('resource_id')
    # empty string means "no sheet selected"
    sheet_id = request.params.get('sheet_id') or None
    hasAccess(package_id)
    return query.getResource(resource_id, sheet_id)
# get a row or column (depending on sheet orientation) of a resource file
# index is the row/column to retrieve
def getMetadataChunk():
    """Return one metadata row/column of a resource, selected by the 'index' param."""
    args = request.params
    sheet_id = args.get('sheet_id') or None
    return query.getMetadataChunk(args.get('package_id'), args.get('resource_id'), sheet_id, _getIndex())
# get overview information for file, like number of rows/columns (chunks)
# also returns number of join rows/columns if metadata resource type
def getMetadataInfo():
    """Return chunk counts (and join counts for metadata resources) of a file."""
    args = request.params
    sheet_id = args.get('sheet_id') or None
    return query.getMetadataInfo(args.get('package_id'), args.get('resource_id'), sheet_id)
# get the number of spectra measurements found in the file
def getSpectraCount():
    """Return the spectra measurement count for a resource (optionally one sheet)."""
    args = request.params
    sheet_id = args.get('sheet_id') or None
    return query.total(args.get('package_id'), args.get('resource_id'), sheet_id)
def getByName(package_id, resource_name):
    """307-redirect to the download URL of a package's resource, looked up by name."""
    resource = ckanResourceQuery.getByName(package_id, resource_name)
    url = resource['url']
    # ckan 2.9 doesn't seem to return full url...
    if not re.match(r'^https?', url, re.I):
        url = "%s/dataset/%s/resource/%s/download/%s" % (config.get('ckan.site_url'), package_id, resource['id'], resource_name)
    return make_response(("Redirecting", 307, {"Location": url}))
# helper for getting index as int
def _getIndex():
    """Return the 'index' request parameter as an int, defaulting to 0."""
    raw = request.params.get('index')
    return 0 if raw is None else int(raw)
|
{"/ckanext/ecosis/lib/data_package_importer.py": ["/ckanext/ecosis/controller/resource/__init__.py", "/ckanext/ecosis/lib/aws.py"], "/ckanext/ecosis/datastore/vocab/controlled.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/push/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/lookup.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py", "/ckanext/ecosis/controller/admin/upgrade.py"], "/ckanext/ecosis/controller/user/__init__.py": ["/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py"], "/ckanext/ecosis/datastore/files/excel.py": ["/ckanext/ecosis/datastore/files/__init__.py"], "/ckanext/ecosis/datastore/workspace/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/files/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/delete/__init__.py": ["/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/mapreduce/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/upgrade.py": ["/ckanext/ecosis/datastore/ckan/package.py", "/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/resource/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/delete/__init__.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/controller/package/doi.py", 
"/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/controller/organization/__init__.py": ["/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/spectra/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/user_data/paster.py": ["/ckanext/ecosis/user_data/model.py"], "/ckanext/ecosis/controller/package/doi.py": ["/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/plugin.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/controller/organization/__init__.py", "/ckanext/ecosis/controller/package/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/controller/__init__.py", "/ckanext/ecosis/user_data/paster.py"], "/ckanext/ecosis/controller/workspace/__init__.py": ["/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/datastore/query/workspace.py", "/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/datastore/files/utils.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/controller/package/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/lib/data_package_importer.py"], "/ckanext/ecosis/controller/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/workspace.py": 
["/ckanext/ecosis/datastore/ckan/__init__.py"]}
|
7,044
|
CSTARS/ckanext-ecosis
|
refs/heads/master
|
/ckanext/ecosis/controller/organization/__init__.py
|
from ckan.lib.base import c, model
from ckanext.ecosis.datastore import delete as deleteUtil
import ckan.logic as logic
from ckan.common import config
# from ckan.common import response
import urllib, jwt, json
# comma-separated list of hosts to notify on organization changes (see notify_remotes)
remote_hosts = config.get('ecosis.remote_hosts', '')
remote_hosts = [x.strip() for x in remote_hosts.split(',')]
# shared secret used to sign the JWT sent to remote hosts
secret = config.get('ecosis.jwt.secret')
NotFound = logic.NotFound
# injected MongoDB collection map (set via init)
collections = None
# inject global dependencies
def init(co):
    """Store the injected MongoDB collection map used by update()."""
    global collections
    collections = co
# delete organization
# when org is deleted, we need to remove all of organizations datasets
def delete(group):
    """Remove all of `group`'s datasets from the EcoSIS datastore and notify remotes.

    NOTE(review): the CKAN-side organization_delete call is commented out below —
    presumably performed by the caller; confirm before relying on this alone.
    """
    # # first, get a list of all organizations datasets
    # group = model.Group.get(id)
    #
    # if group is None:
    #     raise NotFound('Organization was not found.')
    datasets = []
    for pkg in group.packages(with_private=True):
        datasets.append(pkg.id)
    # now perform normal delete
    # this should check auth
    # context = {'model': model, 'user': c.user}
    # logic.get_action('organization_delete')(context, {'id': id})
    # EcoSIS package delete happens here
    for package_id in datasets:
        deleteUtil.package(package_id)
    # notify remote hosts of change (deleted=True)
    notify_remotes(group.id, True)
# update search (MongoDB) org name when organization is updated
def update(org):
    """Sync the organization's display title into every matching search document."""
    search = collections.get('search_package')
    search.update_many(
        {"value.ecosis.organization_id": org.id},
        {"$set": {"value.ecosis.organization": org.title}}
    )
    # notify remote hosts of change
    notify_remotes(org.id, False)
def notify_remotes(organization_id, deleted=False):
    """POST a signed change notification to every configured remote host.

    Best-effort: any failure to reach a remote is silently ignored.
    """
    # stdlib; the module-level `import urllib` does not expose the
    # `urllib.request` submodule on its own
    import urllib.request

    msg = {
        "id": organization_id,
        "deleted": deleted
    }

    # signed token so remotes can verify the notification origin
    token = jwt.encode({"id": organization_id}, secret, algorithm='HS256')
    # urllib.request requires a bytes body
    body = json.dumps(msg).encode('utf-8')

    for url in remote_hosts:
        try:
            # original code used the Python 2 name `urllib2`, which is a
            # NameError on Python 3
            req = urllib.request.Request(url, body, {'Content-Type': 'application/json'})
            req.add_header('x-ecosis-signature', token)
            urllib.request.urlopen(req)
            # ignore response, we don't care
        except Exception:
            pass
|
{"/ckanext/ecosis/lib/data_package_importer.py": ["/ckanext/ecosis/controller/resource/__init__.py", "/ckanext/ecosis/lib/aws.py"], "/ckanext/ecosis/datastore/vocab/controlled.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/push/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/lookup.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py", "/ckanext/ecosis/controller/admin/upgrade.py"], "/ckanext/ecosis/controller/user/__init__.py": ["/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py"], "/ckanext/ecosis/datastore/files/excel.py": ["/ckanext/ecosis/datastore/files/__init__.py"], "/ckanext/ecosis/datastore/workspace/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/files/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/delete/__init__.py": ["/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/mapreduce/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/upgrade.py": ["/ckanext/ecosis/datastore/ckan/package.py", "/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/resource/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/delete/__init__.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/controller/package/doi.py", 
"/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/controller/organization/__init__.py": ["/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/spectra/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/user_data/paster.py": ["/ckanext/ecosis/user_data/model.py"], "/ckanext/ecosis/controller/package/doi.py": ["/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/plugin.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/controller/organization/__init__.py", "/ckanext/ecosis/controller/package/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/controller/__init__.py", "/ckanext/ecosis/user_data/paster.py"], "/ckanext/ecosis/controller/workspace/__init__.py": ["/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/datastore/query/workspace.py", "/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/datastore/files/utils.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/controller/package/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/lib/data_package_importer.py"], "/ckanext/ecosis/controller/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/workspace.py": 
["/ckanext/ecosis/datastore/ckan/__init__.py"]}
|
7,045
|
CSTARS/ckanext-ecosis
|
refs/heads/master
|
/ckanext/ecosis/datastore/utils/mongo.py
|
# mongo driver hacks
# TODO: this needs to go away when the driver sucks less
def count(collection, query):
    """Count documents matching `query`, tolerating pymongo API drift.

    Newer drivers removed Collection.count(); fall back to the cursor form.
    """
    try:
        return collection.count(query)
    except (AttributeError, TypeError):
        # narrowed from a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit
        return collection.find(query).count()
def distinct(collection, key, query):
    """Return the distinct values of `key` among documents matching `query`.

    Tolerates pymongo API drift: falls back to the cursor form when the
    collection-level call is unavailable.
    """
    try:
        return collection.distinct(key, query)
    except (AttributeError, TypeError):
        # narrowed from a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit
        return collection.find(query).distinct(key)
|
{"/ckanext/ecosis/lib/data_package_importer.py": ["/ckanext/ecosis/controller/resource/__init__.py", "/ckanext/ecosis/lib/aws.py"], "/ckanext/ecosis/datastore/vocab/controlled.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/push/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/lookup.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py", "/ckanext/ecosis/controller/admin/upgrade.py"], "/ckanext/ecosis/controller/user/__init__.py": ["/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py"], "/ckanext/ecosis/datastore/files/excel.py": ["/ckanext/ecosis/datastore/files/__init__.py"], "/ckanext/ecosis/datastore/workspace/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/files/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/delete/__init__.py": ["/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/mapreduce/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/upgrade.py": ["/ckanext/ecosis/datastore/ckan/package.py", "/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/resource/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/delete/__init__.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/controller/package/doi.py", 
"/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/controller/organization/__init__.py": ["/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/spectra/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/user_data/paster.py": ["/ckanext/ecosis/user_data/model.py"], "/ckanext/ecosis/controller/package/doi.py": ["/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/plugin.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/controller/organization/__init__.py", "/ckanext/ecosis/controller/package/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/controller/__init__.py", "/ckanext/ecosis/user_data/paster.py"], "/ckanext/ecosis/controller/workspace/__init__.py": ["/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/datastore/query/workspace.py", "/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/datastore/files/utils.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/controller/package/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/lib/data_package_importer.py"], "/ckanext/ecosis/controller/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/workspace.py": 
["/ckanext/ecosis/datastore/ckan/__init__.py"]}
|
7,046
|
CSTARS/ckanext-ecosis
|
refs/heads/master
|
/ckanext/ecosis/datastore/ckan/resource.py
|
import psycopg2.extras, psycopg2
connStr = None
# inject global dependencies
def init(pgConn):
    """Store the injected PostgreSQL connection string for the query helpers."""
    global connStr
    connStr = pgConn
# get a resource without the requirement of an authenticated HTTP request
def get(resource_id):
    """Fetch a resource row by id directly from the CKAN database.

    Raises:
        Exception: if no row matches `resource_id`.
    """
    conn = psycopg2.connect(connStr)
    try:
        cur = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
        cur.execute("select * from resource where id = %s", (resource_id,))
        rows = cur.fetchall()
        cur.close()
    finally:
        # the original leaked the connection on the not-found path
        # (it raised before conn.close())
        conn.close()

    if len(rows) == 0:
        raise Exception('Invalid resource ID')
    return rows[0]
# get a resource by name and package without the requirement of an authenticated HTTP request
def getByName(package_id, resource_name):
    """Fetch an active resource row by (name, package_id), or None if absent."""
    conn = psycopg2.connect(connStr)
    try:
        cur = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
        cur.execute("select * from resource where name = %s and package_id = %s and state = 'active'", (resource_name, package_id))
        rows = cur.fetchall()
        cur.close()
    finally:
        # the original leaked the connection on the not-found path
        # (it returned None before conn.close())
        conn.close()

    if len(rows) == 0:
        return None
    return rows[0]
# get all active resources for a package
def active(package_id):
    """Return all active, uploaded resource rows belonging to `package_id`."""
    conn = psycopg2.connect(connStr)
    try:
        cur = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
        cur.execute("select * from resource where state = 'active' and url_type = 'upload' and package_id = %s", (package_id,))
        results = cur.fetchall()
        cur.close()
    finally:
        # close in finally so a query error cannot leak the connection,
        # matching get()/getByName()
        conn.close()
    return results
|
{"/ckanext/ecosis/lib/data_package_importer.py": ["/ckanext/ecosis/controller/resource/__init__.py", "/ckanext/ecosis/lib/aws.py"], "/ckanext/ecosis/datastore/vocab/controlled.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/push/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/lookup.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py", "/ckanext/ecosis/controller/admin/upgrade.py"], "/ckanext/ecosis/controller/user/__init__.py": ["/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py"], "/ckanext/ecosis/datastore/files/excel.py": ["/ckanext/ecosis/datastore/files/__init__.py"], "/ckanext/ecosis/datastore/workspace/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/files/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/delete/__init__.py": ["/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/mapreduce/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/upgrade.py": ["/ckanext/ecosis/datastore/ckan/package.py", "/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/resource/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/delete/__init__.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/controller/package/doi.py", 
"/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/controller/organization/__init__.py": ["/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/spectra/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/user_data/paster.py": ["/ckanext/ecosis/user_data/model.py"], "/ckanext/ecosis/controller/package/doi.py": ["/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/plugin.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/controller/organization/__init__.py", "/ckanext/ecosis/controller/package/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/controller/__init__.py", "/ckanext/ecosis/user_data/paster.py"], "/ckanext/ecosis/controller/workspace/__init__.py": ["/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/datastore/query/workspace.py", "/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/datastore/files/utils.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/controller/package/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/lib/data_package_importer.py"], "/ckanext/ecosis/controller/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/workspace.py": 
["/ckanext/ecosis/datastore/ckan/__init__.py"]}
|
7,047
|
CSTARS/ckanext-ecosis
|
refs/heads/master
|
/ckanext/ecosis/controller/spectra/__init__.py
|
import json
from ckan.common import request
import ckanext.ecosis.datastore.query as query
from ckanext.ecosis.datastore.vocab import top
from ckanext.ecosis.datastore.vocab import gcmd
# get a spectra measurement with joined metadata
def get():
    """Query one spectra measurement (with process info) using the
    package_id/resource_id/sheet_id request parameters.
    """
    pkg = request.params.get('package_id')
    res = request.params.get('resource_id')
    sheet = request.params.get('sheet_id')
    return query.get(pkg, res, sheet, _getIndex(), showProcessInfo=True)
# get TOP suggestions for given attribute name
def suggestAttributeName():
    """Return TOP vocabulary suggestions for the 'name' request param
    (empty string when the param is absent).
    """
    name = request.params.get('name')
    return top.suggest(name if name is not None else "")
# for a list of attributes of a spectra, returns attributes which might
# have TOP suggestions
def suggestOverview():
    """Read a JSON-encoded attribute-name list from the request (form body
    first, then query params) and return TOP's overview for it.

    Raises:
        Exception: if no 'names' value was supplied either way.
    """
    names = request.form.get('names')
    if names is None:
        names = request.params.get('names')
    if names is None:
        raise Exception('Name list not provided')
    return top.overview(json.loads(names))
# Query NASA GCDM vocab
def suggestGCMD():
    """Suggest GCMD keywords for the 'query' request param.

    The local is named `text` (not `query`) to avoid shadowing the
    imported datastore `query` module.
    """
    text = request.params.get('query')
    return gcmd.suggest(text if text is not None else "")
def _getIndex():
    """Return the 'index' request param as an int, defaulting to 0."""
    raw = request.params.get('index')
    return 0 if raw is None else int(raw)
|
{"/ckanext/ecosis/lib/data_package_importer.py": ["/ckanext/ecosis/controller/resource/__init__.py", "/ckanext/ecosis/lib/aws.py"], "/ckanext/ecosis/datastore/vocab/controlled.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/push/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/lookup.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py", "/ckanext/ecosis/controller/admin/upgrade.py"], "/ckanext/ecosis/controller/user/__init__.py": ["/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py"], "/ckanext/ecosis/datastore/files/excel.py": ["/ckanext/ecosis/datastore/files/__init__.py"], "/ckanext/ecosis/datastore/workspace/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/files/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/delete/__init__.py": ["/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/mapreduce/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/upgrade.py": ["/ckanext/ecosis/datastore/ckan/package.py", "/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/resource/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/delete/__init__.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/controller/package/doi.py", 
"/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/controller/organization/__init__.py": ["/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/spectra/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/user_data/paster.py": ["/ckanext/ecosis/user_data/model.py"], "/ckanext/ecosis/controller/package/doi.py": ["/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/plugin.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/controller/organization/__init__.py", "/ckanext/ecosis/controller/package/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/controller/__init__.py", "/ckanext/ecosis/user_data/paster.py"], "/ckanext/ecosis/controller/workspace/__init__.py": ["/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/datastore/query/workspace.py", "/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/datastore/files/utils.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/controller/package/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/lib/data_package_importer.py"], "/ckanext/ecosis/controller/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/workspace.py": 
["/ckanext/ecosis/datastore/ckan/__init__.py"]}
|
7,048
|
CSTARS/ckanext-ecosis
|
refs/heads/master
|
/ckanext/ecosis/user_data/paster.py
|
import click
# ckan ecosis initdb -c
name = 'ecosis'
# Root click command group exposed to the CKAN CLI; subcommands such as
# `initdb` below are registered on it (invoked as `ckan ecosis initdb`).
@click.group()
def ecosis():
    """Database management commands.
    """
    pass
# `ckan ecosis initdb` -- create the ecosis database tables.
@ecosis.command(
    name=u'initdb',
    short_help=u'Initialize ecosis tables'
)
def initdb():
    u'''Initialize ecosis tables'''
    # Import deferred until the command actually runs.
    from .model import setup
    setup()
|
{"/ckanext/ecosis/lib/data_package_importer.py": ["/ckanext/ecosis/controller/resource/__init__.py", "/ckanext/ecosis/lib/aws.py"], "/ckanext/ecosis/datastore/vocab/controlled.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/push/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/lookup.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py", "/ckanext/ecosis/controller/admin/upgrade.py"], "/ckanext/ecosis/controller/user/__init__.py": ["/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py"], "/ckanext/ecosis/datastore/files/excel.py": ["/ckanext/ecosis/datastore/files/__init__.py"], "/ckanext/ecosis/datastore/workspace/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/files/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/delete/__init__.py": ["/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/mapreduce/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/upgrade.py": ["/ckanext/ecosis/datastore/ckan/package.py", "/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/resource/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/delete/__init__.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/controller/package/doi.py", 
"/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/controller/organization/__init__.py": ["/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/spectra/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/user_data/paster.py": ["/ckanext/ecosis/user_data/model.py"], "/ckanext/ecosis/controller/package/doi.py": ["/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/plugin.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/controller/organization/__init__.py", "/ckanext/ecosis/controller/package/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/controller/__init__.py", "/ckanext/ecosis/user_data/paster.py"], "/ckanext/ecosis/controller/workspace/__init__.py": ["/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/datastore/query/workspace.py", "/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/datastore/files/utils.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/controller/package/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/lib/data_package_importer.py"], "/ckanext/ecosis/controller/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/workspace.py": 
["/ckanext/ecosis/datastore/ckan/__init__.py"]}
|
7,049
|
CSTARS/ckanext-ecosis
|
refs/heads/master
|
/ckanext/ecosis/controller/package/doi.py
|
import base64
import datetime
import json
import urllib
import urllib.request   # used by requestDoi/requestDoiEzid; bare `import urllib` does not expose it
import urllib.parse     # used by sendAdminNotification

import psycopg2
import psycopg2.extras  # RealDictCursor used by sendUserNotification
import shortuuid
from dateutil import parser

from ckan.lib.base import c, model
import ckan.logic as logic
from ckan.lib.email_notifications import send_notification
from ckan.common import config
# from ckan.common import request, response
from ckan.common import request

from ckanext.ecosis.datastore.push import Push
from ckanext.ecosis.lib.utils import setPackageExtra, getPackageExtra
from ckanext.ecosis.lib.auth import isAdmin
from ckanext.ecosis.datastore.ckan import package
DOI_STATUS = {
'APPLIED' : 'Applied', # DOI has been applied by EZID
'REQUESTING' : 'Requesting', # Requesting DOI from EZID
'ACCEPTED' : 'Accepted', # Admins have accepted DOI request
'PENDING_REVISION' : 'Pending Revision', # Admins not approved the DOI request, dataset needs revision
'PENDING_APPROVAL' : 'Pending Approval', # User has requested DOI
'FAILED_REQUEST' : 'Failed Request' # Request to Datacite failed
}
# Datacite config
DOI_CONFIG = {
"shoulder" : config.get('ecosis.doi.shoulder'),
"url" : config.get("ecosis.doi.url"),
"username" : config.get("ecosis.doi.username"),
"password" : config.get("ecosis.doi.password")
}
connStr = ""
# inject global dependencies
def init(pgConn):
global connStr
connStr = pgConn
## helper for updating package
## for use by package_update_auth in plugin.py
def validDoiUpdate(currentPackage, newPackage):
    # Auth-style check: may the current user make the DOI changes implied by
    # the diff between currentPackage and newPackage?  Returns a CKAN-auth
    # shaped dict: {'success': bool[, 'msg': str]}.
    oldDoi = getDoiStatus(currentPackage)
    newDoi = getDoiStatus(newPackage)
    # No changes
    if oldDoi.get('status').get('value') == newDoi.get('status').get('value') and oldDoi.get('status').get('error') != True:
        if oldDoi.get('value') == newDoi.get('value'):
            return {'success': True}
    # the one thing a USER can do is request approval
    if oldDoi.get('status').get('value') in (None, DOI_STATUS['PENDING_REVISION'], DOI_STATUS['PENDING_APPROVAL']):
        if newDoi.get('status').get('value') == DOI_STATUS['PENDING_APPROVAL'] and newDoi.get('value') is None:
            return {'success': True}
        # user is canceling request
        # NOTE(review): indentation reconstructed from a whitespace-mangled
        # source -- confirm this cancel branch is nested under the status
        # check above rather than at function level.
        if newDoi.get('status').get('value') is None:
            return {'success': True}
    # Any other DOI change (setting values, approving, revising) is admin-only.
    if not isAdmin():
        return {
            'success' : False,
            'msg' : 'You do not have access to update DOI values'
        }
    return {'success': True}
## helper for updating package
def handleDoiUpdate(currentPackage, newPackage):
    # Apply the side effects of a DOI state transition (package extras,
    # admin/user notifications) and return {'success': True} or
    # {'error': True, 'message': ...}.  Mirrors validDoiUpdate() above.
    oldDoi = getDoiStatus(currentPackage)
    newDoi = getDoiStatus(newPackage)
    # No changes
    if oldDoi.get('status').get('value') == newDoi.get('status').get('value') and oldDoi.get('status').get('error') != True:
        if oldDoi.get('value') == newDoi.get('value'):
            # check this package doesn't have a DOI
            # Perhaps just not let them make it private?
            # if not canUpdate(oldDoi):
            #     return {
            #         'error': True,
            #         'message' : 'You cannot update a package once the DOI as been applied'
            #     }
            # else:
            return {'success': True}
    # the one thing a USER can do is request approval
    if oldDoi.get('status').get('value') in (None, DOI_STATUS['PENDING_REVISION']):
        if newDoi.get('status').get('value') == DOI_STATUS['PENDING_APPROVAL'] and newDoi.get('value') in (None, ''):
            # set the requesting user
            status = {
                'value' : DOI_STATUS['PENDING_APPROVAL'],
                'requested_by' : c.user
            }
            setPackageExtra('EcoSIS DOI Status', json.dumps(status), newPackage)
            # alert the admins of the request
            sendAdminNotification(newPackage)
            return {'success': True}
        # user is canceling request
        # NOTE(review): indentation reconstructed from a whitespace-mangled
        # source -- confirm this cancel branch is nested under the status
        # check above rather than at function level.
        if newDoi.get('status').get('value') is None:
            setPackageExtra('EcoSIS DOI Status', '{}', newPackage)
            return {'success': True}
    # Everything past this point is an admin-only transition.
    if not isAdmin():
        return {
            'error' : True,
            'message' : 'You do not have access to update DOI values'
        }
    # Admin moved the request into PENDING_REVISION or ACCEPTED: notify the
    # requesting user accordingly (False = needs revision, True = approved).
    resp = {}
    if newDoi.get('status').get('value') == DOI_STATUS['PENDING_REVISION'] and oldDoi.get('status').get('value') != DOI_STATUS['PENDING_REVISION']:
        resp = sendUserNotification(newPackage, False)
    elif newDoi.get('status').get('value') == DOI_STATUS['ACCEPTED'] and oldDoi.get('status').get('value') != DOI_STATUS['ACCEPTED']:
        resp = sendUserNotification(newPackage, True)
    resp['success'] = True
    return resp
# Actually request DOI from EZID
def applyDoi(pkg):
    # Drive the DOI lifecycle for an accepted request:
    # ACCEPTED -> REQUESTING -> (APPLIED | FAILED_REQUEST),
    # persisting each state via package_update and re-pushing to search on
    # success.  Returns None if the package is not in ACCEPTED state.
    # get the current package DOI status
    doiStatus = getDoiStatus(pkg)
    # make sure it's set to accepted
    if doiStatus.get('status').get('value') != DOI_STATUS["ACCEPTED"]:
        return
    # set the new status to REQUESTING DOI
    setPackageExtra('EcoSIS DOI Status', json.dumps({'value':DOI_STATUS["REQUESTING"]}), pkg)
    # update the dataset
    context = {'model': model, 'user': c.user}
    pkg = logic.get_action('package_update')(context, pkg)
    # Request DOI from Datacite
    try:
        doiResponse = requestDoi(pkg)
    except Exception as e:
        # Normalize any transport/parse failure into an error response dict.
        doiResponse = {
            'status' : 'error',
            'message' : str(e)
        }
    # If request failed, reset DOI status, return new error
    if doiResponse.get('status') != 'success':
        status = {
            'value': DOI_STATUS["FAILED_REQUEST"],
            'error' : True,
            'message': doiResponse.get('message'),
            'serverResponseStatus' : doiResponse.get('status')
        }
        setPackageExtra('EcoSIS DOI Status', json.dumps(status), pkg)
        logic.get_action('package_update')(context, pkg)
        return status
    # set the returned DOI and new DOI Status
    status = {
        'value' : DOI_STATUS["APPLIED"],
        'applied' : datetime.datetime.utcnow().isoformat()
    }
    setPackageExtra('EcoSIS DOI Status', json.dumps(status), pkg)
    setPackageExtra('EcoSIS DOI', doiResponse.get('doi'), pkg)
    # now that it's applied, re-push to search so updates are visible
    push = Push()
    push.run(pkg)
    # final dataset update with new DOI status
    logic.get_action('package_update')(context, pkg)
    return {'success': True}
# HTTP request for EZID (legacy path; requestDoi() below talks to Datacite)
def requestDoiEzid(pkg):
    """Mint a DOI for *pkg* via the EZID ANVL API.

    Returns a dict {'status': ..., 'doi': ...} parsed from the first
    "key: value" line of EZID's response.
    """
    # Request body in EZID's ANVL format
    data = "_profile: datacite\n"
    data += "_target: %s/#result/%s\n" % (config.get("ecosis.search_url"), pkg.get('id'))
    data += "datacite.creator: %s\n" % pkg.get('author')
    data += "datacite.title: %s\n" % pkg.get('title')
    data += "datacite.resourcetype: Dataset\n"
    data += "datacite.publisher: EcoSIS\n"
    data += "datacite.publicationyear: %s" % parser.parse(pkg.get('metadata_created')).year
    # set body, authentication header and make request
    r = urllib.request.Request(DOI_CONFIG.get('url'))
    # Bug fix: base64.encodestring() required bytes and was removed in
    # Python 3.9; build the Basic auth token the same way requestDoi() does.
    userpass = '%s:%s' % (DOI_CONFIG.get('username'), DOI_CONFIG.get('password'))
    base64string = base64.b64encode(bytes(userpass, 'utf-8')).decode('utf-8')
    r.add_header("Authorization", "Basic %s" % base64string)
    r.add_header("Content-Type", "text/plain;charset=UTF-8")
    r.data = bytes(data, 'utf-8')
    try:
        # Bug fix: urlopen().read() returns bytes; decode before the str
        # splits below.
        result = urllib.request.urlopen(r).read().decode('utf-8')
    except Exception as e:
        # Best-effort: fall through to the "status: message" parsing below.
        result = "error: request error"
    # parse "status: value" from the first line of the text response
    (status, doi) = result.split('\n')[0].split(': ')
    return {
        "status" : status,
        "doi" : doi
    }
# HTTP request for Datacite
def requestDoi(pkg):
    """Mint and publish a DOI for *pkg* via the Datacite REST API.

    First creates a draft DOI under the configured shoulder, then publishes
    it with the dataset's metadata.

    Returns:
        dict: {'status': 'success', 'doi': doi} on success.

    Raises:
        Exception: if either Datacite call returns an unexpected status.
    """
    doi = "%s/%s" % (DOI_CONFIG.get('shoulder'), shortuuid.ShortUUID().random(length=8))
    # Step 1: create a draft DOI
    data = {
        'data' : {
            'type' : 'dois',
            'attributes' : {
                'doi' : doi
            }
        }
    }
    r = urllib.request.Request(DOI_CONFIG.get('url'))
    userpass = '%s:%s' % (DOI_CONFIG.get('username'), DOI_CONFIG.get('password'))
    base64string = base64.b64encode(bytes(userpass, 'utf-8'))
    auth_header = "Basic %s" % base64string.decode("utf-8")
    r.add_header("Authorization", auth_header)
    r.add_header("Content-Type", "application/vnd.api+json")
    r.get_method = lambda: 'POST'
    r.data = bytes(json.dumps(data), 'utf-8')
    result = urllib.request.urlopen(r)
    if result.getcode() != 201:
        # Bug fix: the original read the response body twice -- the first
        # read() in print() consumed it, so the exception message always
        # carried an empty body -- and passed a tuple to Exception instead
        # of %-formatting the message.  Read once and format properly.
        body = result.read()
        print(body)
        if result.getcode() == 422:
            raise Exception('Doi already taken, please try again')
        else:
            raise Exception('Invalid response from doi draft service %s: %s' % (result.getcode(), body))
    # Step 2: publish the draft with the dataset's metadata
    data = {
        'data': {
            'id': doi,
            'type': "dois",
            'attributes': {
                'event': 'publish',
                'doi': doi,
                'creators': [{
                    'name': pkg.get('author')
                }],
                'titles': [{
                    'title': pkg.get('title')
                }],
                'descriptions': [{
                    'description': pkg.get('overview')
                }],
                'identifiers': [{
                    'identifierType': 'ecosis-uid',
                    'identifier': pkg.get('id')
                }],
                'publisher': 'EcoSIS',
                'publicationYear': parser.parse(pkg.get('metadata_created')).year,
                'types': {
                    'resourceTypeGeneral': 'Dataset'
                },
                'url': "%s/doi:%s" % (config.get("ecosis.search_url"), doi),
                'schemaVersion': 'http://datacite.org/schema/kernel-4'
            }
        }
    }
    print("%s/%s" % (DOI_CONFIG.get('url'), doi))
    r = urllib.request.Request("%s/%s" % (DOI_CONFIG.get('url'), doi))
    r.get_method = lambda: 'PUT'
    r.add_header("Authorization", auth_header)
    r.add_header("Content-Type", "application/vnd.api+json")
    r.data = bytes(json.dumps(data), 'utf-8')
    result = urllib.request.urlopen(r)
    if result.getcode() != 200 and result.getcode() != 201:
        raise Exception('Invalid response from doi publish service %s: %s' % (result.getcode(), result.read()))
    return {
        "status" : 'success',
        "doi" : doi
    }
# helper for deleting package or updating resources
def hasAppliedDoi(pkgId):
    """Return True when the package's DOI state no longer permits user
    updates (i.e. canUpdate() says no).
    """
    context = {'model': model, 'user': c.user}
    pkg = logic.get_action('package_show')(context, {'id': pkgId})
    return not canUpdate(getDoiStatus(pkg))
# def doiUpdateStatus():
# response.headers["Content-Type"] = "application/json"
#
# if not isAdmin():
# return {
# 'error' : True,
# 'message' : 'Nope.'
# }
#
# return json.dumps({})
# for admin DOI interface, query datasets by DOI status
def doiQuery():
    """Admin-only: search datasets by DOI status with paging params."""
    if not isAdmin():
        return {
            'error' : True,
            'message' : 'Nope.'
        }
    params = request.params
    return package.doiQuery(
        query=params.get('query'),
        status=params.get('status'),
        offset=params.get('offset'),
        limit=params.get('limit')
    )
# for admin, allows admin to completely clear a DOI. Should only be used in
# dev interface, never in production.
def clearDoi():
    """Admin-only: wipe the DOI and DOI-status extras from a dataset."""
    if not isAdmin():
        return {
            'error' : True,
            'message' : 'Nope.'
        }
    pkgId = request.params.get('id')
    context = {'model': model, 'user': c.user}
    pkg = logic.get_action('package_show')(context, {'id': pkgId})
    # reset both DOI-related package extras, then persist
    setPackageExtra('EcoSIS DOI Status', '{}', pkg)
    setPackageExtra('EcoSIS DOI', '', pkg)
    logic.get_action('package_update')(context, pkg)
    return {'success': True}
# Send admin notification of DOI request
def sendAdminNotification(pkg):
    """Email the configured admin address that a DOI was requested for *pkg*.

    Best-effort: any failure is printed, never raised.
    """
    url = config.get('ckan.site_url')
    admin_email = config.get('ecosis.admin_email')
    if url != "" and url is not None:
        if admin_email != "" and admin_email is not None:
            try:
                # Bug fix: the original called urllib.parse.urlencode() on the
                # bare title string -- urlencode expects a mapping/pair
                # sequence, so it raised and the bare except silently dropped
                # the email.  quote_plus() is the correct call to escape a
                # single value for the URL fragment.
                send_notification(
                    {
                        "email" : admin_email,
                        "display_name" : "EcoSIS Admins"
                    },
                    {
                        "subject" : "EcoSIS Dataset DOI Request - %s" % pkg.get('title'),
                        "body" : ("A DOI has been requested for the dataset '%s' by user %s/user/%s. "
                                  "You can view the dataset here: %s/#result/%s and approve the DOI here: %s/doi-admin/#%s"
                                  "\n\n-EcoSIS Server") %
                                 (pkg.get('title'), config.get('ckan.site_url'), c.user, config.get('ecosis.search_url'), pkg.get("id"), config.get('ckan.site_url'), urllib.parse.quote_plus(pkg.get("title")))
                    }
                )
            except:
                print("Failed to send admin email")
# send user notification of approval/denial of DOI request
def sendUserNotification(pkg, approved):
    """Email the user recorded as 'requested_by' in the DOI status that
    their request was approved (approved=True) or needs revision
    (approved=False).

    Returns:
        dict: {'email': address-used, 'user': requesting-username}.
        Sending is best-effort; failures are printed, never raised.
    """
    # Bug fix: `import psycopg2` alone does not provide psycopg2.extras;
    # import it explicitly so RealDictCursor is always resolvable.
    import psycopg2.extras
    url = config.get('ckan.site_url')
    status = getDoiStatus(pkg).get('status')
    if status is None:
        status = {}
    # look up the requesting user's email directly in the CKAN user table
    conn = psycopg2.connect(connStr)
    try:
        cur = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
        cur.execute(
            "select email from public.user where name = %s", (status.get('requested_by'),)
        )
        users = cur.fetchall()
        cur.close()
    finally:
        # Bug fix: the original never closed the connection, leaking one
        # PostgreSQL connection per notification.
        conn.close()
    # use the first matching row's email, if any
    email = ""
    for user in users:
        email = user.get('email')
        break
    body = ""
    if approved:
        body = ("The DOI request for the dataset '%s' has been approved. "
                "It may take a couple minutes for the DOI to generate. "
                "You can view the dataset here: %s/package/%s\n\n"
                "\n\n-EcoSIS Team") % (pkg.get('title'),
                                       config.get('ecosis.search_url'),
                                       pkg.get("id"))
    else:
        body = ("The DOI request for the dataset '%s' requires more information before approval. "
                "Please provide a full description, keywords and fill out as much metadata as possible. "
                "Then feel free to re-request a DOI."
                "\n\n-EcoSIS Team") % (pkg.get('title'))
    if url != "" and url is not None:
        if email != "" and email is not None:
            try:
                send_notification(
                    {
                        "email" : email,
                        "display_name" : "EcoSIS User %s," % status.get('requested_by')
                    },
                    {
                        "subject" : "EcoSIS Dataset DOI Request - %s" % pkg.get('title'),
                        "body" : body
                    }
                )
            except:
                print("Failed to send admin email")
    return {
        "email" : email,
        "user" : status.get('requested_by')
    }
# make sure user has permission to update DOI status. There are only two status users are
# allowed to update. 1) Doi has never been requested 2) DOI has been requested and
# the admins have said revisions are needed
def canUpdate(doi):
    """Return True when the DOI status still permits user edits."""
    # TODO: later check for props we can update?
    value = doi.get('status').get('value')
    return value is None or value == DOI_STATUS['PENDING_REVISION']
# wrapper for getting package-extra doi status from status fields (stored as JSON)
def getDoiStatus(pkg):
    """Return {'status': dict, 'value': doi-string-or-None} read from the
    'EcoSIS DOI Status' / 'EcoSIS DOI' package extras.
    """
    rawStatus = getPackageExtra('EcoSIS DOI Status', pkg)
    parsed = {} if rawStatus is None or rawStatus == "" else json.loads(rawStatus)
    return {
        'status' : parsed,
        'value' : getPackageExtra('EcoSIS DOI', pkg)
    }
|
{"/ckanext/ecosis/lib/data_package_importer.py": ["/ckanext/ecosis/controller/resource/__init__.py", "/ckanext/ecosis/lib/aws.py"], "/ckanext/ecosis/datastore/vocab/controlled.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/push/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/lookup.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py", "/ckanext/ecosis/controller/admin/upgrade.py"], "/ckanext/ecosis/controller/user/__init__.py": ["/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py"], "/ckanext/ecosis/datastore/files/excel.py": ["/ckanext/ecosis/datastore/files/__init__.py"], "/ckanext/ecosis/datastore/workspace/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/files/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/delete/__init__.py": ["/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/mapreduce/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/upgrade.py": ["/ckanext/ecosis/datastore/ckan/package.py", "/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/resource/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/delete/__init__.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/controller/package/doi.py", 
"/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/controller/organization/__init__.py": ["/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/spectra/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/user_data/paster.py": ["/ckanext/ecosis/user_data/model.py"], "/ckanext/ecosis/controller/package/doi.py": ["/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/plugin.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/controller/organization/__init__.py", "/ckanext/ecosis/controller/package/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/controller/__init__.py", "/ckanext/ecosis/user_data/paster.py"], "/ckanext/ecosis/controller/workspace/__init__.py": ["/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/datastore/query/workspace.py", "/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/datastore/files/utils.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/controller/package/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/lib/data_package_importer.py"], "/ckanext/ecosis/controller/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/workspace.py": 
["/ckanext/ecosis/datastore/ckan/__init__.py"]}
|
7,050
|
CSTARS/ckanext-ecosis
|
refs/heads/master
|
/ckanext/ecosis/datastore/vocab/gcmd.py
|
import re
gcmdCollection = None
# inject global dependencies
def init(collections):
    """Grab the 'gcmd' Mongo collection from the injected collection map."""
    global gcmdCollection
    gcmdCollection = collections.get('gcmd')
# find gcmd keywords based on given query
def suggest(query):
    """Return up to 20 GCMD vocabulary documents whose 'keywords' field
    regex-matches every comma/whitespace-separated term in *query*.
    """
    # split on comma's
    terms = re.split(',|\s', query)
    # clean up query
    # TODO: this is not required in python3 anymore?
    # query = map(unicode.strip, query)
    # one regex clause per term, AND-ed together in the Mongo query
    clauses = [{'keywords': re.compile("%s" % term)} for term in terms]
    # run query
    results = []
    cursor = gcmdCollection.find({'$and': clauses}).limit(20)
    if cursor is not None:
        for doc in cursor:
            results.append(doc)
    return results
|
{"/ckanext/ecosis/lib/data_package_importer.py": ["/ckanext/ecosis/controller/resource/__init__.py", "/ckanext/ecosis/lib/aws.py"], "/ckanext/ecosis/datastore/vocab/controlled.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/push/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/lookup.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py", "/ckanext/ecosis/controller/admin/upgrade.py"], "/ckanext/ecosis/controller/user/__init__.py": ["/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py"], "/ckanext/ecosis/datastore/files/excel.py": ["/ckanext/ecosis/datastore/files/__init__.py"], "/ckanext/ecosis/datastore/workspace/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/files/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/delete/__init__.py": ["/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/mapreduce/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/upgrade.py": ["/ckanext/ecosis/datastore/ckan/package.py", "/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/resource/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/delete/__init__.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/controller/package/doi.py", 
"/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/controller/organization/__init__.py": ["/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/spectra/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/user_data/paster.py": ["/ckanext/ecosis/user_data/model.py"], "/ckanext/ecosis/controller/package/doi.py": ["/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/plugin.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/controller/organization/__init__.py", "/ckanext/ecosis/controller/package/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/controller/__init__.py", "/ckanext/ecosis/user_data/paster.py"], "/ckanext/ecosis/controller/workspace/__init__.py": ["/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/datastore/query/workspace.py", "/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/datastore/files/utils.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/controller/package/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/lib/data_package_importer.py"], "/ckanext/ecosis/controller/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/workspace.py": 
["/ckanext/ecosis/datastore/ckan/__init__.py"]}
|
7,051
|
CSTARS/ckanext-ecosis
|
refs/heads/master
|
/ckanext/ecosis/plugin.py
|
import json, os
import ckan.plugins as plugins
import ckan.plugins.toolkit as tk
import ckan.lib.base as base
from ckan.common import config, request
from ckan.logic.action.create import organization_member_create
from ckan.logic.action.delete import organization_member_delete
import ckan.logic as logic
from flask import Blueprint, make_response, send_from_directory
import ckanext.ecosis.lib.utils as utils
import ckanext.ecosis.datastore.query as query
from ckanext.ecosis.datastore import delete as deleteUtil
import ckanext.ecosis.controller.organization as orgController
import ckanext.ecosis.controller.package as pkgController
from ckanext.ecosis.controller.package.doi import handleDoiUpdate, validDoiUpdate, hasAppliedDoi, getDoiStatus, DOI_STATUS, applyDoi
import ckanext.ecosis.user_data.model as userDataModel
from ckanext.ecosis.controller import EcosisController
# Single shared controller instance; its bound methods are registered as
# Flask view functions in EcosisPlugin.make_middleware() below.
controller = EcosisController()
@tk.side_effect_free
def organization_member_create_wrapper(context, member_create):
    """Create an organization member via core CKAN, then notify EcoSIS remotes."""
    organization_member_create(context, member_create)
    org_id = member_create.get('id')
    orgController.notify_remotes(org_id)
@tk.side_effect_free
def organization_member_delete_wrapper(context, member_delete):
    """Delete an organization member via core CKAN, then notify EcoSIS remotes."""
    organization_member_delete(context, member_delete)
    org_id = member_delete.get('id')
    orgController.notify_remotes(org_id)
class EcosisPlugin(plugins.SingletonPlugin,
                   tk.DefaultDatasetForm):
    '''EcoSIS CKAN plugin.

    Wires the EcoSIS spectral-data workflow into CKAN: custom /ecosis API
    routes, DOI-aware auth checks, organization change notifications,
    CLI commands and template helpers.
    '''
    plugins.implements(plugins.IConfigurer)
    plugins.implements(plugins.ITemplateHelpers)
    plugins.implements(plugins.IRoutes, inherit=True)
    plugins.implements(plugins.IOrganizationController)
    plugins.implements(plugins.IPackageController)
    plugins.implements(plugins.IResourceController)
    plugins.implements(plugins.IAuthFunctions)
    plugins.implements(plugins.IActions)
    plugins.implements(plugins.IClick)
    plugins.implements(plugins.IMiddleware)
    # plugins.implements(plugins.IDatasetForm)

    # map of ecosis required package property to a nice property label
    REQUIRED_FIELDS = {
        'license_id': { 'label': 'License', 'empty_value' : 'notspecified'}
    }

    # IClick
    def get_commands(self):
        """Implemented for IClick Plugin
        register the 'ckan ecosis' CLI commands
        """
        # import click
        from ckanext.ecosis.user_data.paster import ecosis as ecosisCmd
        return [ecosisCmd]

    # add iauth functions
    def get_auth_functions(self):
        """Implemented for IAuthFunctions: replace core auth checks with the
        DOI- and required-field-aware versions defined below."""
        return {
            'package_update' : self.package_update_auth,
            'package_create' : self.package_create_auth,
            'package_delete' : self.package_delete_auth,
            'resource_delete' : self.resource_delete_auth,
            'resource_create' : self.resource_create_auth
        }

    def get_class_name(self, entity):
        """Helper for getting nice class name string
        required because some plugin methods overlap, need to sniff
        test which is calling
        """
        return entity.__class__.__name__

    def is_group(self, entity):
        # organizations are Group model objects in CKAN
        return self.get_class_name(entity) == 'Group'

    def is_package(self, entity):
        return self.get_class_name(entity) == 'Package'

    def is_resource(self, entity):
        return self.get_class_name(entity) == 'Resource'

    def read(self, entity):
        """Implemented for IOrganizationController and IPackageController Plugins
        not used by the EcoSIS Plugin
        """
        pass

    def create(self, entity):
        """Implemented for IOrganizationController and IPackageController Plugins
        IOrganizationController: notify remotes of org update
        """
        if self.is_group(entity):
            orgController.notify_remotes(entity.id)

    def after_update(self, context, pkg_dict):
        """Implemented for IPackageController.

        Runs DOI handling after a dataset save.  Relies on
        context['before_package_update'] having been stashed by
        package_update_auth() earlier in the same request.
        """
        if pkg_dict.get('type') == "dataset":
            # if doi status changed to ACCEPTED, start DOI application process
            resp = handleDoiUpdate(context['before_package_update'], pkg_dict)
            doiStatus = getDoiStatus(pkg_dict)
            if doiStatus.get('status').get('value') == DOI_STATUS["ACCEPTED"]:
                applyDoi(pkg_dict)
            if resp.get('email') is not None:
                pkg_dict['doi_user_email'] = resp.get('email')
                pkg_dict['doi_user_name'] = resp.get('user')

    def before_create(self, context, resource):
        # IResourceController hook; EcoSIS takes no action before creation.
        pass

    def after_create(self, context, pkg_dict):
        """Implemented for IPackageController"""
        if self.get_class_name(pkg_dict) == "dict": # safety check
            if pkg_dict.get('type') == "dataset":
                pkgController.after_create()
        return pkg_dict

    def before_index(self, pkg_dict):
        """Implemented for IPackageController"""
        return pkg_dict

    def edit(self, entity):
        # IOrganizationController/IPackageController hook; intentionally a no-op.
        pass
        # orgController.update(entity)

    @tk.auth_sysadmins_check
    def package_create_auth(self, context, data_dict=None):
        """Check for required fields
        """
        # hack. how do we know if this is a view check or not?!
        if data_dict is not None and len(data_dict) == 0:
            return {'success': True}
        if data_dict is not None:
            for field, props in self.REQUIRED_FIELDS.items():
                value = data_dict.get(field)
                if value == None or value == '' or value == props.get('empty_value'):
                    return {
                        'success' : False,
                        'msg' : 'The %s field is required' % props.get('label')
                    }
        # required fields present; defer to the DOI-aware update check
        return self.package_update_auth(context, data_dict)

    @tk.auth_sysadmins_check
    def package_update_auth(self, context, data_dict=None):
        """Check for DOI issues that should prevent saving. store old values
        to be used in the after_update() method so we know which DOI actions to
        preform
        """
        if data_dict is not None:
            # bypass flag for view elements to see if something should be displayed
            # DOI editing is not part of this, so we can ignore
            if data_dict.get('view_auth_check') != True and data_dict.get('id') is not None:
                cpkg = {}
                if data_dict['id'] != '':
                    cpkg = logic.get_action('package_show')(context, {'id': data_dict['id']})
                # stash the pre-update package for after_update() DOI comparison
                context['before_package_update'] = cpkg
                return validDoiUpdate(cpkg, data_dict)
        return {'success': True}

    @tk.auth_sysadmins_check
    def package_delete_auth(self, context, data_dict=None):
        """Check that a package can be deleted
        """
        # NOTE(review): this branch uses the key 'message' while the sibling
        # auth checks use 'msg' — confirm which key CKAN actually surfaces.
        if hasAppliedDoi(data_dict.get('id')):
            return {'success': False, 'message':'Cannot delete package with applied DOI'}
        return {'success': True}

    @tk.auth_sysadmins_check
    def resource_delete_auth(self, context, data_dict=None):
        """Check that a resource can be deleted
        """
        resource = logic.get_action('resource_show')(context, {'id': data_dict['id']})
        if hasAppliedDoi(resource.get('package_id')):
            return {'success': False, 'msg': 'Cannot delete resource of package with applied DOI'}
        return {'success': True}

    @tk.auth_sysadmins_check
    def resource_create_auth(self, context, data_dict=None):
        """Check that a resource can be created
        """
        if hasAppliedDoi(data_dict.get('package_id')):
            return {'success': False, 'msg': 'Cannot create resource of package with applied DOI'}
        return {'success': True}

    def authz_add_role(self, object_role):
        # legacy authz hook; not used by EcoSIS
        pass

    def authz_remove_role(self, object_role):
        # legacy authz hook; not used by EcoSIS
        pass

    def before_delete(self, context, resource, resources):
        pass

    def delete(self, entity):
        pass

    def after_delete(self, context, pkg_dict):
        """Clean up EcoSIS state when an org or package is removed."""
        if self.is_group(pkg_dict):
            orgController.delete(pkg_dict)
        if self.is_package(pkg_dict):
            deleteUtil.package(pkg_dict.get('id'))

    def after_show(self, context, entity):
        return entity

    def before_show(self, resource_dict):
        pass

    def before_view(self, pkg_dict):
        return pkg_dict

    def before_search(self, search_params):
        return search_params

    def after_search(self, search_results, search_params):
        return search_results

    # we need to listen for org create/update/delete events and notify remotes
    def get_actions(self):
        """Implemented for IActions: wrap the core member actions so remotes
        are notified of membership changes."""
        return {
            'organization_member_create' : organization_member_create_wrapper,
            'organization_member_delete': organization_member_delete_wrapper
        }

    def update_config(self, config):
        # Add this plugin's templates dir to CKAN's extra_template_paths, so
        # that CKAN will use this plugin's custom templates.
        tk.add_template_directory(config, 'templates')
        tk.add_resource('public/fanstatic', 'ecosis')
        # ensure the EcoSIS user-data table exists
        userDataModel.define_table()

    # set helpers for ecosis templates
    def get_helpers(self):
        # Example:
        #return { 'to_json' : 'self.to_json' }
        return {
            'get_google_analytics_code' : self.get_google_analytics_code,
            'get_search_url' : self.get_search_url,
            'get_last_pushed_str' : self.get_last_pushed_str,
            'pushed_to_search' : self.pushed_to_search
        }

    def pushed_to_search(self, package_id):
        """Template helper: has this package ever been pushed to search?"""
        result = query.isPushed(package_id)
        if result is None:
            return False
        return True

    def get_last_pushed_str(self, package_id):
        """Template helper: formatted timestamp of the last search push,
        or None if the package was never pushed."""
        result = query.isPushed(package_id)
        if result is None:
            return None
        try:
            return result.strftime("%Y-%m-%d %H:%M")
        # NOTE(review): bare except — presumably guards against isPushed()
        # returning a non-datetime value; confirm and narrow if possible.
        except:
            return result

    def get_search_url(self):
        return config.get('ecosis.search_url','')

    def get_google_analytics_code(self):
        return config.get('ckan.google_analytics_code', '')

    def is_fallback(self):
        # Return True to register this plugin as the default handler for
        # spectral types not handled by any other IDatasetForm plugin.
        return False

    def make_middleware(self, app, config):
        """Implemented for IMiddleware: register editor redirects, static
        asset routes and the /ecosis API blueprint on the Flask app."""
        # REDIRECTS
        editor_redirects = Blueprint(u'editor_redirects', __name__, url_prefix=u'/')
        # route all resource edit screens to main ecosis dataset editor
        editor_redirects.add_url_rule(u'/dataset/new', methods=[u'GET'],
            view_func=controller.createPackageRedirect)
        # TODO: the below don't actually work and are currently handled by the template. badness.
        editor_redirects.add_url_rule(u'/dataset/resources/<package_id>', methods=[u'GET'],
            view_func=controller.editPackageRedirect)
        editor_redirects.add_url_rule(u'/dataset/edit/<package_id>', methods=[u'GET'],
            view_func=controller.editPackageRedirect)
        editor_redirects.add_url_rule(u'/dataset/new_resource/<package_id>', methods=[u'GET'],
            view_func=controller.editPackageRedirect)
        editor_redirects.add_url_rule(u'/dataset/<package_id>/resource_edit/<resource_id>', methods=[u'GET'],
            view_func=controller.editPackageRedirect)
        # NOTE(review): '/dataset/new_resource/<package_id>' is registered twice
        # (above with editPackageRedirect); Werkzeug matches the first rule, so
        # this second registration appears dead — confirm which is intended.
        editor_redirects.add_url_rule(u'/dataset/new_resource/<package_id>', methods=[u'GET'],
            view_func=controller.editPackageRedirectWResource)
        # Serve index.html static paths
        root_dir = os.environ.get('CKAN_HOME', os.getcwd())
        if not os.path.exists(root_dir):
            raise Exception('CKAN_HOME not found: %s. Unable to load static assests' % root_dir)
        editor_redirects.add_url_rule(u'/import/', methods=[u'GET'],
            endpoint="spectra-importer",
            view_func=lambda: send_from_directory(os.path.join(root_dir, 'ckanext-ecosis/spectra-importer/dist/import'), 'index.html'))
        editor_redirects.add_url_rule(u'/doi-admin/', methods=[u'GET'],
            endpoint="doi-admin",
            view_func=lambda: send_from_directory(os.path.join(root_dir, 'ckanext-ecosis/doi-admin/dist/doi-admin'), 'index.html'))
        # print(os.path.join(root_dir, 'ckanext-ecosis/spectra-importer/dist/import'), 'index.html')
        app.register_blueprint(editor_redirects)
        # API
        api = Blueprint(u'ecosis', __name__, url_prefix=u'/ecosis')
        # ecosis - admin
        api.add_url_rule(u'/admin/rebuildIndex', methods=[u'GET'],
            view_func=controller.rebuildIndex)
        api.add_url_rule(u'/admin/clean', methods=[u'GET'],
            view_func=controller.clean)
        api.add_url_rule(u'/admin/verifyWorkspace', methods=[u'GET'],
            view_func=controller.verifyWorkspace)
        # ecosis - root
        api.add_url_rule(u'/user/get', methods=[u'GET'],
            view_func=controller.userInfo)
        api.add_url_rule(u'/gitInfo', methods=[u'GET'],
            view_func=controller.gitInfo)
        api.add_url_rule(u'/user/remoteLogin', methods=[u'POST'],
            view_func=controller.remoteLogin)
        api.add_url_rule(u'/user/githubInfo', methods=[u'POST'],
            view_func=controller.setGithubInfo)
        # ecosis - workspace
        api.add_url_rule(u'/workspace/prepare', methods=[u'GET'],
            view_func=controller.prepareWorkspace)
        api.add_url_rule(u'/workspace/get', methods=[u'GET'],
            view_func=controller.getWorkspace)
        api.add_url_rule(u'/workspace/push', methods=[u'GET'],
            view_func=controller.pushToSearch)
        # ecosis - package
        api.add_url_rule(u'/package/getTemplate', methods=[u'GET'],
            view_func=controller.getTemplate)
        api.add_url_rule(u'/package/updateLinkedResources', methods=[u'POST'],
            view_func=controller.updateLinkedResources)
        api.add_url_rule(u'/package/import', methods=[u'GET'],
            view_func=controller.importPackage)
        # ecosis - spectra
        api.add_url_rule(u'/spectra/suggestOverview', methods=[u'GET', 'POST'],
            view_func=controller.topOverview)
        api.add_url_rule(u'/spectra/get', methods=[u'GET'],
            view_func=controller.getSpectra)
        api.add_url_rule(u'/spectra/gcmd', methods=[u'GET'],
            view_func=controller.gcmdSuggest)
        api.add_url_rule(u'/spectra/suggest', methods=[u'GET'],
            view_func=controller.topSuggest)
        # ecosis - resource
        api.add_url_rule(u'/resource/getSpectraCount', methods=[u'GET'],
            view_func=controller.getSpectraCount)
        api.add_url_rule(u'/resource/process', methods=[u'POST'],
            view_func=controller.processResource)
        api.add_url_rule(u'/resource/getMetadataInfo', methods=[u'GET'],
            view_func=controller.getMetadataInfo)
        api.add_url_rule(u'/resource/byname/<package_id>/<resource_name>', methods=[u'GET'],
            view_func=controller.getResourceByName)
        api.add_url_rule(u'/resource/deleteMany', methods=[u'POST'],
            view_func=controller.deleteResources)
        api.add_url_rule(u'/resource/getMetadataChunk', methods=[u'GET'],
            view_func=controller.getMetadataChunk)
        api.add_url_rule(u'/resource/get', methods=[u'GET'],
            view_func=controller.getResource)
        # ecosis - admin doi
        # map.connect('doi_query', '/ecosis/admin/doi/query', controller=controller, action='doiQuery')
        # map.connect('doi_clear', '/ecosis/admin/doi/clear', controller=controller, action='clearDoi')
        # ecosis - admin doi
        api.add_url_rule(u'/admin/doi/query', methods=[u'GET'],
            view_func=controller.doiQuery)
        api.add_url_rule(u'/admin/doi/clear', methods=[u'GET'],
            view_func=controller.clearDoi)
        app.register_blueprint(api)
        return app

    def make_error_log_middleware(self, app, config):
        # no error-log middleware customization needed
        return app

    def before_map(self, map):
        self.set_map(map)
        return map

    # override?
    def set_map(self, map):
        """Register legacy (pylons-style) routes for IRoutes."""
        # The 'new' way
        # NOTE: this local string shadows the module-level `controller` instance.
        controller = 'ckanext.ecosis.controller:EcosisController'
        # Standard CKAN overrides
        # map.connect('create_package_3', '/api/3/action/package_create', controller=controller, action='createPackage')
        # map.connect('create_package', '/api/action/package_create', controller=controller, action='createPackage')
        # map.connect('update_package_3', '/api/3/action/package_update', controller=controller, action='updatePackage')
        # map.connect('update_package', '/api/action/package_update', controller=controller, action='updatePackage')
        # map.connect('delete_package_3', '/api/3/action/package_delete', controller=controller, action='deletePackage')
        # map.connect('delete_package', '/api/action/package_delete', controller=controller, action='deletePackage')
        # map.connect('delete_resource_3', '/api/3/action/resource_delete', controller=controller, action='deleteResource')
        # map.connect('delete_resource', '/api/action/resource_delete', controller=controller, action='deleteResource')
        # map.connect('create_resource_3', '/api/3/action/resource_create', controller=controller, action='createResource')
        # map.connect('create_resource', '/api/action/resource_create', controller=controller, action='createResource')
        # ecosis - admin
        map.connect('rebuild_usda_collection', '/ecosis/admin/rebuildUSDA', controller=controller, action='rebuildUSDACollection')
        map.connect('clean_tests', '/ecosis/admin/cleanTests', controller=controller, action='cleanTests')
        map.connect('upgrade', '/ecosis/admin/upgrade', controller=controller, action='upgrade')
        map.connect('fixUnits', '/ecosis/admin/fixUnits', controller=controller, action='fixUnits')
        map.connect('fixCitations', '/ecosis/admin/fixCitations', controller=controller, action='fixCitations')
        # ecosis - admin doi
        map.connect('getAllGithubInfo', '/ecosis/admin/github/sync', controller=controller, action='getAllGithubInfo')
        # ecosis - package
        map.connect('setPrivate', '/ecosis/package/setPrivate', controller=controller, action='setPrivate')
        # ecosis - workspace
        # custom pages
        # map.connect('remotelogin', '/user/remotelogin', controller='ckanext.ecosis.plugin:StaticPageController', action='remotelogin')
        return map

    def package_types(self):
        # This plugin doesn't handle any special package types, it just
        # registers itself as the default (above).
        return []

    def _modify_package_schema(self, schema):
        # Add custom access_level as extra field
        return schema

    def create_package_schema(self):
        schema = super(EcosisPlugin, self).create_package_schema()
        schema = self._modify_package_schema(schema)
        return schema

    def update_package_schema(self):
        schema = super(EcosisPlugin, self).update_package_schema()
        schema = self._modify_package_schema(schema)
        return schema

    def show_package_schema(self):
        schema = super(EcosisPlugin, self).show_package_schema()
        return schema

    # check_data_dict() is deprecated, this method is only here to test that
    # legacy support for the deprecated method works.
    def check_data_dict(self, data_dict, schema=None):
        return

    def setup_template_variables(self, context, data_dict):
        return super(EcosisPlugin, self).setup_template_variables(
            context, data_dict)

    def new_template(self):
        return super(EcosisPlugin, self).new_template()

    def read_template(self):
        return super(EcosisPlugin, self).read_template()

    def edit_template(self):
        return super(EcosisPlugin, self).edit_template()

    def comments_template(self):
        return super(EcosisPlugin, self).comments_template()

    def search_template(self):
        return super(EcosisPlugin, self).search_template()

    def history_template(self):
        return super(EcosisPlugin, self).history_template()

    def package_form(self):
        return super(EcosisPlugin, self).package_form()
###
# IPackageController
###
|
{"/ckanext/ecosis/lib/data_package_importer.py": ["/ckanext/ecosis/controller/resource/__init__.py", "/ckanext/ecosis/lib/aws.py"], "/ckanext/ecosis/datastore/vocab/controlled.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/push/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/lookup.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py", "/ckanext/ecosis/controller/admin/upgrade.py"], "/ckanext/ecosis/controller/user/__init__.py": ["/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py"], "/ckanext/ecosis/datastore/files/excel.py": ["/ckanext/ecosis/datastore/files/__init__.py"], "/ckanext/ecosis/datastore/workspace/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/files/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/delete/__init__.py": ["/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/mapreduce/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/upgrade.py": ["/ckanext/ecosis/datastore/ckan/package.py", "/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/resource/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/delete/__init__.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/controller/package/doi.py", 
"/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/controller/organization/__init__.py": ["/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/spectra/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/user_data/paster.py": ["/ckanext/ecosis/user_data/model.py"], "/ckanext/ecosis/controller/package/doi.py": ["/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/plugin.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/controller/organization/__init__.py", "/ckanext/ecosis/controller/package/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/controller/__init__.py", "/ckanext/ecosis/user_data/paster.py"], "/ckanext/ecosis/controller/workspace/__init__.py": ["/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/datastore/query/workspace.py", "/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/datastore/files/utils.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/controller/package/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/lib/data_package_importer.py"], "/ckanext/ecosis/controller/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/workspace.py": 
["/ckanext/ecosis/datastore/ckan/__init__.py"]}
|
7,052
|
CSTARS/ckanext-ecosis
|
refs/heads/master
|
/ckanext/ecosis/datastore/files/__init__.py
|
from . import process
from . import excel
# inject global dependencies
def init(collections, workspaceDir):
    """Wire shared state into the file-processing submodules.

    Args:
        collections: shared datastore collections handed to `process`.
        workspaceDir: workspace root directory handed to the `excel` handler.
    """
    process.init(collections)
    excel.init(workspaceDir)
|
{"/ckanext/ecosis/lib/data_package_importer.py": ["/ckanext/ecosis/controller/resource/__init__.py", "/ckanext/ecosis/lib/aws.py"], "/ckanext/ecosis/datastore/vocab/controlled.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/push/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/lookup.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py", "/ckanext/ecosis/controller/admin/upgrade.py"], "/ckanext/ecosis/controller/user/__init__.py": ["/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py"], "/ckanext/ecosis/datastore/files/excel.py": ["/ckanext/ecosis/datastore/files/__init__.py"], "/ckanext/ecosis/datastore/workspace/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/files/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/delete/__init__.py": ["/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/mapreduce/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/upgrade.py": ["/ckanext/ecosis/datastore/ckan/package.py", "/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/resource/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/delete/__init__.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/controller/package/doi.py", 
"/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/controller/organization/__init__.py": ["/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/spectra/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/user_data/paster.py": ["/ckanext/ecosis/user_data/model.py"], "/ckanext/ecosis/controller/package/doi.py": ["/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/plugin.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/controller/organization/__init__.py", "/ckanext/ecosis/controller/package/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/controller/__init__.py", "/ckanext/ecosis/user_data/paster.py"], "/ckanext/ecosis/controller/workspace/__init__.py": ["/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/datastore/query/workspace.py", "/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/datastore/files/utils.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/controller/package/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/lib/data_package_importer.py"], "/ckanext/ecosis/controller/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/workspace.py": 
["/ckanext/ecosis/datastore/ckan/__init__.py"]}
|
7,053
|
CSTARS/ckanext-ecosis
|
refs/heads/master
|
/setup.py
|
from setuptools import setup, find_packages
import sys, os
version = '0.1'
setup(
name='ckanext-ecosis',
version=version,
description="EcoSIS extension adding additional fields and controls for spectral data",
long_description="""\
""",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='Spectra',
author='Justin Merz',
author_email='jrmerz@ucdavis.edu',
url='http://www.ucdavis.edu',
license='',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=['ckanext'],
include_package_data=True,
zip_safe=False,
install_requires=[],
entry_points="""
[ckan.plugins]
ecosis=ckanext.ecosis.plugin:EcosisPlugin
[ckan.click_command]
ecosis = ckanext.ecosis.user_data.paster
""",
)
|
{"/ckanext/ecosis/lib/data_package_importer.py": ["/ckanext/ecosis/controller/resource/__init__.py", "/ckanext/ecosis/lib/aws.py"], "/ckanext/ecosis/datastore/vocab/controlled.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/push/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/lookup.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py", "/ckanext/ecosis/controller/admin/upgrade.py"], "/ckanext/ecosis/controller/user/__init__.py": ["/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py"], "/ckanext/ecosis/datastore/files/excel.py": ["/ckanext/ecosis/datastore/files/__init__.py"], "/ckanext/ecosis/datastore/workspace/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/files/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/delete/__init__.py": ["/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/mapreduce/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/upgrade.py": ["/ckanext/ecosis/datastore/ckan/package.py", "/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/resource/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/delete/__init__.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/controller/package/doi.py", 
"/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/controller/organization/__init__.py": ["/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/spectra/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/user_data/paster.py": ["/ckanext/ecosis/user_data/model.py"], "/ckanext/ecosis/controller/package/doi.py": ["/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/plugin.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/controller/organization/__init__.py", "/ckanext/ecosis/controller/package/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/controller/__init__.py", "/ckanext/ecosis/user_data/paster.py"], "/ckanext/ecosis/controller/workspace/__init__.py": ["/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/datastore/query/workspace.py", "/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/datastore/files/utils.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/controller/package/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/lib/data_package_importer.py"], "/ckanext/ecosis/controller/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/workspace.py": 
["/ckanext/ecosis/datastore/ckan/__init__.py"]}
|
7,054
|
CSTARS/ckanext-ecosis
|
refs/heads/master
|
/ckanext/ecosis/controller/workspace/__init__.py
|
import ckanext.ecosis.datastore.workspace as workspace
import ckanext.ecosis.datastore.query.workspace as workspaceQuery
from ckanext.ecosis.datastore.push import Push
from ckanext.ecosis.lib.utils import jsonStringify
from ckan.common import request
from ckan.lib.base import c, model
import ckan.logic as logic
def prepare():
    """Prepare a package's workspace for editing.

    Request params:
        package_id: package name or id (required).
        force:      "true" to force re-preparation of the workspace.
        clean:      "true" to wipe the package's workspace state first.

    Returns:
        JSON string describing the prepared workspace.

    Raises:
        Exception: when the package cannot be resolved.
    """
    package_id = request.params.get('package_id')

    # get package by name or id
    context = {'model': model, 'user': c.user}
    ckanPackage = logic.get_action('package_show')(context, {'id': package_id})
    if ckanPackage is None:
        raise Exception('Invalid package ID')

    force = request.params.get('force')
    clean = request.params.get('clean')
    # BUG FIX: the original assigned False in BOTH branches, so the force
    # flag could never actually be enabled; "true" now maps to True.
    if force == "true":
        force = True
    else:
        force = False

    # remove old unused packages
    workspace.clean(current_package_id=package_id)

    if clean == "true":
        workspace.cleanPackage(ckanPackage.get("id"))

    result = workspace.prepare(ckanPackage.get("id"), force)
    if clean == "true":
        result['cleaned'] = True

    return jsonStringify(result)
def pushToSearch():
    """Push a package to the EcoSIS search index, optionally emailing the user.

    Reads `package_id` and `email` from the request; `email` is treated as
    true only when the param equals "true" (case-insensitive).
    """
    package_id = request.params.get('package_id')

    raw_email = request.params.get('email')
    if raw_email is None:
        raw_email = "false"

    context = {'model': model, 'user': c.user}
    ckanPackage = logic.get_action('package_show')(context, {"id": package_id})

    send_email = raw_email == True or raw_email.lower() == "true"

    return Push().run(ckanPackage, send_email, c.userobj.email, c.userobj.display_name)
def get():
    """Return the workspace state for a package (by name or id) as JSON."""
    pkg_id = request.params.get('package_id')

    # resolve package by name or id
    context = {'model': model, 'user': c.user}
    pkg = logic.get_action('package_show')(context, {'id': pkg_id})

    state = workspaceQuery.get(pkg.get("id"))
    return jsonStringify(state)
def clean():
response.headers["Content-Type"] = "application/json"
package_id = request.params.get('package_id')
workspace.cleanPackage(package_id)
return jsonStringify({
"cleaned": True,
"packageId" : package_id
})
|
{"/ckanext/ecosis/lib/data_package_importer.py": ["/ckanext/ecosis/controller/resource/__init__.py", "/ckanext/ecosis/lib/aws.py"], "/ckanext/ecosis/datastore/vocab/controlled.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/push/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/lookup.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py", "/ckanext/ecosis/controller/admin/upgrade.py"], "/ckanext/ecosis/controller/user/__init__.py": ["/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py"], "/ckanext/ecosis/datastore/files/excel.py": ["/ckanext/ecosis/datastore/files/__init__.py"], "/ckanext/ecosis/datastore/workspace/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/files/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/delete/__init__.py": ["/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/mapreduce/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/upgrade.py": ["/ckanext/ecosis/datastore/ckan/package.py", "/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/resource/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/delete/__init__.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/controller/package/doi.py", 
"/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/controller/organization/__init__.py": ["/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/spectra/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/user_data/paster.py": ["/ckanext/ecosis/user_data/model.py"], "/ckanext/ecosis/controller/package/doi.py": ["/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/plugin.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/controller/organization/__init__.py", "/ckanext/ecosis/controller/package/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/controller/__init__.py", "/ckanext/ecosis/user_data/paster.py"], "/ckanext/ecosis/controller/workspace/__init__.py": ["/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/datastore/query/workspace.py", "/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/datastore/files/utils.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/controller/package/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/lib/data_package_importer.py"], "/ckanext/ecosis/controller/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/workspace.py": 
["/ckanext/ecosis/datastore/ckan/__init__.py"]}
|
7,055
|
CSTARS/ckanext-ecosis
|
refs/heads/master
|
/ckanext/ecosis/controller/workspace/import.py
|
# Contains functions for importing from remote zip file
# Can be web or s3 accessible
# def packageImport(uri):
|
{"/ckanext/ecosis/lib/data_package_importer.py": ["/ckanext/ecosis/controller/resource/__init__.py", "/ckanext/ecosis/lib/aws.py"], "/ckanext/ecosis/datastore/vocab/controlled.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/push/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/lookup.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py", "/ckanext/ecosis/controller/admin/upgrade.py"], "/ckanext/ecosis/controller/user/__init__.py": ["/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py"], "/ckanext/ecosis/datastore/files/excel.py": ["/ckanext/ecosis/datastore/files/__init__.py"], "/ckanext/ecosis/datastore/workspace/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/files/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/delete/__init__.py": ["/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/mapreduce/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/upgrade.py": ["/ckanext/ecosis/datastore/ckan/package.py", "/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/resource/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/delete/__init__.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/controller/package/doi.py", 
"/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/controller/organization/__init__.py": ["/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/spectra/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/user_data/paster.py": ["/ckanext/ecosis/user_data/model.py"], "/ckanext/ecosis/controller/package/doi.py": ["/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/plugin.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/controller/organization/__init__.py", "/ckanext/ecosis/controller/package/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/controller/__init__.py", "/ckanext/ecosis/user_data/paster.py"], "/ckanext/ecosis/controller/workspace/__init__.py": ["/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/datastore/query/workspace.py", "/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/datastore/files/utils.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/controller/package/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/lib/data_package_importer.py"], "/ckanext/ecosis/controller/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/workspace.py": 
["/ckanext/ecosis/datastore/ckan/__init__.py"]}
|
7,056
|
CSTARS/ckanext-ecosis
|
refs/heads/master
|
/ckanext/ecosis/datastore/files/utils.py
|
import re
from ..vocab import controlled as controlledVocabulary
'''
Utilities for the process module
'''
# grab extension for a file
# TODO: make lower case
def getFileExtension(filename):
return re.sub(r".*\.", "", filename)
# given a sheet config, get the correct layout
def getLayout(sheetConfig):
layout = 'row' # default
if 'layout' in sheetConfig:
layout = sheetConfig['layout']
else: # set default for saving
sheetConfig['layout'] = 'row'
return layout
# walk of data[][] and discover data ranges
# this will find the first two tables of data
def getDataRanges(data):
ranges = []
r = {
"start" : 0,
"stop" : 0
}
started = False
couldBeGlobal = False
i = 0
for i in range(0, len(data)):
if _isEmptyRow(data[i]):
if started:
r['stop'] = i-1
ranges.append(r)
started = False
if len(ranges) == 2:
break
else:
r = {"start":0, "stop":0}
continue
elif couldBeGlobal and len(data[i]) != 2:
r['stop'] = i-1
ranges.append(r)
started = False
couldBeGlobal = False
if len(ranges) == 2:
break
else:
r = {"start":0, "stop":0}
if not started:
# if we are on the first range and there are two columns
# we may be looking at global data
if len(data[i]) == 2 and len(ranges) == 0:
couldBeGlobal = True
r['start'] = i
started = True
if started and len(ranges) < 2:
r['stop'] = i
ranges.append(r)
elif not started and len(ranges) == 0:
ranges.append(r)
return ranges
# is a row array empty
def _isEmptyRow(row):
if len(row) == 0:
return True
for i in range(0, len(row)):
if row[i] != "" and row[i] != None:
return False
return True
# parse out the attribute information from the attribute information
# TODO: check for units and attribute data type
def parseAttrType(name, pos):
original = name
units = None
# clean up string
name = name.strip()
# parse out units
if re.match(r".*\(.*\)\s*$", name):
units = re.sub(r".*\(","", name)
units = re.sub(r"\)\s*","", units)
name = re.sub(r"\(.*", "", name).strip()
type = "metadata" # default type
# if attribute name is actually a number, assum its a wavelength
if re.match(r"^-?\d+\.?\d*", name) or re.match(r"^-?\d*\.\d+", name):
type = "wavelength"
name = re.sub(r"\.0+$", "", name)
# otherwise lookup and see if we 'flatten' (lower case, no spaces) name, does it match
# a EcoSIS defined schema name. If so, set as 'pretty' schema name
else:
name = controlledVocabulary.getEcoSISName(name)
# clean up name for Mongo
if type == "metadata":
name = re.sub(r'[\.\$]', '', name)
else:
name = re.sub(r'\$', '', name)
name = re.sub(r'\.', ',', name)
attr = {
"type" : type,
"name" : name,
"pos" : "%s-%s" % (pos[0], pos[1])
}
# if units were found, store them
if units != None:
attr["units"] = units
# if the name was changed, store the name that was given to us as well
if original != name:
attr["originalName"] = original
return attr
|
{"/ckanext/ecosis/lib/data_package_importer.py": ["/ckanext/ecosis/controller/resource/__init__.py", "/ckanext/ecosis/lib/aws.py"], "/ckanext/ecosis/datastore/vocab/controlled.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/push/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/lookup.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/datastore/mapreduce/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py", "/ckanext/ecosis/controller/admin/upgrade.py"], "/ckanext/ecosis/controller/user/__init__.py": ["/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py"], "/ckanext/ecosis/datastore/files/excel.py": ["/ckanext/ecosis/datastore/files/__init__.py"], "/ckanext/ecosis/datastore/workspace/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/files/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/__init__.py": ["/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/delete/__init__.py": ["/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/mapreduce/__init__.py": ["/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/controller/admin/upgrade.py": ["/ckanext/ecosis/datastore/ckan/package.py", "/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/resource/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/delete/__init__.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/controller/package/doi.py", 
"/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/controller/organization/__init__.py": ["/ckanext/ecosis/datastore/__init__.py"], "/ckanext/ecosis/controller/spectra/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/user_data/paster.py": ["/ckanext/ecosis/user_data/model.py"], "/ckanext/ecosis/controller/package/doi.py": ["/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py"], "/ckanext/ecosis/plugin.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/controller/organization/__init__.py", "/ckanext/ecosis/controller/package/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/user_data/model.py", "/ckanext/ecosis/controller/__init__.py", "/ckanext/ecosis/user_data/paster.py"], "/ckanext/ecosis/controller/workspace/__init__.py": ["/ckanext/ecosis/datastore/workspace/__init__.py", "/ckanext/ecosis/datastore/query/workspace.py", "/ckanext/ecosis/datastore/push/__init__.py", "/ckanext/ecosis/lib/utils.py"], "/ckanext/ecosis/datastore/files/utils.py": ["/ckanext/ecosis/datastore/vocab/__init__.py"], "/ckanext/ecosis/datastore/__init__.py": ["/ckanext/ecosis/datastore/query/__init__.py", "/ckanext/ecosis/datastore/utils/__init__.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/controller/package/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/__init__.py", "/ckanext/ecosis/lib/auth.py", "/ckanext/ecosis/datastore/ckan/__init__.py", "/ckanext/ecosis/controller/package/doi.py", "/ckanext/ecosis/lib/data_package_importer.py"], "/ckanext/ecosis/controller/__init__.py": ["/ckanext/ecosis/lib/utils.py", "/ckanext/ecosis/datastore/mongo.py"], "/ckanext/ecosis/datastore/query/workspace.py": 
["/ckanext/ecosis/datastore/ckan/__init__.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.