index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
20,571
|
UmSenhorQualquer/googlespreadsheet2django
|
refs/heads/master
|
/googlespreadsheet2django/models/model.py
|
import os
from googlespreadsheet2django.models.field import *
from googlespreadsheet2django.models.abstract_model import *
class Model(object):
    """One generated Django model, built from a 'Table_<Name>' worksheet.

    Row 0 of the worksheet holds the application name, the verbose names and
    the data-access policy; field rows start at FIELDS_STARTROW.  The object
    renders itself as: an abstract-model module (__unicode__/__str__), a
    concrete models.py stub (model), an admin module (admin / modelAdmin) and
    the companion show/hide JavaScript (js).

    NOTE(review): the tab characters inside the triple-quoted templates are
    significant -- the trailing ``res.replace('\\n\\t\\t\\t|', '\\n')`` /
    ``replace('\\t\\t\\t', '')`` calls strip them from the rendered output.
    """

    FIELDS_STARTROW = 3  # first worksheet row that describes a field

    def __init__(self, modelLoader, name, worksheet, answers):
        self._modelLoader = modelLoader
        self._name = name
        self._answers = answers
        self._fields = []
        # Header row 0: application, verbose names, access policy.
        self._application = worksheet.cell(0, 0).value.lower()
        self._verbose_name = worksheet.cell(0, 1).value
        self._verbose_name_plural = worksheet.cell(0, 2).value
        self._permissions = worksheet.cell(0, 3).value
        self.__load(worksheet, answers)

    def __load(self, worksheet, answers):
        """Group field rows into AbstractModel buckets keyed by (tab, group)."""
        self._orderedTables = []
        self._tables = {}
        for row in range(self.FIELDS_STARTROW, worksheet.nrows):
            tab = worksheet.cell(row, 2).value
            group = worksheet.cell(row, 3).value
            key = "%s - %s" % (tab, group)
            if key not in self._tables:
                abstractModel = AbstractModel(tab, group)
                self._tables[key] = abstractModel
                self._orderedTables.append( abstractModel )
            field = Field(self._tables[key], answers, tab, group, worksheet, row)
            self._tables[key].addField(field)
            self._fields.append(field)

    def addField(self, field): self._fields.append(field)

    @property
    def model_unicode(self):
        """Source of __unicode__/__str__ for the generated model, or '' when
        no field is flagged (column 15) as part of the object's display name."""
        fs = []
        for f in sorted(self._fields, key=lambda a: a._useonname):
            if f._useonname: fs.append('force_text(self.'+f._column+')')
        if len(fs)>0:
            return """\n\tdef __unicode__(self): return %s\n\n\tdef __str__(self): return str(self.__unicode__())\n""" % "+' - '+".join(fs)
        else:
            return ''

    @property
    def name(self):
        """Model name with parentheses stripped, so it stays a legal class name."""
        return self._name.replace('(', '').replace(')', '')

    def __unicode__(self):
        """Render the abstract-model module source."""
        res = "from django.db import models\n"
        res += "from django.contrib.auth.models import User\n"
        res += "from django.core.validators import MaxValueValidator, MinValueValidator\n"
        res += "from django.utils.encoding import force_text\n"
        #res += '\n'.join(self.foreignModels2Import)
        res += '\n\n'
        res += str( self._answers.codeFor(self.answers) )+"\n\n"
        for model in self._tables.values(): res += "%s\n" % str(model)
        res += '\nclass Abstract%s(%s):' % (self.name, ',\n\t'.join([ model.tablename for model in self._tables.values() ]))
        res += '\n\t%s' % self.model_unicode
        res += '\n\n\tclass Meta:'
        res += '\n\t\tabstract = True'
        res += '\n\t\tverbose_name = "%s"' % self._verbose_name
        res += '\n\t\tverbose_name_plural = "%s"\n' % self._verbose_name_plural
        # Helper consulted by is_complete() for plain show/hide dependencies.
        res += """
\t\t\t|\tdef ShowHideIf(self, checkingField, rules):
\t\t\t|\t\tvalues, listOfFields = rules
\t\t\t|\t\tvalues = values.split(';')
\t\t\t|\t\tif str(self.__dict__[checkingField]) in values:
\t\t\t|\t\t\tfor field in listOfFields:
\t\t\t|\t\t\t\tif not self.__dict__[checkingField]!=None: return False
\t\t\t|\t\treturn True
"""
        # Same idea for ManyToMany dependency fields.
        res += """
\t\t\t|\tdef ShowHideIfManyToMany(self, checkingField, rules):
\t\t\t|\t\tvalues, listOfFields = rules
\t\t\t|\t\tvalues = values.split(';')
\t\t\t|
\t\t\t|\t\tselected = getattr(self,checkingField).all()
\t\t\t|\t\tactive = False
\t\t\t|\t\tfor v in selected:
\t\t\t|\t\t\tif v in values:
\t\t\t|\t\t\t\tactive=True
\t\t\t|\t\t\t\tbreak
\t\t\t|\t\tif active:
\t\t\t|\t\t\tfor field in listOfFields:
\t\t\t|\t\t\t\tif self.__dict__[checkingField]==None: return False
\t\t\t|\t\treturn True
"""
        # Build the is_complete() condition: dependency-driven checks first,
        # then a not-None test for every remaining visible real field.
        used_fields = []
        is_complete = []
        for field, values, fields2show in self.dependencies.values():
            used_fields += list(values)
            if self[field].fieldtype=='ManyToManyField':
                is_complete.append( "self.ShowHideIfManyToMany('{0}','{1}',{2})".format(field, values, fields2show) )
            else:
                is_complete.append( "self.ShowHideIf('{0}','{1}', {2})".format(field, values,fields2show) )
        for field in self._fields:
            if field.fieldname not in used_fields and field.fieldtype!=None and field._visible==True:
                is_complete.append("getattr(self,'{0}')!=None".format( field.fieldname) )
        res +="""
\t\t\t|\tdef is_complete(self):
\t\t\t|\t\treturn {0}
\t\t\t|\tis_complete.short_description="Complete"
\t\t\t|\tis_complete.boolean = True
""".format( ' and \\\n\t\t\t'.join(is_complete), )
        res = res.replace('\n\t\t\t|', '\n')
        return res

    @property
    def model(self):
        """Concrete models.py stub, delimited by auto:start/auto:end markers."""
        res = "##### auto:start:%s #####\n" % self._name
        res += "from {0}.abstractmodels.{1} import Abstract{1}\n".format(self._application, self._name)
        res += '\n'
        res += "class %s(Abstract%s):" % (self._name, self._name)
        res += '\n\tpass\n'
        res += "\t##### auto:end:%s #####\n" % self._name
        return res

    @property
    def modelAdmin(self):
        """Concrete admin.py stub, delimited by auto:start/auto:end markers."""
        res = """##### auto:start:{0} #####
\t\t\tfrom {1}.models import {0}
\t\t\tfrom {1}.admins.{0}Admin import *
\t\t\tclass {0}Admin({0}AdminAbstract):
\t\t\t\tpass
\t\t\t##### auto:end:{0} #####\n""".format( self._name, self._application)
        res = res.replace('\t\t\t', '')
        return res

    def __str__(self): return self.__unicode__()

    def __strip(self, string):
        """Remove separator characters so the value can be used as a slug."""
        for x in [' ','.','-','_']:
            string = string.replace(x, '')
        return string

    @property
    def foreignModels2Import(self):
        """Import lines for models referenced by FK / M2M fields (currently
        unused -- see the commented-out call in __unicode__)."""
        models_to_import = [x.choices for x in self._fields if x._type=='Foreign key' or x._type=='Multiple choice']
        res = []
        for m in models_to_import:
            model = self._modelLoader.getModel(m)
            if model:
                res.append("""from %s.models import %s""" % (model._application, model._name) )
        return res

    @property
    def tablename(self):
        """Table name derived from the first field's column prefix."""
        firstfield = self._fields[0]
        return firstfield._column.split('_')[0].title()

    @property
    def list_display(self):
        """Quoted field names for list_display, ordered by column 12's value."""
        l = [(x._showinlist, x.fieldname) for x in self._fields if x._showinlist!='']
        l = sorted(l, key=lambda x: x[0])
        return ["'%s'" % x[1] for x in l]

    @property
    def list_filter(self): return [ "'%s'" % x.fieldname for x in self._fields if x._filterby]

    @property
    def search_list(self): return [ "'%s'" % x.fieldname for x in self._fields if x._searchby]

    @property
    def createdByUserField(self):
        """Name of the 'Created by user' field, or None when absent."""
        for x in self._fields:
            if x._type=='Created by user': return x.fieldname
        return None

    @property
    def answers(self): return [x._choices for x in self._fields if x._choices and x.fieldtype=='CharField']

    @property
    def foo(self):
        # NOTE(review): self._foo is never assigned anywhere in this class --
        # accessing this property raises AttributeError; looks like leftover
        # scaffolding.  Kept for interface compatibility.
        return self._foo

    @property
    def tab(self):
        # NOTE(review): self._tab is never assigned on Model (Field has _tab);
        # presumably dead code -- verify against callers before removing.
        tab = self.__strip(self._tab).replace('\\','')
        return tab.lower()

    @property
    def readonlyFields(self):
        """Quoted names of fields that must be read-only in the admin."""
        res = []
        for row in self._orderedTables:
            for field in row._fields:
                if field._type in ['Creation date and time','Update date and time','Number of identification','Created by user', 'Function']:
                    res.append("'%s'" % field.fieldname)
        return res

    @property
    def admin(self):
        """Render the <Name>AdminAbstract module source."""
        # (A dead block that pre-built an import header into `res` and then
        # discarded it was removed here -- the template below carries its own
        # import lines.)
        list_display = ''
        if len(self.list_display)>0:
            list_display = """list_display = (%s,)""" % ','.join(self.list_display)
        list_filter = ''
        if len(self.list_filter)>0:
            list_filter = """list_filter = (%s,)""" % ','.join(self.list_filter)
        search_fields = ''
        if len(self.search_list)>0:
            search_fields = """search_fields = [%s,]""" % ','.join(self.search_list)
        readonly_fields = ''
        if len(self.readonlyFields)>0:
            readonly_fields = "readonly_fields = (%s,)\n" % ", ".join(list(set(self.readonlyFields)))
        # django-suit fieldsets, one entry per (tab, group) bucket.
        include_tfieldsets = False
        res = "fieldsets = ["
        for x in self._orderedTables:
            if len(x.fieldsList)==0: continue
            include_tfieldsets = True
            fields = "'"+"','".join(x.fieldsList)+"'"
            res += "\n\t\t('%s',{" % x._group
            res += "\n\t\t\t'classes': ('suit-tab suit-tab-%s',)," % x.tab
            res += "\n\t\t\t'fields': [%s]\n\t\t}" % fields
            res += "),"
        res += "\n\t]"
        fieldsets = res if include_tfieldsets else ''
        # django-suit tab declarations (deduplicated, in table order).
        include_tsuit_form_tabs = False
        listoftabs = []
        res = ''
        for x in self._orderedTables:
            if len(x.fieldsList)==0: continue
            if str((x.tab,x._tab)) not in listoftabs:
                include_tsuit_form_tabs = True
                listoftabs.append( str((x.tab,x._tab)) )
        res += "suit_form_tabs = [\n\t\t"
        res += ",".join(listoftabs)
        res += "\n\t]\n\n"
        tsuit_form_tabs = res if include_tsuit_form_tabs else ''
        # Radio widgets for choice CharFields, orientation from column 5.
        fields = []
        for x in self._tables.values():
            for f in x._fields:
                if f._choices and f.fieldtype=='CharField':
                    if f._size == 'Horizontal disposition':
                        fields.append( "\t\t'%s': admin.HORIZONTAL" % f.fieldname )
                    else:
                        fields.append( "\t\t'%s': admin.VERTICAL" % f.fieldname )
        radio_fields = ''
        if len(fields)>0:
            radio_fields = "radio_fields = {\n"
            radio_fields += ",\n".join(fields)
            radio_fields += "\n\t}"
        #### Restrict access ##########################################################
        createdby = ''
        if self._permissions != 'All data is accessible to users' and self.createdByUserField!=None:
            createdby = """def save_model(self, request, obj, form, change):\n"""
            createdby += """\t\tif obj.pk==None: obj.%s = request.user\n""" % self.createdByUserField
            createdby += """\t\tsuper(%sAdminAbstract, self).save_model(request, obj, form, change)\n\n""" % self._name
            createdby += '\tdef queryset(self, request):\n'
            createdby += '\t\tqs = super(%sAdminAbstract, self).queryset(request)\n' % self._name
            if self._permissions == 'Restrict data access by the creator':
                createdby += '\t\tqs = qs.filter( %s = request.user )\n' % self.createdByUserField
            if self._permissions == 'Restrict data access by the creator group':
                createdby += "\t\tgroups = request.user.groups.all()\n"
                createdby += '\t\tqs = qs.filter( %s__groups = groups ).distinct()\n' % self.createdByUserField
            createdby += '\t\treturn qs\n'
        ###############################################################################
        res = """
\t\t\t|from {6}.models import {0}
\t\t\t|from django.forms import Textarea, CheckboxSelectMultiple
\t\t\t|from django.forms.models import ModelMultipleChoiceField
\t\t\t|from django.utils.translation import ugettext as _
\t\t\t|from django.contrib import admin
\t\t\t|from django.conf import settings
\t\t\t|from django.db import models
\t\t\t|#from common.admintools import export_xlsx, printable_html
\t\t\t|class {0}AdminAbstract(admin.ModelAdmin):
\t\t\t|\t{2}
\t\t\t|\t{4}
\t\t\t|\t{5}
\t\t\t|\t{7}
\t\t\t|\t{8}
\t\t\t|\t{9}
\t\t\t|\t{10}
\t\t\t|\t#actions = [export_xlsx,]
\t\t\t|\tformfield_overrides = dict((
\t\t\t|\t\t(models.TextField,dict((( 'widget',Textarea(attrs=dict(rows=5, cols=120,style='width: 600px;') )),) )),
\t\t\t|\t\t(models.ManyToManyField,dict((('widget',CheckboxSelectMultiple),)))
\t\t\t|\t),)
\t\t\t|\tclass Media:
\t\t\t|\t\tcss = dict(all=['generic.css','fixadmin.css'])
\t\t\t|\t\tjs = ('generic.js','models/{1}.js')
\t\t\t|\t{3}
\t\t\t|\tdef get_actions(self, request):
\t\t\t|\t\tactions = super({0}AdminAbstract, self).get_actions(request)
\t\t\t|\t\tuser = request.user
\t\t\t|\t\t#if not user.groups.filter(name=settings.HTML_EXPORTER_PROFILE_GROUP).exists(): del actions['printable_html']
\t\t\t|\t\t#if not user.groups.filter(name=settings.EXCEL_EXPORTER_PROFILE_GROUP).exists(): del actions['export_xlsx']
\t\t\t|\t\treturn actions
\t\t\t|\tdef construct_change_message(self, request, form, formsets, add=False):
\t\t\t|\t\tmessage = super({0}AdminAbstract, self).construct_change_message(request, form, formsets)
\t\t\t|\t\tchange_message = []
\t\t\t|\t\tif form.changed_data:
\t\t\t|\t\t\tvalues = []
\t\t\t|\t\t\tfor x in form.changed_data:
\t\t\t|\t\t\t\tfield = form.fields[x]
\t\t\t|\t\t\t\tinitial = form.initial.get(x,None)
\t\t\t|\t\t\t\tvalue = form.cleaned_data[x]
\t\t\t|\t\t\t\tif isinstance(field, ModelMultipleChoiceField):
\t\t\t|\t\t\t\t\tvalue = [int(y.pk) for y in value]
\t\t\t|\t\t\t\t\tinitial = [int(y) for y in initial] if initial!=None else []
\t\t\t|\t\t\t\tvalues.append( _(": %s -> %s" % (str(initial), str(value)) ) )
\t\t\t|\t\t\tchange_message.append( '%s' % ','.join(values) )
\t\t\t|\t\tmessage += ' '.join(change_message)
\t\t\t|\t\treturn message
""".format(
            self._name, self._name.lower(), list_display,
            createdby, list_filter, search_fields, self._application,
            readonly_fields, fieldsets, tsuit_form_tabs, radio_fields )
        res = res.replace('\n\t\t\t|', '\n')
        return res

    @property
    def dependencies(self):
        """return a dictionary of fields dependencies configuration"""
        showhide = {}
        for table in self._orderedTables:
            for field in table._fields:
                if field._columnDependency!='':
                    k = self[field._columnDependency]
                    key = "{0}-{1}".format( k.fieldname, field._valuesDependency )
                    if key not in showhide: showhide[key]=(k.fieldname, field._valuesDependency,[])
                    showhide[key][2].append(str(field.fieldname))
        return showhide

    @property
    def js(self):
        """Render the per-model show/hide JavaScript for the admin form."""
        showhide = self.dependencies
        res = '(function($){ $(document).ready(function(){\n\n'
        for key, values, columns in showhide.values():
            res += "\tShowHideIf( '%s', '%s', %s, true );\n" % (key,values, columns)
        res += '\n\n }); }(Suit.$));'
        return res

    def __getitem__(self, key):
        """Look a Field up by its fieldname, or None."""
        for row in self._orderedTables:
            for field in row._fields:
                name = field.fieldname
                if name == key: return field
        return None

    def __findModelInFile(self, infile):
        """Return (start, end) line indexes of this model's auto-marker block
        in `infile`, or None when the block (or its end marker) is missing."""
        infile.seek(0)
        start, end = None, None
        for i, line in enumerate(infile):
            if start==None and ('auto:start:%s' % self._name) in line: start = i
            if start!=None and ('auto:end:%s' % self._name) in line:
                end = i
                return start, end
        return None

    def __findModelAdminRegistration(self, infile):
        """Line index of this model's admin.site.register call, or None."""
        infile.seek(0)
        for i, line in enumerate(infile):
            word = 'admin.site.register(%s, %sAdmin)' % (self._name,self._name)
            if word in line: return i
        return None

    def saveAdmin(self, parentPath):
        """Write <app>/admins/<Name>Admin.py (always overwritten)."""
        app_path = os.path.join(parentPath, self._application)
        if not os.path.isdir(app_path): os.mkdir(app_path)
        init_filename = os.path.join(app_path, '__init__.py')
        # BUGFIX: previously tested os.path.isfile(app_path) (a directory),
        # so the package __init__.py was re-created on every run.
        if not os.path.isfile(init_filename):
            outfile = open(init_filename, 'w'); outfile.close()
        admins_path = os.path.join(app_path, 'admins')
        if not os.path.isdir(admins_path): os.mkdir(admins_path)
        init_filename = os.path.join(admins_path, '__init__.py')
        if not os.path.isfile(init_filename):
            outfile = open(init_filename, 'w'); outfile.close()
        admin_filename = os.path.join(admins_path, self._name+'Admin'+'.py')
        outfile = open(admin_filename, 'w')
        outfile.write( self.admin )
        outfile.close()

    def saveJs(self, parentPath):
        """Write static/js/models/<name>.js (always overwritten)."""
        static_path = os.path.join(parentPath, 'static')
        if not os.path.isdir(static_path): os.mkdir(static_path)
        js_path = os.path.join(static_path,'js')
        if not os.path.isdir(js_path): os.mkdir(js_path)
        js_path = os.path.join(js_path,'models')
        if not os.path.isdir(js_path): os.mkdir(js_path)
        js_filename = os.path.join(js_path, self._name.lower()+'.js')
        outfile = open(js_filename, 'w')
        outfile.write( self.js )
        outfile.close()

    def saveModel(self, parentPath):
        """Write <app>/abstractmodels/<Name>.py (always overwritten)."""
        app_path = os.path.join(parentPath, self._application)
        if not os.path.isdir(app_path): os.mkdir(app_path)
        init_filename = os.path.join(app_path, '__init__.py')
        # BUGFIX: previously tested os.path.isfile(app_path) (a directory),
        # so the package __init__.py was re-created on every run.
        if not os.path.isfile(init_filename):
            outfile = open(init_filename, 'w'); outfile.close()
        models_path = os.path.join(app_path, 'abstractmodels')
        if not os.path.isdir(models_path): os.mkdir(models_path)
        init_filename = os.path.join(models_path, '__init__.py')
        if not os.path.isfile(init_filename):
            outfile = open(init_filename, 'w'); outfile.close()
        model_filename = os.path.join(models_path, self._name+'.py')
        print(model_filename)  # BUGFIX: was a Python-2-only print statement
        outfile = open(model_filename, 'w')
        outfile.write( str(self) )
        outfile.close()

    def updateModel(self, parentPath):
        """Insert or refresh this model's marker block in <app>/models.py."""
        app_path = os.path.join(parentPath, self._application)
        if not os.path.isdir(app_path): os.mkdir(app_path)
        model_filename = os.path.join(app_path, 'models.py')
        if not os.path.isfile(model_filename): open(model_filename, 'w').close()
        # BUGFIX: 'r+a' is not a valid mode; 'a+' reads (the helpers seek(0))
        # and appends, which is what both branches below rely on.
        infile = open(model_filename, 'a+')
        position = self.__findModelInFile(infile)
        if position==None:
            infile.write(self.model)
        else:
            # Rewrite through a temp file, replacing the old marker block.
            infile.seek(0)
            start, end = position
            tmp_filename = os.path.join(app_path, 'tmp.py')
            outfile = open(tmp_filename, 'w')
            for i, line in enumerate(infile):
                if i<start: outfile.write(line)
                if i==start: outfile.write(self.model)
                if i>end: outfile.write(line)
            outfile.close()
            os.rename(tmp_filename, model_filename)
        infile.close()

    def updateAdmin(self, parentPath):
        """Insert or refresh this model's marker block in <app>/admin.py."""
        app_path = os.path.join(parentPath, self._application)
        if not os.path.isdir(app_path): os.mkdir(app_path)
        model_filename = os.path.join(app_path, 'admin.py')
        if not os.path.isfile(model_filename): open(model_filename, 'w').close()
        # BUGFIX: 'r+a' is not a valid mode; see updateModel.
        infile = open(model_filename, 'a+')
        position = self.__findModelInFile(infile)
        if position==None:
            # NOTE(review): this branch never appends the admin.site.register
            # line, unlike the rewrite branch below -- confirm intended.
            infile.write(self.modelAdmin)
        else:
            infile.seek(0)
            start, end = position
            tmp_filename = os.path.join(app_path, 'tmp.py')
            outfile = open(tmp_filename, 'w')
            for i, line in enumerate(infile):
                if i<start: outfile.write(line)
                if i==start: outfile.write(self.modelAdmin)
                if i>end: outfile.write(line)
            adminRegistrationLine = self.__findModelAdminRegistration(infile)
            if adminRegistrationLine==None:
                outfile.write( 'admin.site.register(%s, %sAdmin)\n' % (self._name,self._name) )
            outfile.close()
            os.rename(tmp_filename, model_filename)
        infile.close()
|
{"/tests/test1.py": ["/googlespreadsheet2django/builder.py"], "/googlespreadsheet2django/answers/answers.py": ["/googlespreadsheet2django/answers/choice.py"], "/googlespreadsheet2django/models/models_loader.py": ["/googlespreadsheet2django/models/field.py", "/googlespreadsheet2django/models/model.py"], "/googlespreadsheet2django/answers/answers_loader.py": ["/googlespreadsheet2django/answers/choice.py", "/googlespreadsheet2django/answers/answers.py"], "/googlespreadsheet2django/builder.py": ["/googlespreadsheet2django/models/models_loader.py", "/googlespreadsheet2django/answers/answers_loader.py"], "/googlespreadsheet2django/models/abstract_model.py": ["/googlespreadsheet2django/models/field.py"]}
|
20,572
|
UmSenhorQualquer/googlespreadsheet2django
|
refs/heads/master
|
/googlespreadsheet2django/answers/answers.py
|
from googlespreadsheet2django.answers.choice import *
class Answers(object):
    """A named group of choices, rendered as Django `choices` tuple source."""

    def __init__(self, name):
        self._name = name
        self._choices = []

    def __unicode__(self):
        """Render as ``NAME = (\\n<choice>,\\n...<choice>,\\n)``."""
        body = ",\n".join(str(c) for c in self._choices) + ','
        return '%s = (\n%s\n)\n' % (self._name, body)

    def __str__(self):
        return self.__unicode__()

    def addChoice(self, code, label):
        """Append a new Choice built from a code/label pair."""
        self._choices.append(Choice(code, label))

    @property
    def name(self):
        return self._name

    @property
    def columnSize(self):
        """Length of the longest choice code (ValueError when empty)."""
        return max(len(c._code) for c in self._choices)
|
{"/tests/test1.py": ["/googlespreadsheet2django/builder.py"], "/googlespreadsheet2django/answers/answers.py": ["/googlespreadsheet2django/answers/choice.py"], "/googlespreadsheet2django/models/models_loader.py": ["/googlespreadsheet2django/models/field.py", "/googlespreadsheet2django/models/model.py"], "/googlespreadsheet2django/answers/answers_loader.py": ["/googlespreadsheet2django/answers/choice.py", "/googlespreadsheet2django/answers/answers.py"], "/googlespreadsheet2django/builder.py": ["/googlespreadsheet2django/models/models_loader.py", "/googlespreadsheet2django/answers/answers_loader.py"], "/googlespreadsheet2django/models/abstract_model.py": ["/googlespreadsheet2django/models/field.py"]}
|
20,573
|
UmSenhorQualquer/googlespreadsheet2django
|
refs/heads/master
|
/tests/django/details/admins/CountryAdmin.py
|
from details.models import Country
from django.forms import Textarea, CheckboxSelectMultiple
from django.forms.models import ModelMultipleChoiceField
from django.utils.translation import ugettext as _
from django.contrib import admin
from django.conf import settings
from django.db import models
from common.admintools import export_xlsx, printable_html
class CountryAdminAbstract(admin.ModelAdmin):
	# Generated by googlespreadsheet2django (Model.admin template) -- edit the
	# spreadsheet, not this file.  Indentation reconstructed; the dump lost it.
	change_form_template = 'admin/my_change_form.html'
	list_display = ('country_id','country_name',)
	search_fields = ['country_name',]
	readonly_fields = ('country_id',)
	# django-suit fieldsets: one entry per (tab, group) worksheet bucket.
	fieldsets = [
		('Identification',{
			'classes': ('suit-tab suit-tab-country',),
			'fields': ['country_id']
		}),
		('Name',{
			'classes': ('suit-tab suit-tab-country',),
			'fields': ['country_name']
		}),
	]
	suit_form_tabs = [
		(u'country', u'Country')
	]
	actions = [export_xlsx,]
	formfield_overrides = dict((
		(models.TextField,dict((( 'widget',Textarea(attrs=dict(rows=5, cols=120,style='width: 600px;') )),) )),
		(models.ManyToManyField,dict((('widget',CheckboxSelectMultiple),)))
	),)
	class Media:
		css = dict(all=['generic.css','fixadmin.css'])
		js = ('generic.js','models/country.js')
	def get_actions(self, request):
		# Strip the Excel export action from users outside the exporter group.
		actions = super(CountryAdminAbstract, self).get_actions(request)
		user = request.user
		#if not user.groups.filter(name=settings.HTML_EXPORTER_PROFILE_GROUP).exists(): del actions['printable_html']
		if not user.groups.filter(name=settings.EXCEL_EXPORTER_PROFILE_GROUP).exists(): del actions['export_xlsx']
		return actions
	def construct_change_message(self, request, form, formsets):
		# Append an HTML old->new diff of every changed field to the log entry.
		message = super(CountryAdminAbstract, self).construct_change_message(request, form, formsets)
		change_message = []
		if form.changed_data:
			values = []
			for x in form.changed_data:
				field = form.fields[x]
				initial = form.initial[x]
				value = form.cleaned_data[x]
				if isinstance(field, ModelMultipleChoiceField):
					# Compare M2M by primary keys rather than object identity.
					value = [int(y.pk) for y in value]
					initial = [int(y) for y in initial]
				values.append( _("<b>%s</b>: <span style='color:#4682B4' >%s</span> -> <span style='color:#00A600' >%s</span>" % (x, str(initial), str(value)) ) )
			change_message.append( '<ul><li>%s</li></ul>' % '</li><li>'.join(values) )
		message += ' '.join(change_message)
		return message
|
{"/tests/test1.py": ["/googlespreadsheet2django/builder.py"], "/googlespreadsheet2django/answers/answers.py": ["/googlespreadsheet2django/answers/choice.py"], "/googlespreadsheet2django/models/models_loader.py": ["/googlespreadsheet2django/models/field.py", "/googlespreadsheet2django/models/model.py"], "/googlespreadsheet2django/answers/answers_loader.py": ["/googlespreadsheet2django/answers/choice.py", "/googlespreadsheet2django/answers/answers.py"], "/googlespreadsheet2django/builder.py": ["/googlespreadsheet2django/models/models_loader.py", "/googlespreadsheet2django/answers/answers_loader.py"], "/googlespreadsheet2django/models/abstract_model.py": ["/googlespreadsheet2django/models/field.py"]}
|
20,574
|
UmSenhorQualquer/googlespreadsheet2django
|
refs/heads/master
|
/googlespreadsheet2django/models/models_loader.py
|
from googlespreadsheet2django.models.field import *
from googlespreadsheet2django.models.model import *
class ModelsLoader(object):
    """Builds one Model per 'Table_*' worksheet and fans out save/update calls."""

    def __init__(self, workbook, answers):
        self._models = []
        for sheet_name in workbook.sheet_names():
            if not sheet_name.startswith('Table_'):
                continue
            sheet = workbook.sheet_by_name(sheet_name)
            # Model name is the worksheet name minus the 'Table_' prefix.
            self._models.append(Model(self, sheet_name[6:], sheet, answers))

    def getModel(self, name):
        """First loaded model named `name`, or None."""
        matches = [m for m in self._models if m._name == name]
        return matches[0] if matches else None

    def saveModel(self, path):
        for m in self._models:
            m.saveModel(path)

    def updateModel(self, path):
        for m in self._models:
            m.updateModel(path)

    def updateAdmin(self, path):
        for m in self._models:
            m.updateAdmin(path)

    def saveAdmin(self, path):
        for m in self._models:
            m.saveAdmin(path)

    def saveJs(self, path):
        for m in self._models:
            m.saveJs(path)

    @property
    def applications(self):
        """Distinct application names across all models (order unspecified)."""
        return list(set(m._application for m in self._models))
|
{"/tests/test1.py": ["/googlespreadsheet2django/builder.py"], "/googlespreadsheet2django/answers/answers.py": ["/googlespreadsheet2django/answers/choice.py"], "/googlespreadsheet2django/models/models_loader.py": ["/googlespreadsheet2django/models/field.py", "/googlespreadsheet2django/models/model.py"], "/googlespreadsheet2django/answers/answers_loader.py": ["/googlespreadsheet2django/answers/choice.py", "/googlespreadsheet2django/answers/answers.py"], "/googlespreadsheet2django/builder.py": ["/googlespreadsheet2django/models/models_loader.py", "/googlespreadsheet2django/answers/answers_loader.py"], "/googlespreadsheet2django/models/abstract_model.py": ["/googlespreadsheet2django/models/field.py"]}
|
20,575
|
UmSenhorQualquer/googlespreadsheet2django
|
refs/heads/master
|
/googlespreadsheet2django/answers/answers_loader.py
|
from googlespreadsheet2django.answers.choice import *
from googlespreadsheet2django.answers.answers import *
class AnswersLoader(object):
    """Loads choice groups from the 'Answers' worksheet.

    Each group occupies two rows: codes on the first, labels on the one
    below; the group name sits in column 1 of the first row.
    """

    def __init__(self, workbook):
        self._answersList = []
        sheet = workbook.sheet_by_name('Answers')
        for line in range(1, sheet.nrows, 2):
            group_name = sheet.cell(line, 1).value
            if group_name == '':
                continue
            group = Answers(group_name)
            self._answersList.append(group)
            for col in range(2, sheet.ncols):
                code = sheet.cell(line, col).value
                if code == '':
                    continue
                # Numeric cells come back as floats; normalise to int strings.
                if isinstance(code, float):
                    code = str(int(code))
                label = sheet.cell(line + 1, col).value
                if isinstance(label, float):
                    label = str(int(label))
                group.addChoice(code.upper(), label)

    def __unicode__(self):
        return ''.join('%s\n' % str(group) for group in self._answersList)

    def __str__(self):
        return self.__unicode__()

    def codeFor(self, answers):
        """Concatenated source for every group whose name is in `answers`."""
        out = ''
        for group in self._answersList:
            if group._name in answers:
                out += '%s\n' % str(group)
        return out

    def __getitem__(self, key):
        """Group named `key`, or None."""
        for group in self._answersList:
            if group._name == key:
                return group
        return None
|
{"/tests/test1.py": ["/googlespreadsheet2django/builder.py"], "/googlespreadsheet2django/answers/answers.py": ["/googlespreadsheet2django/answers/choice.py"], "/googlespreadsheet2django/models/models_loader.py": ["/googlespreadsheet2django/models/field.py", "/googlespreadsheet2django/models/model.py"], "/googlespreadsheet2django/answers/answers_loader.py": ["/googlespreadsheet2django/answers/choice.py", "/googlespreadsheet2django/answers/answers.py"], "/googlespreadsheet2django/builder.py": ["/googlespreadsheet2django/models/models_loader.py", "/googlespreadsheet2django/answers/answers_loader.py"], "/googlespreadsheet2django/models/abstract_model.py": ["/googlespreadsheet2django/models/field.py"]}
|
20,576
|
UmSenhorQualquer/googlespreadsheet2django
|
refs/heads/master
|
/googlespreadsheet2django/builder.py
|
# -*- coding: utf-8 -*-
import argparse, os, requests, xlrd
from googlespreadsheet2django.models.models_loader import ModelsLoader
from googlespreadsheet2django.answers.answers_loader import AnswersLoader
def export_code(documentID, path):
	"""Download Google spreadsheet `documentID` as .xlsx into `path`, generate
	the Django abstract models, concrete stubs, admins and show/hide JS under
	`path`, delete the downloaded workbook, and return the list of touched
	application names.  Network and filesystem errors propagate to the caller."""
	INPUTFILE = os.path.join(path, documentID+'.xlsx' )
	# Fetch through Google's legacy spreadsheet xlsx-export endpoint.
	r = requests.get('https://docs.google.com/spreadsheet/ccc?key=%s&output=xlsx' % documentID)
	outfile = open(INPUTFILE, 'wb'); outfile.write(r.content); outfile.close()
	workbook = xlrd.open_workbook( INPUTFILE )
	answers = AnswersLoader(workbook)
	models = ModelsLoader(workbook, answers)
	models.saveModel(path)    # abstractmodels/<Name>.py -- always rewritten
	models.updateModel(path)  # models.py -- only the auto-marker sections
	models.saveAdmin(path)    # admins/<Name>Admin.py -- always rewritten
	models.updateAdmin(path)  # admin.py -- only the auto-marker sections
	models.saveJs(path)
	os.remove(INPUTFILE)
	return models.applications
def main():
    """CLI entry point: googlespreadsheet2django <sheet-id> [--export-path DIR]."""
    parser = argparse.ArgumentParser()
    parser.add_argument("googlespreadsheetid")
    parser.add_argument("--export-path", default='.')
    options = parser.parse_args()
    export_code(options.googlespreadsheetid, options.export_path)
|
{"/tests/test1.py": ["/googlespreadsheet2django/builder.py"], "/googlespreadsheet2django/answers/answers.py": ["/googlespreadsheet2django/answers/choice.py"], "/googlespreadsheet2django/models/models_loader.py": ["/googlespreadsheet2django/models/field.py", "/googlespreadsheet2django/models/model.py"], "/googlespreadsheet2django/answers/answers_loader.py": ["/googlespreadsheet2django/answers/choice.py", "/googlespreadsheet2django/answers/answers.py"], "/googlespreadsheet2django/builder.py": ["/googlespreadsheet2django/models/models_loader.py", "/googlespreadsheet2django/answers/answers_loader.py"], "/googlespreadsheet2django/models/abstract_model.py": ["/googlespreadsheet2django/models/field.py"]}
|
20,577
|
UmSenhorQualquer/googlespreadsheet2django
|
refs/heads/master
|
/googlespreadsheet2django/models/field.py
|
# -*- coding: utf-8 -*-
import os
class Field(object):
    """One model field, read from a single worksheet row (Python 2 code).

    Column layout of a field row (0-based): 0 label, 1 column/attribute name,
    2 tab, 3 group, 4 type, 5 size, 6 choices / related model / range,
    7 mandatory, 8 dependency column, 9 dependency values, 10 help text,
    11 visible, 12 list-column order, 13 filter, 14 search, 15 use-on-name
    order, 16 unique, 17 default value.
    """

    CHOICES_ABBR_LEN = 10 # Number of characters in each answer abbreviation (choice)

    def __init__(self, model, answers, tab, group, worksheet, row):
        self._answers = answers
        self._model = model
        self._tab = tab
        self._group = group
        self._column = worksheet.cell(row, 1).value
        self._label = worksheet.cell(row, 0).value
        self._help = worksheet.cell(row, 10).value
        self._type = worksheet.cell(row, 4).value
        self._size = worksheet.cell(row, 5).value
        self._choices = worksheet.cell(row, 6).value
        self._mandatory = worksheet.cell(row, 7).value=='Yes'
        self._columnDependency = worksheet.cell(row, 8).value
        self._valuesDependency = worksheet.cell(row, 9).value
        self._visible = worksheet.cell(row, 11).value=='Yes'
        self._showinlist = worksheet.cell(row, 12).value
        self._filterby = worksheet.cell(row, 13).value=='Yes'
        self._searchby = worksheet.cell(row, 14).value=='Yes'
        self._useonname = worksheet.cell(row, 15).value
        self._unique = worksheet.cell(row, 16).value=='Yes'
        self._default = worksheet.cell(row, 17).value

    def __str__(self): return self.__unicode__()

    def __unicode__(self):
        """Render the field as generated source: an admin display callable for
        'Function field' rows (fieldtype None), otherwise a model attribute."""
        if self.fieldtype==None:
            function = '\n\tdef %s(self): pass\n' % self._column
            function += '\t%s.short_description="%s"\n' % (self._column, self._label)
            function += '\t%s.allow_tags=True' % (self._column, )
            return function
        return "\t%s = models.%s(%s)" % ( self.fieldname, self.fieldtype, ", ".join(self.parameters) )

    @property
    def choices(self):
        """For range types the cell holds '[min;max]' -> list of floats;
        otherwise the raw cell value (choice-group name or related model)."""
        if 'range' in self._type:
            data = self._choices.replace('[','').replace(']','').split(';')
            return map( float, data )  # Python 2 map returns a list here
        else:
            return self._choices

    @property
    def size(self):
        """(digits, decimals) for decimal types, digit count for integer
        types, otherwise the raw size cell value."""
        if self._type=='Decimal number' or self._type=='Decimal numbers range':
            # assumes the size cell always contains a '.' -- TODO confirm
            vals = ("%s" % self._size).split('.')
            return len(vals[0]), len(vals[1])
        if self._type=='Integer' or self._type=='Integers range':
            return len("%d" % self._size)
        else:
            return self._size

    @property
    def fieldname(self): return self._column

    @property
    def label(self): return self._label

    @property
    def help(self): return self._help.replace('\n','')

    @property
    def fieldtype(self):
        """Map the spreadsheet's human-readable type to a Django field class
        name; None means 'render a function, not a database column'."""
        if self._type=='Created by user':
            return 'ForeignKey'
        elif self._type=='Creation date and time':
            return 'DateTimeField'
        elif self._type=='Date':
            return 'DateField'
        elif self._type=='Date time':
            return 'DateTimeField'
        elif self._type=='Decimal number':
            return 'DecimalField'
        elif self._type=='Decimal numbers range':
            return 'DecimalField'
        elif self._type=='Drop down list':
            return 'CharField'
        elif self._type=='Email':
            return 'EmailField'
        elif self._type=='File':
            return 'FileField'
        elif self._type=='Foreign key':
            return 'ForeignKey'
        elif self._type=='Function field':
            return None
        elif self._type=='Integer':
            return 'IntegerField'
        elif self._type=='Integers range':
            return 'IntegerField'
        elif self._type=='Multiple choice':
            return 'ManyToManyField'
        elif self._type=='Number of identification':
            return 'AutoField'
        elif self._type=='Radio buttons list':
            return 'CharField'
        elif self._type=='Small text':
            return 'CharField'
        elif self._type=='Slug':
            return 'SlugField'
        elif self._type=='Text':
            return 'TextField'
        elif self._type=='Update date and time':
            return 'DateTimeField'
        elif self._type=='Updated by user':
            return 'ForeignKey'
        elif self._type=='Boolean':
            return 'BooleanField'
        return None

    @property
    def parameters(self):
        """Argument strings for the generated field constructor, in the order
        they will be rendered: type-specific arguments first, then the shared
        options (choices, help_text, null/blank, unique, default)."""
        params = []
        if self._type=='Created by user':
            params.append('User')
            params.append('verbose_name="%s"' % self.label)
            params.append('related_name="{0}_created_by_user"'.format(self.fieldname))
        elif self._type=='Creation date and time':
            params.append('"%s"' % self.label)
            params.append('auto_now_add=True')
        elif self._type=='Date':
            params.append('"%s"' % self.label)
        elif self._type=='Date time':
            params.append('"%s"' % self.label)
        elif self._type=='Decimal number':
            params.append('"%s"' % self.label)
            params.append( "max_digits=%s, decimal_places=%s" % self.size )
        elif self._type=='Decimal numbers range':
            params.append('"%s"' % self.label)
            params.append( "max_digits=%s, decimal_places=%s" % self.size )
            params.append( "validators=[MinValueValidator(%f),MaxValueValidator(%f)]" % tuple(self.choices) )
        elif self._type=='Drop down list':
            params.append('"%s"' % self.label)
        elif self._type=='Email':
            params.append('"%s"' % self.label)
            params.append( "max_length=100" )
        elif self._type=='File':
            params.append('"%s"' % self.label)
            params.append( "max_length=255" )
            # Files land in uploads/<tablename>/.
            upload_path = os.path.join('uploads', self._model.tablename.lower() )
            params.append( "upload_to='{0}'".format(upload_path) )
        elif self._type=='Foreign key':
            params.append('"%s"' % self._choices)
            params.append('verbose_name="%s"' % self._label)
        elif self._type=='Function field':
            params.append('"%s"' % self.label)
        elif self._type=='Integer':
            params.append('"%s"' % self.label)
            params.append( "max_length=%s" % self.size )
        elif self._type=='Integers range':
            params.append('"%s"' % self.label)
            params.append( "max_length=%s" % self.size )
            params.append( "validators=[MinValueValidator(%d),MaxValueValidator(%d)]" % tuple(self.choices) )
        elif self._type=='Multiple choice':
            params.append('"%s"' % self._choices)
            params.append('related_name="%s"' % self.fieldname)
            params.append('verbose_name="%s"' % self.label)
        elif self._type=='Number of identification':
            params.append('"%s"' % self.label)
            params.append('primary_key=True')
        elif self._type=='Radio buttons list':
            params.append('"""%s"""' % self.label)
        elif self._type=='Small text':
            params.append('"%s"' % self.label)
            params.append( "max_length=%d" % self._size )
        elif self._type=='Slug':
            params.append('"%s"' % self.label)
            params.append( "max_length=%d" % self._size )
        elif self._type=='Text':
            params.append('"%s"' % self.label)
        elif self._type=='Boolean':
            params.append('"%s"' % self.label)
        elif self._type=='Update date and time':
            params.append('"%s"' % self.label)
            params.append('auto_now=True')
        elif self._type=='Updated by user':
            params.append('User')
            params.append('verbose_name="Created by user"')
            params.append('related_name="updated_by_user"')
        # Choice CharFields reference a generated choices tuple, with a fixed
        # abbreviation length rather than the group's real column size.
        if self._choices and self.fieldtype=='CharField':
            params.append( "choices=%s" % self._choices )
            #params.append( "max_length=%d" % self._answers[self._choices].columnSize )
            params.append( "max_length=%d" % Field.CHOICES_ABBR_LEN )
        if self._help: params.append( 'help_text="""%s"""' % self.help )
        if not self._mandatory and self._type!='Number of identification':
            params.append('null=True,blank=True')
        if self._unique and self._type!='Number of identification':
            params.append('unique=True')
        if self._default!='':
            # Strings are quoted verbatim; anything else becomes the boolean
            # (value == 1).  basestring keeps this Python-2-only.
            default = '"""%s"""' % self._default if isinstance(self._default, basestring) else self._default==1
            params.append( 'default={0}'.format(default) )
        return params
|
{"/tests/test1.py": ["/googlespreadsheet2django/builder.py"], "/googlespreadsheet2django/answers/answers.py": ["/googlespreadsheet2django/answers/choice.py"], "/googlespreadsheet2django/models/models_loader.py": ["/googlespreadsheet2django/models/field.py", "/googlespreadsheet2django/models/model.py"], "/googlespreadsheet2django/answers/answers_loader.py": ["/googlespreadsheet2django/answers/choice.py", "/googlespreadsheet2django/answers/answers.py"], "/googlespreadsheet2django/builder.py": ["/googlespreadsheet2django/models/models_loader.py", "/googlespreadsheet2django/answers/answers_loader.py"], "/googlespreadsheet2django/models/abstract_model.py": ["/googlespreadsheet2django/models/field.py"]}
|
20,578
|
UmSenhorQualquer/googlespreadsheet2django
|
refs/heads/master
|
/tests/django/details/models.py
|
# NOTE(review): the "auto:start"/"auto:end" markers below appear to be parsed
# by the generator (builder.py) to locate replaceable sections -- do not edit
# or remove them by hand; confirm against the builder before refactoring.
##### auto:start:Person #####
from abstractmodels.Person import AbstractPerson


class Person(AbstractPerson):
    """Concrete Django model backed by the generated AbstractPerson."""
    pass
##### auto:end:Person #####
##### auto:start:Country #####
from abstractmodels.Country import AbstractCountry


class Country(AbstractCountry):
    """Concrete Django model backed by the generated AbstractCountry."""
    pass
##### auto:end:Country #####
|
{"/tests/test1.py": ["/googlespreadsheet2django/builder.py"], "/googlespreadsheet2django/answers/answers.py": ["/googlespreadsheet2django/answers/choice.py"], "/googlespreadsheet2django/models/models_loader.py": ["/googlespreadsheet2django/models/field.py", "/googlespreadsheet2django/models/model.py"], "/googlespreadsheet2django/answers/answers_loader.py": ["/googlespreadsheet2django/answers/choice.py", "/googlespreadsheet2django/answers/answers.py"], "/googlespreadsheet2django/builder.py": ["/googlespreadsheet2django/models/models_loader.py", "/googlespreadsheet2django/answers/answers_loader.py"], "/googlespreadsheet2django/models/abstract_model.py": ["/googlespreadsheet2django/models/field.py"]}
|
20,579
|
UmSenhorQualquer/googlespreadsheet2django
|
refs/heads/master
|
/googlespreadsheet2django/models/abstract_model.py
|
from googlespreadsheet2django.models.field import *
class AbstractModel(object):
    """One abstract Django model, built from a spreadsheet tab/group pair.

    Field definitions are accumulated via ``addField``; ``str()`` /
    ``__unicode__`` renders the complete
    ``class Abstract<Name>(models.Model)`` source text.
    """

    def __init__(self, tab, group):
        self._tab = tab        # worksheet tab label this model came from
        self._group = group    # group label within the tab (may be empty)
        self._fields = []      # Field objects, in spreadsheet order

    def addField(self, field):
        """Append one Field definition to this model."""
        self._fields.append(field)

    def __unicode__(self):
        # Render the abstract model as Django source code.
        header = "class %s(models.Model):\n" % self.tablename
        body = '\n'.join(str(field) for field in self._fields)
        footer = '\n\n\tclass Meta: abstract = True\n'
        return header + body + footer

    def __str__(self):
        return self.__unicode__()

    def __strip(self, string):
        # Remove characters that are not legal in a Python class name.
        for ch in (' ', '.', '-', '_', '\\', '/', '(', ')'):
            string = string.replace(ch, '')
        return string

    @property
    def fieldsList(self):
        """Names of the visible fields only."""
        names = []
        for field in self._fields:
            if field._visible:
                names.append(field.fieldname)
        return names

    @property
    def tablename(self):
        """Class name of the generated abstract model.

        Prefers the group label, then the tab label (title-cased), and
        finally falls back to the prefix of the first field's column name.
        """
        if self._group.strip():
            return "Abstract" + self.__strip(self._group)
        if self._tab.strip():
            return "Abstract" + self.__strip(self._tab).title()
        return "Abstract" + self._fields[0]._column.split('_')[0].title()

    @property
    def tab(self):
        """Lower-cased, identifier-safe version of the tab label."""
        # NOTE: __strip already removes backslashes; the extra replace is
        # kept to mirror the original behavior exactly.
        cleaned = self.__strip(self._tab).replace('\\', '')
        return cleaned.lower()
|
{"/tests/test1.py": ["/googlespreadsheet2django/builder.py"], "/googlespreadsheet2django/answers/answers.py": ["/googlespreadsheet2django/answers/choice.py"], "/googlespreadsheet2django/models/models_loader.py": ["/googlespreadsheet2django/models/field.py", "/googlespreadsheet2django/models/model.py"], "/googlespreadsheet2django/answers/answers_loader.py": ["/googlespreadsheet2django/answers/choice.py", "/googlespreadsheet2django/answers/answers.py"], "/googlespreadsheet2django/builder.py": ["/googlespreadsheet2django/models/models_loader.py", "/googlespreadsheet2django/answers/answers_loader.py"], "/googlespreadsheet2django/models/abstract_model.py": ["/googlespreadsheet2django/models/field.py"]}
|
20,580
|
UmSenhorQualquer/googlespreadsheet2django
|
refs/heads/master
|
/setup.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-

# Packaging script for googlespreadsheet2django.
__author__ = "Ricardo Ribeiro"
__credits__ = ["Ricardo Ribeiro"]
__license__ = "MIT"
# NOTE(review): this module-level version ("0.0") disagrees with the version
# passed to setup() below ("1.0.0") -- confirm which one is canonical.
__version__ = "0.0"
__maintainer__ = "Ricardo Ribeiro"
__email__ = "ricardojvr@gmail.com"
__status__ = "Development"

from setuptools import setup

setup(
    name='googlespreadsheet2django',
    version='1.0.0',
    description="""""",
    author='Ricardo Ribeiro',
    author_email='ricardojvr@gmail.com',
    license='MIT',
    packages=[
        'googlespreadsheet2django',
        'googlespreadsheet2django.answers',
        'googlespreadsheet2django.models'
    ],
    # NOTE(review): argparse has been stdlib since Python 2.7 -- this pin is
    # probably unnecessary; verify minimum supported Python before removing.
    install_requires=['xlrd', 'requests', 'argparse'],
    entry_points={
        # Console command that drives the spreadsheet-to-Django generator.
        'console_scripts': ['gsheet2django=googlespreadsheet2django.builder:main']
    }
)
|
{"/tests/test1.py": ["/googlespreadsheet2django/builder.py"], "/googlespreadsheet2django/answers/answers.py": ["/googlespreadsheet2django/answers/choice.py"], "/googlespreadsheet2django/models/models_loader.py": ["/googlespreadsheet2django/models/field.py", "/googlespreadsheet2django/models/model.py"], "/googlespreadsheet2django/answers/answers_loader.py": ["/googlespreadsheet2django/answers/choice.py", "/googlespreadsheet2django/answers/answers.py"], "/googlespreadsheet2django/builder.py": ["/googlespreadsheet2django/models/models_loader.py", "/googlespreadsheet2django/answers/answers_loader.py"], "/googlespreadsheet2django/models/abstract_model.py": ["/googlespreadsheet2django/models/field.py"]}
|
20,589
|
yinchuandong/A3C-FlappyBird
|
refs/heads/master
|
/a3c.py
|
import os
import sys
import tensorflow as tf
import numpy as np
import math
import threading
import signal
from a3c_network import A3CFFNetwork, A3CLSTMNetwork
from a3c_actor_thread import A3CActorThread
from config import *
def log_uniform(lo, hi, rate):
    """Interpolate between *lo* and *hi* in log space.

    rate=0 returns lo, rate=1 returns hi; intermediate rates give a
    geometric (log-linear) blend of the two bounds.
    """
    blended_log = math.log(lo) * (1 - rate) + math.log(hi) * rate
    return math.exp(blended_log)
class A3C(object):
    """Training driver: owns the shared global network, spawns one actor
    thread per worker, and handles checkpointing and Ctrl+C shutdown."""

    def __init__(self):
        self.device = '/gpu:0' if USE_GPU else '/cpu:0'
        self.stop_requested = False  # set by the SIGINT handler
        self.global_t = 0            # global step, advanced by all workers

        # Shared (global) network that every actor thread syncs from.
        if USE_LSTM:
            self.global_network = A3CLSTMNetwork(STATE_DIM, STATE_CHN, ACTION_DIM, self.device, -1)
        else:
            self.global_network = A3CFFNetwork(STATE_DIM, STATE_CHN, ACTION_DIM, self.device, -1)

        # Sample the initial learning rate log-uniformly between the bounds.
        self.initial_learning_rate = log_uniform(INITIAL_ALPHA_LOW, INITIAL_ALPHA_HIGH, INITIAL_ALPHA_LOG_RATE)
        self.learning_rate_input = tf.placeholder('float')
        self.optimizer = tf.train.RMSPropOptimizer(learning_rate=self.learning_rate_input,
                                                   decay=RMSP_ALPHA, momentum=0.0, epsilon=RMSP_EPSILON)

        # One actor thread per parallel worker; all share the optimizer.
        self.actor_threads = []
        for i in range(PARALLEL_SIZE):
            actor_thread = A3CActorThread(i, self.global_network, self.initial_learning_rate,
                                          self.learning_rate_input, self.optimizer, MAX_TIME_STEP, self.device)
            self.actor_threads.append(actor_thread)

        self.sess = tf.Session(config=tf.ConfigProto(log_device_placement=False, allow_soft_placement=True))
        self.sess.run(tf.global_variables_initializer())

        # Tensorboard scalars: per-episode reward and living time.
        self.reward_input = tf.placeholder(tf.float32)
        tf.summary.scalar('reward', self.reward_input)
        self.time_input = tf.placeholder(tf.float32)
        tf.summary.scalar('living_time', self.time_input)
        self.summary_op = tf.summary.merge_all()
        self.summary_writer = tf.summary.FileWriter(LOG_FILE, self.sess.graph)

        self.saver = tf.train.Saver()
        self.restore()  # resume from the newest checkpoint if one exists
        self.lock = threading.Lock()
        return

    def restore(self):
        """Load the newest checkpoint (if any) and recover global_t from
        the checkpoint filename's '-<step>' suffix."""
        checkpoint = tf.train.get_checkpoint_state(CHECKPOINT_DIR)
        if checkpoint and checkpoint.model_checkpoint_path:
            self.saver.restore(self.sess, checkpoint.model_checkpoint_path)
            print("checkpoint loaded:", checkpoint.model_checkpoint_path)
            tokens = checkpoint.model_checkpoint_path.split("-")
            # set global step
            self.global_t = int(tokens[1])
            print(">>> global step set: ", self.global_t)
        else:
            print("Could not find old checkpoint")
        return

    def backup(self):
        """Snapshot the session to CHECKPOINT_DIR, tagged with global_t."""
        if not os.path.exists(CHECKPOINT_DIR):
            os.mkdir(CHECKPOINT_DIR)
        self.saver.save(self.sess, CHECKPOINT_DIR + '/' + 'checkpoint', global_step=self.global_t)
        return

    def train_function(self, parallel_index, lock):
        """Worker loop for one actor thread; runs until stop is requested
        or global_t exceeds MAX_TIME_STEP.

        :param parallel_index: which actor thread this worker drives
        :param lock: shared lock (currently unused -- see comments below)
        """
        actor_thread = self.actor_threads[parallel_index]
        while True:
            if self.stop_requested or (self.global_t > MAX_TIME_STEP):
                break
            # need to lock only when updating global gradients
            # lock.acquire()
            diff_global_t = actor_thread.process(
                self.sess, self.global_t,
                self.summary_writer, self.summary_op,
                self.reward_input, self.time_input
            )
            # lock.release()
            self.global_t += diff_global_t
            # Checkpoint roughly once per 1M steps; the LOCAL_T_MAX-wide
            # window is needed because global_t advances in batches.
            if self.global_t % 1000000 < LOCAL_T_MAX:
                self.backup()
            # print 'global_t:', self.global_t
        return

    def signal_handler(self, signal_, frame_):
        # SIGINT handler: ask all worker loops to exit cleanly.
        print 'You pressed Ctrl+C !'
        self.stop_requested = True
        return

    def run(self):
        """Start all worker threads, block until Ctrl+C, then join and save."""
        train_treads = []
        for i in range(PARALLEL_SIZE):
            train_treads.append(threading.Thread(target=self.train_function, args=(i, self.lock)))

        signal.signal(signal.SIGINT, self.signal_handler)
        for t in train_treads:
            t.start()

        print 'Press Ctrl+C to stop'
        signal.pause()  # sleep until a signal (Ctrl+C) arrives

        print 'Now saving data....'
        for t in train_treads:
            t.join()

        self.backup()
        return
if __name__ == '__main__':
    # Entry point: build the A3C trainer and run until interrupted.
    print 'a3c.py'
    net = A3C()
    net.run()
    # print log_uniform(1e-4, 1e-2, 0.4226)
|
{"/customgame/__init__.py": ["/customgame/custom_flappy_bird.py"]}
|
20,590
|
yinchuandong/A3C-FlappyBird
|
refs/heads/master
|
/game/test_client.py
|
import socket
import sys
import numpy as np
import cPickle
import time
# UDP endpoint of the game server this client talks to.
HOST, PORT = "localhost", 9600
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)


def main():
    """Send one action to the game server and read back a pickled frame.

    Protocol (inferred from this client -- confirm against the server):
    send the action, receive a pickled header with buffer_size /
    total_size / block_num, then reassemble the pickled payload from
    block_num datagrams of at most buffer_size bytes each.
    """
    # data = " ".join(sys.argv[1:])
    # send action
    sock.sendto(str(1), (HOST, PORT))

    header = sock.recv(1000)
    header = cPickle.loads(header)
    print header
    data = str()
    buffer_size = header["buffer_size"]
    total_size = header["total_size"]
    block_num = header["block_num"]
    # Read the payload in blocks; the last block may be shorter.
    for i in range(block_num):
        receive_size = total_size - len(data)
        receive_size = receive_size if receive_size < buffer_size else buffer_size
        data += sock.recv(receive_size)
    data = cPickle.loads(data)
    # print "Sent: {}".format(data)
    print "Received: {}".format(np.shape(data))
    return


if __name__ == '__main__':
    # Fire 100 requests back-to-back to exercise the server.
    for i in range(100):
        main()
        # time.sleep(1 / 30.0)
|
{"/customgame/__init__.py": ["/customgame/custom_flappy_bird.py"]}
|
20,591
|
yinchuandong/A3C-FlappyBird
|
refs/heads/master
|
/DRQN.py
|
import tensorflow as tf
import numpy as np
import random
import time
import os
import sys
from netutil import *
from game.flappy_bird import FlappyBird
from replay_buffer import ReplayBuffer
# --- DRQN hyper-parameters ---
INPUT_SIZE = 84       # input frames are INPUT_SIZE x INPUT_SIZE pixels
INPUT_CHANNEL = 4     # stacked frames per state
ACTIONS_DIM = 2       # Flappy Bird: do nothing / flap
LSTM_UNITS = 256      # LSTM hidden size
LSTM_MAX_STEP = 8     # sequence length sampled for recurrent training

GAMMA = 0.99          # discount factor for TD targets
FINAL_EPSILON = 0.0001
INITIAL_EPSILON = 0.0001
ALPHA = 1e-6  # the learning rate of optimizer
TAU = 0.001
UPDATE_FREQUENCY = 5  # the frequency to update target network
MAX_TIME_STEP = 10 * 10 ** 7
EPSILON_TIME_STEP = 1 * 10 ** 6  # for annealing the epsilon greedy
EPSILON_ANNEAL = float(INITIAL_EPSILON - FINAL_EPSILON) / EPSILON_TIME_STEP
BATCH_SIZE = 4        # number of sequences per training batch
REPLAY_MEMORY = 2000  # replay buffer capacity

CHECKPOINT_DIR = 'tmp_drqn/checkpoints'
LOG_FILE = 'tmp_drqn/log'
class Network(object):
    """Conv + LSTM Q-network used by the DRQN agent.

    A 3-layer conv stack feeds a fully-connected layer whose output is
    unrolled through an LSTM; a final linear layer produces one Q-value
    per action.
    """

    def __init__(self, scope_name):
        """Build the whole graph inside the given variable scope.

        :param scope_name: TF variable scope (e.g. 'main' or 'target')
        """
        with tf.variable_scope(scope_name) as scope:
            # input layer
            self.state_input = tf.placeholder('float', shape=[None, INPUT_SIZE, INPUT_SIZE, INPUT_CHANNEL])

            # hidden conv layer
            self.W_conv1 = weight_variable([8, 8, INPUT_CHANNEL, 32])
            self.b_conv1 = bias_variable([32])
            h_conv1 = tf.nn.relu(conv2d(self.state_input, self.W_conv1, 4) + self.b_conv1)
            h_poo1 = max_pool_2x2(h_conv1)

            self.W_conv2 = weight_variable([4, 4, 32, 64])
            self.b_conv2 = bias_variable([64])
            h_conv2 = tf.nn.relu(conv2d(h_poo1, self.W_conv2, 2) + self.b_conv2)

            self.W_conv3 = weight_variable([3, 3, 64, 64])
            self.b_conv3 = bias_variable([64])
            h_conv3 = tf.nn.relu(conv2d(h_conv2, self.W_conv3, 1) + self.b_conv3)

            # Flatten the conv output for the fully-connected layer.
            h_conv3_out_size = np.prod(h_conv3.get_shape().as_list()[1:])
            h_conv3_flat = tf.reshape(h_conv3, [-1, h_conv3_out_size])

            self.W_fc1 = weight_variable([h_conv3_out_size, LSTM_UNITS])
            self.b_fc1 = bias_variable([LSTM_UNITS])
            h_fc1 = tf.nn.relu(tf.matmul(h_conv3_flat, self.W_fc1) + self.b_fc1)

            # reshape to fit lstm (batch_size, timestep, LSTM_UNITS);
            # batch_size and timestep are fed at run time so the same graph
            # serves both 1-step action selection and sequence training.
            self.timestep = tf.placeholder(dtype=tf.int32)
            self.batch_size = tf.placeholder(dtype=tf.int32)
            h_fc1_reshaped = tf.reshape(h_fc1, [self.batch_size, self.timestep, LSTM_UNITS])

            self.lstm_cell = tf.contrib.rnn.BasicLSTMCell(num_units=LSTM_UNITS, state_is_tuple=True)
            self.initial_lstm_state = self.lstm_cell.zero_state(self.batch_size, tf.float32)
            lstm_outputs, self.lstm_state = tf.nn.dynamic_rnn(
                self.lstm_cell,
                h_fc1_reshaped,
                initial_state=self.initial_lstm_state,
                sequence_length=self.timestep,
                time_major=False,
                dtype=tf.float32,
                scope=scope
            )
            print 'lstm shape:', lstm_outputs.get_shape()

            # shape: [batch_size*timestep, LSTM_UNITS]
            lstm_outputs = tf.reshape(lstm_outputs, [-1, LSTM_UNITS])

            # option1: for separate channel (dueling-style split, disabled)
            # streamA, streamV = tf.split(lstm_outputs, 2, axis=1)
            # self.AW = tf.Variable(tf.random_normal([LSTM_UNITS / 2, ACTIONS_DIM]))
            # self.VW = tf.Variable(tf.random_normal([LSTM_UNITS / 2, 1]))
            # advantage = tf.matmul(streamA, self.AW)
            # value = tf.matmul(streamV, self.VW)
            # self.Q_value = value + tf.subtract(advantage, tf.reduce_mean(advantage, axis=1, keep_dims=True))

            # option2: for fully-connected
            self.W_fc2 = weight_variable([LSTM_UNITS, ACTIONS_DIM])
            self.b_fc2 = bias_variable([ACTIONS_DIM])
            self.Q_value = tf.matmul(lstm_outputs, self.W_fc2) + self.b_fc2
            self.Q_action = tf.argmax(self.Q_value, 1)
            print 'Q shape:', self.Q_value.get_shape()

            # Re-enter the scope to grab the LSTM cell's internal variables
            # by name so get_vars() can expose them.
            scope.reuse_variables()
            self.W_lstm = tf.get_variable("basic_lstm_cell/weights")
            self.b_lstm = tf.get_variable("basic_lstm_cell/biases")
        return

    def get_vars(self):
        """Return this network's variables in a fixed, stable order."""
        return [
            self.W_conv1, self.b_conv1,
            self.W_conv2, self.b_conv2,
            self.W_conv3, self.b_conv3,
            self.W_fc1, self.b_fc1,
            self.W_lstm, self.b_lstm,
            # self.AW, self.VW
            self.W_fc2, self.b_fc2,
        ]
class DRQN(object):
    """Deep Recurrent Q-Network agent.

    Owns the replay buffer, the Q-network, epsilon-greedy action
    selection, TD training over LSTM_MAX_STEP-long sequences, and
    tensorboard/checkpoint bookkeeping.
    """

    def __init__(self):
        self.global_t = 0
        self.replay_buffer = ReplayBuffer(REPLAY_MEMORY)
        # q-network parameter
        self.create_network()
        self.create_minimize()

        # init session
        self.session = tf.InteractiveSession()
        self.session.run(tf.global_variables_initializer())

        # update_target(self.session, self.target_ops)
        self.saver = tf.train.Saver(tf.global_variables())
        self.restore()
        # Resume epsilon annealing from wherever the restored global_t is.
        self.epsilon = INITIAL_EPSILON - float(INITIAL_EPSILON - FINAL_EPSILON) \
            * min(self.global_t, EPSILON_TIME_STEP) / float(EPSILON_TIME_STEP)

        # for recording the log into tensorboard
        self.time_input = tf.placeholder(tf.float32)
        self.reward_input = tf.placeholder(tf.float32)
        tf.summary.scalar('living_time', self.time_input)
        tf.summary.scalar('reward', self.reward_input)
        self.summary_op = tf.summary.merge_all()
        self.summary_writer = tf.summary.FileWriter(LOG_FILE, self.session.graph)

        self.episode_start_time = 0.0
        self.episode_reward = 0.0
        return

    def create_network(self):
        """Build the main Q-network (target network currently disabled)."""
        self.main_net = Network(scope_name='main')
        # self.target_net = Network(scope_name='target')
        # self.target_ops = update_target_graph_op(tf.trainable_variables(), TAU)
        return

    def create_minimize(self):
        """Define the TD loss (mean squared error vs the fed targets)
        and the optimizer op that minimizes it."""
        self.a = tf.placeholder('float', shape=[None, ACTIONS_DIM])  # one-hot actions taken
        self.y = tf.placeholder('float', shape=[None])               # TD targets
        # Q-value of the action actually taken in each transition.
        Q_action = tf.reduce_sum(tf.multiply(self.main_net.Q_value, self.a), axis=1)
        self.full_loss = tf.reduce_mean(tf.square(self.y - Q_action))
        # maskA = tf.zeros([BATCH_SIZE, LSTM_MAX_STEP // 2])
        # maskB = tf.ones([BATCH_SIZE, LSTM_MAX_STEP // 2])
        # mask = tf.concat([maskA, maskB], axis=1)
        # mask = tf.reshape(mask, [-1])
        # just use a half loss with the mask:[0 0 0 0 1 1 1 1]
        # self.loss = tf.multiply(self.full_loss, mask)
        self.optimizer = tf.train.AdamOptimizer(learning_rate=ALPHA)
        self.apply_gradients = self.optimizer.minimize(self.full_loss)
        # self.optimizer = tf.train.RMSPropOptimizer(learning_rate=ALPHA, decay=0.99)
        # self.gradients = tf.gradients(self.loss, self.main_net.get_vars())
        # clip_grads = [tf.clip_by_norm(grad, 40.0) for grad in self.gradients]
        # self.apply_gradients = self.optimizer.apply_gradients(zip(clip_grads, self.main_net.get_vars()))
        return

    def perceive(self, state, action, reward, next_state, terminal):
        """Account for one observed transition: update episode stats,
        periodically log, train when the buffer is warm, and checkpoint."""
        self.global_t += 1
        self.episode_reward += reward

        if self.episode_start_time == 0.0:
            self.episode_start_time = time.time()

        # Log every 20 steps and at episode boundaries.
        if terminal or self.global_t % 20 == 0:
            living_time = time.time() - self.episode_start_time
            self.record_log(self.episode_reward, living_time)

        if terminal:
            self.episode_reward = 0.0
            self.episode_start_time = time.time()

        if self.replay_buffer.size() > BATCH_SIZE:
            self.train_Q_network()

        if self.global_t % 100000 == 0:
            self.backup()
        return

    def epsilon_greedy(self, state, lstm_state_in):
        """Pick an action epsilon-greedily and step the LSTM state.

        :param state: one stacked-frame state fed to the network
            (the graph expects INPUT_SIZE x INPUT_SIZE x INPUT_CHANNEL;
            NOTE(review): an earlier comment said 1x84x84x3 -- confirm)
        :param lstm_state_in: LSTM (c, h) state carried across steps
        :return: (action_index, max_q_value, lstm_state_out)
        """
        Q_value_t, lstm_state_out = self.session.run(
            [self.main_net.Q_value, self.main_net.lstm_state],
            feed_dict={
                self.main_net.state_input: [state],
                self.main_net.initial_lstm_state: lstm_state_in,
                self.main_net.batch_size: 1,
                self.main_net.timestep: 1
            })
        Q_value_t = Q_value_t[0]

        action_index = 0
        if random.random() <= self.epsilon:
            action_index = random.randrange(ACTIONS_DIM)
            print 'random-index:', action_index
        else:
            action_index = np.argmax(Q_value_t)

        # Anneal epsilon linearly down to FINAL_EPSILON.
        if self.epsilon > FINAL_EPSILON:
            self.epsilon -= EPSILON_ANNEAL

        max_q_value = np.max(Q_value_t)
        return action_index, max_q_value, lstm_state_out

    def train_Q_network(self):
        '''
        do backpropogation
        '''
        # len(minibatch) = BATCH_SIZE * LSTM_MAX_STEP
        # if self.global_t % (UPDATE_FREQUENCY * 1000) == 0:
        #     update_target(self.session, self.target_ops)

        # limit the training frequency
        # if self.global_t % UPDATE_FREQUENCY != 0:
        #     return
        minibatch = self.replay_buffer.sample(BATCH_SIZE, LSTM_MAX_STEP)
        state_batch = [t[0] for t in minibatch]
        action_batch = [t[1] for t in minibatch]
        reward_batch = [t[2] for t in minibatch]
        next_state_batch = [t[3] for t in minibatch]
        terminal_batch = [t[4] for t in minibatch]

        y_batch = []
        # todo: need to feed with batch_size, timestep, lstm_state
        # Training always starts from a zeroed LSTM state.
        lstm_state_train = (np.zeros([BATCH_SIZE, LSTM_UNITS]), np.zeros([BATCH_SIZE, LSTM_UNITS]))
        # Bootstrap targets come from the main net (no target net in use).
        Q_target = self.session.run(
            self.main_net.Q_value,
            feed_dict={
                self.main_net.state_input: next_state_batch,
                self.main_net.initial_lstm_state: lstm_state_train,
                self.main_net.batch_size: BATCH_SIZE,
                self.main_net.timestep: LSTM_MAX_STEP
            }
        )
        # Q_action = self.session.run(
        #     self.target_net.Q_action,
        #     feed_dict={
        #         self.target_net.state_input: next_state_batch,
        #         self.target_net.initial_lstm_state: lstm_state_train,
        #         self.target_net.batch_size: BATCH_SIZE,
        #         self.target_net.timestep: LSTM_MAX_STEP
        #     }
        # )
        for i in range(len(state_batch)):
            terminal = terminal_batch[i]
            if terminal:
                y_batch.append(reward_batch[i])
            else:
                y_batch.append(reward_batch[i] + GAMMA * np.max(Q_target[i]))
                # y_batch.append(reward_batch[i] + GAMMA * Q_value[i][Q_action[i]])

        self.session.run(self.apply_gradients, feed_dict={
            self.y: y_batch,
            self.a: action_batch,
            self.main_net.state_input: state_batch,
            self.main_net.initial_lstm_state: lstm_state_train,
            self.main_net.batch_size: BATCH_SIZE,
            self.main_net.timestep: LSTM_MAX_STEP
        })
        # print loss
        return

    def record_log(self, reward, living_time):
        '''
        record the change of reward into tensorboard log
        '''
        summary_str = self.session.run(self.summary_op, feed_dict={
            self.reward_input: reward,
            self.time_input: living_time
        })
        self.summary_writer.add_summary(summary_str, self.global_t)
        self.summary_writer.flush()
        return

    def restore(self):
        """Load the newest checkpoint (if any) and recover global_t from
        the checkpoint filename's '-<step>' suffix."""
        checkpoint = tf.train.get_checkpoint_state(CHECKPOINT_DIR)
        if checkpoint and checkpoint.model_checkpoint_path:
            self.saver.restore(self.session, checkpoint.model_checkpoint_path)
            print("checkpoint loaded:", checkpoint.model_checkpoint_path)
            tokens = checkpoint.model_checkpoint_path.split("-")
            # set global step
            self.global_t = int(tokens[1])
            print(">>> global step set: ", self.global_t)
        else:
            print("Could not find old checkpoint")
        return

    def backup(self):
        """Snapshot the session to CHECKPOINT_DIR, tagged with global_t."""
        if not os.path.exists(CHECKPOINT_DIR):
            os.mkdir(CHECKPOINT_DIR)
        self.saver.save(self.session, CHECKPOINT_DIR + '/' + 'checkpoint', global_step=self.global_t)
        return
def main():
    '''
    the function for training
    '''
    agent = DRQN()
    env = FlappyBird()
    while True:
        env.reset()
        episode_buffer = []
        # Fresh zeroed LSTM state at the start of every episode.
        lstm_state = (np.zeros([1, LSTM_UNITS]), np.zeros([1, LSTM_UNITS]))
        s_t = env.s_t
        while not env.terminal:
            # action_id = random.randint(0, 1)
            action_id, action_q, lstm_state = agent.epsilon_greedy(s_t, lstm_state)
            env.process(action_id)
            action = np.zeros(ACTIONS_DIM)
            action[action_id] = 1
            s_t1, reward, terminal = (env.s_t1, env.reward, env.terminal)
            # frame skip
            episode_buffer.append((s_t, action, reward, s_t1, terminal))
            agent.perceive(s_t, action, reward, s_t1, terminal)

            if agent.global_t % 10 == 0:
                print 'global_t:', agent.global_t, '/ epsilon:', agent.epsilon, '/ terminal:', terminal, \
                    '/ action:', action_id, '/ reward:', reward, '/ q_value:', action_q

            # s_t <- s_t1
            s_t = s_t1
            # Flush long-running episodes to the replay buffer in chunks.
            if len(episode_buffer) >= 50:
                # start a new episode buffer, in case of an over-long memory
                agent.replay_buffer.add(episode_buffer)
                episode_buffer = []
                print '----------- episode buffer > 100---------'

        # reset the state
        # Only keep the tail if it is long enough to sample a sequence from.
        if len(episode_buffer) > LSTM_MAX_STEP:
            agent.replay_buffer.add(episode_buffer)

        print 'episode_buffer', len(episode_buffer)
        print 'replay_buffer.size:', agent.replay_buffer.size()
        # break
    return
if __name__ == '__main__':
    # Entry point: train the DRQN agent on Flappy Bird indefinitely.
    main()
|
{"/customgame/__init__.py": ["/customgame/custom_flappy_bird.py"]}
|
20,592
|
yinchuandong/A3C-FlappyBird
|
refs/heads/master
|
/customgame/custom_flappy_bird.py
|
import os
import sys
import numpy as np
import random
import pygame
import pygame.surfarray as surfarray
# from pygame.locals import *
from itertools import cycle
class CustomFlappyBird(object):
    """Self-contained pygame Flappy Bird environment with a gym-like
    reset()/step() interface for reinforcement-learning agents."""

    def __init__(self, fps=60, screen_width=288, screen_height=512, display_screen=True, frame_skip=1):
        """Initialize pygame, load sprites and hit masks, start a game.

        :param fps: frames per second the clock ticks at
        :param screen_width: window width in pixels
        :param screen_height: window height in pixels
        :param display_screen: whether to flip the display each frame
        :param frame_skip: frames one step() repeats the chosen action for
        """
        pygame.init()
        self._fps = fps
        self._screen_width = screen_width
        self._screen_height = screen_height
        self._display_screen = display_screen
        self._frame_skip = frame_skip
        self._fps_clock = pygame.time.Clock()
        self._screen = pygame.display.set_mode((self._screen_width, self._screen_height))
        pygame.display.set_caption('Flappy Bird')

        self._images, self._sounds, self._hit_masks = self._load_resources()
        self._pip_gap_size = 100  # gap between upper and lower part of pipe
        self._basey = self._screen_height * 0.79  # y coordinate of the ground
        self._player_width = self._images['player'][0].get_width()
        self._player_height = self._images['player'][0].get_height()
        self._pipe_width = self._images['pipe'][0].get_width()
        self._pip_height = self._images['pipe'][0].get_height()
        self._bg_width = self._images['background'].get_width()

        self.reset()
        return

    def _new_game(self):
        """Reset score, bird position/physics and spawn the first two pipes."""
        self._player_index_gen = cycle([0, 1, 2, 1])  # wing-flap animation cycle
        self._score = self._player_index = self._loop_iter = 0
        self._player_x = int(self._screen_width * 0.2)
        self._player_y = int((self._screen_height - self._player_height) / 2)
        self._base_x = 0
        # How far the scrolling ground can shift before wrapping.
        self._base_shift = self._images[
            'base'].get_width() - self._bg_width

        newPipe1 = self._get_random_pipe()
        newPipe2 = self._get_random_pipe()
        self._upper_pipes = [
            {'x': self._screen_width, 'y': newPipe1[0]['y']},
            {'x': self._screen_width + (self._screen_width / 2), 'y': newPipe2[0]['y']},
        ]
        self._lower_pipes = [
            {'x': self._screen_width, 'y': newPipe1[1]['y']},
            {'x': self._screen_width + (self._screen_width / 2), 'y': newPipe2[1]['y']},
        ]

        # player velocity, max velocity, downward accleration, accleration on
        # flap
        self._pipe_vel_x = -4
        self._player_vel_y = 0  # player's velocity along Y, default same as _player_flapped
        self._player_max_vel_x = 10  # max vel along Y, max descend speed
        self._player_min_vel_y = -8  # min vel along Y, max ascend speed
        self._player_acc_y = 1  # players downward accleration
        self._player_flap_acc = -9  # players speed on flapping
        self._player_flapped = False  # True when player flaps
        return

    def _frame_step(self, input_actions):
        """Advance the game one frame.

        :param input_actions: one-hot pair; [1,0] = do nothing, [0,1] = flap
        :return: (screen_image, reward, terminal) where reward is 0.1 per
            surviving frame, 1.0 for passing a pipe, -1.0 on crash
        :raises ValueError: if input_actions is not one-hot
        """
        pygame.event.pump()

        reward = 0.1
        terminal = False

        if sum(input_actions) != 1:
            raise ValueError('Multiple input actions!')

        # input_actions[0] == 1: do nothing
        # input_actions[1] == 1: flap the bird
        if input_actions[1] == 1:
            if self._player_y > -2 * self._player_height:
                self._player_vel_y = self._player_flap_acc
                self._player_flapped = True
                # self._sounds['wing'].play()

        # check for score
        playerMidPos = self._player_x + self._player_width / 2
        for pipe in self._upper_pipes:
            pipeMidPos = pipe['x'] + self._pipe_width / 2
            # Pipes move 4px/frame, so this window triggers exactly once.
            if pipeMidPos <= playerMidPos < pipeMidPos + 4:
                self._score += 1
                # self._sounds['point'].play()
                reward = 1.0

        # _player_index basex change
        if (self._loop_iter + 1) % 3 == 0:
            self._player_index = next(self._player_index_gen)
        self._loop_iter = (self._loop_iter + 1) % 30
        self._base_x = -((-self._base_x + 100) % self._base_shift)

        # player's movement
        if self._player_vel_y < self._player_max_vel_x and not self._player_flapped:
            self._player_vel_y += self._player_acc_y
        if self._player_flapped:
            self._player_flapped = False
        self._player_y += min(self._player_vel_y, self._basey -
                              self._player_y - self._player_height)
        if self._player_y < 0:
            self._player_y = 0

        # move pipes to left
        for uPipe, lPipe in zip(self._upper_pipes, self._lower_pipes):
            uPipe['x'] += self._pipe_vel_x
            lPipe['x'] += self._pipe_vel_x

        # add new pipe when first pipe is about to touch left of screen
        if 0 < self._upper_pipes[0]['x'] < 5:
            newPipe = self._get_random_pipe()
            self._upper_pipes.append(newPipe[0])
            self._lower_pipes.append(newPipe[1])

        # remove first pipe if its out of the screen
        if self._upper_pipes[0]['x'] < -self._pipe_width:
            self._upper_pipes.pop(0)
            self._lower_pipes.pop(0)

        # check if crash here
        isCrash = self._check_crash({'x': self._player_x, 'y': self._player_y,
                                     'index': self._player_index},
                                    self._upper_pipes, self._lower_pipes)
        if isCrash:
            # self._sounds['hit'].play()
            # self._sounds['die'].play()
            terminal = True
            reward = -1.0
            # self.reset()

        # draw sprites
        self._screen.blit(self._images['background'], (0, 0))

        for uPipe, lPipe in zip(self._upper_pipes, self._lower_pipes):
            self._screen.blit(self._images['pipe'][0], (uPipe['x'], uPipe['y']))
            self._screen.blit(self._images['pipe'][1], (lPipe['x'], lPipe['y']))

        self._screen.blit(self._images['base'], (self._base_x, self._basey))
        # print score so player overlaps the score
        self._screen.blit(self._images['player'][self._player_index],
                          (self._player_x, self._player_y))

        # Capture BEFORE the display flip so the agent sees this frame.
        img = self._capture_screen()
        if self._display_screen:
            pygame.display.update()
        self._fps_clock.tick(self._fps)
        # print self._upper_pipes[0]['y'] + self._pip_height - int(self._basey * 0.2)

        if terminal:
            self.reset()
        return img, reward, terminal

    def _capture_screen(self):
        """Return the current screen as an RGB pixel array."""
        img = surfarray.array3d(pygame.display.get_surface())
        return img

    def reset(self):
        """Start a new game and return the initial observation."""
        self._new_game()
        o_t = self._capture_screen()
        return o_t

    def step(self, action_id):
        """Repeat one action for frame_skip frames.

        :param action_id: 0 = do nothing, 1 = flap
        :return: (last_observation, summed_reward, terminal)
        """
        action = np.zeros([2])
        action[action_id] = 1
        total_reward = 0.0
        for _ in range(self._frame_skip):
            o_t1, reward, terminal = self._frame_step(action)
            total_reward += reward
            if terminal:
                break
        return o_t1, total_reward, terminal

    @property
    def action_size(self):
        # Number of discrete actions.
        return 2

    @property
    def action_set(self):
        # Valid action ids accepted by step().
        return [0, 1]

    def _get_random_pipe(self):
        """returns a randomly generated pipe"""
        # y of gap between upper and lower pipe
        gapYs = [20, 30, 40, 50, 60, 70, 80, 90]
        index = random.randint(0, len(gapYs) - 1)
        gapY = gapYs[index]

        gapY += int(self._basey * 0.2)
        pipeX = self._screen_width + 10

        return [
            {'x': pipeX, 'y': gapY - self._pip_height},  # upper pipe
            {'x': pipeX, 'y': gapY + self._pip_gap_size},  # lower pipe
        ]

    def _check_crash(self, player, upperPipes, lowerPipes):
        """returns True if player collders with base or pipes."""
        pi = player['index']
        player['w'] = self._images['player'][0].get_width()
        player['h'] = self._images['player'][0].get_height()

        # if player crashes into ground
        if player['y'] + player['h'] >= self._basey - 1:
            return True
        else:
            playerRect = pygame.Rect(player['x'], player['y'],
                                     player['w'], player['h'])

            for uPipe, lPipe in zip(upperPipes, lowerPipes):
                # upper and lower pipe rects
                uPipeRect = pygame.Rect(
                    uPipe['x'], uPipe['y'], self._pipe_width, self._pip_height)
                lPipeRect = pygame.Rect(
                    lPipe['x'], lPipe['y'], self._pipe_width, self._pip_height)

                # player and upper/lower pipe self.hit_masks_
                pHitMask = self._hit_masks['player'][pi]
                uHitmask = self._hit_masks['pipe'][0]
                lHitmask = self._hit_masks['pipe'][1]

                # if bird collided with upipe or lpipe
                uCollide = self._pixel_collision(
                    playerRect, uPipeRect, pHitMask, uHitmask)
                lCollide = self._pixel_collision(
                    playerRect, lPipeRect, pHitMask, lHitmask)

                if uCollide or lCollide:
                    return True
        return False

    def _pixel_collision(self, rect1, rect2, hitmask1, hitmask2):
        """Checks if two objects collide and not just their rects"""
        # Only scan the overlapping region of the two rects.
        rect = rect1.clip(rect2)

        if rect.width == 0 or rect.height == 0:
            return False

        x1, y1 = rect.x - rect1.x, rect.y - rect1.y
        x2, y2 = rect.x - rect2.x, rect.y - rect2.y

        for x in range(rect.width):
            for y in range(rect.height):
                # Collision iff both alpha masks are opaque at this pixel.
                if hitmask1[x1 + x][y1 + y] and hitmask2[x2 + x][y2 + y]:
                    return True
        return False

    def _load_resources(self):
        """Load all sprite images and per-pixel hit masks.

        :return: (images, sounds, hit_masks) dicts; sounds is currently
            left empty (all mixer loads are commented out)
        """
        dir_path = os.path.dirname(os.path.abspath(__file__))
        # path of player with different states
        player_path = (
            os.path.join(dir_path, 'assets/sprites/redbird-upflap.png'),
            os.path.join(dir_path, 'assets/sprites/redbird-midflap.png'),
            os.path.join(dir_path, 'assets/sprites/redbird-downflap.png')
        )

        # path of background
        background_path = os.path.join(dir_path, 'assets/sprites/background-black.png')
        # background_path = os.path.join(dir_path, 'assets/sprites/background-day.png')
        # background_path = os.path.join(dir_path, 'assets/sprites/background-night.png')

        # path of pipe
        PIPE_PATH = os.path.join(dir_path, 'assets/sprites/pipe-green.png')

        images, sounds, hit_masks = {}, {}, {}

        # numbers sprites for score display
        images['numbers'] = (
            pygame.image.load(os.path.join(dir_path, 'assets/sprites/0.png')).convert_alpha(),
            pygame.image.load(os.path.join(dir_path, 'assets/sprites/1.png')).convert_alpha(),
            pygame.image.load(os.path.join(dir_path, 'assets/sprites/2.png')).convert_alpha(),
            pygame.image.load(os.path.join(dir_path, 'assets/sprites/3.png')).convert_alpha(),
            pygame.image.load(os.path.join(dir_path, 'assets/sprites/4.png')).convert_alpha(),
            pygame.image.load(os.path.join(dir_path, 'assets/sprites/5.png')).convert_alpha(),
            pygame.image.load(os.path.join(dir_path, 'assets/sprites/6.png')).convert_alpha(),
            pygame.image.load(os.path.join(dir_path, 'assets/sprites/7.png')).convert_alpha(),
            pygame.image.load(os.path.join(dir_path, 'assets/sprites/8.png')).convert_alpha(),
            pygame.image.load(os.path.join(dir_path, 'assets/sprites/9.png')).convert_alpha()
        )

        # base (ground) sprite
        images['base'] = pygame.image.load(os.path.join(dir_path, 'assets/sprites/base.png')).convert_alpha()

        # sounds
        if 'win' in sys.platform:
            soundExt = '.wav'
        else:
            soundExt = '.ogg'
        # sounds['die'] = pygame.mixer.Sound('assets/audio/die' + soundExt)
        # sounds['hit'] = pygame.mixer.Sound('assets/audio/hit' + soundExt)
        # sounds['point'] = pygame.mixer.Sound('assets/audio/point' + soundExt)
        # sounds['swoosh'] = pygame.mixer.Sound('assets/audio/swoosh' + soundExt)
        # sounds['wing'] = pygame.mixer.Sound('assets/audio/wing' + soundExt)

        # select random background sprites
        images['background'] = pygame.image.load(background_path).convert()

        # select random player sprites
        images['player'] = (
            pygame.image.load(player_path[0]).convert_alpha(),
            pygame.image.load(player_path[1]).convert_alpha(),
            pygame.image.load(player_path[2]).convert_alpha(),
        )

        # select random pipe sprites
        images['pipe'] = (
            pygame.transform.rotate(
                pygame.image.load(PIPE_PATH).convert_alpha(), 180),
            pygame.image.load(PIPE_PATH).convert_alpha(),
        )

        # hismask for pipes
        hit_masks['pipe'] = (
            self._get_hit_mask(images['pipe'][0]),
            self._get_hit_mask(images['pipe'][1]),
        )

        # hitmask for player
        hit_masks['player'] = (
            self._get_hit_mask(images['player'][0]),
            self._get_hit_mask(images['player'][1]),
            self._get_hit_mask(images['player'][2]),
        )
        return images, sounds, hit_masks

    def _get_hit_mask(self, image):
        """returns a hitmask using an image's alpha."""
        mask = []
        for x in range(image.get_width()):
            mask.append([])
            for y in range(image.get_height()):
                # True where the pixel's alpha channel is non-zero.
                mask[x].append(bool(image.get_at((x, y))[3]))
        return mask
|
{"/customgame/__init__.py": ["/customgame/custom_flappy_bird.py"]}
|
20,593
|
yinchuandong/A3C-FlappyBird
|
refs/heads/master
|
/customgame/__init__.py
|
from customgame.custom_flappy_bird import CustomFlappyBird
|
{"/customgame/__init__.py": ["/customgame/custom_flappy_bird.py"]}
|
20,594
|
yinchuandong/A3C-FlappyBird
|
refs/heads/master
|
/a3c_actor_thread.py
|
import tensorflow as tf
import numpy as np
import random
import time
from a3c_network import A3CFFNetwork, A3CLSTMNetwork
from config import *
from game.game_state import GameState
def timestamp():
    """Return the current wall-clock time in seconds since the epoch."""
    now = time.time()
    return now
class A3CActorThread(object):
    """One A3C worker thread: keeps a thread-local copy of the network,
    interacts with its own GameState instance, and pushes norm-clipped
    gradients of the local loss directly onto the shared global network."""

    def __init__(self,
                 thread_index,
                 global_network,
                 initial_learning_rate,
                 learning_rate_input,
                 optimizer,
                 max_global_time_step,
                 device
                 ):
        self.thread_index = thread_index
        self.learning_rate_input = learning_rate_input
        self.max_global_time_step = max_global_time_step
        # Thread-local network mirroring the global one (LSTM or FF variant).
        if USE_LSTM:
            self.local_network = A3CLSTMNetwork(STATE_DIM, STATE_CHN, ACTION_DIM, device, thread_index)
        else:
            self.local_network = A3CFFNetwork(STATE_DIM, STATE_CHN, ACTION_DIM, device, thread_index)
        self.local_network.create_loss(ENTROPY_BETA)
        # Gradients of the local loss w.r.t. local weights, clipped by norm
        # and applied to the *global* network's variables (HogWild-style).
        self.gradients = tf.gradients(self.local_network.total_loss, self.local_network.get_vars())
        clip_accum_grads = [tf.clip_by_norm(accum_grad, 10.0) for accum_grad in self.gradients]
        self.apply_gradients = optimizer.apply_gradients(zip(clip_accum_grads, global_network.get_vars()))
        # self.apply_gradients = optimizer.apply_gradients(zip(self.gradients, global_network.get_vars()))
        # Op that copies the global weights into the local network.
        self.sync = self.local_network.sync_from(global_network)
        # Each worker talks to its own game-server instance (port offset by index).
        self.game_state = GameState(thread_index)
        self.local_t = 0
        self.initial_learning_rate = initial_learning_rate
        # for log
        self.episode_reward = 0.0
        self.episode_start_time = 0.0
        self.prev_local_t = 0
        return

    def _anneal_learning_rate(self, global_time_step):
        # Linear decay from initial_learning_rate to 0 over max_global_time_step,
        # clamped at 0 once training runs past the schedule.
        learning_rate = self.initial_learning_rate * \
            (self.max_global_time_step - global_time_step) / self.max_global_time_step
        if learning_rate < 0.0:
            learning_rate = 0.0
        return learning_rate

    def choose_action(self, policy_output):
        # Sample an action index from the policy's probability distribution.
        return np.random.choice(range(len(policy_output)), p=policy_output)

    def _record_log(self, sess, global_t, summary_writer, summary_op, reward_input, reward, time_input, living_time):
        # Push a (reward, living_time) summary point to TensorBoard at global_t.
        summary_str = sess.run(summary_op, feed_dict={
            reward_input: reward,
            time_input: living_time
        })
        summary_writer.add_summary(summary_str, global_t)
        summary_writer.flush()
        return

    def _discount_accum_reward(self, rewards, running_add=0.0, gamma=0.99):
        """ discounted the reward using gamma
        """
        # Backward pass: discounted_r[t] = rewards[t] + gamma * discounted_r[t+1],
        # seeded with `running_add` (the bootstrap value for the last state).
        discounted_r = np.zeros_like(rewards, dtype=np.float32)
        for t in reversed(range(len(rewards))):
            running_add = rewards[t] + running_add * gamma
            discounted_r[t] = running_add
        return list(discounted_r)

    def process(self, sess, global_t, summary_writer, summary_op, reward_input, time_input):
        """Run up to LOCAL_T_MAX environment steps, then apply one gradient
        update to the global network.  Returns the number of local steps taken."""
        batch_state = []
        batch_action = []
        batch_reward = []
        terminal_end = False
        # reduce the influence of socket connecting time
        if self.episode_start_time == 0.0:
            self.episode_start_time = timestamp()
        # copy weight from global network
        sess.run(self.sync)
        start_local_t = self.local_t
        # NOTE(review): start_lstm_state is only assigned when USE_LSTM; the
        # run_batch_value call below would raise NameError in the feed-forward
        # configuration — confirm the FF path is never exercised here.
        if USE_LSTM:
            start_lstm_state = self.local_network.lstm_state_out
        for i in range(LOCAL_T_MAX):
            policy_ = self.local_network.run_policy(sess, self.game_state.s_t)
            if self.thread_index == 0 and self.local_t % 1000 == 0:
                print 'policy=', policy_
            action_id = self.choose_action(policy_)
            action_onehot = np.zeros([ACTION_DIM])
            action_onehot[action_id] = 1
            batch_state.append(self.game_state.s_t)
            batch_action.append(action_onehot)
            self.game_state.process(action_id)
            reward = self.game_state.reward
            terminal = self.game_state.terminal
            self.episode_reward += reward
            # Rewards are clipped to [-1, 1] for the learning signal only;
            # episode_reward above accumulates the raw reward for logging.
            batch_reward.append(np.clip(reward, -1.0, 1.0))
            self.local_t += 1
            # s_t1 -> s_t
            self.game_state.update()
            if terminal:
                terminal_end = True
                episode_end_time = timestamp()
                living_time = episode_end_time - self.episode_start_time
                self._record_log(sess, global_t, summary_writer, summary_op,
                                 reward_input, self.episode_reward, time_input, living_time)
                print ("global_t=%d / reward=%.2f / living_time=%.4f") % (global_t, self.episode_reward, living_time)
                # reset variables
                self.episode_reward = 0.0
                self.episode_start_time = episode_end_time
                self.game_state.reset()
                if USE_LSTM:
                    self.local_network.reset_lstm_state()
                break
            # log
            if self.local_t % 40 == 0:
                living_time = timestamp() - self.episode_start_time
                self._record_log(sess, global_t, summary_writer, summary_op,
                                 reward_input, self.episode_reward, time_input, living_time)
        # -----------end of batch (LOCAL_T_MAX)--------------------
        # Bootstrap value R: 0 if the episode ended, otherwise the critic's
        # estimate of the last state's value.
        R = 0.0
        if not terminal_end:
            R = self.local_network.run_value(sess, self.game_state.s_t)
        # print ('global_t: %d, R: %f') % (global_t, R)
        batch_value = self.local_network.run_batch_value(sess, batch_state, start_lstm_state)
        batch_R = self._discount_accum_reward(batch_reward, R, GAMMA)
        # Advantage estimate (td) = discounted return - value baseline.
        batch_td = np.array(batch_R) - np.array(batch_value)
        cur_learning_rate = self._anneal_learning_rate(global_t)
        # print("=" * 60)
        # print(batch_value)
        # print(self.local_network.run_batch_value(sess, batch_state, start_lstm_state))
        # print("=" * 60)
        # import sys
        # sys.exit()
        if USE_LSTM:
            sess.run(self.apply_gradients, feed_dict={
                self.local_network.state_input: batch_state,
                self.local_network.action_input: batch_action,
                self.local_network.td: batch_td,
                self.local_network.R: batch_R,
                self.local_network.step_size: [len(batch_state)],
                self.local_network.initial_lstm_state: start_lstm_state,
                self.learning_rate_input: cur_learning_rate
            })
        else:
            sess.run(self.apply_gradients, feed_dict={
                self.local_network.state_input: batch_state,
                self.local_network.action_input: batch_action,
                self.local_network.td: batch_td,
                self.local_network.R: batch_R,
                self.learning_rate_input: cur_learning_rate
            })
        diff_local_t = self.local_t - start_local_t
        return diff_local_t
if __name__ == '__main__':
    # Ad-hoc manual check (Python 2 print statements): timestamp() should
    # track time.time().
    # game_state = GameState()
    # game_state.process(1)
    # print np.shape(game_state.s_t)
    print timestamp()
    print time.time()
|
{"/customgame/__init__.py": ["/customgame/custom_flappy_bird.py"]}
|
20,595
|
yinchuandong/A3C-FlappyBird
|
refs/heads/master
|
/config.py
|
# Hyper-parameters and runtime configuration for the A3C FlappyBird agent.
GAME = 'flappy-bird'
STATE_DIM = 84  # input frames are STATE_DIM x STATE_DIM pixels
STATE_CHN = 4  # number of stacked frames fed to the network
ACTION_DIM = 2  # flap / do nothing
LOCAL_T_MAX = 5  # repeat step size
RMSP_ALPHA = 0.99  # decay parameter for RMSProp
RMSP_EPSILON = 0.1  # epsilon parameter for RMSProp
GAMMA = 0.99  # reward discount factor
ENTROPY_BETA = 0.0  # 0.01 for FFNet
MAX_TIME_STEP = 10 * 10**7  # total training steps across all workers
INITIAL_ALPHA_LOW = 1e-4  # log_uniform low limit for learning rate
INITIAL_ALPHA_HIGH = 1e-2  # log_uniform high limit for learning rate
INITIAL_ALPHA_LOG_RATE = 0.4226  # log_uniform interpolate rate for learning rate (around 7 * 10^-4)
PARALLEL_SIZE = 4  # parallel thread size, please start game_server first
USE_GPU = True
USE_LSTM = True  # LSTM network variant vs feed-forward
LSTM_UNITS = 256
CHECKPOINT_DIR = 'tmp_a3c/checkpoints'
LOG_FILE = 'tmp_a3c/log'
|
{"/customgame/__init__.py": ["/customgame/custom_flappy_bird.py"]}
|
20,596
|
yinchuandong/A3C-FlappyBird
|
refs/heads/master
|
/replay_buffer.py
|
from collections import deque
import random
import numpy as np
class ReplayBuffer(object):
    """Bounded FIFO store of episodes for experience replay.

    Each entry added via :meth:`add` is one *episode*: a list of transition
    tuples such as ``(s, a, r, s1, d)``.  Once more than ``capacity``
    episodes are stored, the oldest are discarded (deque semantics).
    """

    def __init__(self, capacity):
        # Fix: the original assigned the limit to ``self.capacity``, which
        # shadowed the ``capacity()`` method and made it uncallable
        # (TypeError: 'int' object is not callable).  The limit now lives in
        # ``_capacity`` and is exposed via a read-only property, so
        # ``buf.capacity`` still evaluates to the same integer as before.
        self._capacity = capacity
        self.buffer = deque(maxlen=self._capacity)
        return

    @property
    def capacity(self):
        """Maximum number of episodes retained."""
        return self._capacity

    def sample(self, batch_size, timestep):
        '''
        sample from buffer, get [batch_size][timestep]
        return a reshaped array with size: batch_size*timestep
        '''
        # Pick batch_size distinct episodes, then take one random contiguous
        # window of `timestep` transitions from each.
        episode_batch = random.sample(self.buffer, batch_size)
        experience = []
        for episode in episode_batch:
            # randint is inclusive on both ends; raises ValueError if an
            # episode is shorter than `timestep` (same as the original).
            start = random.randint(0, len(episode) - timestep)
            experience += episode[start:start + timestep]
        return experience

    def size(self):
        """Number of episodes currently stored."""
        return len(self.buffer)

    def add(self, episode_buffer):
        '''
        note: each element in replay buffer is an array, contains a series of episodes
        like: [(s, a, r, s1, d)]
        '''
        self.buffer.append(episode_buffer)
        return

    def get_recent_state(self):
        """Return the last transition of the most recently added episode."""
        return self.buffer[-1][-1]
if __name__ == '__main__':
    # Smoke test: construct a buffer with room for 10000 episodes.
    rp = ReplayBuffer(10000)
|
{"/customgame/__init__.py": ["/customgame/custom_flappy_bird.py"]}
|
20,597
|
yinchuandong/A3C-FlappyBird
|
refs/heads/master
|
/netutil.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import numpy as np
import tensorflow as tf
def weight_variable(shape):
    """Trainable weight tensor drawn from a truncated normal (stddev 0.01)."""
    init = tf.truncated_normal(shape, stddev=0.01)
    return tf.Variable(init)
def bias_variable(shape):
    """Trainable bias tensor filled with the constant 0.01."""
    return tf.Variable(tf.constant(0.01, shape=shape))
def conv2d(x, W, stride):
    """2-D convolution with equal horizontal/vertical stride and SAME padding."""
    stride_spec = [1, stride, stride, 1]
    return tf.nn.conv2d(x, W, strides=stride_spec, padding='SAME')
def max_pool_2x2(x):
    """2x2 max pooling with stride 2 (halves spatial dims), SAME padding."""
    window = [1, 2, 2, 1]
    return tf.nn.max_pool(x, ksize=window, strides=window, padding='SAME')
def output_size(in_size, filter_size, stride):
    """Spatial output size of a VALID convolution/pooling layer.

    Fix: with ``from __future__ import division`` in effect at the top of
    this file (and on Python 3), the original ``/`` produced a float
    (e.g. 41.5), which is not a valid tensor dimension; floor division
    yields the correct integral size.
    """
    return (in_size - filter_size) // stride + 1
def lstm_last_relevant(output, length):
    '''
    get the last relevant frame of the output of tf.nn.dynamic_rnn()
    '''
    # output: [batch, max_length, output_size]; length: per-example valid
    # sequence length.  Flatten to [batch * max_length, output_size] and
    # gather row (example_index * max_length + length - 1), i.e. the last
    # valid timestep of each sequence.
    batch_size = tf.shape(output)[0]
    max_length = int(output.get_shape()[1])
    output_size = int(output.get_shape()[2])
    index = tf.range(0, batch_size) * max_length + (length - 1)
    flat = tf.reshape(output, [-1, output_size])
    relevant = tf.gather(flat, index)
    return relevant
def update_target_graph_op(trainable_vars, tau=0.001):
    '''
    theta_prime = tau * theta + (1 - tau) * theta_prime
    '''
    # `trainable_vars` holds the main network's variables in its first half
    # and the target network's in its second half.
    # Fix: this file has `from __future__ import division`, so the original
    # `size / 2` slice bound was a float and raised
    # "TypeError: slice indices must be integers"; use floor division,
    # matching the `size // 2` already used for `target` below.
    size = len(trainable_vars)
    update_ops = []
    for i, var in enumerate(trainable_vars[0:size // 2]):
        target = trainable_vars[size // 2 + i]
        # NOTE: despite the docstring, this is a hard copy of the main
        # weights, not a tau-blended soft update (soft version kept below).
        # op = tf.assign(target, tau * var.value() + (1 - tau) * target.value())
        op = tf.assign(target, var.value())
        update_ops.append(op)
    return update_ops
def update_target(session, update_ops):
    """Run the target-network update ops, then sanity-check that the first
    main-network variable now equals its target counterpart.

    Fix: the original check ``theta.all() == theta_prime.all()`` reduced
    each array to a scalar boolean independently and compared those, so it
    passed even when the arrays differed; element-wise equality must be
    reduced *after* the comparison.
    """
    session.run(update_ops)
    tf_vars = tf.trainable_variables()
    size = len(tf.trainable_variables())
    theta = session.run(tf_vars[0])
    theta_prime = session.run(tf_vars[size // 2])
    assert (theta == theta_prime).all()
    return
def fc_initializer(input_channels, dtype=tf.float32):
    """Initializer for fully-connected weights: uniform in [-d, d] with
    d = 1 / sqrt(fan_in)."""
    def _initializer(shape, dtype=dtype, partition_info=None):
        bound = 1.0 / np.sqrt(input_channels)
        return tf.random_uniform(shape, minval=-bound, maxval=bound)
    return _initializer
def conv_initializer(kernel_width, kernel_height, input_channels, dtype=tf.float32):
    """Initializer for conv kernels: uniform in [-d, d] with
    d = 1 / sqrt(fan_in), fan_in = in_channels * kW * kH."""
    def _initializer(shape, dtype=dtype, partition_info=None):
        bound = 1.0 / np.sqrt(input_channels * kernel_width * kernel_height)
        return tf.random_uniform(shape, minval=-bound, maxval=bound)
    return _initializer
def fc_variable(shape, name):
    """Create (W, b) for a fully-connected layer (fan-in-scaled uniform
    init) and attach TensorBoard summaries to both."""
    name_w = "W_{0}".format(name)
    name_b = "b_{0}".format(name)
    initializer = fc_initializer(shape[0])
    W = tf.get_variable(name_w, shape, initializer=initializer)
    b = tf.get_variable(name_b, shape[1:], initializer=initializer)
    variable_summaries(W, name_w)
    variable_summaries(b, name_b)
    return W, b
def conv_variable(weight_shape, name, deconv=False):
    """Create (weight, bias) for a conv/deconv layer and attach summaries.

    For a deconvolution the in/out channel axes of `weight_shape` are
    swapped relative to a normal convolution.
    """
    name_w = "W_{0}".format(name)
    name_b = "b_{0}".format(name)
    w, h = weight_shape[0], weight_shape[1]
    if deconv:
        input_channels, output_channels = weight_shape[3], weight_shape[2]
    else:
        input_channels, output_channels = weight_shape[2], weight_shape[3]
    initializer = conv_initializer(w, h, input_channels)
    weight = tf.get_variable(name_w, weight_shape, initializer=initializer)
    bias = tf.get_variable(name_b, [output_channels], initializer=initializer)
    variable_summaries(weight, name_w)
    variable_summaries(bias, name_b)
    return weight, bias
def deconv2d(x, W, input_width, input_height, stride):
    # Transposed convolution with VALID padding.
    # W layout here: [filter_h, filter_w, out_channels, in_channels]
    # (conv2d_transpose filter convention).
    filter_height = W.get_shape()[0].value
    filter_width = W.get_shape()[1].value
    out_channel = W.get_shape()[2].value
    # Compute the static output spatial size for the given stride/padding.
    out_height, out_width = get2d_deconv_output_size(
        input_height, input_width, filter_height, filter_width, stride, "VALID")
    batch_size = tf.shape(x)[0]
    output_shape = tf.stack([batch_size, out_height, out_width, out_channel])
    return tf.nn.conv2d_transpose(x, W, output_shape, strides=[1, stride, stride, 1], padding="VALID")
def get2d_deconv_output_size(input_height, input_width, filter_height, filter_width, stride, padding_type):
    """Output (height, width) of a 2-D transposed convolution.

    Fix: an unrecognised `padding_type` previously fell through both
    branches and raised an opaque UnboundLocalError on `out_height`;
    raise a descriptive ValueError instead.
    """
    if padding_type == "VALID":
        out_height = (input_height - 1) * stride + filter_height
        out_width = (input_width - 1) * stride + filter_width
    elif padding_type == "SAME":
        out_height = input_height * stride
        out_width = input_width * stride
    else:
        raise ValueError("unknown padding_type: %r" % (padding_type,))
    return out_height, out_width
def flatten_conv_layer(h_conv):
    """Flatten a conv activation to 2-D [batch, features]; return
    (feature_count, flattened_tensor)."""
    trailing_dims = h_conv.get_shape().as_list()[1:]
    h_conv_flat_size = np.prod(trailing_dims)
    h_conv_flat = tf.reshape(h_conv, [-1, h_conv_flat_size])
    return h_conv_flat_size, h_conv_flat
def variable_summaries(var, name=None):
    """ Attach a lot of summaries to a Tensor (for TensorBoard visualization).
    """
    # Records mean, stddev, max, min and a histogram of `var` under the
    # name scope "summaries/<name>".
    with tf.name_scope("summaries"):
        with tf.name_scope(name):
            mean = tf.reduce_mean(var)
            tf.summary.scalar("mean", mean)
            with tf.name_scope("stddev"):
                stddev = tf.sqrt(tf.reduce_mean(tf.square(var - mean)))
                tf.summary.scalar("stddev", stddev)
            tf.summary.scalar("max", tf.reduce_max(var))
            tf.summary.scalar("min", tf.reduce_min(var))
            tf.summary.histogram("histogram", var)
    return
def restore_session(saver, sess, model_dir):
    """ restore the session from given model_dir
    Args:
        saver: tf.train.Saver,
        sess: tf.Session,
        model_dir: string, the path to save model
    Returns:
        global_t:
        n_episode:
    """
    checkpoint = tf.train.get_checkpoint_state(model_dir)
    if checkpoint and checkpoint.model_checkpoint_path:
        saver.restore(sess, checkpoint.model_checkpoint_path)
        print("checkpoint loaded:", checkpoint.model_checkpoint_path)
        # Expects the filename layout written by backup_session():
        # "checkpoint-<n_episode>-<global_t>", so split("-") yields
        # ["...checkpoint", n_episode, global_t].
        tokens = checkpoint.model_checkpoint_path.split("-")
        # set global step
        global_t = int(tokens[2])
        n_episode = int(tokens[1])
        print(">>> global step set: ", global_t)
    else:
        # No checkpoint found: start training from scratch.
        print("Could not find old checkpoint")
        global_t = 0
        n_episode = 0
    return global_t, n_episode
def backup_session(saver, sess, model_dir, global_t, n_episode=0):
    """Persist the session as model_dir/checkpoint-<n_episode>-<global_t>.

    Args:
        saver: tf.train.Saver,
        sess: tf.Session,
        model_dir: string, directory for checkpoints (created if missing)
        global_t: int, current timestep (used as the saver's global_step)
        n_episode: int, episode counter embedded in the file name
    """
    if not os.path.exists(model_dir):
        os.makedirs(model_dir)
    checkpoint_name = "checkpoint-%d" % (n_episode)
    saver.save(sess, model_dir + "/" + checkpoint_name, global_step=global_t)
    return
|
{"/customgame/__init__.py": ["/customgame/custom_flappy_bird.py"]}
|
20,598
|
yinchuandong/A3C-FlappyBird
|
refs/heads/master
|
/game_server.py
|
import sys
import cPickle
import math
from game.flappy_bird import FlappyBird
from SocketServer import BaseRequestHandler, UDPServer
# Single shared game instance, advanced one frame per incoming UDP request.
flapp_bird = FlappyBird()
class UDPHandler(BaseRequestHandler):
    """Handles one UDP datagram: unpickles an action vector, advances the
    shared FlappyBird game one frame, and streams the pickled
    (frame, reward, terminal) tuple back in fixed-size blocks preceded by
    a pickled header describing the chunking."""

    def handle(self):
        action = self.request[0]
        # NOTE(review): unpickling data received over the network executes
        # arbitrary code if the peer is untrusted; acceptable only because
        # client and server run on localhost.
        action = cPickle.loads(action)
        socket = self.request[1]
        global flapp_bird
        x_t, reward, terminal = flapp_bird.frame_step(action)
        data = cPickle.dumps((x_t, reward, terminal))
        # not larger than 8192 due to the limitation of MTU of udp
        buffer_size = 8192
        total_size = len(data)
        block_num = int(math.ceil(total_size / float(buffer_size)))
        # send the length
        offset = 0
        # Header tells the client how many blocks to expect and their size.
        header = {
            "buffer_size": buffer_size,
            "total_size": total_size,
            "block_num": block_num
        }
        header = cPickle.dumps(header)
        socket.sendto(header, self.client_address)
        # Stream the payload in buffer_size chunks.
        while offset < total_size:
            end = offset + buffer_size
            end = end if end < total_size else total_size
            socket.sendto(data[offset: end], self.client_address)
            offset = end
        return
class GameServer(UDPServer):
    """Thin UDPServer wrapper bound to UDPHandler by default."""
    def __init__(self, server_address, handler_class=UDPHandler):
        UDPServer.__init__(self, server_address, handler_class)
        return
# how to use:
# args: index, please be consistent for your a3c agent thread index
# python game_server.py 0
if __name__ == "__main__":
    host, port = "localhost", 9600
    # Optional CLI argument: worker index; each worker gets port 9600 + i.
    if len(sys.argv) > 1:
        index = int(sys.argv[1])
        port = port + index
    print port
    server = GameServer((host, port), UDPHandler)
    server.serve_forever()
|
{"/customgame/__init__.py": ["/customgame/custom_flappy_bird.py"]}
|
20,599
|
yinchuandong/A3C-FlappyBird
|
refs/heads/master
|
/dqn_custom.py
|
import tensorflow as tf
import numpy as np
import random
import time
import os
from collections import deque
from netutil import conv_variable, fc_variable, conv2d, flatten_conv_layer, max_pool_2x2
from customgame import CustomFlappyBird
from PIL import Image
# Hyper-parameters for the DQN agent on the custom (in-process) game.
INPUT_SIZE = 84  # frames are INPUT_SIZE x INPUT_SIZE pixels
INPUT_CHANNEL = 4  # number of stacked frames per state
ACTIONS_DIM = 2  # flap / do nothing
GAMMA = 0.99  # reward discount factor
FINAL_EPSILON = 0.0001
INITIAL_EPSILON = 0.0001
ALPHA = 1e-6  # the learning rate of optimizer
MAX_TIME_STEP = 10 * 10 ** 7
EPSILON_TIME_STEP = 1 * 10 ** 6  # for annealing the epsilon greedy
EPSILON_ANNEAL = float(INITIAL_EPSILON - FINAL_EPSILON) / EPSILON_TIME_STEP
REPLAY_MEMORY = 50000  # max transitions kept for experience replay
BATCH_SIZE = 32
CHECKPOINT_DIR = 'tmp_dqn_cus/checkpoints'
LOG_FILE = 'tmp_dqn_cus/log'
class DQN(object):
    """Deep Q-Network agent: conv Q-network, epsilon-greedy exploration,
    experience-replay training, and TensorBoard/checkpoint bookkeeping."""

    def __init__(self):
        self.global_t = 0
        # Replay memory of (state, action, reward, next_state, terminal)
        # transitions; deque evicts the oldest once REPLAY_MEMORY is reached.
        self.replay_buffer = deque(maxlen=REPLAY_MEMORY)
        # q-network parameter
        with tf.device("/gpu:0"), tf.variable_scope("net"):
            self.create_network()
            self.create_minimize()
        # init session
        # self.session = tf.InteractiveSession()
        gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.4)
        sess_config = tf.ConfigProto(
            # intra_op_parallelism_threads=NUM_THREADS
            log_device_placement=False,
            allow_soft_placement=True,
            gpu_options=gpu_options
        )
        self.session = tf.Session(config=sess_config)
        self.session.run(tf.global_variables_initializer())
        self.saver = tf.train.Saver(tf.global_variables())
        self.restore()
        # Resume the epsilon-greedy anneal schedule at the restored step.
        self.epsilon = INITIAL_EPSILON - float(INITIAL_EPSILON - FINAL_EPSILON) \
            * min(self.global_t, EPSILON_TIME_STEP) / float(EPSILON_TIME_STEP)
        # for recording the log into tensorboard
        self.time_input = tf.placeholder(tf.float32)
        self.reward_input = tf.placeholder(tf.float32)
        tf.summary.scalar('living_time', self.time_input)
        tf.summary.scalar('reward', self.reward_input)
        self.summary_op = tf.summary.merge_all()
        self.summary_writer = tf.summary.FileWriter(LOG_FILE, self.session.graph)
        self.episode_start_time = 0.0
        self.episode_reward = 0.0
        return

    def create_network(self):
        """Build the conv Q-network: 84x84x4 state -> Q-values per action."""
        # input layer
        s = tf.placeholder('float', shape=[None, INPUT_SIZE, INPUT_SIZE, INPUT_CHANNEL], name='s')
        # hidden conv layer
        W_conv1, b_conv1 = conv_variable([8, 8, INPUT_CHANNEL, 32], "conv1")
        h_conv1 = tf.nn.relu(conv2d(s, W_conv1, 4) + b_conv1)
        h_poo1 = max_pool_2x2(h_conv1)
        W_conv2, b_conv2 = conv_variable([4, 4, 32, 64], "conv2")
        h_conv2 = tf.nn.relu(conv2d(h_poo1, W_conv2, 2) + b_conv2)
        W_conv3, b_conv3 = conv_variable([3, 3, 64, 64], "conv3")
        h_conv3 = tf.nn.relu(conv2d(h_conv2, W_conv3, 1) + b_conv3)
        h_conv3_out_size, h_conv3_flat = flatten_conv_layer(h_conv3)
        W_fc1, b_fc1 = fc_variable([h_conv3_out_size, 512], "fc1")
        h_fc1 = tf.nn.relu(tf.matmul(h_conv3_flat, W_fc1) + b_fc1)
        W_fc2, b_fc2 = fc_variable([512, ACTIONS_DIM], "fc2")
        Q_value = tf.matmul(h_fc1, W_fc2) + b_fc2
        self.s = s
        self.Q_value = Q_value
        return

    def create_minimize(self):
        """Build the TD loss (squared error on the chosen action's Q) and
        the Adam update op."""
        # self.a = tf.placeholder('float', shape=[None, ACTIONS_DIM])
        self.a = tf.placeholder(tf.int32, shape=[None])
        a_onehot = tf.one_hot(self.a, ACTIONS_DIM)
        self.y = tf.placeholder('float', shape=[None])
        # Q of the taken action only, selected via the one-hot mask.
        Q_action = tf.reduce_sum(tf.multiply(self.Q_value, a_onehot), axis=1)
        self.loss = tf.reduce_mean(tf.square(self.y - Q_action))
        # self.loss = tf.reduce_mean(tf.abs(self.y - Q_action))
        optimizer = tf.train.AdamOptimizer(ALPHA)
        # vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope="net")
        # for v in vars:
        #     print vars
        # gradients = tf.gradients(self.loss, vars)
        # gradients_clipped = [tf.clip_by_norm(grad, 10.0) for grad in gradients]
        # self.apply_gradients = optimizer.apply_gradients(zip(gradients_clipped, vars))
        self.apply_gradients = optimizer.minimize(self.loss)
        return

    def perceive(self, state, action, reward, next_state, terminal):
        """Store one transition, log episode stats, and train once the
        replay buffer has warmed up."""
        self.global_t += 1
        self.replay_buffer.append((state, action, reward, next_state, terminal))
        self.episode_reward += reward
        if self.episode_start_time == 0.0:
            self.episode_start_time = time.time()
        if terminal or self.global_t % 600 == 0:
            living_time = time.time() - self.episode_start_time
            self.record_log(self.episode_reward, living_time)
        if terminal:
            self.episode_reward = 0.0
            self.episode_start_time = time.time()
        # Warm-up: only start training once several minibatches are available.
        if len(self.replay_buffer) > BATCH_SIZE * 4:
            self.train_Q_network()
        return

    def get_action_index(self, state):
        """Greedy action: argmax over Q-values plus the max Q for logging."""
        Q_value_t = self.session.run(self.Q_value, feed_dict={self.s: [state]})[0]
        return np.argmax(Q_value_t), np.max(Q_value_t)

    def epsilon_greedy(self, state):
        """
        :param state: 1x84x84x3
        """
        Q_value_t = self.session.run(self.Q_value, feed_dict={self.s: [state]})[0]
        action_index = 0
        # With probability epsilon explore; otherwise act greedily.
        if random.random() <= self.epsilon:
            action_index = random.randrange(ACTIONS_DIM)
        else:
            action_index = np.argmax(Q_value_t)
        # Linearly anneal epsilon down to FINAL_EPSILON.
        if self.epsilon > FINAL_EPSILON:
            self.epsilon -= EPSILON_ANNEAL
        max_q_value = np.max(Q_value_t)
        return action_index, max_q_value

    def train_Q_network(self):
        '''
        do backpropogation
        '''
        minibatch = random.sample(self.replay_buffer, BATCH_SIZE)
        state_batch = [t[0] for t in minibatch]
        action_batch = [t[1] for t in minibatch]
        reward_batch = [t[2] for t in minibatch]
        next_state_batch = [t[3] for t in minibatch]
        terminal_batch = [t[4] for t in minibatch]
        y_batch = []
        # Bootstrap target: r (+ gamma * max_a' Q(s', a') for non-terminal s').
        Q_value_batch = self.session.run(self.Q_value, feed_dict={self.s: next_state_batch})
        for i in range(BATCH_SIZE):
            terminal = terminal_batch[i]
            if terminal:
                y_batch.append(reward_batch[i])
            else:
                y_batch.append(reward_batch[i] + GAMMA * np.max(Q_value_batch[i]))
        self.session.run(self.apply_gradients, feed_dict={
            self.y: y_batch,
            self.a: action_batch,
            self.s: state_batch
        })
        # Periodic checkpoint.
        if self.global_t % 100000 == 0:
            self.backup()
        return

    def record_log(self, reward, living_time):
        '''
        record the change of reward into tensorboard log
        '''
        summary_str = self.session.run(self.summary_op, feed_dict={
            self.reward_input: reward,
            self.time_input: living_time
        })
        self.summary_writer.add_summary(summary_str, self.global_t)
        self.summary_writer.flush()
        return

    def restore(self):
        # Resume from the latest checkpoint if one exists; the step count is
        # parsed from the checkpoint filename suffix ("checkpoint-<step>").
        checkpoint = tf.train.get_checkpoint_state(CHECKPOINT_DIR)
        if checkpoint and checkpoint.model_checkpoint_path:
            self.saver.restore(self.session, checkpoint.model_checkpoint_path)
            print("checkpoint loaded:", checkpoint.model_checkpoint_path)
            tokens = checkpoint.model_checkpoint_path.split("-")
            # set global step
            self.global_t = int(tokens[1])
            print(">>> global step set: ", self.global_t)
        else:
            print("Could not find old checkpoint")
        return

    def backup(self):
        # Save the current session at the current global step.
        if not os.path.exists(CHECKPOINT_DIR):
            os.mkdir(CHECKPOINT_DIR)
        self.saver.save(self.session, CHECKPOINT_DIR + '/' + 'checkpoint', global_step=self.global_t)
        return
def create_process_fn(use_rgb=False):
    """ preprocess inputted image according to different games
    Args:
        use_rgb: boolean, whether use rgb or gray image
    Returns:
        f: function
    """
    scale_size = (84, 110)
    crop_area = (0, 0, 84, 84)

    def f(img_array):
        img = Image.fromarray(img_array)
        # img = img.resize(scale_size, Image.ANTIALIAS)  # blurred
        img = img.resize(scale_size)
        if crop_area is not None:
            img = img.crop(crop_area)
        if use_rgb:
            return np.array(img)
        # Grayscale then binarize: pixels brighter than 100 become 255,
        # everything else 0 (``p > 100 and 255`` evaluates to False or 255).
        img = img.convert('L').point(lambda p: p > 100 and 255)
        img = np.reshape(img, (img.size[1], img.size[0], 1))
        return img.astype(np.uint8)
    return f
def test():
    # Manual check of the preprocessing pipeline: load a sample frame from
    # disk, binarize it, and write the result back out for visual inspection.
    process_fn = create_process_fn(use_rgb=False)
    img = Image.open('tmp2.png')
    img = np.array(img)
    img = process_fn(img)
    img = img.reshape([84, 84])
    Image.fromarray(img).save("tmp2-bin.png")
    # import cv2
    # image_data = cv2.imread("tmp2.png")
    # image_data = cv2.cvtColor(cv2.resize(image_data, (84, 84)), cv2.COLOR_BGR2GRAY)
    # ret, image_data = cv2.threshold(image_data, 1, 255, cv2.THRESH_BINARY)
    # cv2.imwrite("tmp2-bin-cv2.png", image_data)
    return
def main():
    '''
    the function for training
    '''
    # test()
    # return
    agent = DQN()
    env = CustomFlappyBird()
    process_fn = create_process_fn(use_rgb=False)
    while agent.global_t < MAX_TIME_STEP:
        # Start of an episode: the first preprocessed frame is replicated
        # INPUT_CHANNEL times to form the initial stacked state.
        o_t = env.reset()
        o_t = process_fn(o_t)
        s_t = np.concatenate([o_t] * INPUT_CHANNEL, axis=2)
        terminal = False
        while not terminal:
            action_id, action_q = agent.epsilon_greedy(s_t)
            o_t1, reward, terminal = env.step(action_id)
            o_t1 = process_fn(o_t1)
            # Slide the frame stack: drop the oldest channel, append the new one.
            s_t1 = np.concatenate([s_t[:, :, 1:], o_t1], axis=2)
            # action = np.zeros(ACTIONS_DIM)
            # action[action_id] = 1
            agent.perceive(s_t, action_id, reward, s_t1, terminal)
            if agent.global_t % 100 == 0 or terminal or reward == 1.0:
                print 'global_t:', agent.global_t, '/ epsilon:', agent.epsilon, '/ terminal:', terminal, \
                    '/ action:', action_id, '/ reward:', reward, '/ q_value:', action_q
            s_t = s_t1
    return
if __name__ == '__main__':
    # Entry point: train the DQN agent on the custom in-process game.
    main()
|
{"/customgame/__init__.py": ["/customgame/custom_flappy_bird.py"]}
|
20,600
|
yinchuandong/A3C-FlappyBird
|
refs/heads/master
|
/DQN.py
|
import tensorflow as tf
import numpy as np
import random
import time
import os
import sys
from netutil import *
from game.flappy_bird import FlappyBird
from collections import deque
# Hyper-parameters for the DQN agent on the UDP-backed game environment.
INPUT_SIZE = 84  # frames are INPUT_SIZE x INPUT_SIZE pixels
INPUT_CHANNEL = 4  # number of stacked frames per state
ACTIONS_DIM = 2  # flap / do nothing
LSTM_UNITS = 512  # width of the fully-connected layer
GAMMA = 0.99  # reward discount factor
FINAL_EPSILON = 0.0001
INITIAL_EPSILON = 0.0001
ALPHA = 1e-6  # the learning rate of optimizer
MAX_TIME_STEP = 10 * 10 ** 7
EPSILON_TIME_STEP = 1 * 10 ** 6  # for annealing the epsilon greedy
EPSILON_ANNEAL = float(INITIAL_EPSILON - FINAL_EPSILON) / EPSILON_TIME_STEP
BATCH_SIZE = 32
REPLAY_MEMORY = 20000  # max transitions kept for experience replay
CHECKPOINT_DIR = 'tmp_dqn/checkpoints'
LOG_FILE = 'tmp_dqn/log'
class Network(object):
    """Convolutional Q-network: 84x84x4 state -> Q-values over the actions.
    All variables live under `scope_name` so a main and a target copy can
    coexist in the same graph."""

    def __init__(self, scope_name):
        with tf.variable_scope(scope_name) as scope:
            # input layer
            self.state_input = tf.placeholder('float', shape=[None, INPUT_SIZE, INPUT_SIZE, INPUT_CHANNEL])
            # hidden conv layer
            self.W_conv1 = weight_variable([8, 8, INPUT_CHANNEL, 32])
            self.b_conv1 = bias_variable([32])
            h_conv1 = tf.nn.relu(conv2d(self.state_input, self.W_conv1, 4) + self.b_conv1)
            h_poo1 = max_pool_2x2(h_conv1)
            self.W_conv2 = weight_variable([4, 4, 32, 64])
            self.b_conv2 = bias_variable([64])
            h_conv2 = tf.nn.relu(conv2d(h_poo1, self.W_conv2, 2) + self.b_conv2)
            self.W_conv3 = weight_variable([3, 3, 64, 64])
            self.b_conv3 = bias_variable([64])
            h_conv3 = tf.nn.relu(conv2d(h_conv2, self.W_conv3, 1) + self.b_conv3)
            h_conv3_out_size = np.prod(h_conv3.get_shape().as_list()[1:])
            h_conv3_flat = tf.reshape(h_conv3, [-1, h_conv3_out_size])
            self.W_fc1 = weight_variable([h_conv3_out_size, LSTM_UNITS])
            self.b_fc1 = bias_variable([LSTM_UNITS])
            h_fc1 = tf.nn.relu(tf.matmul(h_conv3_flat, self.W_fc1) + self.b_fc1)
            self.W_fc2 = weight_variable([LSTM_UNITS, ACTIONS_DIM])
            self.b_fc2 = bias_variable([ACTIONS_DIM])
            self.Q_value = tf.matmul(h_fc1, self.W_fc2) + self.b_fc2
        return

    def get_vars(self):
        """All trainable variables of this network, in layer order.

        Fix: the original list omitted W_conv3/b_conv3, so any gradient or
        target-sync path built from get_vars() would silently skip the
        third convolutional layer.
        """
        return [
            self.W_conv1, self.b_conv1,
            self.W_conv2, self.b_conv2,
            self.W_conv3, self.b_conv3,
            self.W_fc1, self.b_fc1,
            self.W_fc2, self.b_fc2,
        ]
class DQN(object):
    """DQN agent for the UDP game environment: main + target networks,
    epsilon-greedy exploration, experience replay, TensorBoard logging."""

    def __init__(self):
        self.global_t = 0
        # Replay memory of (state, action, reward, next_state, terminal)
        # transitions; deque evicts the oldest once REPLAY_MEMORY is reached.
        self.replay_buffer = deque(maxlen=REPLAY_MEMORY)
        # q-network parameter
        self.create_network()
        self.create_minimize()
        # init session
        self.session = tf.InteractiveSession()
        self.session.run(tf.global_variables_initializer())
        self.saver = tf.train.Saver(tf.global_variables())
        self.restore()
        # Resume the epsilon-greedy anneal schedule at the restored step.
        self.epsilon = INITIAL_EPSILON - float(INITIAL_EPSILON - FINAL_EPSILON) \
            * min(self.global_t, EPSILON_TIME_STEP) / float(EPSILON_TIME_STEP)
        # for recording the log into tensorboard
        self.time_input = tf.placeholder(tf.float32)
        self.reward_input = tf.placeholder(tf.float32)
        tf.summary.scalar('living_time', self.time_input)
        tf.summary.scalar('reward', self.reward_input)
        self.summary_op = tf.summary.merge_all()
        self.summary_writer = tf.summary.FileWriter(LOG_FILE, self.session.graph)
        self.episode_start_time = 0.0
        self.episode_reward = 0.0
        return

    def create_network(self):
        # NOTE(review): target_net is built here but never synced or used —
        # train_Q_network() below bootstraps from main_net; confirm whether
        # a target-network update was intended.
        self.main_net = Network(scope_name='main')
        self.target_net = Network(scope_name='target')
        return

    def create_minimize(self):
        """Build the TD loss (squared error on the chosen action's Q) and
        the Adam update op."""
        self.a = tf.placeholder('float', shape=[None, ACTIONS_DIM])
        self.y = tf.placeholder('float', shape=[None])
        # Q of the taken action only, selected via the one-hot action mask.
        Q_action = tf.reduce_sum(tf.multiply(self.main_net.Q_value, self.a), axis=1)
        self.loss = tf.reduce_mean(tf.square(self.y - Q_action))
        self.optimizer = tf.train.AdamOptimizer(learning_rate=ALPHA)
        # self.optimizer = tf.train.RMSPropOptimizer(learning_rate=ALPHA, decay=0.99)
        self.apply_gradients = self.optimizer.minimize(self.loss)
        # self.gradients = tf.gradients(self.loss, self.main_net.get_vars())
        # clip_grads = [tf.clip_by_norm(grad, 40.0) for grad in self.gradients]
        # self.apply_gradients = self.optimizer.apply_gradients(zip(clip_grads, self.main_net.get_vars()))
        return

    def perceive(self, state, action, reward, next_state, terminal):
        """Store one transition, log episode stats, train once warmed up,
        and checkpoint periodically."""
        self.global_t += 1
        self.replay_buffer.append((state, action, reward, next_state, terminal))
        self.episode_reward += reward
        if self.episode_start_time == 0.0:
            self.episode_start_time = time.time()
        if terminal or self.global_t % 20 == 0:
            living_time = time.time() - self.episode_start_time
            self.record_log(self.episode_reward, living_time)
        if terminal:
            self.episode_reward = 0.0
            self.episode_start_time = time.time()
        if len(self.replay_buffer) > BATCH_SIZE:
            self.train_Q_network()
        if self.global_t % 100000 == 0:
            self.backup()
        return

    def epsilon_greedy(self, state):
        """
        :param state: 1x84x84x3
        """
        Q_value_t = self.session.run(
            self.main_net.Q_value,
            feed_dict={
                self.main_net.state_input: [state],
            })
        Q_value_t = Q_value_t[0]
        action_index = 0
        # With probability epsilon explore; otherwise act greedily.
        if random.random() <= self.epsilon:
            action_index = random.randrange(ACTIONS_DIM)
            print 'random-index:', action_index
        else:
            action_index = np.argmax(Q_value_t)
        # Linearly anneal epsilon down to FINAL_EPSILON.
        if self.epsilon > FINAL_EPSILON:
            self.epsilon -= EPSILON_ANNEAL
        max_q_value = np.max(Q_value_t)
        return action_index, max_q_value

    def train_Q_network(self):
        '''
        do backpropogation
        '''
        # len(minibatch) = BATCH_SIZE * LSTM_MAX_STEP
        minibatch = random.sample(self.replay_buffer, BATCH_SIZE)
        state_batch = [t[0] for t in minibatch]
        action_batch = [t[1] for t in minibatch]
        reward_batch = [t[2] for t in minibatch]
        next_state_batch = [t[3] for t in minibatch]
        terminal_batch = [t[4] for t in minibatch]
        y_batch = []
        # Bootstrap targets are computed from main_net (not target_net).
        Q_target = self.session.run(
            self.main_net.Q_value,
            feed_dict={
                self.main_net.state_input: next_state_batch,
            }
        )
        for i in range(len(minibatch)):
            terminal = terminal_batch[i]
            if terminal:
                y_batch.append(reward_batch[i])
            else:
                y_batch.append(reward_batch[i] + GAMMA * np.max(Q_target[i]))
                # y_batch.append(reward_batch[i] + GAMMA * Q_value[i][Q_action[i]])
        _, loss = self.session.run([self.apply_gradients, self.loss], feed_dict={
            self.y: y_batch,
            self.a: action_batch,
            self.main_net.state_input: state_batch,
        })
        # print loss
        return

    def record_log(self, reward, living_time):
        '''
        record the change of reward into tensorboard log
        '''
        summary_str = self.session.run(self.summary_op, feed_dict={
            self.reward_input: reward,
            self.time_input: living_time
        })
        self.summary_writer.add_summary(summary_str, self.global_t)
        self.summary_writer.flush()
        return

    def restore(self):
        # Resume from the latest checkpoint if one exists; the step count is
        # parsed from the checkpoint filename suffix ("checkpoint-<step>").
        checkpoint = tf.train.get_checkpoint_state(CHECKPOINT_DIR)
        if checkpoint and checkpoint.model_checkpoint_path:
            self.saver.restore(self.session, checkpoint.model_checkpoint_path)
            print("checkpoint loaded:", checkpoint.model_checkpoint_path)
            tokens = checkpoint.model_checkpoint_path.split("-")
            # set global step
            self.global_t = int(tokens[1])
            print(">>> global step set: ", self.global_t)
        else:
            print("Could not find old checkpoint")
        return

    def backup(self):
        # Save the current session at the current global step.
        if not os.path.exists(CHECKPOINT_DIR):
            os.mkdir(CHECKPOINT_DIR)
        self.saver.save(self.session, CHECKPOINT_DIR + '/' + 'checkpoint', global_step=self.global_t)
        return
def main():
    '''
    the function for training
    '''
    agent = DQN()
    env = FlappyBird()
    env.reset()
    s_t = env.s_t
    while True:
        action_id, action_q = agent.epsilon_greedy(s_t)
        env.process(action_id)
        # One-hot encode the chosen action for the replay buffer.
        action = np.zeros(ACTIONS_DIM)
        action[action_id] = 1
        s_t1, reward, terminal = (env.s_t1, env.reward, env.terminal)
        agent.perceive(s_t, action, reward, s_t1, terminal)
        if agent.global_t % 10 == 0:
            print 'global_t:', agent.global_t, '/ epsilon:', agent.epsilon, '/ terminal:', terminal, \
                '/ action:', action_id, '/ reward:', reward, '/ q_value:', action_q
        if terminal:
            env.reset()
        s_t = s_t1
        # env.update()  # it doesn't work, and cause Q NaN
        # break
    return
if __name__ == '__main__':
    # Entry point: train the DQN agent against the UDP game server.
    main()
|
{"/customgame/__init__.py": ["/customgame/custom_flappy_bird.py"]}
|
20,601
|
yinchuandong/A3C-FlappyBird
|
refs/heads/master
|
/game/game_state.py
|
import socket
import numpy as np
import cPickle
class GameState:
    """UDP client for the FlappyBird game server: sends pickled action
    vectors and reassembles the chunked pickled (frame, reward, terminal)
    responses into a 4-frame stacked state (84x84x4)."""

    def __init__(self, index=0, host='localhost', port=9600):
        # Each agent thread connects to its own server on port 9600 + index.
        self.host = host
        self.port = port + index
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.reset()
        return

    def frame_step(self, input_actions):
        """Send one action vector; receive and unpickle the server's reply."""
        sock = self.sock
        sock.sendto(cPickle.dumps(input_actions), (self.host, self.port))
        # First datagram is a pickled header describing the chunked payload.
        header = sock.recv(1000)
        header = cPickle.loads(header)
        # print header
        data = str()
        buffer_size = header["buffer_size"]
        total_size = header["total_size"]
        block_num = header["block_num"]
        # Reassemble the payload from block_num datagrams of at most
        # buffer_size bytes each.
        for i in range(block_num):
            receive_size = total_size - len(data)
            receive_size = receive_size if receive_size < buffer_size else buffer_size
            data += sock.recv(receive_size)
        # NOTE(review): unpickling data received over the network is unsafe
        # with untrusted peers; acceptable only on localhost.
        data = cPickle.loads(data)
        return data

    def reset(self):
        # Send the no-op action ([1, 0]) and replicate the resulting frame
        # four times to form the initial stacked state s_t.
        action = np.zeros([2])
        action[0] = 1
        x_t, reward, terminal = self.frame_step(action)
        self.s_t = np.stack((x_t, x_t, x_t, x_t), axis=2)
        self.reward = reward
        self.terminal = terminal
        return

    def process(self, action_id):
        # One-hot encode the action, step the game, and slide the frame
        # stack: drop the oldest frame, append the new one as s_t1.
        action = np.zeros([2])
        action[action_id] = 1
        x_t1, reward, terminal = self.frame_step(action)
        x_t1 = np.reshape(x_t1, (84, 84, 1))
        self.s_t1 = np.append(self.s_t[:, :, 1:], x_t1, axis=2)
        self.reward = reward
        self.terminal = terminal
        return

    def update(self):
        # Promote s_t1 to the current state after the caller consumed it.
        self.s_t = self.s_t1
        return
if __name__ == '__main__':
    # Smoke test: requires a running game server on localhost:9600.
    gamestate = GameState()
    for i in range(200):
        gamestate.process(0)
|
{"/customgame/__init__.py": ["/customgame/custom_flappy_bird.py"]}
|
20,602
|
yinchuandong/A3C-FlappyBird
|
refs/heads/master
|
/game_gym_server.py
|
import sys
import cPickle
import math
from SocketServer import BaseRequestHandler, UDPServer
from ple.games.flappybird import FlappyBird
from ple import PLE
# Module-level game instance, shared with UDPHandler below.
flapp_bird = FlappyBird()
ple = PLE(flapp_bird, fps=30, display_screen=True)
print ple.getActionSet()
ple.init()
def test():
    '''Run 300 frames through PLE locally, always pressing key 119 (flap).'''
    global ple
    for i in range(300):
        if ple.game_over():
            ple.reset_game()
        # Screen grab is fetched but currently unused (agent hook is commented out).
        observation = ple.getScreenRGB()
        # action = agent.pickAction(reward, observation)
        reward = ple.act(119)
    return
class UDPHandler(BaseRequestHandler):
    """Serve one frame_step request per UDP datagram.

    Reply protocol (mirrors game/game_state.py): a pickled header
    {buffer_size, total_size, block_num} followed by the pickled
    (x_t, reward, terminal) payload in <=8192-byte blocks.
    """
    def handle(self):
        # For a UDPServer, request is (datagram_bytes, client_socket).
        action = self.request[0]
        action = cPickle.loads(action)
        socket = self.request[1]
        global flapp_bird
        # NOTE(review): ple's FlappyBird exposes no frame_step(); this
        # presumably targets the customgame variant -- verify.
        x_t, reward, terminal = flapp_bird.frame_step(action)
        data = cPickle.dumps((x_t, reward, terminal))
        # not larger than 8192 due to the limitation of MXU of udp
        buffer_size = 8192
        total_size = len(data)
        block_num = int(math.ceil(total_size / float(buffer_size)))
        # send the length
        offset = 0
        header = {
            "buffer_size": buffer_size,
            "total_size": total_size,
            "block_num": block_num
        }
        header = cPickle.dumps(header)
        socket.sendto(header, self.client_address)
        # Stream the payload in fixed-size blocks; last block may be short.
        while offset < total_size:
            end = offset + buffer_size
            end = end if end < total_size else total_size
            socket.sendto(data[offset: end], self.client_address)
            offset = end
        return
class GameServer(UDPServer):
    """Thin UDPServer wrapper defaulting to the frame-serving UDPHandler."""
    def __init__(self, server_address, handler_class=UDPHandler):
        UDPServer.__init__(self, server_address, handler_class)
        return
# how to use:
# args: index, please be consistent for your a3c agent thread index
# python game_server.py 0
if __name__ == "__main__":
    # Server mode is currently disabled; runs the local PLE smoke test instead.
    # host, port = "localhost", 9600
    # if len(sys.argv) > 1:
    #     index = int(sys.argv[1])
    #     port = port + index
    #     print port
    # server = GameServer((host, port), UDPHandler)
    # server.serve_forever()
    test()
|
{"/customgame/__init__.py": ["/customgame/custom_flappy_bird.py"]}
|
20,606
|
airring/pubg
|
refs/heads/master
|
/pubg-flask.py
|
from flask import Flask,redirect,url_for,render_template,request,session
import os,socket,time,json
import config
from flask import current_app
app = Flask(__name__)

# Load the DB configuration from config.py.
# (The original comment here was mojibake-garbled Chinese whose second half
# sat alone on the next line -- a bare non-ASCII token, i.e. a SyntaxError.)
app.config.from_object(config)

# Session signing key.
# NOTE(review): a hard-coded SECRET_KEY belongs in an environment variable.
app.config['SECRET_KEY'] = '~\xc8\xc6\xe0\xf3,\x98O\xa8z4\xfb=\rNd'
# db = SQLAlchemy(app)
@app.route('/', methods=['GET'])
def hello_world():
    """Send visitors hitting the bare domain on to the index page."""
    landing_page = 'index.html'
    return redirect(landing_page)
@app.route('/index.html', methods=['GET'])
def index_html():
    """Serve the landing page template."""
    template = 'index.html'
    return render_template(template)
@app.route('/<fileurl>.html')
def viewhtml(fileurl):
    """Render whichever template matches the requested <name>.html path."""
    template_name = '%s.html' % fileurl
    return render_template(template_name)
if __name__ == '__main__':
    # Development server only; use a WSGI server in production.
    app.run(debug=True)
|
{"/pubg-flask.py": ["/config.py"]}
|
20,607
|
airring/pubg
|
refs/heads/master
|
/config.py
|
# MySQL connection settings, consumed via app.config.from_object(config).
# NOTE(review): credentials are hard-coded; consider environment variables.
dialect = 'mysql'
driver = 'mysqldb'
username = 'pd'
password = 'test123456'
host = '192.168.3.105'
port = '3306'
database = 'uppor'

# dialect+driver://username:password@host:port/database
SQLALCHEMY_DATABASE_URI = "%s+%s://%s:%s@%s:%s/%s?charset=utf8" % (
    dialect, driver, username, password, host, port, database)
SQLALCHEMY_TRACK_MODIFICATIONS = False
|
{"/pubg-flask.py": ["/config.py"]}
|
20,609
|
texuf/whiskeynode
|
refs/heads/master
|
/whiskeynode/terminals.py
|
from functools import partial
from pprint import pformat

from whiskeynode import whiskeycache
from whiskeynode import WhiskeyNode
from whiskeynode.edges import Edge
from whiskeynode.terminaltypes import TerminalType
from whiskeynode.exceptions import (BadEdgeRemovalException,
                                    InvalidEdgeDataException,
                                    InvalidTerminalException,
                                    InvalidTerminalOperationException,
                                    InvalidTerminalParameterException,
                                    InvalidTerminalStateException,
                                    )
'''
Requirements of connection terminals
-Lazy loading, only grab data if you have to
-Caching in memory - only grab data once
-easy to declare - simple dictionary declaration
-easy to use - dot notation syntax
'''
# Edge direction markers used by every terminal type.
OUTBOUND='OUTBOUND'
INBOUND='INBOUND'
BIDIRECTIONAL = 'BIDIRECTIONAL'
# Monotonically increasing id handed to each ListOfNodesTerminal (debug aid).
IDID = 0
def outbound_node(to_node_class,
                  create_on_request=False,
                  render=False,
                  voteable=False,
                  ):
    """Declare a single outbound connection to *to_node_class*.

    Returns a factory (a ``partial``) that the owning node instantiates.
    ``voteable`` is accepted for signature symmetry but currently unused.
    """
    return partial(NodeTerminal,
                   to_node_class,
                   OUTBOUND,
                   create_on_request=create_on_request,
                   render=render)
def inbound_node(to_node_class,
                 inbound_name,
                 render=False,
                 voteable=False,
                 ):
    """Declare a single inbound connection named *inbound_name*.

    Grabs only the first matching node; if there could ever be more than
    one connection, use an inbound_list instead.
    """
    return partial(NodeTerminal,
                   to_node_class,
                   INBOUND,
                   inbound_name=inbound_name,
                   render=render)
def outbound_list(to_node_class,
                  render=False,
                  attributes=None,
                  sort_func=None,
                  voteable=False,
                  ):
    """Declare an outbound list of connections to *to_node_class*.

    When *attributes* is given, the edges carry per-edge data and an
    AttributedListOfNodesTerminal is used instead of the plain list.
    """
    if attributes is None:
        return partial(ListOfNodesTerminal, to_node_class, OUTBOUND, render=render)
    return partial(AttributedListOfNodesTerminal, to_node_class, OUTBOUND,
                   render=render, attributes=attributes, sort_func=sort_func)
def inbound_list(to_node_class,
                 inbound_name,
                 attributes=None,
                 sort_func=None,
                 render=False,
                 voteable=False,
                 ):
    """Declare an inbound list of connections named *inbound_name*.

    With *attributes* set, the attributed variant is used so per-edge
    data can be stored and queried.
    """
    if attributes is None:
        return partial(ListOfNodesTerminal, to_node_class, INBOUND,
                       inbound_name=inbound_name, render=render)
    return partial(AttributedListOfNodesTerminal, to_node_class, INBOUND,
                   inbound_name=inbound_name, attributes=attributes,
                   sort_func=sort_func, render=render)
def bidirectional_list(to_node_class,
                       render=False,
                       voteable=False,
                       ):
    """Declare a symmetric (undirected) list of connections.

    Only valid between nodes of the same class -- the terminal enforces
    this at construction time.
    """
    return partial(ListOfNodesTerminal, to_node_class, BIDIRECTIONAL, render=render)
'''
class BaseTerminal():
def __init__(self, to_node_class, direction, origin_node, name, inbound_name, render, terminaltype):
self.activated = False
self.name = inbound_name if inbound_name is not None else name
self.node = origin_node
self.to_node_class = to_node_class
self.terminaltype = terminaltype
self.direction = direction
self._render = render
self._insave = False
if self.direction == INBOUND and inbound_name == None:
raise InvalidTerminalException('inbound_name cannot be none when direction is INBOUND')
def edge_display_name(self):
return '%s:%s' % (self.name, self.to_node_class.COLLECTION_NAME)
def edge_query(self):
raise NotImplementedError()
def get(self):
raise NotImplementedError()
def set(self, value):
raise NotImplementedError()
def delete(self):
raise NotImplementedError()
def render(self):
raise NotImplementedError()
def exists(self):
raise NotImplementedError()
def add_inbound_edge(self):
raise NotImplementedError()
def remove_inbound_edge(self):
raise NotImplementedError()
def remove_outbound_edge(self, edge):
raise NotImplementedError()
'''
class NodeTerminal():
    """A terminal holding at most one edge to a single node.

    Lazily resolves its Edge (and the node on the far side of it) from the
    database, caching both on the instance once fetched. `activated` means
    the edge has been looked up (or assigned) at least once.
    """
    def __init__(self, to_node_class, direction, origin_node, name, inbound_name=None, render=False, create_on_request=False ): #, inbound_edges, outbound_edges):
        self.activated = False
        # Inbound terminals adopt the name of the outbound terminal they mirror.
        self.name = inbound_name if inbound_name is not None else name
        self.original_name = name
        self.node = origin_node
        self.to_node_class = to_node_class
        self.terminaltype = TerminalType.NODE
        self.direction = direction
        self._render = render
        self._insave = False  # re-entrancy guard for save()
        if self.direction == INBOUND and inbound_name == None:
            raise InvalidTerminalException('inbound_name cannot be none when direction is INBOUND')
        self._edge = None
        self._to_node = None
        # When True, get() creates and links a fresh to_node on first access.
        self.create_on_request = create_on_request
        if self.direction != OUTBOUND and self.direction != INBOUND:
            raise InvalidTerminalException('Node terminals can only be INBOUND or OUTBOUND')
    def __repr__(self):
        return '%s node to %s.%s named %s' % (self.direction, self.to_node_class.__module__, self.to_node_class.__name__, self.name)
    def _get_to_node_id(self):
        # Resolve the edge, then read the id on its far side.
        self.get_edge()
        if self._edge:
            return self._edge.inboundId if self.direction == OUTBOUND else self._edge.outboundId
        return None
    def _get_to_node_from_cache(self):
        ''' without going to the database '''
        # NOTE(review): _get_to_node_id() -> get_edge() may still query the
        # DB for the edge itself; only the node lookup is cache-only.
        if self._to_node is None:
            _id = self._get_to_node_id()
            if _id:
                self._to_node = whiskeycache.RAM.get(_id, None)
        return self._to_node
    def add_inbound_edge(self, edge):
        # Called by the far node's terminal when it links to us.
        assert self.direction == INBOUND, \
            'Terminal [%s] on [%s] is an outbound node, you can\'t add inbound connections to an outbound node' % (self.name, self.node.__class__)
        if self._edge is not None and self._edge != edge:
            self._to_node = None
        if self._to_node is None:
            self.activated = True
            self._edge = edge
            self.get()
    def add_outbound_edge(self, edge):
        self.activated = True
        self._edge = edge
        self._to_node = self.to_node_class.from_id(self._get_to_node_id())
    def delete(self):
        #print "DELETE!!! "+str(self._edge)+" : "+self.name+" : "+str(self.node.__class__)
        assert self.direction == OUTBOUND, \
            'Terminal [%s] on [%s] is an inbound node, you can\'t remove connections from an inbound node' % (self.name, self.node.__class__)
        # Deleting is just setting the target to nothing.
        self.set(None)
    def edge_display_name(self):
        return '%s:%s' % (self.name, self.to_node_class.COLLECTION_NAME)
    def edge_query(self):
        # Mongo query selecting this terminal's edge document.
        if self.direction == OUTBOUND:
            return {'outboundId':self.node._id, 'name':self.name}
        else: #if self.direction == INBOUND
            return {'inboundId':self.node._id, 'outboundCollection':self.to_node_class.COLLECTION_NAME, 'name':self.name}
    def exists(self):
        # Cached edge first; fall back to a DB count.
        return self._edge != None or Edge.find(self.edge_query()).count() > 0
    def get_self(self):
        return self.get()
    def get(self):
        """Return the connected node, resolving (or creating) it lazily."""
        if self._to_node == None:
            self.get_edge()
            if self._edge is None and self.create_on_request:
                # No edge yet: create a fresh target node and link it.
                self.set(self.to_node_class())
            elif self._edge:
                self._to_node = self.to_node_class.from_id(self._get_to_node_id())
                assert self._to_node is not None, 'to node should not be none ' + str(self)
                # Mirror the edge onto the far node's opposite terminal.
                if self.direction == OUTBOUND:
                    self._to_node.add_inbound_edge(self.name, self._edge)
                else:
                    self._to_node.add_outbound_edge(self.name, self._edge)
        return self._to_node
    def get_edge(self):
        """Return this terminal's Edge, querying the DB on first access."""
        if not self.activated or self._edge is None:
            assert self._edge is None, 'edge should be none'
            self._edge = Edge.find_one(self.edge_query())
            assert self.direction == INBOUND or \
                self._edge is None or \
                self._edge.inboundCollection == self.to_node_class.COLLECTION_NAME, \
                'Edge collection doesn not match to_node_class on node named [%s] on class [%s] edge: %s' % (self.name, self.node.__class__, str(self._edge.to_dict()))
            self.activated = True
        return self._edge
    def remove_inbound_edge(self, edge):
        assert self.direction == INBOUND, \
            'Terminal [%s] on [%s] is an outbound node, you can\'t remove inbound connections from an outbound node' % (self.name, self.node.__class__)
        if self.activated:
            if self.get_edge() is not None and self._edge._id == edge._id:
                self._edge = None
                self._to_node = None
            #leaving activated as true, so lazy traversals know that something has changed
    def remove_outbound_edge(self, edge):
        assert self.direction == OUTBOUND
        if self.activated:
            if self.get_edge() is not None and self._edge._id == edge._id:
                self._edge = None
                self._to_node = None
            #leaving activated as true, so lazy traversals know that something has changed
    def render(self, render_terminals=False, *args, **kwargs):
        # Render the connected node, or an empty dict when unconnected.
        self.get()
        if self._to_node:
            return self._to_node.render(render_terminals=render_terminals, *args, **kwargs)
        else:
            return {}
    def render_pretty(self, do_print=True, *args, **kwargs):
        # NOTE(review): relies on pformat, which is not imported at module top.
        ret_val = pformat(self.render(*args, **kwargs))
        if do_print:
            print ret_val
        else:
            return ret_val
    def save(self, *args, **kwargs):
        """Save the target node and the edge; guarded against save() cycles."""
        if not self._insave:
            self._insave = True
            #print "SAVE!!! "+str(self._edge)+" : "+self.name+" : "+str(self.node.__class__)
            if self.activated and self._edge:
                if self._to_node:
                    self._to_node.save(*args, **kwargs)
                self._edge.save(*args, **kwargs)
            self._insave = False
    def set(self, value):
        """Point this terminal at `value` (or None), replacing any old edge."""
        assert self.direction == OUTBOUND, \
            'Terminal [%s] on [%s] is an inbound node, you can\'t add connections to an inbound node' % (self.name, self.node.__class__)
        # No-op when nothing would change.
        if value and value._id == self._get_to_node_id():
            return
        if value is None and self._get_to_node_id() is None:
            return
        # Unlink the existing connection, if any.
        self._get_to_node_from_cache()
        if self._to_node:
            self._to_node.remove_inbound_edge(self.name, self._edge)
        if self._edge:
            self._edge.remove()
        self._edge = None
        self._to_node = None
        if value is not None:
            if value.COLLECTION_NAME != self.to_node_class.COLLECTION_NAME:
                raise InvalidTerminalException('Terminal [%s] on [%s] takes [%s] not [%s]' % (
                    self.name, self.node.__class__, self.to_node_class, value.__class__))
            #print "SET!!! "+str(self._edge)+" : "+self.name+" : "+str(self.node.__class__)
            self._edge = Edge.from_nodes(self.node, value, self.name, self.terminaltype)
            self._to_node = value
            self._to_node.add_inbound_edge(self.name, self._edge)
        self.activated = True
class ListOfNodesTerminal():
    """A terminal managing edges to many nodes, exposed with list semantics.

    `_edges` maps far-node id -> Edge and is populated lazily by
    get_edges(); `_list` holds the node objects themselves and stays None
    until get() materializes it.
    """
    def __init__(self, to_node_class, direction, origin_node, name, inbound_name = None, render=False, **kwargs):
        self.activated = False
        self.name = inbound_name if inbound_name is not None else name
        self.original_name = name
        self.node = origin_node
        self.to_node_class = to_node_class
        self.terminaltype = TerminalType.LIST_OF_NODES
        self.direction = direction
        self._render = render
        self._insave = False  # re-entrancy guard for save()
        self._temp_yup_reference = [] #wanted to make appending o(1), so need to save a reference to the node so the whiskey weak reference cache doesn't drop it
        if self.direction == INBOUND and inbound_name == None:
            raise InvalidTerminalException('inbound_name cannot be none when direction is INBOUND')
        self._list = None
        self._edges = None
        self._initialized = False
        # Per-instance debug counter.
        global IDID
        self._idid = IDID
        IDID += 1
        if self.direction == BIDIRECTIONAL and type(origin_node) != to_node_class:
            raise InvalidTerminalException('Bidirectional lists can only be created between nodes of the same type')
    def __repr__(self):
        return '%s list to %s.%s named %s' % (self.direction, self.to_node_class.__module__, self.to_node_class.__name__, self.name)
    def __len__(self):
        return len(self.get_edges())
    def __getitem__(self, i):
        #if self.activated:
        self.get()
        return self._list[i]
    def __delitem__(self, i):
        raise NotImplementedError()
    #def __contains__(self, node):
    #    return node._id in self.get_edges()
    def _add_node(self, to_node):
        """Create an edge to `to_node` unless one already exists."""
        assert self.direction != INBOUND, \
            '(wrong direction) Terminal [INBOUND:%s] on [%s] is an inbound node, you can\'t add connections to an inbound node' % (self.name, self.node.__class__)
        assert to_node.COLLECTION_NAME == self.to_node_class.COLLECTION_NAME, \
            'Terminal [%s] on [%s] takes [%s] not [%s]' % (self.name, self.node.__class__, self.to_node_class, to_node.__class__)
        if not to_node._id in self.get_edges():
            self._edges[to_node._id] = Edge.from_nodes(self.node, to_node, self.name, self.terminaltype)
            to_node.add_inbound_edge(self.name, self._edges[to_node._id])
            if self._list is not None:
                self._list.append(to_node)
                self.sort()
            else:
                # List not materialized yet: hold a strong ref so the weak
                # cache keeps the node alive until save().
                self._temp_yup_reference.append(to_node)
    def _remove_node(self, to_node):
        """Remove the edge to `to_node` and unlink both sides."""
        assert self.direction != INBOUND, \
            'Terminal [%s] on [%s] is an inbound node, you can\'t remove connections from an inbound node' % (self.name, self.node.__class__)
        if to_node._id in self.get_edges():
            self.get()
            edge = self._edges[to_node._id]
            # Notify whichever side of the edge the far node is on.
            if edge.inboundId == to_node._id:
                to_node.remove_inbound_edge(self.name, edge)
            else:
                to_node.remove_outbound_edge(self.name, edge)
            edge.remove()
            del self._edges[to_node._id]
            self._list.remove(to_node)
            self.sort()
    def add_inbound_edge(self, edge):
        assert self.direction != OUTBOUND
        #we have to add inbound nodes here so that we know a save will
        #traverse all nodes and make the proper saves
        #self.get()
        if edge.outboundId not in self.get_edges():
            self._edges[edge.outboundId] = edge
            if self._list is not None:
                self._list.append(self.to_node_class.from_id(edge.outboundId))
                self.sort()
    def add_outbound_edge(self, edge):
        pass #don't think we need to do anything here
    def append(self, node):
        self._add_node(node)
    def count(self):
        ''' counts all items in db and in local cache '''
        return Edge.find(self.edge_query()).count()
    def delete(self):
        # Drop every connection.
        self.set([])
        self._temp_yup_reference = []
    def edge_display_name(self):
        return '%s:%s' % (self.name, self.to_node_class.COLLECTION_NAME)
    def edge_query(self, direction=None): #todo include to_node=None
        """Mongo query selecting this terminal's edges for `direction`."""
        if direction == None: direction = self.direction
        if direction == INBOUND:
            rv = {'inboundId':self.node._id, 'outboundCollection':self.to_node_class.COLLECTION_NAME, 'name':self.name}
        elif direction == OUTBOUND:
            rv = {'outboundId':self.node._id, 'name':self.name}
        elif direction == BIDIRECTIONAL:
            # A bidirectional edge may be stored in either direction.
            rv = {
                '$or':[
                    {'inboundId':self.node._id, 'outboundCollection':self.to_node_class.COLLECTION_NAME, 'name':self.name},
                    {'outboundId':self.node._id, 'name':self.name}
                ]
            }
        else:
            raise NotImplementedError('direction %s is not supported' % direction)
        return rv
    def exists(self):
        return len(self.get_edges()) > 0
    def extend(self, nodes):
        for node in nodes:
            self._add_node(node)
    def get_self(self):
        return self
    def get(self):
        # Materialize `_list` from the edge map on first use.
        if self._list is None:
            self.get_edges()
            self._list = self.to_node_class.from_ids(self._edges.keys())
            self.sort()
    def get_edge(self, node):
        #todo run edge_query with to_node
        return self.get_edges()[node._id]
    def get_edges(self):
        """Return the id -> Edge map, querying the DB on first access."""
        if self.activated == False:
            assert self._edges is None, '_edges should be None'
            self._edges = {}
            self.activated = True
            if self.direction == INBOUND or self.direction == BIDIRECTIONAL:
                for edge in Edge.find(self.edge_query(INBOUND), limit=200): #hack here, if there is an edge filter, skip the cache
                    self._edges[edge.outboundId] = edge
            if self.direction == OUTBOUND or self.direction == BIDIRECTIONAL:
                for edge in Edge.find(self.edge_query(OUTBOUND), limit=200): #hack here, if there is an edge filter, skip the cache
                    self._edges[edge.inboundId] = edge
                    #if self.check_errors
                    assert edge.inboundCollection == self.to_node_class.COLLECTION_NAME, \
                        'On node named [%s] on class [%s] data: %s' % (self.name, self.node.__class__, str(edge.to_dict()))
        return self._edges
    def insert(self, i, node):
        raise NotImplementedError()
    def pop(self, index=-1):
        # List-style pop: remove and return the node at `index`.
        self.get()
        node = self._list[index]
        self._remove_node(node)
        return node
    def remove(self, node):
        self._remove_node(node)
    def remove_inbound_edge(self, edge):
        assert self.direction != OUTBOUND
        if self.activated:
            if edge.outboundId in self._edges:
                if self._list is not None:
                    self._list.remove(self.to_node_class.from_id(edge.outboundId))
                del self._edges[edge.outboundId]
                self.sort()
    def remove_outbound_edge(self, edge):
        ''' called when a node we're connected to is removed '''
        if self.activated:
            if edge.inboundId in self._edges:
                del self._edges[edge.inboundId]
                if self._list != None:
                    self._list.remove(self.to_node_class.from_id(edge.inboundId))
    def render(self, render_terminals=False, *args, **kwargs):
        self.get()
        return[x.render(render_terminals=render_terminals, *args, **kwargs) for x in self._list]
    def render_pretty(self, do_print=True, *args, **kwargs):
        # NOTE(review): relies on pformat, which is not imported at module top.
        ret_val = pformat(self.render(*args, **kwargs))
        if do_print:
            print ret_val
        else:
            return ret_val
    def save(self, *args, **kwargs):
        """Save all connected nodes and edges; guarded against save() cycles."""
        if not self._insave:
            self._insave = True
            if self.activated and len(self._edges) > 0:
                if self._list:
                    for node in self._list:
                        node.save(*args, **kwargs) #saves shouldn't call the db if nothing has changed
                for edge in self._edges.values():
                    edge.save(*args, **kwargs) #saves shouldn't call the db if nothing has changed
            # Flush nodes appended before the list was materialized.
            for node in self._temp_yup_reference:
                node.save()
            self._temp_yup_reference = []
            self._insave = False
    def set(self, nodes):
        """Replace the entire connection list with `nodes`."""
        if type(nodes) != list:
            raise InvalidTerminalException('Terminal [%s] on [%s] should not be set to anything other than a list' % (self.name, self.to_node_class))
        self.get()
        old_nodes = self._list[:]
        for node in old_nodes:
            self._remove_node(node)
        assert len(self) == 0, 'Why didn\'t we clear our list?'
        for node in reversed(nodes):
            self._add_node(node)
    def sort(self, key=None):
        # Default order: newest edge first (edge _ids sort by creation).
        if self._list != None:
            if key is None:
                edges_for_sort = [(k,v) for k,v in self._edges.items()]
                edges_for_sort.sort(key=lambda x: x[1]._id, reverse=True)
                _ids = [x[0] for x in edges_for_sort]
                self._list.sort(key=lambda x: _ids.index(x._id))
            else:
                self._list.sort(key=key)
class AttributedListOfNodesTerminal(ListOfNodesTerminal):
    """List terminal whose edges carry a declared whitelist of data attributes."""
    def __init__(self, *args, **kwargs):
        ListOfNodesTerminal.__init__(self, *args, **kwargs)
        # Whitelist of edge-data keys; update() rejects anything else.
        self.attributes = kwargs['attributes']
        self.sort_func = kwargs.get('sort_func', None)
    def __repr__(self):
        return '%s list to %s.%s named %s with %s attributes' % (self.direction, self.to_node_class.__module__, self.to_node_class.__name__, self.name, str(self.attributes))
    def add(self, node, **kwargs):
        # Alias for append().
        return self.append(node, **kwargs)
    def append(self, node, **kwargs):
        # Link the node, then stamp the supplied attributes onto its edge.
        ListOfNodesTerminal.append(self, node)
        self.update(node, **kwargs)
    def render(self, render_terminals=False, custom_sort_func=None, *args, **kwargs):
        self.get()
        self.sort()
        ret_val = [self.render_one(x, render_terminals=render_terminals, *args, **kwargs) for x in self._list]
        # A call-site sort wins over the declared one.
        if custom_sort_func:
            return custom_sort_func(ret_val)
        elif self.sort_func:
            return self.sort_func(ret_val)
        else:
            return ret_val
    def render_one(self, node, render_terminals=False, *args, **kwargs):
        # Merge edge data with the rendered node (node keys win on collision).
        return dict(self.get_edge(node).data, **node.render(render_terminals, *args, **kwargs))
    def update(self, node, **kwargs):
        """Set declared edge attributes for `node`; raise on undeclared keys."""
        changes = {}  # NOTE(review): collected but never read afterwards
        edge = self.get_edge(node)
        for k,v in kwargs.items():
            if k in self.attributes:
                if v != edge.data.get(k):
                    changes[k] = v
                    edge.data[k] = v
            else:
                raise InvalidEdgeDataException('Edge attribute [%s] has not been explicitly defined for terminal [%s] in class [%s]' % (k, self.name, self.node.__class__))
|
{"/whiskeynode/voter.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/events.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/tests/test_edge.py": ["/whiskeynode/edges.py"], "/whiskeynode/traversals.py": ["/whiskeynode/terminaltypes.py"], "/whiskeynode/edges.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/whiskeycache.py": ["/whiskeynode/exceptions.py"]}
|
20,610
|
texuf/whiskeynode
|
refs/heads/master
|
/examples/friendsoffriends.py
|
'''
to run in python terminal:
python -c "execfile('examples/friendsoffriends.py')"
'''
from examples.helpers import Nameable
from random import random
from whiskeynode import WhiskeyNode
from whiskeynode.db import db
from whiskeynode.edges import Edge
from whiskeynode.terminals import outbound_node, bidirectional_list, inbound_list, bidirectional_list
'''
This is an example of finding friends of friends. The query is fairly convoluted
because our bidirectional friends terminal isn't directed, so we have to search
for both inbound and outbound relationships.
'''
class User(WhiskeyNode, Nameable):
    """Example user node with a symmetric 'friends' relationship."""
    COLLECTION_NAME = 'example_friendsoffriends_users'
    COLLECTION = db[COLLECTION_NAME]
    FIELDS = {
        'name':unicode,
    }
    @classmethod
    def init_terminals(cls):
        # Declared lazily so the terminal can reference the User class itself.
        cls.TERMINALS = {
            'friends': bidirectional_list(User),
        }
if __name__ == '__main__':
    print '\n===Friends of Friends Example===\n'
    users = [
        User.from_name('George Carlin'),
        User.from_name('Tom Waits'),
        User.from_name('Bubba'),
        User.from_name('George Harison'),
        User.from_name('Montell Williams'),
        User.from_name('George Clooney'),
        User.from_name('Kevin Bacon'),
    ]
    # Chain the users into a line: each is friends with the previous one.
    previous_user = None
    for user in users:
        if previous_user:
            previous_user.friends.append(user)
        previous_user = user
    for user in users:
        print '%s is friends with: ' % user.name, [x.name for x in user.friends]
    map(lambda x:x.save(), users)
    # Breadth-first walk from the first user toward the last.
    user_a = users[0]
    user_b = users[-1]
    friend_ids = [user_a._id]
    count = 0
    #look at all george's friends, then look at all of their friends, then look at all of their friends, until kevin's id is returned
    while(True):
        #get friends
        # Both $or arms are needed because a bidirectional edge may be stored
        # in either direction; each arm also excludes ids already visited.
        friends_of_friend_ids = Edge.COLLECTION.find({
            '$or':[
                {
                    '$and':[
                        {
                            'name':'friends',
                            'outboundCollection':User.COLLECTION_NAME,
                            'outboundId':{'$in':friend_ids},
                        },
                        {
                            'name':'friends',
                            'outboundCollection':User.COLLECTION_NAME,
                            'inboundId':{'$nin':friend_ids},
                        }
                    ]
                },
                {
                    '$and':[
                        {
                            'name':'friends',
                            'outboundCollection':User.COLLECTION_NAME,
                            'inboundId':{'$in':friend_ids},
                        },
                        {
                            'name':'friends',
                            'outboundCollection':User.COLLECTION_NAME,
                            'outboundId':{'$nin':friend_ids},
                        }
                    ]
                }
            ]
        }).distinct('inboundId')
        if len(friends_of_friend_ids) == 0:
            print '%s and %s are not connected' % (user_a.name, user_b.name)
            break
        if user_b._id in friends_of_friend_ids:
            print 'Found %s and %s are seperated by %d relationships' % (user_a.name, user_b.name, count + 1)
            break
        else:
            # Expand the frontier and keep walking.
            count = count + 1
            friend_ids = friend_ids + friends_of_friend_ids
|
{"/whiskeynode/voter.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/events.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/tests/test_edge.py": ["/whiskeynode/edges.py"], "/whiskeynode/traversals.py": ["/whiskeynode/terminaltypes.py"], "/whiskeynode/edges.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/whiskeycache.py": ["/whiskeynode/exceptions.py"]}
|
20,611
|
texuf/whiskeynode
|
refs/heads/master
|
/tests/__main__.py
|
if __name__ == "__main__":
    import nose
    if not nose.run():
        import sys
        # NOTE(review): `global` at module scope is a no-op, and mongomock is
        # imported here without being used -- both look like leftovers.
        global mongo
        import mongomock as mongo
        sys.exit(123) #if the tests fail, return non zero value to break build script
|
{"/whiskeynode/voter.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/events.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/tests/test_edge.py": ["/whiskeynode/edges.py"], "/whiskeynode/traversals.py": ["/whiskeynode/terminaltypes.py"], "/whiskeynode/edges.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/whiskeycache.py": ["/whiskeynode/exceptions.py"]}
|
20,612
|
texuf/whiskeynode
|
refs/heads/master
|
/whiskeynode/voter.py
|
from whiskeynode import WhiskeyNode
from whiskeynode.db import db
from whiskeynode.exceptions import InvalidEdgeParameterException
from whiskeynode.fieldtypes import _none
class Voter(WhiskeyNode):
'''
DOCUMENTBASE PROPERTIES
'''
COLLECTION_NAME = 'edges_voters'
COLLECTION = db[COLLECTION_NAME]
FIELDS = {
}
ENSURE_INDEXES = [
]
def __init__(self, *args, **kwargs):
WhiskeyNode.__init__(self, *args, **kwargs)
@classmethod
def init_terminals(cls):
'''
from whiskeynode.terminals import outbound_list
from whiskeynode.traversals import lazy_traversal
from whiskeynode.users.user import User
cls.TRAVERSALS = {
'votes':lazy_traversal('voters.count'),
'why':lazy_traversal('voters.edges.why')
}
cls.TERMINALS = {
'voters' : outbound_list(User, attributes=['why']),
}
'''
|
{"/whiskeynode/voter.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/events.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/tests/test_edge.py": ["/whiskeynode/edges.py"], "/whiskeynode/traversals.py": ["/whiskeynode/terminaltypes.py"], "/whiskeynode/edges.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/whiskeycache.py": ["/whiskeynode/exceptions.py"]}
|
20,613
|
texuf/whiskeynode
|
refs/heads/master
|
/tests/test_whiskeynode.py
|
from bson.objectid import ObjectId
from bson.dbref import DBRef
from datetime import datetime, timedelta
from functools import partial
from unittest import TestCase
from whiskeynode import WhiskeyNode
from whiskeynode import whiskeycache
from whiskeynode.db import db
from whiskeynode.exceptions import InvalidFieldNameException, FieldNameNotDefinedException
import mock
#properties that aren't listed in fields shouldn'd save
class D1(WhiskeyNode):
    """Fixture: has a @property that is NOT declared in FIELDS (must not persist)."""
    COLLECTION_NAME = 'D1'
    COLLECTION = db[COLLECTION_NAME]
    def __init__(self, *args, **kwargs):
        WhiskeyNode.__init__(self, *args, **kwargs)
    @property
    def myJaws(self):
        return 'how_small'
#properties that are listed in fields should save
# Module-level constant read by D2.myJaws and checked in test_properties_save.
how_big = 'Very big.'
class D2(WhiskeyNode):
    """Fixture: 'myJaws' IS declared in FIELDS and exposed via a property."""
    COLLECTION_NAME = 'D2'
    COLLECTION = db[COLLECTION_NAME]
    FIELDS = {
        'myJaws':unicode,
    }
    def __init__(self, *args, **kwargs):
        WhiskeyNode.__init__(self, *args, **kwargs)
    @property
    def myJaws(self):
        # Reads the module-level `how_big` constant defined above.
        return how_big
class D3(WhiskeyNode):
    """Fixture with scalar, dict, and list fields; used by most tests below."""
    COLLECTION_NAME = 'D3'
    COLLECTION = db[COLLECTION_NAME]
    FIELDS = {
        'myJaws':unicode,
        'some_dict':dict,
        'some_list':list,
    }
    def __init__(self, *args, **kwargs):
        WhiskeyNode.__init__(self, *args, **kwargs)
class D5(WhiskeyNode):
    """Fixture with a single unicode field and a lowercase collection name."""
    COLLECTION_NAME = 'd5'
    COLLECTION = db[COLLECTION_NAME]
    FIELDS = {
        'myJaws':unicode,
    }
    def __init__(self, *args, **kwargs):
        WhiskeyNode.__init__(self, *args, **kwargs)
class DInvalid(WhiskeyNode):
    """Fixture whose FIELDS use reserved/invalid names; constructing it
    is expected to raise InvalidFieldNameException (see the test below)."""
    COLLECTION_NAME = 'DInvalid'
    COLLECTION = db[COLLECTION_NAME]
    FIELDS = {
        'connections':unicode,
        'recepeptors':unicode,
        '_dict':dict
    }
    def __init__(self, *args, **kwargs):
        WhiskeyNode.__init__(self, *args, **kwargs)
class DocumentBaseTest(TestCase):
def tearDown(self):
WhiskeyNode.COLLECTION.drop()
def test_init_should_return_a_document(self):
class A(WhiskeyNode):pass
d = A()
self.assertIsInstance(d, WhiskeyNode)
def test_render(self):
d = D3()
dic = d.render()
self.assertTrue('myJaws' in dic.keys())
self.assertTrue('_id' not in dic.keys())
def test_save_find_remove(self):
d = D3()
d.save()
c = D3.COLLECTION.find({'_id':d._id})
self.assertTrue(c.count() == 1)
d.remove()
c = D3.COLLECTION.find({'_id':d._id})
self.assertTrue(c.count() == 0)
def test_properties_save(self):
how_small = 'Very small.'
d1 = D1()
d1.save()
d1_returned = d1.COLLECTION.find_one({'_id':d1._id})
self.assertTrue(d1_returned is not None)
self.assertTrue(d1_returned.get('myJaws') is None)
d2 = D2()
d2.save()
d2_returned = d2.COLLECTION.find_one({'_id':d2._id})
#print "d2_returned: " + str(d2_returned)
self.assertTrue(d2_returned is not None)
self.assertTrue(d2_returned.get('myJaws') is not None)
self.assertTrue(d2_returned['myJaws'] == how_big)
def test_update(self):
how_big = 'So big.'
class A(WhiskeyNode):pass
d = A()
try:
d.update({'myJaws':'Are so small.'}) #updates should ignore properties that aren't in Fields
except FieldNameNotDefinedException as e:
pass
else:
raise FieldNameNotDefinedException('Updating with invalid field names should raise an exception.')
d1 = D3()
d1.update({'myJaws':how_big})
self.assertTrue(d1.myJaws == how_big)
d1.save()
d1_returned = D3.COLLECTION.find_one({'_id':d1._id})
self.assertTrue(d1_returned['myJaws'] == how_big)
d2 = D3.from_dict({'myJaws':how_big, 'someOtherProp':True})
d2.save()
d2_returned = D3.COLLECTION.find_one({'_id':d2._id})
self.assertTrue(d2_returned.get('myJaws') == how_big)
self.assertTrue(d2_returned.get('someOtherProp') == True)
def test_from_dict(self):
how_big = 'So big.'
d = D3.from_dict({'myJaws':how_big})
d.save()
d2 = D3.COLLECTION.find_one({'_id':d._id})
self.assertTrue(d2['myJaws'] == how_big)
def test_ne(self):
how_big = 'So big.'
d = D3.from_dict({'myJaws':how_big})
d2 = D3.find({'myJaws':{'$ne':'small'}})
self.assertTrue(d in list(d2))
def test_invalid_field_raises_error(self):
try:
d1 = DInvalid()
except InvalidFieldNameException:
pass
else:
raise InvalidFieldNameException("invalid field names should raise an error")
def test_save(self):
"""save() writes only when the node is new, dirty, or its content differs from the last save."""
d1 = D3({'some_prop':'prop', 'some_dict':{'hey':'heyhey', 'um':{'yeah':'thats right'}}, 'some_list':['a', 'b', 'c']})
# a freshly constructed node counts as dirty -> first save() must hit mongo
self.assertTrue(d1._diff_dict(d1._to_dict()) or d1._dirty)
with mock.patch('mongomock.Collection.save') as save_mock:
d1.save()
#print 'should save'
self.assertTrue(save_mock.call_count == 1)
# after saving, the node is clean -> a second save() must be a no-op
self.assertFalse(d1._diff_dict(d1._to_dict()) or d1._dirty)
with mock.patch('mongomock.Collection.save') as save_mock:
d1.save()
#print 'should not save'
self.assertTrue(save_mock.call_count == 0)
# mutating a declared field dirties the node again
d1.myJaws = 'Big.'
self.assertTrue(d1._diff_dict(d1._to_dict()) or d1._dirty)
with mock.patch('mongomock.Collection.save') as save_mock:
d1.save()
#print 'should save'
self.assertTrue(save_mock.call_count == 1)
#print 'should not save'
# a round trip through _to_dict/from_dict yields a clean node
d = d1._to_dict()
d1 = D3.from_dict(d)
self.assertFalse(d1._diff_dict(d1._to_dict()) or d1._dirty)
with mock.patch('mongomock.Collection.save') as save_mock:
d1.save()
#print 'should not save'
self.assertTrue(save_mock.call_count == 0)
#print 'should save'
# touching lastModified, nested dicts, or lists must each re-dirty the node
d1.lastModified = datetime.now()+timedelta(seconds=1)
self.assertTrue(d1._diff_dict(d1._to_dict()) or d1._dirty)
with mock.patch('mongomock.Collection.save') as save_mock:
d1.save()
#print 'should save'
self.assertTrue(save_mock.call_count == 1)
d1.some_dict['hey'] = 'heyheyhey'
self.assertTrue(d1._diff_dict(d1._to_dict()) or d1._dirty)
with mock.patch('mongomock.Collection.save') as save_mock:
d1.save()
#print 'should save'
self.assertTrue(save_mock.call_count == 1)
d1.some_dict['um']['yeah'] = 'what you say?'
self.assertTrue(d1._diff_dict(d1._to_dict()) or d1._dirty)
with mock.patch('mongomock.Collection.save') as save_mock:
d1.save()
#print 'should save'
self.assertTrue(save_mock.call_count == 1)
d1.some_list.append('f')
self.assertTrue(d1._diff_dict(d1._to_dict()) or d1._dirty)
with mock.patch('mongomock.Collection.save') as save_mock:
d1.save()
#print 'should save'
self.assertTrue(save_mock.call_count == 1)
def test_find(self):
    """find() returns every saved node and honors $in filters."""
    D3.COLLECTION.drop()
    saved = []
    for _ in range(4):
        node = D3()
        node.save()
        saved.append(node)
    everything = list(D3.find())
    self.assertIsInstance(everything, list)
    self.assertTrue(len(everything) == 4)
    self.assertIsInstance(everything[0], D3)
    wanted = [saved[0]._id, saved[1]._id, saved[2]._id]
    subset = list(D3.find({'_id': {'$in': wanted}}))
    self.assertTrue(len(subset) == 3)
def test_whiskeycursor_next(self):
"""find() yields newest-first (default sort [('_id', -1)]) after the cache is cleared."""
D3.COLLECTION.drop()
dees = [D3(), D3(), D3()]
for d in dees:
d.save()
whiskeycache.clear_cache()
whiskey_cursor = D3.find()
nexted = whiskey_cursor.next()
# most recently created node comes out first
self.assertTrue(nexted._id == dees[2]._id)
# NOTE(review): len() here reflects the remaining count after one next() -- confirm intended
self.assertTrue(len(whiskey_cursor)==len(dees)-1)
whiskey_cursor = D3.find()
# a full iteration walks the nodes in reverse creation order
for i,d in enumerate(whiskey_cursor):
index = len(dees) - i - 1
self.assertTrue(d._id == dees[index]._id)
def test_from_db_ref(self):
"""A DBRef can be rehydrated generically (bare WhiskeyNode subclass) or via the proper class."""
# create a doc with a known field value
how_big = 'So big.'
d = D5()
d.myJaws = how_big
d.save()
# build a DBRef; these save natively in the db but carry no class information
dbref = DBRef(d.COLLECTION_NAME, d._id)
whiskeycache.clear_cache()
class A(WhiskeyNode):pass
from_ref = A.from_dbref(dbref.collection, dbref.id)
self.assertTrue(from_ref.get_field('myJaws') == how_big)
# a field can be declared and saved on this generic document
even_bigger = 'even bigger...'
from_ref.add_field('myJaws', unicode)
self.assertTrue(from_ref.myJaws == how_big)
from_ref.myJaws = even_bigger
from_ref.save()
whiskeycache.clear_cache()
# make sure the generic save actually persisted
from_ref3 = A.from_dbref(dbref.collection, dbref.id)
self.assertTrue(from_ref3.get_field('myJaws') == even_bigger)
whiskeycache.clear_cache()
# retrieving the doc with the proper class sees the updated value
from_ref2 = D5.from_dbref(dbref.collection, dbref.id)
self.assertTrue(from_ref2.get_field('myJaws') == even_bigger)
self.assertFalse(from_ref2.myJaws == how_big)
self.assertTrue(from_ref2.myJaws == even_bigger)
def test_or_query(self):
"""whiskeycache.find() must agree with mongo for queries containing $or clauses."""
D3.COLLECTION.drop()
whiskeycache.clear_cache()
theese_dees = [D3({'myJaws':'big'}),D3({'myJaws':'small'}),D3({'myJaws':'just right'})]
for d in theese_dees:
d.save()
# a spread of query shapes: plain, $or combined with other terms, bare $or,
# and $or over list/dict equality
queries = [
{'myJaws':'big'
},
{
'myJaws':'big',
'someOtherVal':None,
'$or':[
{'myJaws':'small'},
{'myJaws':'just right'},
]
},
{
'$or':[
{'myJaws':'small'},
{'myJaws':'just right'},
]
},
{
'some_dict':[],
'$or':[
{'myJaws':'big'},
{'myJaws':'just right'},
]
},
{
'$or':[
{'some_list':[]},
{'some_dict':{}}
]
}
]
i = 1
# for each query, the cache's count must match mongo's count
for query in queries:
print '='*72
print str(i) + 'query ' + str(query)
self.assertEqual( len(whiskeycache.find(D3, query, [('_id', -1)] )), D3.COLLECTION.find(query).count())
i += 1
def test_skip(self):
    """find(skip=N) skips N results, counting cached (unsaved) nodes."""
    D3.COLLECTION.drop()
    whiskeycache.clear_cache()
    # constructing the nodes is enough: __init__ registers them in the cache;
    # the list keeps them referenced for the duration of the test
    unsaved = [D3({'myJaws': '1'}), D3({'myJaws': '2'}), D3({'myJaws': '3'})]
    self.assertEqual(D3.find({}, skip=2).count(), 1)
    self.assertEqual(D3.find({}, sort=[('myJaws', 1)], skip=2).next().myJaws, '3')
    self.assertEqual(D3.find({}, skip=4).count(), 0)
def test_dequeue(self):
    """Iterating a cursor over N saved nodes yields exactly N items."""
    D3.drop()
    total = 100
    kept = []
    for _ in range(total):
        node = D3({'myJaws': 'so sweaty'})
        node.save()
        kept.append(node)
    seen = 0
    for _ in D3.find({}, sort=[('myJaws', 1)]):
        seen += 1
    self.assertEqual(seen, total)
|
{"/whiskeynode/voter.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/events.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/tests/test_edge.py": ["/whiskeynode/edges.py"], "/whiskeynode/traversals.py": ["/whiskeynode/terminaltypes.py"], "/whiskeynode/edges.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/whiskeycache.py": ["/whiskeynode/exceptions.py"]}
|
20,614
|
texuf/whiskeynode
|
refs/heads/master
|
/whiskeynode/fieldtypes.py
|
'''
because sometimes you want a boolean that defaults to true
'''
#supported types from pymongo:
'''
=================================== ============= ===================
Python Type BSON Type Supported Direction
=================================== ============= ===================
None null both
bool boolean both
int [#int]_ int32 / int64 py -> bson
long int64 both
float number (real) both
string string py -> bson
unicode string both
list array both
dict / `SON` object both
datetime.datetime [#dt]_ [#dt2]_ date both
compiled re regex both
`bson.binary.Binary` binary both
`bson.objectid.ObjectId` oid both
`bson.dbref.DBRef` dbref both
None undefined bson -> py
unicode code bson -> py
`bson.code.Code` code py -> bson
unicode symbol bson -> py
bytes (Python 3) [#bytes]_ binary both
=================================== ============= ===================
'''
#more types
def _true_bool():
return True
def _none():
return None
class FieldDict(dict):
    """dict with attribute-style access; missing attributes read as None.

    Holds a node's field-name -> field-type mapping.
    """
    def __getattr__(self, attr):
        return self.get(attr, None)
    __setattr__ = dict.__setitem__
    __delattr__ = dict.__delitem__
    def __repr__(self):
        """Render keys sorted, one per line, for stable debug output."""
        # FIX: dict.keys() is a view on Python 3 and has no .sort();
        # sorted() behaves identically on Python 2 and 3.
        ret_val = ['{\n']
        for key in sorted(self.keys()):
            ret_val.append(' %s: %r\n' % (key, self[key]))
        ret_val.append('}')
        return ''.join(ret_val)
|
{"/whiskeynode/voter.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/events.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/tests/test_edge.py": ["/whiskeynode/edges.py"], "/whiskeynode/traversals.py": ["/whiskeynode/terminaltypes.py"], "/whiskeynode/edges.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/whiskeycache.py": ["/whiskeynode/exceptions.py"]}
|
20,615
|
texuf/whiskeynode
|
refs/heads/master
|
/profile.py
|
import sys
import timeit

# FIX: stray debug print removed -- importing this module should have no side effects
def run_profile():
    """Time whiskeycache.find() over 10,000 cached nodes using timeit.

    Prints the list of per-repeat timings (R repeats of N executions each).
    """
    # setup builds a Node subclass and populates the whiskey cache;
    # it runs once per repeat inside timeit's sandbox.
    # FIX: reconstructed the indentation inside this string -- it must be
    # valid Python when exec'd by timeit (the source had been flattened).
    setup = '''
from whiskeynode import WhiskeyNode
from whiskeynode import whiskeycache
from whiskeynode.db import db
default_sort = [('_id', -1)]
class Node(WhiskeyNode):
    COLLECTION_NAME = 'test_node'
    COLLECTION = db[COLLECTION_NAME]
    FIELDS = {
        'myVar': int,
    }
    def __init__(self, *args, **kwargs):
        WhiskeyNode.__init__(self, *args, **kwargs)
nodes = [Node({'myVar': i}) for i in range(10000)]
'''
    query = '''
whiskeycache.find(Node, {"myVar":{"$gt":500}}, default_sort)
'''
    N = 1  # executions per repeat
    R = 3  # number of repeats
    # leftover debug print removed; print() works on both Python 2 and 3
    print(timeit.repeat(query, setup=setup, repeat=R, number=N))
if __name__ == "__main__":
# entry point: run the benchmark when executed as a script
run_profile()
|
{"/whiskeynode/voter.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/events.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/tests/test_edge.py": ["/whiskeynode/edges.py"], "/whiskeynode/traversals.py": ["/whiskeynode/terminaltypes.py"], "/whiskeynode/edges.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/whiskeycache.py": ["/whiskeynode/exceptions.py"]}
|
20,616
|
texuf/whiskeynode
|
refs/heads/master
|
/examples/activities.py
|
'''
to run in python terminal:
python -c "execfile('examples/activities.py')"
'''
from bson.code import Code
from examples.helpers import Nameable, make_list
from random import random
from whiskeynode import WhiskeyNode
from whiskeynode.db import db
from whiskeynode.edges import Edge
from whiskeynode.terminals import outbound_node, outbound_list, inbound_list, bidirectional_list
#
# User
# - User object, contains a list of activities
#
class User(WhiskeyNode, Nameable):
"""Example user node; owns an outbound 'activities' terminal."""
COLLECTION_NAME = 'example_activities_users'
COLLECTION = db[COLLECTION_NAME]
FIELDS = {
'name':unicode,
}
@classmethod
def init_terminals(cls):
# declared lazily so Activity (defined below) is resolvable here
cls.TERMINALS = {
'activities': outbound_list(Activity),
}
#
# Activity
# - Activity Object, contans a list of users that have this activity
#
class Activity(WhiskeyNode, Nameable):
"""Example activity node; participants arrive via the inbound side of User.activities."""
COLLECTION_NAME = 'example_activities_activities'
COLLECTION = db[COLLECTION_NAME]
FIELDS = {
'name':unicode,
}
@classmethod
def init_terminals(cls):
cls.TERMINALS = {
# mirror of User.activities
'users': inbound_list(User, 'activities'),
'relatedAbilities': outbound_list(Activity),
}
if __name__ == '__main__':
# demo script: builds users and activities, then explores the graph several ways
print '\nACTIVITIES\n'
print 'PART 1: A User Named John and an Activity Called Dancing'
#init a user and an activity
john = User.from_name('John')
dancing = Activity.from_name('dancing')
print 'John starts dancing.'
john.activities.append(dancing)
# appending to the outbound terminal is reflected on the inbound side immediately
if john in dancing.users:
print 'John is dancing.'
else:
print 'John is not dancing'
print '\nPART 2: Users Participate in Activities'
users = [
john,
User.from_name('George Carlin'),
User.from_name('Tom Waits'),
User.from_name('Bubba'),
]
print 'Our users are', make_list(users)
activities = [
dancing,
Activity.from_name('flying'),
Activity.from_name('comedy'),
Activity.from_name('enormous jaws'),
Activity.from_name('karate'),
Activity.from_name('hula hooping'),
Activity.from_name('knitting'),
Activity.from_name('x-ray vision'),
]
print 'Our activities are', make_list(activities)
#give each person a few activities at random
print 'Users are (randomly) starting to do activities...'
for user in users:
index = len(activities)-1
while(True):
index = int(round(float(index) - random() * len(activities) /2.0 ))
if index >= 0:
user.activities.append(activities[index])
else:
break
print user.name, 'started', make_list(user.activities)
#do some exploration
print 'Look at who is doing activities together.'
for user in users:
for activity in user.activities:
print user.name, 'does', activity.name, 'with', make_list([x for x in activity.users if x != user])
print '\nPART 3: Use edge queries to find users'
# NOTE(review): assumes save() returns the node; if it returns None these
# map() calls replace users/activities with lists of None -- confirm
users = map(lambda x: x.save(), users)
activities = map(lambda x: x.save(), activities)
for activity in activities:
user_ids = Edge.COLLECTION.find(
{
'name':'activities',
'outboundCollection':User.COLLECTION_NAME,
'inboundCollection':Activity.COLLECTION_NAME,
'inboundId':activity._id
}
).distinct('outboundId')
print 'Who is %s?' % activity.name, make_list(User.from_ids(user_ids))
print '\nPART 4: Establish (Random) Activity Relationships, Find Related Activities Partners'
#give each activity some related activities
print 'Establishing activity relationships...'
for activity in activities:
for a2 in activities:
if activity != a2 and random() > .75:
activity.relatedAbilities.append(a2)
activity.save()
print activity.name.capitalize(), 'is now related to', make_list(activity.relatedAbilities)
print 'Done...'
print '\nPart 5: Using Silly Slow Way to Find Related Users...'
#search for related activities in the traditional way (lots of database queries here, lots of loops)
for user in users:
print 'Looking for users with activities related to %s\'s activities' % user.name, make_list(user.activities)
for activity in user.activities:
print activity.name.capitalize() ,'is related to', make_list(activity.relatedAbilities)
for related_ability in activity.relatedAbilities:
if related_ability not in user.activities:
print user.name, 'should do', related_ability.name, 'with', make_list(filter(lambda x: x != user, related_ability.users))
else:
print user.name, 'is already doing', related_ability.name, 'with', make_list(filter(lambda x: x != user, related_ability.users))
#instead use the graph, lets see if we can reduce the number of queries and loops
print '\nPart 6: Using Edge queries to find related users...'
for user in users:
#get this user's activity ids
ability_ids = Edge.COLLECTION.find(
{
'name':'activities',
'outboundId':user._id
}
).distinct('inboundId')
#get activities related to this users activities
related_ability_ids = Edge.COLLECTION.find(
{
'name':'relatedAbilities',
'outboundId':{'$in':ability_ids},
'inboundId':{'$nin':ability_ids}
}
).distinct('inboundId')
#get users who have those activities
edge_cursor = Edge.COLLECTION.find(
{
'name':'activities',
'outboundCollection':user.COLLECTION_NAME,
'outboundId':{'$ne':user._id},
'inboundId':{'$in':related_ability_ids},
}
)
#print the result
print 'Who has activities related to %s\'s activities?' % user.name, \
make_list(['%s does %s' % (User.from_id(x['outboundId']).name, Activity.from_id(x['inboundId']).name) for x in edge_cursor])
print '\nPart 7: Using MongoDB Group aggregation to find users with common activites.'
comp_user = User.find_one()
print "Finding users with activites in common with %s. \n%s's activities are: %s" %(comp_user.name, comp_user.name, str(make_list(comp_user.activities)))
# server-side JS reducer tallies matching edges per user; count starts at 0 in `initial`
reducer=Code("function(obj, result) {result.count+=1 }")
query = {
'inboundId':{'$in':[act._id for act in list(comp_user.activities)]},
'name':'activities',
'outboundCollection':User.COLLECTION_NAME,
'outboundId': {'$ne':comp_user._id},
}
common_activities_users = Edge.COLLECTION.group(key=['outboundId'],
condition=query,
initial={"count": 0},
reduce=reducer)
print common_activities_users
for cau in common_activities_users:
print '%s has %s activities in common with %s'%(comp_user.name, cau['count'], User.from_id(cau['outboundId']).name)
|
{"/whiskeynode/voter.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/events.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/tests/test_edge.py": ["/whiskeynode/edges.py"], "/whiskeynode/traversals.py": ["/whiskeynode/terminaltypes.py"], "/whiskeynode/edges.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/whiskeycache.py": ["/whiskeynode/exceptions.py"]}
|
20,617
|
texuf/whiskeynode
|
refs/heads/master
|
/whiskeynode/events.py
|
from bson.objectid import ObjectId
from whiskeynode import WhiskeyNode
from whiskeynode.db import db
from whiskeynode.exceptions import InvalidEdgeParameterException
from whiskeynode.fieldtypes import _none
class WhiskeyEvent(WhiskeyNode):
'''
Append-only event record describing something that happened to a node.

DOCUMENTBASE PROPERTIES
'''
COLLECTION_NAME = 'whiskeynode_events'
COLLECTION = db[COLLECTION_NAME]
FIELDS = {
'nodeId': _none, #ObjectId of the node the event refers to
'collection':unicode,
'currentUserId':_none,
'data':dict,
'type':unicode,
}
def __init__(self, *args, **kwargs):
WhiskeyNode.__init__(self, *args, **kwargs)
@classmethod
def init_terminals(cls):
# events carry no graph edges
cls.TERMINALS = {
}
@classmethod
def create(cls, node, event_type, data, current_user_id):
# writes the raw dict directly, bypassing WhiskeyNode.save and its
# createdAt/lastModified bookkeeping; the ObjectId carries the timestamp
return cls.COLLECTION.save({
'_id':ObjectId(),
'nodeId':node._id,
'collection':node.COLLECTION_NAME,
'currentUserId':current_user_id,
'type':event_type,
'data':data,
})
|
{"/whiskeynode/voter.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/events.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/tests/test_edge.py": ["/whiskeynode/edges.py"], "/whiskeynode/traversals.py": ["/whiskeynode/terminaltypes.py"], "/whiskeynode/edges.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/whiskeycache.py": ["/whiskeynode/exceptions.py"]}
|
20,618
|
texuf/whiskeynode
|
refs/heads/master
|
/examples/helpers.py
|
class Nameable():
    '''little mixin to pull records by name'''
    @classmethod
    def from_name(cls, name):
        # reuse an existing record when one matches, otherwise create one
        existing = cls.find_one({'name': name})
        if existing:
            return existing
        return cls({'name': name})


def make_list(items):
    ''' takes list of Nameable or string, returns punctiated string - any library version shouldn't include a period '''
    if not items:
        return 'none.'
    # normalize to plain strings up front, then join
    if isinstance(items[0], Nameable):
        words = [x.name for x in items]
    else:
        words = list(items)
    if len(words) == 1:
        return '%s.' % words[0]
    return '%s and %s.' % (', '.join(words[:-1]), words[-1])
|
{"/whiskeynode/voter.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/events.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/tests/test_edge.py": ["/whiskeynode/edges.py"], "/whiskeynode/traversals.py": ["/whiskeynode/terminaltypes.py"], "/whiskeynode/edges.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/whiskeycache.py": ["/whiskeynode/exceptions.py"]}
|
20,619
|
texuf/whiskeynode
|
refs/heads/master
|
/whiskeynode/terminaltypes.py
|
class TerminalType():
    """String constants naming the two terminal cardinalities."""
    NODE = 'node'
    LIST_OF_NODES = 'list_of_nodes'
class TerminalDict(dict):
    """dict with attribute-style access; missing attributes read as None.

    Holds a node's terminal-name -> terminal mapping.
    """
    def __getattr__(self, attr):
        return self.get(attr, None)
    __setattr__ = dict.__setitem__
    __delattr__ = dict.__delitem__
    def __repr__(self):
        """Render keys sorted, one per line, for stable debug output."""
        # FIX: dict.keys() is a view on Python 3 and has no .sort();
        # sorted() behaves identically on Python 2 and 3.
        ret_val = ['{\n']
        for key in sorted(self.keys()):
            ret_val.append(' %s: %r\n' % (key, self[key]))
        ret_val.append('}')
        return ''.join(ret_val)
|
{"/whiskeynode/voter.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/events.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/tests/test_edge.py": ["/whiskeynode/edges.py"], "/whiskeynode/traversals.py": ["/whiskeynode/terminaltypes.py"], "/whiskeynode/edges.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/whiskeycache.py": ["/whiskeynode/exceptions.py"]}
|
20,620
|
texuf/whiskeynode
|
refs/heads/master
|
/whiskeynode/db.py
|
import os

## Environment
# ENVIRONMENT selects the backing store; default 'test' uses mongomock
environment = os.environ.get('ENVIRONMENT', 'test')

## Database
# FIX: removed no-op module-level `global mongo` statements
# (`global` has no effect at module scope).
if environment == 'test':
    import mongomock as mongo
else:
    import pymongo as mongo

db_uri = os.environ.get('MONGOLAB_URI', 'mongodb://localhost')
db_name = os.environ.get('MONGOLAB_DB', 'whiskeynode')
# the single shared database handle used by the whole package
db = mongo.MongoClient(db_uri)[db_name]
|
{"/whiskeynode/voter.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/events.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/tests/test_edge.py": ["/whiskeynode/edges.py"], "/whiskeynode/traversals.py": ["/whiskeynode/terminaltypes.py"], "/whiskeynode/edges.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/whiskeycache.py": ["/whiskeynode/exceptions.py"]}
|
20,621
|
texuf/whiskeynode
|
refs/heads/master
|
/tests/test_edge.py
|
from unittest import TestCase
from whiskeynode.edges import Edge
class EdgeBaseTest(TestCase):
    """Smoke tests for Edge construction."""

    def tearDown(self):
        # keep each test isolated by wiping the edge collection
        Edge.COLLECTION.drop()

    def test_init_should_return_yeah(self):
        edge = Edge()
        self.assertIsInstance(edge, Edge)
|
{"/whiskeynode/voter.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/events.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/tests/test_edge.py": ["/whiskeynode/edges.py"], "/whiskeynode/traversals.py": ["/whiskeynode/terminaltypes.py"], "/whiskeynode/edges.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/whiskeycache.py": ["/whiskeynode/exceptions.py"]}
|
20,622
|
texuf/whiskeynode
|
refs/heads/master
|
/whiskeynode/__init__.py
|
from bson.objectid import ObjectId, InvalidId
from collections import deque
from datetime import datetime
from functools import partial
from pprint import pformat
from pprintpp import pprint
from whiskeynode import whiskeycache
from whiskeynode.db import db
from whiskeynode.exceptions import (BadEdgeRemovalException,
CollectionNotDefinedException,
ConnectionNotFoundException,
FieldNameNotDefinedException,
InvalidFieldNameException,
InvalidConnectionNameException,)
from whiskeynode.fieldtypes import FieldDict
from whiskeynode.terminaltypes import TerminalDict, TerminalType
from copy import copy, deepcopy
import itertools
import os
environment = os.environ.get('ENVIRONMENT')

# module-wide monotonically increasing counter used to tag save operations
save_id = 0


#helper function for current user id
def get_current_user_id():
    """Hook for host apps to supply the acting user's id; defaults to None."""
    return None


def get_new_save_id():
    """Return the current save counter and advance it by one."""
    global save_id
    current = save_id
    save_id += 1
    return current
''' WhiskeyNode '''
class WhiskeyNode(object):
"""Abstract mongo-backed graph node: declared FIELDS become attributes,
TERMINALS/TRAVERSALS wire edges to other nodes, and instances are RAM-cached."""
''' REQUIRED OVERRIDES '''
#DATABASE, override these variables in your class
COLLECTION_NAME = '_whiskeynode'
COLLECTION = db[COLLECTION_NAME]
FIELDS = {
#'name':type
}
PRE_RENDER_FIELDS = ['createdAt', 'lastModified']
''' DEFAULT PROPERTIES '''
#DATABASE FIELDS, fields must be bson types
DEFAULT_FIELDS = {
'_id': ObjectId,
'createdAt' : datetime.now, #storing created at to send to the client - to search on createdAt, use the _id's date properties
'lastModified': datetime.now,
}
#ENSURE_INDEXES, indexed fields are indexed during the database migration,
# for performance reasons try not to index anything, if an index should be unique,
# it should also be added to ENSURE_UNIQUE_INDEXES
ENSURE_INDEXES = set(
[
#'name',
])
ENSURE_UNIQUE_INDEXES = set(
[
#'name'
])
#DATABASE FIELD MANAGEMENT, these properties auto manage data sent to and from the client
DO_NOT_UPDATE_FIELDS = set([])
DEFAULT_DO_NOT_UPDATE_FIELDS = set(
[
'_id',
'createdAt',
'lastModified',
])
DO_NOT_RENDER_FIELDS = set([])
DEFAULT_DO_NOT_RENDER_FIELDS = set(
[
'_id',
])
RESERVED_FIELDS = set(
[
'messages',
'terminals',
'fields',
])
TERMINALS = {}
TRAVERSALS = {}
fields = {} #PRIVATE: merged DEFAULT_FIELDS + FIELDS, built lazily per class in __init__
check_errors = True #set False to skip sanity checks (really speeds up initialization)
def __init__(self, init_with=None, dirty=True):
"""Build a node from a raw mongo dict (init_with) and register it in whiskeycache.

dirty=False marks the node as clean, i.e. freshly loaded from the db.
"""
if self.check_errors:
assert self.__class__ != WhiskeyNode, 'WhiskeyNode is meant to be an abstract class'
self._dict = init_with if init_with else {} #why a variable here? store everything that we get out of mongo, so we don't have data loss
self._dirty = dirty
self._is_new_local = False
self._save_record = {}
self._terminals = None
self._traversals = None
self.DO_NOT_RENDER_FIELDS.update(self.DEFAULT_DO_NOT_RENDER_FIELDS)
self.DO_NOT_UPDATE_FIELDS.update(self.DEFAULT_DO_NOT_UPDATE_FIELDS)
#INIT CLASS FIELDS
# lazily merge DEFAULT_FIELDS with the subclass's FIELDS, once per class
if self.__class__.fields == {}:
self.__class__.fields = FieldDict(self.DEFAULT_FIELDS, **self.FIELDS)
#self.fields = self.__class__._FIELDS.copy()
#INIT CLASS TERMINALS
if self.__class__.TERMINALS == {}:
self.__class__.init_terminals()
for name in self.__class__.TERMINALS:
self.__class__._add_terminal_property(self, name)
if self.check_errors:
# field names may not collide with reserved names, terminal names, or instance attrs
bad_fields = set(self.__class__.fields.keys()).intersection(list(self.__class__.RESERVED_FIELDS) + self.__class__.TERMINALS.keys() + self.__dict__.keys())
if len(bad_fields) > 0:
raise InvalidFieldNameException('Fields %s cannot be used on class %s because they are reserved or terminals.' % (str(bad_fields), str(self.__class__)))
for name in self.__class__.TRAVERSALS:
self.__class__._add_traversal_property(self, name)
#INIT INSTANCE FIELDS
for field, field_type in self.fields.items():
try: #this is in two places to prevent a function call in a loop
if field_type is dict or field_type is list:
self.__dict__[field] = deepcopy(self._dict[field]) #make a copy so we can compare it later (this is in two places)
else:
self.__dict__[field] = self._dict[field]
except KeyError:
self.__dict__[field] = field_type()
if field == '_id':
# no _id supplied -> this node was created locally, not loaded from mongo
self._is_new_local = True
for field, trav in self.traversals.items():
try:
self.__dict__[field] = self._dict[field]
except KeyError:
self.__dict__[field] = trav.default_value
whiskeycache.save( self )
##
## classmethods
##
@classmethod
def distinct(cls, field):
    """Return the distinct values of *field* across this class's collection."""
    # FIX: the first parameter of a classmethod is the class; renamed self -> cls
    return cls.COLLECTION.distinct(field)
@classmethod
def drop(cls):
    ''' usefull for testing, not sure if this should be used in production ever '''
    # remove node-by-node so that each node's edges are cleaned up too
    for node in cls.find():
        node.remove()
@classmethod
def find(cls, query={}, limit=0, skip_cache=False, sort=None, skip=0):
'''
Returns an iterator of whiskeynodes SORTED HIGHEST TO LOWEST _id (most recent first)
all params are passed to pymongo except skip_cache - this allows you to make complex queries to mongodb
'''
# NOTE(review): mutable default `query={}` is shared across calls; safe only
# if nothing downstream mutates it -- confirm
if sort is None:
sort = [('_id', -1)]
else:
assert isinstance(sort, list) and len(sort) >= 1, 'sort should be a list of tuples'
assert isinstance(sort[0], tuple), 'sort should be a list of tuples'
existing = deque( whiskeycache.find(cls, query, sort)) if not skip_cache else [] #grab the items we already have in RAM
if limit > 0:
cursor = cls.COLLECTION.find(query, limit=limit+skip).sort(sort) #otherwise, hit the db, todo, pass a $notin:_ids
else:
cursor = cls.COLLECTION.find(query).sort(sort) #todo - take out the if else after fixing mongo mock
class WhiskeyCursor():
# merges the in-RAM (possibly unsaved) results with the db cursor,
# yielding in `sort` order and de-duplicating by _id
def __init__(self, existing, cursor, limit=0, skip=0):
self.existing = existing
self.cursor = cursor
self.__count = None
self.__limit = limit
self.__retrieved = 0
self.__d = None
if skip > 0:
# consume `skip` merged items up front, then reset the retrieved counter
skipped = 0
for s in self:
skipped += 1
if skipped >= skip:
self.__retrieved = 0
break
def __iter__(self):
return self
def __next__(self):
''' python 3 '''
return self.next()
def next(self):
''' this will return the items in cache and the db'''
if self.__limit == 0 or self.__retrieved < self.__limit:
self.__retrieved = self.__retrieved + 1
if len(self.existing) > 0:
# self.__d holds a db document peeked but not yet yielded
if self.__d is None:
try:
self.__d = self.cursor.next()
except StopIteration:
return self.existing.popleft()
d = self.__d
# compare the heads of cache and db cursor on the primary sort
# key and yield whichever comes first
attr_existing = getattr(self.existing[0], sort[0][0])
attr_d = d.get(sort[0][0])
if sort[0][1] == -1:
if attr_existing > attr_d:
return self.existing.popleft()
else:
if attr_existing < attr_d:
return self.existing.popleft()
if self.existing[0]._id == d['_id']:
# same document in both: prefer the cached instance
self.__d = None
return self.existing.popleft()
else:
self.__d = None
rv = whiskeycache.from_cache(cls, d, dirty=False)
try:
self.existing.remove(rv) #todo test to see if "rv in self.existing" is faster than try excepting
except ValueError:
pass
return rv
else:
if self.__d:
d = self.__d
self.__d = None
return whiskeycache.from_cache(cls, d, dirty=False)
else:
return whiskeycache.from_cache(cls, self.cursor.next(), dirty=False)
raise StopIteration()
def count(self):
''' NOTE - this count isn't exactly accurate
since we don't know how many items will already be in the cache, but it's pretty close '''
if self.__count is None:
#self.__count = len(self.existing) + self.cursor.count()
self.__count = self.cursor.count() #we're only looking at what's actually in the db for now...
for x in self.existing:
if x._is_new_local:
self.__count = self.__count + 1
return self.__count
def limit(self, limit):
self.__limit = self.cursor.limit = limit
def __len__(self):
return self.count()
return WhiskeyCursor(existing, cursor, limit, skip)
@classmethod
def find_one(cls, query={}):
'''Returns one node as a Node object or None. Checks the RAM cache first,
then falls back to the newest matching db document.'''
from_cache = whiskeycache.find_one(cls, query)
if from_cache is not None:
return from_cache
else:
data = cls.COLLECTION.find_one(query, sort=[('_id',-1)])
if data is not None:
return whiskeycache.from_cache(cls, data, dirty=False)
else:
return None
@classmethod
def from_dbref(cls, collection, _id):
''' try to avoid using this function - it's not recommended in the mongodb docs '''
data = db[collection].find_one({'_id':_id})
if data:
c = cls.from_dict(data)
# rebind the instance's collection so subsequent saves target the DBRef's collection
c.COLLECTION_NAME = collection
c.COLLECTION = db[collection]
return c
else:
return None
@classmethod
def from_dict(cls, data, dirty=False):
    """Build (or fetch from the RAM cache) a node from a raw mongo dict; None passes through."""
    return None if data is None else whiskeycache.from_cache(cls, data, dirty)
@classmethod
def from_id(cls, _id):
'''Returns a node based on the _id field.
if objectid is a string it will try to cast it to an objectid'''
if type(_id) is not ObjectId:
try:
_id = ObjectId(_id)
except InvalidId:
# unparseable id strings resolve to None rather than raising
return None
rv = whiskeycache.from_id(_id, cls.COLLECTION_NAME)
return rv if rv else cls.find_one({'_id': _id})
@classmethod
def from_ids(cls, ids):
'''Batch lookup: serve what we can from the RAM cache, fetch the rest
from mongo in a single $in query. Result order is not preserved.'''
if len(ids) == 0:
return []
if not isinstance(ids[0], ObjectId):
ids = [ObjectId(x) for x in ids]
to_query = []
to_return = []
for _id in ids:
if _id in whiskeycache.RAM:
to_return.append(whiskeycache.RAM[_id])
else:
to_query.append(_id)
if len(to_query) > 0:
cursor = cls.COLLECTION.find({'_id':{'$in':to_query}})
to_return.extend([whiskeycache.from_cache(cls, data, dirty=False) for data in cursor])
return to_return
@classmethod
def init_terminals(cls):
    """Default hook: no terminals or traversals; subclasses override to declare edges."""
    cls.TERMINALS = {}
    cls.TRAVERSALS = {}
##
## properties
##
@property
def guid(self):
''' string form of _id; for migrating to the new code base, this doesn't get saved to the db '''
return str(self._id)
@property
def terminals(self):
# lazily built map of terminal name -> terminal instance
if self._terminals is None:
self._terminals = TerminalDict()
self._init_terminals()
return self._terminals
@property
def traversals(self):
# lazily built map of traversal name -> traversal instance
if self._traversals is None:
self._traversals = TerminalDict()
self._init_traversals()
return self._traversals
##
## functions
##
def add_field(self, field, field_type, render=True, update=True, dirty=True):
"""Declare an extra field on this node at runtime.

NOTE(review): self.fields is the class-shared FieldDict, so the type
registration leaks to the class, not just this instance -- confirm intended.
"""
if self.check_errors:
self._check_add_field_errors(field, field_type)
try: #this is in two places to prevent a function call in a loop
if field_type is dict or field_type is list:
self.__dict__[field] = deepcopy(self._dict[field]) #make a copy so we can compare it later
else:
self.__dict__[field] = self._dict[field]
except KeyError:
self.__dict__[field] = field_type()
self.fields[field] = field_type
if render == False:
self.DO_NOT_RENDER_FIELDS.add(field)
if update == False:
self.DO_NOT_UPDATE_FIELDS.add(field)
self._dirty = self._dirty or dirty
def add_inbound_edge(self, name, edge):
# delegate to the matching inbound/bidirectional terminal, if any
terminal = self._get_inbound_terminal(name, edge)
if terminal is not None:
terminal.add_inbound_edge(edge)
def add_outbound_edge(self, name, edge):
if name in self.terminals:
self.terminals[name].add_outbound_edge(edge)
def add_terminal(self, name, connection_def):
self._add_terminal(self, name, connection_def)
self._add_terminal_property(self, name)
def get_field(self, name, default=None):
''' for generically getting fields on a whiskey node; falls back to the raw mongo dict '''
try:
return self.__dict__[name]
except KeyError:
return self._dict.get(name, default)
def get_inbound_edges(self):
    """Return all Edge documents whose inbound end is this node."""
    from whiskeynode.edges import Edge
    return Edge.find({'inboundId':self._id}) #don't worry, find's are cached at the WN level
def _get_inbound_terminal(self, name, edge):
inbound_terminals = [terminal for terminal in self.terminals.values() if \
terminal.name == name and \
(terminal.direction == 'INBOUND' or terminal.direction == 'BIDIRECTIONAL') and \
terminal.to_node_class.COLLECTION_NAME == edge.outboundCollection]
assert len(inbound_terminals) <= 1, 'why do we have more than one terminal?'
return inbound_terminals[0] if len(inbound_terminals) > 0 else None
def get_outbound_edges(self):
    """Return all Edge documents whose outbound end is this node."""
    from whiskeynode.edges import Edge
    return Edge.find({'outboundId':self._id}) #don't worry, find's are cached at the WN level
def has_terminal(self, name):
    """Return True when a terminal called *name* exists on this node."""
    return name in self.terminals
def pre_render(self):
    """Build a minimal dict of PRE_RENDER_FIELDS plus this node's guid.

    Each field is taken from the live attribute when set, otherwise from
    the raw document; fields missing from both are omitted.
    """
    sentinel = object()
    rendered = {}
    for field in self.PRE_RENDER_FIELDS:
        value = self.__dict__.get(field, sentinel)
        if value is sentinel:
            value = self._dict.get(field, sentinel)
        if value is not sentinel:
            rendered[field] = value
    rendered['guid'] = str(self._id)
    return rendered
def remove(self):
    ''' removes this node and all inbound and outbound edges pointing to this node'''
    # Detach outbound edges, notifying any cached inbound peer nodes first.
    ob = list(self.get_outbound_edges())
    for edge in ob:
        if edge.inboundId in whiskeycache.RAM:
            whiskeycache.RAM[edge.inboundId].remove_inbound_edge(edge.name, edge)
        edge.remove()
    # Same for inbound edges and cached outbound peers.
    ib = list(self.get_inbound_edges())
    for edge in ib:
        if edge.outboundId in whiskeycache.RAM:
            whiskeycache.RAM[edge.outboundId].remove_outbound_edge(edge.name, edge)
        edge.remove()
    # Finally evict from the cache and delete the document itself.
    whiskeycache.remove(self)
    self.COLLECTION.remove(self._id)
def remove_field(self, field):
    """Drop *field* from both the live attributes and the raw document."""
    self.__dict__.pop(field, None)
    self._dict.pop(field, None)
def remove_inbound_edge(self, name, edge):
    """Detach *edge* from the matching inbound terminal, if one exists."""
    target = self._get_inbound_terminal(name, edge)
    if target is None:
        return
    target.remove_inbound_edge(edge)
def remove_outbound_edge(self, name, edge):
    # Detach *edge* from the named terminal, validating direction and the
    # edge's target collection when error checking is enabled.
    if name in self.terminals:
        terminal = self.terminals[name]
        if self.check_errors:
            assert (terminal.direction == 'OUTBOUND' or terminal.direction== 'BIDIRECTIONAL') and \
                terminal.to_node_class.COLLECTION_NAME == edge.inboundCollection, 'bad edge removal'
        terminal.remove_outbound_edge(edge)
def render(self, render_terminals=True):
    """Serialize the node to a dict for output.

    Honors DO_NOT_RENDER_FIELDS, optionally renders any populated
    terminals, and always includes the node's guid.
    """
    data = self._to_dict()
    for field in self.DO_NOT_RENDER_FIELDS:
        try:
            del data[field]
        except KeyError:
            pass
    if render_terminals:
        for key, terminal in self.terminals.items():
            if terminal._render and terminal.exists():
                data[key] = terminal.render()
    data['guid'] = str(self._id)
    return data
def render_pretty(self, do_print=True, *args, **kwargs):
    # Debug convenience wrapper around render().
    # NOTE(review): pprint (pprintpp) prints as a side effect and its
    # return value may be None, so `print r` / `return r` likely yield
    # None -- confirm intended behavior.
    rendr = self.render(*args, **kwargs)
    r = pprint(rendr)
    if do_print:
        print r
    else:
        return r
def save(self, update_last_modified=True, current_user_id = None, save_id=None, save_terminals=True):
    """Persist the node, and optionally its terminals, when dirty.

    *save_id* tags one logical save operation; it is recorded in
    self._save_record so cycles in the terminal graph are saved once.
    Returns self for chaining.
    """
    if save_id is None:
        save_id = get_new_save_id()
    if current_user_id is None:
        current_user_id=get_current_user_id()
    if save_id not in self._save_record:
        self._save_record[save_id] = True #prevent infinite recursive loops
        data = self._to_dict()
        # Save when explicitly dirty or when the serialized state differs.
        is_saving = self._dirty or self._diff_dict(data)
        #from logger import logger
        #logger.debug( '--------------- save ' + str(self) + " : " + str(data.get('name','')))
        if is_saving:
            #for k in data:
            #    if self._dict.get(k) is None or cmp(data[k], self._dict.get(k)) != 0:
            #        try:
            #            logger.debug( '!! ' + k + " : " + str(data[k]) + " : " + str(self._dict.get(k)))
            #        except UnicodeEncodeError:
            #            logger.debug( '!! ' + k + " : bad UnicodeEncodeError")
            if self.check_errors:
                assert self._id is not None and self._id != ''
            if update_last_modified:
                data['lastModified'] = self.lastModified = datetime.now()
            if self.check_errors:
                assert self.COLLECTION_NAME != '_whiskeynode', 'COLLECTION_NAME has not ben defined for class %s' % self.__class__
            #save to db
            #logger.debug('+++++++++++++++++ save ' + str(self) + " : " + str(data.get('name','')))
            key = self.COLLECTION.save(data, safe=True)
            self._dirty = False
            self._is_new_local = False
            #record changes in event if requested
            self.on_save(new_dict=data, old_dict=self._dict)
            #reset our current state
            self._dict = data
        #save our terminals
        if save_terminals:
            for name, terminal in self.terminals.items():
                terminal.save(update_last_modified=update_last_modified, current_user_id=current_user_id, save_id=save_id)
    return self
def on_save(self, new_dict, old_dict):
    """Hook invoked after a successful db write; subclasses may override
    to record change events. *old_dict* is the pre-save document state."""
    pass
def set_field(self, name, value):
    """Generic field setter; declares the field (typed from *value*) on
    first use, then assigns the attribute."""
    already_declared = name in self.fields
    if not already_declared:
        self.add_field(name, type(value))
    self.__dict__[name] = value
def _to_dict(self):
    # Serialize: merge declared field values (and traversal values) over
    # the raw document; containers are deep-copied so later diffs against
    # self._dict see a true snapshot.
    data = self._dict.copy()
    for field, field_type in self.fields.items():
        value = getattr(self, field)
        if value is not None:
            if field_type is dict or field_type is list:
                data[field] = deepcopy(value) #make a copy so we can compare it later
            else:
                data[field] = value
    for field in self.TRAVERSALS:
        value = getattr(self, field)
        if value is not None:
            data[field] = value
    return data
def to_dbref(self):
    """Return a lightweight db-reference dict (_id + collection name)."""
    ref = {}
    ref['_id'] = self._id
    ref['collection'] = self.COLLECTION_NAME
    return ref
def update(self, data):
    '''Performs an update on the node from a dict. Does not save.'''
    # Only fields not excluded via DO_NOT_UPDATE_FIELDS may be written.
    fields = set(self.fields.keys()) - self.DO_NOT_UPDATE_FIELDS
    for field in fields:
        if field in data:
            self.__dict__[field] = data[field]
    if self.check_errors and environment != 'production':
        # Dev/test only: reject unknown keys outright.
        # NOTE(review): `.keys() + .keys()` requires Python 2 list keys.
        legit_fields = self.fields.keys() + self.terminals.keys() + self.traversals.keys() + ['guid']
        bad_fields = set(data.keys()) - set(legit_fields)
        if len(bad_fields) > 0:
            raise FieldNameNotDefinedException('Fields names %s with values %s are not defined in class [%s]' % (str(list(bad_fields)), str([(x,data[x]) for x in bad_fields]), self.__class__))
##
## Class level helpers
##
def _check_add_field_errors(self, field, field_type):
if field in self.__dict__ or field in self.RESERVED_FIELDS or field in self.TERMINALS:
raise InvalidFieldNameException('Field name [%s] on %s is not valid because it is a reserved field or a terminal' % (field, self.__class__))
def _init_terminals(self):
    # Instantiate every class-declared terminal definition on this node.
    for name, connection_def in self.TERMINALS.items():
        self._add_terminal(self, name, connection_def)
@classmethod
def _add_terminal(cls, self, name, connection_def):
    # Instantiate one terminal on *self* under *name*; duplicate names are
    # rejected when error checking is enabled.
    if cls.check_errors:
        if name in self.terminals:
            raise InvalidConnectionNameException('Terminal name [%s] on %s is not valid because it is already in use.' % (name, self.__class__))
    self.terminals[name] = connection_def(self, name)
@classmethod
def _add_terminal_property(cls, self, name):
    # Expose the terminal as a class-level property (get/set/del route to
    # the terminal instance). Only installed once per class per name.
    if cls.check_errors:
        if name in self.RESERVED_FIELDS or name in self.fields or name in self.__dict__:
            raise InvalidConnectionNameException('Terminal name [%s] on %s is not valid because it is a reserved field.' % (name, self.__class__))
    if not hasattr(cls, name):
        setattr(cls,
                name,
                property(
                    partial(cls.__get_terminal, name=name),
                    partial(cls.__set_terminal, name=name),
                    partial(cls.__del_terminal, name=name)))
def __get_terminal(self, name):
    # Property getter installed by _add_terminal_property.
    return self.terminals[name].get_self()
def __set_terminal(self, value, name):
    # Property setter: delegates to the terminal's set().
    return self.terminals[name].set(value)
def __del_terminal(self, name):
    # Property deleter: delegates to the terminal's delete().
    return self.terminals[name].delete()
def _init_traversals(self):
    # Instantiate every class-declared traversal and expose it as a
    # class-level property; duplicate names are rejected when checking.
    for name, traversal_def in self.TRAVERSALS.items():
        if self.check_errors:
            if name in self.traversals:
                raise InvalidConnectionNameException('Traversal name [%s] on %s is not valid because it is already in use.' % (name, self.__class__))
        self.traversals[name] = traversal_def(self, name)
        self._add_traversal_property(self, name)
@classmethod
def _add_traversal_property(cls, self, name):
    # Mirror of _add_terminal_property for traversals.
    if cls.check_errors:
        if name in self.RESERVED_FIELDS:
            raise InvalidConnectionNameException('Traversal name [%s] on %s is not valid because it is a reserved field.' % (name, self.__class__))
    if not hasattr(cls, name):
        setattr(cls,
                name,
                property(
                    partial(cls.__get_traversal, name=name),
                    partial(cls.__set_traversal, name=name),
                    partial(cls.__del_traversal, name=name)))
def __get_traversal(self, name):
    # Property getter installed by _add_traversal_property.
    return self.traversals[name].get()
def __set_traversal(self, value, name):
    # Property setter: delegates to the traversal's set().
    return self.traversals[name].set(value)
def __del_traversal(self, name):
    # Property deleter: delegates to the traversal's delete().
    return self.traversals[name].delete()
def _diff_dict(self, target_dict):
''' return false if same same, true if we find diffs '''
return cmp(self._dict, target_dict) != 0
def __eq__(self, other):
    # Equality is by _id only.
    # NOTE(review): comparing against a non-None object without an `_id`
    # attribute raises AttributeError -- confirm acceptable.
    return other != None and self._id == other._id
def __ne__(self, other):
    return other == None or self._id != other._id
def to_string(self):
    """Return a key-safe (period-free) identifier: '<ClassName>:<guid>'."""
    class_name = self.__class__.__name__
    return class_name + ':' + self.guid
def str_to_objectid(guid):
    """Best-effort cast of *guid* (string or ObjectId) to an ObjectId.

    Returns None unchanged, passes ObjectIds straight through, and falls
    back to returning the original value when the cast fails.
    """
    if guid is None:
        return None
    if type(guid) is ObjectId:
        return guid
    try:
        return ObjectId(guid)
    except Exception:
        # narrowed from a bare `except:` -- still best-effort, but no
        # longer swallows SystemExit/KeyboardInterrupt
        return guid
|
{"/whiskeynode/voter.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/events.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/tests/test_edge.py": ["/whiskeynode/edges.py"], "/whiskeynode/traversals.py": ["/whiskeynode/terminaltypes.py"], "/whiskeynode/edges.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/whiskeycache.py": ["/whiskeynode/exceptions.py"]}
|
20,623
|
texuf/whiskeynode
|
refs/heads/master
|
/whiskeynode/traversals.py
|
from functools import partial
from whiskeynode.terminaltypes import TerminalType
def lazy_traversal(path, render=True, default_value=None, default_attr=None):
    """Return a factory that binds *path* (``'<terminal>.<field>'``) into a
    LazyTraversal when later called with (origin_node, name)."""
    return partial(LazyTraversal, path, render=render, default_value=default_value, default_attr=default_attr)
class LazyTraversal():
    """Resolves a '<terminal>.<field>' path against an origin node on demand."""
    def __init__(self, path, origin_node, name, render=True, default_value=None, default_attr=None):
        self.render = render
        self.name = name
        self.node = origin_node
        self.path_parts = path.split('.')
        self.default_value = default_value
        self.default_attr = default_attr
        if len(self.path_parts) < 2:
            assert 0, 'Lazy traversals should be declared as <terminal_name>.<field_value>'
        if len(self.path_parts) > 2:
            assert 0, 'Support for more than one traversal hasn\'t been developed, why don\'t you give it a shot?'
        self.terminal_name = self.path_parts[0]
        self.field_name = self.path_parts[1]
    def get(self):
        # Only traverse when the terminal has been activated; otherwise fall
        # through to the cached/default value at the bottom.
        if self.node.terminals[self.terminal_name].activated:
            if self.field_name == 'exists':
                return self.node.terminals[self.terminal_name].exists()
            #LISTS
            if self.node.terminals[self.terminal_name].terminaltype == TerminalType.LIST_OF_NODES:
                terminal = getattr(self.node, self.terminal_name, [])
                if self.field_name == 'count':
                    return terminal.count()
                elif len(terminal) > 0:
                    #just grab the property off the first item in the list
                    return getattr(terminal[0], self.field_name)
            #NODES
            else:
                if self.default_attr is not None:
                    return getattr(getattr(self.node, self.terminal_name, {}), self.field_name, getattr(self.node, self.default_attr, self.default_value))
                else:
                    return getattr(getattr(self.node, self.terminal_name, {}), self.field_name, self.default_value)
        # default behavior: value previously stored on the node, else defaults
        if self.default_attr is not None:
            return self.node.__dict__.get(self.name, getattr(self.node, self.default_attr, self.default_value))
        else:
            return self.node.__dict__.get(self.name, self.default_value)
    def set(self, value):
        assert 0, 'Traversals don\'t support set... yet'
    def delete(self):
        assert 0, 'Traversals don\'t suppot delete... yet'
|
{"/whiskeynode/voter.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/events.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/tests/test_edge.py": ["/whiskeynode/edges.py"], "/whiskeynode/traversals.py": ["/whiskeynode/terminaltypes.py"], "/whiskeynode/edges.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/whiskeycache.py": ["/whiskeynode/exceptions.py"]}
|
20,624
|
texuf/whiskeynode
|
refs/heads/master
|
/whiskeynode/edges.py
|
from whiskeynode import WhiskeyNode
from whiskeynode.db import db
from whiskeynode.exceptions import InvalidEdgeParameterException
from whiskeynode.fieldtypes import _none
class Edge(WhiskeyNode):
    '''
    A directed edge document connecting two WhiskeyNode documents by _id.

    DOCUMENTBASE PROPERTIES
    '''
    COLLECTION_NAME = 'whiskeynode_edges'
    COLLECTION = db[COLLECTION_NAME]
    FIELDS = {
        'inboundId': _none, #query for edges with an inboundId that matches mine for all connections pointing to me
        'inboundCollection':_none,
        'name':unicode,
        'outboundId': _none, #query for edges with an outboundId that matches mine for all my connections
        'outboundCollection': _none,
        'terminalType':unicode,
        'data':dict, #don't use this if you can help it. created for AttributedNodeListManager
    }
    ENSURE_INDEXES = [
        #todo - i want to sort these by _id - newest first, may need to update the indexes
        [('inboundId',1), ('outboundCollection',1), ('name',1)],
        [('outboundId',1), ('name',1)],
        [('name', 1), ('outboundCollection', 1), ('createdAt', 1)], #for the metrics
    ]
    def __init__(self, *args, **kwargs):
        WhiskeyNode.__init__(self, *args, **kwargs)
    @classmethod
    def init_terminals(cls):
        # Intentionally empty; example of declaring terminals/traversals:
        '''
        from whiskeynode.terminals import outbound_list
        from whiskeynode.traversals import lazy_traversal
        from whiskeynode.users.user import User
        cls.TRAVERSALS = {
            'votes':lazy_traversal('voters.count'),
            'why':lazy_traversal('voters.edges.why')
        }
        cls.TERMINALS = {
            'voters' : outbound_list(User, attributes=['why']),
        }
        '''
    @classmethod
    def create(cls, outbound_id, outbound_collection, inbound_id, inbound_collection, name, terminaltype):
        # Raw constructor from ids/collection names.
        return cls({
            'inboundId':inbound_id,
            'inboundCollection':inbound_collection,
            'outboundId':outbound_id,
            'outboundCollection':outbound_collection,
            'name':name,
            'terminalType':terminaltype,
        })
    @classmethod
    def from_nodes(cls, outbound_node, inbound_node, name, terminaltype):
        # Convenience constructor from two node instances; validates types.
        #if checkerrors
        if not isinstance(outbound_node, WhiskeyNode):
            raise InvalidEdgeParameterException()
        if not isinstance(inbound_node, WhiskeyNode):
            raise InvalidEdgeParameterException()
        return cls.create(
            outbound_node._id,
            outbound_node.COLLECTION_NAME,
            inbound_node._id,
            inbound_node.COLLECTION_NAME,
            name,
            terminaltype,
        )
    def __str__(self):
        return '<Edge %s %s::%s->%s>' % (self.guid, self.name, self.outboundCollection, self.inboundCollection)
|
{"/whiskeynode/voter.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/events.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/tests/test_edge.py": ["/whiskeynode/edges.py"], "/whiskeynode/traversals.py": ["/whiskeynode/terminaltypes.py"], "/whiskeynode/edges.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/whiskeycache.py": ["/whiskeynode/exceptions.py"]}
|
20,625
|
texuf/whiskeynode
|
refs/heads/master
|
/tests/test_whiskeynode_terminals.py
|
from bson.objectid import ObjectId
from unittest import TestCase
from whiskeynode import WhiskeyNode
from whiskeynode.db import db
from whiskeynode.edges import Edge
from whiskeynode.exceptions import InvalidConnectionNameException, InvalidTerminalException, InvalidTerminalStateException
from whiskeynode.terminals import outbound_node, inbound_node, outbound_list, inbound_list, bidirectional_list
from whiskeynode.terminaltypes import TerminalType
from whiskeynode import whiskeycache
#Define a sub doc
class SubNode(WhiskeyNode):
    # Test fixture: child node reachable from ParentNode via node/list terminals.
    COLLECTION_NAME = 'subnode_collection'
    COLLECTION = db[COLLECTION_NAME]
    FIELDS = {
        'sub_prop':unicode,
    }
    def __init__(self, *args, **kwargs):
        WhiskeyNode.__init__(self, *args, **kwargs)
    @classmethod
    def init_terminals(cls):
        cls.TERMINALS = {
            'parent': inbound_node(ParentNode, 'sub_node'),
            'parents': inbound_list(ParentNode, 'sub_node_list')
        }
#Define a parent doc that connects to the sub doc
class ParentNode(WhiskeyNode):
    # Test fixture: parent node with one outbound node and one outbound list.
    COLLECTION_NAME = 'parent_collection'
    COLLECTION = db[COLLECTION_NAME]
    FIELDS = {
        'parent_prop':unicode,
    }
    def __init__(self, *args, **kwargs):
        WhiskeyNode.__init__(self, *args, **kwargs)
    @classmethod
    def init_terminals(cls):
        cls.TERMINALS = {
            'sub_node':outbound_node(SubNode,create_on_request=True),
            'sub_node_list':outbound_list(SubNode),
        }
class InvaldConnectionsNode(WhiskeyNode):
    # Test fixture: declares a terminal named 'terminals', which collides
    # with a reserved name and must raise on instantiation.
    COLLECTION_NAME = 'invalid_collection'
    COLLECTION = db[COLLECTION_NAME]
    FIELDS = {
    }
    @classmethod
    def init_terminals(cls):
        cls.TERMINALS = {
            'terminals':outbound_node(
                to_node_class=SubNode,
                create_on_request=True,
            )
        }
class TreeNode(WhiskeyNode):
    # Test fixture: self-referential node (parent/children form a tree).
    COLLECTION_NAME = 'treenode_collection'
    COLLECTION = db[COLLECTION_NAME]
    FIELDS = {
        'name':unicode
    }
    @classmethod
    def init_terminals(cls):
        cls.TERMINALS = {
            'parent':outbound_node(TreeNode),
            'children':inbound_list(TreeNode, 'parent'),
        }
class NodeBaseConnectionTest(TestCase):
    """Integration tests for terminal behavior: create/save/load round-trips,
    edge cleanup on remove, and inbound/outbound list semantics."""
    def tearDown(self):
        WhiskeyNode.COLLECTION.drop()
        Edge.COLLECTION.drop()
        ParentNode.COLLECTION.drop()
        SubNode.COLLECTION.drop()
    def test_terminals(self):
        parent_node = ParentNode()
        self.assertIsInstance(parent_node, ParentNode)
        # sub_node is created on request (create_on_request=True)
        sub_node = parent_node.sub_node
        self.assertIsInstance(sub_node, SubNode)
        #save parent_node
        parent_node.parent_prop = 'Oh no'
        parent_node.sub_node.sub_prop = 'Oh yes'
        parent_node.save()
        #pull parent_node back out of the db
        parent_node_retrieved = ParentNode.from_id(parent_node._id)
        #make sure the parent doc matches the previous one, and that the sub doc id's match
        self.assertTrue(parent_node_retrieved._id == parent_node._id)
        self.assertTrue(parent_node_retrieved.sub_node._id == sub_node._id)
        #pull the sub doc out of the db, make sure the _id's match
        sub_node_retrieved = SubNode.from_id(sub_node._id)
        self.assertTrue(parent_node.sub_node._id == sub_node_retrieved._id)
        #make sure the property that we set matches
        self.assertTrue(parent_node.sub_node.sub_prop == sub_node_retrieved.sub_prop)
    def test_remove_node_removes_parent_connection(self):
        parent_node = ParentNode()
        sub_node = parent_node.sub_node
        parent_node.save()
        #remove the doc (delete it from the db)
        sub_node.remove()
        #make sure it no longer exists in the db
        self.assertTrue(SubNode.from_id(sub_node._id)==None)
        #make sure requesting it again makes a fresh copy
        #print "requesting fresh copy"
        sub_node2 = parent_node.sub_node
        #print "%s : %s " % (str(sub_node), str(sub_node2))
        self.assertTrue(sub_node._id != sub_node2._id)
    def test_assigning_subdoc(self):
        whiskeycache.clear_cache()
        #print '\n\nRAM: %s\n\nMORE_RAM: %s\n\n' % (whiskeycache.RAM, whiskeycache.MORE_RAM)
        sub_node = SubNode()
        parent_node = ParentNode()
        #print '\n\nRAM: %s\n\nMORE_RAM: %s\n\n' % (whiskeycache.RAM, whiskeycache.MORE_RAM)
        self.assertTrue(sub_node.parent == None)
        #you should be able to set the value of a connection before it's created
        parent_node.sub_node = sub_node
        #print '\n\nRAM: %s\n\nMORE_RAM: %s\n\n' % (whiskeycache.RAM, whiskeycache.MORE_RAM)
        #print 'sub.p '+str(sub_node.parent)
        #print 'parent '+str(parent_node)
        self.assertTrue(sub_node.parent == parent_node)
        parent_node.save()
        whiskeycache.clear_cache()
        parent_node2 = ParentNode.from_id(parent_node._id)
        self.assertTrue(parent_node2 == parent_node)
        #print "parent node id %s subnode id %s" % (str(parent_node2.sub_node._id), str(sub_node._id))
        self.assertTrue(parent_node2.sub_node._id == sub_node._id)
        #print "START"
        #print "DONE"
        #self.assertTrue(False)
        #setting the value again should throw an error
    def test_connection_with_reserved_name_throws_error(self):
        try:
            invalid_doc = InvaldConnectionsNode()
            self.assertTrue(False, "Invalid connection node should raise error")
        except InvalidConnectionNameException:
            pass
    def test_outbound_list_terminal(self):
        Edge.COLLECTION.drop()
        parent = ParentNode()
        for i in range(4):
            parent.sub_node_list.append(SubNode())
        parent.save()
        self.assertTrue(Edge.COLLECTION.find().count() == 4)
        whiskeycache.clear_cache()
        parent2 = ParentNode.from_id(parent._id)
        self.assertTrue(len(parent2.sub_node_list) == 4)
        parent2.sub_node_list.pop()
        self.assertTrue(len(parent2.sub_node_list) == 3)
        parent2.sub_node_list.extend([SubNode(), SubNode()])
        self.assertTrue(len(parent2.sub_node_list) == 5)
        parent2.save()
        #print parent2
        whiskeycache.clear_cache()
        parent3 = ParentNode.from_id(parent._id)
        #print parent3
        self.assertTrue(len(parent3.sub_node_list) == 5)
        #print "Edge.COLLECTION.find().count() %d" % Edge.COLLECTION.find().count()
        self.assertTrue(Edge.COLLECTION.find().count() == 5)
        #parent3.sub_node_list.insert(2, SubNode())
        parent3.sub_node_list.pop(1)
        parent3.sub_node_list.remove(parent3.sub_node_list[0])
        try:
            parent3.sub_node_list.append(ParentNode())
        except AssertionError, e:
            pass
        else:
            raise AssertionError('you can\'t append to inbound lists')
    def test_inbound_node(self):
        parent = ParentNode()
        sub = parent.sub_node
        parent.save()
        self.assertTrue(sub.parent == parent)
        try:
            del sub.parent
        except AssertionError, e:
            pass
        else:
            raise AssertionError('you can\'t delete inbound nodes')
        #print 'removing parent'
        sub.parent.remove()
        self.assertTrue(sub.parent == None)
    def test_inbound_list(self):
        sub = SubNode()
        sub.save()
        p1 = ParentNode()
        p2 = ParentNode()
        p3 = ParentNode()
        p1.sub_node_list.append(sub)
        p2.sub_node_list.append(sub)
        p3.sub_node_list.append(sub)
        #print sub.parent
        p1.save()
        p2.save()
        p3.save()
        self.assertTrue(len(sub.parents) == 3)
        self.assertTrue(sub in sub.parents[0].sub_node_list) # inbound lookup works both ways
        sub.save() #save again to test for infinite recursion (we're connected in a loop here)
        try:
            sub.parents.pop()
        except AssertionError, e:
            pass
        else:
            raise AssertionError('Removing from inbount terminal should assert')
        sub.remove()
        self.assertTrue(len(p1.sub_node_list) == 0)
        self.assertTrue(len(p2.sub_node_list) == 0)
        self.assertTrue(len(p3.sub_node_list) == 0)
    def test_bidirectional_node(self):
        # Disabled: bidirectional terminals not exercised yet.
        return
        '''
        a = BidirectionalNode()
        b = BidirectionalNode()
        c = BidirectionalNode()
        d = BidirectionalNode()
        print "dljfdd" + str(a.nodes)
        print "dljfdd" + str(b.nodes)
        print "dljfdd" + str(c.nodes)
        a.nodes.append(b)
        a.nodes.append(c)
        a.nodes.append(d)
        b.nodes.append(a)
        print "dljfdd" + str(b.nodes)
        self.assertTrue(len(a.nodes) == 3)
        self.assertTrue(len(b.nodes) == 1)
        self.assertTrue(len(c.nodes) == 1)
        c.nodes.append(b)
        self.assertTrue(len(b.nodes) == 2)
        self.assertTrue(len(c.nodes) == 2)
        '''
    def test_tree_node(self):
        t = TreeNode()
        t2 = TreeNode()
        t.parent = t2
        t.save()
|
{"/whiskeynode/voter.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/events.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/tests/test_edge.py": ["/whiskeynode/edges.py"], "/whiskeynode/traversals.py": ["/whiskeynode/terminaltypes.py"], "/whiskeynode/edges.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/whiskeycache.py": ["/whiskeynode/exceptions.py"]}
|
20,626
|
texuf/whiskeynode
|
refs/heads/master
|
/setup.py
|
import os
import platform
import sys
from setuptools import setup

# Runtime dependencies; unittest2 backports the 2.7 unittest API to
# earlier interpreters.
install_requires = ['mongomock', 'pymongo', 'pprintpp']
# Compare version *tuples*, not strings: lexicographic string comparison
# misorders multi-digit components (e.g. '2.10' < '2.7' is True).
if sys.version_info < (2, 7):
    install_requires.append('unittest2')
setup(
    name='whiskeynode',
    version='0.1',
    url='https://github.com/texuf/whiskeynode',
    classifiers = [
        'Programming Language :: Python :: 2.7',
    ],
    description='A graph ORM for MongoDB with a weak-reference cache.',
    license='Apache 2.0',
    author='Austin Ellis',
    author_email='austinellis@gmail.com',
    py_modules=['whiskeynode'],
    install_requires=install_requires,
    scripts=[],
    namespace_packages=[]
)
|
{"/whiskeynode/voter.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/events.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/tests/test_edge.py": ["/whiskeynode/edges.py"], "/whiskeynode/traversals.py": ["/whiskeynode/terminaltypes.py"], "/whiskeynode/edges.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/whiskeycache.py": ["/whiskeynode/exceptions.py"]}
|
20,627
|
texuf/whiskeynode
|
refs/heads/master
|
/whiskeynode/exceptions.py
|
# Exception hierarchy for whiskeynode; everything derives from the package
# root WhiskeyNodeException so callers can catch the whole family at once.
class WhiskeyNodeException(Exception):pass
'''
cache
'''
class WhiskeyCacheException(WhiskeyNodeException):pass
'''
node connections
'''
class ConnectionNotFoundException(WhiskeyNodeException):pass
class FieldNameNotDefinedException(WhiskeyNodeException):pass
class CollectionNotDefinedException(WhiskeyNodeException):pass
class BadEdgeRemovalException(WhiskeyNodeException):pass
class InvalidTerminalParameterException(WhiskeyNodeException):pass
'''
node naming conventions
'''
class InvalidNameException(WhiskeyNodeException):pass
class InvalidEdgeDataException(InvalidNameException):pass
class InvalidFieldNameException(InvalidNameException):pass
class InvalidConnectionNameException(InvalidNameException):pass
class InvalidTerminalException(InvalidNameException):pass
class InvalidTerminalOperationException(WhiskeyNodeException):pass
class InvalidTerminalStateException(InvalidNameException):pass
'''
edges
'''
class InvalidEdgeParameterException(WhiskeyNodeException):pass
|
{"/whiskeynode/voter.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/events.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/tests/test_edge.py": ["/whiskeynode/edges.py"], "/whiskeynode/traversals.py": ["/whiskeynode/terminaltypes.py"], "/whiskeynode/edges.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/whiskeycache.py": ["/whiskeynode/exceptions.py"]}
|
20,628
|
texuf/whiskeynode
|
refs/heads/master
|
/tests/test_whiskeynode_traversals.py
|
from bson.objectid import ObjectId
from bson.dbref import DBRef
from datetime import datetime
from functools import partial
from unittest import TestCase
from whiskeynode import WhiskeyNode
from whiskeynode import whiskeycache
from whiskeynode.db import db
from whiskeynode.edges import Edge
from whiskeynode.exceptions import InvalidFieldNameException, FieldNameNotDefinedException
from whiskeynode.terminals import outbound_node, outbound_list, inbound_node, inbound_list
from whiskeynode.terminaltypes import TerminalType
from whiskeynode.traversals import lazy_traversal
import mock
import datetime
class EmailAddress(WhiskeyNode):
    # Test fixture: email document pointing outbound at a User.
    COLLECTION_NAME = 'users_emails'
    COLLECTION = db[COLLECTION_NAME]
    FIELDS = {
        'email':unicode,
    }
    '''
    INIT
    '''
    def __init__(self, *args, **kwargs):
        WhiskeyNode.__init__(self, *args, **kwargs)
    @classmethod
    def init_terminals(cls):
        cls.TERMINALS = {
            'user': outbound_node(User,create_on_request=True),
        }
class User(WhiskeyNode):
    # Test fixture: user with lazy traversals into its emails/contactInfo.
    COLLECTION_NAME = 'users_users'
    COLLECTION = db[COLLECTION_NAME]
    FIELDS = {
        'firstName': unicode,
    }
    '''
    INIT
    '''
    def __init__(self, *args, **kwargs):
        WhiskeyNode.__init__(self, *args, **kwargs)
    @classmethod
    def init_terminals(cls):
        cls.TRAVERSALS= {
            'email': lazy_traversal('emails.email', default_value=''),
            'hasContactInfo': lazy_traversal('contactInfo.exists', default_value=False),
        }
        cls.TERMINALS = {
            'emails': inbound_list( EmailAddress, 'user', render=False),
            'contactInfo': outbound_node( ContactInfo),
        }
class ContactInfo(WhiskeyNode):
    # Test fixture: contact info reachable inbound from a User.
    COLLECTION_NAME = 'users_contactinfo'
    COLLECTION = db[COLLECTION_NAME]
    FIELDS = {
        'phoneNumber':unicode,
    }
    def __init__(self, *args, **kwargs):
        WhiskeyNode.__init__(self, *args, **kwargs)
    @classmethod
    def init_terminals(cls):
        cls.TRAVERSALS= {
            'email': lazy_traversal('user.email', default_value=''),
            'firstName': lazy_traversal('user.firstName', default_value=False),
        }
        cls.TERMINALS = {
            'user': inbound_node( User, 'contactInfo', render=False),
        }
class DocumentBaseTest(TestCase):
    """Tests for lazy traversals: creation-time resolution and resolution
    from documents pre-seeded directly into the (mock) collections."""
    def tearDown(self):
        self.__cleanup()
    def __cleanup(self):
        Edge.COLLECTION.drop()
        EmailAddress.COLLECTION.drop()
        User.COLLECTION.drop()
        ContactInfo.COLLECTION.drop()
        whiskeycache.clear_cache()
    def test_create_traversals(self):
        self.__cleanup()
        my_email_address = 'boogers@brains.com'
        new_email_address = 'boogers2@brains.com'
        e = EmailAddress({'email':my_email_address})
        self.assertTrue(e.user.contactInfo is None)
        e.user.contactInfo = ContactInfo()
        self.assertTrue(e.user.email == my_email_address)
        self.assertTrue(e.user.contactInfo.email == my_email_address)
        e2 = EmailAddress({'email':new_email_address})
        e2.user = e.user
        # the traversal resolves through the most recently attached email
        self.assertTrue(e.user.contactInfo.email == new_email_address)
        self.assertTrue(e2.user.contactInfo.email == new_email_address)
        with mock.patch('mongomock.Collection.save') as save_moc:
            e.save()
            print save_moc.call_count
            self.assertTrue(save_moc.call_count == 7) #2 emails with 2 edges to 1 user with 1 edge to 1 contactInfo
    def __load_objects(self):
        # Seed the collections with a pre-linked email -> user -> contactInfo
        # graph (fixed ObjectIds) without going through the ORM.
        self.__cleanup()
        EmailAddress.COLLECTION.insert({'lastModified': datetime.datetime(2014, 1, 14, 16, 35, 21, 84428), '_id': ObjectId('52d5d7c92cc8230471fedf99'), 'email': 'boogers@brains.com', 'createdAt': datetime.datetime(2014, 1, 14, 16, 35, 21, 83710)})
        User.COLLECTION.insert({'firstName': u'', 'hasContactInfo': True, 'lastModified': datetime.datetime(2014, 1, 14, 16, 35, 21, 85368), '_id': ObjectId('52d5d7c92cc8230471fedf9a'), 'email': 'boogers@brains.com', 'createdAt': datetime.datetime(2014, 1, 14, 16, 35, 21, 83883)})
        ContactInfo.COLLECTION.insert({'phoneNumber': u'', 'firstName': u'', 'lastModified': datetime.datetime(2014, 1, 14, 16, 35, 21, 85447), '_id': ObjectId('52d5d7c92cc8230471fedf9c'), 'email': 'boogers@brains.com', 'createdAt': datetime.datetime(2014, 1, 14, 16, 35, 21, 85027)})
        Edge.COLLECTION.insert({'inboundId': ObjectId('52d5d7c92cc8230471fedf9a'), 'name': 'user', 'outboundId': ObjectId('52d5d7c92cc8230471fedf99'), 'terminalType': 'node', 'inboundCollection': 'users_users', 'lastModified': datetime.datetime(2014, 1, 14, 16, 35, 21, 84540), 'outboundCollection': 'users_emails','_id': ObjectId('52d5d7c92cc8230471fedf9b'), 'data': {}, 'createdAt': datetime.datetime(2014, 1, 14, 16, 35, 21, 84084)})
        Edge.COLLECTION.insert({'inboundId': ObjectId('52d5d7c92cc8230471fedf9c'), 'name': 'contactInfo', 'outboundId': ObjectId('52d5d7c92cc8230471fedf9a'), 'terminalType': 'node', 'inboundCollection': 'users_contactinfo', 'lastModified': datetime.datetime(2014, 1, 14, 16, 35, 21, 85558), 'outboundCollection': 'users_users', '_id': ObjectId('52d5d7c92cc8230471fedf9d'), 'data': {}, 'createdAt': datetime.datetime(2014, 1, 14, 16, 35, 21, 85229)})
    def test_load_traversals(self):
        self.__load_objects()
        my_email_address = 'boogers@brains.com'
        new_email_address = 'boogers2@brains.com'
        e = EmailAddress.find_one()
        self.assertTrue(e.email == my_email_address)
        e2 = EmailAddress({'email':new_email_address})
        e2.user = e.user
        self.assertTrue(e.user.contactInfo.email == new_email_address)
|
{"/whiskeynode/voter.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/events.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/tests/test_edge.py": ["/whiskeynode/edges.py"], "/whiskeynode/traversals.py": ["/whiskeynode/terminaltypes.py"], "/whiskeynode/edges.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/whiskeycache.py": ["/whiskeynode/exceptions.py"]}
|
20,629
|
texuf/whiskeynode
|
refs/heads/master
|
/whiskeynode/indexes.py
|
''' to run:
from whiskeynode.indexes import ensure_indexes
ensure_indexes()
'''
import pkgutil
import pyclbr
import whiskeynode
try:
import nodes
except:
nodes = None
def ensure_indexes(logger=None, do_print=True):
    """Walk the `nodes` package (if importable) and `whiskeynode`, creating
    the db indexes declared by every WhiskeyNode subclass found."""
    if nodes:
        _ensure_index(nodes, logger, do_print)
    _ensure_index(whiskeynode, logger, do_print)
def _ensure_index(package, logger, do_print):
    """Recursively scan *package* for WhiskeyNode/Document subclasses and
    apply their ENSURE_INDEXES / ENSURE_UNIQUE_INDEXES declarations.

    Messages go to *logger* when given, else stdout when *do_print*.
    """
    prefix = package.__name__ + "."
    for importer, modname, ispkg in pkgutil.iter_modules(package.__path__):
        full_modname = prefix+modname
        outer_module = __import__(full_modname, fromlist="dummy")
        if not ispkg:
            #print "Found submodule %s (is a package: %s)" % (modname, ispkg)
            #print "inspected: "+str(classes)
            classes = pyclbr.readmodule(full_modname)
            module = getattr(package, modname)
            for key,value in classes.items():
                #print full_modname
                if 'Document' in value.super or 'WhiskeyNode' in value.super:
                    cls = getattr(module, value.name)
                    try:
                        inst = cls()
                        for index in inst.ENSURE_INDEXES:
                            # compound (list) indexes are always ensured; single
                            # keys only when not handled as unique below
                            if isinstance(index, list) or index not in inst.ENSURE_UNIQUE_INDEXES:
                                dbug_msg = "ensuring index cls: %s collection: %s index: %s " % (full_modname, inst.COLLECTION_NAME, index)
                                if logger is not None:
                                    logger(dbug_msg)
                                elif do_print:
                                    print dbug_msg
                                inst.COLLECTION.ensure_index(index)
                        for index in inst.ENSURE_UNIQUE_INDEXES:
                            dbug_msg = "ensuring unique index cls: %s collection: %s index: %s " % (full_modname, inst.COLLECTION_NAME, index)
                            if logger is not None:
                                logger(dbug_msg)
                            elif do_print:
                                print dbug_msg
                            if index not in inst.ENSURE_INDEXES:
                                raise Exception('All indexes in ENSURE_UNIQUE_INDEXES should also be in ENSURE_INDEXES')
                            inst.COLLECTION.ensure_index(index, unique=True)
                    except Exception, e:
                        # NOTE(review): the `pass` below is dead code -- the
                        # reporting statements after it still run; confirm the
                        # report was not meant to be suppressed.
                        pass
                        dbug_msg = "Failed to import %s %s" % (full_modname, str(e))
                        if logger is not None:
                            logger(dbug_msg)
                        elif do_print:
                            print dbug_msg
        else:
            _ensure_index(outer_module, logger, do_print)
|
{"/whiskeynode/voter.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/events.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/tests/test_edge.py": ["/whiskeynode/edges.py"], "/whiskeynode/traversals.py": ["/whiskeynode/terminaltypes.py"], "/whiskeynode/edges.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/whiskeycache.py": ["/whiskeynode/exceptions.py"]}
|
20,630
|
texuf/whiskeynode
|
refs/heads/master
|
/whiskeynode/whiskeycache.py
|
from bson.objectid import ObjectId
from operator import attrgetter
from threading import Lock
from whiskeynode.exceptions import WhiskeyCacheException
import itertools
import weakref
'''
Weak Reference RAM - if something exists in memory, you should be able to find it here
'''
# MONKEY PATCH FOR 2.7: WeakSet.__len__ counts entries pending removal;
# subtract them so len() reflects the number of live members.
def weak_ref_len(self):
    # relies on WeakSet's private attributes `data` and `_pending_removals`
    # — CPython implementation detail; verify on interpreter upgrades
    return len(self.data) - len(self._pending_removals)
weakref.WeakSet.__len__ = weak_ref_len
# END MONKEY PATCH FOR 2.7
# id -> node; entries vanish automatically when the node is garbage collected
RAM = weakref.WeakValueDictionary()
RAM_ALL = {} #'collectionName':weakSet of nodes in that collection
# guards all mutation of RAM / RAM_ALL
lock = Lock()
def from_cache(cls, data, dirty=True):
    """Return the cached node for data['_id'], or build a fresh *cls* from
    *data* when it is not in RAM."""
    cached = RAM.get(data['_id'])
    if cached is not None:
        return cached
    return cls(init_with=data, dirty=dirty)
def clear_cache():
    ''' for testing: drop every cached node and every per-collection weak set '''
    with lock:
        # copy the key views with list() so deleting entries while walking
        # them cannot raise "dict changed size during iteration" (Python 3)
        for key in list(RAM.keys()):
            try:
                del RAM[key]
            except KeyError:
                # weakly-referenced value died concurrently
                pass
        for key in list(RAM_ALL.keys()):
            try:
                del RAM_ALL[key]
            except KeyError:
                pass
def remove(node):
    """Best-effort removal of *node* from RAM and its collection's weak set."""
    with lock:
        try:
            del RAM[node._id]
            try:
                RAM_ALL[node.COLLECTION_NAME].remove(node)
            except KeyError:
                # no weak set exists yet for this collection
                pass
        except Exception:
            # was a bare `except:`; kept deliberately broad (best effort) but
            # no longer swallows SystemExit/KeyboardInterrupt
            pass
def save(node):
    """Register *node* in RAM (by _id) and in its collection's weak set."""
    with lock:
        RAM[node._id] = node
        try:
            RAM_ALL[node.COLLECTION_NAME].add(node)
        except KeyError:
            # was a bare `except:` with a "#KeyError" comment — narrowed to
            # what the comment says was intended: first node of a collection
            RAM_ALL[node.COLLECTION_NAME] = weakref.WeakSet([node])
def from_id(_id, collection_name):
    """Cached node with this _id, but only if it belongs to *collection_name*;
    otherwise None."""
    node = RAM.get(_id)
    if node is None or node.COLLECTION_NAME != collection_name:
        return None
    return node
def from_ids(_ids):
    """All cached nodes among *_ids*, in input order; unknown ids are skipped."""
    with lock:
        return [RAM[_id] for _id in _ids if _id in RAM]
def find_one(cls, query):
    """Return one cached instance of *cls* matching *query*, or None.

    In-memory analogue of pymongo's find_one, backed only by RAM/RAM_ALL.
    """
    # empty query: first try an exact-type scan over everything in RAM
    if query == {}:
        for x in RAM.values():
            if type(x) is cls:
                return x
    elif '_id' in query:
        # direct id lookup (also validates the collection name)
        return from_id(query['_id'], cls.COLLECTION_NAME)
    # NOTE(review): when the branches above find nothing they fall through to
    # this scan too — for an empty query that returns an arbitrary member of
    # the class's collection set; confirm this fallthrough is intended.
    try:
        with lock:
            l = list(RAM_ALL[cls.COLLECTION_NAME])
        # linear scan: a candidate matches when every queried attribute is equal
        for x in l:
            is_true = True
            for key in query.keys():
                if getattr(x, key, None) != query[key]:
                    is_true = False
                    break
            if is_true:
                return x
    except KeyError:
        # no weak set exists yet for this collection
        return None
def _sort(dataset, sort):
if sort:
if len(sort) == 1:
return sorted(dataset, key=attrgetter(sort[0][0]), reverse=sort[0][1]==-1)
for sortKey, sortDirection in reversed(sort):
dataset = iter(sorted(dataset, key = attrgetter(sortKey), reverse = sortDirection < 0))
return dataset
def find(cls, query, sort):
    ''' find (should be mostly like pymongo find) '''
    # Recursive matcher over the in-memory candidates.  Supports a subset of
    # the pymongo query language: {}, _id, $or, $and, implicit AND over
    # several keys, and per-field $in/$nin/$ne/$gt.
    # NOTE(review): `query.keys()[0]` and `cmp(...)` below are Python 2 only;
    # this module targets 2.7 (see the WeakSet monkey patch above).
    def search(search_set, query):
        return_values = set([])
        # empty query matches the whole collection
        if query == {}:
            try:
                l = list(RAM_ALL[cls.COLLECTION_NAME])
                return [x for x in l]
            except KeyError:
                return []
        # direct id lookup (only when _id is a plain value, not an operator dict)
        if '_id' in query:
            if not isinstance(query['_id'], dict):
                try:
                    return [RAM[query['_id']]]
                except KeyError:
                    return []
        # $or: union of the sub-results
        if '$or' == query.keys()[0] and len(query) == 1:
            lol = [search(search_set, x) for x in query['$or']] #list of lists (lol)
            return set(itertools.chain(*lol))
        # $and: intersection of the sub-results
        if '$and' == query.keys()[0] and len(query) == 1:
            lol = [search(search_set, x) for x in query['$and']]
            return set.intersection(*lol)
        # several top-level keys: implicit AND, intersect single-key results
        if len(query) > 1:
            lol = [search(search_set, {k:v}) for k,v in query.items()]
            return set.intersection(*lol)
        key = query.keys()[0]
        for x in search_set:
            is_true = True
            if type(query[key]) is dict:
                if query[key] == {}:
                    is_true = getattr(x,key, None) == query[key]
                    # NOTE(review): this `break` abandons the candidate loop after
                    # the first element instead of `continue` — confirm intended.
                    break
                query_keys = query[key].keys()
                supported = ('$in', '$ne', '$gt', '$nin')
                if len(query_keys) == 1 and query_keys[0] in supported:
                    if query_keys[0] == '$in':
                        is_true = getattr(x, key, None) in query[key]['$in']
                    elif query_keys[0] == '$nin':
                        is_true = getattr(x, key, None) not in query[key]['$nin']
                    elif query_keys[0] == '$ne':
                        is_true = getattr(x, key, None) != query[key]['$ne']
                    elif query_keys[0] == '$gt':
                        is_true = getattr(x, key, None) > query[key]['$gt']
                else:
                    raise WhiskeyCacheException('Whiskey cache only supports the %s paramater, for deeper searches like [%s] with key [%s], use the COLLECTION' % (str(supported), str(query[key]),key))
            elif type(query[key]) is list:
                if query[key] == []:
                    is_true = getattr(x,key,None) == [] #com doesn't work for empty lists too well
                else:
                    # NOTE(review): cmp() is 0 (falsy) when EQUAL, so a truthy
                    # value here means "not equal" — confirm the intent.
                    is_true = cmp(query[key], getattr(x,key,None))
            else:
                # plain value: straight equality on the attribute
                is_true = getattr(x,key, None) == query[key]
            if is_true:
                return_values.add(x)
        return return_values
    try:
        l = list(RAM_ALL[cls.COLLECTION_NAME])
    except KeyError:
        return []
    else:
        return _sort(search(l, query), sort) #i think i need the list here for weakref reasons
def _quick_sort(values):
    # NOTE(review): unimplemented stub — nothing visible in this module calls
    # it; confirm whether it can be removed.
    pass
|
{"/whiskeynode/voter.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/events.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/tests/test_edge.py": ["/whiskeynode/edges.py"], "/whiskeynode/traversals.py": ["/whiskeynode/terminaltypes.py"], "/whiskeynode/edges.py": ["/whiskeynode/__init__.py", "/whiskeynode/db.py", "/whiskeynode/exceptions.py", "/whiskeynode/fieldtypes.py"], "/whiskeynode/whiskeycache.py": ["/whiskeynode/exceptions.py"]}
|
20,631
|
phoeinx/EvaP
|
refs/heads/master
|
/evap/results/tests/test_tools.py
|
from django.test.testcases import TestCase
from django.core.cache import caches
from django.conf import settings
from django.test import override_settings
from model_mommy import mommy
from evap.evaluation.models import Contribution, RatingAnswerCounter, Questionnaire, Question, Course, UserProfile
from evap.results.tools import get_answers, get_answers_from_answer_counters, get_results_cache_key, calculate_average_grades_and_deviation, calculate_results
from evap.staff.tools import merge_users
class TestCalculateResults(TestCase):
    """Tests for evap.results.tools: results caching, result math,
    answer counting, and the grade/deviation averaging formula."""
    def test_caches_published_course(self):
        # calculate_results must populate the 'results' cache for a published course
        course = mommy.make(Course, state='published')
        self.assertIsNone(caches['results'].get(get_results_cache_key(course)))
        calculate_results(course)
        self.assertIsNotNone(caches['results'].get(get_results_cache_key(course)))
    def test_cache_unpublished_course(self):
        # unpublishing must evict the cached results
        course = mommy.make(Course, state='published')
        calculate_results(course)
        course.unpublish()
        self.assertIsNone(caches['results'].get(get_results_cache_key(course)))
    def test_calculation_results(self):
        # 150 answers with known distribution -> check count, mean and pstdev
        contributor1 = mommy.make(UserProfile)
        student = mommy.make(UserProfile)
        course = mommy.make(Course, state='published', participants=[student, contributor1])
        questionnaire = mommy.make(Questionnaire)
        question = mommy.make(Question, questionnaire=questionnaire, type="G")
        contribution1 = mommy.make(Contribution, contributor=contributor1, course=course, questionnaires=[questionnaire])
        mommy.make(RatingAnswerCounter, question=question, contribution=contribution1, answer=1, count=5)
        mommy.make(RatingAnswerCounter, question=question, contribution=contribution1, answer=2, count=15)
        mommy.make(RatingAnswerCounter, question=question, contribution=contribution1, answer=3, count=40)
        mommy.make(RatingAnswerCounter, question=question, contribution=contribution1, answer=4, count=60)
        mommy.make(RatingAnswerCounter, question=question, contribution=contribution1, answer=5, count=30)
        results = calculate_results(course)
        self.assertEqual(len(results), 1)
        self.assertEqual(len(results[0].results), 1)
        result = results[0].results[0]
        self.assertEqual(result.total_count, 150)
        # mean of the distribution above is 545/150 == 109/30
        self.assertAlmostEqual(result.average, float(109) / 30)
        self.assertAlmostEqual(result.deviation, 1.015983376941878)
    def test_calculate_results_after_user_merge(self):
        """ Asserts that merge_users leaves the results cache in a consistent state. Regression test for #907 """
        contributor = mommy.make(UserProfile)
        main_user = mommy.make(UserProfile)
        student = mommy.make(UserProfile)
        course = mommy.make(Course, state='published', participants=[student])
        questionnaire = mommy.make(Questionnaire)
        mommy.make(Question, questionnaire=questionnaire, type="G")
        mommy.make(Contribution, contributor=contributor, course=course, questionnaires=[questionnaire])
        calculate_results(course)
        merge_users(main_user, contributor)
        # after the merge, every cached section's contributor must still exist
        results = calculate_results(course)
        for section in results:
            self.assertTrue(Contribution.objects.filter(course=course, contributor=section.contributor).exists())
    def test_answer_counting(self):
        # get_answers must only pick counters of the requested contribution+question
        contributor1 = mommy.make(UserProfile)
        contributor2 = mommy.make(UserProfile)
        student = mommy.make(UserProfile)
        course1 = mommy.make(Course, state='published', participants=[student, contributor1])
        questionnaire = mommy.make(Questionnaire)
        question1 = mommy.make(Question, questionnaire=questionnaire, type="G")
        question2 = mommy.make(Question, questionnaire=questionnaire, type="G")
        contribution1 = mommy.make(Contribution, contributor=contributor1, course=course1, questionnaires=[questionnaire])
        contribution2 = mommy.make(Contribution, contributor=contributor1, questionnaires=[questionnaire])
        contribution3 = mommy.make(Contribution, contributor=contributor2, course=course1, questionnaires=[questionnaire])
        rating_answer_counters = []
        rating_answer_counters.append(mommy.make(RatingAnswerCounter, question=question1, contribution=contribution1, answer=1, count=1))
        rating_answer_counters.append(mommy.make(RatingAnswerCounter, question=question1, contribution=contribution1, answer=3, count=4))
        rating_answer_counters.append(mommy.make(RatingAnswerCounter, question=question1, contribution=contribution1, answer=4, count=2))
        rating_answer_counters.append(mommy.make(RatingAnswerCounter, question=question1, contribution=contribution1, answer=5, count=3))
        # create some unrelated answer counters for different questions / contributions
        mommy.make(RatingAnswerCounter, question=question1, contribution=contribution2, answer=1, count=1)
        mommy.make(RatingAnswerCounter, question=question1, contribution=contribution3, answer=1, count=1)
        mommy.make(RatingAnswerCounter, question=question2, contribution=contribution1, answer=1, count=1)
        answer_counters = get_answers(contribution1, question1)
        self.assertSetEqual(set(rating_answer_counters), set(answer_counters))
        # counters expand to one entry per counted answer
        answers = get_answers_from_answer_counters(answer_counters)
        self.assertListEqual(answers, [1, 3, 3, 3, 3, 4, 4, 5, 5, 5])
    @override_settings(CONTRIBUTION_PERCENTAGE=0.3, GRADE_PERCENTAGE=0.6)
    def test_average_grades(self):
        # checks the CONTRIBUTION_PERCENTAGE / GRADE_PERCENTAGE blending formula
        contributor1 = mommy.make(UserProfile)
        contributor2 = mommy.make(UserProfile)
        course = mommy.make(Course)
        questionnaire = mommy.make(Questionnaire)
        question_grade = mommy.make(Question, questionnaire=questionnaire, type="G")
        question_likert = mommy.make(Question, questionnaire=questionnaire, type="L")
        general_contribution = mommy.make(Contribution, contributor=None, course=course, questionnaires=[questionnaire])
        contribution1 = mommy.make(Contribution, contributor=contributor1, course=course, questionnaires=[questionnaire])
        contribution2 = mommy.make(Contribution, contributor=contributor2, course=course, questionnaires=[questionnaire])
        mommy.make(RatingAnswerCounter, question=question_grade, contribution=contribution1, answer=1, count=1)
        mommy.make(RatingAnswerCounter, question=question_grade, contribution=contribution2, answer=4, count=2)
        mommy.make(RatingAnswerCounter, question=question_likert, contribution=contribution1, answer=3, count=4)
        mommy.make(RatingAnswerCounter, question=question_likert, contribution=general_contribution, answer=5, count=3)
        total_likert = settings.CONTRIBUTION_PERCENTAGE * 3 + (1 - settings.CONTRIBUTION_PERCENTAGE) * 5
        total_grade = 2.5
        total = settings.GRADE_PERCENTAGE * total_grade + (1 - settings.GRADE_PERCENTAGE) * total_likert
        average, deviation = calculate_average_grades_and_deviation(course)
        self.assertAlmostEqual(average, total)
        self.assertAlmostEqual(deviation, 0)
    @override_settings(CONTRIBUTION_PERCENTAGE=0.3, GRADE_PERCENTAGE=0.6)
    def test_average_deviation(self):
        # same blending formula, but applied to the deviations
        contributor1 = mommy.make(UserProfile)
        contributor2 = mommy.make(UserProfile)
        course = mommy.make(Course)
        questionnaire = mommy.make(Questionnaire)
        question_grade = mommy.make(Question, questionnaire=questionnaire, type="G")
        question_likert = mommy.make(Question, questionnaire=questionnaire, type="L")
        general_contribution = mommy.make(Contribution, contributor=None, course=course, questionnaires=[questionnaire])
        contribution1 = mommy.make(Contribution, contributor=contributor1, course=course, questionnaires=[questionnaire])
        contribution2 = mommy.make(Contribution, contributor=contributor2, course=course, questionnaires=[questionnaire])
        mommy.make(RatingAnswerCounter, question=question_grade, contribution=contribution1, answer=1, count=1)
        mommy.make(RatingAnswerCounter, question=question_grade, contribution=contribution1, answer=3, count=1)
        mommy.make(RatingAnswerCounter, question=question_grade, contribution=contribution2, answer=4, count=2)
        mommy.make(RatingAnswerCounter, question=question_grade, contribution=contribution2, answer=2, count=2)
        mommy.make(RatingAnswerCounter, question=question_likert, contribution=contribution1, answer=3, count=4)
        mommy.make(RatingAnswerCounter, question=question_likert, contribution=contribution1, answer=5, count=4)
        mommy.make(RatingAnswerCounter, question=question_likert, contribution=general_contribution, answer=5, count=3)
        __, deviation = calculate_average_grades_and_deviation(course)
        total_likert_dev = settings.CONTRIBUTION_PERCENTAGE * 1 + (1 - settings.CONTRIBUTION_PERCENTAGE) * 0
        total_grade_dev = 1
        total_dev = settings.GRADE_PERCENTAGE * total_grade_dev + (1 - settings.GRADE_PERCENTAGE) * total_likert_dev
        self.assertAlmostEqual(deviation, total_dev)
|
{"/evap/results/tests/test_tools.py": ["/evap/results/tools.py"], "/evap/results/views.py": ["/evap/results/tools.py"], "/evap/results/templatetags/results_templatetags.py": ["/evap/results/tools.py"]}
|
20,632
|
phoeinx/EvaP
|
refs/heads/master
|
/evap/results/views.py
|
from collections import OrderedDict, namedtuple
from django.core.exceptions import PermissionDenied
from django.shortcuts import get_object_or_404, render
from django.contrib.auth.decorators import login_required
from evap.evaluation.models import Semester, Degree, Contribution
from evap.evaluation.auth import internal_required
from evap.results.tools import calculate_results, calculate_average_grades_and_deviation, TextResult, RatingResult, \
HeadingResult, COMMENT_STATES_REQUIRED_FOR_VISIBILITY, YesNoResult
@internal_required
def index(request):
    """Results landing page: lists every semester that has published courses."""
    published_semesters = Semester.get_all_with_published_courses()
    context = {"semesters": published_semesters}
    return render(request, "results_index.html", context)
@internal_required
def semester_detail(request, semester_id):
    """Results overview for one semester, grouped by degree.

    Reviewers additionally see courses that are still in evaluation,
    evaluated, or reviewed; everyone else only sees published ones.
    """
    semester = get_object_or_404(Semester, id=semester_id)
    visible_states = ['published']
    if request.user.is_reviewer:
        visible_states += ['in_evaluation', 'evaluated', 'reviewed']
    candidates = semester.course_set.filter(state__in=visible_states).prefetch_related("degrees")
    courses = [course for course in candidates if course.can_user_see_course(request.user)]
    # Annotate each course object with its grades.
    for course in courses:
        course.avg_grade, course.avg_deviation = calculate_average_grades_and_deviation(course)
    CourseTuple = namedtuple('CourseTuple', ('courses', 'single_results'))
    courses_by_degree = OrderedDict((degree, CourseTuple([], [])) for degree in Degree.objects.all())
    for course in courses:
        if course.is_single_result:
            # single results carry exactly one section with one result
            for degree in course.degrees.all():
                first_section = calculate_results(course)[0]
                courses_by_degree[degree].single_results.append((course, first_section.results[0]))
        else:
            for degree in course.degrees.all():
                courses_by_degree[degree].courses.append(course)
    template_data = dict(semester=semester, courses_by_degree=courses_by_degree)
    return render(request, "results_semester_detail.html", template_data)
@login_required
def course_detail(request, semester_id, course_id):
    """Detail page for one course's results.

    Filters text answers by viewer permission, drops dangling heading rows and
    empty sections, then splits the remaining sections into course-level
    (top/bottom) and per-contributor groups for the template.
    """
    semester = get_object_or_404(Semester, id=semester_id)
    course = get_object_or_404(semester.course_set, id=course_id, semester=semester)
    if not course.can_user_see_results(request.user):
        raise PermissionDenied
    sections = calculate_results(course)
    if request.user.is_reviewer:
        public_view = request.GET.get('public_view') != 'false'  # if parameter is not given, show public view.
    else:
        public_view = request.GET.get('public_view') == 'true'  # if parameter is not given, show own view.
    # If grades are not published, there is no public view
    if not course.can_publish_grades:
        public_view = False
    represented_users = list(request.user.represented_users.all())
    represented_users.append(request.user)
    show_grades = request.user.is_reviewer or course.can_publish_grades
    # filter text answers: keep a TextResult only if at least one answer is visible
    for section in sections:
        results = []
        for result in section.results:
            if isinstance(result, TextResult):
                answers = [answer for answer in result.answers if user_can_see_text_answer(request.user, represented_users, answer, public_view)]
                if answers:
                    results.append(TextResult(question=result.question, answers=answers))
            else:
                results.append(result)
        section.results[:] = results
    # filter empty headings
    for section in sections:
        filtered_results = []
        for index in range(len(section.results)):
            result = section.results[index]
            # filter out if there are no more questions or the next question is also a heading question
            if isinstance(result, HeadingResult):
                if index == len(section.results) - 1 or isinstance(section.results[index + 1], HeadingResult):
                    continue
            filtered_results.append(result)
        section.results[:] = filtered_results
    # remove empty sections
    sections = [section for section in sections if section.results]
    # group by contributor
    course_sections_top = []
    course_sections_bottom = []
    contributor_sections = OrderedDict()
    for section in sections:
        if section.contributor is None:
            if section.questionnaire.is_below_contributors:
                course_sections_bottom.append(section)
            else:
                course_sections_top.append(section)
        else:
            contributor_sections.setdefault(section.contributor,
                {'total_votes': 0, 'sections': []})['sections'].append(section)
            # tally votes so the template can show per-contributor totals
            for result in section.results:
                if isinstance(result, TextResult):
                    contributor_sections[section.contributor]['total_votes'] += 1
                elif isinstance(result, RatingResult) or isinstance(result, YesNoResult):
                    # Only count rating results if we show the grades.
                    if show_grades:
                        contributor_sections[section.contributor]['total_votes'] += result.total_count
    # Show a warning if course is still in evaluation (for reviewer preview).
    evaluation_warning = course.state != 'published'
    # Results for a course might not be visible because there are not enough answers
    # but it can still be "published" e.g. to show the comment results to contributors.
    # Users who can open the results page see a warning message in this case.
    sufficient_votes_warning = not course.can_publish_grades
    course.avg_grade, course.avg_deviation = calculate_average_grades_and_deviation(course)
    template_data = dict(
            course=course,
            course_sections_top=course_sections_top,
            course_sections_bottom=course_sections_bottom,
            contributor_sections=contributor_sections,
            evaluation_warning=evaluation_warning,
            sufficient_votes_warning=sufficient_votes_warning,
            show_grades=show_grades,
            reviewer=request.user.is_reviewer,
            contributor=course.is_user_contributor_or_delegate(request.user),
            can_download_grades=request.user.can_download_grades,
            public_view=public_view)
    return render(request, "results_course_detail.html", template_data)
def user_can_see_text_answer(user, represented_users, text_answer, public_view=False):
    """Whether *user* (also acting for *represented_users*) may read
    *text_answer*.  The public view never shows text answers."""
    if public_view:
        return False
    if text_answer.state not in COMMENT_STATES_REQUIRED_FOR_VISIBILITY:
        return False
    if user.is_reviewer:
        return True
    contributor = text_answer.contribution.contributor
    if text_answer.is_private:
        return contributor == user
    if not text_answer.is_published:
        return False
    # published answers: visibility depends on the viewer's relation to the course
    if text_answer.contribution.responsible:
        return contributor == user or user in contributor.delegates.all()
    if contributor in represented_users:
        return True
    course_contributions = text_answer.contribution.course.contributions
    if course_contributions.filter(
            contributor__in=represented_users, comment_visibility=Contribution.ALL_COMMENTS).exists():
        return True
    if text_answer.contribution.is_general and course_contributions.filter(
            contributor__in=represented_users, comment_visibility=Contribution.COURSE_COMMENTS).exists():
        return True
    return False
|
{"/evap/results/tests/test_tools.py": ["/evap/results/tools.py"], "/evap/results/views.py": ["/evap/results/tools.py"], "/evap/results/templatetags/results_templatetags.py": ["/evap/results/tools.py"]}
|
20,633
|
phoeinx/EvaP
|
refs/heads/master
|
/evap/results/tools.py
|
from collections import namedtuple, defaultdict, OrderedDict
from functools import partial
from math import ceil
from statistics import pstdev, median
from django.conf import settings
from django.core.cache import caches
from django.db.models import Sum
from evap.evaluation.models import TextAnswer, Contribution, RatingAnswerCounter
from evap.evaluation.tools import questionnaires_and_contributions
# RGB anchor colors for the 1 (best) .. 5 (worst) grade scale; grades between
# anchors are interpolated by get_grade_color/color_mix.
GRADE_COLORS = {
    1: (136, 191, 74),
    2: (187, 209, 84),
    3: (239, 226, 88),
    4: (242, 158, 88),
    5: (235, 89, 90),
}
# text answers in these states are the only ones that may ever be displayed
COMMENT_STATES_REQUIRED_FOR_VISIBILITY = [TextAnswer.PRIVATE, TextAnswer.PUBLISHED]
# see calculate_results
ResultSection = namedtuple('ResultSection', ('questionnaire', 'contributor', 'label', 'results', 'warning'))
CommentSection = namedtuple('CommentSection', ('questionnaire', 'contributor', 'label', 'is_responsible', 'results'))
RatingResult = namedtuple('RatingResult', ('question', 'total_count', 'average', 'deviation', 'counts', 'warning'))
YesNoResult = namedtuple('YesNoResult', ('question', 'total_count', 'average', 'deviation', 'counts', 'warning', 'approval_count'))
TextResult = namedtuple('TextResult', ('question', 'answers'))
# was ('question') — a plain string, not a tuple; it only worked because
# namedtuple also accepts a single field name as a string
HeadingResult = namedtuple('HeadingResult', ('question',))
def avg(iterable):
    """Arithmetic mean of the non-None items of *iterable*; None when no
    such items exist."""
    values = [value for value in iterable if value is not None]
    if not values:
        return None
    return float(sum(values)) / len(values)
def mix(a, b, alpha):
    """Weighted blend alpha*a + (1-alpha)*b.

    None operands are tolerated: if either side is missing the other is
    returned unchanged, and None/None yields None.
    """
    if a is None:
        return b  # also covers the both-None case (returns None)
    if b is None:
        return a
    return alpha * a + (1 - alpha) * b
def get_answers(contribution, question):
    """QuerySet of *question*'s answers (instances of its answer_class)
    belonging to *contribution*."""
    return question.answer_class.objects.filter(contribution=contribution, question=question)
def get_number_of_answers(contribution, question):
    """Number of answers given for *question* in *contribution*.

    Rating questions store counters, so their counts are summed; other
    question types are counted directly.
    """
    answers = get_answers(contribution, question)
    if not question.is_rating_question:
        return len(answers)
    return get_sum_of_answer_counters(answers)
def get_sum_of_answer_counters(answer_counters):
    """Total count across all counters; 0 for an empty queryset."""
    total = answer_counters.aggregate(total_count=Sum('count'))['total_count']
    return total if total is not None else 0
def get_answers_from_answer_counters(answer_counters):
    """Expand (answer, count) counters into a flat list with one entry per
    counted answer, preserving counter order."""
    answers = []
    for counter in answer_counters:
        answers.extend([counter.answer] * counter.count)
    return answers
def get_textanswers(contribution, question, filter_states=None):
    """Text answers for *question* in *contribution*, optionally restricted
    to the given states."""
    assert question.is_text_question
    answers = get_answers(contribution, question)
    if filter_states is None:
        return answers
    return answers.filter(state__in=filter_states)
def get_counts(question, answer_counters):
counts = OrderedDict()
possible_answers = range(1, 6)
if question.is_yes_no_question:
possible_answers = [1, 5]
# ensure ordering of answers
for answer in possible_answers:
counts[answer] = 0
for answer_counter in answer_counters:
counts[answer_counter.answer] = answer_counter.count
return counts
def get_results_cache_key(course):
    """Key under which calculate_results caches this course's results."""
    key_prefix = 'evap.staff.results.tools.calculate_results'
    return '{}-{:d}'.format(key_prefix, course.id)
def calculate_results(course, force_recalculation=False):
    """Results for *course*, cached in the 'results' cache for published
    courses; unpublished courses are always computed fresh."""
    if course.state != "published":
        return _calculate_results_impl(course)
    results_cache = caches['results']
    cache_key = get_results_cache_key(course)
    if force_recalculation:
        results_cache.delete(cache_key)
    return results_cache.get_or_set(cache_key, partial(_calculate_results_impl, course))
def _calculate_results_impl(course):
    """Calculates the result data for a single course. Returns a list of
    `ResultSection` tuples. Each of those tuples contains the questionnaire, the
    contributor (or None), a list of single result elements, the average grade and
    deviation for that section (or None). The result elements are either
    `RatingResult` or `TextResult` instances."""
    # there will be one section per relevant questionnaire--contributor pair
    sections = []
    # calculate the median values of how many people answered a questionnaire type (lecturer, tutor, ...)
    questionnaire_med_answers = defaultdict(list)
    questionnaire_max_answers = {}
    questionnaire_warning_thresholds = {}
    for questionnaire, contribution in questionnaires_and_contributions(course):
        max_answers = max([get_number_of_answers(contribution, question) for question in questionnaire.rating_questions], default=0)
        questionnaire_max_answers[(questionnaire, contribution)] = max_answers
        questionnaire_med_answers[questionnaire].append(max_answers)
    # a section is flagged when its answer count falls below this per-questionnaire threshold
    for questionnaire, max_answers in questionnaire_med_answers.items():
        questionnaire_warning_thresholds[questionnaire] = max(settings.RESULTS_WARNING_PERCENTAGE * median(max_answers), settings.RESULTS_WARNING_COUNT)
    for questionnaire, contribution in questionnaires_and_contributions(course):
        # will contain one object per question
        results = []
        for question in questionnaire.question_set.all():
            if question.is_rating_question:
                answer_counters = get_answers(contribution, question)
                answers = get_answers_from_answer_counters(answer_counters)
                total_count = len(answers)
                average = avg(answers) if total_count > 0 else None
                # population standard deviation around the computed average
                deviation = pstdev(answers, average) if total_count > 0 else None
                counts = get_counts(question, answer_counters)
                warning = total_count > 0 and total_count < questionnaire_warning_thresholds[questionnaire]
                if question.is_yes_no_question:
                    # approval is "1" for positive phrasing, "5" for negative
                    if question.is_positive_yes_no_question:
                        approval_count = counts[1]
                    else:
                        approval_count = counts[5]
                    results.append(YesNoResult(question, total_count, average, deviation, counts, warning, approval_count))
                else:
                    results.append(RatingResult(question, total_count, average, deviation, counts, warning))
            elif question.is_text_question:
                answers = get_textanswers(contribution, question, COMMENT_STATES_REQUIRED_FOR_VISIBILITY)
                results.append(TextResult(question=question, answers=answers))
            elif question.is_heading_question:
                results.append(HeadingResult(question=question))
        section_warning = questionnaire_max_answers[(questionnaire, contribution)] < questionnaire_warning_thresholds[questionnaire]
        sections.append(ResultSection(questionnaire, contribution.contributor, contribution.label, results, section_warning))
    return sections
def calculate_average_grades_and_deviation(course):
    """Determines the final average grade and deviation for a course.

    Likert and grade questions are averaged separately for generic (course)
    sections and contributor sections, then blended using
    settings.CONTRIBUTION_PERCENTAGE and settings.GRADE_PERCENTAGE according
    to the formula in the comment below.  Either value may be None when no
    matching answers exist (see mix()/avg()).
    """
    avg_generic_likert = []
    avg_contribution_likert = []
    dev_generic_likert = []
    dev_contribution_likert = []
    avg_generic_grade = []
    avg_contribution_grade = []
    dev_generic_grade = []
    dev_contribution_grade = []
    # bucket each section's per-question-type averages by whether it belongs
    # to a contributor or to the course as a whole
    for __, contributor, __, results, __ in calculate_results(course):
        average_likert = avg([result.average for result in results if result.question.is_likert_question])
        deviation_likert = avg([result.deviation for result in results if result.question.is_likert_question])
        average_grade = avg([result.average for result in results if result.question.is_grade_question])
        deviation_grade = avg([result.deviation for result in results if result.question.is_grade_question])
        (avg_contribution_likert if contributor else avg_generic_likert).append(average_likert)
        (dev_contribution_likert if contributor else dev_generic_likert).append(deviation_likert)
        (avg_contribution_grade if contributor else avg_generic_grade).append(average_grade)
        (dev_contribution_grade if contributor else dev_generic_grade).append(deviation_grade)
    # the final total grade will be calculated by the following formula (GP = GRADE_PERCENTAGE, CP = CONTRIBUTION_PERCENTAGE):
    # final_likert = CP * likert_answers_about_persons + (1-CP) * likert_answers_about_courses
    # final_grade = CP * grade_answers_about_persons + (1-CP) * grade_answers_about_courses
    # final = GP * final_grade + (1-GP) * final_likert
    final_likert_avg = mix(avg(avg_contribution_likert), avg(avg_generic_likert), settings.CONTRIBUTION_PERCENTAGE)
    final_likert_dev = mix(avg(dev_contribution_likert), avg(dev_generic_likert), settings.CONTRIBUTION_PERCENTAGE)
    final_grade_avg = mix(avg(avg_contribution_grade), avg(avg_generic_grade), settings.CONTRIBUTION_PERCENTAGE)
    final_grade_dev = mix(avg(dev_contribution_grade), avg(dev_generic_grade), settings.CONTRIBUTION_PERCENTAGE)
    final_avg = mix(final_grade_avg, final_likert_avg, settings.GRADE_PERCENTAGE)
    final_dev = mix(final_grade_dev, final_likert_dev, settings.GRADE_PERCENTAGE)
    return final_avg, final_dev
def has_no_rating_answers(course, contributor, questionnaire):
    """True when *contributor*'s contribution to *course* has no rating
    answers at all for *questionnaire*'s rating questions."""
    questions = questionnaire.rating_questions
    contribution = Contribution.objects.get(course=course, contributor=contributor)
    # .exists() lets the database stop at the first matching row instead of
    # counting all of them (was `.count() == 0`)
    return not RatingAnswerCounter.objects.filter(question__in=questions, contribution=contribution).exists()
def color_mix(color1, color2, fraction):
    """Linear interpolation of two RGB triples: fraction 0 gives *color1*,
    fraction 1 gives *color2*."""
    blended = []
    for channel in range(3):
        value = color1[channel] * (1 - fraction) + color2[channel] * fraction
        blended.append(int(round(value)))
    return tuple(blended)
def get_grade_color(grade):
    """RGB color for *grade*, interpolated between the two surrounding
    GRADE_COLORS anchors.

    Returns white for None (no grades given).  Grades are clamped into the
    defined 1..5 scale, so slightly out-of-range values no longer raise a
    KeyError when looked up in GRADE_COLORS.
    """
    # Can happen if no one leaves any grades. Return white because its least likely to cause problems.
    if grade is None:
        return (255, 255, 255)
    grade = min(max(round(grade, 1), 1), 5)
    next_lower = int(grade)
    next_higher = int(ceil(grade))
    return color_mix(GRADE_COLORS[next_lower], GRADE_COLORS[next_higher], grade - next_lower)
def get_deviation_color(deviation):
    """Grayscale RGB for a deviation: white at 0, darkening as the deviation
    grows, capped at 2.0 (larger values are very uncommon in practice)."""
    if deviation is None:
        return (255, 255, 255)
    capped = min(deviation, 2.0)
    shade = int(255 - capped * 60)  # factor tweaked to look good
    return (shade, shade, shade)
|
{"/evap/results/tests/test_tools.py": ["/evap/results/tools.py"], "/evap/results/views.py": ["/evap/results/tools.py"], "/evap/results/templatetags/results_templatetags.py": ["/evap/results/tools.py"]}
|
20,634
|
phoeinx/EvaP
|
refs/heads/master
|
/evap/results/templatetags/results_templatetags.py
|
from django.template import Library
from evap.results.tools import get_grade_color, get_deviation_color
register = Library()
@register.filter(name='gradecolor')
def gradecolor(grade):
    """Template filter: CSS rgb() string for a grade's color."""
    red, green, blue = get_grade_color(grade)
    return 'rgb({}, {}, {})'.format(red, green, blue)
@register.filter(name='deviationcolor')
def deviationcolor(deviation):
    """Template filter: CSS rgb() string for a deviation's color."""
    red, green, blue = get_deviation_color(deviation)
    return 'rgb({}, {}, {})'.format(red, green, blue)
|
{"/evap/results/tests/test_tools.py": ["/evap/results/tools.py"], "/evap/results/views.py": ["/evap/results/tools.py"], "/evap/results/templatetags/results_templatetags.py": ["/evap/results/tools.py"]}
|
20,635
|
socrateschieregato/TravelsTest
|
refs/heads/master
|
/travels/helpers.py
|
import json
import logging
import urllib.parse
from travels.exceptions import NotFoundException
from travels.urls import urls, Views
logger = logging.getLogger('backend')
def route_data_or_404(pieces):
    """Parse the raw request pieces (request line first, body last) into a route dict.

    Raises NotFoundException if anything about the request cannot be decoded.
    """
    try:
        method, raw_url, protocol = pieces[0].strip().split()
        query_params = {}
        if '?' in raw_url:
            raw_url, raw_query = raw_url.split('?')
            query_params = dict(urllib.parse.parse_qsl(raw_query))
        if pieces[-1] != '':
            body = json.loads(pieces[-1].replace('\n\t', ''))
        else:
            body = None
        route_data = {
            'method': method,
            'url': raw_url.replace('/', ''),
            'body': body,
            'protocol': protocol,
            'query_params': query_params if query_params else None
        }
        logger.info(route_data)
        return route_data
    except Exception as e:
        raise NotFoundException(f'Error to decode request: {e}')
def get_view(route_data):
    """Find the view whose name matches the request URL, call it, and return its result.

    Raises NotFoundException when no URL entry matches.
    """
    for name, _handler in urls:
        if route_data['url'] != name:
            continue
        view = getattr(
            Views(route_data),
            name,
            lambda: True
        )
        logger.info(f'View: {name}')
        return view()
    raise NotFoundException('Page not Found')
def to_json(routes):
    """Convert (source, destination, price) rows into a list of JSON-serializable dicts."""
    return [
        {
            'source': row[0],
            'destination': row[1],
            'price': int(row[2]),
        }
        for row in routes
    ]
|
{"/travels/helpers.py": ["/travels/exceptions.py", "/travels/urls.py"], "/travels/urls.py": ["/travels/views.py"], "/travels/views.py": ["/travels/exceptions.py", "/travels/script.py", "/settings.py", "/travels/helpers.py"], "/tests/test_api.py": ["/travels/script.py", "/settings.py"], "/tests/test_script.py": ["/travels/exceptions.py", "/travels/script.py", "/settings.py"], "/travels/script.py": ["/travels/exceptions.py"], "/server.py": ["/travels/exceptions.py", "/travels/helpers.py", "/settings.py"]}
|
20,636
|
socrateschieregato/TravelsTest
|
refs/heads/master
|
/travels/urls.py
|
from travels.views import Views

# Module-level instance used only so the handler methods can be referenced below.
r = Views()

# URL table: (url name, handler). Note that get_view() in travels/helpers.py
# matches on the name and resolves the method by name on a fresh
# Views(route_data) instance, so the bound methods here serve as documentation.
urls = [
    ('routes', r.routes),
    ('new_route', r.new_route),
    ('get_route', r.get_route)
]
|
{"/travels/helpers.py": ["/travels/exceptions.py", "/travels/urls.py"], "/travels/urls.py": ["/travels/views.py"], "/travels/views.py": ["/travels/exceptions.py", "/travels/script.py", "/settings.py", "/travels/helpers.py"], "/tests/test_api.py": ["/travels/script.py", "/settings.py"], "/tests/test_script.py": ["/travels/exceptions.py", "/travels/script.py", "/settings.py"], "/travels/script.py": ["/travels/exceptions.py"], "/server.py": ["/travels/exceptions.py", "/travels/helpers.py", "/settings.py"]}
|
20,637
|
socrateschieregato/TravelsTest
|
refs/heads/master
|
/travels/views.py
|
from travels.exceptions import ErrorWriteFile
from travels.script import FindRoute, write_file
from settings import FILE
class Views:
    """Request handlers for the travel-routes API."""

    def __init__(self, route_data=None, file_data=None):
        # Fall back to the configured CSV file when none is given.
        self.data = file_data or FILE
        self.route = FindRoute(file_data=self.data)
        self.route_data = route_data

    def routes(self):
        """Return every route in the CSV file as a list of dicts."""
        # Imported locally to avoid a circular import with travels.helpers.
        from travels.helpers import to_json
        return to_json(self.route.get_rows_from_file())

    def new_route(self):
        """Append the posted route to the CSV file and echo it back."""
        body = self.route_data['body']
        source = body['source'].upper()
        destination = body['destination'].upper()
        price = body['price']
        if not write_file(self.data, source, destination, price):
            raise ErrorWriteFile('Error saving data')
        return {
            'source': source,
            'destination': destination,
            'price': price
        }

    def get_route(self):
        """Compute the cheapest route between the query-string source and destination."""
        params = self.route_data['query_params']
        finder = FindRoute(
            params['source'].upper(),
            params['destination'].upper(),
            self.data
        )
        finder.dijkstra()
        finder.best_route()
        return {
            'route': f"{finder.source} - {' - '.join(finder.path)}",
            'price': finder.price
        }
|
{"/travels/helpers.py": ["/travels/exceptions.py", "/travels/urls.py"], "/travels/urls.py": ["/travels/views.py"], "/travels/views.py": ["/travels/exceptions.py", "/travels/script.py", "/settings.py", "/travels/helpers.py"], "/tests/test_api.py": ["/travels/script.py", "/settings.py"], "/tests/test_script.py": ["/travels/exceptions.py", "/travels/script.py", "/settings.py"], "/travels/script.py": ["/travels/exceptions.py"], "/server.py": ["/travels/exceptions.py", "/travels/helpers.py", "/settings.py"]}
|
20,638
|
socrateschieregato/TravelsTest
|
refs/heads/master
|
/tests/test_api.py
|
import json
import unittest
from socket import socket, AF_INET, SOCK_STREAM
from travels.script import FindRoute
from settings import FILE, INITIAL_DATA
def remove_last_line():
    """Reset the test CSV file back to its initial contents."""
    with open(FILE, 'w') as f:
        f.writelines(INITIAL_DATA)
class TestApi(unittest.TestCase):
    """HTTP tests against a server running on localhost:8000, plus direct FindRoute checks."""

    def setUp(self):
        self.client_socket = socket(AF_INET, SOCK_STREAM)

    def tearDown(self):
        self.client_socket.close()

    def test_routes(self):
        self.client_socket.connect(('localhost', 8000))
        self.client_socket.send('GET /routes HTTP/1.1\r\n\r\n'.encode())
        response = (self.client_socket.recv(1024).decode()).strip().split('\r\n')
        protocol, status_code, status = response[0].split()
        data = json.loads(response[-1].replace('\'', '\"'))
        self.assertEqual(int(status_code), 200)
        self.assertEqual(len(data), 7)
        self.assertEqual(data[0]['source'], 'GRU')
        self.assertEqual(data[-1]['source'], 'SCL')

    def test_get_route_gru_cdg(self):
        self.client_socket.connect(('localhost', 8000))
        self.client_socket.send(
            'GET /get_route?source=gru&destination=CDG HTTP/1.1\r\n\r\n'.encode()
        )
        response = (self.client_socket.recv(1024).decode()).strip().split('\r\n')
        protocol, status_code, status = response[0].split()
        data = json.loads(response[-1].replace('\'', '\"'))
        self.assertEqual(data, {'route': 'GRU - BRC - SCL - ORL - CDG', 'price': 40})
        self.assertEqual(int(status_code), 200)
        self.assertEqual(len(data), 2)

    def test_get_route_not_in_file(self):
        self.client_socket.connect(('localhost', 8000))
        self.client_socket.send(
            'GET /get_route?source=ABC&destination=XYZ HTTP/1.1\r\n\r\n'.encode()
        )
        response = (self.client_socket.recv(1024).decode()).strip().split('\r\n')
        protocol, status_code, status = response[0].split()
        data = json.loads(response[-1].replace('\'', '\"'))
        self.assertEqual(data, {'detail': 'BAD_REQUEST'})
        self.assertEqual(int(status_code), 400)

    def test_new_route(self):
        self.client_socket.connect(('localhost', 8000))
        self.client_socket.send(
            'POST /new_route HTTP/1.1\r\n\r\n'
            '{\n\t"source": "ABC",\n\t"destination": "DEF",\n\t"price": 35\n}'.encode()
        )
        response = (self.client_socket.recv(1024).decode()).strip().split('\r\n')
        protocol, status_code, status = response[0].split()
        data = json.loads(response[-1].replace('\'', '\"'))
        self.assertEqual(data, {'source': 'ABC', 'destination': 'DEF', 'price': 35})
        self.assertEqual(int(status_code), 201)
        remove_last_line()

    # BUGFIX: this method was named test_get_route_gru_cdg, colliding with the
    # HTTP test above — the later definition silently replaced the earlier one,
    # so the /get_route HTTP test never ran. Renamed to restore both tests.
    def test_find_route_gru_cdg(self):
        route = FindRoute('GRU', 'CDG', FILE)
        route.dijkstra()
        route.best_route()
        self.assertEqual(
            route.result,
            {
                'route': 'GRU - BRC - SCL - ORL - CDG',
                'price': 40
            }
        )

    def test_get_route_brc_cdg(self):
        route = FindRoute('BRC', 'CDG', FILE)
        route.dijkstra()
        route.best_route()
        self.assertEqual(
            route.result,
            {
                'route': 'BRC - SCL - ORL - CDG',
                'price': 30
            }
        )

    def test_get_route_brc_cdg_with_lowercase(self):
        route = FindRoute('brc', 'cdg', FILE)
        route.dijkstra()
        route.best_route()
        self.assertEqual(
            route.result,
            {
                'route': 'BRC - SCL - ORL - CDG',
                'price': 30
            }
        )

    def test_get_route_without_source(self):
        route = FindRoute('', 'cdg', FILE)
        route.dijkstra()
        route.best_route()
        self.assertEqual(
            route.result,
            {}
        )

    def test_get_route_without_destination(self):
        route = FindRoute('brc', '', FILE)
        route.dijkstra()
        route.best_route()
        self.assertEqual(
            route.result,
            {}
        )

    def test_get_route_without_file(self):
        route = FindRoute('brc', 'cdg')
        route.dijkstra()
        route.best_route()
        self.assertEqual(
            route.result,
            {}
        )
|
{"/travels/helpers.py": ["/travels/exceptions.py", "/travels/urls.py"], "/travels/urls.py": ["/travels/views.py"], "/travels/views.py": ["/travels/exceptions.py", "/travels/script.py", "/settings.py", "/travels/helpers.py"], "/tests/test_api.py": ["/travels/script.py", "/settings.py"], "/tests/test_script.py": ["/travels/exceptions.py", "/travels/script.py", "/settings.py"], "/travels/script.py": ["/travels/exceptions.py"], "/server.py": ["/travels/exceptions.py", "/travels/helpers.py", "/settings.py"]}
|
20,639
|
socrateschieregato/TravelsTest
|
refs/heads/master
|
/tests/test_script.py
|
import logging
import mock
import os
import unittest
from getopt import GetoptError
from travels.exceptions import ErrorWriteFile
from travels.script import FindRoute, write_file, file_data_console
from settings import FILE
logger = logging.getLogger('backend')
class TestScript(unittest.TestCase):
    """Unit tests for FindRoute, write_file and file_data_console."""

    def setUp(self):
        # Expected adjacency graph parsed from tests/input-file-test.csv.
        self.graph = {
            'GRU': {
                'BRC': 10,
                'CDG': 75,
                'SCL': 20,
                'ORL': 56
            },
            'BRC': {
                'SCL': 5
            },
            'SCL': {
                'ORL': 20
            },
            'CDG': {},
            'ORL': {
                'CDG': 5
            }
        }

    def test_get_rows_from_file(self):
        route = FindRoute(file_data=FILE)
        rows = route.get_rows_from_file()
        self.assertEqual(len(rows), 7)

    def test_get_rows_from_file_without_file(self):
        # Without a file, get_rows_from_file falls through and returns None.
        route = FindRoute()
        rows = route.get_rows_from_file()
        self.assertEqual(rows, None)

    def test_graph_from_file(self):
        route = FindRoute(file_data=FILE)
        graph = route.graph_from_file()
        self.assertEqual(
            graph,
            self.graph
        )

    def test_graph_from_file_with_no_exits_routes_graph_should_be_equal(self):
        # Source/destination that are absent from the file must not alter the graph.
        route = FindRoute('ABC', 'DEF', FILE)
        graph = route.graph_from_file()
        self.assertEqual(graph, self.graph)

    def test_dijkstra_calculate_route(self):
        route = FindRoute('GRU', 'CDG', FILE)
        route.dijkstra()
        self.assertEqual(route.unseen_nodes, {})
        self.assertEqual(
            route.shortest_distance,
            {'GRU': 0, 'BRC': 10, 'SCL': 15, 'CDG': 40, 'ORL': 35}
        )
        self.assertEqual(
            route.predecessor,
            {'BRC': 'GRU', 'CDG': 'ORL', 'SCL': 'BRC', 'ORL': 'SCL'}
        )

    def test_best_route(self):
        route = FindRoute('GRU', 'CDG', FILE)
        route.dijkstra()
        best_route = route.best_route()
        result_string_expected = (
            f"best route: {route.source} - {' - '.join(route.path)}"
            f" > {route.shortest_distance[route.destination]}"
        )
        self.assertEqual(best_route, result_string_expected)

    def test_best_route_logs(self):
        with self.assertLogs('backend', level='INFO') as log:
            route = FindRoute('GRU', 'CDG', FILE)
            route.dijkstra()
            route.best_route()
        self.assertIn('best route: GRU - BRC - SCL - ORL - CDG > 40', log.output[0])

    def test_write_file(self):
        source, destination, price = ['ABC', 'DEF', 42]
        file = write_file('tests/write-file.csv', source, destination, price)
        assert file
        # Clean up the temporary file created by write_file.
        os.remove('tests/write-file.csv')

    def test_write_file_should_return_an_error(self):
        # A falsy price makes write_file raise instead of writing.
        source, destination, price = ['ABC', 'DEF', None]
        with self.assertRaises(ErrorWriteFile):
            write_file('tests/write-file.csv', source, destination, price)

    def test_file_data_console(self):
        argv = ['tests/input-file-test.csv']
        file = file_data_console(argv)
        self.assertEqual(file, FILE)

    @mock.patch('sys.exit')
    def test_file_data_console_without_params(self, mock_sys):
        # sys.exit is mocked to raise so the failure path surfaces as an
        # exception instead of terminating the test run.
        mock_sys.side_effect = GetoptError
        argv = []
        with self.assertRaises(Exception):
            with self.assertLogs('backend', level='ERROR') as log:
                file_data_console(argv)
        self.assertIn('Example: python script.py <input_file.csv>', log.output[0])
|
{"/travels/helpers.py": ["/travels/exceptions.py", "/travels/urls.py"], "/travels/urls.py": ["/travels/views.py"], "/travels/views.py": ["/travels/exceptions.py", "/travels/script.py", "/settings.py", "/travels/helpers.py"], "/tests/test_api.py": ["/travels/script.py", "/settings.py"], "/tests/test_script.py": ["/travels/exceptions.py", "/travels/script.py", "/settings.py"], "/travels/script.py": ["/travels/exceptions.py"], "/server.py": ["/travels/exceptions.py", "/travels/helpers.py", "/settings.py"]}
|
20,640
|
socrateschieregato/TravelsTest
|
refs/heads/master
|
/travels/exceptions.py
|
class NotFoundException(Exception):
    """Raised when a request cannot be decoded or no matching view/route exists."""
    pass
class ErrorWriteFile(Exception):
    """Raised when appending a new route to the CSV file fails."""
    pass
|
{"/travels/helpers.py": ["/travels/exceptions.py", "/travels/urls.py"], "/travels/urls.py": ["/travels/views.py"], "/travels/views.py": ["/travels/exceptions.py", "/travels/script.py", "/settings.py", "/travels/helpers.py"], "/tests/test_api.py": ["/travels/script.py", "/settings.py"], "/tests/test_script.py": ["/travels/exceptions.py", "/travels/script.py", "/settings.py"], "/travels/script.py": ["/travels/exceptions.py"], "/server.py": ["/travels/exceptions.py", "/travels/helpers.py", "/settings.py"]}
|
20,641
|
socrateschieregato/TravelsTest
|
refs/heads/master
|
/travels/script.py
|
#!/usr/bin/python
import logging
import sys
import getopt
from travels.exceptions import ErrorWriteFile
logger = logging.getLogger('backend')
class FindRoute:
    """Cheapest-route finder (Dijkstra) over a CSV file of source,destination,price rows."""

    def __init__(self, source=None, destination=None, file_data=None):
        # Airport codes are normalised to upper case; empty strings become None.
        self.source = source.upper() if source else None
        self.destination = destination.upper() if destination else None
        self.price = 0
        self.data = file_data
        self.graph = self.graph_from_file()
        self.path = []
        self.shortest_distance = {}
        self.predecessor = {}
        # Working copy of the graph consumed by calculate_route().
        self.unseen_nodes = self.graph.copy()
        self.infinity = 999999  # sentinel "unreachable" distance
        self.result = {}

    def get_rows_from_file(self):
        """Return the CSV rows as lists of strings, or None when no file was given."""
        if self.data:
            rows = []
            with open(self.data, 'r') as f:
                reader = f.readlines()
                for row in reader:
                    rows.append(row.strip().split(','))
            return rows

    def graph_from_file(self):
        """Build the adjacency dict {source: {destination: price}} from the CSV rows."""
        rows = self.get_rows_from_file() or []
        graph = {}
        for row in rows:
            source, destination, price = row
            if source not in graph:
                graph[source] = {}
            # Destinations also get a node entry so dead ends appear in the graph.
            if not graph.get(destination):
                graph[destination] = {}
            graph[source][destination] = int(price)
        return graph

    def dijkstra(self):
        """Initialise distances (source = 0, others 'infinite') and relax all nodes."""
        for node in self.unseen_nodes:
            self.shortest_distance[node] = self.infinity
        self.shortest_distance[self.source] = 0
        self.calculate_route()

    def calculate_route(self):
        """Core Dijkstra loop: repeatedly settle the closest unseen node."""
        while self.unseen_nodes:
            min_node = None
            for node in self.unseen_nodes:
                if min_node is None or self.shortest_distance[node] < self.shortest_distance[min_node]:
                    min_node = node
            for child_node, price in self.graph[min_node].items():
                if price + self.shortest_distance[min_node] < self.shortest_distance[child_node]:
                    self.shortest_distance[child_node] = price + self.shortest_distance[min_node]
                    self.predecessor[child_node] = min_node
            self.unseen_nodes.pop(min_node)

    def best_route(self):
        """Walk predecessors back from the destination, fill path/price/result, and return a summary string."""
        current_node = self.destination or self.source
        # Only produce a result when source, destination and a data file were all given.
        prerequisites = bool(self.source and self.destination and self.data)
        while current_node != self.source:
            try:
                self.path.insert(0, current_node)
                current_node = self.predecessor[current_node]
            except KeyError:
                logger.info('Path not reachable')
                break
        if prerequisites and self.shortest_distance[self.destination] != self.infinity:
            self.price = self.shortest_distance[self.destination]
            self.result = {
                'route': f"{self.source} - {' - '.join(self.path)}",
                'price': (self.shortest_distance[self.destination])
            }
        result_string = (
            f"best route: {self.source} - {' - '.join(self.path)}"
            f" > {self.shortest_distance[self.destination]}"
        )
        logger.info(result_string)
        return result_string
def file_data_console(argv):
    """Return the input CSV filename from the command-line args, or exit(2).

    argv: the argument list after the script name (i.e. sys.argv[1:]).
    """
    error_message = 'Example: python script.py <input_file.csv>'
    try:
        # BUGFIX: shortopts must be a string; passing None made getopt crash
        # with a TypeError on any '-x' style argument instead of raising
        # GetoptError. '' means "no options accepted".
        _, input_file = getopt.getopt(argv, '')
        if not input_file:
            raise getopt.GetoptError(error_message)
    except getopt.GetoptError:
        logger.error(error_message)
        sys.exit(2)
    return input_file[0]
def write_file(file, source, destination, price):
    """Append 'source,destination,price' to *file* and return True on success.

    Raises ErrorWriteFile when any field is falsy (NOTE(review): price 0 is
    rejected too — confirm intended) or when writing fails.
    """
    try:
        logger.info('Starting to save in csv file')
        if not (source and destination and price):
            raise ErrorWriteFile
        new_entry = f'{source},{destination},{str(price)}'
        # 'a+' appends; the file is expected to end without a trailing newline,
        # so the separator is written before the new row.
        # (Removed a redundant f.close() — the with-block already closes the file.)
        with open(file, 'a+') as f:
            f.write('\n')
            f.write(new_entry)
        logger.info('Data saved in csv with success!')
        return True
    except Exception:
        raise ErrorWriteFile
if __name__ == "__main__":
    # Interactive loop: read "SRC-DST" routes until an empty line is entered.
    option = 1
    while option != 0:
        input_data = input('please enter the route: ')
        if not input_data:
            break
        input_data = input_data.upper().split('-')
        # NOTE(review): file_data_console re-parses sys.argv on every iteration,
        # and input without a '-' raises IndexError below — confirm intended.
        file_data = file_data_console(sys.argv[1:])
        route = FindRoute(input_data[0], input_data[1], file_data=file_data)
        route.dijkstra()
        print(route.best_route())
|
{"/travels/helpers.py": ["/travels/exceptions.py", "/travels/urls.py"], "/travels/urls.py": ["/travels/views.py"], "/travels/views.py": ["/travels/exceptions.py", "/travels/script.py", "/settings.py", "/travels/helpers.py"], "/tests/test_api.py": ["/travels/script.py", "/settings.py"], "/tests/test_script.py": ["/travels/exceptions.py", "/travels/script.py", "/settings.py"], "/travels/script.py": ["/travels/exceptions.py"], "/server.py": ["/travels/exceptions.py", "/travels/helpers.py", "/settings.py"]}
|
20,642
|
socrateschieregato/TravelsTest
|
refs/heads/master
|
/settings.py
|
import os

# Path of the routes CSV file; overridable via the FILE environment variable.
FILE = os.environ.get('FILE', 'tests/input-file-test.csv')

# Minimal HTTP/1.1 response template used by server.py.
DATA_STRUCTURE = "HTTP/1.1 {status_code} {status}\r\n" \
    "Content-Type: application/json; charset=utf-8" \
    "\r\n\r\n{body}\r\n\r\n"

# Rows used to reset the test CSV (see tests/test_api.py); the last row
# deliberately has no trailing newline — write_file prepends one.
INITIAL_DATA = [
    'GRU,BRC,10\n',
    'BRC,SCL,5\n',
    'GRU,CDG,75\n',
    'GRU,SCL,20\n',
    'GRU,ORL,56\n',
    'ORL,CDG,5\n',
    'SCL,ORL,20'
]
|
{"/travels/helpers.py": ["/travels/exceptions.py", "/travels/urls.py"], "/travels/urls.py": ["/travels/views.py"], "/travels/views.py": ["/travels/exceptions.py", "/travels/script.py", "/settings.py", "/travels/helpers.py"], "/tests/test_api.py": ["/travels/script.py", "/settings.py"], "/tests/test_script.py": ["/travels/exceptions.py", "/travels/script.py", "/settings.py"], "/travels/script.py": ["/travels/exceptions.py"], "/server.py": ["/travels/exceptions.py", "/travels/helpers.py", "/settings.py"]}
|
20,643
|
socrateschieregato/TravelsTest
|
refs/heads/master
|
/server.py
|
import logging
from socket import (
AF_INET,
socket,
SOCK_STREAM,
SHUT_WR
)
from travels.exceptions import NotFoundException
from travels.helpers import route_data_or_404, get_view
from settings import DATA_STRUCTURE
logger = logging.getLogger('backend')
class Server:
    """Single-threaded, blocking HTTP-ish server on localhost:8000."""

    def create_server(self):
        """Accept connections forever, dispatching each request to a view.

        Per-request errors are mapped to 404 (NotFoundException) or 400 (anything
        else); the loop only terminates on KeyboardInterrupt.
        """
        try:
            server = socket(AF_INET, SOCK_STREAM)
            server.bind(('localhost', 8000))
            server.listen(5)
            while True:
                (client_socket, address) = server.accept()
                rd = client_socket.recv(5000).decode()
                pieces = rd.split('\r\n')
                if len(pieces) > 0:
                    print(pieces[0])
                try:
                    route_data = route_data_or_404(pieces)
                    body = get_view(route_data)
                    # POST with a body -> 201 Created; everything else -> 200 OK.
                    if body and route_data['method'] == 'POST':
                        status_code = 201
                    else:
                        status_code = 200
                    data = DATA_STRUCTURE.format(
                        status_code=status_code,
                        status='OK',
                        body=body if body else ''
                    )
                    client_socket.send(bytes(data, 'utf-8'))
                except NotFoundException as e:
                    data = DATA_STRUCTURE.format(
                        status_code=404,
                        status='NOT_FOUND',
                        body={'detail': "NOT_FOUND"}
                    )
                    client_socket.send(bytes(data, 'utf-8'))
                    logger.error(f"Erro: {e}")
                except Exception as e:
                    # Any other failure while handling the request -> 400.
                    data = DATA_STRUCTURE.format(
                        status_code=400,
                        status='BAD_REQUEST',
                        body={'detail': "BAD_REQUEST"}
                    )
                    client_socket.send(bytes(data, 'utf-8'))
                    logger.error(f"Erro: {e}")
                finally:
                    # Half-close so the client sees end-of-response.
                    client_socket.shutdown(SHUT_WR)
        except KeyboardInterrupt:
            logger.info("\nShutting down...\n")
        finally:
            server.close()
# NOTE(review): this runs at import time and prints before the socket is
# actually bound inside create_server() — confirm intended ordering.
print('Server up and running on: http://localhost:8000')
server = Server()
server.create_server()
|
{"/travels/helpers.py": ["/travels/exceptions.py", "/travels/urls.py"], "/travels/urls.py": ["/travels/views.py"], "/travels/views.py": ["/travels/exceptions.py", "/travels/script.py", "/settings.py", "/travels/helpers.py"], "/tests/test_api.py": ["/travels/script.py", "/settings.py"], "/tests/test_script.py": ["/travels/exceptions.py", "/travels/script.py", "/settings.py"], "/travels/script.py": ["/travels/exceptions.py"], "/server.py": ["/travels/exceptions.py", "/travels/helpers.py", "/settings.py"]}
|
20,656
|
ouyangwuhai/freemind_to_excel
|
refs/heads/master
|
/lib/freemind_to_project.py
|
#!/usr/bin/env python3
#
# freemind_to_project.py
#
# Copyright LiKneu 2019
#
import pprint # Similar to Data::Dumper
import lxml.etree as ET # handling XML
def main(args):
    """Entry-point stub; accepts argv and always reports success (exit code 0)."""
    return 0
def get_pj_path(fm_path):
    """Translate a Freemind XPATH into the corresponding MS Project XPATH.

    'map' becomes 'Project', the first 'node' becomes 'Tasks', and every
    remaining 'node' becomes 'Task'.
    """
    return (
        fm_path.replace('map', 'Project')
        .replace('node', 'Tasks', 1)
        .replace('node', 'Task')
    )
def node_to_note(pj_root, pj_uid, pj_text):
    """Attach *pj_text* as a <Notes> element to the Task whose UID is *pj_uid*."""
    # Locate the <UID> element carrying this value...
    xpath_pattern = '//UID[text()="' + str(pj_uid) + '"]'
    uid_elements = pj_root.xpath(xpath_pattern)
    # ...and hang the <Notes> off its parent <Task>.
    task = uid_elements[0].getparent()
    note = ET.SubElement(task, "Notes")
    note.text = pj_text
def node_to_task(pj_parent, pj_name, pj_uid, pj_level=1):
    '''Creates <Task> element with the necessary subelements and attaches it to
    a given parent element.

    # Example of a minimalistic task as it was produced from a Freemap
    # Project 2003 export
    <Task>
        <UID>0</UID>
        <ID>1</ID>
        <Type>1</Type>
        <IsNull>0</IsNull>
        <OutlineNumber>1</OutlineNumber>
        <OutlineLevel>0</OutlineLevel>
        <Name>Stamm</Name>
        <FixedCostAccrual>1</FixedCostAccrual>
        <RemainingDuration>PT8H0M0S</RemainingDuration>
        <Estimated>1</Estimated>
        <PercentComplete>0</PercentComplete>
        <Priority/>
    </Task>
    '''
    task = ET.SubElement(pj_parent, "Task")
    # Name (occurence: min = 0, max = 1)
    # The name of the task.
    name = ET.SubElement(task, "Name")
    # Project doesn't allow linebreaks in task titles so we remove them here
    name.text = pj_name.replace('\n', ' ')
    # UID (occurences: min = max = 1)
    # The UID element is a unique identifier
    uid = ET.SubElement(task, "UID")
    uid.text = str(pj_uid)
    # ID (occurence: min = 0, max = 1)
    # For Resource, ID is the position identifier of the resource within the
    # list of resources. For Task, it is the position identifier of the task
    # in the list of tasks.
    pj_id = ET.SubElement(task, "ID")
    pj_id.text = "1"
    # Type
    # 0 = fixed units
    # 1 = fixed duration
    # 2 = fixed work
    pj_type = ET.SubElement(task, "Type")
    pj_type.text = "1"
    # IsNull
    # 0 = task or ressource is not null
    # 1 = task or ressource is null
    isnull = ET.SubElement(task, "IsNull")
    isnull.text = "0"
    # OutlineNumber
    # Indicates the exact position of a task in the outline. For example,
    # "7.2" indicates that a task is the second subtask under the seventh
    # top-level summary task.
    outlinenumber = ET.SubElement(task, "OutlineNumber")
    outlinenumber.text = "1"
    # OutlineLevel
    # The number that indicates the level of a task in the project outline
    # hierarchy.
    outlinelevel = ET.SubElement(task, "OutlineLevel")
    outlinelevel.text = str(pj_level)
    # FixedCostAccrual
    # Indicates how fixed costs are to be charged, or accrued, to the cost
    # of a task. (no info on Microsofts webpage what values are allowed)
    # BUGFIX: tag was misspelled "FiexedCostAccrual"; MS Project expects
    # "FixedCostAccrual" (see the docstring example above).
    fixedcostaccrual = ET.SubElement(task, "FixedCostAccrual")
    fixedcostaccrual.text = "1"
    # RemainingDuration (occurence: min = 0, max = 1)
    # The amount of time required to complete the unfinished portion of a
    # task. Remaining duration can be calculated in two ways, either based on
    # percent complete or on actual duration.
    # TODO: check if RemainingDuration is necessary
    remainingduration = ET.SubElement(task, "RemainingDuration")
    remainingduration.text = "PT8H0M0S"  # 8 Hours, 0 Minutes, 0 seconds
    # Estimated (occurence: min = 0, max = 1)
    # Indicates whether the task's duration is flagged as an estimate.
    # 0 = not estimated (i.e. precise)
    # 1 = estimated
    estimated = ET.SubElement(task, "Estimated")
    estimated.text = "1"
    # PercentComplete (occurence: min = 0, max = 1)
    # The percentage of the task duration completed.
    percentcomplete = ET.SubElement(task, "PercentComplete")
    percentcomplete.text = "0"
    # Priority (occurence: min = 0, max = 1)
    # Indicates the level of importance assigned to a task, with 500 being
    # standard priority; the higher the number, the higher the priority.
    # Deliberately left without text, producing <Priority/>.
    priority = ET.SubElement(task, "Priority")
def to_project(input_file, output_file):
    '''Converts the Freeemind XML file into an MS Project XML file.

    input_file: path of the Freemind .mm XML file.
    output_file: path where the Project XML is written.
    '''
    print('Converting to Project')
    print('Input file : ' + input_file)
    print('Output file: ' + output_file)
    # Prepare the Fremmind XML tree
    # Read freemind XML file into variable
    fm_tree = ET.parse(input_file)
    # Get the root element of the Freemind XML tree
    fm_root = fm_tree.getroot()
    # Prepare the root element of the new Project XML file
    attrib = {'xmlns':'http://schemas.microsoft.com/project'}
    pj_root = ET.Element('Project', attrib)
    # Based on the Project root element we define the Project tree
    pj_tree = ET.ElementTree(pj_root)
    # Add the Project <Title> element from the Freemind root node's TEXT
    pj_title = ET.SubElement(pj_root, 'Title')
    pj_title.text = fm_root.xpath('/map/node/@TEXT')[0]
    # Add the Project <Tasks> element
    pj_tasks = ET.SubElement(pj_root, 'Tasks')
    # NOTE(review): pj_path is computed but never used below — confirm it can go.
    pj_path = pj_tree.getpath(pj_tasks)
    # Dict holding mapping table of Freemind and Project UIDs
    uid_mapping = {}
    # UID in Project starts with 0
    pj_uid = 0
    for fm_node in fm_root.iter('node'):
        # Determine the parent of the present Freemind element
        fm_parent = fm_node.getparent()
        # Determine the XPATH of the Freemind parent element
        fm_parent_path = fm_tree.getpath(fm_parent)
        # Determine the XPATH of the Project parent element from the XPATH of
        # the Freemind parent element
        pj_parent_path = get_pj_path(fm_parent_path)
        print("pj parent path:", pj_parent_path)
        # Determine the Project parent element based on its XPATH
        pj_parent = pj_tree.xpath(pj_parent_path)[0]
        # Get the Project text from the Freemind node TEXT attribute
        pj_name = fm_node.get("TEXT")
        # Get the Freemind ID from its attribute
        fm_id = fm_node.get("ID")
        # Add Freemind ID and Project UID to mapping table (Dictionary)
        uid_mapping[fm_id] = pj_uid
        # calculate level with help of XPATH
        # Count number of dashed as indicator for the depth of the structure
        # -1 is for the 1st that is not needed here
        pj_level = pj_tree.getpath(pj_parent).count('/')-1
        node_to_task(pj_parent=pj_parent, pj_name=pj_name, pj_uid=pj_uid, pj_level=pj_level)
        # Check if node has an attached note <richcontent>
        fm_note = fm_node.xpath('normalize-space(./richcontent)')
        # If yes, remove all html tags and store the remaining text in a
        # Project <Note> tag
        if fm_note:
            node_to_note(pj_root=pj_root, pj_uid=pj_uid, pj_text=fm_note)
        pj_uid += 1
    # Write the Project XML tree to disc
    pj_tree.write(output_file, pretty_print=True, xml_declaration=True, encoding="utf-8")
if __name__ == '__main__':
    # Run standalone: exit with main()'s return code.
    import sys
    sys.exit(main(sys.argv))
|
{"/main.py": ["/lib/freemind_to_excel.py", "/lib/freemind_to_project.py", "/lib/freemind_to_word.py"]}
|
20,657
|
ouyangwuhai/freemind_to_excel
|
refs/heads/master
|
/lib/freemind_to_word.py
|
#!/usr/bin/env python3
#
# freemind_to_word.py
#
# Copyright LiKneu 2019
#
import lxml.etree as ET # handling XML
import docx # handling of MS Word documents
def main(args):
    """Entry-point stub; accepts argv and always reports success (exit code 0)."""
    return 0
def to_word(input_file, output_file):
    '''Converts the Freeemind XML file into an MS Mord DOCX file.

    input_file: path of the Freemind .mm XML file.
    output_file: path where the .docx document is written.
    '''
    print('Converting to Word')
    print('Input file : ' + input_file)
    print('Output file: ' + output_file)
    # Prepare the Fremmind XML tree
    # Read freemind XML file into variable
    fm_tree = ET.parse(input_file)
    # Get the root element of the Freemind XML tree
    fm_root = fm_tree.getroot()
    doc = docx.Document()
    # Get the title of the freemind root node and use it as title of the
    # cover page
    title_cover = fm_root.xpath('/map/node/@TEXT')[0]
    main_title = doc.add_heading(title_cover, 0)
    # Add a paragraph below the title heading. I found no other way to directly
    # add a page break after the heading.
    para = doc.add_paragraph(' ')
    doc.paragraphs[0].runs[0].add_break(docx.enum.text.WD_BREAK.PAGE)
    # Walk through all <node> tags of the freemind file
    for fm_node in fm_root.iter('node'):
        # Get the content of the "TEXT" attribute of the node
        wd_title = fm_node.get("TEXT")
        # Calculate how deep we have gone into the tree structure to choose a
        # fitting heading style
        wd_title_level = fm_tree.getpath(fm_node).count('/')-1
        # The 1st 4 levels get different heading styles..
        if wd_title_level <= 4:
            doc.add_heading(wd_title, wd_title_level)
        else:
            #..all the other levels stick with one style
            doc.add_paragraph(wd_title, 'Heading5')
        # Check if node has an attached note <richcontent>
        fm_note = fm_node.xpath('normalize-space(./richcontent)')
        # If yes, remove all html tags and store the remaining text in a
        # Project <Note> tag
        if fm_note:
            doc.add_paragraph(fm_note, 'Normal')
    # Write the Word DOCX file to disc
    doc.save(output_file)
if __name__ == '__main__':
    # Run standalone: exit with main()'s return code.
    import sys
    sys.exit(main(sys.argv))
|
{"/main.py": ["/lib/freemind_to_excel.py", "/lib/freemind_to_project.py", "/lib/freemind_to_word.py"]}
|
20,658
|
ouyangwuhai/freemind_to_excel
|
refs/heads/master
|
/main.py
|
#!/usr/bin/env python3
#
# Copyright LiKneu 2019
#
# Converts Freemind XML files into other file formats like:
# * Excel
#
import sys # command line options
from lib.freemind_to_excel import to_excel
from lib.freemind_to_project import to_project
from lib.freemind_to_word import to_word
def main():
    """Parse command-line arguments and dispatch the requested conversion.

    Usage: main.py (--excel | --project | --word) input_file output_file
    Prints a usage message and returns (instead of crashing) on bad input.
    """
    script = sys.argv[0]  # filename of this script
    usage = '''Usage: main.py action filename
action must be one of --excel --project --word'''
    # ROBUSTNESS: the original only checked for zero arguments and then hit an
    # IndexError with 1-2 arguments; it also validated the action with `assert`,
    # which is stripped under `python -O`.
    if len(sys.argv) < 4:  # need action, input file and output file
        print(usage)
        return
    action = sys.argv[1]
    if action not in ('--excel', '--project', '--word'):
        print('Action is not one of --excel --project --word: ' + action)
        print(usage)
        return
    input_file = sys.argv[2]   # filename of the to be converted mindmap
    output_file = sys.argv[3]  # filename of the export file
    process(action, input_file, output_file)
def process(action, input_file, output_file):
    '''Processes user input.'''
    # Dispatch table: action flag -> converter function.
    converters = {
        '--excel': to_excel,
        '--project': to_project,
        '--word': to_word,
    }
    converter = converters.get(action)
    if converter is not None:
        converter(input_file, output_file)
if __name__ == '__main__':
    # Script entry point.
    main()
|
{"/main.py": ["/lib/freemind_to_excel.py", "/lib/freemind_to_project.py", "/lib/freemind_to_word.py"]}
|
20,659
|
ouyangwuhai/freemind_to_excel
|
refs/heads/master
|
/lib/freemind_to_excel.py
|
#!/usr/bin/env python3
def to_excel(input_file, output_file):
    '''Converts the Freeemind XML file into an Excel sheet.

    input_file: path of the Freemind .mm XML file.
    output_file: path where the .xlsx workbook is written.
    '''
    # Imports kept function-local so the module imports without these deps.
    import lxml.etree as ET   # handling XML
    import openpyxl           # handling Excel files
    from tqdm import tqdm     # progress bar
    print('Converting to Excel')
    print('Input file : ' + input_file)
    print('Output file: ' + output_file)
    wb = openpyxl.Workbook()  # Create Excel workbook object
    # Get the active sheet (Excel creates sheet 'Sheet') automatically with the
    # creation of the workbook
    sheet = wb.active
    tree = ET.parse(input_file)  # Read freemind XML file into variable
    root = tree.getroot()        # Get the root element of the XML file
    # Get text of the root node with a XPATH ...
    sheet_title = root.xpath('/map/node/@TEXT')
    sheet.title = sheet_title[0]  # ..and name the Excel sheet like it
    base = []  # Create list to hold the cells for export to EXCEL
    headings = ['Level 0']
    max_levels = 0
    for node in tqdm(root.iter('node')):
        path = tree.getpath(node)  # Get the xpath of the node
        # Count number of dashed as indicator for the depth of the structure
        # -2 is for the 1st 2 levels that are not needed here
        nr_dashes = path.count('/') - 2
        if nr_dashes > max_levels:  # To generate headings for each column we
            max_levels = nr_dashes  # need to know the max depth of the tree
            # Add a heading to the list for each level of the tree
            headings.append('Level ' + str(max_levels))
        # Count numbers of elements already in the list
        nr_base = len(base)
        # If we have less dashes than elements we now that we jumped back a level
        if nr_dashes < nr_base:
            # And so we reduce the list to the same number like levels
            base = base[:nr_dashes]
        # Append the text of the element to the list
        base.append(node.get('TEXT'))
        # If there are no children of type 'node' below the present node add
        # data to Excel sheet
        if not node.xpath('.//node'):
            sheet.append(base)
    # Insert an empty row on top of all rows to make room for header cells
    sheet.insert_rows(1)
    col = 1  # First row in Excel is 1 (not 0)
    # Since it seems not possible to inser a whole row with data at the top of
    # a shhet we have to iterate through the columns and write cells separately
    for heading in headings:
        sheet.cell(row=1, column=col).value = heading
        col+=1
    # Add autofilter
    # We use sheet.dimensions to let openpyxl determin the number of rows & columns
    sheet.auto_filter.ref = sheet.dimensions
    sheet.freeze_panes = 'A2'
    wb.save(output_file)  # Save the workbook we have created above to disc
|
{"/main.py": ["/lib/freemind_to_excel.py", "/lib/freemind_to_project.py", "/lib/freemind_to_word.py"]}
|
20,661
|
deepestcyber/rgb-tetris-wall
|
refs/heads/master
|
/raspi_preproc/workbench/streaming/util/show_single.py
|
import numpy as np
import matplotlib.pyplot as plt
from encoding import UYVY_RAW2RGB_PIL
# Load the raw UYVY capture. Wrapping the bytes in a bytearray and decoding
# with np.frombuffer yields a writable uint8 array without the slow
# intermediate Python list that np.array(list(f.read())) would build.
with open('test.raw', 'rb') as f:
    data = np.frombuffer(bytearray(f.read()), dtype=np.uint8)
# Decode the 720x576 UYVY frame to an RGB PIL image and display it.
img = UYVY_RAW2RGB_PIL(data, 720, 576)
img.show()
#frame = data[1::2].reshape(720, 576)
#plt.imshow(frame)
#plt.show()
|
{"/reels/argos.py": ["/fluter.py"], "/reels/slowimg.py": ["/fluter.py"], "/reels/wator.py": ["/fluter.py"], "/reels/wireworld.py": ["/fluter.py"], "/reels/gol.py": ["/fluter.py"], "/reels/rauschen.py": ["/fluter.py"], "/fluter.py": ["/reels/fluter/__init__.py"]}
|
20,662
|
deepestcyber/rgb-tetris-wall
|
refs/heads/master
|
/reels/argos.py
|
"""
Argos reel for LED wall.
Just some harmless eyes, looking around.
TODO: check if colours look decent on wall.
Created by kratenko.
"""
import numpy as np
import time
import random
from fluter import Fluter
iris = """
-
aaaa -
aaaaaa -
aaaooaaa -
aaooooaa -
aaooooaa -
aaaooaaa -
aaaaaa -
aaaa -
-
-
"""
ball = """
-
-
wwwwwwwwwwwwwwww-
wwwwwwwwwwwwwwww-
wwwwwwwwwwwwwwww-
wwwwwwwwwwwwwwww-
wwwwwwwwwwwwwwww-
wwwwwwwwwwwwwwww-
wwwwwwwwwwwwwwww-
"""
lid1 = """
................-
.....vxxxxv.....-
...vxx xxv...-
.vxx xxv.-
vx xv-
x x-
vx xv-
.vx xv.-
..vxx xxv..-
....vxxxxxxv....-
................-
"""
lid2 = """
................-
.....vxxxxv.....-
...vxxyyyyxxv...-
.vxx xxxxxx xxv.-
vx xv-
x x-
vx xv-
.vx xv.-
..vxx xxv..-
....vxxxxxxv....-
................-
"""
lid3 = """
................-
.....vxxxxv.....-
...vxxyyyyxxv...-
.vxxyyyyyyyyxxv.-
vx xxyyyyxx xv-
x xxxx x-
vx xv-
.vx xv.-
..vxx xxv..-
....vxxxxxxv....-
................-
"""
lid4 = """
................-
.....vxxxxv.....-
...vxxyyyyxxv...-
.vxxyyyyyyyyxxv.-
vxyyyyyyyyyyyyxv-
x xxyyyyyyyxxx x-
vx xxxxxxxx xv-
.vx xv.-
..vxx xxv..-
....vxxxxxxv....-
................-
"""
lid5 = """
................-
.....vxxxxv.....-
...vxxyyyyxxv...-
.vxxyyyyyyyyxxv.-
vxyyyyyyyyyyyyxv-
xyyyyyyyyyyyyyyx-
vxyyyyyyyyyyyyxv-
.vxyyyyyyyyyyxv.-
..vxxyyyyyyxxv..-
....vxxxxxxv....-
................-
"""
# Lid animation frames indexed by Eye.lid_pos (0 = open ... 4 = closed).
# NOTE(review): lid2/lid3 are defined above but unused here -- the first
# three frames all reuse lid1; confirm whether that easing is intentional.
lids = [lid1, lid1, lid1, lid4, lid5]
# Fixed palette for the sprite characters shared by all eyes.
cmap = {
    ".": [0x00, 0x00, 0x00],
    "w": [0xd8, 0xd8, 0xd8],
    "X": [0x80, 0x80, 0x80],
    "x": [0xea//2, 0xc0//2, 0x86//2],
    "v": [0xea//6, 0xc0//6, 0x86//6],
    "y": [0xea, 0xc0, 0x86],
    "b": [0xff, 0xff, 0xff],
}
# Per-eye iris palettes ("a" = iris ring, "o" = pupil), selected by
# Eye.colour as an index into this list.
c2map = [
    # blue
    {
        "a": [0x00, 0x6d, 0xcc],
        "o": [0x02, 0x24, 0x3d],
    },
    # green
    {
        "a": [0x02, 0xbb, 0x39],
        "o": [0x01, 0x3d, 0x09]
    },
    # yellow
    {
        "a": [0xac, 0xbc, 0x01],
        "o": [0x3b, 0x3c, 0x00]
    },
    # red
    {
        "a": [0xb5, 0x51, 0x03],
        "o": [0x3c, 0x16, 0x01]
    }
]
def draw(a, s, trans=None, colour=0):
    """Blit the ASCII-art sprite `s` into the RGB array `a`.

    Characters are looked up in the shared palette `cmap`; characters not
    found there fall back to the per-eye palette `c2map[colour]`.  `trans`
    shifts the sprite by (dy, dx); pixels landing outside `a` are clipped.
    """
    dy, dx = trans if trans is not None else (0, 0)
    height, width, _ = np.shape(a)
    rows = [ln for ln in s.split("\n") if ln]
    for y, row in enumerate(rows):
        if y >= height:
            break
        ty = y + dy
        if not 0 <= ty < height:
            continue
        for x, ch in enumerate(row):
            if x >= width:
                break
            tx = x + dx
            if not 0 <= tx < width:
                continue
            if ch in cmap:
                a[ty, tx] = cmap[ch]
            elif 0 <= colour < len(c2map) and ch in c2map[colour]:
                a[ty, tx] = c2map[colour][ch]
class Eye:
    """State machine driving one googly eye.

    State graph: idle -> (move | close), close -> closed -> open -> idle.
    `lid_pos` indexes the module-level `lids` frames (0 = fully open).
    """

    # Lid positions of a full blink cycle (currently unused).
    blinking = [1, 2, 3, 4, 4, 4, 4, 3, 2, 1]

    def __init__(self):
        self.direction = (0, 0)  # iris offset (dy, dx)
        self.colour = 0          # index into c2map
        self.lid_pos = 0         # index into lids
        self.action = None
        self.progress = None
        self.idle_time = None
        self.idle_next = None
        self.start_action("idle")

    def update(self):
        """Advance the state machine by one animation tick."""
        self.progress += 1
        state = self.action
        if state == "close":
            if self.progress > 4:
                self.start_action("closed")
            else:
                self.lid_pos = self.progress
        elif state == "closed":
            if self.progress > 2:
                self.change_colour()
                self.start_action("open")
        elif state == "open":
            if not 1 <= self.progress <= 4:
                self.start_action("idle")
            else:
                self.lid_pos = 4 - self.progress
        elif state == "move":
            self.change_direction()
            self.start_action("idle")
        elif state == "idle":
            if self.progress >= self.idle_time:
                self.start_action(self.idle_next)

    def start_action(self, action):
        """Enter `action`; idle additionally rolls a duration and successor."""
        self.action = action
        self.progress = 0
        if action != "idle":
            return
        self.idle_time = random.randint(30, 120)
        # 1-in-10 chance to blink, otherwise glance somewhere else.
        self.idle_next = "close" if random.randint(1, 10) == 1 else "move"

    def change_direction(self):
        """Pick a new random iris offset."""
        self.direction = (random.randint(-2, 2), random.randint(-3, 3))

    def change_colour(self):
        """With probability 1/5 pick a new iris palette."""
        if random.randint(1, 5) == 1:
            self.colour = random.randint(0, len(c2map) - 1)

    def draw(self):
        """Render the eye into a fresh 11x16 RGB array."""
        canvas = np.zeros((11, 16, 3), dtype=np.uint8)
        draw(canvas, ball)
        draw(canvas, iris, self.direction, colour=self.colour)
        draw(canvas, lids[self.lid_pos])
        return canvas
# Main loop: two stacked eyes on the 24x16 wall at ~20 fps.
fluter = Fluter()
top_off = (0, 0)  # NOTE(review): unused -- leftover from an offset experiment?
a = np.zeros((24, 16, 3), dtype=np.uint8)
top = Eye()
bot = Eye()
while True:
    # Upper eye fills rows 0..10, lower eye rows 13..23; rows 11-12 stay black.
    a[:11,:,:] = top.draw()
    a[13:24,:,:] = bot.draw()
    fluter.send_array(a)
    top.update()
    bot.update()
    time.sleep(.05)
|
{"/reels/argos.py": ["/fluter.py"], "/reels/slowimg.py": ["/fluter.py"], "/reels/wator.py": ["/fluter.py"], "/reels/wireworld.py": ["/fluter.py"], "/reels/gol.py": ["/fluter.py"], "/reels/rauschen.py": ["/fluter.py"], "/fluter.py": ["/reels/fluter/__init__.py"]}
|
20,663
|
deepestcyber/rgb-tetris-wall
|
refs/heads/master
|
/raspi_preproc/audio_beatdetection.py
|
import datetime
import numpy as np
class AudioBeatdetection:
    """Beat-driven visualisation source for the LED wall (stub).

    Holds a (num_leds_v, num_leds_h, 3) frame buffer which generate_frame()
    is meant to fill from audio beat detection.
    """

    def __init__(self, _num_leds_h=16, _num_leds_v=24):
        self.num_leds_h = _num_leds_h
        self.num_leds_v = _num_leds_v
        # Frame buffer, rows x cols x RGB; starts all black.
        self.leds = np.zeros((_num_leds_v, _num_leds_h, 3))
        self.last_measurement = datetime.datetime.now()
        #TODO

    def generate_frame(self, pattern=0):
        """Return the current frame buffer; *pattern* is reserved for later."""
        # TODO trigger the measurements for beat detection
        # TODO trigger some neat visualisations (based on pattern arg)
        return self.leds
|
{"/reels/argos.py": ["/fluter.py"], "/reels/slowimg.py": ["/fluter.py"], "/reels/wator.py": ["/fluter.py"], "/reels/wireworld.py": ["/fluter.py"], "/reels/gol.py": ["/fluter.py"], "/reels/rauschen.py": ["/fluter.py"], "/fluter.py": ["/reels/fluter/__init__.py"]}
|
20,664
|
deepestcyber/rgb-tetris-wall
|
refs/heads/master
|
/raspi_preproc/image_loader.py
|
import numpy as np
from os import listdir
from os.path import isfile, join
import random
from PIL import Image
from PIL import ImageEnhance
_FORMAT = "RGB"
class ImageLoader:
    """Loads images from the local image folder, scaled to the LED wall.

    Keeps the sorted file list (`image_list`), the most recently loaded PIL
    image (`img_leds`) and its numpy form (`leds`, shape
    (num_leds_v, num_leds_h, 3)).
    """

    def __init__(self, _num_leds_h=16, _num_leds_v=24):
        self.num_leds_h = _num_leds_h
        self.num_leds_v = _num_leds_v
        self.leds = np.zeros((_num_leds_v, _num_leds_h, 3))  # placeholder frame
        self.black = (0, 0, 0)
        self.ipath = "../images"
        self.default_image_name = "black.png"
        self.image_name = self.default_image_name
        self.get_image_list()
        self.img_leds = Image.new(_FORMAT, (self.num_leds_h, self.num_leds_v), self.black)
        self.load_image(self.default_image_name)

    def enhance_image(self, img):
        """Return *img* with colour saturation boosted by a fixed factor."""
        factor = 1.5
        converter = ImageEnhance.Color(img)
        return converter.enhance(factor)

    def get_image_list(self):
        """Refresh the sorted list of plain files in the image folder."""
        self.image_list = [f for f in listdir(self.ipath) if isfile(join(self.ipath, f))]
        self.image_list.sort()
        return

    def load_image(self, name):
        """Load *name* resized to the wall and return the pixel array.

        BUGFIX: the original guard used `any(name in n for n in
        self.image_list)` -- a substring test, so e.g. "a.png" matched
        "banana.png" and Image.open() could then fail on a non-existent
        file. Exact membership is what the cycling methods rely on.
        If *name* is unknown, the previous `leds` array is returned.
        """
        if name in self.image_list:
            self.img_leds = Image.open(self.ipath+"/"+name).\
                resize((self.num_leds_h, self.num_leds_v)).convert("RGB")
            self.img_leds = self.enhance_image(self.img_leds)
            self.leds = np.array(self.img_leds)
        return self.leds

    def load_random_image(self):
        """Load and return a uniformly random image from the folder."""
        self.image_name = self.image_list[random.randint(0, len(self.image_list)-1)]
        return self.load_image(self.image_name)

    def load_next_image(self):
        """Cycle forward through the sorted image list (wraps around)."""
        pos = self.image_list.index(self.image_name)
        self.image_name = self.image_list[(pos+1)%len(self.image_list)]
        return self.load_image(self.image_name)

    def load_prev_image(self):
        """Cycle backward through the sorted image list (wraps around)."""
        pos = self.image_list.index(self.image_name)
        self.image_name = self.image_list[(len(self.image_list)+pos-1)%len(self.image_list)]
        return self.load_image(self.image_name)

    def load_numbered_image(self, number):
        """Load the image at index *number* (mod list length; negatives wrap)."""
        self.image_name = self.image_list[(number+len(self.image_list))%len(self.image_list)]
        return self.load_image(self.image_name)
#for debug:
if __name__ == "__main__":
    # Manual smoke test: load a random image and dump the LED-sized result.
    iload = ImageLoader()
    leds = iload.load_random_image()
    print(iload.ipath+"/"+iload.image_name)
    print("debug -", "leds:", leds.shape)
    Image.fromarray(leds).save("leds.png", "PNG")
    #img.convert("RGB").save("leds.png", "PNG")
|
{"/reels/argos.py": ["/fluter.py"], "/reels/slowimg.py": ["/fluter.py"], "/reels/wator.py": ["/fluter.py"], "/reels/wireworld.py": ["/fluter.py"], "/reels/gol.py": ["/fluter.py"], "/reels/rauschen.py": ["/fluter.py"], "/fluter.py": ["/reels/fluter/__init__.py"]}
|
20,665
|
deepestcyber/rgb-tetris-wall
|
refs/heads/master
|
/pixelflut/spi_brain.py
|
import logging
import base64
import pigpio
from pygame.surfarray import array3d
# GPIO pin numbers
SYNC_PIN = 24
# canvas parameters
CANVAS_WIDTH = 16
CANVAS_HEIGHT = 24
log = logging.getLogger('brain')
log.debug('lol')
ticks = 0
# Open the SPI link to the Arduino and configure the sync pin.
try:
    pi = pigpio.pi()
    spi = pi.spi_open(0, 500000, 0) # 243750 487500 975000 1950000
    pi.set_mode(SYNC_PIN, pigpio.INPUT) # define pulldown/pullup
except Exception:
    # Possibly the gpio daemon broke or we are not running on a pi.
    # Was a bare `except:` -- that would also swallow KeyboardInterrupt
    # and SystemExit, making the process hard to kill.
    input('Continue?')
    pi = None
    spi = None
def send_canvas_over_spi(canvas):
    """Serialise the pygame canvas and push one RGB frame to the Arduino.

    Busy-waits until the Arduino raises SYNC_PIN, performs the 1-byte
    handshake read, then writes the full frame.

    Removed an unused function-local `import numpy as np` (only the
    commented-out debug lines referenced it) and the dead debug code.
    """
    log.debug('send_canvas_over_spi')
    # array3d returns (width, height, 3); crop to the wall size.
    leds = array3d(canvas.screen).astype('uint8')
    leds = leds[:CANVAS_WIDTH, :CANVAS_HEIGHT, :]
    data = leds.flatten().tobytes()
    # just wait, until the sync pin is set
    while ((pi.read_bank_1() >> SYNC_PIN) & 1) != 1:
        pass
    (num, byte) = pi.spi_read(spi, 1)  # handshake byte; value unused
    pi.spi_write(spi, data)
@on('LOAD')
def load(canvas):
    # Pixelflut framework hook: fired once when the canvas is created.
    log.debug('load event')
    # Deliberate early return -- the resize code below is kept as dead code
    # on purpose; delete the return to re-enable it.
    return # remove if canvas should be resized as well
    global CANVAS_WIDTH, CANVAS_HEIGHT
    import pygame
    size = CANVAS_WIDTH, CANVAS_HEIGHT
    canvas.screen = pygame.display.set_mode(size, canvas.flags)
    canvas.width, canvas.height = size
@on('RESIZE')
def resize(canvas):
    """Pixelflut hook: the canvas was resized (log only)."""
    log.debug('resize event')
@on('QUIT')
def quit(canvas):
    """Pixelflut hook: the server is shutting down (log only)."""
    log.debug('quit event')
@on('TICK')
def tick(canvas):
    """Pixelflut hook: runs every frame and pushes the canvas to the wall."""
    global ticks
    if ticks % 50 == 0:
        print('.')  # heartbeat marker roughly every 50 ticks
    # TODO: it would be best to have this here but it blocks everything :/
    send_canvas_over_spi(canvas)
    ticks += 1
@on('CONNECT')
def connect(canvas, client):
    """Pixelflut hook: a client connected (log only)."""
    log.debug('connect event %s', client)
@on('DISCONNECT')
def disconnect(canvas, client):
    """Pixelflut hook: a client disconnected (log only)."""
    log.debug('disconnect event %s', client)
@on('COMMAND-PX')
def command_px(canvas, client, *args):
    """Pixelflut `PX <x> <y> <rrggbb[aa]>` command: set one canvas pixel."""
    log.debug('px command event %s %s', client, args)
    assert len(args) == 3
    x_raw, y_raw, colour = args
    colour = colour.lower().strip('#')
    assert x_raw.isdecimal()
    assert y_raw.isdecimal()
    assert 6 <= len(colour) <= 8
    # pad optional alpha
    colour += 'f' * (8 - len(colour))
    px, py = int(x_raw), int(y_raw)
    r, g, b, a = (int(colour[i:i + 2], 16) for i in (0, 2, 4, 6))
    canvas.set_pixel(px, py, r, g, b, a)
    return True
@on('COMMAND-WL')
def command_wl(canvas, client, *args):
    # Pixelflut whole-frame command: one base64 blob repaints the canvas.
    global log, base64
    global send_canvas_over_spi
    log.debug("wl command event %s %d args", client, len(args))
    w, h = canvas.size
    raw_size = w * h * canvas.depth
    # NOTE(review): 4/3 expansion without padding -- only exact when
    # raw_size is divisible by 3 (true for 16*24*3); confirm for other sizes.
    b64_size = int(raw_size + raw_size/3)
    assert len(args) == 1
    base = args[0]
    assert len(base) == b64_size
    data = base64.b64decode(base)
    assert len(data) == raw_size
    for y in range(h):
        for x in range(w):
            # NOTE(review): stride hard-codes 3 bytes/pixel although
            # raw_size above uses canvas.depth -- verify depth is always 3.
            p = (y*w + x) * 3
            canvas.set_pixel(x, y, data[p], data[p+1], data[p+2], 0xff)
    #send_canvas_over_spi(canvas)
    return True
|
{"/reels/argos.py": ["/fluter.py"], "/reels/slowimg.py": ["/fluter.py"], "/reels/wator.py": ["/fluter.py"], "/reels/wireworld.py": ["/fluter.py"], "/reels/gol.py": ["/fluter.py"], "/reels/rauschen.py": ["/fluter.py"], "/fluter.py": ["/reels/fluter/__init__.py"]}
|
20,666
|
deepestcyber/rgb-tetris-wall
|
refs/heads/master
|
/reels/slowimg.py
|
"""
Image sending reel using PX command.
Simple reel that sends random images from a directory to the Wall.
Uses the standard pixelflut PX command to send one pixel at a time.
This is very slow and you can see the image change.
Created by kratenko.
"""
import os
import random
import time
import numpy as np
from PIL import Image
from fluter import Fluter
fluter = Fluter()
def get_random_file(path):
    """Return the full path of a uniformly random file inside *path*."""
    entries = os.listdir(path)
    return os.path.join(path, random.choice(entries))
def send(img):
    """Push *img* to the wall one pixel at a time via the PX protocol."""
    pixels = np.array(img)
    width, height = img.size
    for x in range(width):
        for y in range(height):
            fluter.send_pixel((x, y), pixels[y, x])
# Endlessly pick a random image and stream it pixel-by-pixel to the wall.
while True:
    # prepare image (open, convert to rgba, resize, convert to array)
    # NOTE(review): the comment above predates the code -- Image.open() is
    # passed straight to send() without any resize/convert step.
    fn = get_random_file("../images")
    print("sending image '{}'".format(fn))
    send(Image.open(fn))
    time.sleep(1)
|
{"/reels/argos.py": ["/fluter.py"], "/reels/slowimg.py": ["/fluter.py"], "/reels/wator.py": ["/fluter.py"], "/reels/wireworld.py": ["/fluter.py"], "/reels/gol.py": ["/fluter.py"], "/reels/rauschen.py": ["/fluter.py"], "/fluter.py": ["/reels/fluter/__init__.py"]}
|
20,667
|
deepestcyber/rgb-tetris-wall
|
refs/heads/master
|
/reels/wator.py
|
"""
RGB Reel Wator Simulation
A reel implementing the Wator Simulation. Uses the energy based version for
the predators. When the simulation hits either final state it restarts.
TODO: configuration of parameters from outside (program args)
TODO: biased initialization
TODO: non-energy-based variant
See https://en.wikipedia.org/wiki/Wa-Tor
Created by kratenko
"""
import numpy as np
import random
import time
import math
import os
from PIL import Image
from fluter import Fluter
W, H = 16, 24
f = np.random.randint(-1, 2, size=(H, W), dtype=np.int16)
FISH_BREED = 3
FISH_ENERGY = 5
SHARK_STARVE = 2
SHARK_BREED = 10
fluter = Fluter()
img_skull = Image.open(os.path.join("img", "skull.png"))
img_fish = Image.open(os.path.join("img", "cheep-cheep-blue.png"))
def send(f):
    """Render the Wator field as RGB and push it to the wall.

    Encoding: f < 0 shark (green), f == 0 water (black), f > 0 fish (blue).
    """
    colour_water = [0, 0, 0]
    colour_fish = [0, 0, 0xff]
    colour_shark = [0, 0xff, 0]
    rgb = np.zeros((H, W, 3), dtype=np.uint8)
    rgb[f < 0] = colour_shark
    rgb[f == 0] = colour_water
    rgb[f > 0] = colour_fish
    fluter.send_array(rgb)
def get_neigh(y, x):
    """Von-Neumann (4-)neighbourhood of (y, x) on the toroidal grid."""
    up = ((y - 1) % H, x)
    right = (y, (x + 1) % W)
    down = ((y + 1) % H, x)
    left = (y, (x - 1) % W)
    return [up, right, down, left]
def dest_condition(f, y, x, condition):
    """Pick a random neighbour of (y, x) matching *condition*.

    condition 0 selects empty cells, 1 selects fish cells; anything else
    matches nothing. Returns a (y, x) tuple or None when no neighbour fits.
    """
    candidates = get_neigh(y, x)
    if condition == 0:
        matches = [p for p in candidates if f[p] == 0]
    elif condition == 1:
        matches = [p for p in candidates if f[p] > 0]
    else:
        matches = []
    return random.choice(matches) if matches else None
def move_fish(f):
    """Fish phase of one tick (mutates f in place).

    Positive cell values are fish; the value is a breed counter. A fish
    moves to a random empty 4-neighbour, and once its counter reaches
    FISH_BREED it leaves a newborn (value 1) behind and resets itself.
    """
    # `moved` prevents handling the same animal twice within one sweep.
    moved = np.zeros((H, W), dtype=bool)
    for y in range(H):
        for x in range(W):
            if f[y, x] > 0:
                # fish
                if not moved[y, x]:
                    dest = dest_condition(f, y, x, 0)
                    if dest:
                        val = f[y, x] + 1
                        if val >= FISH_BREED:
                            # breed: child stays on the old cell, parent resets
                            f[dest] = 1
                            f[y, x] = 1
                        else:
                            f[dest] = val
                            f[y, x] = 0
                        moved[dest] = True
                    else:
                        # boxed in: just age, capped at FISH_BREED
                        f[y, x] = min(f[y, x] + 1, FISH_BREED)
                        moved[y, x] = True
def move_shark(f):
    """Shark phase of one tick (mutates f in place).

    Negative cell values are sharks; eating a fish subtracts FISH_ENERGY
    (more negative = closer to breeding), and a shark below -SHARK_BREED
    splits its value between parent and child.

    NOTE(review): for sharks f[y, x] is negative, so the starvation test
    `f[y, x] <= 1` below is ALWAYS true -- the "no fish, just move" branch
    is unreachable and every shark without an adjacent fish dies instantly.
    The unused SHARK_STARVE constant suggests the intended test was
    something like `f[y, x] >= -SHARK_STARVE`; left unchanged pending
    clarification of the energy model (moving via `f - 1` also makes the
    value MORE negative, which looks inverted).
    """
    moved = np.zeros((H, W), dtype=bool)
    for y in range(H):
        for x in range(W):
            if f[y, x] < 0:
                # shark
                if not moved[y, x]:
                    dest = dest_condition(f, y, x, 1)
                    if dest:
                        # find fish
                        f[dest] = f[y, x] - FISH_ENERGY
                        if f[dest] < -SHARK_BREED:
                            # breed new shark
                            val = f[dest]
                            f[dest] = math.floor(val/2)
                            f[y, x] = math.ceil(val/2)
                            moved[dest] = True
                            moved[y, x] = True
                        else:
                            f[y, x] = 0
                            moved[dest] = True
                    elif f[y, x] <= 1:
                        # starved to death:
                        f[y, x] = 0
                    else:
                        # no fish, just move
                        dest = dest_condition(f, y, x, 0)
                        if dest:
                            f[dest] = f[y, x] - 1
                            f[y, x] = 0
                            moved[dest] = True
                        else:
                            f[y, x] -= 1
                            moved[y, x] = True
def step(f):
    """Advance the simulation by one tick: fish move first, then sharks."""
    move_fish(f)
    move_shark(f)
fields = W * H
runs = 1
steps = 0
def redo():
    """Restart the world: bump the run counter and roll a fresh random field."""
    global f, steps, runs
    runs += 1
    steps = 0
    f = np.random.randint(-1, 2, size=(H, W), dtype=np.int16)
# Main loop: display, detect the two terminal states, then advance one tick.
while True:
    fish = np.sum(f>0)
    sharks = np.sum(f<0)
    print("Run %d, Step %d -- Fish: %d, Sharks: %d" % (runs, steps, fish, sharks))
    send(f)
    # eval
    if fish == 0 and sharks == 0:
        # everything dead -> show the skull and restart
        fluter.send_image(img_skull)
        time.sleep(2)
        redo()
        continue
    elif fish == fields:
        # fish took over every cell -> show the cheep-cheep and restart
        fluter.send_image(img_fish)
        time.sleep(2)
        redo()
        continue
    step(f)
    steps += 1
    time.sleep(.2)
|
{"/reels/argos.py": ["/fluter.py"], "/reels/slowimg.py": ["/fluter.py"], "/reels/wator.py": ["/fluter.py"], "/reels/wireworld.py": ["/fluter.py"], "/reels/gol.py": ["/fluter.py"], "/reels/rauschen.py": ["/fluter.py"], "/fluter.py": ["/reels/fluter/__init__.py"]}
|
20,668
|
deepestcyber/rgb-tetris-wall
|
refs/heads/master
|
/raspi_preproc/workbench/test_spi.py
|
# Hardware bench script: exercises the Raspberry Pi -> Arduino SPI link by
# sending a dummy payload every frame and printing timing information.
import serial
import time
import datetime
import base64
import pigpio
USBPORT = '/dev/ttyACM0' #check correct port first
#USBPORT = 'COM3' #check correct port first
NUM_LEDS_H = 16 #16
NUM_LEDS_V = 24 #24
FPS = 25
WAITTIME_VSTREAM = 0.040 #40 ms
WAITTIME_ISTREAM = 1.0 #40 ms
# 6-bit colour values are mapped onto the base64 alphabet for encoding.
b64dict = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'
#s = serial.Serial(USBPORT, 115200) #57600 dc115200 230400
pi = pigpio.pi()
if not pi.connected:
    print("could not connect spi")
    exit()
#spi = pi.spi_open(0, 115200)
spi = pi.spi_open(0, 64000//2)
leds = [[0 for i in range(NUM_LEDS_V)] for j in range(NUM_LEDS_H)]
counter = 0
delaycounter = 1
delay = 1 #FPS 1 for testing
data_read = 0
print("Start sending")
cnt = 0
while True:
    timestart = datetime.datetime.now()
    #data_read = s.read(1)
    # One handshake byte from the Arduino announces its current mode.
    (num_bytes, data_read) = pi.spi_read(spi, 1)
    #(num_bytes, data_read) = (0,1)
    #data_read = int(data_read)
    # mode - video stream: 25 frames per second with 6 bit/px
    if (True or data_read==b'3'):
        for i in range(NUM_LEDS_H):
            for j in range(NUM_LEDS_V):
                #leds[i][j] = (4*(counter-i+j))%64
                #leds[i][j] = (4*(counter-i+j))%64
                #leds[i][j] = 256//NUM_LEDS_V*((counter+i+j)%NUM_LEDS_V)
                leds[i][j] = 1
        if (delaycounter%delay == 0):
            counter=(counter+1)%NUM_LEDS_H
        delaycounter=(delaycounter+1)%delay
        data_b64 = ''.join(b64dict[m] for n in leds for m in n)
        data_dec = base64.b64decode(data_b64)
        #print(len(data_b64),data_b64)
        #print(len(data_dec),data_dec)
        #pi.spi_write(spi, data_dec+b'\n')
        cnt+=1
        # The encoded frame above is currently unused; a counter string is
        # sent instead so the Arduino side can verify transfer integrity.
        pi.spi_write(spi, bytes('Take this: %d\n\n' % cnt, encoding="utf-8"))
        print("CNT:", cnt)
        #pi.spi_xfer(spi, data_dec)
        #s.write(bytes([m for n in leds for m in n])) #undecoded format
        #spi.flush()
    timefin = datetime.datetime.now()
    waittime = max(0.0,(WAITTIME_VSTREAM)-(0.000001*(timefin-timestart).microseconds))
    print("arduino_mode:",data_read,"process_t:", 0.000001*(timefin-timestart).microseconds, "wait_t:", waittime)
    time.sleep(waittime)
# NOTE(review): unreachable -- the loop above never breaks, so the SPI
# handle is only released when the process is killed.
pi.spi_close(spi)
pi.stop()
|
{"/reels/argos.py": ["/fluter.py"], "/reels/slowimg.py": ["/fluter.py"], "/reels/wator.py": ["/fluter.py"], "/reels/wireworld.py": ["/fluter.py"], "/reels/gol.py": ["/fluter.py"], "/reels/rauschen.py": ["/fluter.py"], "/fluter.py": ["/reels/fluter/__init__.py"]}
|
20,669
|
deepestcyber/rgb-tetris-wall
|
refs/heads/master
|
/raspi_preproc/workbench/streaming/util/capture_single.py
|
from pyv4l2.frame import Frame
from pyv4l2.control import Control
# Grab one raw frame from the V4L2 capture device ...
frame = Frame('/dev/video1')
frame_data = frame.get_frame()
control = Control("/dev/video1")
#control.get_controls()
#control.get_control_value(9963776)
#control.set_control_value(9963776, 8)
# ... and dump it to disk for offline inspection.
with open('test.raw', 'wb') as f:
    f.write(frame_data)
|
{"/reels/argos.py": ["/fluter.py"], "/reels/slowimg.py": ["/fluter.py"], "/reels/wator.py": ["/fluter.py"], "/reels/wireworld.py": ["/fluter.py"], "/reels/gol.py": ["/fluter.py"], "/reels/rauschen.py": ["/fluter.py"], "/fluter.py": ["/reels/fluter/__init__.py"]}
|
20,670
|
deepestcyber/rgb-tetris-wall
|
refs/heads/master
|
/reels/wireworld.py
|
""""
Wireworld Reel
Implementation for Wireworld for our RGB-Wall
See https://en.wikipedia.org/wiki/Wireworld
TODO: multiple board layouts to chose from by program parm
TODO: set board layout by program parm
Created by kratenko
"""
import numpy as np
import time
from fluter import Fluter
fluter = Fluter()
W, H = 16, 24
EMPTY = 0
WIRE = 1
HEAD = 2
TAIL = 3
b_xor = """
..............x.
....x.....x...x.
...x.o...x.o..x.
...x.+...x.+..x.
...x.x...x.x..x.
...+.x...x.x..x.
...o.x...x.x..x.
...x.x...x.x..x.
...x.x...x.o..x.
...x.x...x.+..x.
....x.....x...x.
....x.....x...x.
....x.....x...x.
....x.....x...x.
....x.xxx.x...x.
.....xx.xx....x.
......x.x.....x.
......xxx.....x.
.......x......x.
.......x......x.
.......x......x.
.......x......x.
........xxxxxx..
................
"""
def build_field(s):
    """Parse an ASCII board description into an HxW uint8 cell array.

    Characters: 'x' -> WIRE, '+' -> electron HEAD, 'o' -> electron TAIL,
    anything else -> EMPTY. Rows/columns beyond the board are ignored.
    """
    charmap = {"x": WIRE, "+": HEAD, "o": TAIL}
    field = np.zeros((H, W), dtype=np.uint8)
    rows = [ln.strip() for ln in s.split("\n")]
    rows = [ln for ln in rows if ln]
    for y, row in enumerate(rows):
        if y >= H:
            break
        for x, c in enumerate(row):
            if x >= W:
                break
            field[y, x] = charmap.get(c, EMPTY)
    return field
def moore_neigh(pos):
    """All in-bounds Moore (8-)neighbours of *pos* = (y, x)."""
    y, x = pos
    offsets = ((-1, -1), (-1, 0), (-1, 1),
               (0, -1), (0, 1),
               (1, -1), (1, 0), (1, 1))
    return tuple((y + dy, x + dx) for dy, dx in offsets
                 if 0 <= x + dx < W and 0 <= y + dy < H)
def count_neigh_heads(f, pos):
    """Number of electron heads among the Moore neighbours of *pos*."""
    return sum(1 for p in moore_neigh(pos) if f[p] == HEAD)
def step(f):
    """One Wireworld generation; returns a new array, *f* is left untouched.

    Rules: head -> tail, tail -> wire, wire -> head iff it has one or two
    neighbouring heads; empty cells never change.
    """
    nxt = f.copy()
    for y in range(H):
        for x in range(W):
            cell = f[y, x]
            if cell == HEAD:
                nxt[y, x] = TAIL
            elif cell == TAIL:
                nxt[y, x] = WIRE
            elif cell == WIRE and 1 <= count_neigh_heads(f, (y, x)) <= 2:
                nxt[y, x] = HEAD
    return nxt
def send(f):
    """Colour-map the board and push it to the wall."""
    colours = {
        EMPTY: [0, 0, 0],
        WIRE: [0xff, 0xff, 0],
        HEAD: [0, 0, 0xff],
        TAIL: [0xff, 0, 0],
    }
    rgb = np.zeros((H, W, 3), dtype=np.uint8)
    for state, colour in colours.items():
        rgb[f == state] = colour
    fluter.send_array(rgb)
# Build the demo board and run the automaton forever at ~5 generations/s.
f = build_field(b_xor)
while True:
    send(f)
    f = step(f)
    time.sleep(.2)
|
{"/reels/argos.py": ["/fluter.py"], "/reels/slowimg.py": ["/fluter.py"], "/reels/wator.py": ["/fluter.py"], "/reels/wireworld.py": ["/fluter.py"], "/reels/gol.py": ["/fluter.py"], "/reels/rauschen.py": ["/fluter.py"], "/fluter.py": ["/reels/fluter/__init__.py"]}
|
20,671
|
deepestcyber/rgb-tetris-wall
|
refs/heads/master
|
/raspi_preproc/workbench/streaming/lib/cropping.py
|
# QnD extraction pixels from screenshot
from PIL import Image
def extract_single_player_area(im, area=None):
    """Crop the single-player Tetris play field from a full screenshot.

    The default box covers 10x20 tiles of 8 px each, starting at (96, 40).
    """
    default_box = (96, 40, 96 + 10 * 8, 40 + 20 * 8)
    return im.crop(area if area is not None else default_box)
def extract_square(im, coords):
    """Crop the 8x8 pixel tile at tile-coordinates (sx, sy)."""
    sx, sy = coords
    left, top = sx * 8, sy * 8
    return im.crop((left, top, left + 8, top + 8))
def extract_colours(area):
    """Downsample the play-field crop to one pixel per tile (10x20 grid).

    Samples the centre pixel of each tile instead of averaging.
    NOTE(review): the bare print() calls look like leftover debug output.
    """
    print(area)
    a = area.crop((0, 0, 10, 20)).copy()
    # Tile size in pixels, derived from the crop's actual dimensions.
    dx = area.width / 10
    dy = area.height / 20
    print(dx, dy)
    for y in range(20):
        for x in range(10):
            # Probe the centre of tile (x, y).
            at = (int(x*dx + (dx/2)), int(y*dy + (dy/2)))
            pix = area.getpixel(at)
            a.putpixel((x, y), pix)
    print(a)
    return a
if __name__ == "__main__":
    # Manual smoke test: crop the play field from a sample screenshot and
    # show the 10x20 tile-colour map scaled up 8x.
    im = Image.open("img/Tetris (USA)-10.png")
    area = extract_single_player_area(im)
    extract_colours(area).resize((8*10, 8*20)).show()
|
{"/reels/argos.py": ["/fluter.py"], "/reels/slowimg.py": ["/fluter.py"], "/reels/wator.py": ["/fluter.py"], "/reels/wireworld.py": ["/fluter.py"], "/reels/gol.py": ["/fluter.py"], "/reels/rauschen.py": ["/fluter.py"], "/fluter.py": ["/reels/fluter/__init__.py"]}
|
20,672
|
deepestcyber/rgb-tetris-wall
|
refs/heads/master
|
/raspi_preproc/controller.py
|
import datetime
import numpy as np
import pigpio
from threading import Thread
from queue import Queue
import time
import sys
from utils_ui import Logger
import stream_pixelflut as pixelflut
#import sys
#sys.path.append("..")
#from pixelflut import pixelflut
from stream_nes import StreamNES
from image_loader import ImageLoader
from audio_beatdetection import AudioBeatdetection
DEBUG_MODE = True
# Mirror stdout into a timestamped log file.
exptime = datetime.datetime.now()
log_out_file = "logs/log_" + exptime.strftime("%y%m%d%H%M") + ".txt"
sys.stdout = Logger(output_file=log_out_file)
is_first_loop = True
print("rgb-tetris-wall raspi reprocessing - start -", exptime.strftime("%y%m%d%H%M"))
if DEBUG_MODE:
    print("debug -", "raspberry PI preprocessing - start")
USBPORT = '/dev/ttyACM0' # check correct port first
# USBPORT = 'COM3' #check correct port first
NUM_LEDS_H = 16 # 16
NUM_LEDS_V = 24 # 24
FPS = 25
POLL_GRACE_PERIOD = 0.001 # mainly for debug.
#waittime_until_next_image = 30.0 # change the random image every 5 minutes
threshold_until_next_image = 10 # change the random image every 10th time.
#time_last_istream_change = datetime.datetime.now()
next_image_counter = threshold_until_next_image
b64dict = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'
# initialise SPI
pi = pigpio.pi()
if not pi.connected:
    print("could not connect SPI")
    exit()
spi = pi.spi_open(0, 500000, 0) # 243750 487500 975000 1950000
# initialise pin to arduino for flagging synchronisation
SYNC_PIN = 24 # GPIO pin numbers
pi.set_mode(SYNC_PIN, pigpio.INPUT) # define pulldown/pullup
leds = np.zeros((NUM_LEDS_H, NUM_LEDS_V, 3), dtype='uint8')
# Current mode plus a per-mode submode table (indexed by mode number).
mode = 0
submode = [0 for n in range(256)]
# Frame sources for the different modes.
iloader = ImageLoader(_num_leds_h=NUM_LEDS_H, _num_leds_v=NUM_LEDS_V)
strmnes = StreamNES(_num_leds_h=NUM_LEDS_H, _num_leds_v=NUM_LEDS_V, _ntsc=True)
abeatd = AudioBeatdetection(_num_leds_h=NUM_LEDS_H, _num_leds_v=NUM_LEDS_V)
# Pixelflut runs in its own thread; the queue hands back a frame-read callable.
pixelflut_queue = Queue()
pixelflut_thread = Thread(target=pixelflut.threaded,
                          args=(pixelflut_queue,'pixelflut_brain.py'))
pixelflut_thread.start()
pixelflut_read = pixelflut_queue.get(timeout=5)
time.sleep(0.4) # some needed initial delay
def decodeByte2Mode(byte):
    """Split a status byte into (mode, submode).

    The top two bits encode mode 1..4, the low six bits the submode 0..63.
    """
    mode = (byte >> 6) + 1
    submode = byte & 0x3F
    return mode, submode
def read_mode_SPI():
    """Read one status byte from the Arduino and decode it.

    Returns (mode, submode) as ints. When the SPI read yields no byte,
    fall back to the currently active global mode and its submode.

    BUGFIX: on a failed read the original fell through to the module-level
    names and returned the whole `submode` *list* instead of an int; the
    caller then compared it to and stored it back into `submode[mode]`,
    corrupting the submode table.
    """
    (num, data) = pi.spi_read(spi, 1)
    if num == 1:
        new_mode, new_submode = decodeByte2Mode(data[0])
        if DEBUG_MODE:
            print("debug -", "read mode", "received_data:", num, data[0], "received_mode:", new_mode, "received_submode:", new_submode)
        return (new_mode, new_submode)
    # Failed read: keep the previously active mode/submode.
    return (mode, submode[mode])
def send_SPI(data):
    # Write one frame (already serialised to bytes) to the Arduino over SPI.
    if DEBUG_MODE:
        print("debug -", "sending bytes:", len(data))
    pi.spi_write(spi, data)
# Main loop: wait for the Arduino's sync pin, read the requested mode, then
# render/serialise one frame from the matching source and write it back.
while True:
    try:
        timestart = datetime.datetime.now()
        if DEBUG_MODE:
            timeproc = timesend = timestart
        if DEBUG_MODE:
            print("debug -", "waiting for SPI", "pi.read_bank_1:", pi.read_bank_1())
        while ((pi.read_bank_1() >> SYNC_PIN) & 1) != 1:
            pass # just wait, until the sync pin is set
        if DEBUG_MODE:
            print("debug - got SPI")
        if ((pi.read_bank_1() >> SYNC_PIN) & 1) == 1:
            #(new_mode, new_submode) = request_mode_SPI()
            #time.sleep(0.001)
            (new_mode, new_submode) = read_mode_SPI()
            is_modes_changed = True
            if mode == new_mode and submode[mode] == new_submode:
                is_modes_changed = False
                if DEBUG_MODE:
                    print("debug -", "change:", is_modes_changed, "new_mode:", new_mode, "new_submode:", new_submode, "prev_mode:", mode, "prev_submode:", submode[mode])
            else:
                if (is_first_loop): #just for logging
                    is_first_loop = False
                    print("first read mode byte from arduino -", "new_mode:", new_mode, "new_submode:", new_submode)
                mode = new_mode
                submode[mode] = new_submode
            if (mode == 4): # mode for stream of beat-patterns
                if DEBUG_MODE:
                    timeproc = datetime.datetime.now()
                #TODO calculate LEDS
                leds = np.zeros((NUM_LEDS_H, NUM_LEDS_V, 3), dtype='uint8')
                if DEBUG_MODE:
                    timesend = datetime.datetime.now()
                data_enc = leds.transpose(1, 0, 2).flatten().tobytes()
                send_SPI(data_enc)
            elif (mode == 3): #mode for stream from NES/video
                """ in the NES mode the new frame needs to get determined
                WHILE the arduino is writing the old frame to the leds
                in order to parallelise these two expensive computations
                and meet the speed requirements of max 40ms per frame """
                if DEBUG_MODE:
                    timesend = datetime.datetime.now()
                if not is_modes_changed:
                    # last turn the mode was the same, thus the calculated frame should be valid
                    data_enc = leds.transpose(1, 0, 2).flatten().tobytes()
                    #decode pixels from 24-bit into 6-bit (64-colour palette)
                    #data_b64 = ''.join(b64dict[m] for n in leds for m in n)
                    #data_dec = base64.b64decode(data_b64)
                    #print("debug -", "len(data_b64):", len(data_b64), "data_b64:", data_b64)
                    #print("debug -", "len(data_dec):", len(data_dec), "data_dec:", data_dec)
                    send_SPI(data_enc)
                # calculate new frame:
                if DEBUG_MODE:
                    timeproc = datetime.datetime.now()
                #TODO: needs debugging!
                # if is_modes_changed:
                #     if submode[1] == 1:
                #         strmnes = StreamNES(_ntsc=False)
                #     else:
                #         strmnes = StreamNES(_ntsc=True)
                leds = strmnes.read_frame()
                if DEBUG_MODE:
                    print("debug -", "leds:", leds.shape)
            elif (mode == 2): #mode for pixelflut
                """ TODO documentation """
                if DEBUG_MODE:
                    timesend = datetime.datetime.now()
                if not is_modes_changed:
                    data_enc = leds.flatten().tobytes()
                    #data_enc = leds.transpose(1, 0, 2).flatten().tobytes()
                    send_SPI(data_enc)
                if DEBUG_MODE:
                    timeproc = datetime.datetime.now()
                leds = pixelflut_read()
            elif (mode == 1): # mode for stream of images
                if DEBUG_MODE:
                    timeproc = datetime.datetime.now()
                now = datetime.datetime.now()
                #if is_modes_changed or ((now - time_last_istream_change).seconds + (now - time_last_istream_change).microseconds*0.000001 > waittime_until_next_image):
                if is_modes_changed or (next_image_counter >= (threshold_until_next_image - 1)):
                    """
                    if DEBUG_MODE:
                        print("debug -", "new image:", submode[1],
                    "last_image_t:", "{0:.2f}".format(round((now - time_last_istream_change).seconds * 1000 + (now - time_last_istream_change).microseconds / 1000, 2)),
                    "wait_next_image_t:", "{0:.2f}".format(
                        round(waittime_until_next_image * 1000, 2)),
                    "(ms)")
                    """
                    if DEBUG_MODE:
                        print("debug -", "new image:", submode[1],
                              "counter:", next_image_counter)
                    # submode 0 = random image, otherwise an explicit index.
                    if submode[1] == 0:
                        leds = iloader.load_random_image()
                    else:
                        leds = iloader.load_numbered_image(submode[1])
                    #time_last_istream_change = datetime.datetime.now()
                    next_image_counter = 0
                else:
                    next_image_counter += 1
                if DEBUG_MODE:
                    print("debug -", "leds:", leds.shape)
                if DEBUG_MODE:
                    timesend = datetime.datetime.now()
                data_enc = leds.transpose(1, 0, 2).flatten().tobytes()
                send_SPI(data_enc)
            else: #mode == 0 # no stream
                if DEBUG_MODE:
                    timeproc = datetime.datetime.now()
                if DEBUG_MODE:
                    print("debug -", "Nothing to see here")
                pass
                # Blank frame keeps the Arduino fed while idle.
                leds = np.zeros((NUM_LEDS_H, NUM_LEDS_V, 3), dtype='uint8')
                if DEBUG_MODE:
                    timesend = datetime.datetime.now()
                data_enc = leds.transpose(1, 0, 2).flatten().tobytes()
                send_SPI(data_enc)
        timefin = datetime.datetime.now()
        waittime = max(0.0, (POLL_GRACE_PERIOD) - ((timefin - timestart).microseconds*0.000001 + (timefin - timestart).seconds))
        if DEBUG_MODE:
            # Timing breakdown; proc/send order differs per mode, hence the swap.
            if timeproc > timesend:
                handshake_delta_t = (timesend - timestart).microseconds/1000 + (timesend - timestart).seconds*1000
                send_delta_t = (timeproc - timesend).microseconds/1000 + (timeproc - timesend).seconds*1000
                proc_delta_t = (timefin - timeproc).microseconds/1000 + (timefin - timeproc).seconds*1000
            else:
                handshake_delta_t = (timeproc - timestart).microseconds/1000 + (timeproc - timestart).seconds*1000
                proc_delta_t = (timesend - timeproc).microseconds/1000 + (timesend - timeproc).seconds*1000
                send_delta_t = (timefin - timesend).microseconds/1000 + (timefin - timesend).seconds*1000
            print("debug -", "arduino mode:", mode, "submode:", submode[mode],
                  "handshake_t:", "{0:.2f}".format(round(handshake_delta_t,2)),
                  "process_t:", "{0:.2f}".format(round(proc_delta_t,2)),
                  "send_t:", "{0:.2f}".format(round(send_delta_t,2)),
                  "wait_t:", "{0:.2f}".format(round(waittime*1000,2)),
                  "(ms)")
        time.sleep(waittime)
    except KeyboardInterrupt:
        break
# Graceful shutdown: release the SPI handle and stop the pigpio client.
if DEBUG_MODE:
    print("debug -", "raspberry PI preprocessing - closing")
time.sleep(0.2)
pi.spi_close(spi)
time.sleep(0.2)
pi.stop()
|
{"/reels/argos.py": ["/fluter.py"], "/reels/slowimg.py": ["/fluter.py"], "/reels/wator.py": ["/fluter.py"], "/reels/wireworld.py": ["/fluter.py"], "/reels/gol.py": ["/fluter.py"], "/reels/rauschen.py": ["/fluter.py"], "/fluter.py": ["/reels/fluter/__init__.py"]}
|
20,673
|
deepestcyber/rgb-tetris-wall
|
refs/heads/master
|
/raspi_preproc/nes_tetris.py
|
from PIL import Image
from PIL import ImageFilter
from PIL import ImageEnhance
_FORMAT = "RGB" #HSV
_COLOR_ENHANCE_FACTOR = 3.0
class NesTetris:
    #def __init__(self, _num_leds_h=16, _num_leds_v=24, _gray=(95, 7, 128)): #RGB
    def __init__(self, _num_leds_h=16, _num_leds_v=24, _gray=(116, 116, 108)): #HSV
        """Pre-paint the static gray frames around play/next/score areas.

        NOTE(review): despite the trailing #HSV comment the image is
        created in _FORMAT ("RGB") -- confirm which colour space the
        _gray tuple was tuned for.
        """
        self.num_h = _num_leds_h
        self.num_v = _num_leds_v
        self.gray = _gray
        self.black = (0, 0, 0)
        self.hsv_pixel = Image.new("HSV", (1, 1), 0) # for the score rainbow
        self.img_leds = Image.new(_FORMAT, (_num_leds_h, _num_leds_v), 0)
        #print("debug -", "leds_init:", np.array(self.img_leds, dtype=np.uint8).shape)
        # frame play area
        for y in range(2, 24):
            self.img_leds.putpixel((0, y), _gray)
            self.img_leds.putpixel((11, y), _gray)
        for x in range(0, 12):
            self.img_leds.putpixel((x, 2), _gray)
            self.img_leds.putpixel((x, 23), _gray)
        # frame next block area
        for x in range(12, 16):
            self.img_leds.putpixel((x, 10), _gray)
            self.img_leds.putpixel((x, 15), _gray)
        # score/lines/level areas
        for x in range(12, 16):
            self.img_leds.putpixel((x, 2), _gray)
            self.img_leds.putpixel((x, 23), _gray)
        return
def reset_areas(self):
# play area
for y in range(3, 23):
for x in range(1, 11):
self.img_leds.putpixel((x, y), self.black)
# next block area
for y in range(11, 15):
for x in range(12, 16):
self.img_leds.putpixel((x, y), self.black)
# lines areas
for y in range(3, 10):
for x in range(12, 16):
self.img_leds.putpixel((x, y), self.black)
# level areas
for y in range(16, 23):
for x in range(12, 16):
self.img_leds.putpixel((x, y), self.black)
return
def enhance_image(self, img):
converter = ImageEnhance.Color(img)
return converter.enhance(_COLOR_ENHANCE_FACTOR)
def is_pix_white(self, pix):
if (pix[0] >= 128) and (pix[1] >= 128) and (pix[2] >= 128): #RGB
#if (pix[2]) >= 128: #HSV
return True
return False
def is_pix_black(self, pix):
if (pix[0] < 48) and (pix[1] < 48) and (pix[2] < 48): #RGB
#if (pix[2]) < 48: #HSV
return True
return False
def get_number(self, img):
#img.convert("RGB").save("debug2.png", "PNG")
number = 0
#read
if not self.is_pix_white(img.getpixel((12, 3))):
if self.is_pix_white(img.getpixel((2, 2))):
if self.is_pix_white(img.getpixel((17, 2))):
number = 7
else:
number = 5
elif self.is_pix_white(img.getpixel((2, 9))):
if self.is_pix_white(img.getpixel((17, 6))):
number = 8
else:
number = 6
else:
if self.is_pix_white(img.getpixel((17, 14))):
number = 2
else:
number = 9
else:
if self.is_pix_white(img.getpixel((12, 12))):
if self.is_pix_white(img.getpixel((17, 14))):
number = 1
else:
number = 4
else:
if self.is_pix_white(img.getpixel((17, 2))):
number = 3
else:
number = 0
#print("debug number:", str(number))
return number
def test_pixel(self, img, x, y, is_white=True):
pix = img.getpixel((x, y))
if is_white:
return self.is_pix_white(pix)
else:
return self.is_pix_black(pix)
# def get_enhanced_pixel(self, img, x, y):
# img_pix = self.enhance_image(img.crop((x, y, x+1, y+1)))
# return img_pix.getpixel((0, 0))
def test_tetris_runnig(self, img):
# if not self.test_pixel(img, 54, 59, is_white=False):
# return False
# if not self.test_pixel(img, 197, 142, is_white=False):
# return False
# if not self.test_pixel(img, 484, 350, is_white=False):
# return False
# if not self.test_pixel(img, 536, 101, is_white=False):
# return False
# if not self.test_pixel(img, 546, 321, is_white=True):
# return False
# if not self.test_pixel(img, 370, 53, is_white=True):
# return False
# if not self.test_pixel(img, 67, 154, is_white=True):
# return False
# if not self.test_pixel(img, 109, 387, is_white=True):
# return False
if not self.test_pixel(img, 54, 59, is_white=False):
return False
if not self.test_pixel(img, 197, 142, is_white=False):
return False
if not self.test_pixel(img, 484, 350, is_white=False):
return False
if not self.test_pixel(img, 536, 101, is_white=False):
return False
if not self.test_pixel(img, 567, 330, is_white=True):
return False
if not self.test_pixel(img, 370, 54, is_white=True):
return False
if not self.test_pixel(img, 67, 144, is_white=True):
return False
if not self.test_pixel(img, 109, 387, is_white=True):
return False
# if not self.is_pix_black(img.getpixel((54, 59))):
# return False
# if not self.is_pix_black(img.getpixel((197, 142))):
# return False
# if not self.is_pix_black(img.getpixel((484, 350))):
# return False
# if not self.is_pix_black(img.getpixel((536, 101))):
# return False
# if not self.is_pix_white(img.getpixel((546, 321))):
# return False
# if not self.is_pix_white(img.getpixel((370, 53))):
# return False
# if not self.is_pix_white(img.getpixel((67, 154))):
# return False
# if not self.is_pix_white(img.getpixel((109, 387))):
# return False
return True
def extract_game_area(self, im, area=None, ntsc=True):
if ntsc:
if area is None:
area = (43, 0, 43 + 642, 0 + 478)
else:
if area is None:
area = (41, 42, 41 + 642, 42 + 478)
return im.crop(area)
def extract_colours(self, img):
#img.convert("RGB").save("debug.png", "PNG")
#img = self.enhance_image(img)
for y in range(20):
for x in range(10):
at = (1 + x * 20 + 10, 1 + y * 16 + 9)
if not self.is_pix_black(img.getpixel(at)):
pix = img.getpixel(at)
else:
pix = self.black
self.img_leds.putpixel((1 + x, 3 + y), pix)
return
def extract_next_block(self, img):
#img.convert("RGB").save("debug.png", "PNG")
#img = self.enhance_image(img)
#read
if not self.is_pix_black(img.getpixel((5, 18))):
next_block = 6
next_block_col = img.getpixel((5, 18))
elif not self.is_pix_black(img.getpixel((15, 9))):
if not self.is_pix_black(img.getpixel((35, 26))):
if not self.is_pix_black(img.getpixel((55, 9))):
next_block = 0
else:
next_block = 2
else:
if not self.is_pix_black(img.getpixel((15, 26))):
next_block = 5
else:
next_block = 1
next_block_col = img.getpixel((15, 9))
else:
if not self.is_pix_black(img.getpixel((62, 10))):
next_block = 4
next_block_col = img.getpixel((62, 10))
else:
next_block = 3
next_block_col = img.getpixel((50, 9))
#write
for x in range(0, 4):
self.img_leds.putpixel((12+x, 12), self.black)
self.img_leds.putpixel((12+x, 13), self.black)
if next_block == 0:
for x in range(0, 3):
self.img_leds.putpixel((12+x, 12), next_block_col)
self.img_leds.putpixel((13, 13), next_block_col)
elif next_block == 1:
for x in range(0, 3):
self.img_leds.putpixel((12+x, 12), next_block_col)
self.img_leds.putpixel((14, 13), next_block_col)
elif next_block == 2:
for x in range(0, 2):
self.img_leds.putpixel((12+x, 12), next_block_col)
self.img_leds.putpixel((13+x, 13), next_block_col)
elif next_block == 3:
for x in range(0, 2):
self.img_leds.putpixel((13+x, 12), next_block_col)
self.img_leds.putpixel((13+x, 13), next_block_col)
elif next_block == 4:
for x in range(0, 2):
self.img_leds.putpixel((13+x, 12), next_block_col)
self.img_leds.putpixel((12+x, 13), next_block_col)
elif next_block == 5:
for x in range(0, 3):
self.img_leds.putpixel((12+x, 12), next_block_col)
self.img_leds.putpixel((12, 13), next_block_col)
else: #next_block == 6:
for x in range(0, 4):
self.img_leds.putpixel((12+x, 12), next_block_col)
def extract_score(self, img):
#img.convert("RGB").save("debug.png", "PNG")
#read
score = 0 \
+ 100000 * self.get_number(img.crop((1, 0, 1 + 20, 16))) \
+ 10000 * self.get_number(img.crop((21, 0, 21 + 20, 16))) \
+ 1000 * self.get_number(img.crop((41, 0, 41 + 20, 16))) \
+ 100 * self.get_number(img.crop((62, 0, 62 + 20, 16))) \
+ 10 * self.get_number(img.crop((82, 0, 82 + 20, 16))) \
+ self.get_number(img.crop((102, 0, 102 + 20, 16)))
#write
for i in range(max(int(score/10000), 0), 32):
self.img_leds.putpixel((0 + int(i/2), 0 + i%2), self.black)
for i in range(min(int(score/10000), 32)):
self.hsv_pixel.putpixel((0, 0), (max(186-i*6, 0), 255, 128))
self.img_leds.putpixel((0 + int(i/2), 0 + i%2), self.hsv_pixel.convert(_FORMAT).getpixel((0,0)))
#"self.img_leds.putpixel((0 + int(i/2), 0 + i%2), (max(186-i*6, 0), 255, 128))
#print("debug score", score)
def extract_level(self, img):
#img.convert("RGB").save("debug.png", "PNG")
#read
level = 0 \
+ 10 * self.get_number(img.crop((0, 0, 20, 16))) \
+ self.get_number(img.crop((20, 0, 40, 16)))
#write
for i in range(max(level, 0), 28):
self.img_leds.putpixel((12 + int(i/7), 16 + i%7), self.black)
for i in range(min(level+1,28)):
self.hsv_pixel.putpixel((0, 0), (max(180-i*6, 0), 255, 128))
self.img_leds.putpixel((12 + int(i/7), 16 + i%7), self.hsv_pixel.convert(_FORMAT).getpixel((0,0)))
#self.img_leds.putpixel((12 + int(i/7), 16 + i%7), (max(180-i*6, 0), 255, 128))
#print("debug level", level)
def extract_lines(self, img):
#img.convert("RGB").save("debug.png", "PNG")
#read
lines = 0 \
+ 100 * self.get_number(img.crop((1, 0, 20, 16))) \
+ 10 * self.get_number(img.crop((21, 0, 41, 16))) \
+ self.get_number(img.crop((41, 0, 61, 16)))
#write
for i in range(max(int(lines/10), 0), 28):
self.img_leds.putpixel((12 + int(i/7), 3 + i%7), self.black)
for i in range(min(int(lines/10)+1, 28)):
self.hsv_pixel.putpixel((0, 0), (max(180-i*6, 0), 255, 128))
self.img_leds.putpixel((12 + int(i/7), 3 + i%7), self.hsv_pixel.convert(_FORMAT).getpixel((0,0)))
#self.img_leds.putpixel((12 + int(i/7), 3 + i%7), (max(180-i*6, 0), 255, 128))
#print("debug lines", lines)
def transform_frame(self, img):
# check if game is running
if not self.test_tetris_runnig(img):
self.reset_areas()
return self.img_leds
# play area
#self.extract_colours(img.crop((239, 93, 240 + 10 * 20, 94 + 20 * 16)).convert("HSV").filter(ImageFilter.SMOOTH))
self.extract_colours(img.crop((239, 93, 240 + 10 * 20, 94 + 20 * 16)).filter(ImageFilter.SMOOTH))
#self.extract_colours(img.crop((239, 93, 240 + 10 * 20, 94 + 20 * 16)))
# next block
self.extract_next_block(img.crop((482, 237, 482 + 81, 237 + 33)).filter(ImageFilter.SMOOTH))
#self.extract_next_block(img.crop((482, 237, 482 + 81, 237 + 33)))
# number of lines
self.extract_lines(img.crop((380, 45, 380 + 61, 45 + 16)).filter(ImageFilter.SMOOTH))
#self.extract_lines(img.crop((380, 45, 380 + 61, 45 + 16)))
# score
self.extract_score(img.crop((481, 125, 481 + 122, 125 + 16)).filter(ImageFilter.SMOOTH))
#self.extract_score(img.crop((481, 125, 481 + 122, 125 + 16)))
# number of level
self.extract_level(img.crop((522, 333, 522 + 40, 333 + 16)).filter(ImageFilter.SMOOTH))
#self.extract_level(img.crop((522, 333, 522 + 40, 333 + 16)))
#return self.img_leds
return self.enhance_image(self.img_leds)
#for debug
import numpy as np
import time
import datetime
if __name__ == "__main__":
    # Smoke test: convert a reference frame a few times and time it.
    im = Image.open("nes_cut.png").convert(_FORMAT)
    gray = im.getpixel((6,6))  # sample the frame colour from the border
    print("debug gray", gray)
    game = NesTetris(_gray=gray)
    for n in range(5):
        timestart = datetime.datetime.now()
        leds = game.transform_frame(im).convert("RGB")
        timefin = datetime.datetime.now()
        print("leds", np.array(leds, dtype=np.uint8).shape, "transform_t: {ptime} in ms".format(ptime=(timefin-timestart).microseconds / 1000))
    leds.save("leds.png", "PNG")
    im.convert("RGB").save("debug1.png", "PNG")
|
{"/reels/argos.py": ["/fluter.py"], "/reels/slowimg.py": ["/fluter.py"], "/reels/wator.py": ["/fluter.py"], "/reels/wireworld.py": ["/fluter.py"], "/reels/gol.py": ["/fluter.py"], "/reels/rauschen.py": ["/fluter.py"], "/fluter.py": ["/reels/fluter/__init__.py"]}
|
20,674
|
deepestcyber/rgb-tetris-wall
|
refs/heads/master
|
/pixelflut/canvas_brain.py
|
import logging
log = logging.getLogger('brain')
log.debug('lol')
# global tick counter used by the TICK handler below
ticks = 0
@on('RESIZE')
def resize(canvas):
    """Handle canvas resize events (log only)."""
    global log
    log.debug('resize event')
@on('QUIT')
def quit(canvas):
    """Handle server shutdown events (log only)."""
    global log
    log.debug('quit event')
@on('TICK')
def tick(canvas):
    """Print a heartbeat dot every 50 ticks."""
    global log
    global ticks
    if ticks % 50 == 0:
        print('.')
    ticks += 1
@on('CONNECT')
def connect(canvas, client):
    """Log a new client connection."""
    global log
    log.debug('connect event %s', client)
@on('DISCONNECT')
def disconnect(canvas, client):
    """Log a client disconnect."""
    global log
    log.debug('disconnect event %s', client)
@on('COMMAND-PX')
def command_px(canvas, client, *args):
    """Handle ``PX x y rrggbb[aa]`` -- set a single canvas pixel.

    Malformed commands fail the asserts; presumably the dispatcher
    catches these per command -- TODO confirm server behaviour.
    Returns True on success.
    """
    global log
    log.debug('px command event %s %s', client, args)
    assert len(args) == 3
    x, y, c = args
    c = c.lower().strip('#')
    assert x.isdecimal()
    assert y.isdecimal()
    assert 6 <= len(c) <= 8
    # pad optional alpha
    c += 'f' * (8 - len(c))
    x, y = int(x), int(y)
    r, g, b, a = tuple(int(c[i:i+2], 16) for i in (0, 2, 4, 6))
    canvas.set_pixel(x, y, r, g, b, a)
    return True
@on('COMMAND-WL')
def command_wl(canvas, client, *args):
    """Handle ``WL <base64>`` -- blit a full wall frame onto the canvas.

    Expects exactly width*height*depth raw RGB bytes, base64 encoded;
    every pixel is written with full alpha.  Returns True on success.
    """
    import base64
    global log
    log.debug("wl command event %s %d args", client, len(args))
    w, h = canvas.size
    raw_size = w * h * canvas.depth
    # base64 inflates by 4/3; raw_size is divisible by 3, so no padding
    b64_size = int(raw_size + raw_size/3)
    assert len(args) == 1
    base = args[0]
    assert len(base) == b64_size
    data = base64.b64decode(base)
    assert len(data) == w * h * canvas.depth
    for y in range(h):
        for x in range(w):
            p = (y*w + x) * 3
            canvas.set_pixel(x, y, data[p], data[p+1], data[p+2], 0xff)
    return True
|
{"/reels/argos.py": ["/fluter.py"], "/reels/slowimg.py": ["/fluter.py"], "/reels/wator.py": ["/fluter.py"], "/reels/wireworld.py": ["/fluter.py"], "/reels/gol.py": ["/fluter.py"], "/reels/rauschen.py": ["/fluter.py"], "/fluter.py": ["/reels/fluter/__init__.py"]}
|
20,675
|
deepestcyber/rgb-tetris-wall
|
refs/heads/master
|
/raspi_preproc/workbench/send_serial.py
|
import serial
import time
import datetime
import base64
import os
# Workbench script: stream a synthetic test pattern to the Arduino over
# serial, one base64-decoded frame per request byte.
USBPORT = os.environ.get("USBPORT", '/dev/ttyACM0') #check correct port first
#USBPORT = '/dev/ttyAMA0' #check correct port first
#USBPORT = 'COM3' #check correct port first
NUM_LEDS_H = 16 #16
NUM_LEDS_V = 24 #24
FPS = 25
WAITTIME_VSTREAM = float(os.environ.get('WAITTIME_VSTREAM', '0.040')) #40 ms
WAITTIME_ISTREAM = 1.0 #40 ms
# 6-bit value -> base64 alphabet character (standard alphabet)
b64dict = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'
s = serial.Serial(USBPORT, 115200*3) #57600 dc115200 230400
time.sleep(2)  # give the Arduino time to reset after the port opens
leds = [[0 for i in range(NUM_LEDS_V)] for j in range(NUM_LEDS_H)]
counter = 0
delaycounter = 1
delay = 1 #FPS 1 for testing
data_read = 0
print("Start sending")
while True:
    timestart = datetime.datetime.now()
    # NOTE(review): assumes the Arduino sends its mode as an ASCII digit;
    # int() on the raw byte would raise otherwise -- confirm protocol.
    data_read = int(s.read(1))
    # mode - video stream: 25 frames per second with 6 bit/px
    if (data_read==3):
        data_prep_start = datetime.datetime.now()
        # animated diagonal gradient test pattern (6-bit values)
        for i in range(NUM_LEDS_H):
            for j in range(NUM_LEDS_V):
                leds[i][j] = (4*(counter-i+j))%64
                #leds[i][j] = 256//NUM_LEDS_V*((counter+i+j)%NUM_LEDS_V)
                #leds[i][j] = 63
        if (delaycounter%delay == 0):
            counter=(counter+1)%NUM_LEDS_H
        delaycounter=(delaycounter+1)%delay
        # pack the 6-bit values as base64 chars, then decode to raw bytes
        data_b64 = ''.join(b64dict[m] for n in leds for m in n)
        data_dec = base64.b64decode(data_b64)
        #print(len(data_b64),data_b64)
        #print(len(data_dec),data_dec)
        data_send_start = datetime.datetime.now()
        s.write(data_dec)
        #s.write(bytes([m for n in leds for m in n])) #undecoded format
        s.flush()
        timefin = datetime.datetime.now()
        # pace the loop to the target frame time
        waittime = max(0.0,(WAITTIME_VSTREAM)-(0.000001*(timefin-timestart).microseconds))
        print("arduino_mode:",data_read,"prep_t:", 0.000001*(data_send_start-data_prep_start).microseconds, "write_t:", 0.000001*(timefin-data_send_start).microseconds, "process_t:", 0.000001*(timefin-timestart).microseconds, "wait_t:", waittime)
        time.sleep(waittime)
|
{"/reels/argos.py": ["/fluter.py"], "/reels/slowimg.py": ["/fluter.py"], "/reels/wator.py": ["/fluter.py"], "/reels/wireworld.py": ["/fluter.py"], "/reels/gol.py": ["/fluter.py"], "/reels/rauschen.py": ["/fluter.py"], "/fluter.py": ["/reels/fluter/__init__.py"]}
|
20,676
|
deepestcyber/rgb-tetris-wall
|
refs/heads/master
|
/reels/gol.py
|
"""
RGB-Reel Conway's Game of Life
Simple implementation of Conway's Game of Life as reel for your wall. Runs in
a torus world and get's initialized randomly.
See https://en.wikipedia.org/wiki/Conway%27s_Game_of_Life
created by kratenko
"""
import numpy as np
import time
from fluter import Fluter
fluter = Fluter()
# Field size matches the LED wall: 16 wide, 24 high.
w = 16
h = 24
# Classic glider pattern (only used by the commented-out seeded start).
glider = [[1, 0, 0],
          [0, 1, 1],
          [1, 1, 0]]
# Random initial population.
f = np.random.randint(2, size=(h, w), dtype=np.uint8)
#f = np.zeros((h, w), dtype=np.uint8)
#f[:3, :3] = glider
def neigh(f, pos):
    """Count the eight Moore neighbours of cell ``pos`` on a torus.

    :param f: 2D numpy array (height x width) of 0/1 cells
    :param pos: (x, y) cell coordinate
    :return: number of live neighbours (0-8), wrapping at the edges

    The field size is taken from ``f.shape`` instead of the module-level
    globals ``w``/``h``, so the function works for any field size while
    behaving identically for the module's own field.
    """
    x, y = pos
    field_h, field_w = f.shape
    xa = (x - 1) % field_w
    xc = (x + 1) % field_w
    ya = (y - 1) % field_h
    yc = (y + 1) % field_h
    n = 0
    n += f[ya, xa] + f[ya, x] + f[ya, xc]
    n += f[y, xa] + 0 + f[y, xc]
    n += f[yc, xa] + f[yc, x] + f[yc, xc]
    return n
def next_gen(fin):
    """Return the next Game of Life generation for field ``fin``.

    :param fin: 2D numpy array (height x width) of 0/1 cells
    :return: new array of the same shape with standard B3/S23 rules

    Field dimensions come from ``fin.shape`` rather than the module
    globals, so arbitrary field sizes work.
    """
    field_h, field_w = fin.shape
    fout = fin.copy()
    for y in range(field_h):
        for x in range(field_w):
            n = neigh(fin, (x, y))
            if fin[y, x]:
                # survival: 2 or 3 neighbours
                fout[y, x] = 1 if 1 < n < 4 else 0
            else:
                # birth: exactly 3 neighbours
                fout[y, x] = 1 if n == 3 else 0
    return fout
def to_raw(fin):
    """Serialize a field to raw RGB bytes (3 bytes/pixel, row-major).

    :param fin: 2D numpy array of 0/1 cells
    :return: bytes -- green for live cells, dark blue for dead cells

    Bug fix: the original indexed the module-level global ``f`` inside
    the loop instead of its ``fin`` argument, so the passed-in field was
    ignored (harmless only because the sole caller passed ``f`` itself).
    Dimensions now also come from ``fin.shape`` instead of globals.
    """
    dead = b"\x00\x00\x80"
    live = b"\x00\xff\x00"
    field_h, field_w = fin.shape
    d = []
    for y in range(field_h):
        for x in range(field_w):
            d += [live] if fin[y, x] else [dead]
    return b"".join(d)
while True:
    # Push the current generation to the wall, then advance it.
    fluter.send_raw(to_raw(f))
    f = next_gen(f)
    time.sleep(.2)  # ~5 generations per second
|
{"/reels/argos.py": ["/fluter.py"], "/reels/slowimg.py": ["/fluter.py"], "/reels/wator.py": ["/fluter.py"], "/reels/wireworld.py": ["/fluter.py"], "/reels/gol.py": ["/fluter.py"], "/reels/rauschen.py": ["/fluter.py"], "/fluter.py": ["/reels/fluter/__init__.py"]}
|
20,677
|
deepestcyber/rgb-tetris-wall
|
refs/heads/master
|
/raspi_preproc/workbench/streaming/stream.py
|
import os
import argparse
import numpy as np
from time import sleep
from time import time
from pyv4l2.frame import Frame
from pyv4l2.control import Control
from lib.encoding import UYVY_RAW2RGB_PIL
from lib.visualization import send_visdom
from lib.cropping import extract_single_player_area
from lib.cropping import extract_colours
import pigpio
# Command-line options for capture size, optional visdom preview and device.
parser = argparse.ArgumentParser()
parser.add_argument('--width', type=int, default=720)
parser.add_argument('--height', type=int, default=576)
parser.add_argument('--scale', type=float, default=1.)
parser.add_argument('--visdom-server', type=str, default='http://localhost')
parser.add_argument('--visdom', action='store_true')
parser.add_argument('--device', type=str, default='/dev/video0')
args = parser.parse_args()
# Ask the V4L2 capture device for the (possibly scaled-down) frame size.
w = int(args.width // args.scale)
h = int(args.height // args.scale)
os.system('v4l2-ctl -d {device} --set-fmt-video width={w},height={h}'.format(
    device=args.device, w=w, h=h))
frame = Frame(args.device)
if args.visdom:
    import visdom
    vis = visdom.Visdom(server=args.visdom_server)
pi = pigpio.pi()
if not pi.connected:
    print("could not connect spi")
    exit()
spi = pi.spi_open(0, 750000, 0)
SYNC_PIN = 24  # GPIO the Arduino raises when it is ready for the next frame
pi.set_mode(SYNC_PIN, pigpio.INPUT)
NUM_LEDS_H = 16
NUM_LEDS_V = 24
# Persistent LED frame buffer (HSV triples, filled by send_spi).
leds = np.zeros((NUM_LEDS_H, NUM_LEDS_V, 3), dtype='uint8')
def send_spi(colours):
    """Send a PIL image to the Arduino over SPI as HSV byte triples.

    Blocks until the SYNC_PIN GPIO reads high before writing.
    """
    colours = colours.convert('HSV')
    print(colours)
    for x in range(colours.width):
        for y in range(colours.height):
            px = colours.getpixel((x, y))
            # flip vertically: LED rows run bottom-up on the wall
            leds[x, colours.height - y - 1] = px
    # row-major byte order expected by the Arduino
    data_dec = leds.transpose(1, 0, 2).flatten().tobytes()
    # wait for spi
    print("waiting for spi")
    wait = True
    print("debug -", "pi.read_bank_1:", pi.read_bank_1())
    while wait:
        v = (pi.read_bank_1() >> SYNC_PIN) & 1
        if v == 1:
            wait = False
    print("debug -", "pi.read_bank_1:", pi.read_bank_1())
    pi.spi_write(spi, data_dec)
def send_pi(img):
    """Crop the single-player area from the frame, downsample and send it."""
    # play-field rectangle expressed as fractions of a 720x576 PAL frame,
    # scaled to the actual frame size
    x_tl = 270/720 * img.width
    y_tl = 152/576 * img.height
    x_br = 475/720 * img.width
    y_br = 474/576 * img.height
    rect = (x_tl, y_tl, x_br, y_br)
    area = extract_single_player_area(img, rect)
    if args.visdom:
        send_visdom(vis, area.convert('RGBA'), win='crop img')
    colours = extract_colours(area)
    send_spi(colours)
    if args.visdom:
        send_visdom(vis, colours.convert('RGBA'), win='crop color img')
# Main capture loop: grab, decode, crop, send, and report per-frame timing.
while True:
    time_in = time()
    frame_data = frame.get_frame()
    data = np.array(list(frame_data), dtype='uint8')
    time_cap = time()
    img = UYVY_RAW2RGB_PIL(data, w, h)
    #import pdb; pdb.set_trace()
    if args.visdom:
        send_visdom(vis, img.convert('RGBA'), win='cap img')
    send_pi(img)
    time_out = time()
    print('sent image, time image: {}, time cap: {}'.format(
        (time_out - time_in), (time_cap - time_in)))
|
{"/reels/argos.py": ["/fluter.py"], "/reels/slowimg.py": ["/fluter.py"], "/reels/wator.py": ["/fluter.py"], "/reels/wireworld.py": ["/fluter.py"], "/reels/gol.py": ["/fluter.py"], "/reels/rauschen.py": ["/fluter.py"], "/fluter.py": ["/reels/fluter/__init__.py"]}
|
20,678
|
deepestcyber/rgb-tetris-wall
|
refs/heads/master
|
/reels/fluter/__init__.py
|
"""
Module for sending to deep pixelflut server.
Simple functions to send to a pixelflut server from a python script.
Handles connecting to the server on its own (but does never disconnect).
In addition to setting individual pixels it supports a command "WL" to
send a complete picture to our LED-Tetris-Wall.
To specify host/port of target server, set environment variable
PIXELFLUT_HOST to "hostname" or to "hostname:port".
Created by deep cyber -- the deepest of all cybers.
"""
import socket
import base64
import numpy as np
import os
class Fluter:
    """Client for a pixelflut server.

    Connects lazily on first send (and never disconnects).  Besides the
    standard per-pixel ``PX`` command it speaks the custom ``WL`` command
    that blits a whole 16x24 frame to the LED Tetris wall.  The target is
    taken from the PIXELFLUT_HOST environment variable ("host" or
    "host:port") unless passed explicitly.
    """

    DEFAULT_HOST = None

    def __init__(self, host=DEFAULT_HOST, width=16, height=24, depth=3):
        self.host, self.port = self._parse_host(host)
        self.width = width
        self.height = height
        self.depth = depth
        self.socket = None

    def _parse_host(self, host):
        """Split "host[:port]" into a (host, port) tuple; port defaults to 1234."""
        if host is Fluter.DEFAULT_HOST:
            host = os.environ.get("PIXELFLUT_HOST", "localhost:1234")
        pieces = host.split(":")
        port = int(pieces[1]) if len(pieces) == 2 else 1234
        return pieces[0], port

    def _connect(self):
        """Open the TCP connection if it is not already up."""
        # TODO: add a reconnect mechanic
        if self.socket:
            return
        self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.socket.connect((self.host, self.port))

    def send_pixel(self, pos, colour):
        """
        Set a single pixel on pixelflut server. Connects on demand.
        :param pos: (x, y) -- position of pixel to set
        :param colour: (r, g, b) or (r, g, b, a) -- colour to set pixel to
        :return:
        """
        assert len(pos) == 2
        assert 3 <= len(colour) <= 4
        self._connect()
        payload = tuple(pos) + tuple(colour)
        template = (b"PX %d %d %02x%02x%02x\n" if len(colour) == 3
                    else b"PX %d %d %02x%02x%02x%02x\n")
        self.socket.send(template % payload)

    def send_raw(self, data):
        """
        Send 16x24 raw image data (RGB, uint8) to server.
        :param data:
        :return:
        """
        assert len(data) == self.width * self.height * self.depth
        self._connect()
        self.socket.send(b"WL " + base64.b64encode(data) + b"\n")

    def send_image(self, image):
        """
        Send image to server (scales to 16x24).
        :param image:
        :return:
        """
        scaled = image.resize((self.width, self.height)).convert("RGB")
        self.send_raw(scaled.tobytes())

    def send_array(self, arr):
        """
        Send array data to server. Must have 16*24*3 uint8 values.
        :param arr:
        :return:
        """
        self.send_raw(np.array(arr).flatten())
|
{"/reels/argos.py": ["/fluter.py"], "/reels/slowimg.py": ["/fluter.py"], "/reels/wator.py": ["/fluter.py"], "/reels/wireworld.py": ["/fluter.py"], "/reels/gol.py": ["/fluter.py"], "/reels/rauschen.py": ["/fluter.py"], "/fluter.py": ["/reels/fluter/__init__.py"]}
|
20,679
|
deepestcyber/rgb-tetris-wall
|
refs/heads/master
|
/raspi_preproc/workbench/streaming/lib/encoding.py
|
from PIL import Image
def UYVY_RAW2RGB_PIL(data, w, h):
    """Build a YCbCr-mode PIL image from a raw UYVY (4:2:2) frame buffer.

    ``data`` is a flat uint8 numpy array laid out U0 Y0 V0 Y1 ...; Y is
    taken from every odd byte, U/V from every fourth byte and doubled to
    full horizontal resolution via ``repeat``.
    NOTE(review): despite the name this returns a 'YCbCr' image, and the
    reshape uses (w//2, h) with repeat along axis 0 -- for row-major data
    (h, w//2) with repeat on axis 1 would be expected; confirm against
    real capture output.
    """
    y=Image.frombytes('L',(w,h),data[1::2].copy())
    u=Image.frombytes('L',(w,h),data[0::4].reshape(w//2,h).copy().repeat(2, 0))
    v=Image.frombytes('L',(w,h),data[2::4].reshape(w//2,h).copy().repeat(2, 0))
    return Image.merge('YCbCr',(y,u,v))
|
{"/reels/argos.py": ["/fluter.py"], "/reels/slowimg.py": ["/fluter.py"], "/reels/wator.py": ["/fluter.py"], "/reels/wireworld.py": ["/fluter.py"], "/reels/gol.py": ["/fluter.py"], "/reels/rauschen.py": ["/fluter.py"], "/fluter.py": ["/reels/fluter/__init__.py"]}
|
20,680
|
deepestcyber/rgb-tetris-wall
|
refs/heads/master
|
/raspi_preproc/stream_nes.py
|
import os
import base64
import numpy as np
#from pyv4l2.frame import Frame
from PyV4L2Camera.camera import Camera
from PIL import Image
from PIL import ImageFilter
from nes_tetris import NesTetris
""" Computational costs on raspi:
- grab the frame: 13-17 ms
- convert frame to RGB PIL img: 5 - 6 ms
- cut game area: 2-3 ms
- calculate led pixels from cutted rgb img (including smooth filters): 19 - 27 ms
overall costs: 40-52 ms
"""
class StreamNES:
    """Grab frames from a V4L2 capture device and decode them to LED data.

    Wraps a PyV4L2 ``Camera`` plus a ``NesTetris`` decoder; ``read_frame``
    returns the 16x24 LED array for the current video frame.
    """

    # -s, --set - standard = < num >
    # pal or pal - X(X=B / G / H / N / Nc / I / D / K / M / 60)(V4L2_STD_PAL)
    # ntsc or ntsc - X(X=M / J / K)(V4L2_STD_NTSC)
    # secam or secam - X(X=B / G / H / D / K / L / Lc)(V4L2_STD_SECAM)
    def __init__(self, _num_leds_h=16, _num_leds_v=24, _ntsc=True, feedback=False):
        """Configure the capture device (via v4l2-ctl) and open the camera."""
        self.num_leds_h = _num_leds_h
        self.num_leds_v = _num_leds_v
        self.ntsc = _ntsc
        self.leds = np.zeros((_num_leds_v, _num_leds_h, 3)) #should be not necessary
        self.b64dict = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'
        # TV standard determines frame rate and capture resolution
        if self.ntsc:
            self.mode = 'NTSC'
            self.fps = 30
            self.width = 720
            self.height = 480
        else:
            self.mode = 'PAL-B'
            self.fps = 25
            self.width = 720
            self.height = 576
        self.format = 'UYVY'
        # bytes per pixel as delivered by the camera wrapper
        self.b = 3 # 3 2
        #self.color = '' #''smpte170'
        if (feedback):
            fb = 'verbose'
        else:
            fb = 'silent'
        self.scale = 1.
        self.device = '/dev/video0'
        self.w = int(self.width // self.scale)
        self.h = int(self.height // self.scale)
        self.game = NesTetris(_num_leds_h=_num_leds_h, _num_leds_v=_num_leds_v)
        #-p 25
        # configure the device; note the fps key is unused by the active
        # format string (the -p variant below is commented out)
        os.system(
            'v4l2-ctl -d {device} -s {m} --set-fmt-video width={w},height={h},pixelformat={pf} --{fb}'.format(
            #'v4l2-ctl -d {device} -p {fps} -s {m} --set-fmt-video width={w},height={h},pixelformat={pf} --{fb}'.format(
                device=self.device, fps=self.fps, m=self.mode, w=self.w, h=self.h, pf=self.format, fb=fb))
        #self.frame = Frame(self.device)
        self.frame = Camera(self.device)

    def Frame_UYVY2YCbCr_PIL(self, w, h, frame_data):
        """Convert a raw UYVY buffer to a YCbCr PIL image (unused path)."""
        # NOTE(review): np.fromstring is deprecated; np.frombuffer is the
        # modern equivalent.
        data = np.fromstring(frame_data, dtype='uint8')
        y = Image.frombytes('L', (w, h), data[1::2].copy())
        u = Image.frombytes('L', (w, h), data[0::4].copy().repeat(2, 0))
        v = Image.frombytes('L', (w, h), data[2::4].copy().repeat(2, 0))
        return Image.merge('YCbCr', (y, u, v))

    def read_frame_dec(self):
        """Grab a frame and return it base64-decoded for the serial path."""
        self.leds = self.read_frame()
        #TODO convert to 64 color palette, thus the remainder does not work
        data_b64 = ''.join(self.b64dict[m] for n in self.leds for m in n)
        data_dec = base64.b64decode(data_b64)
        return data_dec

    def read_frame(self):
        """Grab a complete RGB frame and decode it into the LED array."""
        #get a frame from the device
        #frame_data = self.frame.get_frame()
        # retry until the camera delivers a complete frame
        while True:
            frame_data = self.frame.get_frame()
            if len(frame_data) == self.w * self.h * self.b:
                break
        #img = self.Frame_UYVY2YCbCr_PIL(self.w, self.h, frame_data)
        img = Image.frombytes('RGB', (self.w, self.h), frame_data, 'raw', 'RGB')
        #cut the frame to game size (depending on game) ane transform it for the leds
        #img_game = self.game.extract_game_area(img).filter(ImageFilter.SMOOTH).convert("HSV")
        img_game = self.game.extract_game_area(img, ntsc=self.ntsc)
        img_leds = self.game.transform_frame(img_game)
        #img to array conversion
        self.leds = np.array(img_leds)
        #debug:
        #self.leds = img_leds
        #img_game.convert("RGB").save("nes_cut.png", "PNG")
        #img_leds.convert("RGB").save("leds.png", "PNG")
        return self.leds

    # for debug: the read_frameN methods split read_frame into its
    # pipeline stages so each stage can be timed separately.
    def read_frame0(self):
        """Debug: grab one raw frame without size checking."""
        frame_data = self.frame.get_frame()
        return frame_data

    def read_frame1(self):
        """Debug stage 1: grab a complete raw frame (with retry)."""
        #frame_data = self.frame.get_frame()
        while True:
            frame_data = self.frame.get_frame()
            if len(frame_data) == self.w * self.h * self.b:
                break
            else:
                print("debug - ", "frame not correct", "frame_data_len:",
                      len(frame_data))
        return frame_data

    def read_frame2(self, frame_data):
        """Debug stage 2: raw bytes -> PIL RGB image."""
        #img = self.Frame_UYVY2YCbCr_PIL(self.w, self.h, frame_data)
        img = Image.frombytes('RGB', (self.w, self.h), frame_data, 'raw', 'RGB')
        return img

    def read_frame3(self, img):
        """Debug stage 3: crop the frame to the game area."""
        #img_game = self.game.extract_game_area(img).filter(ImageFilter.SMOOTH).convert("HSV")
        img_game = self.game.extract_game_area(img, ntsc=self.ntsc)
        return img_game

    def read_frame4(self, img_game):
        """Debug stage 4: game area -> LED image (kept as a PIL image)."""
        img_leds = self.game.transform_frame(img_game)
        self.leds = img_leds
        return self.leds
    # end for debug
# end for debug
#for debug
import time
import datetime
#import visdom
from six import BytesIO
import base64 as b64
def send_visdom(vis, im, win=None, env=None, opts=None):
    """Post a PIL image to a visdom window via the raw ``_send`` endpoint.

    The image is PNG-encoded in memory and embedded as a base64 data URI;
    window size defaults to the image size unless overridden in ``opts``.
    """
    if opts is None:
        opts = {}
    opts.setdefault('height', im.height)
    opts.setdefault('width', im.width)
    buffer = BytesIO()
    im.save(buffer, format='PNG')
    encoded = b64.b64encode(buffer.getvalue()).decode('utf-8')
    payload = [{
        'content': {
            'src': 'data:image/png;base64,' + encoded,
            'caption': opts.get('caption'),
        },
        'type': 'image',
    }]
    return vis._send({
        'data': payload,
        'win': win,
        'eid': env,
        'opts': opts,
    })
if __name__ == "__main__":
    # Timing harness: run the capture/decode pipeline stage by stage.
    iterations = 250
    # NOTE(review): enabling is_visdom raises NameError -- the visdom
    # import above is commented out.
    is_visdom = False
    # command line:$ python3 -m visdom.server
    WAITTIME_VSTREAM = 1.0 #0.040 # 40 ms
    print("Start StreamNES...")
    stream = StreamNES(feedback=True)
    visd_server = 'http://localhost'
    if is_visdom:
        vis = visdom.Visdom(server=visd_server)
    print("Start reading frames...")
    for i in range(iterations):
        timestart = datetime.datetime.now()
        print("read frame...")
        #stream.read_frame()
        #print("...done")
        a = stream.read_frame1()
        print("...done1")
        timestart_a = datetime.datetime.now()
        b = stream.read_frame2(a)
        print("...done2")
        timestart_b = datetime.datetime.now()
        c = stream.read_frame3(b)
        print("...done3")
        timestart_c = datetime.datetime.now()
        d = stream.read_frame4(c)
        print("...done4")
        timefin = datetime.datetime.now()
        c.convert("RGB").save("nes_cut.png", "PNG")
        d.convert("RGB").save("leds.png", "PNG")
        if is_visdom:
            send_visdom(vis, c.convert('RGBA'), win='source')
            send_visdom(vis, d.resize((160,240)).convert('RGBA'), win='led-pixel-wall')
        # pace to the target frame time, then report per-stage timings
        waittime = max(0.0,(WAITTIME_VSTREAM)-(0.000001*(timefin-timestart).microseconds))
        time_a = timestart_a - timestart
        time_b = timestart_b - timestart_a
        time_c = timestart_c - timestart_b
        time_d = timefin - timestart_c
        time_total = time_a + time_b + time_c + time_d
        print("grab_t: {time_a}, conv_t: {time_b}, "
              "cut_t: {time_c}, smooth_trans_t: {time_d}, "
              "total_t: {time_total}, wait_t: {waittime} in ms".format(
                  time_a=time_a.microseconds / 1000,
                  time_b=time_b.microseconds / 1000,
                  time_c=time_c.microseconds / 1000,
                  time_d=time_d.microseconds / 1000,
                  time_total=time_total.microseconds / 1000,
                  waittime=waittime * 1000,
              ))
        time.sleep(waittime)
|
{"/reels/argos.py": ["/fluter.py"], "/reels/slowimg.py": ["/fluter.py"], "/reels/wator.py": ["/fluter.py"], "/reels/wireworld.py": ["/fluter.py"], "/reels/gol.py": ["/fluter.py"], "/reels/rauschen.py": ["/fluter.py"], "/fluter.py": ["/reels/fluter/__init__.py"]}
|
20,681
|
deepestcyber/rgb-tetris-wall
|
refs/heads/master
|
/raspi_preproc/workbench/send_spi.py
|
# Workbench script: stream a reference image to the Arduino over SPI.
# Bug fix: the first import line read "mport serial", which is a
# SyntaxError and prevented the script from running at all.
import serial
import time
import datetime
import base64
import pigpio
import numpy as np
USBPORT = '/dev/ttyACM0' #check correct port first
#USBPORT = 'COM3' #check correct port first
NUM_LEDS_H = 16 #16
NUM_LEDS_V = 24 #24
FPS = 25
WAITTIME_VSTREAM = 0.040 #40 ms
WAITTIME_ISTREAM = 1.0 #40 ms
b64dict = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'
#s = serial.Serial(USBPORT, 115200) #57600 dc115200 230400
pi = pigpio.pi()
if not pi.connected:
    print("could not connect spi")
    exit()
#spi = pi.spi_open(0, 115200)
spi = pi.spi_open(0, 750000, 0)
counter = 0
delaycounter = 1
delay = 1 #FPS 1 for testing
data_read = 0
#gp = pigpio.pi()
SYNC_PIN = 18 # GPIO pin numbers
pi.set_mode(SYNC_PIN, pigpio.INPUT)
import PIL.Image
ref_img = PIL.Image.open('../mario.png').convert('HSV')
#ref_img = PIL.Image.open('d.png').convert('RGB')
# Print an ASCII mask of the reference image as a quick sanity check.
for j in range(NUM_LEDS_V):
    for i in range(NUM_LEDS_H):
        px = ref_img.getpixel((i, j))
        print(1 if sum(px) > 0 else 0, end='')
    print('')
# Persistent LED frame buffer (HSV triples).
leds = np.zeros((NUM_LEDS_H, NUM_LEDS_V, 3), dtype='uint8')
cnt = 0
print("Start sending")
while True:
    timestart_proc = datetime.datetime.now()
    # Fill the frame from the reference image (flipped vertically).
    for x in range(NUM_LEDS_H):
        for y in range(NUM_LEDS_V):
            #leds[x, y] = 12
            #leds[x, y] = (6*(cnt-x+y) % 64, 4*(cnt-x+y) % 64, 2*(cnt-x+y) % 64)
            px = ref_img.getpixel((x, y))
            leds[x, NUM_LEDS_V - y - 1] = px
    if (delaycounter%delay == 0):
        counter=(counter+1)%NUM_LEDS_H
    delaycounter=(delaycounter+1)%delay
    # row-major byte order expected by the Arduino
    data_dec = leds.transpose(1, 0, 2).flatten().tobytes()
    timestart_send = datetime.datetime.now()
    print("sending bytes:", len(data_dec))
    pi.spi_write(spi, data_dec)
    timestart_render = datetime.datetime.now()
    # busy-wait on the sync GPIO until the Arduino reports "rendered"
    wait = True
    while wait:
        v = (pi.read_bank_1() >> SYNC_PIN) & 1
        if v == 1:
            wait = False
        else:
            #print(cnt, "wait for sync", v)
            pass
    cnt += 1
    timefin = datetime.datetime.now()
    waittime = max(0.0,(WAITTIME_VSTREAM)-(0.000001*(timefin-timestart_proc).microseconds))
    time_proc = timestart_send - timestart_proc
    time_send = timestart_render - timestart_send
    time_render = timefin - timestart_render
    time_total = time_send + time_render + time_proc
    print("time_proc: {time_proc}, time_send: {time_send}, "
          "time_render: {time_render}, time_total: {time_total}, "
          "wait_t: {waittime}".format(
              time_proc=time_proc.microseconds / 1000,
              time_send=time_send.microseconds / 1000,
              time_render=time_render.microseconds / 1000,
              time_total=time_total.microseconds / 1000,
              waittime=waittime,
          ))
    time.sleep(waittime)
# NOTE(review): unreachable -- the loop above never breaks, so the SPI
# handle is never closed cleanly.
pi.spi_close(spi)
pi.stop()
|
{"/reels/argos.py": ["/fluter.py"], "/reels/slowimg.py": ["/fluter.py"], "/reels/wator.py": ["/fluter.py"], "/reels/wireworld.py": ["/fluter.py"], "/reels/gol.py": ["/fluter.py"], "/reels/rauschen.py": ["/fluter.py"], "/fluter.py": ["/reels/fluter/__init__.py"]}
|
20,682
|
deepestcyber/rgb-tetris-wall
|
refs/heads/master
|
/reels/rauschen.py
|
"""
Rauschen reel.
Just some randomly changing gray values, to emulate random noise on a television.
Created by kratenko.
"""
import numpy as np
import time
from fluter import Fluter
fluter = Fluter()
while True:
    # create gray pixels: one random 0-255 value per LED
    f = np.random.randint(256, size=(fluter.height, fluter.width), dtype=np.uint8)
    # rescale to rgb by repeating the gray value across all channels
    f = np.stack((f,)*fluter.depth, axis=-1)
    fluter.send_array(f)
    time.sleep(.01)  # ~100 frames per second
|
{"/reels/argos.py": ["/fluter.py"], "/reels/slowimg.py": ["/fluter.py"], "/reels/wator.py": ["/fluter.py"], "/reels/wireworld.py": ["/fluter.py"], "/reels/gol.py": ["/fluter.py"], "/reels/rauschen.py": ["/fluter.py"], "/fluter.py": ["/reels/fluter/__init__.py"]}
|
20,683
|
deepestcyber/rgb-tetris-wall
|
refs/heads/master
|
/raspi_preproc/workbench/streaming/lib/visualization.py
|
from six import BytesIO
import base64 as b64
def send_visdom(vis, im, win=None, env=None, opts=None):
    """
    Send a PIL-style image to a visdom server as a base64-encoded PNG.

    Args:
        vis: visdom client; its private _send endpoint is used directly.
        im: image object exposing .height, .width and .save(buf, format=...).
        win: target window id (optional).
        env: target environment id (optional).
        opts: extra display options; 'height'/'width' default to the image
            size, 'caption' (if any) is shown under the image.
    Returns: whatever vis._send returns.
    """
    # BUG FIX: copy instead of aliasing, so the height/width defaults below
    # do not mutate the caller's opts dict.
    opts = {} if opts is None else dict(opts)
    opts['height'] = opts.get('height', im.height)
    opts['width'] = opts.get('width', im.width)
    buf = BytesIO()
    im.save(buf, format='PNG')
    b64encoded = b64.b64encode(buf.getvalue()).decode('utf-8')
    data = [{
        'content': {
            'src': 'data:image/png;base64,' + b64encoded,
            'caption': opts.get('caption'),
        },
        'type': 'image',
    }]
    return vis._send({
        'data': data,
        'win': win,
        'eid': env,
        'opts': opts,
    })
|
{"/reels/argos.py": ["/fluter.py"], "/reels/slowimg.py": ["/fluter.py"], "/reels/wator.py": ["/fluter.py"], "/reels/wireworld.py": ["/fluter.py"], "/reels/gol.py": ["/fluter.py"], "/reels/rauschen.py": ["/fluter.py"], "/fluter.py": ["/reels/fluter/__init__.py"]}
|
20,684
|
deepestcyber/rgb-tetris-wall
|
refs/heads/master
|
/fluter.py
|
# just wrap fluter module, so pycharm finds it
from reels.fluter import Fluter
|
{"/reels/argos.py": ["/fluter.py"], "/reels/slowimg.py": ["/fluter.py"], "/reels/wator.py": ["/fluter.py"], "/reels/wireworld.py": ["/fluter.py"], "/reels/gol.py": ["/fluter.py"], "/reels/rauschen.py": ["/fluter.py"], "/fluter.py": ["/reels/fluter/__init__.py"]}
|
20,685
|
deepestcyber/rgb-tetris-wall
|
refs/heads/master
|
/raspi_preproc/workbench/s1.py
|
import time
import pigpio

# Simple SPI soak test: stream an incrementing counter over SPI channel 0
# until interrupted with Ctrl-C.
pi = pigpio.pi()
if not pi.connected:
    print("nope")
    exit(0)
h = pi.spi_open(0, 1152000)  # SPI channel 0 at 1.152 MHz
n = 0
try:
    while True:
        n += 1
        s = "n:%04x\n" % n
        pi.spi_xfer(h, s)
        print(s)
        #time.sleep(0.01)
except KeyboardInterrupt:
    print("Byebye")
finally:
    # BUG FIX: release the SPI handle and pigpio connection even when an
    # unexpected exception (not just Ctrl-C) escapes the loop.
    pi.spi_close(h)
    pi.stop()
|
{"/reels/argos.py": ["/fluter.py"], "/reels/slowimg.py": ["/fluter.py"], "/reels/wator.py": ["/fluter.py"], "/reels/wireworld.py": ["/fluter.py"], "/reels/gol.py": ["/fluter.py"], "/reels/rauschen.py": ["/fluter.py"], "/fluter.py": ["/reels/fluter/__init__.py"]}
|
20,686
|
daigo0927/ctci-6th
|
refs/heads/master
|
/chap4/Q12.py
|
import os, sys
sys.path.append(os.pardir)
from utils.tree import TreeNode
def count_paths_with_sum(root, sum_tar):
    """
    Count downward paths in the tree whose values sum to sum_tar.
    Paths may start at any node and end at any descendant of that node.
    Args:
    - TreeNode root
    - int sum_tar
    Returns:
    - int: number of matching paths
    """
    if root is None:
        return 0
    # paths anchored at this node, plus all paths anchored further down
    anchored_here = count_paths_with_sum_from_node(root, sum_tar, 0)
    return (anchored_here
            + count_paths_with_sum(root.left, sum_tar)
            + count_paths_with_sum(root.right, sum_tar))
def count_paths_with_sum_from_node(node, sum_tar, sum_cur):
    """
    Count paths that start at the given node and sum to sum_tar.
    Args:
    - TreeNode node : starting node
    - int sum_tar : target sum
    - int sum_cur : running sum accumulated above this node
    Returns:
    - int: number of matching paths through this subtree
    """
    if node is None:
        return 0
    sum_cur += node.data
    total = 1 if sum_cur == sum_tar else 0
    for child in (node.left, node.right):
        total += count_paths_with_sum_from_node(child, sum_tar, sum_cur)
    return total
if __name__ == '__main__':
    # Demo: three hand-built trees exercising count_paths_with_sum.
    # Tree1
    root1 = TreeNode(5);
    root1.left = TreeNode(3)
    root1.right = TreeNode(1)
    root1.left.left = TreeNode(-8)
    root1.left.right = TreeNode(8)
    root1.right.left = TreeNode(2)
    root1.right.right = TreeNode(6)
    ans1 = count_paths_with_sum(root1, 0)
    print(f'Tree1 contains {ans1} of with {0} summation')
    # Tree2 (deeper, skewed, with negative values; target sum -14)
    root2 = TreeNode(-7)
    root2.left = TreeNode(-7)
    root2.left.right = TreeNode(1)
    root2.left.right.left = TreeNode(2)
    root2.right = TreeNode(7)
    root2.right.left = TreeNode(3)
    root2.right.right = TreeNode(20)
    root2.right.right.left = TreeNode(0)
    root2.right.right.left.left = TreeNode(-3)
    root2.right.right.left.left.right = TreeNode(2)
    root2.right.right.left.left.right.left = TreeNode(1)
    ans2 = count_paths_with_sum(root2, -14)
    print(f'Tree2 contains {ans2} of with {-14} summation')
    # Tree3 (all zeros: many overlapping zero-sum paths; target 4 has none)
    root3 = TreeNode(0)
    root3.left = TreeNode(0)
    root3.right = TreeNode(0)
    root3.right.left = TreeNode(0)
    root3.right.left.right = TreeNode(0)
    root3.right.right = TreeNode(0)
    ans3 = count_paths_with_sum(root3, 0)
    ans4 = count_paths_with_sum(root3, 4)
    print(f'Tree3 contains {ans3} of with {0} summation')
    print(f'Tree3 contains {ans4} of with {4} summation')
|
{"/chap4/Q12.py": ["/utils/tree.py"], "/chap4/Q11.py": ["/utils/tree.py"], "/chap4/Q10.py": ["/utils/misc.py"], "/chap4/Q12_alt.py": ["/utils/tree.py"], "/chap2/Q5.py": ["/utils/linkedlist.py"], "/utils/misc.py": ["/utils/tree.py"], "/chap2/Q7.py": ["/utils/linkedlist.py"], "/chap2/Q8.py": ["/utils/linkedlist.py"], "/chap2/Q6.py": ["/utils/linkedlist.py"]}
|
20,687
|
daigo0927/ctci-6th
|
refs/heads/master
|
/chap2/Q3_remove_middle_node.py
|
"""
ใใฎๅ้กใ้ฃใใใฎใฏใไธญ้ใใผใใ ใไธใใใใใจใใซใใใใๅใฎใใผใใ่ฆใใชใ็นใงใใใใใใใฃใฆไธญ้ใใผใใๅ้คใใใซใฏไธญ่บซ่ชไฝใ้ๆฌกๆธใๆใใชใใใฐใใใชใใ
"""
from linkedlist import Node
def remove_this_node(node):
    """
    Delete *node* from its singly linked list given only the node itself.

    Since the predecessor is unreachable, the node cannot be unlinked
    directly; instead every successor's data is shifted back one slot and
    the tail is dropped. (Does not work when *node* is the tail.)
    """
    cur = node
    while cur.next is not None:
        cur.data = cur.next.data
        if cur.next.next is None:
            # cur.next is the tail: the shift is complete, drop it
            cur.next = None
        else:
            # not at the tail yet: keep shifting
            cur = cur.next
if __name__ == "__main__":
ls = Node(1)
ls.appendToTail(2)
ls.appendToTail(3)
ls.appendToTail(4)
ls.appendToTail(5)
ls.appendToTail(6)
ls.appendToTail(7)
ls.appendToTail(8)
ls.appendToTail(9)
ls.appendToTail(10)
ls.printls()
delnode = ls.get_Nth_node(5)
remove_this_node(delnode)
ls.printls()
|
{"/chap4/Q12.py": ["/utils/tree.py"], "/chap4/Q11.py": ["/utils/tree.py"], "/chap4/Q10.py": ["/utils/misc.py"], "/chap4/Q12_alt.py": ["/utils/tree.py"], "/chap2/Q5.py": ["/utils/linkedlist.py"], "/utils/misc.py": ["/utils/tree.py"], "/chap2/Q7.py": ["/utils/linkedlist.py"], "/chap2/Q8.py": ["/utils/linkedlist.py"], "/chap2/Q6.py": ["/utils/linkedlist.py"]}
|
20,688
|
daigo0927/ctci-6th
|
refs/heads/master
|
/chap8/Q11.py
|
def make_change(amount, denoms, index):
    """
    Count the ways to represent *amount* using denoms[index:].
    Assumes the final denomination divides any remainder (e.g. pennies),
    so reaching the last index contributes exactly one way.
    """
    if index >= len(denoms) - 1:
        return 1
    coin = denoms[index]
    ways = 0
    used = 0
    # try every feasible count of the current coin, recurse on the rest
    while used * coin <= amount:
        ways += make_change(amount - used * coin, denoms, index + 1)
        used += 1
    return ways
if __name__ == '__main__':
    # US coin denominations, largest first (the order the recursion expects)
    denoms = [25, 10, 5, 1]
    amount = 100
    ways = make_change(amount, denoms, 0)
    print(f'{amount} cents can be change by {ways} patterns')
|
{"/chap4/Q12.py": ["/utils/tree.py"], "/chap4/Q11.py": ["/utils/tree.py"], "/chap4/Q10.py": ["/utils/misc.py"], "/chap4/Q12_alt.py": ["/utils/tree.py"], "/chap2/Q5.py": ["/utils/linkedlist.py"], "/utils/misc.py": ["/utils/tree.py"], "/chap2/Q7.py": ["/utils/linkedlist.py"], "/chap2/Q8.py": ["/utils/linkedlist.py"], "/chap2/Q6.py": ["/utils/linkedlist.py"]}
|
20,689
|
daigo0927/ctci-6th
|
refs/heads/master
|
/chap5/Q6.py
|
def bit_swap_required(a, b):
    """
    Count the bits that must be flipped to turn a into b.

    XOR marks the differing bits. Python integers sign-extend indefinitely,
    so for negative operands the shifted XOR never reaches 0 — it converges
    to -1 (all ones) instead; stop at either fixed point.
    """
    count = 0
    c = a ^ b
    while c not in (0, -1):  # idiomatic `not in`, tuple instead of list
        count += c & 1
        c >>= 1
    return count
if __name__ == '__main__':
    # One negative operand on purpose: exercises the sign-extension stop
    # condition inside bit_swap_required.
    a = -23432
    b = 512132
    ans = bit_swap_required(a, b)
    print(f'{ans} bit required to convert {bin(a)} <-> {bin(b)}')
|
{"/chap4/Q12.py": ["/utils/tree.py"], "/chap4/Q11.py": ["/utils/tree.py"], "/chap4/Q10.py": ["/utils/misc.py"], "/chap4/Q12_alt.py": ["/utils/tree.py"], "/chap2/Q5.py": ["/utils/linkedlist.py"], "/utils/misc.py": ["/utils/tree.py"], "/chap2/Q7.py": ["/utils/linkedlist.py"], "/chap2/Q8.py": ["/utils/linkedlist.py"], "/chap2/Q6.py": ["/utils/linkedlist.py"]}
|
20,690
|
daigo0927/ctci-6th
|
refs/heads/master
|
/chap8/Q5.py
|
def min_product(a, b):
    """Multiply two non-negative ints using only shifts and additions."""
    return min_product_helper(min(a, b), max(a, b))
def min_product_helper(smaller, bigger):
    """
    Recursive halving multiply: smaller*bigger = half*bigger + rest*bigger.
    Increments the module-level `counter` global once per addition performed.
    """
    global counter
    if smaller == 0:
        return 0
    elif smaller == 1:
        return bigger
    half = smaller >> 1
    half_prod = min_product_helper(half, bigger)
    other_prod = half_prod  # even case: the two halves are identical
    if smaller % 2 == 1:
        counter += 1
        other_prod = min_product_helper(smaller - half, bigger)
    counter += 1
    return half_prod + other_prod
if __name__ == '__main__':
    # `counter` is the global read by min_product_helper to tally additions
    counter = 0
    a, b = 13494, 22323
    product = a*b
    min_prod = min_product(a, b)
    print(f'Calculate result {product == min_prod} with {counter}.')
|
{"/chap4/Q12.py": ["/utils/tree.py"], "/chap4/Q11.py": ["/utils/tree.py"], "/chap4/Q10.py": ["/utils/misc.py"], "/chap4/Q12_alt.py": ["/utils/tree.py"], "/chap2/Q5.py": ["/utils/linkedlist.py"], "/utils/misc.py": ["/utils/tree.py"], "/chap2/Q7.py": ["/utils/linkedlist.py"], "/chap2/Q8.py": ["/utils/linkedlist.py"], "/chap2/Q6.py": ["/utils/linkedlist.py"]}
|
20,691
|
daigo0927/ctci-6th
|
refs/heads/master
|
/chap8/Q3.py
|
import random
def magic_slow(array):
    '''
    Get magic index (slow linear scan)
    Args: list<int> array: target sorted array
    Returns: int: first index i with array[i] == i, -1 if none exists
    '''
    for i, value in enumerate(array):
        if value == i:
            return i
    return -1
def magic_fast(array, start, end):
    '''
    Get magic index by binary search on array[start:end+1].
    Relies on the array being sorted with distinct integers: when
    array[mid] > mid the magic index can only lie to the left, else right.
    Returns: int: a magic index, -1 if none exists in the range
    '''
    if end < start:
        return -1
    mid = (start + end) // 2
    pivot = array[mid]
    if pivot == mid:
        return mid
    if pivot > mid:
        return magic_fast(array, start, mid - 1)
    return magic_fast(array, mid + 1, end)
if __name__ == '__main__':
    # sorted, distinct integers; index 5 holds value 5 (the magic index)
    array = [-14, -12, 0, 1, 2, 5, 9, 10, 23, 25, 30]
    print(f'{magic_slow(array)} is the magic index in {array} (slow ver.)')
    print(f'{magic_fast(array, 0, len(array)-1)} is the magic index in {array} (fast ver.)')
|
{"/chap4/Q12.py": ["/utils/tree.py"], "/chap4/Q11.py": ["/utils/tree.py"], "/chap4/Q10.py": ["/utils/misc.py"], "/chap4/Q12_alt.py": ["/utils/tree.py"], "/chap2/Q5.py": ["/utils/linkedlist.py"], "/utils/misc.py": ["/utils/tree.py"], "/chap2/Q7.py": ["/utils/linkedlist.py"], "/chap2/Q8.py": ["/utils/linkedlist.py"], "/chap2/Q6.py": ["/utils/linkedlist.py"]}
|
20,692
|
daigo0927/ctci-6th
|
refs/heads/master
|
/chap5/Q3_alt2.py
|
SEQ_LENGTH = 32  # the number is treated as a fixed 32-bit value

def get_max_seq(seqs):
    """
    Longest run of 1s obtainable from a run triple by flipping one 0.
    Args: list<int> seqs: run lengths [current 1s, preceding 0s, earlier 1s]
        (the original docstring had the order backwards)
    Returns: int: length of the longest achievable 1s sequence
    """
    if seqs[1] == 1:    # a single 0 -> flipping it merges both 1s runs
        return seqs[0] + seqs[2] + 1
    elif seqs[1] == 0:  # no 0s -> nothing to flip, take the longer side
        return max(seqs[0], seqs[2])
    else:               # many 0s -> take the longer side, flip one adjacent 0
        return max(seqs[0], seqs[2]) + 1

def shift(seqs):
    """Push the run lengths back one slot and start a new (empty) run."""
    seqs[2] = seqs[1]
    seqs[1] = seqs[0]
    seqs[0] = 0
    return seqs

def longest_seq(n):
    """
    Length of the longest 1s run obtainable in n by flipping a single bit.
    Args: int n: target number (interpreted as SEQ_LENGTH bits)
    Returns: int: length of the longest 1s sequence after one flip
    """
    searching_for = 0
    seqs = [0, 0, 0]
    max_seq = 1
    for i in range(SEQ_LENGTH):
        if (n & 1) != searching_for:
            if searching_for == 1:  # a 1s run just ended: evaluate the triple
                max_seq = max(max_seq, get_max_seq(seqs))
            searching_for = n & 1   # switch to counting the other bit value
            seqs = shift(seqs)      # start counting the new run
        seqs[0] += 1
        n >>= 1
    if searching_for == 0:  # ended while counting 0s: close the final triple
        seqs = shift(seqs)
    # BUG FIX: the original computed the final candidate but discarded it
    # (`final_seq = get_max_seq(seqs)` was never used), so the last run of 1s
    # was ignored — e.g. longest_seq(7) returned 3 instead of 4.
    max_seq = max(max_seq, get_max_seq(seqs))
    return max_seq
if __name__ == '__main__':
    # 1775 = 0b11011101111: flipping the 0 between the runs of 4 and 3 ones
    # yields a run of 8
    num = 1775
    ans = longest_seq(num)
    print(f'{num} -> {ans}')
|
{"/chap4/Q12.py": ["/utils/tree.py"], "/chap4/Q11.py": ["/utils/tree.py"], "/chap4/Q10.py": ["/utils/misc.py"], "/chap4/Q12_alt.py": ["/utils/tree.py"], "/chap2/Q5.py": ["/utils/linkedlist.py"], "/utils/misc.py": ["/utils/tree.py"], "/chap2/Q7.py": ["/utils/linkedlist.py"], "/chap2/Q8.py": ["/utils/linkedlist.py"], "/chap2/Q6.py": ["/utils/linkedlist.py"]}
|
20,693
|
daigo0927/ctci-6th
|
refs/heads/master
|
/utils/tree.py
|
class TreeNode(object):
    """Binary search tree node; insert_in_order sends duplicates right."""

    def __init__(self, data):
        self.data = data
        self.left = None     # left child (or None)
        self.right = None    # right child (or None)
        self.parent = None   # back-pointer, maintained by _set_left/_set_right
        self._size = 1       # node count of this subtree (see size())

    def _set_left(self, left):
        """Attach *left* as the left child and fix its parent pointer."""
        self.left = left
        if left is not None:
            left.parent = self

    def _set_right(self, right):
        """Attach *right* as the right child and fix its parent pointer."""
        self.right = right
        if right is not None:
            right.parent = self

    def insert_in_order(self, d):
        """Insert value d keeping BST order (d < data goes left, else right)."""
        if d < self.data:
            if self.left is None:
                self._set_left(TreeNode(d))
            else:
                self.left.insert_in_order(d)
        else:
            if self.right is None:
                self._set_right(TreeNode(d))
            else:
                self.right.insert_in_order(d)
        self._size += 1

    def size(self):
        """Node count of this subtree (accurate for insert_in_order-built trees)."""
        return self._size

    def isBST(self):
        """
        Recursively verify the BST property (left < node < right on children).

        BUG FIX: the original tested `not self.right.isBST` — a bound method
        is always truthy, so the right subtree was never actually validated;
        it now calls the method.
        NOTE(review): the strict `>=` check rejects duplicates, yet
        insert_in_order places duplicates in the right subtree — confirm
        whether duplicate values are meant to be legal.
        """
        if self.left is not None:
            if self.data < self.left.data or not self.left.isBST():
                return False
        if self.right is not None:
            if self.data >= self.right.data or not self.right.isBST():
                return False
        return True

    def height(self):
        """Height counted in nodes: a leaf has height 1."""
        left_height = self.left.height() if self.left is not None else 0
        right_height = self.right.height() if self.right is not None else 0
        return 1 + max(left_height, right_height)

    def find(self, d):
        """Return the first node holding d, or None if absent."""
        if d == self.data:
            return self
        elif d <= self.data:
            return self.left.find(d) if self.left is not None else None
        else:  # d > self.data (the original trailing `else: return None` was unreachable)
            return self.right.find(d) if self.right is not None else None

    @staticmethod
    def _create_minimal_BST(array, start, end):
        """Build a height-balanced BST from sorted array[start:end+1]."""
        if end < start:
            return None
        mid = (start + end)//2
        n = TreeNode(array[mid])
        # NOTE(review): _size is left at 1 for each node built here, so
        # size() is only meaningful for trees grown via insert_in_order.
        n._set_left(TreeNode._create_minimal_BST(array, start, mid-1))
        n._set_right(TreeNode._create_minimal_BST(array, mid+1, end))
        return n

    @staticmethod
    def create_minimal_BST(array):
        """Build a height-balanced BST from a sorted array of values."""
        return TreeNode._create_minimal_BST(array, 0, len(array)-1)
|
{"/chap4/Q12.py": ["/utils/tree.py"], "/chap4/Q11.py": ["/utils/tree.py"], "/chap4/Q10.py": ["/utils/misc.py"], "/chap4/Q12_alt.py": ["/utils/tree.py"], "/chap2/Q5.py": ["/utils/linkedlist.py"], "/utils/misc.py": ["/utils/tree.py"], "/chap2/Q7.py": ["/utils/linkedlist.py"], "/chap2/Q8.py": ["/utils/linkedlist.py"], "/chap2/Q6.py": ["/utils/linkedlist.py"]}
|
20,694
|
daigo0927/ctci-6th
|
refs/heads/master
|
/chap10/Q9.py
|
def find_element(matrix, elem):
    """
    Staircase search in a matrix whose rows and columns are sorted ascending.
    Starts at the top-right corner; each comparison eliminates a row or a
    column, giving O(rows + cols) time.
    Returns: bool: whether elem occurs in the matrix
    """
    row, col = 0, len(matrix[0]) - 1
    while row < len(matrix) and col >= 0:
        current = matrix[row][col]
        if current == elem:
            return True
        if current > elem:
            col -= 1  # everything below in this column is even larger
        else:
            row += 1  # everything left in this row is even smaller
    return False
if __name__ == '__main__':
    # rows and columns both sorted ascending — the search's precondition
    matrix = [[15, 20, 40, 85],
              [20, 35, 80, 95],
              [30, 55, 95, 105],
              [40, 80, 100, 120]]
    elem = 55
    if find_element(matrix, elem):
        print('True')
    else:
        print('False')
|
{"/chap4/Q12.py": ["/utils/tree.py"], "/chap4/Q11.py": ["/utils/tree.py"], "/chap4/Q10.py": ["/utils/misc.py"], "/chap4/Q12_alt.py": ["/utils/tree.py"], "/chap2/Q5.py": ["/utils/linkedlist.py"], "/utils/misc.py": ["/utils/tree.py"], "/chap2/Q7.py": ["/utils/linkedlist.py"], "/chap2/Q8.py": ["/utils/linkedlist.py"], "/chap2/Q6.py": ["/utils/linkedlist.py"]}
|
20,695
|
daigo0927/ctci-6th
|
refs/heads/master
|
/chap8/Q11_alt.py
|
def make_change(amount, denoms, index, map_):
    """
    Memoized count of the ways to represent *amount* using denoms[index:].
    map_[amount][index] caches computed counts; 0 marks an unfilled entry
    (safe because every reachable count is at least 1).
    """
    if map_[amount][index] > 0:
        return map_[amount][index]
    if index >= len(denoms) - 1:
        return 1
    coin = denoms[index]
    ways = 0
    used = 0
    # try every feasible count of the current coin, recurse on the rest
    while used * coin <= amount:
        ways += make_change(amount - used * coin, denoms, index + 1, map_)
        used += 1
    map_[amount][index] = ways
    return ways
if __name__ == '__main__':
    # US coin denominations, largest first (the order the recursion expects)
    denoms = [25, 10, 5, 1]
    amount = 100
    # memo table: map_[remaining_amount][denom_index] -> ways (0 = unfilled)
    map_ = [[0]*len(denoms) for _ in range(amount+1)]
    # BUG FIX: pass `amount` instead of a hard-coded 100, so the computation
    # stays in sync with the printed amount if it is ever changed
    ways = make_change(amount, denoms, 0, map_)
    print(f'{amount} can be changed by {ways} patterns')
|
{"/chap4/Q12.py": ["/utils/tree.py"], "/chap4/Q11.py": ["/utils/tree.py"], "/chap4/Q10.py": ["/utils/misc.py"], "/chap4/Q12_alt.py": ["/utils/tree.py"], "/chap2/Q5.py": ["/utils/linkedlist.py"], "/utils/misc.py": ["/utils/tree.py"], "/chap2/Q7.py": ["/utils/linkedlist.py"], "/chap2/Q8.py": ["/utils/linkedlist.py"], "/chap2/Q6.py": ["/utils/linkedlist.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.