repo_name
stringlengths 5
104
| path
stringlengths 4
248
| content
stringlengths 102
99.9k
|
|---|---|---|
rudyryk/python-samples
|
hello_tornado/hello_asyncio.py
|
# hello_asyncio.py
import asyncio
import tornado.ioloop
import tornado.web
import tornado.gen
from tornado.httpclient import AsyncHTTPClient
try:
import aioredis
except ImportError:
print("Please install aioredis: pip install aioredis")
exit(0)
class AsyncRequestHandler(tornado.web.RequestHandler):
    """Base class for request handlers with `asyncio` coroutines support.

    It runs methods on Tornado's ``AsyncIOMainLoop`` instance.
    Subclasses have to implement one of `get_async()`, `post_async()`, etc.
    Asynchronous method should be decorated with `@asyncio.coroutine`.

    Usage example::

        class MyAsyncRequestHandler(AsyncRequestHandler):
            @asyncio.coroutine
            def get_async(self):
                html = yield from self.application.http.get('http://python.org')
                self.write({'html': html})

    You may also just re-define `get()` or `post()` methods and they will be simply run
    synchronously. This may be convenient for draft implementation, i.e. for testing
    new libs or concepts.
    """

    @tornado.gen.coroutine
    def get(self, *args, **kwargs):
        """Handle GET request asynchronously, delegates to
        ``self.get_async()`` coroutine.
        """
        yield self._run_method('get', *args, **kwargs)

    @tornado.gen.coroutine
    def post(self, *args, **kwargs):
        """Handle POST request asynchronously, delegates to
        ``self.post_async()`` coroutine.
        """
        yield self._run_method('post', *args, **kwargs)

    @asyncio.coroutine
    def _run_async(self, coroutine, future_, *args, **kwargs):
        """Perform coroutine and set result to ``Future`` object."""
        try:
            result = yield from coroutine(*args, **kwargs)
            future_.set_result(result)
        except Exception as e:
            future_.set_exception(e)
            # Bug fix: `traceback` was referenced here without ever being
            # imported, so the error path itself raised a NameError and
            # masked the original exception.
            import traceback
            print(traceback.format_exc())

    def _run_method(self, method_, *args, **kwargs):
        """Run ``get_async()`` / ``post_async()`` / etc. coroutine
        wrapping result with ``tornado.concurrent.Future`` for
        compatibility with ``gen.coroutine``.

        Raises ``HTTPError(405)`` when the subclass does not provide the
        matching ``<method>_async`` coroutine.
        """
        coroutine = getattr(self, '%s_async' % method_, None)
        if not coroutine:
            raise tornado.web.HTTPError(405)
        # Explicit import: the module previously relied on tornado.web
        # importing tornado.concurrent transitively.
        import tornado.concurrent
        future_ = tornado.concurrent.Future()
        # Bug fix: `asyncio.async` is deprecated and was removed in
        # Python 3.10; `ensure_future` (available since 3.4.4) is the
        # drop-in replacement.
        asyncio.ensure_future(
            self._run_async(coroutine, future_, *args, **kwargs)
        )
        return future_
class MainHandler(AsyncRequestHandler):
    """Demo handler: round-trips a value through Redis on the asyncio loop."""

    @asyncio.coroutine
    def get_async(self):
        # The shared connection is created once in Application.init_with_loop.
        store = self.application.redis
        yield from store.set('my-key', 'OK')
        stored_value = yield from store.get('my-key')
        self.write('Hello asyncio.coroutine: %s' % stored_value)
class Application(tornado.web.Application):
    """Tornado application configured to run on the asyncio event loop."""

    def __init__(self):
        # Prepare IOLoop class to run instances on asyncio
        # NOTE: configure() must run before any IOLoop instance is created.
        tornado.ioloop.IOLoop.configure('tornado.platform.asyncio.AsyncIOMainLoop')
        handlers = [
            (r"/", MainHandler),
        ]
        super().__init__(handlers, debug=True)

    def init_with_loop(self, loop):
        # Blocks until the Redis connection is established; call once at
        # startup, before the loop runs forever. Handlers read self.redis.
        self.redis = loop.run_until_complete(
            aioredis.create_redis(('localhost', 6379), loop=loop)
        )
if __name__ == "__main__":
    print("Run hello_asyncio ... http://127.0.0.1:8888")
    application = Application()
    application.listen(8888)
    # Tornado was configured (in Application.__init__) to run on asyncio,
    # so driving the plain asyncio loop also drives Tornado.
    loop = asyncio.get_event_loop()
    application.init_with_loop(loop)
    loop.run_forever()
|
ieuan1630-cmis/ieuan1630-cmis-cs2
|
cs2quiz1.py
|
#40/40
#Part 1: Terminology (15 points) --> 15/15
#1 1pt) What is the symbol "=" used for?
#to assign and store values to and in variables
# 1pt
#
#2 3pts) Write a technical definition for 'function'
#a named sequence of calculations which takes input and returns output
# 3pts
#
#3 1pt) What does the keyword "return" do?
#it gives back the output or result of the function
# 1pt
#
#4 5pts) We know 5 basic data types. Write the name for each one and provide two
# examples of each below
# 1: integer ex: 1, 2
# 2: floating point ex: 1.2, 1.3
# 3: string ex: "hi", "hello"
# 4: boolean ex: True, False
# 5: tuple ex: ("HEllo", 3), ("Bob", 10, "fat")
# 5pts
#
#5 2pts) What is the difference between a "function definition" and a
# "function call"?
#a function definition does not result in any output being presented, it simply defines a set of calculations which are run if and only if they are called by a function call
# 2pts
#
#
#6 3pts) What are the 3 phases that every computer program has? What happens in
# each of them
# 1:input (the program takes some input values, most often from the user)
# 2:processing (the program does something with those input values to for instance calculate something)
# 3:output (the program returns the product of its labours (processing)), often something printed
# 3pts
#
#Part 2: Programming (25 points) --> 25/25
#Write a program that asks the user for the areas of 3 circles.
#It should then calculate the diameter of each and the sum of the diameters
#of the 3 circles.
#Finally, it should produce output like this:
#Circle Diameter
#c1 ...
#c2 ...
#c3 ...
#TOTALS ...
# Hint: Radius is the square root of the area divided by pi (a = pi(r)^2) so r = sqrt(a/pi)
import math
#1 pt for header line
#3 pt for correct formula
#1 pt for return value
#1 pt for parameter name
#1 pt for function name
def circarea_to_diameter(circarea):
    """Return the diameter of a circle whose area is *circarea* (a = pi*r^2)."""
    radius = math.sqrt(circarea / math.pi)
    return 2 * radius
def sum_three(x, y, z):
    """Return the sum of the three given values."""
    return sum((x, y, z))
#1pt for header line
#1pt for parameter names
#1pt for return value
#1pt for correct output format
#3pt for correct use of format function
def output(d1, d2, d3, total):
    # Render the results table as one multi-line string.
    # NOTE(review): the triple-quoted literal's internal whitespace IS the
    # table layout; this copy may have collapsed spacing -- verify against
    # the upstream file before reformatting.
    return """
Circle Diameter
C1 {}
C2 {}
C3 {}
Totals {}
""".format(d1, d2, d3, total)
#1pt header line
#1pt getting input
#1pt converting input
#1pt for calling output function
#2pt for correct diameter formula
#1pt for variable names
def main():
    # input -- Python 2 raw_input returns the areas as strings
    C1 = raw_input("Area of C1: ")
    C2 = raw_input("Area of C2: ")
    C3 = raw_input("Area of C3: ")
    # processing -- convert each area to float and derive its diameter
    d1 = circarea_to_diameter(float(C1))
    d2 = circarea_to_diameter(float(C2))
    d3 = circarea_to_diameter(float(C3))
    total = sum_three(d1, d2, d3)
    # output -- Python 2 print statement
    print output(d1, d2, d3, total)
#1pt for calling main
main()
#1pt explanatory comments
#1pt code format
|
JohanComparat/pySU
|
spm/bin_SMF/create_table_snr.py
|
import astropy.io.fits as fits
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as p
import numpy as n
import os
import sys
from scipy.stats import scoreatpercentile as sc
from scipy.interpolate import interp1d
# Which catalog to load: 'deep2', 'sdss' or 'boss' (first CLI argument).
survey = sys.argv[1]
# Redshift window and the stellar-population model (IMF x library) grid.
z_min, z_max = 0., 1.6
imfs = ["Chabrier_ELODIE_", "Chabrier_MILES_", "Chabrier_STELIB_", "Kroupa_ELODIE_", "Kroupa_MILES_", "Kroupa_STELIB_", "Salpeter_ELODIE_", "Salpeter_MILES_", "Salpeter_STELIB_" ]
z_bins = n.array([0, 0.025, 0.375, 0.7, 0.85, 1.6])
key_SNR = 'SNR_ALL'
# Per-wavelength-window signal-to-noise columns; window bounds given below.
SNR_keys = n.array([ 'SNR_32_35', 'SNR_35_39', 'SNR_39_41', 'SNR_41_55', 'SNR_55_68', 'SNR_68_74', 'SNR_74_93' ])
SNR_w_min = n.array([ 32, 35, 39, 41, 55, 68, 74 ])
SNR_w_max = n.array([ 35, 39, 41, 55, 68, 74, 93 ])
# 40 (rest-frame) redshifted to each z-bin midpoint; searchsorted picks the
# SNR window containing that wavelength for each redshift bin.
wl_40 = ((z_bins[1:]+z_bins[:-1]) * 0.5 + 1)*40.
snr_ids = n.searchsorted(SNR_w_max, wl_40)
print(SNR_keys[snr_ids])
out_dir = os.path.join(os.environ['OBS_REPO'], 'spm', 'results')
#path_2_MAG_cat = os.path.join( os.environ['HOME'], 'SDSS', "dr14_specphot_gri.fits" )
#hd = fits.open(path_2_MAG_cat)
#path_2_sdss_cat = os.path.join( os.environ['HOME'], 'SDSS', '26', 'catalogs', "FireFly.fits" )
#path_2_eboss_cat = os.path.join( os.environ['HOME'], 'SDSS', 'v5_10_0', 'catalogs', "FireFly.fits" )
path_2_sdss_cat = os.path.join( os.environ['OBS_REPO'], 'SDSS', '26', 'catalogs', "FireFly.fits" )
path_2_eboss_cat = os.path.join( os.environ['OBS_REPO'], 'SDSS', 'v5_10_0', 'catalogs', "FireFly.fits" )
# OPENS THE CATALOGS
print("Loads catalog")
if survey =='deep2':
    deep2_dir = os.path.join(os.environ['OBS_REPO'], 'DEEP2')
    path_2_deep2_cat = os.path.join( deep2_dir, "zcat.deep2.dr4.v4.LFcatalogTC.Planck13.spm.fits" )
    catalog = fits.open(path_2_deep2_cat)[1].data
    # NOTE(review): z_name/z_err_name/class_name/zwarning are NOT set on this
    # branch, so the selections below would raise NameError for 'deep2' --
    # verify intended usage.
if survey =='sdss':
    catalog = fits.open(path_2_sdss_cat)[1].data
    z_name, z_err_name, class_name, zwarning = 'Z', 'Z_ERR', 'CLASS', 'ZWARNING'
if survey =='boss':
    catalog = fits.open(path_2_eboss_cat)[1].data
    z_name, z_err_name, class_name, zwarning = 'Z_NOQSO', 'Z_ERR_NOQSO', 'CLASS_NOQSO', 'ZWARNING_NOQSO'
# Only the first IMF/library combination is used for the quality cuts below.
IMF = imfs[0]
prf = IMF.split('_')[0]+' & '+IMF.split('_')[1]
print(IMF, prf)
name, zflg_val, prefix = prf, 0., IMF
# Boolean masks of increasing strictness: valid redshift -> positive SNR ->
# converged stellar mass -> mass uncertainty < 0.8 dex -> < 0.4 dex.
catalog_0 = (catalog[z_err_name] > 0.) & (catalog[z_name] > catalog[z_err_name]) & (catalog[class_name]=='GALAXY') & (catalog[zwarning]==zflg_val) & (catalog[z_name] > z_min) & (catalog[z_name] < z_max)
catalog_zOk = catalog_0 & (catalog['SNR_ALL']>0)
converged = (catalog_zOk)&(catalog[prefix+'stellar_mass'] < 10**13. ) & (catalog[prefix+'stellar_mass'] > 10**4 ) & (catalog[prefix+'stellar_mass'] > catalog[prefix+'stellar_mass_low_1sig'] ) & (catalog[prefix+'stellar_mass'] < catalog[prefix+'stellar_mass_up_1sig'] )
dex04 = (converged) & (catalog[prefix+'stellar_mass'] < 10**14. ) & (catalog[prefix+'stellar_mass'] > 0 ) & (catalog[prefix+'stellar_mass'] > catalog[prefix+'stellar_mass_low_1sig'] ) & (catalog[prefix+'stellar_mass'] < catalog[prefix+'stellar_mass_up_1sig'] ) & ( - n.log10(catalog[prefix+'stellar_mass_low_1sig']) + n.log10(catalog[prefix+'stellar_mass_up_1sig']) < 0.8 )
dex02 = (dex04) & ( - n.log10(catalog[prefix+'stellar_mass_low_1sig']) + n.log10(catalog[prefix+'stellar_mass_up_1sig']) < 0.4 )
#target_bits
program_names = n.array(list(set( catalog['PROGRAMNAME'] )))
program_names.sort()
sourcetypes = n.array(list(set( catalog['SOURCETYPE'] )))
sourcetypes.sort()
# Helper lambdas: count of selected rows, and percentile summary of a column.
length = lambda selection : len(selection.nonzero()[0])
pcs_ref = list(n.arange(0., 101, 5))
g = lambda key, s1, pcs = pcs_ref : n.hstack(( length(s1), sc(catalog[key][s1], pcs) ))
sel_pg = lambda pgr : (catalog_zOk) & (catalog['PROGRAMNAME']==pgr)
sel_st = lambda pgr : (catalog_zOk) & (catalog['SOURCETYPE']==pgr)
sel0_pg = lambda pgr : (catalog_0) & (catalog['PROGRAMNAME']==pgr)
sel0_st = lambda pgr : (catalog_0) & (catalog['SOURCETYPE']==pgr)
all_galaxies = []
tpps = []
# One LaTeX table row per SOURCETYPE with more than 100 good-redshift objects:
# per redshift bin, the number of objects and the percentile rank of SNR=5
# and SNR=20 within that bin's SNR distribution.
for pg in sourcetypes:
    sel_all = sel_st(pg)
    n_all = length( sel_all )
    if n_all > 100 :
        #print(pg, n_all)
        all_galaxies.append(n_all)
        all_out = []
        for z_Min, z_Max, snr_key in zip(z_bins[:-1], z_bins[1:], SNR_keys[snr_ids]):
            s_z = sel_all &(catalog[z_name] >= z_Min) & (catalog[z_name] < z_Max)
            n_z = length(s_z)
            #print(z_Min, z_Max, n_z)
            if n_z > 0 :
                #print(n.min(catalog[snr_key][s_z]), n.max(catalog[snr_key][s_z]))
                # Inverse percentile curve: itp(x) = fraction of objects with SNR below x.
                itp = interp1d(sc(catalog[snr_key][s_z], pcs_ref), pcs_ref, kind='linear', fill_value= 100., bounds_error=False)
                #print(itp.x, itp.y)
                all_out.append( [n_z, itp(5), itp(20)] )
            else :
                # Empty redshift bin: sentinel values.
                all_out.append([0., -1, -1])
        all_out = n.hstack((all_out))
        tpp = pg + " & " + str(int(n_all)) + " & " + " & ".join(n.array([ str(int(el)) for el in all_out]) ) + ' \\\\ \n'
        print( tpp)
        tpps.append(tpp)
all_galaxies = n.array(all_galaxies)
tpps = n.array(tpps)
# Emit rows sorted by descending galaxy count.
ids = n.argsort(all_galaxies)[::-1]
out_file = os.path.join(os.environ['OBS_REPO'], 'spm', 'results', "table_comp_"+survey+"_snr_all_sourcetype_SNR_moments.tex")
f=open(out_file, 'w')
#f.write('source type & N & \multicolumn{c}{2}{N galaxies} && \multicolumn{c}{2}{SNR ALL$>0$} & \\multicolumn{c}{2}{frefly converged} & \multicolumn{c}{2}{$\sigma_{\log_M}<0.4$} & \multicolumn{c}{2}{$\sigma_{\log_M}<0.2$} \\\\ \n')
#f.write(' & & N & % & & N & % & N & % & N & % \\\\ \n')
for jj in ids :
    f.write( tpps[jj] )
f.close()
sys.exit()
# NOTE(review): everything below is UNREACHABLE -- the script calls
# sys.exit() above. It is retained draft code and references names that are
# never defined in this file (deep2, boss, sdss, f, get_basic_stat_deep2,
# get_basic_stat_firefly_DR14). Consider deleting or moving to a branch.
#converged = (catalog_zOk)&(catalog[prefix+'stellar_mass'] < 10**13. ) & (catalog[prefix+'stellar_mass'] > 10**4 ) & (catalog[prefix+'stellar_mass'] > catalog[prefix+'stellar_mass_low_1sig'] ) & (catalog[prefix+'stellar_mass'] < catalog[prefix+'stellar_mass_up_1sig'] )
#dex04 = (converged) & (catalog[prefix+'stellar_mass'] < 10**14. ) & (catalog[prefix+'stellar_mass'] > 0 ) & (catalog[prefix+'stellar_mass'] > catalog[prefix+'stellar_mass_low_1sig'] ) & (catalog[prefix+'stellar_mass'] < catalog[prefix+'stellar_mass_up_1sig'] ) & ( - n.log10(catalog[prefix+'stellar_mass_low_1sig']) + n.log10(catalog[prefix+'stellar_mass_up_1sig']) < 0.8 )
#dex02 = (dex04) & ( - n.log10(catalog[prefix+'stellar_mass_low_1sig']) + n.log10(catalog[prefix+'stellar_mass_up_1sig']) < 0.4 )
#m_catalog = n.log10(catalog[prefix+'stellar_mass'])
#w_catalog = n.ones_like(catalog[prefix+'stellar_mass'])
#print(ld(catalog_zOk))
#return name + " & $"+ sld(converged)+"$ ("+str(n.round(ld(converged)/ld(catalog_zOk)*100.,1))+") & $"+ sld(dex04)+"$ ("+str(n.round(ld(dex04)/ld(catalog_zOk)*100.,1))+") & $"+ sld(dex02)+ "$ ("+str(n.round(ld(dex02)/ld(catalog_zOk)*100.,1))+r") \\\\"
##return catalog_sel, m_catalog, w_catalog
sys.exit()
for IMF in imfs :
    prf = IMF.split('_')[0]+' & '+IMF.split('_')[1]
    l2w = get_basic_stat_deep2(deep2, 'ZBEST', 'ZQUALITY', prf, 2., IMF, o2=False)
    f.write(l2w + " \n")
f.write('\\hline \n')
#l2w = get_basic_stat_DR12(boss_12_portSF_kr, 'Z', 'Z_ERR', 'Portsmouth Kroupa Star-Forming & BOSS & 12 ', 0.)
#f.write(l2w + " \n")
#l2w = get_basic_stat_DR12(boss_12_portPA_kr, 'Z', 'Z_ERR', 'Portsmouth Kroupa Passive & BOSS & 12 ', 0.)
#f.write(l2w + " \n")
#l2w = get_basic_stat_DR12(boss_12_portSF_sa, 'Z', 'Z_ERR', 'Portsmouth Salpeter Star-Forming & BOSS & 12 ', 0.)
#f.write(l2w + " \n")
#l2w = get_basic_stat_DR12(boss_12_portPA_sa, 'Z', 'Z_ERR', 'Portsmouth Salpeter Passive & BOSS & 12 ', 0.)
#f.write(l2w + " \n")
for IMF in imfs :
    prf = IMF.split('_')[0]+' & '+IMF.split('_')[1]
    l2w = get_basic_stat_firefly_DR14(boss, 'Z_NOQSO', 'Z_ERR_NOQSO', 'CLASS_NOQSO', 'ZWARNING_NOQSO', prf, 0., IMF)
    f.write(l2w + " \n")
f.write('\\hline \n')
#l2w = get_basic_stat_DR12(sdss_12_portSF_kr, 'Z', 'Z_ERR', 'Portsmouth Kroupa Star-Forming & SDSS & 12 ', 0.)
#f.write(l2w + " \n")
#l2w = get_basic_stat_DR12(sdss_12_portPA_kr, 'Z', 'Z_ERR', 'Portsmouth Kroupa Passive & SDSS & 12 ', 0.)
#f.write(l2w + " \n")
#l2w = get_basic_stat_DR12(sdss_12_portSF_sa, 'Z', 'Z_ERR', 'Portsmouth Salpeter Star-Forming & SDSS & 12 ', 0.)
#f.write(l2w + " \n")
#l2w = get_basic_stat_DR12(sdss_12_portPA_sa, 'Z', 'Z_ERR', 'Portsmouth Salpeter Passive & SDSS & 12 ', 0.)
#f.write(l2w + " \n")
for IMF in imfs :
    prf = IMF.split('_')[0]+' & '+IMF.split('_')[1]
    l2w = get_basic_stat_firefly_DR14(sdss, 'Z', 'Z_ERR', 'CLASS', 'ZWARNING', prf, 0., IMF)
    f.write(l2w + " \n")
f.write('\\hline \n')
f.close()
#"""
out_file = os.path.join(os.environ['OBS_REPO'], 'spm', 'results', "table_2_r.tex")
f=open(out_file, 'w')
for IMF in imfs :
    prf = IMF.split('_')[0]+' & '+IMF.split('_')[1]
    l2w = get_basic_stat_deep2(deep2, 'ZBEST', 'ZQUALITY', prf, 2., IMF, o2=True)
    f.write(l2w + " \n")
f.close()
|
edwinsteele/visual-commute
|
vcapp/migrations/0002_initial.py
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: creates the initial vcapp tables
    (Station, Line, Trip, TripStop, Segment, InterchangeStation).
    """

    def forwards(self, orm):
        # Adding model 'Station'
        db.create_table('vcapp_station', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('station_name', self.gf('django.db.models.fields.CharField')(max_length=50)),
            ('lon', self.gf('django.db.models.fields.FloatField')()),
            ('lat', self.gf('django.db.models.fields.FloatField')()),
        ))
        db.send_create_signal('vcapp', ['Station'])

        # Adding model 'Line'
        db.create_table('vcapp_line', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('line_name', self.gf('django.db.models.fields.CharField')(max_length=50)),
        ))
        db.send_create_signal('vcapp', ['Line'])

        # Adding model 'Trip'
        db.create_table('vcapp_trip', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('timetable_type', self.gf('django.db.models.fields.CharField')(max_length=2)),
            ('line', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['vcapp.Line'])),
        ))
        db.send_create_signal('vcapp', ['Trip'])

        # Adding model 'TripStop'
        db.create_table('vcapp_tripstop', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('departure_time', self.gf('django.db.models.fields.TimeField')()),
            ('trip', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['vcapp.Trip'])),
            ('station', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['vcapp.Station'])),
        ))
        db.send_create_signal('vcapp', ['TripStop'])

        # Adding model 'Segment' (an edge between two TripStops of one Trip)
        db.create_table('vcapp_segment', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('departure_tripstop', self.gf('django.db.models.fields.related.ForeignKey')(related_name='departure_point', to=orm['vcapp.TripStop'])),
            ('arrival_tripstop', self.gf('django.db.models.fields.related.ForeignKey')(related_name='arrival_point', to=orm['vcapp.TripStop'])),
            ('trip', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['vcapp.Trip'])),
        ))
        db.send_create_signal('vcapp', ['Segment'])

        # Adding model 'InterchangeStation' (Line <-> Station join table)
        db.create_table('vcapp_interchangestation', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('line', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['vcapp.Line'])),
            ('station', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['vcapp.Station'])),
        ))
        db.send_create_signal('vcapp', ['InterchangeStation'])

    def backwards(self, orm):
        # Deleting model 'Station'
        db.delete_table('vcapp_station')

        # Deleting model 'Line'
        db.delete_table('vcapp_line')

        # Deleting model 'Trip'
        db.delete_table('vcapp_trip')

        # Deleting model 'TripStop'
        db.delete_table('vcapp_tripstop')

        # Deleting model 'Segment'
        db.delete_table('vcapp_segment')

        # Deleting model 'InterchangeStation'
        db.delete_table('vcapp_interchangestation')

    # Frozen ORM snapshot used by South to reconstruct model state.
    models = {
        'vcapp.interchangestation': {
            'Meta': {'object_name': 'InterchangeStation'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'line': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['vcapp.Line']"}),
            'station': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['vcapp.Station']"})
        },
        'vcapp.line': {
            'Meta': {'object_name': 'Line'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'line_name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'vcapp.segment': {
            'Meta': {'object_name': 'Segment'},
            'arrival_tripstop': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'arrival_point'", 'to': "orm['vcapp.TripStop']"}),
            'departure_tripstop': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'departure_point'", 'to': "orm['vcapp.TripStop']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'trip': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['vcapp.Trip']"})
        },
        'vcapp.station': {
            'Meta': {'object_name': 'Station'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'lat': ('django.db.models.fields.FloatField', [], {}),
            'lon': ('django.db.models.fields.FloatField', [], {}),
            'station_name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'vcapp.trip': {
            'Meta': {'object_name': 'Trip'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'line': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['vcapp.Line']"}),
            'timetable_type': ('django.db.models.fields.CharField', [], {'max_length': '2'})
        },
        'vcapp.tripstop': {
            'Meta': {'object_name': 'TripStop'},
            'departure_time': ('django.db.models.fields.TimeField', [], {}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'station': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['vcapp.Station']"}),
            'trip': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['vcapp.Trip']"})
        }
    }

    complete_apps = ['vcapp']
|
Winawer/exercism
|
python/house/house.py
|
# (thing, action) pairs for "The House That Jack Built", in cumulative order.
parts = (('house', 'Jack built'),
         ('malt', 'lay in'),
         ('rat', 'ate'),
         ('cat', 'killed'),
         ('dog', 'worried'),
         ('cow with the crumpled horn', 'tossed'),
         ('maiden all forlorn', 'milked'),
         ('man all tattered and torn', 'kissed'),
         ('priest all shaven and shorn', 'married'),
         ('rooster that crowed in the morn', 'woke'),
         ('farmer sowing his corn', 'kept'),
         ('horse and the hound and the horn', 'belonged to'))


def verse(n):
    """Return verse *n*: thing and action on one line for verse 0,
    on two lines otherwise."""
    thing, action = parts[n]
    if n == 0:
        return '{} that {}'.format(thing, action)
    return '{}\nthat {}'.format(thing, action)
def rhymes(v=11):
    """Chain verses v, v-1, ..., 0 joined by ' the ' (iterative form)."""
    chained = verse(v)
    while v > 0:
        v -= 1
        chained += ' the ' + verse(v)
    return chained
def rhyme():
    """Return the full 12-stanza rhyme, stanzas separated by blank lines."""
    stanzas = ['This is the ' + rhymes(v) + '.\n' for v in range(12)]
    full_text = '\n'.join(stanzas)
    # Drop the trailing newline left by the last stanza.
    return full_text[:-1]
|
mprinc/FeelTheSound
|
src/PoC/fft.py
|
#!/usr/bin/env python
# 8 band Audio equaliser from wav file
# import alsaaudio as aa
# import smbus
from struct import unpack
import numpy as np
import wave
from time import sleep
import sys

# MCP23017 GPIO expander registers (used only by the commented-out LED code).
ADDR = 0x20   #The I2C address of MCP23017
DIRA = 0x00   #PortA I/O direction, by pin. 0=output, 1=input
DIRB = 0x01   #PortB I/O direction, by pin. 0=output, 1=input
BANKA = 0x12  #Register address for Bank A
BANKB = 0x13  #Register address for Bank B
# bus=smbus.SMBus(0) #Use '1' for newer Pi boards;
# #Set up the 23017 for 16 output pins
# bus.write_byte_data(ADDR, DIRA, 0); #all zeros = all outputs on Bank A
# bus.write_byte_data(ADDR, DIRB, 0); #all zeros = all outputs on Bank B
# def TurnOffLEDS ():
# bus.write_byte_data(ADDR, BANKA, 0xFF) #set all columns high
# bus.write_byte_data(ADDR, BANKB, 0x00) #set all rows low
# def Set_Column(row, col):
# bus.write_byte_data(ADDR, BANKA, col)
# bus.write_byte_data(ADDR, BANKB, row)
# # Initialise matrix
# TurnOffLEDS()

# 8 equaliser bands and per-band gain applied after the FFT.
matrix = [0,0,0,0,0,0,0,0]
power = []
# weighting = [2,2,8,8,16,32,64,64] # Change these according to taste
weighting = [2,2,2,2,4,4,8,8] # Change these according to taste

# Set up audio
#wavfile = wave.open('test_stereo_16000Hz_16bit_PCM.wav','r')
#wavfile = wave.open('Media-Convert_test5_PCM_Stereo_VBR_8SS_44100Hz.wav','r')
wavfile = wave.open('Media-Convert_test2_PCM_Mono_VBR_8SS_48000Hz.wav','r')
sample_rate = wavfile.getframerate()
no_channels = wavfile.getnchannels()
chunk = 4096 # Use a multiple of 8
# output = aa.PCM(aa.PCM_PLAYBACK, aa.PCM_NORMAL)
# output.setchannels(no_channels)
# output.setrate(sample_rate)
# output.setformat(aa.PCM_FORMAT_S16_LE)
# output.setperiodsize(chunk)
# Return power array index corresponding to a particular frequency
def piff(val):
    """Map frequency *val* (Hz) to its index in the FFT power array."""
    # rfft of `chunk` samples spans 0..sample_rate/2 over chunk bins,
    # so each bin covers sample_rate / (2 * chunk) Hz.
    return int(val * 2 * chunk / sample_rate)
def print_intensity(matrix):
    # Draw one bar per band on a single console line, refreshed in place
    # via the leading carriage return.
    levelFull = "||||||||";
    levelEmpty = " ";
    # NOTE(review): levelEmpty looks like it should be 8 spaces so that each
    # bar pads to a fixed width; whitespace may have been collapsed in this
    # copy of the file -- verify against the original.
    levelStr = "";
    for level in matrix:
        #level = 0;
        # Bar of `level` pipes, padded with blanks up to 8 characters.
        levelStr += levelFull[0: level] + levelEmpty [0:8-(level)] + " ";
    sys.stdout.write("\rlevel: " + levelStr);
    sys.stdout.flush();
def calculate_levels(data, chunk, sample_rate):
    # Split one chunk of raw PCM bytes into 8 band levels (clipped to 0..8)
    # for the LED/console display. Returns None when the chunk is short.
    #print ("[calculate_levels] chunk=%s, sample_rate: %s, len(data)=%s" % (chunk, sample_rate, len(data)));
    if(len(data) != chunk):
        # NOTE(review): len(data) is in BYTES while `chunk` counts frames;
        # for 16-bit mono a full read is 2*chunk bytes, so this guard may
        # reject every full chunk -- verify against the wav's sample width.
        print ("\n[calculate_levels] skiping: chunk=%s != len(data)=%s" % (chunk, len(data)));
        return None;
    global matrix
    # Convert raw data (ASCII string) to numpy array
    data = unpack("%dh"%(len(data)/2),data)
    data = np.array(data, dtype='h')
    # Apply FFT - real data
    fourier=np.fft.rfft(data)
    # Remove last element in array to make it the same size as chunk
    fourier=np.delete(fourier,len(fourier)-1)
    # Find average 'amplitude' for specific frequency ranges in Hz
    power = np.abs(fourier)
    matrix[0]= int(np.mean(power[piff(0) :piff(156):1]))
    matrix[1]= int(np.mean(power[piff(156) :piff(313):1]))
    matrix[2]= int(np.mean(power[piff(313) :piff(625):1]))
    matrix[3]= int(np.mean(power[piff(625) :piff(1250):1]))
    matrix[4]= int(np.mean(power[piff(1250) :piff(2500):1]))
    matrix[5]= int(np.mean(power[piff(2500) :piff(5000):1]))
    matrix[6]= int(np.mean(power[piff(5000) :piff(10000):1]))
    # Produces error, I guess to low sampling rate of the audio file
    # matrix[7]= int(np.mean(power[piff(10000):piff(20000):1]))
    # Tidy up column values for the LED matrix
    matrix=np.divide(np.multiply(matrix,weighting),1000000)
    # Set floor at 0 and ceiling at 8 for LED matrix
    matrix=matrix.clip(0,8)
    return matrix
# Process audio file
print "Processing....."
data = wavfile.readframes(chunk)
while data != '':
# output.write(data)
matrix = calculate_levels(data, chunk,sample_rate)
if matrix == None:
next;
print_intensity(matrix);
# for i in range (0,8):
# Set_Column((1<<matrix[i])-1,0xFF^(1<<i))
sleep(0.1);
data = wavfile.readframes(chunk)
# TurnOffLEDS()
# =========================
|
aptana/Pydev
|
bundles/org.python.pydev/pysrc/pydevconsole.py
|
try:
from code import InteractiveConsole
except ImportError:
from pydevconsole_code_for_ironpython import InteractiveConsole
import os
import sys
# Compatibility shim: define the True/False builtins on interpreters that
# predate them (Python < 2.3).
try:
    False
    True
except NameError: # version < 2.3 -- didn't have the True/False builtins
    import __builtin__
    setattr(__builtin__, 'True', 1) # Python 3.0 does not accept __builtin__.True = 1 in its syntax
    setattr(__builtin__, 'False', 0)
from pydev_console_utils import BaseStdIn, StdIn, BaseInterpreterInterface
# Define Command/Sync. Inside Eclipse (Jython with the PyDev classes on the
# classpath) commands can optionally be executed on the UI thread; anywhere
# else the except-branch installs plain, synchronous equivalents.
try:
    class ExecState:
        # Process-wide flags shared by all Command instances.
        FIRST_CALL = True
        PYDEV_CONSOLE_RUN_IN_UI = False # Defines if we should run commands in the UI thread.

    from org.python.pydev.core.uiutils import RunInUiThread # @UnresolvedImport
    from java.lang import Runnable # @UnresolvedImport

    class Command(Runnable):
        # Wraps one console line so it can be handed to the UI thread.

        def __init__(self, interpreter, line):
            self.interpreter = interpreter
            self.line = line

        def run(self):
            if ExecState.FIRST_CALL:
                ExecState.FIRST_CALL = False
                sys.stdout.write('\nYou are now in a console within Eclipse.\nUse it with care as it can halt the VM.\n')
                sys.stdout.write('Typing a line with "PYDEV_CONSOLE_TOGGLE_RUN_IN_UI"\nwill start executing all the commands in the UI thread.\n\n')

            if self.line == 'PYDEV_CONSOLE_TOGGLE_RUN_IN_UI':
                # Magic line toggles UI-thread execution instead of being run.
                ExecState.PYDEV_CONSOLE_RUN_IN_UI = not ExecState.PYDEV_CONSOLE_RUN_IN_UI
                if ExecState.PYDEV_CONSOLE_RUN_IN_UI:
                    sys.stdout.write('Running commands in UI mode. WARNING: using sys.stdin (i.e.: calling raw_input()) WILL HALT ECLIPSE.\n')
                else:
                    sys.stdout.write('No longer running commands in UI mode.\n')
                self.more = False
            else:
                # `more` is True when the interpreter expects a continuation line.
                self.more = self.interpreter.push(self.line)

    def Sync(runnable):
        if ExecState.PYDEV_CONSOLE_RUN_IN_UI:
            return RunInUiThread.sync(runnable)
        else:
            return runnable.run()

except:
    # If things are not there, define a way in which there's no 'real' sync, only the default execution.
    class Command:

        def __init__(self, interpreter, line):
            self.interpreter = interpreter
            self.line = line

        def run(self):
            self.more = self.interpreter.push(self.line)

    def Sync(runnable):
        runnable.run()
# On Python 3 there is no execfile builtin; install a replacement so console
# users can keep calling it.
try:
    try:
        execfile # Not in Py3k
    except NameError:
        from pydev_imports import execfile
        import builtins # @UnresolvedImport -- only Py3K
        builtins.execfile = execfile
except:
    pass

# Pull in runfile, the interface to UMD that wraps execfile
from pydev_umd import runfile, _set_globals_function
# Expose runfile as a builtin too (builtins on Py3, __builtin__ on Py2).
try:
    import builtins
    builtins.runfile = runfile
except:
    import __builtin__
    __builtin__.runfile = runfile
#=======================================================================================================================
# InterpreterInterface
#=======================================================================================================================
class InterpreterInterface(BaseInterpreterInterface):
    '''
    The methods in this class should be registered in the xml-rpc server.
    '''

    def __init__(self, host, client_port, server):
        BaseInterpreterInterface.__init__(self, server)
        self.client_port = client_port
        self.host = host
        # Pick the namespace the console executes in: this module's globals
        # normally, or a fresh fake __main__ module when a debugger is attached.
        try:
            import pydevd # @UnresolvedImport
            if pydevd.GetGlobalDebugger() is None:
                raise RuntimeError() # Work as if the debugger does not exist as it's not connected.
        except:
            self.namespace = globals()
        else:
            # Adapted from the code in pydevd
            # patch provided by: Scott Schlesier - when script is run, it does not
            # pretend pydevconsole is not the main module, and
            # convince the file to be debugged that it was loaded as main
            sys.modules['pydevconsole'] = sys.modules['__main__']
            sys.modules['pydevconsole'].__name__ = 'pydevconsole'

            from imp import new_module
            m = new_module('__main__')
            sys.modules['__main__'] = m
            ns = m.__dict__
            try:
                ns['__builtins__'] = __builtins__
            except NameError:
                pass # Not there on Jython...
            self.namespace = ns
        self.interpreter = InteractiveConsole(self.namespace)
        self._input_error_printed = False

    def doAddExec(self, line):
        # Wrap the line in a Command so it can (optionally) run in the UI
        # thread; `more` is True when a continuation line is expected.
        command = Command(self.interpreter, line)
        Sync(command)
        return command.more

    def getNamespace(self):
        # Namespace the console code executes in.
        return self.namespace

    def getCompletions(self, text, act_tok):
        # Return completions for `act_tok`; best-effort, empty list on failure.
        try:
            from _pydev_completer import Completer
            completer = Completer(self.namespace, None)
            return completer.complete(act_tok)
        except:
            import traceback;traceback.print_exc()
            return []

    def close(self):
        # sys.exit is replaced by _DoExit in StartServer, so this kills the process.
        sys.exit(0)
try:
from pydev_ipython_console import InterpreterInterface
except:
sys.stderr.write('PyDev console: using default backend (IPython not available).\n')
pass # IPython not available, proceed as usual.
#=======================================================================================================================
# _DoExit
#=======================================================================================================================
def _DoExit(*args):
    '''
    We have to override the exit because calling sys.exit will only actually exit the main thread,
    and as we're in a Xml-rpc server, that won't work.
    '''
    try:
        import java.lang.System
    except ImportError:
        # CPython: terminate the whole process immediately, bypassing normal
        # shutdown, honouring an optional single exit-code argument.
        exit_code = args[0] if len(args) == 1 else 0
        os._exit(exit_code)
    else:
        # Jython: let the JVM do the exit.
        java.lang.System.exit(1)
#=======================================================================================================================
# StartServer
#=======================================================================================================================
def StartServer(host, port, client_port):
    """Create the console's XML-RPC server, register the interpreter's
    methods on it and serve forever (blocks the calling thread)."""
    # replace exit (see comments on method)
    # note that this does not work in jython!!! (sys method can't be replaced).
    sys.exit = _DoExit
    from _pydev_xmlrpc_hook import InputHookedXMLRPCServer
    try:
        server = InputHookedXMLRPCServer((host, port), logRequests=False)
        interpreter = InterpreterInterface(host, client_port, server)
    except:
        sys.stderr.write('Error starting server with host: %s, port: %s, client_port: %s\n' % (host, port, client_port))
        raise

    # Tell UMD the proper default namespace
    _set_globals_function(interpreter.getNamespace)

    # Functions for basic protocol
    server.register_function(interpreter.addExec)
    server.register_function(interpreter.getCompletions)
    server.register_function(interpreter.getDescription)
    server.register_function(interpreter.close)

    # Functions so that the console can work as a debugger (i.e.: variables view, expressions...)
    server.register_function(interpreter.connectToDebugger)
    server.register_function(interpreter.hello)

    # Functions for GUI main loop integration
    server.register_function(interpreter.enableGui)

    server.serve_forever()
#=======================================================================================================================
# main
#=======================================================================================================================
if __name__ == '__main__':
    # Replace stdin so reads go through the console protocol.
    sys.stdin = BaseStdIn()
    # argv: <server port> <client port>
    port, client_port = sys.argv[1:3]
    import pydev_localhost
    StartServer(pydev_localhost.get_localhost(), int(port), int(client_port))
|
gppezzi/easybuild-framework
|
easybuild/tools/utilities.py
|
# #
# Copyright 2012-2019 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
# #
"""
Module with various utility functions
:author: Kenneth Hoste (Ghent University)
"""
import datetime
import glob
import os
import re
import sys
from string import digits
from easybuild.base import fancylogger
from easybuild.tools.build_log import EasyBuildError, print_msg
from easybuild.tools.config import build_option
from easybuild.tools.py2vs3 import ascii_letters, string_type
# module-level logger for this module
_log = fancylogger.getLogger('tools.utilities')
# commonly used indentation strings
INDENT_2SPACES = ' ' * 2
INDENT_4SPACES = ' ' * 4
def flatten(lst):
    """Return a single list holding all elements of the given list of lists, in order."""
    return [element for sublist in lst for element in sublist]
def quote_str(val, escape_newline=False, prefer_single_quotes=False, tcl=False):
    """
    Return a quoted version of the given value, for use in string replacement contexts.

    Non-string values are returned untouched. For strings, a quoting style is picked
    that does not clash with quotes already present in the string, e.g.
    foo becomes 'foo', foo'bar becomes "foo'bar",
    foo'bar"baz becomes \"\"\"foo'bar"baz\"\"\", etc.

    :param escape_newline: wrap strings that include a newline in triple quotes
    :param prefer_single_quotes: use single quotes when the string allows it
    :param tcl: quote for Tcl syntax rather than Python syntax
    """
    if not isinstance(val, string_type):
        # non-string values are passed through unchanged
        return val

    # strings holding both quote types, or a newline (when requested), need triple quotes
    if ("'" in val and '"' in val) or (escape_newline and '\n' in val):
        return '"""%s"""' % val

    if '"' in val:
        if tcl:
            # Tcl has no single-quoted strings, so escape the double quotes instead
            return '"%s"' % val.replace('"', '\\"')
        return "'%s'" % val

    # honour single-quote preference, unless a space or single quote is in the string
    if prefer_single_quotes and "'" not in val and ' ' not in val:
        return "'%s'" % val

    # fall back on double quotes (the only valid quoting in Tcl syntax)
    return '"%s"' % val
def quote_py_str(val):
    """Quote a value for use in a Python context (e.g., easyconfig parameters); see quote_str."""
    return quote_str(val, prefer_single_quotes=True, escape_newline=True)
def shell_quote(token):
    """
    Wrap the provided token in single quotes (to escape spaces and characters with
    special meaning in a shell), so it can be used in a shell command. The resulting
    token is not expanded/interpolated by the shell.

    Embedded single quotes are rendered as '\'' (close the quote, emit a
    backslash-escaped quote, reopen the quote): inside a single-quoted POSIX shell
    string a backslash is literal, so the previous "\\'" escaping produced invalid
    shell syntax for tokens containing a single quote.

    :param token: value to quote (converted to a string first)
    :return: single-quoted string that the shell treats as a single literal word
    """
    # first, strip off double quotes that may wrap the entire value,
    # we don't want to wrap single quotes around a double-quoted value
    token = str(token).strip('"')
    # escape embedded single quotes via quote-break, then wrap the entire token
    return "'%s'" % token.replace("'", "'\\''")
def remove_unwanted_chars(inputstring):
    """Return a copy of the given string with all unwanted characters removed.

    All non-letter and non-numeral characters are considered unwanted,
    except for the underscore ('_').

    :param inputstring: string to filter
    :return: filtered copy of the string
    """
    # build the allowed set once, so each membership test is O(1)
    # instead of a scan over the concatenated characters string
    allowed_chars = frozenset(ascii_letters + digits + '_')
    return ''.join(c for c in inputstring if c in allowed_chars)
def import_available_modules(namespace):
    """
    Import every module that can be located in the specified namespace.

    :param namespace: the namespace to import modules from
    :return: list of imported module objects, without duplicates
    """
    res = []
    for syspath_entry in sys.path:
        glob_pattern = os.path.sep.join([syspath_entry] + namespace.split('.') + ['*.py'])
        # an empty sys.path entry corresponds to Python packages/modules in the
        # current working directory, so the leading path separator must be stripped
        if syspath_entry == '' and glob_pattern.startswith(os.path.sep):
            glob_pattern = glob_pattern.lstrip(os.path.sep)
        for candidate in sorted(glob.glob(glob_pattern)):
            if candidate.endswith('__init__.py'):
                continue
            mod_name = candidate.split(os.path.sep)[-1].split('.')[0]
            full_modpath = '.'.join([namespace, mod_name])
            _log.debug("importing module %s", full_modpath)
            try:
                mod = __import__(full_modpath, globals(), locals(), [''])
            except ImportError as err:
                raise EasyBuildError("import_available_modules: Failed to import %s: %s", full_modpath, err)
            if mod not in res:
                res.append(mod)
    return res
def only_if_module_is_available(modnames, pkgname=None, url=None):
    """Decorator to guard functions/methods against missing required module with specified name.

    :param modnames: name(s) of the required module(s); the first one that imports successfully wins
    :param pkgname: name of the Python package providing the module (used in the error message)
    :param url: URL where the package can be obtained (derived from pkgname when not given)
    """
    if pkgname and url is None:
        url = 'https://pypi.python.org/pypi/%s' % pkgname
    # accept a single module name as well as a tuple/list of names
    if isinstance(modnames, string_type):
        modnames = (modnames,)
    def wrap(orig):
        """Decorated function, raises ImportError if specified module is not available."""
        try:
            imported = None
            for modname in modnames:
                try:
                    __import__(modname)
                    imported = modname
                    break
                except ImportError:
                    pass
            if imported is None:
                raise ImportError("None of the specified modules %s is available" % ', '.join(modnames))
            else:
                # module is available: return the original function untouched
                return orig
        except ImportError as err:
            # need to pass down 'err' via named argument to ensure it's in scope when using Python 3.x
            # NOTE(review): 'modname' below is the loop variable and holds the *last* module
            # name tried; also, a positional first argument passed to the replacement
            # function would shadow 'err' (default-argument capture) — confirm acceptable
            def error(err=err, *args, **kwargs):
                msg = "%s; required module '%s' is not available" % (err, modname)
                if pkgname:
                    msg += " (provided by Python package %s, available from %s)" % (pkgname, url)
                elif url:
                    msg += " (available from %s)" % url
                raise EasyBuildError("ImportError: %s", msg)
            return error
    return wrap
def trace_msg(message, silent=False):
    """Print a trace message, but only when the 'trace' build option is enabled.

    NOTE(review): the 'silent' parameter is accepted but not used here — confirm
    whether it should be forwarded to print_msg.
    """
    if not build_option('trace'):
        return
    print_msg(' >> ' + message, prefix=False)
def nub(list_):
    """Return the unique items of a list of hashables, preserving the order of the
    original list: only the first occurrence of each element is retained.

    Approach taken from
    http://stackoverflow.com/questions/480214/how-do-you-remove-duplicates-from-a-list-in-python-whilst-preserving-order

    :param list_: a list of hashable items
    :return: a new list with each element of `list_` appearing only once
    """
    res = []
    known = set()
    for item in list_:
        if item not in known:
            known.add(item)
            res.append(item)
    return res
def get_class_for(modulepath, class_name):
    """
    Obtain the class with the given name from the given Python module path.

    :param modulepath: Python module path (e.g., 'easybuild.base.generaloption')
    :param class_name: Python class name (e.g., 'GeneralOption')
    :return: the class object
    :raises ImportError: when the module or the class cannot be imported
    """
    # import the specified module path; re-raise any ImportError that occurs
    try:
        mod = __import__(modulepath, globals(), locals(), [''])
    except ImportError as err:
        raise ImportError(err)
    # look up the class in the imported module; report a missing class as ImportError
    try:
        return getattr(mod, class_name)
    except AttributeError as err:
        raise ImportError("Failed to import %s from %s: %s" % (class_name, modulepath, err))
def get_subclasses_dict(klass, include_base_class=False):
    """Return a dict mapping each class (found recursively from the specified base
    class) to the list of its direct subclasses."""
    direct_subclasses = klass.__subclasses__()
    res = {klass: direct_subclasses} if include_base_class else {}
    for child in direct_subclasses:
        # the base class is always included for recursive calls
        res.update(get_subclasses_dict(child, include_base_class=True))
    return res
def get_subclasses(klass, include_base_class=False):
    """Return all subclasses, found recursively from the specified base class."""
    subclasses_by_class = get_subclasses_dict(klass, include_base_class=include_base_class)
    return subclasses_by_class.keys()
def mk_rst_table(titles, columns):
    """
    Return an rst table (as a list of lines) with the given titles and columns.

    :param titles: list of column titles
    :param columns: nested list of strings, one inner list per column
    :return: list of table lines (terminated by an empty string)
    :raises ValueError: when the number of titles and columns differ
    """
    title_cnt, col_cnt = len(titles), len(columns)
    if title_cnt != col_cnt:
        msg = "Number of titles/columns should be equal, found %d titles and %d columns" % (title_cnt, col_cnt)
        raise ValueError(msg)

    # build one format template per column, sized to the widest entry (title included)
    tmpl = []
    for i, title in enumerate(titles):
        width = max(map(len, columns[i] + [title]))
        tmpl.append('{%s:{c}<%s}' % (i, width))
    line_tmpl = INDENT_4SPACES.join(tmpl)

    # separator line: fill empty cells with '=' characters;
    # the empty row is built once, outside the loop (it was reassigned per title before)
    empty_row = [''] * col_cnt
    table_line = line_tmpl.format(*empty_row, c='=')

    table = [table_line]
    table.append(line_tmpl.format(*titles, c=' '))
    table.append(table_line)
    for row in map(list, zip(*columns)):
        table.append(line_tmpl.format(*row, c=' '))
    table.extend([table_line, ''])

    return table
def time2str(delta):
    """Return a human-readable string for the provided datetime.timedelta value.

    :param delta: a datetime.timedelta instance
    :return: string such as "30 sec", "5 min 3 sec" or "2 hours 1 min 0 sec"
    :raises EasyBuildError: when anything other than a timedelta is passed in
    """
    if not isinstance(delta, datetime.timedelta):
        raise EasyBuildError("Incorrect value type provided to time2str, should be datetime.timedelta: %s", type(delta))

    tot_secs = delta.days * 3600 * 24 + delta.seconds + delta.microseconds / 10**6

    if tot_secs < 60:
        return '%d sec' % int(tot_secs)

    if tot_secs < 3600:
        mins = int(tot_secs / 60)
        return '%d min %d sec' % (mins, int(tot_secs - mins * 60))

    hours = int(tot_secs / 3600)
    mins = int((tot_secs - hours * 3600) / 60)
    secs = int(tot_secs - (hours * 3600) - (mins * 60))
    # only the hours part is pluralised, matching the established output format
    unit = 'hours' if hours > 1 else 'hour'
    return '%d %s %d min %d sec' % (hours, unit, mins, secs)
|
CPSC491FileMaker/project
|
calTimer.QThread.py
|
# Rewrite of the original calTimer to use QThreads instead of native Python threads.
# Needed to make UI changes (impossible from a native thread).
# Also attempts to remove the need for SIGTERM to stop the permanent loop.
from PyQt4 import QtCore
import time,os,ctypes
import sys
class calTimer(QtCore.QThread):
    # path of the XML data file whose size is being watched
    xml_file = './data/data.xml'
    # NOTE(review): evaluated once at class-definition (import) time, and holds a full
    # os.stat_result rather than a size; initFileSize() later overwrites it with
    # st_size (an int) before it is ever compared — confirm this is intended, and
    # note that import fails outright if the file does not exist yet.
    fileSize = os.stat(xml_file)
    def initFileSize(self):
        """Refresh self.fileSize with the current size (in bytes) of the XML file."""
        print "initfilesize run"
        fileToCheck = os.stat(self.xml_file)
        self.fileSize = fileToCheck.st_size
    def run(self):
        # Thread body: loop until the watched file's size changes.
        # NOTE(review): with the sleep commented out this is a busy-wait, and
        # self.fileSize is not refreshed inside the loop, so the loop can only end
        # if another thread updates self.fileSize — confirm.
        self.initFileSize()
        testFileSize = self.fileSize
        while testFileSize == self.fileSize:
            print "No change - sleep 3"
            #time.sleep(3)
|
schleichdi2/OpenNfr_E2_Gui-6.0
|
lib/python/Screens/EpgSelection.py
|
from time import localtime, time, strftime, mktime
from enigma import eServiceReference, eTimer, eServiceCenter, ePoint
from Screen import Screen
from Screens.HelpMenu import HelpableScreen
from Components.About import about
from Components.ActionMap import HelpableActionMap, HelpableNumberActionMap
from Components.Button import Button
from Components.config import config, configfile, ConfigClock
from Components.EpgList import EPGList, EPGBouquetList, TimelineText, EPG_TYPE_SINGLE, EPG_TYPE_SIMILAR, EPG_TYPE_MULTI, EPG_TYPE_ENHANCED, EPG_TYPE_INFOBAR, EPG_TYPE_INFOBARGRAPH, EPG_TYPE_GRAPH, MAX_TIMELINES
from Components.Label import Label
from Components.Pixmap import Pixmap
from Components.Sources.ServiceEvent import ServiceEvent
from Components.Sources.Event import Event
from Components.UsageConfig import preferredTimerPath
from Screens.TimerEdit import TimerSanityConflict
from Screens.EventView import EventViewEPGSelect, EventViewSimple
from Screens.ChoiceBox import ChoiceBox
from Screens.MessageBox import MessageBox
from Screens.PictureInPicture import PictureInPicture
from Screens.Setup import Setup
from TimeDateInput import TimeDateInput
from RecordTimer import RecordTimerEntry, parseEvent, AFTEREVENT
from TimerEntry import TimerEntry, InstantRecordTimerEntry
from ServiceReference import ServiceReference
# NOTE(review): presumably consumed further down this module (multi-EPG config) — confirm
mepg_config_initialized = False
# PiPServiceRelation installed?
try:
    from Plugins.SystemPlugins.PiPServiceRelation.plugin import getRelationDict
    plugin_PiPServiceRelation_installed = True
except:
    # plugin not installed (or failed to import): fall back to an empty relation dict
    plugin_PiPServiceRelation_installed = False
class EPGSelection(Screen, HelpableScreen):
    """EPG browser screen supporting several layouts: single, multi, graph,
    infobar, infobar-graph, enhanced, and 'similar events'."""
    # states for the green (timer) button choice
    EMPTY = 0
    ADD_TIMER = 1
    REMOVE_TIMER = 2
    # NOTE(review): ZAP shares the value 1 with ADD_TIMER — confirm this is intended
    ZAP = 1
    def __init__(self, session, service = None, zapFunc = None, eventid = None, bouquetChangeCB=None, serviceChangeCB = None, EPGtype = None, StartBouquet = None, StartRef = None, bouquets = None):
        """Build the EPG screen for the requested EPG type.

        :param session: enigma2 session
        :param service: service reference or service list, depending on EPG type
        :param zapFunc: callback used to zap to a service
        :param eventid: event id (only used for the 'similar events' EPG type)
        :param bouquetChangeCB: accepted for API compatibility; not used in this method
        :param serviceChangeCB: callback invoked when the selected service changes
        :param EPGtype: one of 'single', 'infobar', 'enhanced', 'graph',
                        'infobargraph', 'multi'; anything else means 'similar'
        :param StartBouquet: bouquet to start in
        :param StartRef: service reference to start on
        :param bouquets: list of available bouquets
        """
        Screen.__init__(self, session)
        HelpableScreen.__init__(self)
        self.zapFunc = zapFunc
        self.serviceChangeCB = serviceChangeCB
        self.bouquets = bouquets
        graphic = False
        # map the EPGtype string onto the EPG_TYPE_* constant used everywhere else
        if EPGtype == 'single':
            self.type = EPG_TYPE_SINGLE
        elif EPGtype == 'infobar':
            self.type = EPG_TYPE_INFOBAR
        elif EPGtype == 'enhanced':
            self.type = EPG_TYPE_ENHANCED
        elif EPGtype == 'graph':
            self.type = EPG_TYPE_GRAPH
            if config.epgselection.graph_type_mode.value == "graphics":
                graphic = True
        elif EPGtype == 'infobargraph':
            self.type = EPG_TYPE_INFOBARGRAPH
            if config.epgselection.infobar_type_mode.value == "graphics":
                graphic = True
        elif EPGtype == 'multi':
            self.type = EPG_TYPE_MULTI
        else:
            self.type = EPG_TYPE_SIMILAR
        if not self.type == EPG_TYPE_SINGLE:
            self.StartBouquet = StartBouquet
            self.StartRef = StartRef
            self.servicelist = None
        # shared state, independent of the EPG type
        self.longbuttonpressed = False
        self.ChoiceBoxDialog = None
        self.ask_time = -1
        self.closeRecursive = False
        self.eventviewDialog = None
        self.eventviewWasShown = False
        self.currch = None
        self.session.pipshown = False
        self.cureventindex = None
        if plugin_PiPServiceRelation_installed:
            self.pipServiceRelation = getRelationDict()
        else:
            self.pipServiceRelation = {}
        self.zapnumberstarted = False
        self.NumberZapTimer = eTimer()
        # NOTE(review): 'dozumberzap' looks like a typo for 'donumberzap', but it must
        # match the method name defined elsewhere in this class — confirm before renaming
        self.NumberZapTimer.callback.append(self.dozumberzap)
        self.NumberZapField = None
        self.CurrBouquet = None
        self.CurrService = None
        # widgets shared by all EPG types
        self["number"] = Label()
        self["number"].hide()
        self['Service'] = ServiceEvent()
        self['Event'] = Event()
        self['lab1'] = Label(_('Please wait while gathering data...'))
        self.key_green_choice = self.EMPTY
        self['key_red'] = Button(_('IMDb Search'))
        self['key_green'] = Button(_('Add Timer'))
        self['key_yellow'] = Button(_('EPG Search'))
        self['key_blue'] = Button(_('Add AutoTimer'))
        # action maps shared by all EPG types
        self['dialogactions'] = HelpableActionMap(self, 'WizardActions',
            {
                'back': (self.closeChoiceBoxDialog, _('Close dialog')),
            }, -1)
        self['dialogactions'].csel = self
        self["dialogactions"].setEnabled(False)
        self['okactions'] = HelpableActionMap(self, 'OkCancelActions',
            {
                'cancel': (self.closeScreen, _('Exit EPG')),
                'OK': (self.OK, _('Zap to channel (setup in menu)')),
                'OKLong': (self.OKLong, _('Zap to channel and close (setup in menu)'))
            }, -1)
        self['okactions'].csel = self
        self['colouractions'] = HelpableActionMap(self, 'ColorActions',
            {
                'red': (self.redButtonPressed, _('IMDB search for current event')),
                'redlong': (self.redlongButtonPressed, _('Sort EPG List')),
                'green': (self.greenButtonPressed, _('Add/Remove timer for current event')),
                'yellow': (self.yellowButtonPressed, _('Search for similar events')),
                'greenlong': (self.showTimerList, _('Show Timer List')),
                'blue': (self.blueButtonPressed, _('Add a auto timer for current event')),
                'bluelong': (self.blueButtonPressedLong, _('Show AutoTimer List'))
            }, -1)
        self['colouractions'].csel = self
        self['recordingactions'] = HelpableActionMap(self, 'InfobarInstantRecord',
            {
                'ShortRecord': (self.recButtonPressed, _('Add a record timer for current event')),
                'LongRecord': (self.reclongButtonPressed, _('Add a zap timer for current event'))
            }, -1)
        self['recordingactions'].csel = self
        # type-specific widgets, state and action maps
        if self.type == EPG_TYPE_SIMILAR:
            self.currentService = service
            self.eventid = eventid
            self['epgactions'] = HelpableActionMap(self, 'EPGSelectActions',
                {
                    'info': (self.Info, _('Show detailed event info')),
                    'infolong': (self.InfoLong, _('Show single epg for current channel')),
                    'menu': (self.createSetup, _('Setup menu'))
                }, -1)
            self['epgactions'].csel = self
        elif self.type == EPG_TYPE_SINGLE:
            self.currentService = ServiceReference(service)
            self['epgactions'] = HelpableActionMap(self, 'EPGSelectActions',
                {
                    'info': (self.Info, _('Show detailed event info')),
                    'epg': (self.Info, _('Show detailed event info')),
                    'menu': (self.createSetup, _('Setup menu'))
                }, -1)
            self['epgactions'].csel = self
            self['epgcursoractions'] = HelpableActionMap(self, 'DirectionActions',
                {
                    'left': (self.prevPage, _('Move up a page')),
                    'right': (self.nextPage, _('Move down a page')),
                    'up': (self.moveUp, _('Goto previous channel')),
                    'down': (self.moveDown, _('Goto next channel'))
                }, -1)
            self['epgcursoractions'].csel = self
        elif self.type == EPG_TYPE_INFOBAR or self.type == EPG_TYPE_ENHANCED:
            if self.type == EPG_TYPE_INFOBAR:
                self.skinName = 'QuickEPG'
                self['epgactions'] = HelpableActionMap(self, 'EPGSelectActions',
                    {
                        'nextBouquet': (self.nextBouquet, _('Goto next bouquet')),
                        'prevBouquet': (self.prevBouquet, _('Goto previous bouquet')),
                        'nextService': (self.nextPage, _('Move down a page')),
                        'prevService': (self.prevPage, _('Move up a page')),
                        'input_date_time': (self.enterDateTime, _('Goto specific data/time')),
                        'info': (self.Info, _('Show detailed event info')),
                        'infolong': (self.InfoLong, _('Show single epg for current channel')),
                        'menu': (self.createSetup, _('Setup menu'))
                    }, -1)
                self['epgactions'].csel = self
                self['epgcursoractions'] = HelpableActionMap(self, 'DirectionActions',
                    {
                        'left': (self.prevService, _('Goto previous channel')),
                        'right': (self.nextService, _('Goto next channel')),
                        'up': (self.moveUp, _('Goto previous channel')),
                        'down': (self.moveDown, _('Goto next channel'))
                    }, -1)
                self['epgcursoractions'].csel = self
            elif self.type == EPG_TYPE_ENHANCED:
                self['epgactions'] = HelpableActionMap(self, 'EPGSelectActions',
                    {
                        'nextBouquet': (self.nextBouquet, _('Goto next bouquet')),
                        'prevBouquet': (self.prevBouquet, _('Goto previous bouquet')),
                        'nextService': (self.nextService, _('Goto next channel')),
                        'prevService': (self.prevService, _('Goto previous channel')),
                        'input_date_time': (self.enterDateTime, _('Goto specific data/time')),
                        'info': (self.Info, _('Show detailed event info')),
                        'infolong': (self.InfoLong, _('Show single epg for current channel')),
                        'menu': (self.createSetup, _('Setup menu'))
                    }, -1)
                self['epgactions'].csel = self
                self['epgcursoractions'] = HelpableActionMap(self, 'DirectionActions',
                    {
                        'left': (self.prevPage, _('Move up a page')),
                        'right': (self.nextPage, _('Move down a page')),
                        'up': (self.moveUp, _('Goto previous channel')),
                        'down': (self.moveDown, _('Goto next channel'))
                    }, -1)
                self['epgcursoractions'].csel = self
            # number keys jump straight to a channel number (both infobar and enhanced)
            self['input_actions'] = HelpableNumberActionMap(self, 'NumberActions',
                {
                    '0': (self.keyNumberGlobal, _('enter number to jump to channel.')),
                    '1': (self.keyNumberGlobal, _('enter number to jump to channel.')),
                    '2': (self.keyNumberGlobal, _('enter number to jump to channel.')),
                    '3': (self.keyNumberGlobal, _('enter number to jump to channel.')),
                    '4': (self.keyNumberGlobal, _('enter number to jump to channel.')),
                    '5': (self.keyNumberGlobal, _('enter number to jump to channel.')),
                    '6': (self.keyNumberGlobal, _('enter number to jump to channel.')),
                    '7': (self.keyNumberGlobal, _('enter number to jump to channel.')),
                    '8': (self.keyNumberGlobal, _('enter number to jump to channel.')),
                    '9': (self.keyNumberGlobal, _('enter number to jump to channel.'))
                }, -1)
            self['input_actions'].csel = self
            self.list = []
            self.servicelist = service
            self.currentService = self.session.nav.getCurrentlyPlayingServiceOrGroup()
        elif self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH:
            if self.type == EPG_TYPE_GRAPH:
                if not config.epgselection.graph_pig.value:
                    self.skinName = 'GraphicalEPG'
                else:
                    self.skinName = 'GraphicalEPGPIG'
            elif self.type == EPG_TYPE_INFOBARGRAPH:
                self.skinName = 'GraphicalInfoBarEPG'
            # look back config.epg.histminutes to include events that already started,
            # rounded down to the configured raster
            now = time() - int(config.epg.histminutes.value) * 60
            if self.type == EPG_TYPE_GRAPH:
                self.ask_time = self.ask_time = now - now % (int(config.epgselection.graph_roundto.value) * 60)
            elif self.type == EPG_TYPE_INFOBARGRAPH:
                self.ask_time = self.ask_time = now - now % (int(config.epgselection.infobar_roundto.value) * 60)
            self.closeRecursive = False
            self.bouquetlist_active = False
            self['bouquetlist'] = EPGBouquetList(graphic=graphic)
            self['bouquetlist'].hide()
            self['timeline_text'] = TimelineText(type=self.type,graphic=graphic)
            self['Event'] = Event()
            self['primetime'] = Label(_('PRIMETIME'))
            self['change_bouquet'] = Label(_('CHANGE BOUQUET'))
            self['jump'] = Label(_('JUMP 24 HOURS'))
            self['page'] = Label(_('PAGE UP/DOWN'))
            # one pixmap per timeline slot, plus the 'now' marker
            self.time_lines = []
            for x in range(0, MAX_TIMELINES):
                pm = Pixmap()
                self.time_lines.append(pm)
                self['timeline%d' % x] = pm
            self['timeline_now'] = Pixmap()
            self.updateTimelineTimer = eTimer()
            self.updateTimelineTimer.callback.append(self.moveTimeLines)
            self.updateTimelineTimer.start(60000)
            self['bouquetokactions'] = HelpableActionMap(self, 'OkCancelActions',
                {
                    'cancel': (self.BouquetlistHide, _('Close bouquet list.')),
                    'OK': (self.BouquetOK, _('Change to bouquet')),
                }, -1)
            self['bouquetokactions'].csel = self
            self["bouquetokactions"].setEnabled(False)
            self['bouquetcursoractions'] = HelpableActionMap(self, 'DirectionActions',
                {
                    'left': (self.moveBouquetPageUp, _('Goto previous event')),
                    'right': (self.moveBouquetPageDown, _('Goto next event')),
                    'up': (self.moveBouquetUp, _('Goto previous channel')),
                    'down': (self.moveBouquetDown, _('Goto next channel'))
                }, -1)
            self['bouquetcursoractions'].csel = self
            self["bouquetcursoractions"].setEnabled(False)
            self['epgcursoractions'] = HelpableActionMap(self, 'DirectionActions',
                {
                    'left': (self.leftPressed, _('Goto previous event')),
                    'right': (self.rightPressed, _('Goto next event')),
                    'up': (self.moveUp, _('Goto previous channel')),
                    'down': (self.moveDown, _('Goto next channel'))
                }, -1)
            self['epgcursoractions'].csel = self
            self['epgactions'] = HelpableActionMap(self, 'EPGSelectActions',
                {
                    'nextService': (self.nextService, _('Jump forward 24 hours')),
                    'prevService': (self.prevService, _('Jump back 24 hours')),
                    'nextBouquet': (self.nextBouquet, _('Goto next bouquet')),
                    'prevBouquet': (self.prevBouquet, _('Goto previous bouquet')),
                    'input_date_time': (self.enterDateTime, _('Goto specific data/time')),
                    'info': (self.Info, _('Show detailed event info')),
                    'infolong': (self.InfoLong, _('Show single epg for current channel')),
                    'tv': (self.Bouquetlist, _('Toggle between bouquet/epg lists')),
                    'tvlong': (self.togglePIG, _('Toggle Picture In Graphics')),
                    'menu': (self.createSetup, _('Setup menu'))
                }, -1)
            self['epgactions'].csel = self
            # in graph mode the number keys control zoom/paging rather than zapping
            self['input_actions'] = HelpableNumberActionMap(self, 'NumberActions',
                {
                    '1': (self.keyNumberGlobal, _('Reduce time scale')),
                    '2': (self.keyNumberGlobal, _('Page up')),
                    '3': (self.keyNumberGlobal, _('Increase time scale')),
                    '4': (self.keyNumberGlobal, _('page left')),
                    '5': (self.keyNumberGlobal, _('Jump to current time')),
                    '6': (self.keyNumberGlobal, _('Page right')),
                    '7': (self.keyNumberGlobal, _('No of items switch (increase or reduced)')),
                    '8': (self.keyNumberGlobal, _('Page down')),
                    '9': (self.keyNumberGlobal, _('Jump to prime time')),
                    '0': (self.keyNumberGlobal, _('Move to home of list'))
                }, -1)
            self['input_actions'].csel = self
        elif self.type == EPG_TYPE_MULTI:
            self.skinName = 'EPGSelectionMulti'
            self['bouquetlist'] = EPGBouquetList(graphic=graphic)
            self['bouquetlist'].hide()
            self['now_button'] = Pixmap()
            self['next_button'] = Pixmap()
            self['more_button'] = Pixmap()
            self['now_button_sel'] = Pixmap()
            self['next_button_sel'] = Pixmap()
            self['more_button_sel'] = Pixmap()
            self['now_text'] = Label()
            self['next_text'] = Label()
            self['more_text'] = Label()
            self['date'] = Label()
            self.bouquetlist_active = False
            self['bouquetokactions'] = HelpableActionMap(self, 'OkCancelActions',
                {
                    'OK': (self.BouquetOK, _('Change to bouquet')),
                }, -1)
            self['bouquetokactions'].csel = self
            self["bouquetokactions"].setEnabled(False)
            self['bouquetcursoractions'] = HelpableActionMap(self, 'DirectionActions',
                {
                    'left': (self.moveBouquetPageUp, _('Goto previous event')),
                    'right': (self.moveBouquetPageDown, _('Goto next event')),
                    'up': (self.moveBouquetUp, _('Goto previous channel')),
                    'down': (self.moveBouquetDown, _('Goto next channel'))
                }, -1)
            self['bouquetcursoractions'].csel = self
            self['bouquetcursoractions'].setEnabled(False)
            self['epgcursoractions'] = HelpableActionMap(self, 'DirectionActions',
                {
                    'left': (self.leftPressed, _('Goto previous event')),
                    'right': (self.rightPressed, _('Goto next event')),
                    'up': (self.moveUp, _('Goto previous channel')),
                    'down': (self.moveDown, _('Goto next channel'))
                }, -1)
            self['epgcursoractions'].csel = self
            self['epgactions'] = HelpableActionMap(self, 'EPGSelectActions',
                {
                    'nextService': (self.nextPage, _('Move down a page')),
                    'prevService': (self.prevPage, _('Move up a page')),
                    'nextBouquet': (self.nextBouquet, _('Goto next bouquet')),
                    'prevBouquet': (self.prevBouquet, _('Goto previous bouquet')),
                    'input_date_time': (self.enterDateTime, _('Goto specific data/time')),
                    'info': (self.Info, _('Show detailed event info')),
                    'infolong': (self.InfoLong, _('Show single epg for current channel')),
                    'tv': (self.Bouquetlist, _('Toggle between bouquet/epg lists')),
                    'menu': (self.createSetup, _('Setup menu'))
                }, -1)
            self['epgactions'].csel = self
        # displayed time span for the graph-style lists
        if self.type == EPG_TYPE_GRAPH:
            time_epoch=int(config.epgselection.graph_prevtimeperiod.value)
        elif self.type == EPG_TYPE_INFOBARGRAPH:
            time_epoch=int(config.epgselection.infobar_prevtimeperiod.value)
        else:
            time_epoch=None
        self['list'] = EPGList(type=self.type, selChangedCB=self.onSelectionChanged, timer=session.nav.RecordTimer, time_epoch=time_epoch, overjump_empty=config.epgselection.overjump.value, graphic=graphic)
        self.refreshTimer = eTimer()
        self.refreshTimer.timeout.get().append(self.refreshlist)
        self.listTimer = eTimer()
        self.listTimer.callback.append(self.hidewaitingtext)
        # on most hardware, defer list creation via a timer so the 'please wait'
        # label can be shown first; on the listed CPUs, create the list directly
        if about.getCPUString() != 'BCM7346B2' and about.getCPUString() != 'BCM7425B2':
            self.createTimer = eTimer()
            self.createTimer.callback.append(self.onCreate)
            self.onLayoutFinish.append(self.LayoutFinish)
        else:
            self.onLayoutFinish.append(self.onCreate)
def createSetup(self):
self.closeEventViewDialog()
key = None
if self.type == EPG_TYPE_SINGLE:
key = 'epgsingle'
elif self.type == EPG_TYPE_MULTI:
key = 'epgmulti'
elif self.type == EPG_TYPE_ENHANCED:
key = 'epgenhanced'
elif self.type == EPG_TYPE_INFOBAR:
key = 'epginfobar'
elif self.type == EPG_TYPE_GRAPH:
key = 'epggraphical'
elif self.type == EPG_TYPE_INFOBARGRAPH:
key = 'epginfobargraphical'
if key:
self.session.openWithCallback(self.onSetupClose, Setup, key)
def onSetupClose(self, test = None):
if self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH:
if self.type == EPG_TYPE_GRAPH:
self.close('reopengraph')
elif self.type == EPG_TYPE_INFOBARGRAPH:
self.close('reopeninfobargraph')
else:
if self.type == EPG_TYPE_INFOBAR:
self.close('reopeninfobar')
def togglePIG(self):
if not config.epgselection.graph_pig.value:
config.epgselection.graph_pig.setValue(True)
else:
config.epgselection.graph_pig.setValue(False)
config.epgselection.graph_pig.save()
configfile.save()
self.close('reopengraph')
    def hidewaitingtext(self):
        """Hide the 'please wait' label once the list has been filled."""
        self.listTimer.stop()
        # multi-EPG: also move the selection to the currently playing service
        if self.type == EPG_TYPE_MULTI:
            self['list'].moveToService(self.session.nav.getCurrentlyPlayingServiceOrGroup())
        self['lab1'].hide()
def getBouquetServices(self, bouquet):
services = []
servicelist = eServiceCenter.getInstance().list(bouquet)
if not servicelist is None:
while True:
service = servicelist.getNext()
if not service.valid(): #check if end of list
break
if service.flags & (eServiceReference.isDirectory | eServiceReference.isMarker): #ignore non playable services
continue
services.append(ServiceReference(service))
return services
    def LayoutFinish(self):
        """Show the 'please wait' label, then trigger deferred list creation via the timer."""
        self['lab1'].show()
        self.createTimer.start(800)
    def onCreate(self):
        """Fill the EPG list for the current EPG type and set the screen title."""
        # stop the deferred-creation timer when it was used (see __init__)
        if about.getCPUString() != 'BCM7346B2' and about.getCPUString() != 'BCM7425B2':
            self.createTimer.stop()
        serviceref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
        title = None
        self['list'].recalcEntrySize()
        self.BouquetRoot = False
        if self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH:
            # a '1:7:0' reference denotes the bouquet root
            if self.StartBouquet.toString().startswith('1:7:0'):
                self.BouquetRoot = True
            self.services = self.getBouquetServices(self.StartBouquet)
            self['list'].fillGraphEPG(self.services, self.ask_time)
            self['list'].moveToService(serviceref)
            self['list'].setCurrentlyPlaying(serviceref)
            self['bouquetlist'].recalcEntrySize()
            self['bouquetlist'].fillBouquetList(self.bouquets)
            self['bouquetlist'].moveToService(self.StartBouquet)
            self['bouquetlist'].setCurrentBouquet(self.StartBouquet )
            self.setTitle(self['bouquetlist'].getCurrentBouquet())
            if self.type == EPG_TYPE_GRAPH:
                self['list'].setShowServiceMode(config.epgselection.graph_servicetitle_mode.value)
                self.moveTimeLines()
                if config.epgselection.graph_channel1.value:
                    self['list'].instance.moveSelectionTo(0)
            elif self.type == EPG_TYPE_INFOBARGRAPH:
                self['list'].setShowServiceMode(config.epgselection.infobar_servicetitle_mode.value)
                self.moveTimeLines()
        elif self.type == EPG_TYPE_MULTI:
            self['bouquetlist'].recalcEntrySize()
            self['bouquetlist'].fillBouquetList(self.bouquets)
            self['bouquetlist'].moveToService(self.StartBouquet)
            self['bouquetlist'].fillBouquetList(self.bouquets)
            self.services = self.getBouquetServices(self.StartBouquet)
            self['list'].fillMultiEPG(self.services, self.ask_time)
            self['list'].setCurrentlyPlaying(serviceref)
            self.setTitle(self['bouquetlist'].getCurrentBouquet())
        elif self.type == EPG_TYPE_SINGLE or self.type == EPG_TYPE_ENHANCED or self.type == EPG_TYPE_INFOBAR:
            if self.type == EPG_TYPE_SINGLE:
                service = self.currentService
            elif self.type == EPG_TYPE_ENHANCED or self.type == EPG_TYPE_INFOBAR:
                service = ServiceReference(self.servicelist.getCurrentSelection())
                title = ServiceReference(self.servicelist.getRoot()).getServiceName()
            self['Service'].newService(service.ref)
            # title: "<bouquet> - <service>" when a bouquet name is known, else just the service
            if title:
                title = title + ' - ' + service.getServiceName()
            else:
                title = service.getServiceName()
            self.setTitle(title)
            self['list'].fillSingleEPG(service)
            self['list'].sortSingleEPG(int(config.epgselection.sort.value))
        else:
            # EPG_TYPE_SIMILAR
            self['list'].fillSimilarList(self.currentService, self.eventid)
        # hide the 'please wait' label shortly after filling the list
        self.listTimer.start(10)
    def refreshlist(self):
        """Re-fill the EPG list with fresh data, keeping the current position."""
        self.refreshTimer.stop()
        if self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH:
            self['list'].fillGraphEPG(None, self.ask_time)
            self.moveTimeLines()
        elif self.type == EPG_TYPE_MULTI:
            self['list'].fillMultiEPG(self.services, self.ask_time)
        elif self.type == EPG_TYPE_SINGLE or self.type == EPG_TYPE_ENHANCED or self.type == EPG_TYPE_INFOBAR:
            try:
                if self.type == EPG_TYPE_SINGLE:
                    service = self.currentService
                elif self.type == EPG_TYPE_ENHANCED or self.type == EPG_TYPE_INFOBAR:
                    service = ServiceReference(self.servicelist.getCurrentSelection())
                # use the stored event index when one was remembered, and clear it
                if not self.cureventindex:
                    index = self['list'].getCurrentIndex()
                else:
                    index = self.cureventindex
                    self.cureventindex = None
                self['list'].fillSingleEPG(service)
                self['list'].sortSingleEPG(int(config.epgselection.sort.value))
                self['list'].setCurrentIndex(index)
            except:
                # NOTE(review): bare except silently swallows any refresh failure — confirm intended
                pass
    def moveUp(self):
        """Move the EPG list selection up by one entry."""
        self['list'].moveTo(self['list'].instance.moveUp)
    def moveDown(self):
        """Move the EPG list selection down by one entry."""
        self['list'].moveTo(self['list'].instance.moveDown)
def updEvent(self, dir, visible = True):
ret = self['list'].selEntry(dir, visible)
if ret:
self.moveTimeLines(True)
    def nextPage(self):
        """Move the EPG list selection down by one page."""
        self['list'].moveTo(self['list'].instance.pageDown)
    def prevPage(self):
        """Move the EPG list selection up by one page."""
        self['list'].moveTo(self['list'].instance.pageUp)
    def toTop(self):
        """Move the EPG list selection to the first entry."""
        self['list'].moveTo(self['list'].instance.moveTop)
    def toEnd(self):
        """Move the EPG list selection to the last entry."""
        self['list'].moveTo(self['list'].instance.moveEnd)
def leftPressed(self):
if self.type == EPG_TYPE_MULTI:
self['list'].updateMultiEPG(-1)
else:
self.updEvent(-1)
def rightPressed(self):
if self.type == EPG_TYPE_MULTI:
self['list'].updateMultiEPG(1)
else:
self.updEvent(+1)
def Bouquetlist(self):
if not self.bouquetlist_active:
self.BouquetlistShow()
else:
self.BouquetlistHide()
    def BouquetlistShow(self):
        """Show the bouquet list overlay and route key presses to it.

        The EPG cursor/OK action maps are disabled first so the bouquet
        action maps receive the events instead; the current selection index
        is remembered so a later cancel can restore it.
        """
        self.curindex = self['bouquetlist'].l.getCurrentSelectionIndex()
        self["epgcursoractions"].setEnabled(False)
        self["okactions"].setEnabled(False)
        self['bouquetlist'].show()
        self["bouquetokactions"].setEnabled(True)
        self["bouquetcursoractions"].setEnabled(True)
        self.bouquetlist_active = True
    def BouquetlistHide(self, cancel=True):
        """Hide the bouquet list overlay and restore EPG key handling.

        :param cancel: when True, restore the selection that was active when
                       the overlay was opened (i.e. treat the hide as a cancel)
        """
        self["bouquetokactions"].setEnabled(False)
        self["bouquetcursoractions"].setEnabled(False)
        self['bouquetlist'].hide()
        if cancel:
            self['bouquetlist'].setCurrentIndex(self.curindex)
        self["okactions"].setEnabled(True)
        self["epgcursoractions"].setEnabled(True)
        self.bouquetlist_active = False
def getCurrentBouquet(self):
if self.BouquetRoot:
return self.StartBouquet
elif self.has_key('bouquetlist'):
cur = self["bouquetlist"].l.getCurrentSelection()
return cur and cur[1]
else:
return self.servicelist.getRoot()
    def BouquetOK(self):
        """Switch to the bouquet currently selected in the bouquet list overlay."""
        self.BouquetRoot = False
        # look back config.epg.histminutes to include events that already started
        now = time() - int(config.epg.histminutes.value) * 60
        self.services = self.getBouquetServices(self.getCurrentBouquet())
        if self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH:
            # round the start time down to the configured raster
            # (the doubled 'self.ask_time =' assignment is redundant but kept as-is)
            if self.type == EPG_TYPE_GRAPH:
                self.ask_time = self.ask_time = now - now % (int(config.epgselection.graph_roundto.value) * 60)
            elif self.type == EPG_TYPE_INFOBARGRAPH:
                self.ask_time = self.ask_time = now - now % (int(config.epgselection.infobar_roundto.value) * 60)
            self['list'].resetOffset()
            self['list'].fillGraphEPG(self.services, self.ask_time)
            self.moveTimeLines(True)
        elif self.type == EPG_TYPE_MULTI:
            self['list'].fillMultiEPG(self.services, self.ask_time)
        self['list'].instance.moveSelectionTo(0)
        self.setTitle(self['bouquetlist'].getCurrentBouquet())
        # hide overlay without restoring the previous selection (not a cancel)
        self.BouquetlistHide(False)
def moveBouquetUp(self):
    """Move the bouquet-list cursor up one entry and redraw the list."""
    self['bouquetlist'].moveTo(self['bouquetlist'].instance.moveUp)
    self['bouquetlist'].fillBouquetList(self.bouquets)
def moveBouquetDown(self):
    """Move the bouquet-list cursor down one entry and redraw the list."""
    self['bouquetlist'].moveTo(self['bouquetlist'].instance.moveDown)
    self['bouquetlist'].fillBouquetList(self.bouquets)
def moveBouquetPageUp(self):
    """Page the bouquet-list cursor up one page and redraw the list."""
    self['bouquetlist'].moveTo(self['bouquetlist'].instance.pageUp)
    self['bouquetlist'].fillBouquetList(self.bouquets)
def moveBouquetPageDown(self):
    """Page the bouquet-list cursor down one page and redraw the list."""
    self['bouquetlist'].moveTo(self['bouquetlist'].instance.pageDown)
    self['bouquetlist'].fillBouquetList(self.bouquets)
def nextBouquet(self):
    """Switch to the next bouquet (CH+ / bouquet-down key).

    Multi/graph modes drive the on-screen bouquet list directly;
    enhanced/infobar modes advance the service list's bouquet and
    rebuild the screen.
    """
    if self.type == EPG_TYPE_MULTI or self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH:
        self.moveBouquetDown()
        self.BouquetOK()
    elif (self.type == EPG_TYPE_ENHANCED or self.type == EPG_TYPE_INFOBAR) and config.usage.multibouquet.value:
        # Remember where we were so closeScreen() can restore it.
        self.CurrBouquet = self.servicelist.getCurrentSelection()
        self.CurrService = self.servicelist.getRoot()
        self.servicelist.nextBouquet()
        self.onCreate()
def prevBouquet(self):
    """Switch to the previous bouquet (CH- / bouquet-up key).

    Mirror image of nextBouquet().
    """
    if self.type == EPG_TYPE_MULTI or self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH:
        self.moveBouquetUp()
        self.BouquetOK()
    elif (self.type == EPG_TYPE_ENHANCED or self.type == EPG_TYPE_INFOBAR) and config.usage.multibouquet.value:
        self.CurrBouquet = self.servicelist.getCurrentSelection()
        self.CurrService = self.servicelist.getRoot()
        self.servicelist.prevBouquet()
        self.onCreate()
def nextService(self):
    """Advance to the next service.

    Enhanced/infobar modes: step the service list forward, skipping
    service groups (flag bit 64) and optionally crossing into the next
    bouquet, then rebuild the EPG; recurses while landing on
    non-playable entries or (with `overjump`) entries without EPG data.
    Graph modes instead jump the time window forward; other modes
    delegate to the external serviceChangeCB.
    """
    if self.type == EPG_TYPE_ENHANCED or self.type == EPG_TYPE_INFOBAR:
        self.CurrBouquet = self.servicelist.getCurrentSelection()
        self.CurrService = self.servicelist.getRoot()
        self['list'].instance.moveSelectionTo(0)
        if self.servicelist.inBouquet():
            prev = self.servicelist.getCurrentSelection()
            if prev:
                prev = prev.toString()
                while True:
                    if config.usage.quickzap_bouquet_change.value and self.servicelist.atEnd():
                        self.servicelist.nextBouquet()
                    else:
                        self.servicelist.moveDown()
                    cur = self.servicelist.getCurrentSelection()
                    # Stop on a real service (flag 64 marks a group) or
                    # once we have wrapped back to the starting service.
                    if not cur or (not (cur.flags & 64)) or cur.toString() == prev:
                        break
        else:
            self.servicelist.moveDown()
        if self.isPlayable():
            self.onCreate()
            if not self['list'].getCurrent()[1] and config.epgselection.overjump.value:
                self.nextService()
        else:
            self.nextService()
    elif self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH:
        # Graph view: a "service step" is a 24h jump of the time window.
        self.updEvent(+24)
    elif self.serviceChangeCB:
        self.serviceChangeCB(1, self)
def prevService(self):
    """Step to the previous service; mirror image of nextService().

    Enhanced/infobar modes walk the service list backwards (optionally
    crossing bouquets, skipping service groups); graph modes jump the
    time window back 24h; otherwise the external serviceChangeCB is
    invoked with direction -1.
    """
    if self.type == EPG_TYPE_ENHANCED or self.type == EPG_TYPE_INFOBAR:
        self.CurrBouquet = self.servicelist.getCurrentSelection()
        self.CurrService = self.servicelist.getRoot()
        self['list'].instance.moveSelectionTo(0)
        if self.servicelist.inBouquet():
            prev = self.servicelist.getCurrentSelection()
            if prev:
                prev = prev.toString()
                while True:
                    if config.usage.quickzap_bouquet_change.value:
                        if self.servicelist.atBegin():
                            self.servicelist.prevBouquet()
                    self.servicelist.moveUp()
                    cur = self.servicelist.getCurrentSelection()
                    # Stop on a real service (flag 64 marks a group) or
                    # once we have wrapped back to the starting service.
                    if not cur or (not (cur.flags & 64)) or cur.toString() == prev:
                        break
        else:
            self.servicelist.moveUp()
        if self.isPlayable():
            self.onCreate()
            if not self['list'].getCurrent()[1] and config.epgselection.overjump.value:
                self.prevService()
        else:
            self.prevService()
    elif self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH:
        self.updEvent(-24)
    elif self.serviceChangeCB:
        self.serviceChangeCB(-1, self)
def enterDateTime(self):
    """Open the date/time input dialog for jumping the EPG to a time.

    Each mode keeps its own remembered time in config; the multi-EPG
    config entry is created lazily on first use (guarded by the
    module-level mepg_config_initialized flag).
    """
    global mepg_config_initialized
    if self.type == EPG_TYPE_MULTI:
        if not mepg_config_initialized:
            config.misc.prev_mepg_time = ConfigClock(default=time())
            mepg_config_initialized = True
        self.session.openWithCallback(self.onDateTimeInputClosed, TimeDateInput, config.misc.prev_mepg_time)
    elif self.type == EPG_TYPE_GRAPH:
        self.session.openWithCallback(self.onDateTimeInputClosed, TimeDateInput, config.epgselection.graph_prevtime)
    elif self.type == EPG_TYPE_INFOBARGRAPH:
        self.session.openWithCallback(self.onDateTimeInputClosed, TimeDateInput, config.epgselection.infobar_prevtime)
def onDateTimeInputClosed(self, ret):
    """Callback from the TimeDateInput dialog opened by enterDateTime().

    ret: sequence whose first element is the confirmation flag and
    whose second is the chosen timestamp.
    """
    if len(ret) > 1:
        if ret[0]:
            if self.type == EPG_TYPE_MULTI:
                self.ask_time = ret[1]
                self['list'].fillMultiEPG(self.services, ret[1])
            elif self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH:
                # NOTE(review): the chosen time ret[1] is not applied in the
                # graph branches — only the pre-existing ask_time is rounded
                # down to the grid step. Looks like ret[1] was meant to be
                # assigned first; confirm before changing behavior.
                # (Fix applied here: removed the unused local `now`.)
                if self.type == EPG_TYPE_GRAPH:
                    self.ask_time -= self.ask_time % (int(config.epgselection.graph_roundto.value) * 60)
                elif self.type == EPG_TYPE_INFOBARGRAPH:
                    self.ask_time -= self.ask_time % (int(config.epgselection.infobar_roundto.value) * 60)
                l = self['list']
                l.resetOffset()
                l.fillGraphEPG(None, self.ask_time)
                self.moveTimeLines(True)
    if self.eventviewDialog and (self.type == EPG_TYPE_INFOBAR or self.type == EPG_TYPE_INFOBARGRAPH):
        # Refresh the floating event-view dialog for the new position.
        self.infoKeyPressed(True)
def closeScreen(self):
    """Close the EPG screen, restoring the service that was playing
    when the screen was opened where preview mode requires it.

    Also tears down picture-in-picture and the floating event-view
    dialog before closing.
    """
    # Fix: identity comparison with None (`is None`, not `== None`).
    if self.type is None:
        self.close()
        return
    if self.type == EPG_TYPE_SINGLE:
        self.close()
        return  # stop and do not continue.
    if self.CurrBouquet and self.CurrService and (self.CurrBouquet != self.StartBouquet or self.CurrService != self.StartRef):
        # Put the service list back on the bouquet/service we started from.
        self.zapToNumber(self.StartRef, self.StartBouquet)
    if self.session.nav.getCurrentlyPlayingServiceOrGroup() and self.StartRef and self.session.nav.getCurrentlyPlayingServiceOrGroup().toString() != self.StartRef.toString():
        # NOTE(review): the mixed and/or chain below relies on operator
        # precedence for the infobar preview-mode test — confirm the grouping
        # is as intended before restructuring.
        if self.zapFunc and ((self.type == EPG_TYPE_GRAPH and config.epgselection.graph_preview_mode.value) or (self.type == EPG_TYPE_MULTI and config.epgselection.multi_preview_mode.value) or (
            self.type == EPG_TYPE_INFOBAR or self.type == EPG_TYPE_INFOBARGRAPH and config.epgselection.infobar_preview_mode.value == '1' or config.epgselection.infobar_preview_mode.value == '2') or (self.type == EPG_TYPE_ENHANCED and config.epgselection.enhanced_preview_mode.value)) and self.StartRef and self.StartBouquet:
            # '0:0:0:0:0:0:0:0:0' marks a non-broadcast (e.g. file) reference.
            if '0:0:0:0:0:0:0:0:0' not in self.StartRef.toString():
                self.zapFunc(None, zapback = True)
            elif '0:0:0:0:0:0:0:0:0' in self.StartRef.toString():
                self.session.nav.playService(self.StartRef)
    if self.session.pipshown:
        self.session.pipshown = False
        del self.session.pip
    self.closeEventViewDialog()
    self.close(True)
def infoKeyPressed(self, eventviewopen=False):
    """Show, refresh or hide the event information view.

    eventviewopen: True when the floating event-view dialog is already
    open and should be refreshed in place rather than toggled.
    """
    cur = self['list'].getCurrent()
    event = cur[0]
    service = cur[1]
    if event is not None and not self.eventviewDialog and not eventviewopen:
        if self.type != EPG_TYPE_SIMILAR:
            if self.type == EPG_TYPE_INFOBARGRAPH:
                # Infobar graph uses a lightweight floating dialog.
                self.eventviewDialog = self.session.instantiateDialog(EventViewSimple,event, service, skin='InfoBarEventView')
                self.eventviewDialog.show()
            else:
                self.session.open(EventViewEPGSelect, event, service, callback=self.eventViewCallback, similarEPGCB=self.openSimilarList)
    elif self.eventviewDialog and not eventviewopen:
        # Second press while the dialog is up: close it.
        self.eventviewDialog.hide()
        del self.eventviewDialog
        self.eventviewDialog = None
    elif event is not None and self.eventviewDialog and eventviewopen:
        if self.type != EPG_TYPE_SIMILAR:
            if self.type == EPG_TYPE_INFOBAR or self.type == EPG_TYPE_INFOBARGRAPH:
                # Refresh: replace the dialog with one for the new event.
                self.eventviewDialog.hide()
                self.eventviewDialog = self.session.instantiateDialog(EventViewSimple,event, service, skin='InfoBarEventView')
                self.eventviewDialog.show()
def redButtonPressed(self):
    """Red button (short): open IMDb lookup, unless this press is the
    tail of a long press (then just clear the long-press latch)."""
    self.closeEventViewDialog()
    if not self.longbuttonpressed:
        self.openIMDb()
    else:
        self.longbuttonpressed = False
def redlongButtonPressed(self):
    """Red button (long): toggle the EPG sort order."""
    self.closeEventViewDialog()
    self.longbuttonpressed = True
    self.sortEpg()
def greenButtonPressed(self):
    """Green button (short): open the record-timer dialog."""
    self.closeEventViewDialog()
    if not self.longbuttonpressed:
        self.RecordTimerQuestion(True)
    else:
        self.longbuttonpressed = False
def greenlongButtonPressed(self):
    """Green button (long): open the AutoTimer overview."""
    self.closeEventViewDialog()
    self.longbuttonpressed = True
    self.showAutoTimerList()
def yellowButtonPressed(self):
    """Yellow button (short): open the EPG search plugin."""
    self.closeEventViewDialog()
    if not self.longbuttonpressed:
        self.openEPGSearch()
    else:
        self.longbuttonpressed = False
def blueButtonPressed(self):
    """Blue button (short): create an AutoTimer from the current event."""
    self.closeEventViewDialog()
    if not self.longbuttonpressed:
        self.addAutoTimer()
    else:
        self.longbuttonpressed = False
def bluelongButtonPressed(self):
    """Blue button (long): open the AutoTimer overview."""
    self.closeEventViewDialog()
    self.longbuttonpressed = True
    self.showAutoTimerList()
def blueButtonPressedLong(self):
    """Alternate blue long-press handler driven by the InfoBar's own
    long-press detection: open the AutoTimer overview."""
    self.closeEventViewDialog()
    from InfoBar import InfoBar
    InfoBarInstance = InfoBar.instance
    # Robustness fix: the InfoBar singleton may not exist yet; the
    # original dereferenced it unconditionally.
    if InfoBarInstance and InfoBarInstance.LongButtonPressed:
        self.showAutoTimerList()
def openSimilarList(self, eventid, refstr):
    """Open a new EPG screen listing events similar to `eventid` on the
    service given by `refstr`."""
    self.session.open(EPGSelection, refstr, None, eventid)
def setServices(self, services):
    """Replace the service set shown by this EPG and rebuild the screen."""
    self.services = services
    self.onCreate()
def setService(self, service):
    """Replace the single service shown by this EPG and rebuild the screen."""
    self.currentService = service
    self.onCreate()
def eventViewCallback(self, setEvent, setService, val):
    """Prev/next callback used by the event-view screen.

    val: -1 for previous, +1 for next. Moves the EPG cursor
    accordingly, then feeds the new event/service back into the event
    view via the setEvent/setService setters. In multi/graph modes it
    skips rows without an event by recursing until one is found (or the
    service stops changing).
    """
    l = self['list']
    old = l.getCurrent()
    if self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH:
        self.updEvent(val, False)
    elif val == -1:
        self.moveUp()
    elif val == +1:
        self.moveDown()
    cur = l.getCurrent()
    if (self.type == EPG_TYPE_MULTI or self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH) and cur[0] is None and cur[1].ref != old[1].ref:
        # Landed on an empty row of a different service: keep stepping.
        self.eventViewCallback(setEvent, setService, val)
    else:
        setService(cur[1])
        setEvent(cur[0])
def eventSelected(self):
    """OK on an event: show its information view."""
    self.infoKeyPressed()
def sortEpg(self):
    """Toggle the single-EPG sort order between the two supported modes
    ('0'/'1') and persist the choice to the config file."""
    if self.type == EPG_TYPE_SINGLE or self.type == EPG_TYPE_ENHANCED or self.type == EPG_TYPE_INFOBAR:
        if config.epgselection.sort.value == '0':
            config.epgselection.sort.setValue('1')
        else:
            config.epgselection.sort.setValue('0')
        config.epgselection.sort.save()
        configfile.save()
        self['list'].sortSingleEPG(int(config.epgselection.sort.value))
def OpenSingleEPG(self):
    """Open the single-service EPG for the currently selected service."""
    cur = self['list'].getCurrent()
    # (Fix: removed the unused local `event = cur[0]`.)
    serviceref = cur[1].ref
    if serviceref is not None:
        self.session.open(SingleEPG, serviceref)
def openIMDb(self):
    """Open the IMDb plugin for the selected event's name.

    Falls back to an empty search string if no event is selected, and
    shows an info box when the plugin is not installed.
    """
    try:
        from Plugins.Extensions.IMDb.plugin import IMDB, IMDBEPGSelection
        try:
            cur = self['list'].getCurrent()
            event = cur[0]
            name = event.getEventName()
        except:
            # Best-effort: any failure to read the event just means an
            # empty initial search term.
            name = ''
        self.session.open(IMDB, name, False)
    except ImportError:
        self.session.open(MessageBox, _('The IMDb plugin is not installed!\nPlease install it.'), type=MessageBox.TYPE_INFO, timeout=10)
def openEPGSearch(self):
    """Open the EPGSearch plugin pre-filled with the selected event's
    name; shows an info box when the plugin is not installed."""
    try:
        from Plugins.Extensions.EPGSearch.EPGSearch import EPGSearch
        try:
            cur = self['list'].getCurrent()
            event = cur[0]
            name = event.getEventName()
        except:
            name = ''
        self.session.open(EPGSearch, name, False)
    except ImportError:
        self.session.open(MessageBox, _('The EPGSearch plugin is not installed!\nPlease install it.'), type=MessageBox.TYPE_INFO, timeout=10)
def addAutoTimer(self):
    """Create an AutoTimer from the selected event via the AutoTimer
    editor UI; shows an info box when the plugin is missing."""
    try:
        from Plugins.Extensions.AutoTimer.AutoTimerEditor import addAutotimerFromEvent
        cur = self['list'].getCurrent()
        event = cur[0]
        if not event:
            return
        serviceref = cur[1]
        addAutotimerFromEvent(self.session, evt=event, service=serviceref)
        # Refresh the list shortly after, so the new timer mark shows up.
        self.refreshTimer.start(3000)
    except ImportError:
        self.session.open(MessageBox, _('The AutoTimer plugin is not installed!\nPlease install it.'), type=MessageBox.TYPE_INFO, timeout=10)
def addAutoTimerSilent(self):
    """Create an AutoTimer from the selected event without opening the
    editor UI; shows an info box when the plugin is missing."""
    try:
        from Plugins.Extensions.AutoTimer.AutoTimerEditor import addAutotimerFromEventSilent
        cur = self['list'].getCurrent()
        event = cur[0]
        if not event:
            return
        serviceref = cur[1]
        addAutotimerFromEventSilent(self.session, evt=event, service=serviceref)
        self.refreshTimer.start(3000)
    except ImportError:
        self.session.open(MessageBox, _('The AutoTimer plugin is not installed!\nPlease install it.'), type=MessageBox.TYPE_INFO, timeout=10)
def showTimerList(self):
    """Open the standard timer-list editor screen."""
    from Screens.TimerEdit import TimerEditList
    self.session.open(TimerEditList)
def showAutoTimerList(self):
    """Open the AutoTimer overview screen.

    Loads the plugin, parses its XML config (reporting parse errors),
    stops the background poller while the overview is open, and shows
    an info box when the plugin is not installed.
    """
    global autopoller
    global autotimer
    try:
        # Importing `main`/`autostart` doubles as the plugin-presence check.
        from Plugins.Extensions.AutoTimer.plugin import main, autostart
        from Plugins.Extensions.AutoTimer.AutoTimer import AutoTimer
        from Plugins.Extensions.AutoTimer.AutoPoller import AutoPoller
        autopoller = AutoPoller()
        autotimer = AutoTimer()
        try:
            autotimer.readXml()
        except SyntaxError as se:
            self.session.open(MessageBox, _('Your config file is not well-formed:\n%s') % str(se), type=MessageBox.TYPE_ERROR, timeout=10)
            return
        if autopoller is not None:
            autopoller.stop()
        from Plugins.Extensions.AutoTimer.AutoTimerOverview import AutoTimerOverview
        self.session.openWithCallback(self.editCallback, AutoTimerOverview, autotimer)
    except ImportError:
        self.session.open(MessageBox, _('The AutoTimer plugin is not installed!\nPlease install it.'), type=MessageBox.TYPE_INFO, timeout=10)
def editCallback(self, session):
    """Callback when the AutoTimer overview closes.

    On confirmation, writes the AutoTimer XML and re-parses the EPG;
    then restarts the background poller if auto-polling is enabled,
    otherwise drops the module-level plugin instances.
    """
    global autopoller
    global autotimer
    if session is not None:
        autotimer.writeXml()
        autotimer.parseEPG()
    if config.plugins.autotimer.autopoll.value:
        if autopoller is None:
            from Plugins.Extensions.AutoTimer.AutoPoller import AutoPoller
            autopoller = AutoPoller()
        autopoller.start()
    else:
        autopoller = None
        autotimer = None
def timerAdd(self):
    """Open the record-timer dialog for the selected event."""
    self.RecordTimerQuestion(True)
def editTimer(self, timer):
    """Open the timer-entry editor for an existing timer."""
    self.session.open(TimerEntry, timer)
def removeTimer(self, timer):
    """Delete `timer`, reset the green button to 'Add Timer' and redraw."""
    self.closeChoiceBoxDialog()
    # Suppress any after-record action (shutdown etc.) before removal.
    timer.afterEvent = AFTEREVENT.NONE
    self.session.nav.RecordTimer.removeEntry(timer)
    self['key_green'].setText(_('Add Timer'))
    self.key_green_choice = self.ADD_TIMER
    self.refreshlist()
def disableTimer(self, timer):
    """Disable `timer`, reset the green button to 'Add Timer' and redraw."""
    self.closeChoiceBoxDialog()
    timer.disable()
    self.session.nav.RecordTimer.timeChanged(timer)
    self['key_green'].setText(_('Add Timer'))
    self.key_green_choice = self.ADD_TIMER
    self.refreshlist()
def RecordTimerQuestion(self, manual=False):
    """Offer timer actions for the selected event.

    If a record timer already exists for the event, show a delete /
    edit / disable choice box; otherwise either show an "Add Timer /
    Add AutoTimer" choice box (manual=False) or open the timer editor
    directly (manual=True).
    """
    cur = self['list'].getCurrent()
    event = cur[0]
    serviceref = cur[1]
    if event is None:
        return
    eventid = event.getEventId()
    # Compare on the first 11 reference fields only (strips the name part).
    refstr = ':'.join(serviceref.ref.toString().split(':')[:11])
    title = None
    for timer in self.session.nav.RecordTimer.timer_list:
        if timer.eit == eventid and ':'.join(timer.service_ref.ref.toString().split(':')[:11]) == refstr:
            cb_func1 = lambda ret: self.removeTimer(timer)
            cb_func2 = lambda ret: self.editTimer(timer)
            cb_func3 = lambda ret: self.disableTimer(timer)
            menu = [(_("Delete timer"), 'CALLFUNC', self.RemoveChoiceBoxCB, cb_func1), (_("Edit timer"), 'CALLFUNC', self.RemoveChoiceBoxCB, cb_func2), (_("Disable timer"), 'CALLFUNC', self.RemoveChoiceBoxCB, cb_func3)]
            title = _("Select action for timer %s:") % event.getEventName()
            break
    else:
        # No matching timer found (for-else: loop ran to completion).
        if not manual:
            menu = [(_("Add Timer"), 'CALLFUNC', self.ChoiceBoxCB, self.doRecordTimer), (_("Add AutoTimer"), 'CALLFUNC', self.ChoiceBoxCB, self.addAutoTimerSilent)]
            title = "%s?" % event.getEventName()
        else:
            newEntry = RecordTimerEntry(serviceref, checkOldTimers=True, dirname=preferredTimerPath(), *parseEvent(event))
            self.session.openWithCallback(self.finishedAdd, TimerEntry, newEntry)
    if title:
        self.ChoiceBoxDialog = self.session.instantiateDialog(ChoiceBox, title=title, list=menu, keys=['green', 'blue'], skin_name="RecordTimerQuestion")
        # Position the choice box next to the selected row.
        serviceref = eServiceReference(str(self['list'].getCurrent()[1]))
        posy = self['list'].getSelectionPosition(serviceref)
        self.ChoiceBoxDialog.instance.move(ePoint(posy[0]-self.ChoiceBoxDialog.instance.size().width(),self.instance.position().y()+posy[1]))
        self.showChoiceBoxDialog()
def recButtonPressed(self):
    """REC button (short): open the record-timer dialog, unless this is
    the tail of a long press (then clear the long-press latch)."""
    if not self.longbuttonpressed:
        self.RecordTimerQuestion()
    else:
        self.longbuttonpressed = False
def reclongButtonPressed(self):
    """REC button (long): create a zap timer for the selected event."""
    self.longbuttonpressed = True
    self.doZapTimer()
def RemoveChoiceBoxCB(self, choice):
    """Choice-box callback for timer delete/edit/disable entries: close
    the box and run the chosen callback (which receives this screen)."""
    self.closeChoiceBoxDialog()
    if choice:
        choice(self)
def ChoiceBoxCB(self, choice):
    """Choice-box callback for add-timer entries: close the box and run
    the chosen zero-argument callback, swallowing any error it raises
    (best-effort, matching the original behavior)."""
    self.closeChoiceBoxDialog()
    if choice:
        try:
            choice()
        except Exception:
            # Fix: the original used a bare `except:` whose handler was the
            # no-op expression `choice`. Keep the swallow-on-error behavior
            # but no longer trap SystemExit/KeyboardInterrupt.
            pass
def showChoiceBoxDialog(self):
    """Show the timer choice box and route all key input to it by
    disabling the screen's own action maps."""
    self['okactions'].setEnabled(False)
    if self.has_key('epgcursoractions'):
        self['epgcursoractions'].setEnabled(False)
    self['colouractions'].setEnabled(False)
    self['recordingactions'].setEnabled(False)
    self['epgactions'].setEnabled(False)
    self["dialogactions"].setEnabled(True)
    self.ChoiceBoxDialog['actions'].execBegin()
    self.ChoiceBoxDialog.show()
    if self.has_key('input_actions'):
        self['input_actions'].setEnabled(False)
def closeChoiceBoxDialog(self):
    """Tear down the timer choice box (if any) and re-enable the
    screen's own action maps; inverse of showChoiceBoxDialog()."""
    self["dialogactions"].setEnabled(False)
    if self.ChoiceBoxDialog:
        self.ChoiceBoxDialog['actions'].execEnd()
        self.session.deleteDialog(self.ChoiceBoxDialog)
    self['okactions'].setEnabled(True)
    if self.has_key('epgcursoractions'):
        self['epgcursoractions'].setEnabled(True)
    self['colouractions'].setEnabled(True)
    self['recordingactions'].setEnabled(True)
    self['epgactions'].setEnabled(True)
    if self.has_key('input_actions'):
        self['input_actions'].setEnabled(True)
def doRecordTimer(self):
    """Create an instant record timer (zap flag 0) for the selection."""
    self.doInstantTimer(0)
def doZapTimer(self):
    """Create an instant zap timer (zap flag 1) for the selection."""
    self.doInstantTimer(1)
def doInstantTimer(self, zap):
    """Create a timer for the selected event without the full editor.

    zap: 0 for a record timer, 1 for a zap timer (see doRecordTimer /
    doZapTimer). Opens the instant-record dialog and hands its result
    to finishedAdd() once it is torn down.
    """
    cur = self['list'].getCurrent()
    event = cur[0]
    serviceref = cur[1]
    if event is None:
        return
    # (Fix: removed the unused locals `eventid` and `refstr`.)
    newEntry = RecordTimerEntry(serviceref, checkOldTimers=True, *parseEvent(event))
    self.InstantRecordDialog = self.session.instantiateDialog(InstantRecordTimerEntry, newEntry, zap)
    retval = [True, self.InstantRecordDialog.retval()]
    self.session.deleteDialogWithCallback(self.finishedAdd, self.InstantRecordDialog, retval)
def finishedAdd(self, answer):
    """Callback after the timer editor / instant-record dialog closes.

    answer: (confirmed, timer_entry). On confirmation, registers the
    timer, tries to resolve conflicts by auto-increase and by nudging
    begin/end 30s away from a directly adjacent conflicting timer, and
    finally opens the conflict-resolution screen if a conflict remains.
    Updates the green button to reflect the timer state.
    """
    if answer[0]:
        entry = answer[1]
        simulTimerList = self.session.nav.RecordTimer.record(entry)
        if simulTimerList is not None:
            # Conflict: first let conflicting timers shrink via auto-increase.
            for x in simulTimerList:
                if x.setAutoincreaseEnd(entry):
                    self.session.nav.RecordTimer.timeChanged(x)
            simulTimerList = self.session.nav.RecordTimer.record(entry)
            if simulTimerList is not None:
                if not entry.repeated and not config.recording.margin_before.value and not config.recording.margin_after.value and len(simulTimerList) > 1:
                    change_time = False
                    conflict_begin = simulTimerList[1].begin
                    conflict_end = simulTimerList[1].end
                    # Back-to-back timers: trade 30 seconds to make them fit.
                    if conflict_begin == entry.end:
                        entry.end -= 30
                        change_time = True
                    elif entry.begin == conflict_end:
                        entry.begin += 30
                        change_time = True
                    if change_time:
                        simulTimerList = self.session.nav.RecordTimer.record(entry)
                if simulTimerList is not None:
                    # Still conflicting: let the user resolve it.
                    self.session.openWithCallback(self.finishSanityCorrection, TimerSanityConflict, simulTimerList)
        self["key_green"].setText(_("Change timer"))
        self.key_green_choice = self.REMOVE_TIMER
    else:
        self['key_green'].setText(_('Add Timer'))
        self.key_green_choice = self.ADD_TIMER
    self.refreshlist()
def finishSanityCorrection(self, answer):
    """Callback from the conflict screen: re-run the add logic."""
    self.finishedAdd(answer)
def OK(self):
    """OK key (short): finish number-zap entry if one is in progress,
    otherwise zap (and possibly exit) per the configured OK action."""
    if self.zapnumberstarted:
        self.dozumberzap()
    else:
        if config.epgselection.graph_ok.value == 'Zap' or config.epgselection.enhanced_ok.value == 'Zap' or config.epgselection.infobar_ok.value == 'Zap' or config.epgselection.multi_ok.value == 'Zap':
            self.zapTo()
        if config.epgselection.graph_ok.value == 'Zap + Exit' or config.epgselection.enhanced_ok.value == 'Zap + Exit' or config.epgselection.infobar_ok.value == 'Zap + Exit' or config.epgselection.multi_ok.value == 'Zap + Exit':
            self.zap()
def OKLong(self):
    """OK key (long): same as OK() but driven by the *_oklong settings."""
    if self.zapnumberstarted:
        self.dozumberzap()
    else:
        if config.epgselection.graph_oklong.value == 'Zap' or config.epgselection.enhanced_oklong.value == 'Zap' or config.epgselection.infobar_oklong.value == 'Zap' or config.epgselection.multi_oklong.value == 'Zap':
            self.zapTo()
        if config.epgselection.graph_oklong.value == 'Zap + Exit' or config.epgselection.enhanced_oklong.value == 'Zap + Exit' or config.epgselection.infobar_oklong.value == 'Zap + Exit' or config.epgselection.multi_oklong.value == 'Zap + Exit':
            self.zap()
def Info(self):
    """INFO key (short): show channel info or single-service EPG,
    depending on the graph-mode setting; defaults to channel info."""
    if self.type == EPG_TYPE_GRAPH and config.epgselection.graph_info.value == 'Channel Info':
        self.infoKeyPressed()
    elif self.type == EPG_TYPE_GRAPH and config.epgselection.graph_info.value == 'Single EPG':
        self.OpenSingleEPG()
    else:
        self.infoKeyPressed()
def InfoLong(self):
    """INFO key (long): as Info() but driven by graph_infolong;
    defaults to opening the single-service EPG."""
    if self.type == EPG_TYPE_GRAPH and config.epgselection.graph_infolong.value == 'Channel Info':
        self.infoKeyPressed()
    elif self.type == EPG_TYPE_GRAPH and config.epgselection.graph_infolong.value == 'Single EPG':
        self.OpenSingleEPG()
    else:
        self.OpenSingleEPG()
def applyButtonState(self, state):
    """Update the now/next/more indicator buttons of the multi-EPG.

    state 0 hides every indicator (and clears the red button text);
    states 1-3 highlight the now / next / more button respectively and
    show the others unselected.
    """
    if state == 0:
        for widget in ('now_button', 'now_button_sel',
                       'next_button', 'next_button_sel',
                       'more_button', 'more_button_sel',
                       'now_text', 'next_text', 'more_text'):
            self[widget].hide()
        self['key_red'].setText('')
    else:
        for selected_state, prefix in enumerate(('now', 'next', 'more'), 1):
            if state == selected_state:
                self[prefix + '_button_sel'].show()
                self[prefix + '_button'].hide()
            else:
                self[prefix + '_button'].show()
                self[prefix + '_button_sel'].hide()
def onSelectionChanged(self):
    """React to the EPG cursor moving to another row.

    Pushes the new event/service into the summary widgets, updates the
    now/next/more buttons and the date label in multi mode, refreshes
    the green button ('Add Timer' vs 'Change timer') according to
    whether a record timer exists for the event, and refreshes the
    floating event view in infobar modes.
    """
    cur = self['list'].getCurrent()
    event = cur[0]
    self['Event'].newEvent(event)
    if cur[1] is None:
        self['Service'].newService(None)
    else:
        self['Service'].newService(cur[1].ref)
    if self.type == EPG_TYPE_MULTI:
        count = self['list'].getCurrentChangeCount()
        if self.ask_time != -1:
            # A specific time was requested: indicators are meaningless.
            self.applyButtonState(0)
        elif count > 1:
            self.applyButtonState(3)
        elif count > 0:
            self.applyButtonState(2)
        else:
            self.applyButtonState(1)
        datestr = ''
        if event is not None:
            now = time()
            beg = event.getBeginTime()
            nowTime = localtime(now)
            begTime = localtime(beg)
            # Index 2 of struct_time is the day of month.
            if nowTime[2] != begTime[2]:
                datestr = strftime(_('%A %e %b'), begTime)
            else:
                datestr = '%s' % _('Today')
        self['date'].setText(datestr)
    if cur[1] is None or cur[1].getServiceName() == '':
        if self.key_green_choice != self.EMPTY:
            self['key_green'].setText('')
            self.key_green_choice = self.EMPTY
        return
    if event is None:
        if self.key_green_choice != self.EMPTY:
            self['key_green'].setText('')
            self.key_green_choice = self.EMPTY
        return
    serviceref = cur[1]
    eventid = event.getEventId()
    # Compare on the first 11 reference fields only (strips the name part).
    refstr = ':'.join(serviceref.ref.toString().split(':')[:11])
    isRecordEvent = False
    for timer in self.session.nav.RecordTimer.timer_list:
        if timer.eit == eventid and ':'.join(timer.service_ref.ref.toString().split(':')[:11]) == refstr:
            isRecordEvent = True
            break
    if isRecordEvent and self.key_green_choice != self.REMOVE_TIMER:
        self["key_green"].setText(_("Change timer"))
        self.key_green_choice = self.REMOVE_TIMER
    elif not isRecordEvent and self.key_green_choice != self.ADD_TIMER:
        self['key_green'].setText(_('Add Timer'))
        self.key_green_choice = self.ADD_TIMER
    if self.eventviewDialog and (self.type == EPG_TYPE_INFOBAR or self.type == EPG_TYPE_INFOBARGRAPH):
        self.infoKeyPressed(True)
def moveTimeLines(self, force = False):
    """Redraw the graph-mode time scale and schedule the next update
    exactly at the start of the next minute."""
    self.updateTimelineTimer.start((60 - int(time()) % 60) * 1000)
    self['timeline_text'].setEntries(self['list'], self['timeline_now'], self.time_lines, force)
    # Invalidate the list so the "now" marker is repainted.
    self['list'].l.invalidate()
def isPlayable(self):
    """Return True when the service selected in the service list is a
    real, playable service (not a marker and not a directory)."""
    selection = ServiceReference(self.servicelist.getCurrentSelection())
    unplayable_mask = eServiceReference.isMarker | eServiceReference.isDirectory
    return not (selection.ref.flags & unplayable_mask)
def setServicelistSelection(self, bouquet, service):
    """Point the service list at `service` inside `bouquet`, entering
    the bouquet path first if the list is rooted elsewhere."""
    if self.servicelist:
        if self.servicelist.getRoot() != bouquet:
            self.servicelist.clearPath()
            self.servicelist.enterPath(self.servicelist.bouquet_root)
            self.servicelist.enterPath(bouquet)
        self.servicelist.setCurrentSelection(service)
def closeEventViewDialog(self):
    """Hide and drop the floating event-view dialog, if present."""
    if self.eventviewDialog:
        self.eventviewDialog.hide()
        del self.eventviewDialog
        self.eventviewDialog = None
def zap(self):
    """Zap to the selected service and close the screen.

    With a zap function installed the screen closes with True (caller
    keeps the new service); without one it simply closes.
    """
    if self.zapFunc:
        self.zapSelectedService()
        self.closeEventViewDialog()
        self.close(True)
    else:
        self.closeEventViewDialog()
        self.close()
def zapSelectedService(self, prev=False):
    """Tune to the service under the EPG cursor.

    prev: when True the zap is a preview (handled by self.zapFunc).
    In infobar modes with preview mode '2' the service is played in
    picture-in-picture instead; selecting the service already shown in
    PiP tears the PiP down and performs a full zap.  Only acts when the
    cursor sits on the "now" column (change count 0).
    """
    # Remember what was playing (main screen or PiP) for zapTo()'s
    # zap-back logic.
    if self.session.pipshown:
        self.prevch = self.session.pip.getCurrentService() and str(self.session.pip.getCurrentService().toString()) or None
    else:
        self.prevch = self.session.nav.getCurrentlyPlayingServiceReference() and str(self.session.nav.getCurrentlyPlayingServiceReference().toString()) or None
    lst = self["list"]
    count = lst.getCurrentChangeCount()
    if count == 0:
        ref = lst.getCurrent()[1]
        if ref is not None:
            if (self.type == EPG_TYPE_INFOBAR or self.type == EPG_TYPE_INFOBARGRAPH) and config.epgselection.infobar_preview_mode.value == '2':
                if not self.session.pipshown:
                    self.session.pip = self.session.instantiateDialog(PictureInPicture)
                    self.session.pip.show()
                    self.session.pipshown = True
                # Allow a configured substitute service for PiP playback.
                n_service = self.pipServiceRelation.get(str(ref.ref), None)
                if n_service is not None:
                    service = eServiceReference(n_service)
                else:
                    service = ref.ref
                if self.session.pipshown and self.currch == service.toString():
                    # Same service selected again: leave PiP and really zap.
                    self.session.pipshown = False
                    del self.session.pip
                    self.zapFunc(ref.ref, bouquet = self.getCurrentBouquet(), preview = False)
                    return
                self.session.pip.playService(service)
                self.currch = self.session.pip.getCurrentService() and str(self.session.pip.getCurrentService().toString())
            else:
                self.zapFunc(ref.ref, bouquet = self.getCurrentBouquet(), preview = prev)
                self.currch = self.session.nav.getCurrentlyPlayingServiceReference() and str(self.session.nav.getCurrentlyPlayingServiceReference().toString())
            self['list'].setCurrentlyPlaying(self.session.nav.getCurrentlyPlayingServiceOrGroup())
def zapTo(self):
    """Preview-zap to the selected service; a second zap onto the same
    service (no channel change) finalizes it and closes the screen.

    Saves a resume point first when the currently playing reference is
    a non-broadcast one ('0:0:0:0:0:0:0:0:0' marker, e.g. a recording).
    """
    if self.session.nav.getCurrentlyPlayingServiceOrGroup() and '0:0:0:0:0:0:0:0:0' in self.session.nav.getCurrentlyPlayingServiceOrGroup().toString():
        from Screens.InfoBarGenerics import setResumePoint
        setResumePoint(self.session)
    if self.zapFunc:
        self.zapSelectedService(True)
        self.refreshTimer.start(2000)
        if not self.currch or self.currch == self.prevch:
            # Nothing changed: commit the zap and leave the screen.
            if self.zapFunc:
                self.zapFunc(None, False)
                self.closeEventViewDialog()
                self.close('close')
    else:
        self.closeEventViewDialog()
        self.close()
def keyNumberGlobal(self, number):
    """Handle a numeric key press.

    In graph / infobar-graph modes the digits are shortcuts:
    1/3 shrink/grow the visible time span, 2/8 page up/down,
    4/6 shift the window left/right, 5 jumps to now, 7 (graph only)
    toggles the double-height rows, 9 jumps to prime time and
    0 returns to the top at the current time.
    In all other modes digits accumulate into a channel-number zap
    field which fires after 4 digits or a 5 second timeout.
    """
    if self.type == EPG_TYPE_GRAPH:
        if number == 1:
            # Zoom in: show one hour less (minimum 60 minutes).
            timeperiod = int(config.epgselection.graph_prevtimeperiod.value)
            if timeperiod > 60:
                timeperiod -= 60
                self['list'].setEpoch(timeperiod)
                config.epgselection.graph_prevtimeperiod.setValue(timeperiod)
                self.moveTimeLines()
        elif number == 2:
            self.prevPage()
        elif number == 3:
            # Zoom out: show one hour more (maximum 300 minutes).
            timeperiod = int(config.epgselection.graph_prevtimeperiod.value)
            if timeperiod < 300:
                timeperiod += 60
                self['list'].setEpoch(timeperiod)
                config.epgselection.graph_prevtimeperiod.setValue(timeperiod)
                self.moveTimeLines()
        elif number == 4:
            self.updEvent(-2)
        elif number == 5:
            # Jump to "now", rounded down to the grid step.
            now = time() - int(config.epg.histminutes.value) * 60
            self.ask_time = now - now % (int(config.epgselection.graph_roundto.value) * 60)
            self['list'].resetOffset()
            self['list'].fillGraphEPG(None, self.ask_time)
            self.moveTimeLines(True)
        elif number == 6:
            self.updEvent(+2)
        elif number == 7:
            # Toggle row-height mode.
            if config.epgselection.graph_heightswitch.value:
                config.epgselection.graph_heightswitch.setValue(False)
            else:
                config.epgselection.graph_heightswitch.setValue(True)
            self['list'].setItemsPerPage()
            self['list'].fillGraphEPG(None)
            self.moveTimeLines()
        elif number == 8:
            self.nextPage()
        elif number == 9:
            # Jump to the configured prime time (tomorrow if already past).
            basetime = localtime(self['list'].getTimeBase())
            basetime = (basetime[0], basetime[1], basetime[2], int(config.epgselection.graph_primetimehour.value), int(config.epgselection.graph_primetimemins.value), 0, basetime[6], basetime[7], basetime[8])
            self.ask_time = mktime(basetime)
            if self.ask_time + 3600 < time():
                self.ask_time += 86400
            self['list'].resetOffset()
            self['list'].fillGraphEPG(None, self.ask_time)
            self.moveTimeLines(True)
        elif number == 0:
            self.toTop()
            now = time() - int(config.epg.histminutes.value) * 60
            self.ask_time = now - now % (int(config.epgselection.graph_roundto.value) * 60)
            self['list'].resetOffset()
            self['list'].fillGraphEPG(None, self.ask_time)
            self.moveTimeLines()
    elif self.type == EPG_TYPE_INFOBARGRAPH:
        # Same shortcuts as the graph mode, minus the height toggle (7),
        # using the infobar_* settings.
        if number == 1:
            timeperiod = int(config.epgselection.infobar_prevtimeperiod.value)
            if timeperiod > 60:
                timeperiod -= 60
                self['list'].setEpoch(timeperiod)
                config.epgselection.infobar_prevtimeperiod.setValue(timeperiod)
                self.moveTimeLines()
        elif number == 2:
            self.prevPage()
        elif number == 3:
            timeperiod = int(config.epgselection.infobar_prevtimeperiod.value)
            if timeperiod < 300:
                timeperiod += 60
                self['list'].setEpoch(timeperiod)
                config.epgselection.infobar_prevtimeperiod.setValue(timeperiod)
                self.moveTimeLines()
        elif number == 4:
            self.updEvent(-2)
        elif number == 5:
            now = time() - int(config.epg.histminutes.value) * 60
            self.ask_time = now - now % (int(config.epgselection.infobar_roundto.value) * 60)
            self['list'].resetOffset()
            self['list'].fillGraphEPG(None, self.ask_time)
            self.moveTimeLines(True)
        elif number == 6:
            self.updEvent(+2)
        elif number == 8:
            self.nextPage()
        elif number == 9:
            basetime = localtime(self['list'].getTimeBase())
            basetime = (basetime[0], basetime[1], basetime[2], int(config.epgselection.infobar_primetimehour.value), int(config.epgselection.infobar_primetimemins.value), 0, basetime[6], basetime[7], basetime[8])
            self.ask_time = mktime(basetime)
            if self.ask_time + 3600 < time():
                self.ask_time += 86400
            self['list'].resetOffset()
            self['list'].fillGraphEPG(None, self.ask_time)
            self.moveTimeLines(True)
        elif number == 0:
            self.toTop()
            now = time() - int(config.epg.histminutes.value) * 60
            self.ask_time = now - now % (int(config.epgselection.infobar_roundto.value) * 60)
            self['list'].resetOffset()
            self['list'].fillGraphEPG(None, self.ask_time)
            self.moveTimeLines()
    else:
        # Number zap: accumulate digits, preview the matching service
        # name and fire after four digits (or via the 5s timer).
        self.zapnumberstarted = True
        self.NumberZapTimer.start(5000, True)
        if not self.NumberZapField:
            self.NumberZapField = str(number)
        else:
            self.NumberZapField += str(number)
        self.handleServiceName()
        self["number"].setText(self.zaptoservicename+'\n'+self.NumberZapField)
        self["number"].show()
        if len(self.NumberZapField) >= 4:
            self.dozumberzap()
def dozumberzap(self):
    """Finish number-zap entry and zap to the resolved service.

    (Name keeps the historical typo — callers elsewhere use it.)
    """
    self.zapnumberstarted = False
    self.numberEntered(self.service, self.bouquet)
def handleServiceName(self):
    """Resolve the digits typed so far to (service, bouquet) and cache
    the service name for the on-screen number-zap preview."""
    if self.searchNumber:
        self.service, self.bouquet = self.searchNumber(int(self.NumberZapField))
        self.zaptoservicename = ServiceReference(self.service).getServiceName()
def numberEntered(self, service = None, bouquet = None):
    """Zap to `service` in `bouquet` if a service was resolved."""
    if service is not None:
        self.zapToNumber(service, bouquet)
def searchNumberHelper(self, serviceHandler, num, bouquet):
    """Scan one bouquet for the service carrying channel number `num`.

    Returns the matching service iterator, or None when the bouquet
    cannot be listed or no service has that number.
    """
    channels = serviceHandler.list(bouquet)
    if channels is None:
        return None
    candidate = channels.getNext()
    while candidate.valid():
        if candidate.getChannelNum() == num:
            return candidate
        candidate = channels.getNext()
    return None
def searchNumber(self, number):
bouquet = self.servicelist.getRoot()
service = None
serviceHandler = eServiceCenter.getInstance()
service = self.searchNumberHelper(serviceHandler, number, bouquet)
if config.usage.multibouquet.value:
service = self.searchNumberHelper(serviceHandler, number, bouquet)
if service is None:
bouquet = self.servicelist.bouquet_root
bouquetlist = serviceHandler.list(bouquet)
if bouquetlist is not None:
bouquet = bouquetlist.getNext()
while bouquet.valid():
if bouquet.flags & eServiceReference.isDirectory:
service = self.searchNumberHelper(serviceHandler, number, bouquet)
if service is not None:
playable = not service.flags & (eServiceReference.isMarker | eServiceReference.isDirectory) or service.flags & eServiceReference.isNumberedMarker
if not playable:
service = None
break
if config.usage.alternative_number_mode.value:
break
bouquet = bouquetlist.getNext()
return service, bouquet
def zapToNumber(self, service, bouquet):
    """Select `service` in `bouquet`, clear the number-zap overlay and
    rebuild the EPG around the new selection."""
    self["number"].hide()
    self.NumberZapField = None
    self.CurrBouquet = bouquet
    self.CurrService = service
    if service is not None:
        self.setServicelistSelection(bouquet, service)
    self.onCreate()
class SingleEPG(EPGSelection):
    """Convenience wrapper: an EPGSelection fixed to single-service
    mode, reusing the EPGSelection skin."""
    def __init__(self, session, service, EPGtype="single"):
        EPGSelection.__init__(self, session, service=service, EPGtype=EPGtype)
        self.skinName = 'EPGSelection'
|
ellxc/piperbot
|
plugins/weather.py
|
# Get weather data from various online sources
# -*- coding: utf-8 -*-
import requests
from wrappers import *
@plugin
class yweather:
@command("weather")
def weather(self, message):
    """Get the current conditions in a given location, from the Yahoo! Weather Service.

    message.data holds the place name; replies with a formatted summary
    on success, or with the error string returned by get_yahoo_weather.
    """
    w = self.get_yahoo_weather(message.data)
    if isinstance(w, dict):
        return message.reply(data=w,
                             text="Weather for {0[city]}, {0[country]}: {0[condition]}, {0[temperature]}. Wind Speed: {0[wind_speed]} ({0[wind_direction]}), Wind Chill: {0[wind_chill]}. Visibility {0[visibility]}. High Temp: {0[high]}°C, Low Temp: {0[low]}°C. Sunrise: {0[sunrise]}, Sunset: {0[sunset]}.".format(w)
                             )
    else:
        # get_yahoo_weather returned an error string.
        return message.reply(data=w, text=w)
@command("forecast")
def forecast(self, message):
    """Get the 5 day forecast for a given location, from the Yahoo! Weather Service.

    message.data holds the place name; replies with one summary segment
    per forecast day, or with the error string on failure.
    """
    w = self.get_yahoo_weather(message.data)
    if isinstance(w, dict):
        return message.reply(data=w['forecast'], text="; ".join(["{0[day]}: {0[condition]}. High: {0[high]}, Low: {0[low]}.".format(x) for x in w['forecast']]))
    else:
        # get_yahoo_weather returned an error string.
        return message.reply(data=w, text=w)
def get_yahoo_weather(self, place):
if not place:
raise Exception("You must provide a place name.")
# Use Yahoo's yql to build the query
url = 'https://query.yahooapis.com/v1/public/yql?q=select * from weather.forecast where woeid in(select woeid from geo.places(1) where text="' + place + '") and u="c"&format=json'
# Fetch the results
r = requests.get(url)
json = r.json()
result = json['query']['results']
if not result:
return "No weather could be found for " + place + "."
# Read the pertinant parts of the result, and format them nicely.
channel = result['channel']
city = channel['location']['city']
country = channel['location']['country']
region = channel['location']['region']
high = channel['item']['forecast'][0]['high']
low = channel['item']['forecast'][0]['low']
# There's a bug in the weather API where windchill is reported as "feels like" in farenheight.
feelsLike = (float(channel['wind']['chill']) - 32) / 1.8
chill = feelsLike - float(channel['item']['condition']['temp'])
windChill = "{0:.2f}°{1}".format(chill, channel['units']['temperature'])
windDir = "{0:03d}deg".format(int(channel['wind']['direction']))
windSpeed = "{0} {1}".format(channel['wind']['speed'], channel['units']['speed'])
humidity = "{0}%".format(channel['atmosphere']['humidity'])
pressure = "{0}{1}".format(channel['atmosphere']['pressure'], channel['units']['pressure'])
rising = channel['atmosphere']['rising']
visibility = "{0}{1}".format(channel['atmosphere']['visibility'], channel['units']['distance'])
sunrise = channel['astronomy']['sunrise']
sunset = channel['astronomy']['sunset']
condition = channel['item']['condition']['text']
temperature = "{0}°{1}".format(channel['item']['condition']['temp'], channel['units']['temperature'])
forecast = []
for pred in channel['item']['forecast']:
c = {"day": pred['day'],
"condition": pred['text'],
"high": "{0}°{1}".format(pred['high'], channel['units']['temperature']),
"low": "{0}°{1}".format(pred['low'], channel['units']['temperature'])}
forecast.append(c)
return {"city":city,
"country":country,
"region":region,
"high":high,
"low":low,
"temperature": temperature,
"wind_chill":windChill,
"wind_direction":windDir,
"wind_speed":windSpeed,
"humidity":humidity,
"pressure":pressure,
"rising":rising,
"visibility":visibility,
"sunrise":sunrise,
"sunset":sunset,
"condition":condition,
"forecast":forecast
}
@plugin
class pollen:
    """IRC bot command reporting the pollen index for a location."""

    @command("pollen")
    def pollen(self, message):
        """Get the pollen index for a given location
        """
        if not message:
            raise Exception("You must provide a place name.")
        # Use Yahoo's yql to build the query
        yurl = 'https://query.yahooapis.com/v1/public/yql?q=select woeid from geo.places(1) where text = "' + message.data + '"&format=json'
        # Fetch the results
        r = requests.get(yurl)
        json = r.json()
        if not json['query']['results']:
            # BUG FIX: this branch referenced an undefined name `place`
            # (NameError); the location text lives in message.data.
            return "Could not find " + message.data + "."
        woeid = json['query']['results']['place']['woeid']
        print(woeid)  # debug leftover, kept to preserve behavior
        purl = "https://pollencheck.p.mashape.com/api/1/forecasts/" + woeid
        # SECURITY NOTE(review): API key is hard-coded in source; it should
        # be moved to configuration and this key rotated.
        headers = {
            "X-Mashape-Key": "O6cwEp209Jmsh614NhNE6DpXIUKhp1npOMrjsnvWzdpgHYgzob",
            "Accept": "application/json"
        }
        pollen_data = requests.get(purl, headers=headers)
        p_json = pollen_data.json()
        if not p_json:
            raise Exception("Could not get data for '" + message.data + "', try a large city.")
        return message.reply(data=p_json, text="Total pollen count: {0[maxLevel]}".format(p_json['periods'][0]['combined']))
@plugin
class forecast_io:
    """IRC bot commands backed by the forecast.io API, with Yahoo geocoding."""

    @command("whereis")
    def whereis(self, message):
        """Get the latitude and longitdue of a given place
        """
        if not message:
            raise Exception("You must provide a place name.")
        # latlong returns a dict on success, an error string otherwise.
        ll = self.latlong(message.data)
        if isinstance(ll, dict):
            return message.reply(data=ll, text="Latitude: {}, Longitude: {}".format(ll['latitude'], ll['longitude']))
        else:
            return message.reply(data=ll, text=ll)

    @command("condition")
    def condition(self, message):
        """Get the current weather using the https://developer.forecast.io/docs/v2 API.
        """
        if not message:
            raise Exception("You must provide a place name.")
        w = self.get_forecast_io_weather(message.data)
        if isinstance(w, dict):
            return message.reply(data=w,
                text="Current condition for {1}: {0[summary]} P({0[precipProbability]}) probability of precipitation. \
{0[temperature]}°C, feels like {0[apparentTemperature]}°C. Dew Point: {0[dewPoint]}°C. \
Humidity: {0[humidity]}. Wind Speed: {0[windSpeed]}mph bearing {0[windBearing]:03d}. \
Cloud Cover: {0[cloudCover]}. Pressure: {0[pressure]}mb. Ozone: {0[ozone]}.".format(w['currently'], message.data))
        else:
            return message.reply(data=w, text=w)

    def latlong(self, place):
        # Resolve *place* to a centroid dict via Yahoo's yql geo lookup;
        # returns an error string when the place is unknown.
        if not place:
            raise Exception("You must provide a place name.")
        url = 'https://query.yahooapis.com/v1/public/yql?q=select centroid from geo.places(1) where text = "' + place + '"&format=json'
        # Fetch the results
        r = requests.get(url)
        json = r.json()
        if not json['query']['results']:
            return "Could not find " + place + "."
        return json['query']['results']['place']['centroid']

    def get_forecast_io_weather(self, place):
        # Fetch the raw forecast.io JSON for *place*; passes through the
        # error string from latlong() when geocoding fails.
        if not place:
            raise Exception("You must provide a place name.")
        ll = self.latlong(place)
        # TODO: yield an error
        if not isinstance(ll, dict):
            return ll
        # Build a forecast IO request string. TODO: Remove API key and regenerate it
        url = 'https://api.forecast.io/forecast/da05193c059f48ff118de841ccb7cd92/' + ll['latitude'] + "," + ll['longitude'] + "?units=uk"
        # Fetch the results
        r = requests.get(url)
        json = r.json()
        return json
|
heolin123/day_or_night
|
mainapp/migrations/0008_auto_20151023_1317.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.utils.timezone
class Migration(migrations.Migration):
    # Adds two columns to DocumentClassification: a creation timestamp
    # (defaulting to "now") and the submitting client's IP address.

    dependencies = [
        ('mainapp', '0007_auto_20151023_1012'),
    ]

    operations = [
        migrations.AddField(
            model_name='documentclassification',
            name='created_at',
            field=models.DateTimeField(default=django.utils.timezone.now),
        ),
        migrations.AddField(
            model_name='documentclassification',
            name='ip',
            # NOTE: b'' default — this migration was authored under Python 2.
            field=models.CharField(default=b'', max_length=100),
        ),
    ]
|
tdjordan/tortoisegit
|
tracelog.py
|
#
# A PyGtk-based Python Trace Collector window
#
# Copyright (C) 2007 TK Soh <teekaysoh@gmail.com>
#
import pygtk
pygtk.require("2.0")
import gtk
import gobject
import pango
import threading
import Queue
import win32trace
try:
    from gitgtk.gitlib import toutf
except ImportError:
    # Fallback when running outside the gitgtk application: convert text
    # from the platform's preferred locale encoding to UTF-8.
    import locale
    _encoding = locale.getpreferredencoding()

    def toutf(s):
        # 'replace' avoids raising on undecodable bytes.
        return s.decode(_encoding, 'replace').encode('utf-8')
class TraceLog():
    """GTK window that collects win32trace output and shows it live.

    A background thread polls win32trace and pushes messages onto a
    queue; a 10 ms GTK timeout drains the queue on the UI thread (GTK
    widgets must only be touched from the main thread).
    """

    def __init__(self):
        self.window = gtk.Window(gtk.WINDOW_TOPLEVEL)
        self.window.set_title("Python Trace Collector")
        # construct window
        self.window.set_default_size(700, 400)
        self.main_area = gtk.VBox()
        self.window.add(self.main_area)
        # mimic standard dialog widgets
        self.action_area = gtk.HBox()
        self.main_area.pack_end(self.action_area, False, False, 5)
        sep = gtk.HSeparator()
        self.main_area.pack_end(sep, False, False, 0)
        self.vbox = gtk.VBox()
        self.main_area.pack_end(self.vbox)
        # add python trace ouput window
        scrolledwindow = gtk.ScrolledWindow()
        scrolledwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
        self.textview = gtk.TextView(buffer=None)
        self.textview.set_editable(False)
        self.textview.modify_font(pango.FontDescription("Monospace"))
        scrolledwindow.add(self.textview)
        self.textview.set_editable(False)
        self.textbuffer = self.textview.get_buffer()
        self.vbox.pack_start(scrolledwindow, True, True)
        self.vbox.show_all()
        # add buttons
        self._button_quit = gtk.Button("Quit")
        self._button_quit.connect('clicked', self._on_ok_clicked)
        self.action_area.pack_end(self._button_quit, False, False, 5)
        self._button_clear = gtk.Button("Clear")
        self._button_clear.connect('clicked', self._on_clear_clicked)
        self.action_area.pack_end(self._button_clear, False, False, 5)
        # add assorted window event handlers
        self.window.connect('map_event', self._on_window_map_event)
        self.window.connect('delete_event', self._on_window_close_clicked)

    def _on_ok_clicked(self, button):
        self._stop_read_thread()
        gtk.main_quit()

    def _on_clear_clicked(self, button):
        # Clearing is a non-appending write of the empty string.
        self.write("", False)

    def _on_window_close_clicked(self, event, param):
        self._stop_read_thread()
        gtk.main_quit()

    def _on_window_map_event(self, event, param):
        # Start collecting only once the window is actually mapped.
        self._begin_trace()

    def _begin_trace(self):
        self.queue = Queue.Queue()
        win32trace.InitRead()
        self.write("Collecting Python Trace Output...\n")
        # Drain the queue from the GTK main loop every 10 ms.
        gobject.timeout_add(10, self._process_queue)
        self._start_read_thread()

    def _start_read_thread(self):
        self._read_trace = True
        self.thread1 = threading.Thread(target=self._do_read_trace)
        self.thread1.start()

    def _stop_read_thread(self):
        self._read_trace = False
        # Wait for the worker thread to exit; avoids an "Unhandled
        # exception in thread" at interpreter shutdown.
        self.thread1.join()

    def _process_queue(self):
        """
        Handle all the messages currently in the queue (if any).
        """
        while self.queue.qsize():
            try:
                msg = self.queue.get(0)
                self.write(msg)
            except Queue.Empty:
                pass
        # Returning True keeps the gobject timeout scheduled.
        return True

    def _do_read_trace(self):
        """
        print buffer collected in win32trace
        """
        while self._read_trace:
            msg = win32trace.read()
            if msg:
                self.queue.put(msg)

    def write(self, msg, append=True):
        # All text is normalized to UTF-8 before insertion.
        msg = toutf(msg)
        if append:
            enditer = self.textbuffer.get_end_iter()
            self.textbuffer.insert(enditer, msg)
        else:
            self.textbuffer.set_text(msg)

    def main(self):
        self.window.show_all()
        gtk.main()
def run():
    """Create the trace-collector window and enter the GTK main loop."""
    TraceLog().main()


if __name__ == "__main__":
    run()
|
yast/yast-python-bindings
|
examples/HCenter3.py
|
# encoding: utf-8
from yast import import_module
import_module('UI')
from yast import *
class HCenter3Client:
    # yast UI demo: a default-size dialog holding three vertically
    # centered, vertically stretchable push buttons; waits for one user
    # input event and closes.
    def main(self):
        UI.OpenDialog(
            Opt("defaultsize"),
            VBox(
                VCenter(PushButton(Opt("vstretch"), "Button 1")),
                VCenter(PushButton(Opt("vstretch"), "Button 2")),
                VCenter(PushButton(Opt("vstretch"), "Button 3"))
            )
        )
        UI.UserInput()
        UI.CloseDialog()


# Run the demo at import time, matching the other yast examples.
HCenter3Client().main()
|
thomasvdv/flightbit
|
forecast/keys_iterator.py
|
import traceback
import sys
from gribapi import *
INPUT = 'rap_130_20120822_2200_001.grb2'
VERBOSE = 1 # verbose error reporting
def example():
    # NOTE: Python 2 source (print statement below).
    # Iterate every GRIB message in INPUT and dump the keys exposed by
    # the 'ls' namespace via a keys iterator.
    f = open(INPUT)
    while 1:
        gid = grib_new_from_file(f)
        if gid is None: break
        iterid = grib_keys_iterator_new(gid, 'ls')
        # Different types of keys can be skipped
        # grib_skip_computed(iterid)
        # grib_skip_coded(iterid)
        # grib_skip_edition_specific(iterid)
        # grib_skip_duplicates(iterid)
        # grib_skip_read_only(iterid)
        # grib_skip_function(iterid)
        while grib_keys_iterator_next(iterid):
            keyname = grib_keys_iterator_get_name(iterid)
            keyval = grib_get_string(iterid, keyname)
            print "%s = %s" % (keyname, keyval)
        # Release the iterator and the message handle before moving on.
        grib_keys_iterator_delete(iterid)
        grib_release(gid)
    f.close()
def main():
    # NOTE: Python 2 source (except/print syntax below).
    # Run the example, translating GRIB errors into exit status 1.
    try:
        example()
    except GribInternalError, err:
        if VERBOSE:
            traceback.print_exc(file=sys.stderr)
        else:
            print >> sys.stderr, err.msg
        return 1


if __name__ == "__main__":
    sys.exit(main())
|
ljx0305/ice
|
python/test/Ice/facets/TestI.py
|
# **********************************************************************
#
# Copyright (c) 2003-2017 ZeroC, Inc. All rights reserved.
#
# This copy of Ice is licensed to you under the terms described in the
# ICE_LICENSE file included in this distribution.
#
# **********************************************************************
import Test
class AI(Test.A):
    # Servant implementing the Test.A interface; returns its own name.
    def callA(self, current=None):
        return "A"
class BI(Test.B, AI):
    # Servant for Test.B; inherits callA from AI.
    def callB(self, current=None):
        return "B"
class CI(Test.C, AI):
    # Servant for Test.C; inherits callA from AI.
    def callC(self, current=None):
        return "C"
class DI(Test.D, BI, CI):
    # Servant for Test.D; inherits callA/callB/callC via BI and CI.
    def callD(self, current=None):
        return "D"
class EI(Test.E):
    # Servant implementing the Test.E interface.
    def callE(self, current=None):
        return "E"
class FI(Test.F, EI):
    # Servant for Test.F; inherits callE from EI.
    def callF(self, current=None):
        return "F"
class GI(Test.G):
    # Servant for Test.G; also exposes shutdown() so the test client can
    # stop the server through the stored communicator.
    def __init__(self, communicator):
        self._communicator = communicator

    def shutdown(self, current=None):
        self._communicator.shutdown()

    def callG(self, current=None):
        return "G"
class HI(Test.H, GI):
    # Servant for Test.H; reuses GI's communicator handling/shutdown.
    def __init__(self, communicator):
        GI.__init__(self, communicator)

    def callH(self, current=None):
        return "H"
|
en0/PivotalPoker
|
src/utils/async_job.py
|
__author__ = 'en0'
from http import context
from uuid import uuid4
from redis import Redis
from gevent import spawn
from functools import wraps
class AsyncJob(object):
    """Decorator that runs *target* as a background gevent job.

    The wrapped function ``fn`` is called synchronously to produce the
    positional arguments for *target*; *target* itself then runs in a
    spawned greenlet.  Progress is tracked in Redis:

    - ``jobs:<id>:status`` — 202 while running, 200 on success, 500 on
      failure (HTTP-style codes).
    - ``jobs:<id>`` — the target's return value on success.

    Both keys expire after one hour.  The wrapper returns
    ``{'job': <id>}`` immediately so callers can poll.
    """

    def __init__(self, target):
        # context.db must already hold a live Redis connection.
        assert isinstance(context.db, Redis)
        self._target = target
        self._db = context.db

    def __call__(self, fn):
        # BUG FIX: the original called wraps(fn) and discarded the
        # result, so the wrapper never inherited fn's metadata.
        @wraps(fn)
        def _wrapper(*args, **kwargs):
            _args = fn(*args, **kwargs)
            _job_id = str(uuid4())
            _key = "jobs:{0}".format(_job_id)
            _status_key = "jobs:{0}:status".format(_job_id)
            _expire_time = 3600
            # Mark the job as accepted before handing it to the worker.
            self._db.set(_status_key, 202)
            self._db.expire(_status_key, _expire_time)

            def task():
                try:
                    data = self._target(*_args)
                except Exception:  # narrowed from a bare except
                    self._db.set(_status_key, 500)
                else:
                    self._db.set(_key, data)
                    self._db.set(_status_key, 200)
                    self._db.expire(_key, _expire_time)
                # BUG FIX: Redis SET discards any existing TTL, so the
                # status key must be re-expired on BOTH outcomes —
                # previously a failed job's 500 marker lived forever.
                self._db.expire(_status_key, _expire_time)

            spawn(task)
            return dict(job=_job_id)

        return _wrapper
|
jrbl/invenio
|
modules/bibauthorid/lib/bibauthorid_prob_matrix.py
|
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2011, 2012 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
import bibauthorid_config as bconfig
from bibauthorid_comparison import compare_bibrefrecs
from bibauthorid_comparison import clear_all_caches as clear_comparison_caches
from bibauthorid_backinterface import bib_matrix
from bibauthorid_backinterface import get_sql_time
from bibauthorid_backinterface import filter_modified_record_ids
from bibauthorid_general_utils import update_status \
, update_status_final
if bconfig.DEBUG_CHECKS:
    def _debug_is_eq(a, b):
        # Absolute-tolerance float comparison: |a - b| < 1e-2.
        tolerance = 1e-2
        return abs(a - b) < tolerance

    def _debug_is_eq_v(lhs, rhs):
        # Strings must match exactly; 2-tuples match element-wise within
        # the tolerance above; any other pairing counts as unequal.
        if isinstance(lhs, str) and isinstance(rhs, str):
            return lhs == rhs
        if isinstance(lhs, tuple) and isinstance(rhs, tuple):
            return _debug_is_eq(lhs[0], rhs[0]) and _debug_is_eq(lhs[1], rhs[1])
        return False
class probability_matrix:
    '''
    This class contains and maintains the comparison
    between all virtual authors. It is able to write
    and read from the database and update the results.
    '''

    def __init__(self, cluster_set, use_cache=False, save_cache=False):
        '''
        Constructs probability matrix. If use_cache is true, it will
        try to load old computations from the database. If save cache
        is true it will save the current results into the database.
        @param cluster_set: A cluster set object, used to initialize
        the matrix.
        '''
        def check_for_cleaning(cur_calc):
            # Periodically drop the comparison caches to bound memory.
            if cur_calc % 10000000 == 0:
                clear_comparison_caches()

        self._bib_matrix = bib_matrix(cluster_set)
        old_matrix = bib_matrix()

        # Expected number of unordered pairs, used only for the progress
        # indicator; clamped to 1 to avoid division by zero.
        ncl = sum(len(cl.bibs) for cl in cluster_set.clusters)
        expected = ((ncl * (ncl - 1)) / 2)
        if expected == 0:
            expected = 1

        # Cached values are reusable only for bibs whose records have not
        # been modified since the cached matrix was created.
        if use_cache and old_matrix.load(cluster_set.last_name):
            cached_bibs = set(filter_modified_record_ids(
                old_matrix.get_keys(),
                old_matrix.creation_time))
        else:
            cached_bibs = set()

        if save_cache:
            creation_time = get_sql_time()

        # cur_calc counts fresh comparisons, opti counts cache hits.
        cur_calc, opti = 0, 0
        for cl1 in cluster_set.clusters:
            update_status((float(opti) + cur_calc) / expected, "Prob matrix: calc %d, opti %d." % (cur_calc, opti))
            for cl2 in cluster_set.clusters:
                # id() ordering visits each unordered cluster pair once;
                # mutually-exclusive ("hating") clusters are skipped.
                if id(cl1) < id(cl2) and not cl1.hates(cl2):
                    for bib1 in cl1.bibs:
                        for bib2 in cl2.bibs:
                            if bib1 in cached_bibs and bib2 in cached_bibs:
                                val = old_matrix[bib1, bib2]
                                if not val:
                                    cur_calc += 1
                                    check_for_cleaning(cur_calc)
                                    val = compare_bibrefrecs(bib1, bib2)
                                else:
                                    opti += 1
                                    if bconfig.DEBUG_CHECKS:
                                        # Cached value must agree with a fresh computation.
                                        assert _debug_is_eq_v(val, compare_bibrefrecs(bib1, bib2))
                            else:
                                cur_calc += 1
                                check_for_cleaning(cur_calc)
                                val = compare_bibrefrecs(bib1, bib2)
                            self._bib_matrix[bib1, bib2] = val

        clear_comparison_caches()
        if save_cache:
            update_status(1., "saving...")
            self._bib_matrix.store(cluster_set.last_name, creation_time)
        update_status_final("Matrix done. %d calc, %d opt." % (cur_calc, opti))

    def __getitem__(self, bibs):
        # bibs is a 2-sequence of bib identifiers.
        return self._bib_matrix[bibs[0], bibs[1]]
|
deralexxx/maltego-viper
|
src/viper/transforms/common/entities.py
|
#!/usr/bin/env python
from canari.maltego.message import Entity, EntityField, EntityFieldType, MatchingRule
__author__ = 'jaegeral'
__copyright__ = 'Copyright 2014, Viper Project'
__credits__ = []
__license__ = 'GPL'
__version__ = '0.1'
__maintainer__ = 'jaegeral'
__email__ = 'mail@alexanderjaeger.de'
__status__ = 'Development'

# Public API of this module.
# BUG FIX: the last entry was 'myViperEntity', which does not exist — the
# class defined below is MyViperEntity — so `from entities import *`
# raised AttributeError.
__all__ = [
    'viperentity',
    'viperhash',
    'vipertag',
    'vipername',
    'viperfile',
    'MyViperEntity'
]
class viperentity(Entity):
    # Base Maltego entity for the Viper namespace; all entities below share it.
    _namespace_ = 'viper'
# Hash entity; carries a 'hashtype' field displayed as "Hash Type".
@EntityField(name='hashtype', propname='hashtype', displayname='Hash Type')
class viperhash(viperentity):
    pass
#TODO: Remove
class MyViperEntity(viperentity):
    pass
class viperfile(viperentity):
    # File entity (no extra fields).
    pass
class vipername(viperentity):
    # Name entity (no extra fields).
    pass
class vipertag(viperentity):
    # Tag entity (no extra fields).
    pass
|
robwebset/script.ebooks
|
resources/lib/kiehinen/ebook.py
|
from struct import unpack, pack, calcsize
from mobi_languages import LANGUAGES
from lz77 import uncompress
def LOG(*args):
    # Logging stub: the rest of this module calls LOG(level, message);
    # replace this body to route output somewhere useful.
    pass
# MOBI header layout for record 0: (field name, byte offset, struct format).
# All values are read big-endian (see Book.__init__).
MOBI_HDR_FIELDS = (
    ("id", 16, "4s"),
    ("header_len", 20, "I"),
    ("mobi_type", 24, "I"),
    ("encoding", 28, "I"),
    ("UID", 32, "I"),
    ("generator_version", 36, "I"),
    ("reserved", 40, "40s"),
    ("first_nonbook_idx", 80, "I"),
    ("full_name_offs", 84, "I"),
    ("full_name_len", 88, "I"),
    ("locale_highbytes", 92, "H"),
    ("locale_country", 94, "B"),
    ("locale_language", 95, "B"),
    ("input_lang", 96, "I"),
    ("output_lang", 100, "I"),
    ("format_version", 104, "I"),
    ("first_image_idx", 108, "I"),
    ("huff/cdic_record", 112, "I"),
    ("huff/cdic_count", 116, "I"),
    ("datp_record", 120, "I"),
    ("datp_count", 124, "I"),
    ("exth_flags", 128, "I"),
    ("unknowni@132", 132, "32s"),
    ("unknown@164", 164, "I"),
    ("drm_offs", 168, "I"),
    ("drm_count", 172, "I"),
    ("drm_size", 176, "I"),
    ("drm_flags", 180, "I"),
    ("unknown@184", 184, "I"),
    ("unknown@188", 188, "I"),
    ("unknown@192", 192, "H"),
    ("last_image_record", 194, "H"),
    ("unknown@196", 196, "I"),
    ("fcis_record", 200, "I"),
    ("unknown@204", 204, "I"),
    ("flis_record", 208, "I"),
    ("unknown@212", 212, "I"),
    ("extra_data_flags", 242, "H")
)
EXTH_FMT = ">4x2I"
'''4x = "EXTH", I = hlen, I = record count'''
# Known EXTH metadata record types, keyed by numeric record id.
EXTH_RECORD_TYPES = {
    1: 'drm server id',
    2: 'drm commerce id',
    3: 'drm ebookbase book id',
    100: 'author',  # list
    101: 'publisher',  # list
    102: 'imprint',
    103: 'description',
    104: 'isbn',  # list
    105: 'subject',  # list
    106: 'publication date',
    107: 'review',
    108: 'contributor',  # list
    109: 'rights',
    110: 'subjectcode',  # list
    111: 'type',
    112: 'source',
    113: 'asin',
    114: 'version number',  # int
    115: 'sample',  # int (or bool)?
    116: 'start reading',
    117: 'adult',
    118: 'retail price',
    119: 'retail price currency',
    201: 'cover offset',  # int
    202: 'thumbnail offset',  # int
    203: 'has fake cover',  # bool?
    208: 'watermark',
    209: 'tamper proof keys',
    401: 'clipping limit',  # int
    402: 'publisher limit',
    404: 'ttsflag',
    501: 'cde type',
    502: 'last update time',
    503: 'updated title'
}
PRC_HDRFMT = '>H2xIHHI'  # Compression,unused,Len,Count,Size,Pos
def parse_palmdb(filename):
    """Open *filename* as a Palm database via the ``palm`` module."""
    import palm
    return palm.Database(filename)
class Book:
    # Parses a Palm-format e-book (MOBI or older TEXtREAd) and exposes
    # title/author/language/description plus the raw records.
    # NOTE: Python 2 module — file data is handled as byte strings.
    def __init__(self, fn):
        self.filename = fn
        # Set some fields to defaults
        self.title = fn
        self.author = "??"
        self.language = "??"
        # Rob Addition: Description
        self.description = ""
        self.is_a_book = False
        # Read only the 68-byte PDB header to sniff the type field.
        f = open(fn)
        d = f.read(68)
        f.close()
        encodings = {
            1252: 'cp1252',
            65001: 'utf-8'
        }
        supported_types = ('BOOKMOBI', 'TEXtREAd')
        self.type = d[60:68]
        if self.type not in supported_types:
            LOG(1, "Unsupported file type %s" % (self.type))
            return None
        try:
            db = parse_palmdb(fn)
        except:
            # Unparseable database: leave is_a_book False and bail out.
            return None
        self.is_a_book = True
        # now we have a better guess at the title, use it for now
        self.title = db.name
        self.records = db.records
        rec0 = self.records[0].data
        #LOG(5,repr(rec0))
        if self.type == 'BOOKMOBI':
            LOG(3, "This is a MOBI book")
            # Decode record 0 field-by-field per MOBI_HDR_FIELDS, skipping
            # fields that fall outside the record or the declared header.
            self.mobi = {}
            for field, pos, fmt in MOBI_HDR_FIELDS:
                end = pos + calcsize(fmt)
                if (end > len(rec0) or
                    ("header_len" in self.mobi
                     and end > self.mobi["header_len"])):
                    continue
                LOG(4, "field: %s, fmt: %s, @ [%d:%d], data: %s" % (
                    field, fmt, pos, end, repr(rec0[pos:end])))
                (self.mobi[field], ) = unpack(">%s" % fmt, rec0[pos:end])
            LOG(3, "self.mobi: %s" % repr(self.mobi))
            # Get and decode the book name
            if self.mobi['locale_language'] in LANGUAGES:
                lang = LANGUAGES[self.mobi['locale_language']]
                if self.mobi['locale_country'] == 0:
                    LOG(2, "Book language: %s" % lang[0][1])
                    self.language = "%s (%s)" % (lang[0][1], lang[0][0])
                elif self.mobi['locale_country'] in lang:
                    country = lang[self.mobi['locale_country']]
                    LOG(2, "Book language is %s (%s)" % (
                        lang[0][1], country[1]))
                    self.language = "%s (%s-%s)" % (
                        lang[0][1],
                        lang[0][0],
                        country[0]
                    )
            # The full name lives at an offset/length given in the header.
            pos = self.mobi['full_name_offs']
            end = pos + self.mobi['full_name_len']
            self.title = rec0[pos:end].decode(encodings[self.mobi['encoding']])
            LOG(2, "Book name: %s" % self.title)
            if self.mobi['id'] != 'MOBI':
                LOG(0, "Mobi header missing!")
                return None
            if (0x40 & self.mobi['exth_flags']):  # check for EXTH
                self.exth = parse_exth(rec0, self.mobi['header_len'] + 16)
                LOG(3, "EXTH header: %s" % repr(self.exth))
                if 'author' in self.exth:
                    self.author = ' & '.join(self.exth['author'])
                else:
                    self.author = "n/a"
                self.rawdata = d
                if (('updated title' in self.exth) and
                    (type(self.exth['updated title']) is str)):
                    self.title = ' '.join(self.exth['updated title'])
                if 'description' in self.exth:
                    self.description = ' <P> '.join(self.exth['description'])
        elif self.type == 'TEXtREAd':
            LOG(2, "This is an older MOBI book")
            self.rawdata = d
            compression, data_len, rec_count, rec_size, pos = unpack(
                PRC_HDRFMT, rec0[:calcsize(PRC_HDRFMT)])
            LOG(3, "compression %d, data_len %d, rec_count %d, rec_size %d" %
                (compression, data_len, rec_count, rec_size))
            # compression == 2 means PalmDOC LZ77.
            if compression == 2:
                data = uncompress(self.records[1].data)
            else:
                data = self.records[1].data
            # Older books keep Dublin Core metadata as embedded markup.
            from BeautifulSoup import BeautifulSoup
            soup = BeautifulSoup(data)
            self.metadata = soup.fetch("dc-metadata")
            try:
                self.title = soup.fetch("dc:title")[0].getText()
                self.author = soup.fetch("dc:creator")[0].getText()
                self.language = soup.fetch("dc:language")[0].getText()
            except:
                self.title, self.author, self.language = ("Unknown", "Unknown",
                                                          "en-us")
            try:
                self.description = soup.fetch("dc:description")[0].getText()
            except:
                pass

    def to_html(self):
        # Concatenate the decompressed text records; for MOBI books the
        # text records end where the images start.
        last_idx = (
            self.mobi['first_image_idx'] if 'mobi' in self.__dict__ else -1)
        return ''.join([uncompress(x.data) for x in self.records[1:last_idx]])
def parse_exth(data, pos):
    # Parse the EXTH metadata block expected at *pos* inside record 0.
    # Returns a dict mapping known record names to lists of values
    # (unknown record ids are logged and skipped), or None when the
    # 'EXTH' signature is not exactly at *pos*.
    ret = {}
    n = 0
    if (pos != data.find('EXTH')):
        LOG(0, "EXTH header not found where it should be @%d" % pos)
        return None
    else:
        end = pos + calcsize(EXTH_FMT)
        (hlen, count) = unpack(EXTH_FMT, data[pos:end])
        LOG(4, "pos: %d, EXTH header len: %d, record count: %d" % (
            pos, hlen, count))
        pos = end
        while n < count:
            # Each record: >2I header (type, total length), then payload
            # of length l - 8.
            end = pos + calcsize(">2I")
            t, l = unpack(">2I", data[pos:end])
            v = data[end:pos + l]
            # A 4-byte payload is decoded as a big-endian unsigned int.
            if l - 8 == 4:
                v = unpack(">I", v)[0]
            if t in EXTH_RECORD_TYPES:
                rec = EXTH_RECORD_TYPES[t]
                LOG(4, "EXTH record '%s' @%d+%d: '%s'" % (
                    rec, pos, l - 8, v))
                if rec not in ret:
                    ret[rec] = [v]
                else:
                    ret[rec].append(v)
            else:
                LOG(4, "Found an unknown EXTH record type %d @%d+%d: '%s'" %
                    (t, pos, l - 8, repr(v)))
            pos += l
            n += 1
    return ret
|
ehabkost/virt-test
|
qemu/tests/multi_vms_file_transfer.py
|
import time, os, logging
from autotest.client import utils
from autotest.client.shared import error
from virttest import remote, utils_misc
@error.context_aware
def run_multi_vms_file_transfer(test, params, env):
    """
    Transfer a file back and forth between multi VMs for long time.

    1) Boot up two VMs .
    2) Create a large file by dd on host.
    3) Copy this file to VM1.
    4) Compare copied file's md5 with original file.
    5) Copy this file from VM1 to VM2.
    6) Compare copied file's md5 with original file.
    7) Copy this file from VM2 to VM1.
    8) Compare copied file's md5 with original file.
    9) Repeat step 5-8

    @param test: KVM test object.
    @param params: Dictionary with the test parameters.
    @param env: Dictionary with test environment.
    """
    def md5_check(session, orig_md5):
        # Compare the guest copy's md5 (via *session*) with the md5 of
        # the original host file; raises TestFail on mismatch.
        msg = "Compare copied file's md5 with original file."
        error.context(msg, logging.info)
        md5_cmd = "md5sum %s | awk '{print $1}'" % guest_path
        s, o = session.cmd_status_output(md5_cmd)
        if s:
            msg = "Fail to get md5 value from guest. Output is %s" % o
            raise error.TestError(msg)
        new_md5 = o.splitlines()[-1]
        if new_md5 != orig_md5:
            msg = "File changed after transfer host -> VM1. Original md5 value"
            msg += " is %s. Current md5 value is %s" % (orig_md5, new_md5)
            raise error.TestFail(msg)

    vm1 = env.get_vm(params["main_vm"])
    vm1.verify_alive()
    login_timeout = int(params.get("login_timeout", 360))
    vm2 = env.get_vm(params["vms"].split()[-1])
    vm2.verify_alive()
    session_vm1 = vm1.wait_for_login(timeout=login_timeout)
    session_vm2 = vm2.wait_for_login(timeout=login_timeout)

    transfer_timeout = int(params.get("transfer_timeout", 1000))
    username = params.get("username")
    password = params.get("password")
    port = int(params.get("file_transfer_port"))
    if (not port) or (not username) or (not password):
        raise error.TestError("Please set file_transfer_port, username,"
                              " password paramters for guest")
    tmp_dir = params.get("tmp_dir", "/tmp/")
    repeat_time = int(params.get("repeat_time", "10"))
    clean_cmd = params.get("clean_cmd", "rm -f")
    filesize = int(params.get("filesize", 4000))
    # The file is written in 10M blocks; guarantee at least one block.
    count = int(filesize / 10)
    if count == 0:
        count = 1

    host_path = os.path.join(tmp_dir, "tmp-%s" %
                             utils_misc.generate_random_string(8))
    cmd = "dd if=/dev/zero of=%s bs=10M count=%d" % (host_path, count)
    guest_path = (tmp_dir + "file_transfer-%s" %
                  utils_misc.generate_random_string(8))
    try:
        error.context("Creating %dMB file on host" % filesize, logging.info)
        utils.run(cmd)
        orig_md5 = utils.hash_file(host_path, method="md5")
        error.context("Transfering file host -> VM1, timeout: %ss" % \
                      transfer_timeout, logging.info)
        t_begin = time.time()
        vm1.copy_files_to(host_path, guest_path, timeout=transfer_timeout)
        t_end = time.time()
        throughput = filesize / (t_end - t_begin)
        logging.info("File transfer host -> VM1 succeed, "
                     "estimated throughput: %.2fMB/s", throughput)
        md5_check(session_vm1, orig_md5)

        ip_vm1 = vm1.get_address()
        ip_vm2 = vm2.get_address()
        for i in range(repeat_time):
            log_vm1 = os.path.join(test.debugdir, "remote_scp_to_vm1_%s.log" % i)
            log_vm2 = os.path.join(test.debugdir, "remote_scp_to_vm2_%s.log" % i)

            msg = "Transfering file VM1 -> VM2, timeout: %ss." % transfer_timeout
            msg += " Repeat: %s/%s" % (i + 1, repeat_time)
            error.context(msg, logging.info)
            t_begin = time.time()
            # BUG FIX: this copy (to VM2) used to log into log_vm1 and
            # log_vm2 was never used; each direction now gets its own log.
            # (The unused "s =" binding of the return value was dropped.)
            remote.scp_between_remotes(src=ip_vm1, dst=ip_vm2, port=port,
                                       s_passwd=password, d_passwd=password,
                                       s_name=username, d_name=username,
                                       s_path=guest_path, d_path=guest_path,
                                       timeout=transfer_timeout,
                                       log_filename=log_vm2)
            t_end = time.time()
            throughput = filesize / (t_end - t_begin)
            logging.info("File transfer VM1 -> VM2 succeed, "
                         "estimated throughput: %.2fMB/s", throughput)
            md5_check(session_vm2, orig_md5)
            # Remove VM1's copy so the return transfer proves VM2's data.
            session_vm1.cmd("rm -rf %s" % guest_path)

            msg = "Transfering file VM2 -> VM1, timeout: %ss." % transfer_timeout
            msg += " Repeat: %s/%s" % (i + 1, repeat_time)
            error.context(msg, logging.info)
            t_begin = time.time()
            remote.scp_between_remotes(src=ip_vm2, dst=ip_vm1, port=port,
                                       s_passwd=password, d_passwd=password,
                                       s_name=username, d_name=username,
                                       s_path=guest_path, d_path=guest_path,
                                       timeout=transfer_timeout,
                                       log_filename=log_vm1)
            t_end = time.time()
            throughput = filesize / (t_end - t_begin)
            logging.info("File transfer VM2 -> VM1 succeed, "
                         "estimated throughput: %.2fMB/s", throughput)
            md5_check(session_vm1, orig_md5)
            session_vm2.cmd("%s %s" % (clean_cmd, guest_path))
    finally:
        # Best-effort cleanup of both guests, the host file, and sessions.
        try:
            session_vm1.cmd("%s %s" % (clean_cmd, guest_path))
        except Exception:
            pass
        try:
            session_vm2.cmd("%s %s" % (clean_cmd, guest_path))
        except Exception:
            pass
        try:
            os.remove(host_path)
        except OSError:
            pass
        if session_vm1:
            session_vm1.close()
        if session_vm2:
            session_vm2.close()
|
repotvsupertuga/repo
|
plugin.program.jogosEmuladores/speedtest.py
|
# This code is licensed under The GNU General Public License version 2 (GPLv2)
# If you decide to fork this code please obey by the licensing rules.
#
# Thanks go to the-one who initially created the initial speedtest code in early 2014
# That code broke but it didn't take too much to fix it, if you get problems it's most likely
# down to the fact that you need to use another download link that plays nicely with XBMC/Kodi
import xbmc, xbmcplugin
import xbmcgui
import xbmcaddon
import urllib
import time
import os
import sys
import datetime
ADDON_ID = 'plugin.program.jogosEmuladores'
ADDON = xbmcaddon.Addon(id=ADDON_ID)
HOME = ADDON.getAddonInfo('path')
addon_name="Speed Test"
AddonTitle="[COLOR ghostwhite]Project X[/COLOR] [COLOR lightsteelblue]Wizard[/COLOR]"
# Module-level state updated by _pbhook() while a test download runs:
# peak speed seen (bytes/s) and bytes downloaded so far.
max_Bps = 0.0
currently_downloaded_bytes = 0.0
#-----------------------------------------------------------------------------------------------------------------
def download(url, dest, dp = None):
    # Download *url* to *dest*, driving (or creating) a progress dialog,
    # and return the elapsed wall-clock time in seconds.
    if not dp:
        dp = xbmcgui.DialogProgress()
        dp.create(AddonTitle,"Connecting to server",'[COLOR slategray][I]Testing your internet speed...[/I][/COLOR]', 'Please wait...')
        dp.update(0)
    start_time=time.time()
    try:
        # _pbhook updates the module-level speed counters per block.
        urllib.urlretrieve(url, dest, lambda nb, bs, fs: _pbhook(nb, bs, fs, dp, start_time))
    except:
        # Errors — including the "Cancelled" exception raised by _pbhook —
        # are swallowed; the caller only sees the elapsed time.
        pass
    return ( time.time() - start_time )
#-----------------------------------------------------------------------------------------------------------------
def _pbhook(numblocks, blocksize, filesize, dp, start_time):
    # urlretrieve progress callback: updates the module-level counters
    # (max_Bps, currently_downloaded_bytes) and the progress dialog, and
    # aborts the download by raising when the user cancels.
    global max_Bps
    global currently_downloaded_bytes
    try:
        percent = min(numblocks * blocksize * 100 / filesize, 100)
        currently_downloaded_bytes = float(numblocks) * blocksize
        currently_downloaded = currently_downloaded_bytes / (1024 * 1024)
        Bps_speed = currently_downloaded_bytes / (time.time() - start_time)
        if Bps_speed > 0:
            eta = (filesize - numblocks * blocksize) / Bps_speed
            if Bps_speed > max_Bps: max_Bps = Bps_speed
        else:
            eta = 0
        # NOTE(review): eta, kbps_speed, mbps_speed and mbs are computed
        # but never used (presumably leftovers from a richer dialog).
        kbps_speed = Bps_speed * 8 / 1024
        mbps_speed = kbps_speed / 1024
        total = float(filesize) / (1024 * 1024)
        mbs = '%.02f MB of %.02f MB' % (currently_downloaded, total)
        dp.update(percent)
    except:
        # On any failure (e.g. unknown filesize), report completion.
        currently_downloaded_bytes = float(filesize)
        percent = 100
        dp.update(percent)
    if dp.iscanceled():
        dp.close()
        # download() catches this; it just ends the transfer early.
        raise Exception("Cancelled")
#-----------------------------------------------------------------------------------------------------------------
def make_dir(mypath, dirname):
    '''Ensure *mypath* and *mypath/dirname* exist; return the subpath.'''
    import xbmcvfs

    def _ensure(path):
        # Create the directory tree, falling back to a single-level
        # mkdir when the recursive variant fails.
        if not xbmcvfs.exists(path):
            try:
                xbmcvfs.mkdirs(path)
            except:
                xbmcvfs.mkdir(path)

    _ensure(mypath)
    subpath = os.path.join(mypath, dirname)
    _ensure(subpath)
    return subpath
#-----------------------------------------------------------------------------------------------------------------
def GetEpochStr():
    """Return the current local time as a digits-only epoch string with
    millisecond precision (used to build unique temp file names)."""
    now = datetime.datetime.now()
    seconds = time.mktime(now.timetuple()) + (now.microsecond / 1000000.)
    # '%f' renders six decimal places; removing the dot and chopping the
    # final three digits leaves an integer string in milliseconds.
    digits = ('%f' % seconds).replace('.', '')
    return digits[:-3]
#-----------------------------------------------------------------------------------------------------------------
def runtest(url):
    """Download *url* once into the addon profile dir, delete the file,
    then show a dialog rating the connection for streaming purposes.

    Relies on download() having driven _pbhook(), which fills the module
    globals currently_downloaded_bytes and max_Bps.  NOTE(review): this
    file uses Python 2 print statements -- it targets Kodi's Py2
    interpreter.
    """
    addon_profile_path = xbmc.translatePath(ADDON.getAddonInfo('profile'))
    speed_test_files_dir = make_dir(addon_profile_path, 'speedtestfiles')
    # Unique file name per run so repeated tests cannot collide.
    speed_test_download_file = os.path.join(speed_test_files_dir, GetEpochStr() + '.speedtest')
    timetaken = download(url, speed_test_download_file)
    os.remove(speed_test_download_file)
    # Average and peak speeds in megabits per second.
    avgspeed = ((currently_downloaded_bytes / timetaken) * 8 / ( 1024 * 1024 ))
    maxspeed = (max_Bps * 8/(1024*1024))
    # Map the average speed (Mb/s) onto a human-readable verdict.
    if avgspeed < 2:
        livestreams = 'Very low quality streams may work.'
        onlinevids = 'Expect buffering, do not try HD.'
        rating = '[COLOR ghostwhite][B] Verdict: [I]Very Poor[/I] | Score: [COLOR slategray][I]1/10[/I][/B][/COLOR]'
    elif avgspeed < 2.5:
        livestreams = 'You should be ok for SD content only.'
        onlinevids = 'SD/DVD quality should be ok, do not try HD.'
        rating = '[COLOR ghostwhite][B][I]Poor[/I] | Score: [COLOR slategray][I]2/10[/I][/B][/COLOR]'
    elif avgspeed < 5:
        livestreams = 'Some HD streams may struggle, SD will be fine.'
        onlinevids = '720p will be fine but some 1080p may struggle.'
        rating = '[COLOR ghostwhite][B][I]OK[/I] | Score: [COLOR slategray][I]4/10[/I][/B][/COLOR]'
    elif avgspeed < 9:
        livestreams = 'All streams including HD should stream fine.'
        onlinevids = 'Movies (720p & 1080p) will stream fine but 3D and 4K will struggle.'
        rating = '[COLOR ghostwhite][B][I]Good[/I] | Score: [COLOR slategray][I]6/10[/I][/B][/COLOR]'
    elif avgspeed < 15:
        livestreams = 'All streams including HD should stream fine'
        onlinevids = 'Movies (720p & 1080p and 3D) will stream fine but 4K may struggle.'
        rating = '[COLOR ghostwhite][B][I]Very good[/I] | Score: [COLOR slategray][I]8/10[/I][/B][/COLOR]'
    else:
        livestreams = 'All streams including HD should stream fine'
        onlinevids = 'You can play all movies (720p, 1080p, 3D and 4K)'
        rating = '[COLOR ghostwhite][B][I]Excellent[/I] | Score: [COLOR slategray][I]10/10[/I][/B][/COLOR]'
    print "Average Speed: " + str(avgspeed)
    print "Max. Speed: " + str(maxspeed)
    dialog = xbmcgui.Dialog()
    ok = dialog.ok(
        '[COLOR lightsteelblue][B]Your Result:[/COLOR][/B] ' + rating,
#        '[COLOR blue]Duration:[/COLOR] %.02f secs' % timetaken,
        '[COLOR lightsteelblue][B]Live Streams:[/COLOR][/B] ' + livestreams,
        '[COLOR lightsteelblue][B]Movie Streams:[/COLOR][/B] ' + onlinevids,
        '[COLOR lightsteelblue][B]Duration:[/COLOR][/B] %.02f secs ' % timetaken + '[COLOR lightsteelblue][B]Average Speed:[/B][/COLOR] %.02f Mb/s ' % avgspeed + '[COLOR lightsteelblue][B]Max Speed:[/B][/COLOR] %.02f Mb/s ' % maxspeed,
#        '[COLOR blue]Maximum Speed:[/COLOR] %.02f Mb/s ' % maxspeed,
        )
|
badp/ganeti
|
test/py/ganeti.rapi.client_unittest.py
|
#!/usr/bin/python
#
# Copyright (C) 2010, 2011 Google Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
"""Script for unittesting the RAPI client module"""
import unittest
import warnings
import pycurl
from ganeti import opcodes
from ganeti import constants
from ganeti import http
from ganeti import serializer
from ganeti import utils
from ganeti import query
from ganeti import objects
from ganeti import rapi
from ganeti import errors
import ganeti.rapi.testutils
from ganeti.rapi import connector
from ganeti.rapi import rlib2
from ganeti.rapi import client
import testutils
# List of resource handlers which aren't used by the RAPI client; the
# coverage check over _used_handlers deliberately ignores these.
_KNOWN_UNUSED = set([
  rlib2.R_root,
  rlib2.R_2,
  ])
# Global variable for collecting used handlers; RapiMock.FetchResponse adds
# every dispatched handler class to it (initialised to a set elsewhere --
# outside this chunk; TODO confirm).
_used_handlers = None
class RapiMock(object):
  """In-process fake of a RAPI server connection.

  Resolves request paths to handler classes through the real URL mapper,
  records which handler was hit (into the module-global ``_used_handlers``
  set) and replies with canned ``(code, response)`` pairs queued via
  ``AddResponse()``.
  """
  def __init__(self):
    self._mapper = connector.Mapper()
    self._responses = []         # queued (code, body) replies, FIFO order
    self._last_handler = None    # handler instance of the last request
    self._last_req_data = None   # body of the last request
  def ResetResponses(self):
    """Drop all queued responses."""
    del self._responses[:]
  def AddResponse(self, response, code=200):
    """Queue a response; responses are served in the order added."""
    self._responses.insert(0, (code, response))
  def CountPending(self):
    """Return the number of queued, not yet served responses."""
    return len(self._responses)
  def GetLastHandler(self):
    """Return the handler instance created for the last request."""
    return self._last_handler
  def GetLastRequestData(self):
    """Return the request body of the last request."""
    return self._last_req_data
  def FetchResponse(self, path, method, headers, request_body):
    """Emulate the client's HTTP transport.

    Returns a (status code, headers placeholder, body) tuple; HTTP
    exceptions raised during dispatch are turned into error responses,
    mirroring a real server.  The second tuple element is intentionally
    NotImplemented (headers are unused by these tests).
    """
    self._last_req_data = request_body
    try:
      (handler_cls, items, args) = self._mapper.getController(path)
      # Record handler as used
      _used_handlers.add(handler_cls)
      self._last_handler = handler_cls(items, args, None)
      if not hasattr(self._last_handler, method.upper()):
        raise http.HttpNotImplemented(message="Method not implemented")
    except http.HttpException, ex:  # Py2 "except ..., name" syntax
      code = ex.code
      response = ex.message
    else:
      if not self._responses:
        raise Exception("No responses")
      (code, response) = self._responses.pop()
    return (code, NotImplemented, response)
class TestConstants(unittest.TestCase):
  """Checks that constants re-exported by the RAPI client stay in sync
  with their canonical definitions in ganeti.constants / rlib2."""
  def test(self):
    # Plain re-exports.
    self.assertEqual(client.GANETI_RAPI_PORT, constants.DEFAULT_RAPI_PORT)
    self.assertEqual(client.GANETI_RAPI_VERSION, constants.RAPI_VERSION)
    self.assertEqual(client.HTTP_APP_JSON, http.HTTP_APP_JSON)
    self.assertEqual(client._REQ_DATA_VERSION_FIELD, rlib2._REQ_DATA_VERSION)
    # Job status values.
    self.assertEqual(client.JOB_STATUS_QUEUED, constants.JOB_STATUS_QUEUED)
    self.assertEqual(client.JOB_STATUS_WAITING, constants.JOB_STATUS_WAITING)
    self.assertEqual(client.JOB_STATUS_CANCELING,
                     constants.JOB_STATUS_CANCELING)
    self.assertEqual(client.JOB_STATUS_RUNNING, constants.JOB_STATUS_RUNNING)
    self.assertEqual(client.JOB_STATUS_CANCELED, constants.JOB_STATUS_CANCELED)
    self.assertEqual(client.JOB_STATUS_SUCCESS, constants.JOB_STATUS_SUCCESS)
    self.assertEqual(client.JOB_STATUS_ERROR, constants.JOB_STATUS_ERROR)
    self.assertEqual(client.JOB_STATUS_PENDING, constants.JOBS_PENDING)
    self.assertEqual(client.JOB_STATUS_FINALIZED, constants.JOBS_FINALIZED)
    self.assertEqual(client.JOB_STATUS_ALL, constants.JOB_STATUS_ALL)
    # Node evacuation
    self.assertEqual(client.NODE_EVAC_PRI, constants.NODE_EVAC_PRI)
    self.assertEqual(client.NODE_EVAC_SEC, constants.NODE_EVAC_SEC)
    self.assertEqual(client.NODE_EVAC_ALL, constants.NODE_EVAC_ALL)
    # Legacy name
    self.assertEqual(client.JOB_STATUS_WAITLOCK, constants.JOB_STATUS_WAITING)
    # RAPI feature strings
    self.assertEqual(client._INST_CREATE_REQV1, rlib2._INST_CREATE_REQV1)
    self.assertEqual(client.INST_CREATE_REQV1, rlib2._INST_CREATE_REQV1)
    self.assertEqual(client._INST_REINSTALL_REQV1, rlib2._INST_REINSTALL_REQV1)
    self.assertEqual(client.INST_REINSTALL_REQV1, rlib2._INST_REINSTALL_REQV1)
    self.assertEqual(client._NODE_MIGRATE_REQV1, rlib2._NODE_MIGRATE_REQV1)
    self.assertEqual(client.NODE_MIGRATE_REQV1, rlib2._NODE_MIGRATE_REQV1)
    self.assertEqual(client._NODE_EVAC_RES1, rlib2._NODE_EVAC_RES1)
    self.assertEqual(client.NODE_EVAC_RES1, rlib2._NODE_EVAC_RES1)
  def testErrors(self):
    """Every ECODE_* constant must exist in both modules with equal value."""
    self.assertEqual(client.ECODE_ALL, errors.ECODE_ALL)
    # Make sure all error codes are in both RAPI client and errors module
    for name in filter(lambda s: (s.startswith("ECODE_") and s != "ECODE_ALL"),
                       dir(client)):
      value = getattr(client, name)
      self.assertEqual(value, getattr(errors, name))
      self.assertTrue(value in client.ECODE_ALL)
      self.assertTrue(value in errors.ECODE_ALL)
class RapiMockTest(unittest.TestCase):
  """Sanity checks for the RapiMock helper itself."""
  def test404(self):
    # Unknown path: the mapper raises, which becomes an HTTP 404.
    (code, _, body) = RapiMock().FetchResponse("/foo", "GET", None, None)
    self.assertEqual(code, 404)
    self.assertTrue(body is None)
  def test501(self):
    # Known path but unsupported method: 501 Not Implemented.
    (code, _, body) = RapiMock().FetchResponse("/version", "POST", None, None)
    self.assertEqual(code, 501)
    self.assertEqual(body, "Method not implemented")
  def test200(self):
    # Queued response is served for a valid path/method pair.
    rapi = RapiMock()
    rapi.AddResponse("2")
    (code, _, response) = rapi.FetchResponse("/version", "GET", None, None)
    self.assertEqual(200, code)
    self.assertEqual("2", response)
    # failUnless is the Py2-era spelling of assertTrue.
    self.failUnless(isinstance(rapi.GetLastHandler(), rlib2.R_version))
def _FakeNoSslPycurlVersion():
  """Fake pycurl.version_info() result: no SSL library compiled in."""
  # Note: incomplete version tuple
  ssl_library = None
  return (3, "7.16.0", 462848, "mysystem", 1581, ssl_library, 0)
def _FakeFancySslPycurlVersion():
  """Fake pycurl.version_info() result: unrecognised SSL implementation."""
  # Note: incomplete version tuple
  ssl_library = "FancySSL/1.2.3"
  return (3, "7.16.0", 462848, "mysystem", 1581, ssl_library, 0)
def _FakeOpenSslPycurlVersion():
  """Fake pycurl.version_info() result: curl linked against OpenSSL."""
  # Note: incomplete version tuple
  ssl_library = "OpenSSL/0.9.8c"
  return (2, "7.15.5", 462597, "othersystem", 668, ssl_library, 0)
def _FakeGnuTlsPycurlVersion():
  """Fake pycurl.version_info() result: curl linked against GnuTLS."""
  # Note: incomplete version tuple
  ssl_library = "GnuTLS/2.0.4"
  return (3, "7.18.0", 463360, "somesystem", 1581, ssl_library, 0)
class TestExtendedConfig(unittest.TestCase):
  """Tests for GanetiRapiClient construction and curl configuration.

  Bug fix: the last method was a second definition of
  ``testCertVerifyCapath``, which silently shadowed the real capath test
  above it, so that test never ran.  It actually exercises timeouts and
  is renamed ``testTimeouts`` so both tests execute.
  """

  def testAuth(self):
    """Basic-auth credentials must be passed through to curl."""
    cl = client.GanetiRapiClient("master.example.com",
                                 username="user", password="pw",
                                 curl_factory=lambda: rapi.testutils.FakeCurl(RapiMock()))
    curl = cl._CreateCurl()
    self.assertEqual(curl.getopt(pycurl.HTTPAUTH), pycurl.HTTPAUTH_BASIC)
    self.assertEqual(curl.getopt(pycurl.USERPWD), "user:pw")

  def testInvalidAuth(self):
    """Supplying only one of username/password must raise client.Error."""
    # No username
    self.assertRaises(client.Error, client.GanetiRapiClient,
                      "master-a.example.com", password="pw")
    # No password
    self.assertRaises(client.Error, client.GanetiRapiClient,
                      "master-b.example.com", username="user")

  def testCertVerifyInvalidCombinations(self):
    """The curl CA bundle excludes an explicit cafile/capath."""
    self.assertRaises(client.Error, client.GenericCurlConfig,
                      use_curl_cabundle=True, cafile="cert1.pem")
    self.assertRaises(client.Error, client.GenericCurlConfig,
                      use_curl_cabundle=True, capath="certs/")
    self.assertRaises(client.Error, client.GenericCurlConfig,
                      use_curl_cabundle=True,
                      cafile="cert1.pem", capath="certs/")

  def testProxySignalVerifyHostname(self):
    """Proxy, signal handling and hostname verification options."""
    for use_gnutls in [False, True]:
      if use_gnutls:
        pcverfn = _FakeGnuTlsPycurlVersion
      else:
        pcverfn = _FakeOpenSslPycurlVersion
      for proxy in ["", "http://127.0.0.1:1234"]:
        for use_signal in [False, True]:
          for verify_hostname in [False, True]:
            cfgfn = client.GenericCurlConfig(proxy=proxy, use_signal=use_signal,
                                             verify_hostname=verify_hostname,
                                             _pycurl_version_fn=pcverfn)
            curl_factory = lambda: rapi.testutils.FakeCurl(RapiMock())
            cl = client.GanetiRapiClient("master.example.com",
                                         curl_config_fn=cfgfn,
                                         curl_factory=curl_factory)
            curl = cl._CreateCurl()
            self.assertEqual(curl.getopt(pycurl.PROXY), proxy)
            self.assertEqual(curl.getopt(pycurl.NOSIGNAL), not use_signal)
            if verify_hostname:
              self.assertEqual(curl.getopt(pycurl.SSL_VERIFYHOST), 2)
            else:
              self.assertEqual(curl.getopt(pycurl.SSL_VERIFYHOST), 0)

  def testNoCertVerify(self):
    """Default config: no peer verification and no CA settings."""
    cfgfn = client.GenericCurlConfig()
    curl_factory = lambda: rapi.testutils.FakeCurl(RapiMock())
    cl = client.GanetiRapiClient("master.example.com", curl_config_fn=cfgfn,
                                 curl_factory=curl_factory)
    curl = cl._CreateCurl()
    self.assertFalse(curl.getopt(pycurl.SSL_VERIFYPEER))
    self.assertFalse(curl.getopt(pycurl.CAINFO))
    self.assertFalse(curl.getopt(pycurl.CAPATH))

  def testCertVerifyCurlBundle(self):
    """use_curl_cabundle=True: verify peer, but set no CAINFO/CAPATH."""
    cfgfn = client.GenericCurlConfig(use_curl_cabundle=True)
    curl_factory = lambda: rapi.testutils.FakeCurl(RapiMock())
    cl = client.GanetiRapiClient("master.example.com", curl_config_fn=cfgfn,
                                 curl_factory=curl_factory)
    curl = cl._CreateCurl()
    self.assert_(curl.getopt(pycurl.SSL_VERIFYPEER))
    self.assertFalse(curl.getopt(pycurl.CAINFO))
    self.assertFalse(curl.getopt(pycurl.CAPATH))

  def testCertVerifyCafile(self):
    """cafile= must end up in CAINFO."""
    mycert = "/tmp/some/UNUSED/cert/file.pem"
    cfgfn = client.GenericCurlConfig(cafile=mycert)
    curl_factory = lambda: rapi.testutils.FakeCurl(RapiMock())
    cl = client.GanetiRapiClient("master.example.com", curl_config_fn=cfgfn,
                                 curl_factory=curl_factory)
    curl = cl._CreateCurl()
    self.assert_(curl.getopt(pycurl.SSL_VERIFYPEER))
    self.assertEqual(curl.getopt(pycurl.CAINFO), mycert)
    self.assertFalse(curl.getopt(pycurl.CAPATH))

  def testCertVerifyCapath(self):
    """capath= works when curl is linked against OpenSSL."""
    certdir = "/tmp/some/UNUSED/cert/directory"
    pcverfn = _FakeOpenSslPycurlVersion
    cfgfn = client.GenericCurlConfig(capath=certdir,
                                     _pycurl_version_fn=pcverfn)
    curl_factory = lambda: rapi.testutils.FakeCurl(RapiMock())
    cl = client.GanetiRapiClient("master.example.com", curl_config_fn=cfgfn,
                                 curl_factory=curl_factory)
    curl = cl._CreateCurl()
    self.assert_(curl.getopt(pycurl.SSL_VERIFYPEER))
    self.assertEqual(curl.getopt(pycurl.CAPATH), certdir)
    self.assertFalse(curl.getopt(pycurl.CAINFO))

  def testCertVerifyCapathGnuTls(self):
    """capath= is rejected when curl is linked against GnuTLS."""
    certdir = "/tmp/some/UNUSED/cert/directory"
    pcverfn = _FakeGnuTlsPycurlVersion
    cfgfn = client.GenericCurlConfig(capath=certdir,
                                     _pycurl_version_fn=pcverfn)
    curl_factory = lambda: rapi.testutils.FakeCurl(RapiMock())
    cl = client.GanetiRapiClient("master.example.com", curl_config_fn=cfgfn,
                                 curl_factory=curl_factory)
    self.assertRaises(client.Error, cl._CreateCurl)

  def testCertVerifyNoSsl(self):
    """capath= is rejected when curl has no SSL support at all."""
    certdir = "/tmp/some/UNUSED/cert/directory"
    pcverfn = _FakeNoSslPycurlVersion
    cfgfn = client.GenericCurlConfig(capath=certdir,
                                     _pycurl_version_fn=pcverfn)
    curl_factory = lambda: rapi.testutils.FakeCurl(RapiMock())
    cl = client.GanetiRapiClient("master.example.com", curl_config_fn=cfgfn,
                                 curl_factory=curl_factory)
    self.assertRaises(client.Error, cl._CreateCurl)

  def testCertVerifyFancySsl(self):
    """An unknown SSL implementation raises NotImplementedError."""
    certdir = "/tmp/some/UNUSED/cert/directory"
    pcverfn = _FakeFancySslPycurlVersion
    cfgfn = client.GenericCurlConfig(capath=certdir,
                                     _pycurl_version_fn=pcverfn)
    curl_factory = lambda: rapi.testutils.FakeCurl(RapiMock())
    cl = client.GanetiRapiClient("master.example.com", curl_config_fn=cfgfn,
                                 curl_factory=curl_factory)
    self.assertRaises(NotImplementedError, cl._CreateCurl)

  def testTimeouts(self):
    """Connect/total timeouts must be passed to curl.

    (Previously a duplicate ``testCertVerifyCapath`` definition.)
    """
    for connect_timeout in [None, 1, 5, 10, 30, 60, 300]:
      for timeout in [None, 1, 30, 60, 3600, 24 * 3600]:
        cfgfn = client.GenericCurlConfig(connect_timeout=connect_timeout,
                                         timeout=timeout)
        curl_factory = lambda: rapi.testutils.FakeCurl(RapiMock())
        cl = client.GanetiRapiClient("master.example.com", curl_config_fn=cfgfn,
                                     curl_factory=curl_factory)
        curl = cl._CreateCurl()
        self.assertEqual(curl.getopt(pycurl.CONNECTTIMEOUT), connect_timeout)
        self.assertEqual(curl.getopt(pycurl.TIMEOUT), timeout)
class GanetiRapiClientTests(testutils.GanetiTestCase):
  def setUp(self):
    """Create a RAPI client wired to a fresh RapiMock via FakeCurl."""
    testutils.GanetiTestCase.setUp(self)
    self.rapi = RapiMock()
    self.curl = rapi.testutils.FakeCurl(self.rapi)
    self.client = client.GanetiRapiClient("master.example.com",
                                          curl_factory=lambda: self.curl)
  def assertHandler(self, handler_cls):
    """Assert the last request was dispatched to *handler_cls*."""
    self.failUnless(isinstance(self.rapi.GetLastHandler(), handler_cls))
  def assertQuery(self, key, value):
    """Assert query argument *key* of the last request equalled *value*."""
    self.assertEqual(value, self.rapi.GetLastHandler().queryargs.get(key, None))
  def assertItems(self, items):
    """Assert the URL path items of the last request."""
    self.assertEqual(items, self.rapi.GetLastHandler().items)
  def assertBulk(self):
    """Assert the last request enabled bulk output."""
    self.assertTrue(self.rapi.GetLastHandler().useBulk())
  def assertDryRun(self):
    """Assert the last request was flagged dry-run."""
    self.assertTrue(self.rapi.GetLastHandler().dryRun())
  def assertUseForce(self):
    """Assert the last request used the force flag."""
    self.assertTrue(self.rapi.GetLastHandler().useForce())
  def testEncodeQuery(self):
    """_EncodeQuery: None becomes "", booleans become 0/1, containers raise."""
    query = [
      ("a", None),
      ("b", 1),
      ("c", 2),
      ("d", "Foo"),
      ("e", True),
      ]
    expected = [
      ("a", ""),
      ("b", 1),
      ("c", 2),
      ("d", "Foo"),
      ("e", 1),
      ]
    self.assertEqualValues(self.client._EncodeQuery(query),
                           expected)
    # invalid types
    for i in [[1, 2, 3], {"moo": "boo"}, (1, 2, 3)]:
      self.assertRaises(ValueError, self.client._EncodeQuery, [("x", i)])
def testCurlSettings(self):
self.rapi.AddResponse("2")
self.assertEqual(2, self.client.GetVersion())
self.assertHandler(rlib2.R_version)
# Signals should be disabled by default
self.assert_(self.curl.getopt(pycurl.NOSIGNAL))
# No auth and no proxy
self.assertFalse(self.curl.getopt(pycurl.USERPWD))
self.assert_(self.curl.getopt(pycurl.PROXY) is None)
# Content-type is required for requests
headers = self.curl.getopt(pycurl.HTTPHEADER)
self.assert_("Content-type: application/json" in headers)
  def testHttpError(self):
    """An HTTP error status must surface as GanetiApiError with .code set."""
    self.rapi.AddResponse(None, code=404)
    try:
      self.client.GetJobStatus(15140)
    except client.GanetiApiError, err:  # Py2 "except ..., name" syntax
      self.assertEqual(err.code, 404)
    else:
      self.fail("Didn't raise exception")
def testGetVersion(self):
self.rapi.AddResponse("2")
self.assertEqual(2, self.client.GetVersion())
self.assertHandler(rlib2.R_version)
def testGetFeatures(self):
for features in [[], ["foo", "bar", "baz"]]:
self.rapi.AddResponse(serializer.DumpJson(features))
self.assertEqual(features, self.client.GetFeatures())
self.assertHandler(rlib2.R_2_features)
def testGetFeaturesNotFound(self):
self.rapi.AddResponse(None, code=404)
self.assertEqual([], self.client.GetFeatures())
def testGetOperatingSystems(self):
self.rapi.AddResponse("[\"beos\"]")
self.assertEqual(["beos"], self.client.GetOperatingSystems())
self.assertHandler(rlib2.R_2_os)
def testGetClusterTags(self):
self.rapi.AddResponse("[\"tag\"]")
self.assertEqual(["tag"], self.client.GetClusterTags())
self.assertHandler(rlib2.R_2_tags)
def testAddClusterTags(self):
self.rapi.AddResponse("1234")
self.assertEqual(1234,
self.client.AddClusterTags(["awesome"], dry_run=True))
self.assertHandler(rlib2.R_2_tags)
self.assertDryRun()
self.assertQuery("tag", ["awesome"])
def testDeleteClusterTags(self):
self.rapi.AddResponse("5107")
self.assertEqual(5107, self.client.DeleteClusterTags(["awesome"],
dry_run=True))
self.assertHandler(rlib2.R_2_tags)
self.assertDryRun()
self.assertQuery("tag", ["awesome"])
def testGetInfo(self):
self.rapi.AddResponse("{}")
self.assertEqual({}, self.client.GetInfo())
self.assertHandler(rlib2.R_2_info)
def testGetInstances(self):
self.rapi.AddResponse("[]")
self.assertEqual([], self.client.GetInstances(bulk=True))
self.assertHandler(rlib2.R_2_instances)
self.assertBulk()
def testGetInstance(self):
self.rapi.AddResponse("[]")
self.assertEqual([], self.client.GetInstance("instance"))
self.assertHandler(rlib2.R_2_instances_name)
self.assertItems(["instance"])
def testGetInstanceInfo(self):
self.rapi.AddResponse("21291")
self.assertEqual(21291, self.client.GetInstanceInfo("inst3"))
self.assertHandler(rlib2.R_2_instances_name_info)
self.assertItems(["inst3"])
self.assertQuery("static", None)
self.rapi.AddResponse("3428")
self.assertEqual(3428, self.client.GetInstanceInfo("inst31", static=False))
self.assertHandler(rlib2.R_2_instances_name_info)
self.assertItems(["inst31"])
self.assertQuery("static", ["0"])
self.rapi.AddResponse("15665")
self.assertEqual(15665, self.client.GetInstanceInfo("inst32", static=True))
self.assertHandler(rlib2.R_2_instances_name_info)
self.assertItems(["inst32"])
self.assertQuery("static", ["1"])
def testInstancesMultiAlloc(self):
response = {
constants.JOB_IDS_KEY: ["23423"],
constants.ALLOCATABLE_KEY: ["foobar"],
constants.FAILED_KEY: ["foobar2"],
}
self.rapi.AddResponse(serializer.DumpJson(response))
insts = [self.client.InstanceAllocation("create", "foobar",
"plain", [], []),
self.client.InstanceAllocation("create", "foobar2",
"drbd8", [{"size": 100}], [])]
resp = self.client.InstancesMultiAlloc(insts)
self.assertEqual(resp, response)
self.assertHandler(rlib2.R_2_instances_multi_alloc)
def testCreateInstanceOldVersion(self):
# The old request format, version 0, is no longer supported
self.rapi.AddResponse(None, code=404)
self.assertRaises(client.GanetiApiError, self.client.CreateInstance,
"create", "inst1.example.com", "plain", [], [])
self.assertEqual(self.rapi.CountPending(), 0)
def testCreateInstance(self):
self.rapi.AddResponse(serializer.DumpJson([rlib2._INST_CREATE_REQV1]))
self.rapi.AddResponse("23030")
job_id = self.client.CreateInstance("create", "inst1.example.com",
"plain", [], [], dry_run=True)
self.assertEqual(job_id, 23030)
self.assertHandler(rlib2.R_2_instances)
self.assertDryRun()
data = serializer.LoadJson(self.rapi.GetLastRequestData())
for field in ["dry_run", "beparams", "hvparams", "start"]:
self.assertFalse(field in data)
self.assertEqual(data["name"], "inst1.example.com")
self.assertEqual(data["disk_template"], "plain")
def testCreateInstance2(self):
self.rapi.AddResponse(serializer.DumpJson([rlib2._INST_CREATE_REQV1]))
self.rapi.AddResponse("24740")
job_id = self.client.CreateInstance("import", "inst2.example.com",
"drbd8", [{"size": 100,}],
[{}, {"bridge": "br1", }],
dry_run=False, start=True,
pnode="node1", snode="node9",
ip_check=False)
self.assertEqual(job_id, 24740)
self.assertHandler(rlib2.R_2_instances)
data = serializer.LoadJson(self.rapi.GetLastRequestData())
self.assertEqual(data[rlib2._REQ_DATA_VERSION], 1)
self.assertEqual(data["name"], "inst2.example.com")
self.assertEqual(data["disk_template"], "drbd8")
self.assertEqual(data["start"], True)
self.assertEqual(data["ip_check"], False)
self.assertEqualValues(data["disks"], [{"size": 100,}])
self.assertEqualValues(data["nics"], [{}, {"bridge": "br1", }])
def testDeleteInstance(self):
self.rapi.AddResponse("1234")
self.assertEqual(1234, self.client.DeleteInstance("instance", dry_run=True))
self.assertHandler(rlib2.R_2_instances_name)
self.assertItems(["instance"])
self.assertDryRun()
def testGetInstanceTags(self):
self.rapi.AddResponse("[]")
self.assertEqual([], self.client.GetInstanceTags("fooinstance"))
self.assertHandler(rlib2.R_2_instances_name_tags)
self.assertItems(["fooinstance"])
def testAddInstanceTags(self):
self.rapi.AddResponse("1234")
self.assertEqual(1234,
self.client.AddInstanceTags("fooinstance", ["awesome"], dry_run=True))
self.assertHandler(rlib2.R_2_instances_name_tags)
self.assertItems(["fooinstance"])
self.assertDryRun()
self.assertQuery("tag", ["awesome"])
def testDeleteInstanceTags(self):
self.rapi.AddResponse("25826")
self.assertEqual(25826, self.client.DeleteInstanceTags("foo", ["awesome"],
dry_run=True))
self.assertHandler(rlib2.R_2_instances_name_tags)
self.assertItems(["foo"])
self.assertDryRun()
self.assertQuery("tag", ["awesome"])
def testRebootInstance(self):
self.rapi.AddResponse("6146")
job_id = self.client.RebootInstance("i-bar", reboot_type="hard",
ignore_secondaries=True, dry_run=True,
reason="Updates")
self.assertEqual(6146, job_id)
self.assertHandler(rlib2.R_2_instances_name_reboot)
self.assertItems(["i-bar"])
self.assertDryRun()
self.assertQuery("type", ["hard"])
self.assertQuery("ignore_secondaries", ["1"])
self.assertQuery("reason", ["Updates"])
def testRebootInstanceDefaultReason(self):
self.rapi.AddResponse("6146")
job_id = self.client.RebootInstance("i-bar", reboot_type="hard",
ignore_secondaries=True, dry_run=True)
self.assertEqual(6146, job_id)
self.assertHandler(rlib2.R_2_instances_name_reboot)
self.assertItems(["i-bar"])
self.assertDryRun()
self.assertQuery("type", ["hard"])
self.assertQuery("ignore_secondaries", ["1"])
self.assertQuery("reason", None)
def testShutdownInstance(self):
self.rapi.AddResponse("1487")
self.assertEqual(1487, self.client.ShutdownInstance("foo-instance",
dry_run=True,
reason="NoMore"))
self.assertHandler(rlib2.R_2_instances_name_shutdown)
self.assertItems(["foo-instance"])
self.assertDryRun()
self.assertQuery("reason", ["NoMore"])
def testShutdownInstanceDefaultReason(self):
self.rapi.AddResponse("1487")
self.assertEqual(1487, self.client.ShutdownInstance("foo-instance",
dry_run=True))
self.assertHandler(rlib2.R_2_instances_name_shutdown)
self.assertItems(["foo-instance"])
self.assertDryRun()
self.assertQuery("reason", None)
def testStartupInstance(self):
self.rapi.AddResponse("27149")
self.assertEqual(27149, self.client.StartupInstance("bar-instance",
dry_run=True,
reason="New"))
self.assertHandler(rlib2.R_2_instances_name_startup)
self.assertItems(["bar-instance"])
self.assertDryRun()
self.assertQuery("reason", ["New"])
def testStartupInstanceDefaultReason(self):
self.rapi.AddResponse("27149")
self.assertEqual(27149, self.client.StartupInstance("bar-instance",
dry_run=True))
self.assertHandler(rlib2.R_2_instances_name_startup)
self.assertItems(["bar-instance"])
self.assertDryRun()
self.assertQuery("reason", None)
def testReinstallInstance(self):
self.rapi.AddResponse(serializer.DumpJson([]))
self.rapi.AddResponse("19119")
self.assertEqual(19119, self.client.ReinstallInstance("baz-instance",
os="DOS",
no_startup=True))
self.assertHandler(rlib2.R_2_instances_name_reinstall)
self.assertItems(["baz-instance"])
self.assertQuery("os", ["DOS"])
self.assertQuery("nostartup", ["1"])
self.assertEqual(self.rapi.CountPending(), 0)
def testReinstallInstanceNew(self):
self.rapi.AddResponse(serializer.DumpJson([rlib2._INST_REINSTALL_REQV1]))
self.rapi.AddResponse("25689")
self.assertEqual(25689, self.client.ReinstallInstance("moo-instance",
os="Debian",
no_startup=True))
self.assertHandler(rlib2.R_2_instances_name_reinstall)
self.assertItems(["moo-instance"])
data = serializer.LoadJson(self.rapi.GetLastRequestData())
self.assertEqual(len(data), 2)
self.assertEqual(data["os"], "Debian")
self.assertEqual(data["start"], False)
self.assertEqual(self.rapi.CountPending(), 0)
def testReinstallInstanceWithOsparams1(self):
self.rapi.AddResponse(serializer.DumpJson([]))
self.assertRaises(client.GanetiApiError, self.client.ReinstallInstance,
"doo-instance", osparams={"x": "y"})
self.assertEqual(self.rapi.CountPending(), 0)
def testReinstallInstanceWithOsparams2(self):
osparams = {
"Hello": "World",
"foo": "bar",
}
self.rapi.AddResponse(serializer.DumpJson([rlib2._INST_REINSTALL_REQV1]))
self.rapi.AddResponse("1717")
self.assertEqual(1717, self.client.ReinstallInstance("zoo-instance",
osparams=osparams))
self.assertHandler(rlib2.R_2_instances_name_reinstall)
self.assertItems(["zoo-instance"])
data = serializer.LoadJson(self.rapi.GetLastRequestData())
self.assertEqual(len(data), 2)
self.assertEqual(data["osparams"], osparams)
self.assertEqual(data["start"], True)
self.assertEqual(self.rapi.CountPending(), 0)
def testReplaceInstanceDisks(self):
self.rapi.AddResponse("999")
job_id = self.client.ReplaceInstanceDisks("instance-name",
disks=[0, 1], iallocator="hail")
self.assertEqual(999, job_id)
self.assertHandler(rlib2.R_2_instances_name_replace_disks)
self.assertItems(["instance-name"])
self.assertQuery("disks", ["0,1"])
self.assertQuery("mode", ["replace_auto"])
self.assertQuery("iallocator", ["hail"])
self.rapi.AddResponse("1000")
job_id = self.client.ReplaceInstanceDisks("instance-bar",
disks=[1], mode="replace_on_secondary", remote_node="foo-node")
self.assertEqual(1000, job_id)
self.assertItems(["instance-bar"])
self.assertQuery("disks", ["1"])
self.assertQuery("remote_node", ["foo-node"])
self.rapi.AddResponse("5175")
self.assertEqual(5175, self.client.ReplaceInstanceDisks("instance-moo"))
self.assertItems(["instance-moo"])
self.assertQuery("disks", None)
def testPrepareExport(self):
self.rapi.AddResponse("8326")
self.assertEqual(8326, self.client.PrepareExport("inst1", "local"))
self.assertHandler(rlib2.R_2_instances_name_prepare_export)
self.assertItems(["inst1"])
self.assertQuery("mode", ["local"])
def testExportInstance(self):
self.rapi.AddResponse("19695")
job_id = self.client.ExportInstance("inst2", "local", "nodeX",
shutdown=True)
self.assertEqual(job_id, 19695)
self.assertHandler(rlib2.R_2_instances_name_export)
self.assertItems(["inst2"])
data = serializer.LoadJson(self.rapi.GetLastRequestData())
self.assertEqual(data["mode"], "local")
self.assertEqual(data["destination"], "nodeX")
self.assertEqual(data["shutdown"], True)
def testMigrateInstanceDefaults(self):
self.rapi.AddResponse("24873")
job_id = self.client.MigrateInstance("inst91")
self.assertEqual(job_id, 24873)
self.assertHandler(rlib2.R_2_instances_name_migrate)
self.assertItems(["inst91"])
data = serializer.LoadJson(self.rapi.GetLastRequestData())
self.assertFalse(data)
def testMigrateInstance(self):
for mode in constants.HT_MIGRATION_MODES:
for cleanup in [False, True]:
self.rapi.AddResponse("31910")
job_id = self.client.MigrateInstance("inst289", mode=mode,
cleanup=cleanup)
self.assertEqual(job_id, 31910)
self.assertHandler(rlib2.R_2_instances_name_migrate)
self.assertItems(["inst289"])
data = serializer.LoadJson(self.rapi.GetLastRequestData())
self.assertEqual(len(data), 2)
self.assertEqual(data["mode"], mode)
self.assertEqual(data["cleanup"], cleanup)
def testFailoverInstanceDefaults(self):
self.rapi.AddResponse("7639")
job_id = self.client.FailoverInstance("inst13579")
self.assertEqual(job_id, 7639)
self.assertHandler(rlib2.R_2_instances_name_failover)
self.assertItems(["inst13579"])
data = serializer.LoadJson(self.rapi.GetLastRequestData())
self.assertFalse(data)
def testFailoverInstance(self):
for iallocator in ["dumb", "hail"]:
for ignore_consistency in [False, True]:
for target_node in ["node-a", "node2"]:
self.rapi.AddResponse("19161")
job_id = \
self.client.FailoverInstance("inst251", iallocator=iallocator,
ignore_consistency=ignore_consistency,
target_node=target_node)
self.assertEqual(job_id, 19161)
self.assertHandler(rlib2.R_2_instances_name_failover)
self.assertItems(["inst251"])
data = serializer.LoadJson(self.rapi.GetLastRequestData())
self.assertEqual(len(data), 3)
self.assertEqual(data["iallocator"], iallocator)
self.assertEqual(data["ignore_consistency"], ignore_consistency)
self.assertEqual(data["target_node"], target_node)
self.assertEqual(self.rapi.CountPending(), 0)
def testRenameInstanceDefaults(self):
new_name = "newnametha7euqu"
self.rapi.AddResponse("8791")
job_id = self.client.RenameInstance("inst18821", new_name)
self.assertEqual(job_id, 8791)
self.assertHandler(rlib2.R_2_instances_name_rename)
self.assertItems(["inst18821"])
data = serializer.LoadJson(self.rapi.GetLastRequestData())
self.assertEqualValues(data, {"new_name": new_name, })
def testRenameInstance(self):
new_name = "new-name-yiux1iin"
for ip_check in [False, True]:
for name_check in [False, True]:
self.rapi.AddResponse("24776")
job_id = self.client.RenameInstance("inst20967", new_name,
ip_check=ip_check,
name_check=name_check)
self.assertEqual(job_id, 24776)
self.assertHandler(rlib2.R_2_instances_name_rename)
self.assertItems(["inst20967"])
data = serializer.LoadJson(self.rapi.GetLastRequestData())
self.assertEqual(len(data), 3)
self.assertEqual(data["new_name"], new_name)
self.assertEqual(data["ip_check"], ip_check)
self.assertEqual(data["name_check"], name_check)
def testGetJobs(self):
self.rapi.AddResponse('[ { "id": "123", "uri": "\\/2\\/jobs\\/123" },'
' { "id": "124", "uri": "\\/2\\/jobs\\/124" } ]')
self.assertEqual([123, 124], self.client.GetJobs())
self.assertHandler(rlib2.R_2_jobs)
self.rapi.AddResponse('[ { "id": "123", "uri": "\\/2\\/jobs\\/123" },'
' { "id": "124", "uri": "\\/2\\/jobs\\/124" } ]')
self.assertEqual([{"id": "123", "uri": "/2/jobs/123"},
{"id": "124", "uri": "/2/jobs/124"}],
self.client.GetJobs(bulk=True))
self.assertHandler(rlib2.R_2_jobs)
self.assertBulk()
def testGetJobStatus(self):
self.rapi.AddResponse("{\"foo\": \"bar\"}")
self.assertEqual({"foo": "bar"}, self.client.GetJobStatus(1234))
self.assertHandler(rlib2.R_2_jobs_id)
self.assertItems(["1234"])
def testWaitForJobChange(self):
fields = ["id", "summary"]
expected = {
"job_info": [123, "something"],
"log_entries": [],
}
self.rapi.AddResponse(serializer.DumpJson(expected))
result = self.client.WaitForJobChange(123, fields, [], -1)
self.assertEqualValues(expected, result)
self.assertHandler(rlib2.R_2_jobs_id_wait)
self.assertItems(["123"])
def testCancelJob(self):
self.rapi.AddResponse("[true, \"Job 123 will be canceled\"]")
self.assertEqual([True, "Job 123 will be canceled"],
self.client.CancelJob(999, dry_run=True))
self.assertHandler(rlib2.R_2_jobs_id)
self.assertItems(["999"])
self.assertDryRun()
def testGetNodes(self):
self.rapi.AddResponse("[ { \"id\": \"node1\", \"uri\": \"uri1\" },"
" { \"id\": \"node2\", \"uri\": \"uri2\" } ]")
self.assertEqual(["node1", "node2"], self.client.GetNodes())
self.assertHandler(rlib2.R_2_nodes)
self.rapi.AddResponse("[ { \"id\": \"node1\", \"uri\": \"uri1\" },"
" { \"id\": \"node2\", \"uri\": \"uri2\" } ]")
self.assertEqual([{"id": "node1", "uri": "uri1"},
{"id": "node2", "uri": "uri2"}],
self.client.GetNodes(bulk=True))
self.assertHandler(rlib2.R_2_nodes)
self.assertBulk()
def testGetNode(self):
self.rapi.AddResponse("{}")
self.assertEqual({}, self.client.GetNode("node-foo"))
self.assertHandler(rlib2.R_2_nodes_name)
self.assertItems(["node-foo"])
def testEvacuateNode(self):
    """EvacuateNode must send the right body and reject conflicting options.

    Two queued responses are consumed per call: first the feature/version
    probe result, then the job ID.
    """
    self.rapi.AddResponse(serializer.DumpJson([rlib2._NODE_EVAC_RES1]))
    self.rapi.AddResponse("9876")
    job_id = self.client.EvacuateNode("node-1", remote_node="node-2")
    self.assertEqual(9876, job_id)
    self.assertHandler(rlib2.R_2_nodes_name_evacuate)
    self.assertItems(["node-1"])
    self.assertEqual(serializer.LoadJson(self.rapi.GetLastRequestData()),
                     { "remote_node": "node-2", })
    self.assertEqual(self.rapi.CountPending(), 0)
    self.rapi.AddResponse(serializer.DumpJson([rlib2._NODE_EVAC_RES1]))
    self.rapi.AddResponse("8888")
    job_id = self.client.EvacuateNode("node-3", iallocator="hail", dry_run=True,
                                      mode=constants.NODE_EVAC_ALL,
                                      early_release=True)
    self.assertEqual(8888, job_id)
    self.assertItems(["node-3"])
    self.assertEqual(serializer.LoadJson(self.rapi.GetLastRequestData()), {
        "iallocator": "hail",
        "mode": "all",
        "early_release": True,
    })
    self.assertDryRun()
    # iallocator and remote_node are mutually exclusive
    self.assertRaises(client.GanetiApiError,
                      self.client.EvacuateNode,
                      "node-4", iallocator="hail", remote_node="node-5")
    self.assertEqual(self.rapi.CountPending(), 0)
def testEvacuateNodeOldResponse(self):
    """EvacuateNode must handle pre-2.5 servers only with accept_old.

    An empty feature list simulates an old server; only NODE_EVAC_SEC is
    expressible through the legacy query-parameter interface.
    """
    self.rapi.AddResponse(serializer.DumpJson([]))
    self.assertRaises(client.GanetiApiError, self.client.EvacuateNode,
                      "node-4", accept_old=False)
    self.assertEqual(self.rapi.CountPending(), 0)
    # Modes other than "secondary only" are not supported by old servers
    for mode in [client.NODE_EVAC_PRI, client.NODE_EVAC_ALL]:
        self.rapi.AddResponse(serializer.DumpJson([]))
        self.assertRaises(client.GanetiApiError, self.client.EvacuateNode,
                          "node-4", accept_old=True, mode=mode)
        self.assertEqual(self.rapi.CountPending(), 0)
    self.rapi.AddResponse(serializer.DumpJson([]))
    self.rapi.AddResponse(serializer.DumpJson("21533"))
    result = self.client.EvacuateNode("node-3", iallocator="hail",
                                      dry_run=True, accept_old=True,
                                      mode=client.NODE_EVAC_SEC,
                                      early_release=True)
    self.assertEqual(result, "21533")
    self.assertItems(["node-3"])
    self.assertQuery("iallocator", ["hail"])
    self.assertQuery("early_release", ["1"])
    # Legacy interface sends no request body
    self.assertFalse(self.rapi.GetLastRequestData())
    self.assertDryRun()
    self.assertEqual(self.rapi.CountPending(), 0)
def testMigrateNode(self):
    """MigrateNode must use query parameters against old servers."""
    self.rapi.AddResponse(serializer.DumpJson([]))
    self.rapi.AddResponse("1111")
    self.assertEqual(1111, self.client.MigrateNode("node-a", dry_run=True))
    self.assertHandler(rlib2.R_2_nodes_name_migrate)
    self.assertItems(["node-a"])
    # Without an explicit mode there must be no "mode" query argument
    self.assert_("mode" not in self.rapi.GetLastHandler().queryargs)
    self.assertDryRun()
    self.assertFalse(self.rapi.GetLastRequestData())
    self.rapi.AddResponse(serializer.DumpJson([]))
    self.rapi.AddResponse("1112")
    self.assertEqual(1112, self.client.MigrateNode("node-a", dry_run=True,
                                                   mode="live"))
    self.assertHandler(rlib2.R_2_nodes_name_migrate)
    self.assertItems(["node-a"])
    self.assertQuery("mode", ["live"])
    self.assertDryRun()
    self.assertFalse(self.rapi.GetLastRequestData())
    # target_node is not supported by the legacy interface
    self.rapi.AddResponse(serializer.DumpJson([]))
    self.assertRaises(client.GanetiApiError, self.client.MigrateNode,
                      "node-c", target_node="foonode")
    self.assertEqual(self.rapi.CountPending(), 0)
def testMigrateNodeBodyData(self):
    """MigrateNode must send a JSON body when the server supports it."""
    self.rapi.AddResponse(serializer.DumpJson([rlib2._NODE_MIGRATE_REQV1]))
    self.rapi.AddResponse("27539")
    self.assertEqual(27539, self.client.MigrateNode("node-a", dry_run=False,
                                                    mode="live"))
    self.assertHandler(rlib2.R_2_nodes_name_migrate)
    self.assertItems(["node-a"])
    # Body-based interface must not use query arguments
    self.assertFalse(self.rapi.GetLastHandler().queryargs)
    self.assertEqual(serializer.LoadJson(self.rapi.GetLastRequestData()),
                     { "mode": "live", })
    self.rapi.AddResponse(serializer.DumpJson([rlib2._NODE_MIGRATE_REQV1]))
    self.rapi.AddResponse("14219")
    self.assertEqual(14219, self.client.MigrateNode("node-x", dry_run=True,
                                                    target_node="node9",
                                                    iallocator="ial"))
    self.assertHandler(rlib2.R_2_nodes_name_migrate)
    self.assertItems(["node-x"])
    self.assertDryRun()
    self.assertEqual(serializer.LoadJson(self.rapi.GetLastRequestData()),
                     { "target_node": "node9", "iallocator": "ial", })
    self.assertEqual(self.rapi.CountPending(), 0)
def testGetNodeRole(self):
    """GetNodeRole must return the decoded role string."""
    self.rapi.AddResponse("\"master\"")
    self.assertEqual("master", self.client.GetNodeRole("node-a"))
    self.assertHandler(rlib2.R_2_nodes_name_role)
    self.assertItems(["node-a"])
def testSetNodeRole(self):
    """SetNodeRole must PUT the JSON-encoded role and pass force."""
    self.rapi.AddResponse("789")
    self.assertEqual(789,
        self.client.SetNodeRole("node-foo", "master-candidate", force=True))
    self.assertHandler(rlib2.R_2_nodes_name_role)
    self.assertItems(["node-foo"])
    self.assertQuery("force", ["1"])
    self.assertEqual("\"master-candidate\"", self.rapi.GetLastRequestData())
def testPowercycleNode(self):
    """PowercycleNode must use the force query argument and no body."""
    self.rapi.AddResponse("23051")
    self.assertEqual(23051,
        self.client.PowercycleNode("node5468", force=True))
    self.assertHandler(rlib2.R_2_nodes_name_powercycle)
    self.assertItems(["node5468"])
    self.assertQuery("force", ["1"])
    self.assertFalse(self.rapi.GetLastRequestData())
    self.assertEqual(self.rapi.CountPending(), 0)
def testModifyNode(self):
    """ModifyNode must return the job ID from the modify resource."""
    self.rapi.AddResponse("3783")
    job_id = self.client.ModifyNode("node16979.example.com", drained=True)
    self.assertEqual(job_id, 3783)
    self.assertHandler(rlib2.R_2_nodes_name_modify)
    self.assertItems(["node16979.example.com"])
    self.assertEqual(self.rapi.CountPending(), 0)
def testGetNodeStorageUnits(self):
    """GetNodeStorageUnits must pass storage type and fields as query args."""
    self.rapi.AddResponse("42")
    self.assertEqual(42,
        self.client.GetNodeStorageUnits("node-x", "lvm-pv", "fields"))
    self.assertHandler(rlib2.R_2_nodes_name_storage)
    self.assertItems(["node-x"])
    self.assertQuery("storage_type", ["lvm-pv"])
    self.assertQuery("output_fields", ["fields"])
def testModifyNodeStorageUnits(self):
    """ModifyNodeStorageUnits must only send "allocatable" when given."""
    self.rapi.AddResponse("14")
    self.assertEqual(14,
        self.client.ModifyNodeStorageUnits("node-z", "lvm-pv", "hda"))
    self.assertHandler(rlib2.R_2_nodes_name_storage_modify)
    self.assertItems(["node-z"])
    self.assertQuery("storage_type", ["lvm-pv"])
    self.assertQuery("name", ["hda"])
    # Omitted allocatable must result in no query argument at all
    self.assertQuery("allocatable", None)
    for allocatable, query_allocatable in [(True, "1"), (False, "0")]:
        self.rapi.AddResponse("7205")
        job_id = self.client.ModifyNodeStorageUnits("node-z", "lvm-pv", "hda",
                                                    allocatable=allocatable)
        self.assertEqual(7205, job_id)
        self.assertHandler(rlib2.R_2_nodes_name_storage_modify)
        self.assertItems(["node-z"])
        self.assertQuery("storage_type", ["lvm-pv"])
        self.assertQuery("name", ["hda"])
        self.assertQuery("allocatable", [query_allocatable])
def testRepairNodeStorageUnits(self):
    """RepairNodeStorageUnits must pass storage type and name as query args."""
    self.rapi.AddResponse("99")
    self.assertEqual(99, self.client.RepairNodeStorageUnits("node-z", "lvm-pv",
                                                            "hda"))
    self.assertHandler(rlib2.R_2_nodes_name_storage_repair)
    self.assertItems(["node-z"])
    self.assertQuery("storage_type", ["lvm-pv"])
    self.assertQuery("name", ["hda"])
def testGetNodeTags(self):
    """GetNodeTags must return the decoded tag list."""
    self.rapi.AddResponse("[\"fry\", \"bender\"]")
    self.assertEqual(["fry", "bender"], self.client.GetNodeTags("node-k"))
    self.assertHandler(rlib2.R_2_nodes_name_tags)
    self.assertItems(["node-k"])
def testAddNodeTags(self):
    """AddNodeTags must send tags as query args and honour dry_run."""
    self.rapi.AddResponse("1234")
    self.assertEqual(1234,
        self.client.AddNodeTags("node-v", ["awesome"], dry_run=True))
    self.assertHandler(rlib2.R_2_nodes_name_tags)
    self.assertItems(["node-v"])
    self.assertDryRun()
    self.assertQuery("tag", ["awesome"])
def testDeleteNodeTags(self):
    """DeleteNodeTags must send tags as query args and honour dry_run."""
    self.rapi.AddResponse("16861")
    self.assertEqual(16861, self.client.DeleteNodeTags("node-w", ["awesome"],
                                                       dry_run=True))
    self.assertHandler(rlib2.R_2_nodes_name_tags)
    self.assertItems(["node-w"])
    self.assertDryRun()
    self.assertQuery("tag", ["awesome"])
def testGetGroups(self):
    """GetGroups must extract just the group names by default."""
    groups = [{"name": "group1",
               "uri": "/2/groups/group1",
               },
              {"name": "group2",
               "uri": "/2/groups/group2",
               },
              ]
    self.rapi.AddResponse(serializer.DumpJson(groups))
    self.assertEqual(["group1", "group2"], self.client.GetGroups())
    self.assertHandler(rlib2.R_2_groups)
def testGetGroupsBulk(self):
    """GetGroups(bulk=True) must return the full group objects."""
    groups = [{"name": "group1",
               "uri": "/2/groups/group1",
               "node_cnt": 2,
               "node_list": ["gnt1.test",
                             "gnt2.test",
                             ],
               },
              {"name": "group2",
               "uri": "/2/groups/group2",
               "node_cnt": 1,
               "node_list": ["gnt3.test",
                             ],
               },
              ]
    self.rapi.AddResponse(serializer.DumpJson(groups))
    self.assertEqual(groups, self.client.GetGroups(bulk=True))
    self.assertHandler(rlib2.R_2_groups)
    self.assertBulk()
def testGetGroup(self):
    """GetGroup must return the decoded group object."""
    group = {"ctime": None,
             "name": "default",
             }
    self.rapi.AddResponse(serializer.DumpJson(group))
    self.assertEqual({"ctime": None, "name": "default"},
                     self.client.GetGroup("default"))
    self.assertHandler(rlib2.R_2_groups_name)
    self.assertItems(["default"])
def testCreateGroup(self):
    """CreateGroup must return the job ID and honour dry_run."""
    self.rapi.AddResponse("12345")
    job_id = self.client.CreateGroup("newgroup", dry_run=True)
    self.assertEqual(job_id, 12345)
    self.assertHandler(rlib2.R_2_groups)
    self.assertDryRun()
def testDeleteGroup(self):
    """DeleteGroup must return the job ID and honour dry_run."""
    self.rapi.AddResponse("12346")
    job_id = self.client.DeleteGroup("newgroup", dry_run=True)
    self.assertEqual(job_id, 12346)
    self.assertHandler(rlib2.R_2_groups_name)
    self.assertDryRun()
def testRenameGroup(self):
    """RenameGroup must use the rename resource and return the job ID."""
    self.rapi.AddResponse("12347")
    job_id = self.client.RenameGroup("oldname", "newname")
    self.assertEqual(job_id, 12347)
    self.assertHandler(rlib2.R_2_groups_name_rename)
def testModifyGroup(self):
    """ModifyGroup must use the modify resource and return the job ID."""
    self.rapi.AddResponse("12348")
    job_id = self.client.ModifyGroup("mygroup", alloc_policy="foo")
    self.assertEqual(job_id, 12348)
    self.assertHandler(rlib2.R_2_groups_name_modify)
def testAssignGroupNodes(self):
    """AssignGroupNodes must honour both force and dry_run."""
    self.rapi.AddResponse("12349")
    job_id = self.client.AssignGroupNodes("mygroup", ["node1", "node2"],
                                          force=True, dry_run=True)
    self.assertEqual(job_id, 12349)
    self.assertHandler(rlib2.R_2_groups_name_assign_nodes)
    self.assertDryRun()
    self.assertUseForce()
def testGetNetworksBulk(self):
    """GetNetworks(bulk=True) must return the full network objects."""
    networks = [{"name": "network1",
                 "uri": "/2/networks/network1",
                 "network": "192.168.0.0/24",
                 },
                {"name": "network2",
                 "uri": "/2/networks/network2",
                 "network": "192.168.0.0/24",
                 },
                ]
    self.rapi.AddResponse(serializer.DumpJson(networks))
    self.assertEqual(networks, self.client.GetNetworks(bulk=True))
    self.assertHandler(rlib2.R_2_networks)
    self.assertBulk()
def testGetNetwork(self):
    """GetNetwork must return the decoded network object."""
    network = {"ctime": None,
               "name": "network1",
               }
    self.rapi.AddResponse(serializer.DumpJson(network))
    self.assertEqual({"ctime": None, "name": "network1"},
                     self.client.GetNetwork("network1"))
    self.assertHandler(rlib2.R_2_networks_name)
    self.assertItems(["network1"])
def testCreateNetwork(self):
    """CreateNetwork must return the job ID and honour dry_run."""
    self.rapi.AddResponse("12345")
    job_id = self.client.CreateNetwork("newnetwork", network="192.168.0.0/24",
                                       dry_run=True)
    self.assertEqual(job_id, 12345)
    self.assertHandler(rlib2.R_2_networks)
    self.assertDryRun()
def testModifyNetwork(self):
    """ModifyNetwork must use the modify resource and return the job ID."""
    self.rapi.AddResponse("12346")
    job_id = self.client.ModifyNetwork("mynetwork", gateway="192.168.0.10",
                                       dry_run=True)
    self.assertEqual(job_id, 12346)
    self.assertHandler(rlib2.R_2_networks_name_modify)
def testDeleteNetwork(self):
    """DeleteNetwork must return the job ID and honour dry_run."""
    self.rapi.AddResponse("12347")
    job_id = self.client.DeleteNetwork("newnetwork", dry_run=True)
    self.assertEqual(job_id, 12347)
    self.assertHandler(rlib2.R_2_networks_name)
    self.assertDryRun()
def testConnectNetwork(self):
    """ConnectNetwork must use the connect resource and honour dry_run."""
    self.rapi.AddResponse("12348")
    job_id = self.client.ConnectNetwork("mynetwork", "default",
                                        "bridged", "br0", dry_run=True)
    self.assertEqual(job_id, 12348)
    self.assertHandler(rlib2.R_2_networks_name_connect)
    self.assertDryRun()
def testDisconnectNetwork(self):
    """DisconnectNetwork must use the disconnect resource and honour dry_run."""
    self.rapi.AddResponse("12349")
    job_id = self.client.DisconnectNetwork("mynetwork", "default", dry_run=True)
    self.assertEqual(job_id, 12349)
    self.assertHandler(rlib2.R_2_networks_name_disconnect)
    self.assertDryRun()
def testGetNetworkTags(self):
    """GetNetworkTags must return the decoded tag list."""
    self.rapi.AddResponse("[]")
    self.assertEqual([], self.client.GetNetworkTags("fooNetwork"))
    self.assertHandler(rlib2.R_2_networks_name_tags)
    self.assertItems(["fooNetwork"])
def testAddNetworkTags(self):
    """AddNetworkTags must send tags as query args and honour dry_run."""
    self.rapi.AddResponse("1234")
    self.assertEqual(1234,
        self.client.AddNetworkTags("fooNetwork", ["awesome"], dry_run=True))
    self.assertHandler(rlib2.R_2_networks_name_tags)
    self.assertItems(["fooNetwork"])
    self.assertDryRun()
    self.assertQuery("tag", ["awesome"])
def testDeleteNetworkTags(self):
    """DeleteNetworkTags must send tags as query args and honour dry_run."""
    self.rapi.AddResponse("25826")
    self.assertEqual(25826, self.client.DeleteNetworkTags("foo", ["awesome"],
                                                          dry_run=True))
    self.assertHandler(rlib2.R_2_networks_name_tags)
    self.assertItems(["foo"])
    self.assertDryRun()
    self.assertQuery("tag", ["awesome"])
def testModifyInstance(self):
    """ModifyInstance must put keyword arguments into the request body."""
    self.rapi.AddResponse("23681")
    job_id = self.client.ModifyInstance("inst7210", os_name="linux")
    self.assertEqual(job_id, 23681)
    self.assertItems(["inst7210"])
    self.assertHandler(rlib2.R_2_instances_name_modify)
    self.assertEqual(serializer.LoadJson(self.rapi.GetLastRequestData()),
                     { "os_name": "linux", })
def testModifyCluster(self):
    """ModifyCluster must serialize None/False/True parameter values as-is."""
    for mnh in [None, False, True]:
        self.rapi.AddResponse("14470")
        self.assertEqual(14470,
            self.client.ModifyCluster(maintain_node_health=mnh))
        self.assertHandler(rlib2.R_2_cluster_modify)
        self.assertItems([])
        data = serializer.LoadJson(self.rapi.GetLastRequestData())
        # Only the single given parameter may appear in the body
        self.assertEqual(len(data), 1)
        self.assertEqual(data["maintain_node_health"], mnh)
    self.assertEqual(self.rapi.CountPending(), 0)
def testRedistributeConfig(self):
    """RedistributeConfig must use the redist resource and return a job ID."""
    self.rapi.AddResponse("3364")
    job_id = self.client.RedistributeConfig()
    self.assertEqual(job_id, 3364)
    self.assertItems([])
    self.assertHandler(rlib2.R_2_redist_config)
def testActivateInstanceDisks(self):
    """ActivateInstanceDisks without options must send no query args."""
    self.rapi.AddResponse("23547")
    job_id = self.client.ActivateInstanceDisks("inst28204")
    self.assertEqual(job_id, 23547)
    self.assertItems(["inst28204"])
    self.assertHandler(rlib2.R_2_instances_name_activate_disks)
    self.assertFalse(self.rapi.GetLastHandler().queryargs)
def testActivateInstanceDisksIgnoreSize(self):
    """ActivateInstanceDisks must translate ignore_size to a query arg."""
    self.rapi.AddResponse("11044")
    job_id = self.client.ActivateInstanceDisks("inst28204", ignore_size=True)
    self.assertEqual(job_id, 11044)
    self.assertItems(["inst28204"])
    self.assertHandler(rlib2.R_2_instances_name_activate_disks)
    self.assertQuery("ignore_size", ["1"])
def testDeactivateInstanceDisks(self):
    """DeactivateInstanceDisks must send no query args."""
    self.rapi.AddResponse("14591")
    job_id = self.client.DeactivateInstanceDisks("inst28234")
    self.assertEqual(job_id, 14591)
    self.assertItems(["inst28234"])
    self.assertHandler(rlib2.R_2_instances_name_deactivate_disks)
    self.assertFalse(self.rapi.GetLastHandler().queryargs)
def testRecreateInstanceDisks(self):
    """RecreateInstanceDisks must send no query args."""
    self.rapi.AddResponse("13553")
    job_id = self.client.RecreateInstanceDisks("inst23153")
    self.assertEqual(job_id, 13553)
    self.assertItems(["inst23153"])
    self.assertHandler(rlib2.R_2_instances_name_recreate_disks)
    self.assertFalse(self.rapi.GetLastHandler().queryargs)
def testGetInstanceConsole(self):
    """GetInstanceConsole must send neither query args nor a body."""
    self.rapi.AddResponse("26876")
    job_id = self.client.GetInstanceConsole("inst21491")
    self.assertEqual(job_id, 26876)
    self.assertItems(["inst21491"])
    self.assertHandler(rlib2.R_2_instances_name_console)
    self.assertFalse(self.rapi.GetLastHandler().queryargs)
    self.assertFalse(self.rapi.GetLastRequestData())
def testGrowInstanceDisk(self):
    """GrowInstanceDisk must omit wait_for_sync from the body when None."""
    for idx, wait_for_sync in enumerate([None, False, True]):
        amount = 128 + (512 * idx)
        self.assertEqual(self.rapi.CountPending(), 0)
        self.rapi.AddResponse("30783")
        self.assertEqual(30783,
            self.client.GrowInstanceDisk("eze8ch", idx, amount,
                                         wait_for_sync=wait_for_sync))
        self.assertHandler(rlib2.R_2_instances_name_disk_grow)
        self.assertItems(["eze8ch", str(idx)])
        data = serializer.LoadJson(self.rapi.GetLastRequestData())
        if wait_for_sync is None:
            # Unset option must not appear in the serialized body
            self.assertEqual(len(data), 1)
            self.assert_("wait_for_sync" not in data)
        else:
            self.assertEqual(len(data), 2)
            self.assertEqual(data["wait_for_sync"], wait_for_sync)
        self.assertEqual(data["amount"], amount)
        self.assertEqual(self.rapi.CountPending(), 0)
def testGetGroupTags(self):
    """GetGroupTags must return the decoded tag list."""
    self.rapi.AddResponse("[]")
    self.assertEqual([], self.client.GetGroupTags("fooGroup"))
    self.assertHandler(rlib2.R_2_groups_name_tags)
    self.assertItems(["fooGroup"])
def testAddGroupTags(self):
    """AddGroupTags must send tags as query args and honour dry_run."""
    self.rapi.AddResponse("1234")
    self.assertEqual(1234,
        self.client.AddGroupTags("fooGroup", ["awesome"], dry_run=True))
    self.assertHandler(rlib2.R_2_groups_name_tags)
    self.assertItems(["fooGroup"])
    self.assertDryRun()
    self.assertQuery("tag", ["awesome"])
def testDeleteGroupTags(self):
    """DeleteGroupTags must send tags as query args and honour dry_run."""
    self.rapi.AddResponse("25826")
    self.assertEqual(25826, self.client.DeleteGroupTags("foo", ["awesome"],
                                                        dry_run=True))
    self.assertHandler(rlib2.R_2_groups_name_tags)
    self.assertItems(["foo"])
    self.assertDryRun()
    self.assertQuery("tag", ["awesome"])
def testQuery(self):
    """Query must send fields (and qfilter only when given) in the body."""
    for idx, what in enumerate(constants.QR_VIA_RAPI):
        for idx2, qfilter in enumerate([None, ["?", "name"]]):
            # Unique job ID per combination to catch response mix-ups
            job_id = 11010 + (idx << 4) + (idx2 << 16)
            fields = sorted(query.ALL_FIELDS[what].keys())[:10]
            self.rapi.AddResponse(str(job_id))
            self.assertEqual(self.client.Query(what, fields, qfilter=qfilter),
                             job_id)
            self.assertItems([what])
            self.assertHandler(rlib2.R_2_query)
            self.assertFalse(self.rapi.GetLastHandler().queryargs)
            data = serializer.LoadJson(self.rapi.GetLastRequestData())
            self.assertEqual(data["fields"], fields)
            if qfilter is None:
                self.assertTrue("qfilter" not in data)
            else:
                self.assertEqual(data["qfilter"], qfilter)
            self.assertEqual(self.rapi.CountPending(), 0)
def testQueryFields(self):
    """QueryFields must encode the fields list as a comma-joined query arg."""
    exp_result = objects.QueryFieldsResponse(fields=[
        objects.QueryFieldDefinition(name="pnode", title="PNode",
                                     kind=constants.QFT_NUMBER),
        objects.QueryFieldDefinition(name="other", title="Other",
                                     kind=constants.QFT_BOOL),
        ])
    for what in constants.QR_VIA_RAPI:
        for fields in [None, ["name", "_unknown_"], ["&", "?|"]]:
            self.rapi.AddResponse(serializer.DumpJson(exp_result.ToDict()))
            result = self.client.QueryFields(what, fields=fields)
            self.assertItems([what])
            self.assertHandler(rlib2.R_2_query_fields)
            self.assertFalse(self.rapi.GetLastRequestData())
            queryargs = self.rapi.GetLastHandler().queryargs
            if fields is None:
                self.assertFalse(queryargs)
            else:
                self.assertEqual(queryargs, {
                    "fields": [",".join(fields)],
                    })
            self.assertEqual(objects.QueryFieldsResponse.FromDict(result).ToDict(),
                             exp_result.ToDict())
            self.assertEqual(self.rapi.CountPending(), 0)
def testWaitForJobCompletionNoChange(self):
    """WaitForJobCompletion must give up after the configured retries."""
    resp = serializer.DumpJson({
        "status": constants.JOB_STATUS_WAITING,
        })
    for retries in [1, 5, 25]:
        for _ in range(retries):
            self.rapi.AddResponse(resp)
        self.assertFalse(self.client.WaitForJobCompletion(22789, period=None,
                                                          retries=retries))
        self.assertHandler(rlib2.R_2_jobs_id)
        self.assertItems(["22789"])
        self.assertEqual(self.rapi.CountPending(), 0)
def testWaitForJobCompletionAlreadyFinished(self):
    """WaitForJobCompletion must return True right away on a finished job."""
    self.rapi.AddResponse(serializer.DumpJson({
        "status": constants.JOB_STATUS_SUCCESS,
        }))
    self.assertTrue(self.client.WaitForJobCompletion(22793, period=None,
                                                     retries=1))
    self.assertHandler(rlib2.R_2_jobs_id)
    self.assertItems(["22793"])
    self.assertEqual(self.rapi.CountPending(), 0)
def testWaitForJobCompletionEmptyResponse(self):
    """A response without a status field must be treated as a failure."""
    self.rapi.AddResponse("{}")
    self.assertFalse(self.client.WaitForJobCompletion(22793, period=None,
                                                      retries=10))
    self.assertHandler(rlib2.R_2_jobs_id)
    self.assertItems(["22793"])
    self.assertEqual(self.rapi.CountPending(), 0)
def testWaitForJobCompletionOutOfRetries(self):
    """Running out of retries must leave the last queued response unused."""
    for retries in [3, 10, 21]:
        for _ in range(retries):
            self.rapi.AddResponse(serializer.DumpJson({
                "status": constants.JOB_STATUS_RUNNING,
                }))
        self.assertFalse(self.client.WaitForJobCompletion(30948, period=None,
                                                          retries=retries - 1))
        self.assertHandler(rlib2.R_2_jobs_id)
        self.assertItems(["30948"])
        # One queued response must remain unconsumed
        self.assertEqual(self.rapi.CountPending(), 1)
        self.rapi.ResetResponses()
def testWaitForJobCompletionSuccessAndFailure(self):
    """The final job status must determine the boolean result."""
    for retries in [1, 4, 13]:
        for (success, end_status) in [(False, constants.JOB_STATUS_ERROR),
                                      (True, constants.JOB_STATUS_SUCCESS)]:
            for _ in range(retries):
                self.rapi.AddResponse(serializer.DumpJson({
                    "status": constants.JOB_STATUS_RUNNING,
                    }))
            self.rapi.AddResponse(serializer.DumpJson({
                "status": end_status,
                }))
            result = self.client.WaitForJobCompletion(3187, period=None,
                                                      retries=retries + 1)
            self.assertEqual(result, success)
            self.assertHandler(rlib2.R_2_jobs_id)
            self.assertItems(["3187"])
            self.assertEqual(self.rapi.CountPending(), 0)
class RapiTestRunner(unittest.TextTestRunner):
    """Test runner that additionally verifies RAPI resource coverage.

    While the tests run, handlers record themselves in the module-level
    ``_used_handlers`` set; afterwards any resource in the connector that
    was neither exercised nor explicitly whitelisted raises an error.
    """
    def run(self, *args):
        global _used_handlers
        assert _used_handlers is None
        _used_handlers = set()
        try:
            # Run actual tests
            result = unittest.TextTestRunner.run(self, *args)
            untouched = set(connector.CONNECTOR.values())
            untouched -= _used_handlers
            untouched -= _KNOWN_UNUSED
            if untouched:
                raise AssertionError("The following RAPI resources were not used by the"
                                     " RAPI client: %r" % utils.CommaJoin(untouched))
        finally:
            # Reset global variable
            _used_handlers = None
        return result
if __name__ == "__main__":
    # UsesRapiClient skips/guards the test program when the RAPI client
    # prerequisites are unavailable; RapiTestRunner additionally enforces
    # full RAPI resource coverage.
    client.UsesRapiClient(testutils.GanetiTestProgram)(testRunner=RapiTestRunner)
|
willprice/arduino-sphere-project
|
scripts/example_direction_finder/temboo/Library/Foursquare/OAuth/FinalizeOAuth.py
|
# -*- coding: utf-8 -*-
###############################################################################
#
# FinalizeOAuth
# Completes the OAuth process by retrieving a Foursquare access token for a user, after they have visited the authorization URL returned by the InitializeOAuth choreo and clicked "allow."
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class FinalizeOAuth(Choreography):
    """Choreo wrapper completing the Foursquare OAuth flow (see file header)."""

    def __init__(self, temboo_session):
        """
        Create a new instance of the FinalizeOAuth Choreo. A TembooSession object, containing a valid
        set of Temboo credentials, must be supplied.
        """
        super(FinalizeOAuth, self).__init__(temboo_session, '/Library/Foursquare/OAuth/FinalizeOAuth')

    def new_input_set(self):
        # Factory for the object used to supply this Choreo's inputs.
        return FinalizeOAuthInputSet()

    def _make_result_set(self, result, path):
        # Wrap a raw execution result in the Choreo-specific result set.
        return FinalizeOAuthResultSet(result, path)

    def _make_execution(self, session, exec_id, path):
        # Build a handle for an asynchronous execution of this Choreo.
        return FinalizeOAuthChoreographyExecution(session, exec_id, path)
class FinalizeOAuthInputSet(InputSet):
    """
    An InputSet with methods appropriate for specifying the inputs to the FinalizeOAuth
    Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
    """
    # Each setter simply forwards to InputSet._set_input with the Temboo
    # input name; see the per-method docstrings for type/requiredness.

    def set_AccountName(self, value):
        """
        Set the value of the AccountName input for this Choreo. ((optional, string) Deprecated (retained for backward compatibility only).)
        """
        super(FinalizeOAuthInputSet, self)._set_input('AccountName', value)

    def set_AppKeyName(self, value):
        """
        Set the value of the AppKeyName input for this Choreo. ((optional, string) Deprecated (retained for backward compatibility only).)
        """
        super(FinalizeOAuthInputSet, self)._set_input('AppKeyName', value)

    def set_AppKeyValue(self, value):
        """
        Set the value of the AppKeyValue input for this Choreo. ((optional, string) Deprecated (retained for backward compatibility only).)
        """
        super(FinalizeOAuthInputSet, self)._set_input('AppKeyValue', value)

    def set_CallbackID(self, value):
        """
        Set the value of the CallbackID input for this Choreo. ((required, string) The callback token returned by the InitializeOAuth Choreo. Used to retrieve the authorization code after the user authorizes.)
        """
        super(FinalizeOAuthInputSet, self)._set_input('CallbackID', value)

    def set_ClientID(self, value):
        """
        Set the value of the ClientID input for this Choreo. ((required, string) The Client ID provided by Foursquare after registering your application.)
        """
        super(FinalizeOAuthInputSet, self)._set_input('ClientID', value)

    def set_ClientSecret(self, value):
        """
        Set the value of the ClientSecret input for this Choreo. ((required, string) The Client Secret provided by Foursquare after registering your application.)
        """
        super(FinalizeOAuthInputSet, self)._set_input('ClientSecret', value)

    def set_Timeout(self, value):
        """
        Set the value of the Timeout input for this Choreo. ((optional, integer) The amount of time (in seconds) to poll your Temboo callback URL to see if your app's user has allowed or denied the request for access. Defaults to 20. Max is 60.)
        """
        super(FinalizeOAuthInputSet, self)._set_input('Timeout', value)
class FinalizeOAuthResultSet(ResultSet):
    """
    A ResultSet with methods tailored to the values returned by the FinalizeOAuth Choreo.
    The ResultSet object is used to retrieve the results of a Choreo execution.
    """

    def getJSONFromString(self, str):
        # NOTE(review): the parameter name shadows the builtin "str"; kept
        # as-is since the signature is part of the generated public API.
        return json.loads(str)

    def get_AccessToken(self):
        """
        Retrieve the value for the "AccessToken" output from this Choreo execution. ((string) The access token for the user that has granted access to your application.)
        """
        return self._output.get('AccessToken', None)
class FinalizeOAuthChoreographyExecution(ChoreographyExecution):
    """Execution handle for asynchronous runs of the FinalizeOAuth Choreo."""

    def _make_result_set(self, response, path):
        # Wrap the raw response in the Choreo-specific result set.
        return FinalizeOAuthResultSet(response, path)
|
UNINETT/nav
|
python/nav/web/portadmin/forms.py
|
#
# Copyright (C) 2014 Uninett AS
#
# This file is part of Network Administration Visualized (NAV).
#
# NAV is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 3 as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details. You should have received a copy of the GNU General Public
# License along with NAV. If not, see <http://www.gnu.org/licenses/>.
#
"""Forms for PortAdmin"""
from django import forms
from crispy_forms.helper import FormHelper
from crispy_forms_foundation.layout import Layout, Row, Column, Submit
class SearchForm(forms.Form):
    """Form for searching for ip-devices and interfaces"""

    query = forms.CharField(
        label='',
        widget=forms.TextInput(
            attrs={'placeholder': 'Search for ip device or interface'}))

    def __init__(self, *args, **kwargs):
        super(SearchForm, self).__init__(*args, **kwargs)
        # Crispy-forms helper: GET submission to the portadmin index view,
        # with the query field and submit button side by side in one row.
        helper = FormHelper()
        helper.form_action = 'portadmin-index'
        helper.form_method = 'GET'
        query_column = Column('query', css_class='medium-9')
        submit_column = Column(
            Submit('submit', 'Search', css_class='postfix'),
            css_class='medium-3')
        helper.layout = Layout(
            Row(query_column, submit_column, css_class='collapse'))
        self.helper = helper
|
call-me-jimi/taskmanager
|
taskmanager/lib/hDBSessionMaker.py
|
# create a Session object by sessionmaker
import os
import sys
import ConfigParser

import sqlalchemy.orm
# get path to taskmanager. it is assumed that this script is in the lib directory of
# the taskmanager package.
tmpath = os.path.normpath( os.path.join( os.path.dirname( os.path.realpath(__file__) ) + '/..' ) )
etcpath = '%s/etc' % tmpath # for configuration files
# library is in the same folder
from hDatabase import Base
class hDBSessionMaker( object ):
    """Create the SQLAlchemy engine and a scoped session factory.

    Connection parameters are read from the ``[DATABASE]`` section of a
    ConfigParser-style settings file.  All tables known to ``Base`` are
    created on instantiation.
    """

    def __init__( self, configFileName=None, createTables=False, echo=False ):
        """Connect to the configured database.

        @param configFileName: path to a settings file; defaults to
            ``../etc/serversettings.cfg`` relative to this module
        @param createTables: accepted for backward compatibility; tables
            are currently always created (TODO: honour this flag once all
            callers pass it explicitly)
        @param echo: if True, all SQL statements are echoed to stdout
        """
        if not configFileName:
            # use default config file shipped in the package's etc directory
            etcpath = os.path.normpath( os.path.join( os.path.dirname( os.path.realpath(__file__) ) + '/../etc' ) )
            # default config file for database connection
            configFileName = "{etcPath}/serversettings.cfg".format(etcPath=etcpath)

        # read config file
        if os.path.exists( configFileName ):
            config = ConfigParser.ConfigParser()
            config.read( configFileName )
        else:
            # bug fix: 'sys' was previously used here without being imported,
            # turning a missing-config report into a NameError
            sys.stderr.write( "ERROR: Could not find Config file {c}!".format( c=configFileName) )
            sys.exit( -1 )

        databaseDialect = config.get( 'DATABASE', 'database_dialect' )
        databaseHost = config.get( 'DATABASE', 'database_host' )
        databasePort = config.get( 'DATABASE', 'database_port' )
        databaseName = config.get( 'DATABASE', 'database_name' )
        databaseUsername = config.get( 'DATABASE', 'database_username' )
        databasePassword = config.get( 'DATABASE', 'database_password' )

        ## @var engine
        # The engine that is connected to the database.
        # bug fix: the 'echo' argument was previously ignored (hard-coded False)
        self.engine = sqlalchemy.create_engine( "{dialect}://{user}:{password}@{host}:{port}/{name}".format( dialect=databaseDialect,
                                                                                                             user=databaseUsername,
                                                                                                             password=databasePassword,
                                                                                                             host=databaseHost,
                                                                                                             port=databasePort,
                                                                                                             name=databaseName),
                                                pool_size=50,      # connections kept open inside the pool
                                                max_overflow=100,  # extra connections allowed beyond pool_size
                                                pool_recycle=3600, # recycle connections after this many seconds
                                                echo=echo )

        # Create all tables in the engine. This is equivalent to "Create Table"
        # statements in raw SQL.
        Base.metadata.create_all( self.engine )

        ## @var DBSession
        # Thread-local scoped session factory; see
        # http://docs.sqlalchemy.org/en/rel_0_9/orm/session.html#contextual-thread-local-sessions
        SessionFactory = sqlalchemy.orm.sessionmaker( bind = self.engine )
        self.DBSession = sqlalchemy.orm.scoped_session( SessionFactory )
|
KarolBedkowski/wxgtd
|
wxgtd/wxtools/validators/__init__.py
|
# -*- coding: utf-8 -*-
""" Validators for wx widgets.
Copyright (c) Karol Będkowski, 2006-2013
This file is part of wxGTD
This is free software; you can redistribute it and/or modify it under the
terms of the GNU General Public License as published by the Free Software
Foundation, version 2.
"""
__author__ = "Karol Będkowski"
__copyright__ = "Copyright (c) Karol Będkowski, 2006-2013"
__version__ = '2013-04-21'
__all__ = ['ValidatorDv', 'Validator', 'ValidatorDate', 'ValidatorTime',
'ValidatorColorStr']
from .validator import Validator, ValidatorDv, ValidatorDate, ValidatorTime, \
ValidatorColorStr
|
wasade/qiime
|
qiime/truncate_reverse_primer.py
|
#!/usr/bin/env python
# File created February 29, 2012
from __future__ import division
__author__ = "William Walters"
__copyright__ = "Copyright 2011, The QIIME Project"
__credits__ = ["William Walters", "Emily TerAvest"]
__license__ = "GPL"
__version__ = "1.8.0-dev"
__maintainer__ = "William Walters"
__email__ = "William.A.Walters@colorado.edu"
from os.path import join, basename
from skbio.parse.sequences import parse_fasta
from skbio.sequence import DNA
from qiime.split_libraries import local_align_primer_seq
from qiime.check_id_map import process_id_map
def get_rev_primer_seqs(mapping_fp):
    """ Parses mapping file to get dictionary of SampleID:Rev primer

    mapping_fp: mapping filepath
    """
    hds, mapping_data, run_description, errors, warnings = \
        process_id_map(mapping_fp, has_barcodes=False,
                       disable_primer_check=True)

    if errors:
        for curr_err in errors:
            if curr_err.startswith("Duplicate SampleID"):
                raise ValueError('Errors were found with mapping file, ' +
                                 'please run validate_mapping_file.py to ' +
                                 'identify problems.')

    # Map each SampleID to a {header: value} dict; zip truncates to the
    # shorter of headers/row, matching the original per-index assignment.
    id_map = dict((row[0], dict(zip(hds, row))) for row in mapping_data)

    reverse_primers = {}
    for curr_id, curr_fields in id_map.items():
        try:
            primer_field = curr_fields['ReversePrimer']
        except KeyError:
            raise KeyError("Reverse primer not found in mapping file, " +
                           "please include a 'ReversePrimer' column.")
        reverse_primers[curr_id] = [str(DNA(curr_rev_primer).rc())
                                    for curr_rev_primer in
                                    primer_field.split(',')]

    # Check for valid reverse primers
    # Will have been detected as warnings from mapping file
    for curr_err in errors:
        if curr_err.startswith("Invalid DNA sequence detected"):
            raise ValueError("Problems found with reverse primers, please " +
                             "check mapping file with validate_mapping_file.py")

    return reverse_primers
def get_output_filepaths(output_dir,
                         fasta_fp):
    """ Returns output fasta filepath and log filepath

    fasta_fp: fasta filepath
    output_dir: output directory
    """
    fasta_extensions = ['.fa', '.fasta', '.fna']
    curr_fasta_out = basename(fasta_fp)
    # Strip a recognized extension only when it is the actual suffix.
    # The previous str.replace() approach mangled names: replacing '.fa'
    # anywhere in 'sample.fasta' produced 'samplesta'.
    for fasta_extension in fasta_extensions:
        if curr_fasta_out.endswith(fasta_extension):
            curr_fasta_out = curr_fasta_out[:-len(fasta_extension)]
            break
    curr_fasta_out += "_rev_primer_truncated.fna"
    output_fp = join(output_dir, curr_fasta_out)
    log_fp = join(output_dir, "rev_primer_truncation.log")
    return output_fp, log_fp
def truncate_rev_primers(fasta_f,
                         output_fp,
                         reverse_primers,
                         truncate_option='truncate_only',
                         primer_mismatches=2):
    """ Locally aligns reverse primers, trucates or removes seqs

    fasta_f: open file of fasta file
    output_fp: open filepath to write truncated fasta to
    reverse_primers: dictionary of SampleID:reverse primer sequence
    truncate_option: either truncate_only, truncate_remove
    primer_mismatches: number of allowed primer mismatches

    Returns a dict of counters summarizing what was written/skipped.
    """
    log_data = {
        'sample_id_not_found': 0,
        'reverse_primer_not_found': 0,
        'total_seqs': 0,
        'seqs_written': 0
    }
    for label, seq in parse_fasta(fasta_f):
        # SampleID is the portion of the label before the first underscore
        curr_label = label.split('_')[0]
        log_data['total_seqs'] += 1
        # Check fasta label for valid SampleID, if not found, just write seq
        try:
            curr_rev_primer = reverse_primers[curr_label]
        except KeyError:
            log_data['sample_id_not_found'] += 1
            output_fp.write('>%s\n%s\n' % (label, seq))
            log_data['seqs_written'] += 1
            continue
        # Align every candidate primer; keep the index of the best
        # (fewest-mismatch) hit.  NOTE(review): ties on mismatch count keep
        # the last primer tried — confirm this is acceptable.
        mm_tests = {}
        for rev_primer in curr_rev_primer:
            rev_primer_mm, rev_primer_index =\
                local_align_primer_seq(rev_primer, seq)
            mm_tests[rev_primer_mm] = rev_primer_index
        rev_primer_mm = min(mm_tests.keys())
        rev_primer_index = mm_tests[rev_primer_mm]
        if rev_primer_mm > primer_mismatches:
            # Primer not found within the mismatch budget.
            if truncate_option == "truncate_remove":
                # drop the sequence entirely
                log_data['reverse_primer_not_found'] += 1
            else:
                # keep the untruncated sequence
                log_data['reverse_primer_not_found'] += 1
                log_data['seqs_written'] += 1
                output_fp.write('>%s\n%s\n' % (label, seq))
        else:
            # Check for zero seq length after truncation, will not write seq
            if rev_primer_index > 0:
                log_data['seqs_written'] += 1
                output_fp.write('>%s\n%s\n' % (label, seq[0:rev_primer_index]))
    return log_data
def write_log_file(log_data,
                   log_f):
    """ Writes log file

    log_data: dictionary of details about reverse primer removal
    log_f: open filepath to write log details
    """
    # Assemble every line up front, then emit them in order; the resulting
    # file content is identical to writing each line individually.
    log_lines = [
        "Details for removal of reverse primers\n",
        "Original fasta filepath: %s\n" % log_data['fasta_fp'],
        "Total seqs in fasta: %d\n" % log_data['total_seqs'],
        "Mapping filepath: %s\n" % log_data['mapping_fp'],
        "Truncation option: %s\n" % log_data['truncate_option'],
        "Mismatches allowed: %d\n" % log_data['primer_mismatches'],
        "Total seqs written: %d\n" % log_data['seqs_written'],
        "SampleIDs not found: %d\n" % log_data['sample_id_not_found'],
        "Reverse primers not found: %d\n" %
        log_data['reverse_primer_not_found'],
    ]
    for log_line in log_lines:
        log_f.write(log_line)
def truncate_reverse_primer(fasta_fp,
                            mapping_fp,
                            output_dir=".",
                            truncate_option='truncate_only',
                            primer_mismatches=2):
    """ Main program function for finding, removing reverse primer seqs

    fasta_fp: fasta filepath
    mapping_fp: mapping filepath
    output_dir: output directory
    truncate_option: truncation option, either truncate_only, truncate_remove
    primer_mismatches: Number is mismatches allowed in reverse primer
    """
    # with-blocks guarantee every handle is closed; the original left all
    # four files (mapping, input fasta, output fasta, log) open.
    with open(mapping_fp, "U") as mapping_f:
        reverse_primers = get_rev_primer_seqs(mapping_f)
    output_fp, log_fp = get_output_filepaths(output_dir, fasta_fp)
    with open(fasta_fp, "U") as fasta_f, open(output_fp, "w") as out_f:
        log_data = truncate_rev_primers(fasta_f, out_f, reverse_primers,
                                        truncate_option, primer_mismatches)
    log_data['fasta_fp'] = fasta_fp
    log_data['mapping_fp'] = mapping_fp
    log_data['truncate_option'] = truncate_option
    log_data['primer_mismatches'] = primer_mismatches
    with open(log_fp, "w") as log_f:
        write_log_file(log_data, log_f)
|
kain88-de/mdanalysis
|
testsuite/MDAnalysisTests/core/test_segmentgroup.py
|
# -*- Mode: python; tab-width: 4; indent-tabs-mode:nil; coding:utf-8 -*-
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 fileencoding=utf-8
#
# MDAnalysis --- http://www.mdanalysis.org
# Copyright (c) 2006-2016 The MDAnalysis Development Team and contributors
# (see the file AUTHORS for the full list of names)
#
# Released under the GNU Public Licence, v2 or any higher version
#
# Please cite your use of MDAnalysis in published work:
#
# R. J. Gowers, M. Linke, J. Barnoud, T. J. E. Reddy, M. N. Melo, S. L. Seyler,
# D. L. Dotson, J. Domanski, S. Buchoux, I. M. Kenney, and O. Beckstein.
# MDAnalysis: A Python package for the rapid analysis of molecular dynamics
# simulations. In S. Benthall and S. Rostrup editors, Proceedings of the 15th
# Python in Science Conference, pages 102-109, Austin, TX, 2016. SciPy.
#
# N. Michaud-Agrawal, E. J. Denning, T. B. Woolf, and O. Beckstein.
# MDAnalysis: A Toolkit for the Analysis of Molecular Dynamics Simulations.
# J. Comput. Chem. 32 (2011), 2319--2327, doi:10.1002/jcc.21787
#
from __future__ import absolute_import
from numpy.testing import (
dec,
assert_,
assert_equal,
)
from unittest import skip
import MDAnalysis as mda
from MDAnalysisTests.datafiles import PSF, DCD
from MDAnalysisTests import parser_not_found
class TestSegmentGroup(object):
    """Regression tests for SegmentGroup on the standard PSF/DCD universe."""
    # Legacy tests from before 363

    @dec.skipif(parser_not_found('DCD'),
                'DCD parser not available. Are you using python 3?')
    def setUp(self):
        """Set up the standard AdK system in implicit solvent."""
        self.universe = mda.Universe(PSF, DCD)
        # SegmentGroup covering every segment of the universe
        self.g = self.universe.atoms.segments

    def test_newSegmentGroup(self):
        """test that slicing a SegmentGroup returns a new SegmentGroup (Issue 135)"""
        g = self.universe.atoms.segments
        newg = g[:]
        assert_(isinstance(newg, mda.core.groups.SegmentGroup))
        assert_equal(len(newg), len(g))

    def test_n_atoms(self):
        # total atom count of the AdK test system
        assert_equal(self.g.n_atoms, 3341)

    def test_n_residues(self):
        assert_equal(self.g.n_residues, 214)

    def test_resids_dim(self):
        # resids is per-segment: one array of residue ids per segment
        assert_equal(len(self.g.resids), len(self.g))
        for seg, resids in zip(self.g, self.g.resids):
            assert_(len(resids) == len(seg.residues))
            assert_equal(seg.residues.resids, resids)

    def test_resnums_dim(self):
        assert_equal(len(self.g.resnums), len(self.g))
        for seg, resnums in zip(self.g, self.g.resnums):
            assert_(len(resnums) == len(seg.residues))
            assert_equal(seg.residues.resnums, resnums)

    def test_segids_dim(self):
        # segids is flat: one id per segment
        assert_equal(len(self.g.segids), len(self.g))

    def test_set_segids(self):
        s = self.universe.select_atoms('all').segments
        s.segids = 'ADK'
        assert_equal(self.universe.segments.segids, ['ADK'],
                     err_msg="failed to set_segid on segments")

    def test_set_segid_updates_self(self):
        g = self.universe.select_atoms("resid 10:18").segments
        g.segids = 'ADK'
        assert_equal(g.segids, ['ADK'],
                     err_msg="old selection was not changed in place after set_segid")

    def test_atom_order(self):
        # atoms obtained via segments must come back in index order
        assert_equal(self.universe.segments.atoms.indices,
                     sorted(self.universe.segments.atoms.indices))
|
jolid/script.module.donnie
|
lib/donnie/vidics.py
|
import urllib2, urllib, sys, os, re, random, copy
import htmlcleaner
import httplib2
from BeautifulSoup import BeautifulSoup, Tag, NavigableString
import xbmc,xbmcplugin,xbmcgui,xbmcaddon
from t0mm0.common.net import Net
from t0mm0.common.addon import Addon
from scrapers import CommonScraper
net = Net()
class VidicsServiceSracper(CommonScraper):
    """Scraper for vidics.ch: crawls the TV-show and movie listings into the
    local database and resolves per-title stream mirrors.

    NOTE(review): Python 2 code (print statements, ``except X, e``).  The
    class name misspells "Scraper" but is kept — external code may refer to
    it by this name.
    """

    def __init__(self, settingsid, DB=None, REG=None):
        # DB / REG are shared database and registry handles injected by the
        # host addon; only bound when supplied.
        if DB:
            self.DB=DB
        if REG:
            self.REG=REG
        self.addon_id = 'script.module.donnie'
        self.service='vidics'
        self.name = 'vidics.ch'
        self.raiseError = False
        self.referrer = 'http://www.vidics.ch/'
        self.base_url = 'http://www.vidics.ch/'
        self.user_agent = 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3'
        self.provides = []
        self._streams = []
        self._episodes = []
        # index characters used for alphabetical browsing
        self.AZ = ['1', 'A','B','C','D','E','F','G','H','I','J','K','L','M','N','O','P','Q','R','S','T','U','V','W','X','Y', 'Z']
        self.settingsid = settingsid
        self._loadsettings()

    def _getShows(self, silent=False):
        # Crawl every listing page of the TV-show category, newest first.
        uri = '/Category-TvShows/Genre-Any/Letter-Any/LatestFirst/1.htm'
        self.log("Getting All shows for %s", self.service)
        pDialog = xbmcgui.DialogProgress()
        if not silent:
            pDialog.create('Downloading TV Shows from ' + self.service)
        pagedata = self.getURL(uri, append_base_url=True)
        if pagedata=='':
            return
        soup = BeautifulSoup(pagedata)
        # last pagination link carries the total page count in its href
        table = soup.find('table', {'class' : 'pagination'});
        pagelinks = table.findAll('a');
        pages = pagelinks[len(pagelinks)-1]
        pages = re.search('\d+',pages['href']).group(0)
        # resume from a previously interrupted crawl if one is recorded
        row = self.DB.query("SELECT current, full FROM rw_update_status WHERE identifier='tvshows' AND provider=?", [self.service])
        if len(row) > 0:
            offset = int(pages) - int(row[1])
            current = int(row[0]) + offset - 1
        else:
            current = pages
        for page in reversed(range(1,int(current)+1)):
            percent = int((100 * (int(pages) - page))/int(pages))
            if not self._getShowsByPg(str(page), pages, pDialog, percent, silent):
                break
            # NOTE(review): cancel check placed inside the page loop —
            # confirm placement against upstream (source indentation lost).
            if not silent:
                if (pDialog.iscanceled()):
                    print 'Canceled download'
                    return
        if not silent:
            pDialog.close()
        self.update_cache_status("tvshows")
        self.log('Dowload complete!', level=0)

    def _getShowsByPg(self, page, pages, pDialog, percent, silent):
        # Scrape one listing page; returns True to continue paging.
        self.log("getting TV Shows by %s", page)
        uri = "/Category-TvShows/Genre-Any/Letter-Any/LatestFirst/%s.htm" % page
        pagedata = self.getURL(uri, append_base_url=True)
        if pagedata=='':
            return
        soup = BeautifulSoup(pagedata)
        shows = soup.findAll('a', {'itemprop' : 'url', 'class': 'blue'})
        for show in shows:
            genres = []
            try:
                name = show.find('span', {'itemprop' : 'name'}).string
                year = show.find('span', {'itemprop' : 'copyrightYear'}).string
                href = show['href']
                name = "%s (%s)" % (name, year)
                if not silent:
                    pDialog.update(percent, self.service + ' page: ' + str(page), name)
                character = self.getInitialChr(name)
                self.addShowToDB(name, href, character, year, genres)
            except Exception, e:
                self.log("********Donnie Error: %s, %s" % (self.service, e))
        # NOTE(review): `page` is a str here, so `page == 1` is never true —
        # the DELETE branch looks unreachable; confirm intent.
        if page == 1:
            self.DB.execute("DELETE FROM rw_update_status WHERE provider=? and identifier=?", [self.service, 'tvshows'])
        else:
            self.DB.execute("REPLACE INTO rw_update_status(provider, identifier, current, full) VALUES(?, ?, ?, ?)", [self.service, 'tvshows', page, pages])
        self.DB.commit()
        return True

    # disabled implementation kept for reference (string literal, not code)
    '''def _getNewEpisodes(self, silent=False):
        self.log("Getting new episodes for %s", self.service)
        episodes = []
        pagedata = self.getURL('latest_episodes.xml', append_base_url=True)
        if pagedata=='':
            return False
        soup = BeautifulSoup(pagedata)
        links = soup.findAll('item')
        for link in links:
            title = re.sub(r' Season: (\d+?), Episode: (\d+?) -', r'\1x\2', link.find('title').string)
            episode = [self.service, title, '']
            episodes.append(episode)
        return episodes'''

    def _getEpisodes(self, showid, show, url, pDialog, percent, silent, createFiles=True):
        # Scrape all episode links of one show page into the DB.
        self.log("Getting episodes for %s", show)
        pagedata = self.getURL(url, append_base_url=True)
        if pagedata=='':
            return False
        soup = BeautifulSoup(pagedata)
        links = soup.findAll('a', {'class' : 'episode'})
        p1 = re.compile('style="color: gray;"')   # grayed-out = unavailable
        p2 = re.compile('-Season-(.+?)-Episode-(.+?)$')
        p3 = re.compile(' - (.+?) \(')
        for link in links:
            try:
                if not p1.search(str(link)):
                    href = link['href']
                    temp = p2.search(href)
                    season = temp.group(1)
                    episode = temp.group(2).zfill(2)
                    try:
                        name = link.find('span')
                        name = p3.search(name.string).group(1)
                    except:
                        # fall back to a generic title when no name is shown
                        name = "Episode %s" % episode
                    if not silent:
                        display = "%sx%s %s" % (season, episode, name)
                        pDialog.update(percent, show, display)
                    self.addEpisodeToDB(showid, show, name, season, episode, href, createFiles=createFiles)
            except Exception, e:
                self.log("********Donnie Error: %s, %s" % (self.service, e))
        self.DB.commit()
        return True

    def _getMovies(self, silent=False):
        # Same crawl strategy as _getShows, for the movie category.
        uri = '/Category-Movies/Genre-Any/Letter-Any/LatestFirst/1.htm'
        self.log("Getting All movies for %s", self.service)
        pDialog = xbmcgui.DialogProgress()
        if not silent:
            pDialog.create('Downloading Movies from ' + self.service)
        pagedata = self.getURL(uri, append_base_url=True)
        if pagedata=='':
            return
        soup = BeautifulSoup(pagedata)
        table = soup.find('table', {'class' : 'pagination'});
        pagelinks = table.findAll('a');
        pages = pagelinks[len(pagelinks)-1]
        pages = re.search('\d+',pages['href']).group(0)
        row = self.DB.query("SELECT current, full FROM rw_update_status WHERE identifier='movies' AND provider=?", [self.service])
        if len(row) > 0:
            offset = int(pages) - int(row[1])
            current = int(row[0]) + offset - 1
        else:
            current = pages
        for page in reversed(range(1,int(current)+1)):
            percent = int((100 * (int(pages) - page))/int(pages))
            if not self._getMoviesByPg(str(page), pages, pDialog, percent, silent):
                break
            # NOTE(review): cancel check placement reconstructed — see _getShows.
            if not silent:
                if (pDialog.iscanceled()):
                    print 'Canceled download'
                    return
        if not silent:
            pDialog.close()
        self.update_cache_status("movies")
        self.log('Dowload complete!', level=0)

    def _getMoviesByPg(self, page, pages, pDialog, percent, silent):
        self.log("Getting Movies by %s", page)
        uri = "/Category-Movies/Genre-Any/Letter-Any/LatestFirst/%s.htm" % page
        pagedata = self.getURL(uri, append_base_url=True)
        if pagedata=='':
            return
        soup = BeautifulSoup(pagedata)
        movies = soup.findAll('a', {'itemprop' : 'url', 'class': 'blue'})
        for movie in movies:
            genres = []
            try:
                href = movie['href']
                year = movie.find('span', {'itemprop' : 'copyrightYear'}).string
                name = movie.find('span', {'itemprop' : 'name'}).string
                name = "%s (%s)" % (name, year)
                character = self.getInitialChr(name)
                if not silent:
                    pDialog.update(percent, self.service + ' page: ' + str(page), name)
                self.addMovieToDB(name, href, self.service + '://' + href, character, year, genres)
            except Exception, e:
                self.log("********Donnie Error: %s, %s" % (self.service, e))
        # see NOTE in _getShowsByPg about `page == 1` with a str page
        if page == 1:
            self.DB.execute("DELETE FROM rw_update_status WHERE provider=? and identifier=?", [self.service, 'movies'])
        else:
            self.DB.execute("REPLACE INTO rw_update_status(provider, identifier, current, full) VALUES(?, ?, ?, ?)", [self.service, 'movies', page, pages])
        self.DB.commit()
        return True

    def _getStreams(self, episodeid=None, movieid=None):
        # Resolve stream mirrors for one episode/movie, consulting the
        # mirror cache first when enabled.
        streams = []
        url = self.getServices(episodeid=episodeid, movieid=movieid)
        if not url:
            return streams
        if self.ENABLE_MIRROR_CACHING:
            if url:
                self.log(url)
                cache_url = url
            else:
                return streams
            cached = self.checkStreamCache(cache_url)
            if len(cached) > 0:
                self.log("Loading streams from cache")
                for temp in cached:
                    self.getStreamByPriority(temp[0], temp[1])
                return cached
        self.log("Locating streams for provided by service: %s", self.service)
        pagedata = self.getURL(url, append_base_url=True)
        if pagedata=='':
            return
        soup = BeautifulSoup(pagedata)
        spans = soup.findAll('div', {'class' : 'movie_link'})
        for span in spans:
            print span
            a = span.find('a', { "rel" : 'nofollow' })
            if a:
                host = self.whichHost(str(a.string))
                print host
                #host = host.find('script').string
                raw_url = a['href']
                print raw_url
                if self.checkProviders(host):
                    #streams.append(['Vidics - ' + host, self.service + '://' + raw_url])
                    self.getStreamByPriority('Vidics - ' + host, self.service + '://' + raw_url)
                    if self.ENABLE_MIRROR_CACHING:
                        self.cacheStreamLink(cache_url, 'Vidics - ' + host, self.service + '://' + raw_url)
        self.DB.commit()
        #return streams

    def getStreamByPriority(self, link, stream):
        # Insert the stream with the priority configured for its mirror host.
        self.log(link)
        host = re.search('- (.+?)$', link).group(1)
        SQL = "INSERT INTO rw_stream_list(stream, url, priority, machineid) " \
              "SELECT ?, ?, priority, ? " \
              "FROM rw_providers " \
              "WHERE mirror=? and provider=?"
        self.DB.execute(SQL, [link, stream, self.REG.getSetting('machine-id'), host, self.service])

    def _getServicePriority(self, link):
        # Look up the configured priority for the mirror named in `link`.
        self.log(link)
        host = re.search('- (.+?)$', link).group(1)
        row = self.DB.query("SELECT priority FROM rw_providers WHERE mirror=? and provider=?", [host, self.service])
        return row[0]

    def sortStreams(self, random):
        # sort by the first element (display label / priority key)
        streams = sorted(random, key=lambda s: s[0])
        return streams

    def whichHost(self, host):
        # Map the (possibly truncated) display name shown on the site to the
        # real hoster domain; 'Unknown' when unrecognized.
        table = { 'Movpod' : 'movepod.in',
                  'Gorillavid' : 'gorillavid.in',
                  'Daclips' : 'daclips.com',
                  'Videoweed' : 'videoweed.es',
                  'Novamov' : 'novamov.com',
                  'Nowvideo.c..' : 'nowvideo.com',
                  'Moveshare' : 'moveshare.net',
                  'Divxstage' : 'divxstage.eu',
                  'Sharesix' : 'sharesix.com',
                  'Filenuke' : 'filenuke.com',
                  'Ilenuke' : 'filenuke.com',
                  'Uploadc' : 'uploadc.com',
                  'Putlocker' : 'putlocker.com',
                  'Sockshare' : 'sockshare.com',
                  '80upload' : '180upload.com',
                  'Illionuplo..' : 'billionuploads.com',
                  'Ovreel' : 'movreel.com',
                  'Emuploads' : 'lemuploads.com',
                  }
        try:
            host_url = table[host]
            return host_url
        except:
            return 'Unknown'

    def _resolveStream(self, stream):
        # Follow the site's redirect to the hoster, then hand the final URL
        # to urlresolver for playback resolution.
        import urlresolver
        resolved_url = ''
        raw_url = stream.replace(self.service + '://', '')
        link_url = self.base_url + raw_url
        h = httplib2.Http()
        h.follow_redirects = False
        (response, body) = h.request(link_url)
        resolved_url = urlresolver.HostedMediaFile(url=response['location']).resolve()
        #self.logHost(self.service, raw_url)
        return resolved_url

    def _resolveIMDB(self, uri):
        # Extract the IMDB id from a title page; False when not present.
        imdb = ''
        self.log("Resolving IMDB for %s", uri)
        pagedata = self.getURL(uri, append_base_url=True)
        if pagedata=='':
            return
        try:
            imdb = re.search('http://www.imdb.com/title/(.+?)/', pagedata).group(1)
        except:
            return False
        return self.padIMDB(imdb)
|
walterdejong/synctool
|
contrib/attic/crc32.py
|
#! /usr/bin/env python
#
# CRC32 WJ103
#
import zlib
def crc32(filename):
    '''calculate CRC-32 checksum of file

    Returns the checksum as a lowercase hex string (no '0x' prefix).
    Raises IOError/OSError if the file cannot be opened.
    '''
    crc = 0
    # 'rb': the checksum must see the raw bytes (text mode corrupts binary
    # data on Windows); the with-block closes the file even if read() fails
    with open(filename, 'rb') as f:
        while 1:
            buf = f.read(16384)
            if not buf:
                break
            crc = zlib.crc32(buf, crc)
    # mask to unsigned so the hex rendering is stable across Python
    # versions (zlib.crc32 could return negative values on Python 2)
    return '%x' % (crc & 0xffffffff)
# Command-line entry point: print "CHECKSUM FILENAME" for each argument.
# NOTE(review): Python 2 print statement; 'file' also shadows the builtin.
if __name__ == '__main__':
    import sys
    for file in sys.argv[1:]:
        print '%s %s' % (crc32(file), file)
# EOB
|
jordigh/mercurial-crew
|
mercurial/subrepo.py
|
# subrepo.py - sub-repository handling for Mercurial
#
# Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
import errno, os, re, shutil, posixpath, sys
import xml.dom.minidom
import stat, subprocess, tarfile
from i18n import _
import config, scmutil, util, node, error, cmdutil, bookmarks, match as matchmod
hg = None
propertycache = util.propertycache
nullstate = ('', '', 'empty')
def _expandedabspath(path):
    '''
    get a path or url and if it is a path expand it and return an absolute path
    '''
    expanded = util.urllocalpath(util.expandpath(path))
    parsed = util.url(expanded)
    if parsed.scheme:
        # real URL: hand back the caller's original string untouched
        return path
    return util.normpath(os.path.abspath(parsed.path))
def _getstorehashcachename(remotepath):
    '''get a unique filename for the store hash cache of a remote repository'''
    digest = util.sha1(_expandedabspath(remotepath)).hexdigest()
    # 12 hex chars are plenty to distinguish remotes
    return digest[:12]
def _calcfilehash(filename):
    """Return the SHA-1 hex digest of filename's contents.

    A missing file hashes as the empty string."""
    data = ''
    if os.path.exists(filename):
        fd = open(filename, 'rb')
        try:
            data = fd.read()
        finally:
            # the original leaked the descriptor when read() raised
            fd.close()
    return util.sha1(data).hexdigest()
class SubrepoAbort(error.Abort):
    """Exception class used to avoid handling a subrepo error more than once"""
    def __init__(self, *args, **kw):
        error.Abort.__init__(self, *args, **kw)
        # extra context threaded through by annotatesubrepoerror
        self.subrepo = kw.get('subrepo')
        self.cause = kw.get('cause')  # sys.exc_info() of the original Abort
def annotatesubrepoerror(func):
    """Decorator: annotate Abort errors raised inside a subrepo method with
    the subrepo path, wrapping them in SubrepoAbort so that outer layers
    do not report the same failure twice."""
    def decoratedmethod(self, *args, **kargs):
        try:
            res = func(self, *args, **kargs)
        except SubrepoAbort, ex:
            # This exception has already been handled
            raise ex
        except error.Abort, ex:
            subrepo = subrelpath(self)
            errormsg = str(ex) + ' ' + _('(in subrepo %s)') % subrepo
            # avoid handling this exception by raising a SubrepoAbort exception
            raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo,
                               cause=sys.exc_info())
        return res
    return decoratedmethod
def state(ctx, ui):
    """return a state dict, mapping subrepo paths configured in .hgsub
    to tuple: (source from .hgsub, revision from .hgsubstate, kind
    (key in types dict))
    """
    p = config.config()

    def read(f, sections=None, remap=None):
        # parse a spec file from the changectx; recursion via `read` lets
        # config %include directives pull in further files
        if f in ctx:
            try:
                data = ctx[f].data()
            except IOError, err:
                if err.errno != errno.ENOENT:
                    raise
                # handle missing subrepo spec files as removed
                ui.warn(_("warning: subrepo spec file %s not found\n") % f)
                return
            p.parse(f, data, sections, remap, read)
        else:
            raise util.Abort(_("subrepo spec file %s not found") % f)

    if '.hgsub' in ctx:
        read('.hgsub')

    # user-configured [subpaths] rewrites take precedence
    for path, src in ui.configitems('subpaths'):
        p.set('subpaths', path, src, ui.configsource('subpaths', path))

    rev = {}
    if '.hgsubstate' in ctx:
        try:
            # each line is "<40-char node> <path>"
            for i, l in enumerate(ctx['.hgsubstate'].data().splitlines()):
                l = l.lstrip()
                if not l:
                    continue
                try:
                    revision, path = l.split(" ", 1)
                except ValueError:
                    raise util.Abort(_("invalid subrepository revision "
                                       "specifier in .hgsubstate line %d")
                                     % (i + 1))
                rev[path] = revision
        except IOError, err:
            if err.errno != errno.ENOENT:
                raise

    def remap(src):
        # apply every [subpaths] pattern -> replacement rewrite to src
        for pattern, repl in p.items('subpaths'):
            # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub
            # does a string decode.
            repl = repl.encode('string-escape')
            # However, we still want to allow back references to go
            # through unharmed, so we turn r'\\1' into r'\1'. Again,
            # extra escapes are needed because re.sub string decodes.
            repl = re.sub(r'\\\\([0-9]+)', r'\\\1', repl)
            try:
                src = re.sub(pattern, repl, src, 1)
            except re.error, e:
                raise util.Abort(_("bad subrepository pattern in %s: %s")
                                 % (p.source('subpaths', pattern), e))
        return src

    state = {}
    for path, src in p[''].items():
        kind = 'hg'
        # an optional "[kind]" prefix selects the subrepo type
        if src.startswith('['):
            if ']' not in src:
                raise util.Abort(_('missing ] in subrepo source'))
            kind, src = src.split(']', 1)
            kind = kind[1:]
            src = src.lstrip()  # strip any extra whitespace after ']'
        if not util.url(src).isabs():
            parent = _abssource(ctx._repo, abort=False)
            if parent:
                parent = util.url(parent)
                parent.path = posixpath.join(parent.path or '', src)
                parent.path = posixpath.normpath(parent.path)
                joined = str(parent)
                # Remap the full joined path and use it if it changes,
                # else remap the original source.
                remapped = remap(joined)
                if remapped == joined:
                    src = remap(src)
                else:
                    src = remapped
        # NOTE(review): source indentation was lost; this final remap is
        # reconstructed at loop level so absolute and parentless sources
        # are rewritten too — confirm against upstream history.
        src = remap(src)
        state[util.pconvert(path)] = (src.strip(), rev.get(path, ''), kind)
    return state
def writestate(repo, state):
    """rewrite .hgsubstate in (outer) repo with these subrepo states"""
    # one "<revision> <path>" line per subrepo, sorted by path
    out = []
    for subpath in sorted(state):
        out.append('%s %s\n' % (state[subpath][1], subpath))
    repo.wwrite('.hgsubstate', ''.join(out), '')
def submerge(repo, wctx, mctx, actx, overwrite):
    """delegated from merge.applyupdates: merging of .hgsubstate file
    in working context, merging context and ancestor context"""
    if mctx == actx:  # backwards?
        actx = wctx.p1()
    s1 = wctx.substate   # local subrepo states
    s2 = mctx.substate   # remote (merged-in) subrepo states
    sa = actx.substate   # ancestor subrepo states
    sm = {}              # merged result, written back at the end

    repo.ui.debug("subrepo merge %s %s %s\n" % (wctx, mctx, actx))

    def debug(s, msg, r=""):
        if r:
            r = "%s:%s:%s" % r
        repo.ui.debug(" subrepo %s: %s %s\n" % (s, msg, r))

    for s, l in sorted(s1.iteritems()):
        a = sa.get(s, nullstate)
        ld = l  # local state with possible dirty flag for compares
        if wctx.sub(s).dirty():
            ld = (l[0], l[1] + "+")
        if wctx == actx:  # overwrite
            a = ld

        if s in s2:
            # present on both sides: three-way compare of local/remote/ancestor
            r = s2[s]
            if ld == r or r == a:  # no change or local is newer
                sm[s] = l
                continue
            elif ld == a:  # other side changed
                debug(s, "other changed, get", r)
                wctx.sub(s).get(r, overwrite)
                sm[s] = r
            elif ld[0] != r[0]:  # sources differ
                if repo.ui.promptchoice(
                    _(' subrepository sources for %s differ\n'
                      'use (l)ocal source (%s) or (r)emote source (%s)?'
                      '$$ &Local $$ &Remote') % (s, l[0], r[0]), 0):
                    debug(s, "prompt changed, get", r)
                    wctx.sub(s).get(r, overwrite)
                    sm[s] = r
            elif ld[1] == a[1]:  # local side is unchanged
                debug(s, "other side changed, get", r)
                wctx.sub(s).get(r, overwrite)
                sm[s] = r
            else:
                debug(s, "both sides changed")
                option = repo.ui.promptchoice(
                    _(' subrepository %s diverged (local revision: %s, '
                      'remote revision: %s)\n'
                      '(M)erge, keep (l)ocal or keep (r)emote?'
                      '$$ &Merge $$ &Local $$ &Remote')
                    % (s, l[1][:12], r[1][:12]), 0)
                if option == 0:
                    wctx.sub(s).merge(r)
                    sm[s] = l
                    debug(s, "merge with", r)
                elif option == 1:
                    sm[s] = l
                    debug(s, "keep local subrepo revision", l)
                else:
                    wctx.sub(s).get(r, overwrite)
                    sm[s] = r
                    debug(s, "get remote subrepo revision", r)
        elif ld == a:  # remote removed, local unchanged
            debug(s, "remote removed, remove")
            wctx.sub(s).remove()
        elif a == nullstate:  # not present in remote or ancestor
            debug(s, "local added, keep")
            sm[s] = l
            continue
        else:
            # local changed it, remote removed it: ask the user
            if repo.ui.promptchoice(
                _(' local changed subrepository %s which remote removed\n'
                  'use (c)hanged version or (d)elete?'
                  '$$ &Changed $$ &Delete') % s, 0):
                debug(s, "prompt remove")
                wctx.sub(s).remove()

    # second pass: subrepos only present on the remote side
    for s, r in sorted(s2.items()):
        if s in s1:
            continue
        elif s not in sa:
            debug(s, "remote added, get", r)
            mctx.sub(s).get(r)
            sm[s] = r
        elif r != sa[s]:
            # remote changed it, local removed it: ask the user
            if repo.ui.promptchoice(
                _(' remote changed subrepository %s which local removed\n'
                  'use (c)hanged version or (d)elete?'
                  '$$ &Changed $$ &Delete') % s, 0) == 0:
                debug(s, "prompt recreate", r)
                wctx.sub(s).get(r)
                sm[s] = r

    # record merged .hgsubstate
    writestate(repo, sm)
    return sm
def _updateprompt(ui, sub, dirty, local, remote):
    """Ask whether to keep the local or the remote subrepo source.

    Returns the index chosen via ui.promptchoice (0 = local).
    """
    # pick the message template first, format once at the end
    if dirty:
        template = _(' subrepository sources for %s differ\n'
                     'use (l)ocal source (%s) or (r)emote source (%s)?\n'
                     '$$ &Local $$ &Remote')
    else:
        template = _(' subrepository sources for %s differ (in checked out '
                     'version)\n'
                     'use (l)ocal source (%s) or (r)emote source (%s)?\n'
                     '$$ &Local $$ &Remote')
    return ui.promptchoice(template % (subrelpath(sub), local, remote), 0)
def reporelpath(repo):
    """return path to this (sub)repo as seen from outermost repo"""
    # climb the _subparent chain to the outermost repository
    outer = repo
    while util.safehasattr(outer, '_subparent'):
        outer = outer._subparent
    prefix = outer.root.rstrip(os.sep)
    # drop the outer root plus the separator that follows it
    return repo.root[len(prefix) + 1:]
def subrelpath(sub):
    """return path to this subrepo as seen from outermost repo"""
    # an explicit _relpath always wins
    if util.safehasattr(sub, '_relpath'):
        return sub._relpath
    # with a backing repo, derive the path from it; otherwise fall back
    # to the recorded _path
    if util.safehasattr(sub, '_repo'):
        return reporelpath(sub._repo)
    return sub._path
def _abssource(repo, push=False, abort=True):
    """return pull/push path of repo - either based on parent repo .hgsub info
    or on the top repo config. Abort or return None if no source found."""
    if util.safehasattr(repo, '_subparent'):
        # we are a subrepo: resolve relative to the parent's source
        source = util.url(repo._subsource)
        if source.isabs():
            return str(source)
        source.path = posixpath.normpath(source.path)
        parent = _abssource(repo._subparent, push, abort=False)
        if parent:
            parent = util.url(util.pconvert(parent))
            parent.path = posixpath.join(parent.path or '', source.path)
            parent.path = posixpath.normpath(parent.path)
            return str(parent)
    else:  # recursion reached top repo
        if util.safehasattr(repo, '_subtoppath'):
            # path pinned for the duration of a push/pull operation
            return repo._subtoppath
        if push and repo.ui.config('paths', 'default-push'):
            return repo.ui.config('paths', 'default-push')
        if repo.ui.config('paths', 'default'):
            return repo.ui.config('paths', 'default')
        if repo.sharedpath != repo.path:
            # chop off the .hg component to get the default path form
            return os.path.dirname(repo.sharedpath)
    if abort:
        raise util.Abort(_("default path for subrepository not found"))
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Map each subpath to the context that owns it; ctx2 is filled in
    # first so ctx1 wins for paths present in both. The ctx2 entries
    # matter when .hgsub was modified (in ctx2) but not yet committed.
    owner = {}
    for subpath in ctx2.substate:
        owner[subpath] = ctx2
    for subpath in ctx1.substate:
        owner[subpath] = ctx1
    for subpath, ctx in sorted(owner.iteritems()):
        yield subpath, ctx.sub(subpath)
def subrepo(ctx, path):
    """return instance of the right subrepo class for subrepo in path"""
    # subrepo inherently violates our import layering rules
    # because it wants to make repo objects from deep inside the stack
    # so we manually delay the circular imports to not break
    # scripts that don't use our demand-loading
    global hg
    import hg as h
    hg = h

    # refuse subrepo paths that would escape the repository root
    scmutil.pathauditor(ctx._repo.root)(path)
    state = ctx.substate[path]
    if state[2] not in types:
        raise util.Abort(_('unknown subrepo type %s') % state[2])
    # instantiate the matching subrepo class with (source, revision)
    return types[state[2]](ctx, path, state[:2])
# subrepo classes need to implement the following abstract class:
class abstractsubrepo(object):
    """Abstract interface every concrete subrepo type must implement."""

    def storeclean(self, path):
        """
        returns true if the repository has not changed since it was last
        cloned from or pushed to a given repository.
        """
        return False

    def dirty(self, ignoreupdate=False):
        """returns true if the dirstate of the subrepo is dirty or does not
        match current stored state. If ignoreupdate is true, only check
        whether the subrepo has uncommitted changes in its dirstate.
        """
        raise NotImplementedError

    def basestate(self):
        """current working directory base state, disregarding .hgsubstate
        state and working directory modifications"""
        raise NotImplementedError

    def checknested(self, path):
        """check if path is a subrepository within this repository"""
        return False

    def commit(self, text, user, date):
        """commit the current changes to the subrepo with the given
        log message. Use given user and date if possible. Return the
        new state of the subrepo.
        """
        raise NotImplementedError

    def remove(self):
        """remove the subrepo

        (should verify the dirstate is not dirty first)
        """
        raise NotImplementedError

    def get(self, state, overwrite=False):
        """run whatever commands are needed to put the subrepo into
        this state
        """
        raise NotImplementedError

    def merge(self, state):
        """merge currently-saved state with the new state."""
        raise NotImplementedError

    def push(self, opts):
        """perform whatever action is analogous to 'hg push'

        This may be a no-op on some systems.
        """
        raise NotImplementedError

    def add(self, ui, match, dryrun, listsubrepos, prefix, explicitonly):
        # default: nothing added
        return []

    def status(self, rev2, **opts):
        # default: empty 7-tuple in 'hg status' order
        return [], [], [], [], [], [], []

    def diff(self, ui, diffopts, node2, match, prefix, **opts):
        pass

    def outgoing(self, ui, dest, opts):
        # 1 == "no outgoing changesets" exit convention
        return 1

    def incoming(self, ui, source, opts):
        return 1

    def files(self):
        """return filename iterator"""
        raise NotImplementedError

    def filedata(self, name):
        """return file data"""
        raise NotImplementedError

    def fileflags(self, name):
        """return file flags"""
        return ''

    def archive(self, ui, archiver, prefix, match=None):
        # add every (optionally matcher-filtered) subrepo file to the
        # archive, reporting progress; returns the number of files written
        if match is not None:
            files = [f for f in self.files() if match(f)]
        else:
            files = self.files()
        total = len(files)
        relpath = subrelpath(self)
        ui.progress(_('archiving (%s)') % relpath, 0,
                    unit=_('files'), total=total)
        for i, name in enumerate(files):
            flags = self.fileflags(name)
            # executable -> 0755, regular -> 0644 (Python 2 octal literals)
            mode = 'x' in flags and 0755 or 0644
            symlink = 'l' in flags
            archiver.addfile(os.path.join(prefix, self._path, name),
                             mode, symlink, self.filedata(name))
            ui.progress(_('archiving (%s)') % relpath, i + 1,
                        unit=_('files'), total=total)
        ui.progress(_('archiving (%s)') % relpath, None)
        return total

    def walk(self, match):
        '''
        walk recursively through the directory tree, finding all files
        matched by the match function
        '''
        pass

    def forget(self, ui, match, prefix):
        # default: (forgotten, failed) both empty
        return ([], [])

    def revert(self, ui, substate, *pats, **opts):
        ui.warn('%s: reverting %s subrepos is unsupported\n' \
                % (substate[0], substate[2]))
        return []
class hgsubrepo(abstractsubrepo):
    def __init__(self, ctx, path, state):
        self._path = path
        self._state = state  # (source, revision) tuple from .hgsub/.hgsubstate
        r = ctx._repo
        root = r.wjoin(path)
        create = False
        # create the nested repository on first use
        if not os.path.exists(os.path.join(root, '.hg')):
            create = True
            util.makedirs(root)
        self._repo = hg.repository(r.baseui, root, create=create)
        # propagate selected parent-ui settings into the subrepo's ui
        for s, k in [('ui', 'commitsubrepos')]:
            v = r.ui.config(s, k)
            if v:
                self._repo.ui.setconfig(s, k, v)
        # mark the repo so other code can tell it is being used as a subrepo
        self._repo.ui.setconfig('ui', '_usedassubrepo', 'True')
        self._initrepo(r, state[0], create)
    def storeclean(self, path):
        # Compare the cached store hash recorded for `path` against a
        # freshly computed one, line by line; any mismatch or length
        # difference means the store changed since the last push/pull.
        clean = True
        lock = self._repo.lock()
        itercache = self._calcstorehash(path)  # generator of hash lines
        try:
            for filehash in self._readstorehashcache(path):
                if filehash != itercache.next():
                    clean = False
                    break
        except StopIteration:
            # the cached and current pull states have a different size
            clean = False
        if clean:
            try:
                itercache.next()
                # the cached and current pull states have a different size
                clean = False
            except StopIteration:
                pass
        # NOTE(review): lock is not released if an unexpected exception
        # escapes the comparison above — confirm whether that is intended.
        lock.release()
        return clean
def _calcstorehash(self, remotepath):
'''calculate a unique "store hash"
This method is used to to detect when there are changes that may
require a push to a given remote path.'''
# sort the files that will be hashed in increasing (likely) file size
filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
yield '# %s\n' % _expandedabspath(remotepath)
for relname in filelist:
absname = os.path.normpath(self._repo.join(relname))
yield '%s = %s\n' % (relname, _calcfilehash(absname))
def _getstorehashcachepath(self, remotepath):
'''get a unique path for the store hash cache'''
return self._repo.join(os.path.join(
'cache', 'storehash', _getstorehashcachename(remotepath)))
def _readstorehashcache(self, remotepath):
'''read the store hash cache for a given remote repository'''
cachefile = self._getstorehashcachepath(remotepath)
if not os.path.exists(cachefile):
return ''
fd = open(cachefile, 'r')
pullstate = fd.readlines()
fd.close()
return pullstate
def _cachestorehash(self, remotepath):
'''cache the current store hash
Each remote repo requires its own store hash cache, because a subrepo
store may be "clean" versus a given remote repo, but not versus another
'''
cachefile = self._getstorehashcachepath(remotepath)
lock = self._repo.lock()
storehash = list(self._calcstorehash(remotepath))
cachedir = os.path.dirname(cachefile)
if not os.path.exists(cachedir):
util.makedirs(cachedir, notindexed=True)
fd = open(cachefile, 'w')
fd.writelines(storehash)
fd.close()
lock.release()
@annotatesubrepoerror
def _initrepo(self, parentrepo, source, create):
self._repo._subparent = parentrepo
self._repo._subsource = source
if create:
fp = self._repo.opener("hgrc", "w", text=True)
fp.write('[paths]\n')
def addpathconfig(key, value):
if value:
fp.write('%s = %s\n' % (key, value))
self._repo.ui.setconfig('paths', key, value)
defpath = _abssource(self._repo, abort=False)
defpushpath = _abssource(self._repo, True, abort=False)
addpathconfig('default', defpath)
if defpath != defpushpath:
addpathconfig('default-push', defpushpath)
fp.close()
@annotatesubrepoerror
def add(self, ui, match, dryrun, listsubrepos, prefix, explicitonly):
return cmdutil.add(ui, self._repo, match, dryrun, listsubrepos,
os.path.join(prefix, self._path), explicitonly)
@annotatesubrepoerror
def status(self, rev2, **opts):
try:
rev1 = self._state[1]
ctx1 = self._repo[rev1]
ctx2 = self._repo[rev2]
return self._repo.status(ctx1, ctx2, **opts)
except error.RepoLookupError, inst:
self._repo.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
% (inst, subrelpath(self)))
return [], [], [], [], [], [], []
@annotatesubrepoerror
def diff(self, ui, diffopts, node2, match, prefix, **opts):
try:
node1 = node.bin(self._state[1])
# We currently expect node2 to come from substate and be
# in hex format
if node2 is not None:
node2 = node.bin(node2)
cmdutil.diffordiffstat(ui, self._repo, diffopts,
node1, node2, match,
prefix=posixpath.join(prefix, self._path),
listsubrepos=True, **opts)
except error.RepoLookupError, inst:
self._repo.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
% (inst, subrelpath(self)))
@annotatesubrepoerror
def archive(self, ui, archiver, prefix, match=None):
self._get(self._state + ('hg',))
total = abstractsubrepo.archive(self, ui, archiver, prefix, match)
rev = self._state[1]
ctx = self._repo[rev]
for subpath in ctx.substate:
s = subrepo(ctx, subpath)
submatch = matchmod.narrowmatcher(subpath, match)
total += s.archive(
ui, archiver, os.path.join(prefix, self._path), submatch)
return total
@annotatesubrepoerror
def dirty(self, ignoreupdate=False):
r = self._state[1]
if r == '' and not ignoreupdate: # no state recorded
return True
w = self._repo[None]
if r != w.p1().hex() and not ignoreupdate:
# different version checked out
return True
return w.dirty() # working directory changed
def basestate(self):
return self._repo['.'].hex()
def checknested(self, path):
return self._repo._checknested(self._repo.wjoin(path))
@annotatesubrepoerror
def commit(self, text, user, date):
# don't bother committing in the subrepo if it's only been
# updated
if not self.dirty(True):
return self._repo['.'].hex()
self._repo.ui.debug("committing subrepo %s\n" % subrelpath(self))
n = self._repo.commit(text, user, date)
if not n:
return self._repo['.'].hex() # different version checked out
return node.hex(n)
@annotatesubrepoerror
def remove(self):
# we can't fully delete the repository as it may contain
# local-only history
self._repo.ui.note(_('removing subrepo %s\n') % subrelpath(self))
hg.clean(self._repo, node.nullid, False)
def _get(self, state):
source, revision, kind = state
if revision not in self._repo:
self._repo._subsource = source
srcurl = _abssource(self._repo)
other = hg.peer(self._repo, {}, srcurl)
if len(self._repo) == 0:
self._repo.ui.status(_('cloning subrepo %s from %s\n')
% (subrelpath(self), srcurl))
parentrepo = self._repo._subparent
shutil.rmtree(self._repo.path)
other, cloned = hg.clone(self._repo._subparent.baseui, {},
other, self._repo.root,
update=False)
self._repo = cloned.local()
self._initrepo(parentrepo, source, create=True)
self._cachestorehash(srcurl)
else:
self._repo.ui.status(_('pulling subrepo %s from %s\n')
% (subrelpath(self), srcurl))
cleansub = self.storeclean(srcurl)
remotebookmarks = other.listkeys('bookmarks')
self._repo.pull(other)
bookmarks.updatefromremote(self._repo.ui, self._repo,
remotebookmarks, srcurl)
if cleansub:
# keep the repo clean after pull
self._cachestorehash(srcurl)
@annotatesubrepoerror
def get(self, state, overwrite=False):
self._get(state)
source, revision, kind = state
self._repo.ui.debug("getting subrepo %s\n" % self._path)
hg.updaterepo(self._repo, revision, overwrite)
@annotatesubrepoerror
def merge(self, state):
self._get(state)
cur = self._repo['.']
dst = self._repo[state[1]]
anc = dst.ancestor(cur)
def mergefunc():
if anc == cur and dst.branch() == cur.branch():
self._repo.ui.debug("updating subrepo %s\n" % subrelpath(self))
hg.update(self._repo, state[1])
elif anc == dst:
self._repo.ui.debug("skipping subrepo %s\n" % subrelpath(self))
else:
self._repo.ui.debug("merging subrepo %s\n" % subrelpath(self))
hg.merge(self._repo, state[1], remind=False)
wctx = self._repo[None]
if self.dirty():
if anc != dst:
if _updateprompt(self._repo.ui, self, wctx.dirty(), cur, dst):
mergefunc()
else:
mergefunc()
else:
mergefunc()
@annotatesubrepoerror
def push(self, opts):
force = opts.get('force')
newbranch = opts.get('new_branch')
ssh = opts.get('ssh')
# push subrepos depth-first for coherent ordering
c = self._repo['']
subs = c.substate # only repos that are committed
for s in sorted(subs):
if c.sub(s).push(opts) == 0:
return False
dsturl = _abssource(self._repo, True)
if not force:
if self.storeclean(dsturl):
self._repo.ui.status(
_('no changes made to subrepo %s since last push to %s\n')
% (subrelpath(self), dsturl))
return None
self._repo.ui.status(_('pushing subrepo %s to %s\n') %
(subrelpath(self), dsturl))
other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
res = self._repo.push(other, force, newbranch=newbranch)
# the repo is now clean
self._cachestorehash(dsturl)
return res
@annotatesubrepoerror
def outgoing(self, ui, dest, opts):
return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
@annotatesubrepoerror
def incoming(self, ui, source, opts):
return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
@annotatesubrepoerror
def files(self):
rev = self._state[1]
ctx = self._repo[rev]
return ctx.manifest()
def filedata(self, name):
rev = self._state[1]
return self._repo[rev][name].data()
def fileflags(self, name):
rev = self._state[1]
ctx = self._repo[rev]
return ctx.flags(name)
def walk(self, match):
ctx = self._repo[None]
return ctx.walk(match)
@annotatesubrepoerror
def forget(self, ui, match, prefix):
return cmdutil.forget(ui, self._repo, match,
os.path.join(prefix, self._path), True)
@annotatesubrepoerror
def revert(self, ui, substate, *pats, **opts):
# reverting a subrepo is a 2 step process:
# 1. if the no_backup is not set, revert all modified
# files inside the subrepo
# 2. update the subrepo to the revision specified in
# the corresponding substate dictionary
ui.status(_('reverting subrepo %s\n') % substate[0])
if not opts.get('no_backup'):
# Revert all files on the subrepo, creating backups
# Note that this will not recursively revert subrepos
# We could do it if there was a set:subrepos() predicate
opts = opts.copy()
opts['date'] = None
opts['rev'] = substate[1]
pats = []
if not opts.get('all'):
pats = ['set:modified()']
self.filerevert(ui, *pats, **opts)
# Update the repo to the revision specified in the given substate
self.get(substate, overwrite=True)
def filerevert(self, ui, *pats, **opts):
ctx = self._repo[opts['rev']]
parents = self._repo.dirstate.parents()
if opts.get('all'):
pats = ['set:modified()']
else:
pats = []
cmdutil.revert(ui, self._repo, ctx, parents, *pats, **opts)
class svnsubrepo(abstractsubrepo):
    """Subrepo backed by a Subversion working copy, driven by the
    external ``svn`` executable."""

    def __init__(self, ctx, path, state):
        self._path = path
        self._state = state
        self._ctx = ctx
        self._ui = ctx._repo.ui
        self._exe = util.findexe('svn')
        if not self._exe:
            raise util.Abort(_("'svn' executable not found for subrepo '%s'")
                             % self._path)

    def _svncommand(self, commands, filename='', failok=False):
        """Run an svn command; return (stdout, stderr).

        With ``failok`` a non-zero exit does not abort.  ``filename=None``
        suppresses appending a target path.
        """
        cmd = [self._exe]
        extrakw = {}
        if not self._ui.interactive():
            # Making stdin be a pipe should prevent svn from behaving
            # interactively even if we can't pass --non-interactive.
            extrakw['stdin'] = subprocess.PIPE
            # Starting in svn 1.5 --non-interactive is a global flag
            # instead of being per-command, but we need to support 1.4 so
            # we have to be intelligent about what commands take
            # --non-interactive.
            if commands[0] in ('update', 'checkout', 'commit'):
                cmd.append('--non-interactive')
        cmd.extend(commands)
        if filename is not None:
            path = os.path.join(self._ctx._repo.origroot, self._path, filename)
            cmd.append(path)
        env = dict(os.environ)
        # Avoid localized output, preserve current locale for everything else.
        lc_all = env.get('LC_ALL')
        if lc_all:
            env['LANG'] = lc_all
            del env['LC_ALL']
        env['LC_MESSAGES'] = 'C'
        p = subprocess.Popen(cmd, bufsize=-1, close_fds=util.closefds,
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                             universal_newlines=True, env=env, **extrakw)
        stdout, stderr = p.communicate()
        stderr = stderr.strip()
        if not failok:
            if p.returncode:
                raise util.Abort(stderr or 'exited with code %d'
                                 % p.returncode)
            if stderr:
                self._ui.warn(stderr + '\n')
        return stdout, stderr

    @propertycache
    def _svnversion(self):
        # (major, minor) of the svn client, cached per instance
        output, err = self._svncommand(['--version', '--quiet'], filename=None)
        m = re.search(r'^(\d+)\.(\d+)', output)
        if not m:
            raise util.Abort(_('cannot retrieve svn tool version'))
        return (int(m.group(1)), int(m.group(2)))

    def _wcrevs(self):
        # Get the working directory revision as well as the last
        # commit revision so we can compare the subrepo state with
        # both. We used to store the working directory one.
        output, err = self._svncommand(['info', '--xml'])
        doc = xml.dom.minidom.parseString(output)
        entries = doc.getElementsByTagName('entry')
        lastrev, rev = '0', '0'
        if entries:
            rev = str(entries[0].getAttribute('revision')) or '0'
            commits = entries[0].getElementsByTagName('commit')
            if commits:
                lastrev = str(commits[0].getAttribute('revision')) or '0'
        return (lastrev, rev)

    def _wcrev(self):
        return self._wcrevs()[0]

    def _wcchanged(self):
        """Return (changes, extchanges, missing) where changes is True
        if the working directory was changed, extchanges is
        True if any of these changes concern an external entry and missing
        is True if any change is a missing entry.
        """
        output, err = self._svncommand(['status', '--xml'])
        externals, changes, missing = [], [], []
        doc = xml.dom.minidom.parseString(output)
        for e in doc.getElementsByTagName('entry'):
            s = e.getElementsByTagName('wc-status')
            if not s:
                continue
            item = s[0].getAttribute('item')
            props = s[0].getAttribute('props')
            path = e.getAttribute('path')
            if item == 'external':
                externals.append(path)
            elif item == 'missing':
                missing.append(path)
            if (item not in ('', 'normal', 'unversioned', 'external')
                or props not in ('', 'none', 'normal')):
                changes.append(path)
        for path in changes:
            for ext in externals:
                if path == ext or path.startswith(ext + os.sep):
                    return True, True, bool(missing)
        return bool(changes), False, bool(missing)

    def dirty(self, ignoreupdate=False):
        if not self._wcchanged()[0]:
            if self._state[1] in self._wcrevs() or ignoreupdate:
                return False
        return True

    def basestate(self):
        lastrev, rev = self._wcrevs()
        if lastrev != rev:
            # Last committed rev is not the same than rev. We would
            # like to take lastrev but we do not know if the subrepo
            # URL exists at lastrev. Test it and fallback to rev it
            # is not there.
            try:
                self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
                return lastrev
            except error.Abort:
                pass
        return rev

    @annotatesubrepoerror
    def commit(self, text, user, date):
        # user and date are out of our hands since svn is centralized
        changed, extchanged, missing = self._wcchanged()
        if not changed:
            return self.basestate()
        if extchanged:
            # Do not try to commit externals
            raise util.Abort(_('cannot commit svn externals'))
        if missing:
            # svn can commit with missing entries but aborting like hg
            # seems a better approach.
            raise util.Abort(_('cannot commit missing svn entries'))
        commitinfo, err = self._svncommand(['commit', '-m', text])
        self._ui.status(commitinfo)
        newrev = re.search('Committed revision ([0-9]+).', commitinfo)
        if not newrev:
            if not commitinfo.strip():
                # Sometimes, our definition of "changed" differs from
                # svn one. For instance, svn ignores missing files
                # when committing. If there are only missing files, no
                # commit is made, no output and no error code.
                raise util.Abort(_('failed to commit svn changes'))
            raise util.Abort(commitinfo.splitlines()[-1])
        newrev = newrev.groups()[0]
        self._ui.status(self._svncommand(['update', '-r', newrev])[0])
        return newrev

    @annotatesubrepoerror
    def remove(self):
        if self.dirty():
            # BUGFIX: apply % AFTER _() so the untranslated template is
            # looked up in the message catalog (the old code formatted
            # first, so the lookup could never match); this also matches
            # gitsubrepo.remove.
            self._ui.warn(_('not removing repo %s because '
                            'it has changes.\n') % self._path)
            return
        self._ui.note(_('removing subrepo %s\n') % self._path)

        def onerror(function, path, excinfo):
            if function is not os.remove:
                raise
            # read-only files cannot be unlinked under Windows
            s = os.stat(path)
            if (s.st_mode & stat.S_IWRITE) != 0:
                raise
            os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE)
            os.remove(path)

        path = self._ctx._repo.wjoin(self._path)
        shutil.rmtree(path, onerror=onerror)
        try:
            # prune now-empty parent directories, best effort
            os.removedirs(os.path.dirname(path))
        except OSError:
            pass

    @annotatesubrepoerror
    def get(self, state, overwrite=False):
        if overwrite:
            self._svncommand(['revert', '--recursive'])
        args = ['checkout']
        if self._svnversion >= (1, 5):
            args.append('--force')
        # The revision must be specified at the end of the URL to properly
        # update to a directory which has since been deleted and recreated.
        args.append('%s@%s' % (state[0], state[1]))
        status, err = self._svncommand(args, failok=True)
        if not re.search('Checked out revision [0-9]+.', status):
            if ('is already a working copy for a different URL' in err
                and (self._wcchanged()[:2] == (False, False))):
                # obstructed but clean working copy, so just blow it away.
                self.remove()
                self.get(state, overwrite=False)
                return
            raise util.Abort((status or err).splitlines()[-1])
        self._ui.status(status)

    @annotatesubrepoerror
    def merge(self, state):
        old = self._state[1]
        new = state[1]
        wcrev = self._wcrev()
        if new != wcrev:
            dirty = old == wcrev or self._wcchanged()[0]
            if _updateprompt(self._ui, self, dirty, wcrev, new):
                self.get(state, False)

    def push(self, opts):
        # push is a no-op for SVN
        return True

    @annotatesubrepoerror
    def files(self):
        output = self._svncommand(['list', '--recursive', '--xml'])[0]
        doc = xml.dom.minidom.parseString(output)
        paths = []
        for e in doc.getElementsByTagName('entry'):
            kind = str(e.getAttribute('kind'))
            if kind != 'file':
                continue
            name = ''.join(c.data for c
                           in e.getElementsByTagName('name')[0].childNodes
                           if c.nodeType == c.TEXT_NODE)
            paths.append(name.encode('utf-8'))
        return paths

    def filedata(self, name):
        return self._svncommand(['cat'], name)[0]
class gitsubrepo(abstractsubrepo):
def __init__(self, ctx, path, state):
self._state = state
self._ctx = ctx
self._path = path
self._relpath = os.path.join(reporelpath(ctx._repo), path)
self._abspath = ctx._repo.wjoin(path)
self._subparent = ctx._repo
self._ui = ctx._repo.ui
self._ensuregit()
def _ensuregit(self):
try:
self._gitexecutable = 'git'
out, err = self._gitnodir(['--version'])
except OSError, e:
if e.errno != 2 or os.name != 'nt':
raise
self._gitexecutable = 'git.cmd'
out, err = self._gitnodir(['--version'])
m = re.search(r'^git version (\d+)\.(\d+)\.(\d+)', out)
if not m:
self._ui.warn(_('cannot retrieve git version'))
return
version = (int(m.group(1)), m.group(2), m.group(3))
# git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
# despite the docstring comment. For now, error on 1.4.0, warn on
# 1.5.0 but attempt to continue.
if version < (1, 5, 0):
raise util.Abort(_('git subrepo requires at least 1.6.0 or later'))
elif version < (1, 6, 0):
self._ui.warn(_('git subrepo requires at least 1.6.0 or later'))
def _gitcommand(self, commands, env=None, stream=False):
return self._gitdir(commands, env=env, stream=stream)[0]
def _gitdir(self, commands, env=None, stream=False):
return self._gitnodir(commands, env=env, stream=stream,
cwd=self._abspath)
def _gitnodir(self, commands, env=None, stream=False, cwd=None):
"""Calls the git command
The methods tries to call the git command. versions prior to 1.6.0
are not supported and very probably fail.
"""
self._ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
# unless ui.quiet is set, print git's stderr,
# which is mostly progress and useful info
errpipe = None
if self._ui.quiet:
errpipe = open(os.devnull, 'w')
p = subprocess.Popen([self._gitexecutable] + commands, bufsize=-1,
cwd=cwd, env=env, close_fds=util.closefds,
stdout=subprocess.PIPE, stderr=errpipe)
if stream:
return p.stdout, None
retdata = p.stdout.read().strip()
# wait for the child to exit to avoid race condition.
p.wait()
if p.returncode != 0 and p.returncode != 1:
# there are certain error codes that are ok
command = commands[0]
if command in ('cat-file', 'symbolic-ref'):
return retdata, p.returncode
# for all others, abort
raise util.Abort('git %s error %d in %s' %
(command, p.returncode, self._relpath))
return retdata, p.returncode
def _gitmissing(self):
return not os.path.exists(os.path.join(self._abspath, '.git'))
def _gitstate(self):
return self._gitcommand(['rev-parse', 'HEAD'])
def _gitcurrentbranch(self):
current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
if err:
current = None
return current
def _gitremote(self, remote):
out = self._gitcommand(['remote', 'show', '-n', remote])
line = out.split('\n')[1]
i = line.index('URL: ') + len('URL: ')
return line[i:]
def _githavelocally(self, revision):
out, code = self._gitdir(['cat-file', '-e', revision])
return code == 0
def _gitisancestor(self, r1, r2):
base = self._gitcommand(['merge-base', r1, r2])
return base == r1
def _gitisbare(self):
return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
def _gitupdatestat(self):
"""This must be run before git diff-index.
diff-index only looks at changes to file stat;
this command looks at file contents and updates the stat."""
self._gitcommand(['update-index', '-q', '--refresh'])
def _gitbranchmap(self):
'''returns 2 things:
a map from git branch to revision
a map from revision to branches'''
branch2rev = {}
rev2branch = {}
out = self._gitcommand(['for-each-ref', '--format',
'%(objectname) %(refname)'])
for line in out.split('\n'):
revision, ref = line.split(' ')
if (not ref.startswith('refs/heads/') and
not ref.startswith('refs/remotes/')):
continue
if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
continue # ignore remote/HEAD redirects
branch2rev[ref] = revision
rev2branch.setdefault(revision, []).append(ref)
return branch2rev, rev2branch
def _gittracking(self, branches):
'return map of remote branch to local tracking branch'
# assumes no more than one local tracking branch for each remote
tracking = {}
for b in branches:
if b.startswith('refs/remotes/'):
continue
bname = b.split('/', 2)[2]
remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
if remote:
ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
tracking['refs/remotes/%s/%s' %
(remote, ref.split('/', 2)[2])] = b
return tracking
def _abssource(self, source):
if '://' not in source:
# recognize the scp syntax as an absolute source
colon = source.find(':')
if colon != -1 and '/' not in source[:colon]:
return source
self._subsource = source
return _abssource(self)
def _fetch(self, source, revision):
if self._gitmissing():
source = self._abssource(source)
self._ui.status(_('cloning subrepo %s from %s\n') %
(self._relpath, source))
self._gitnodir(['clone', source, self._abspath])
if self._githavelocally(revision):
return
self._ui.status(_('pulling subrepo %s from %s\n') %
(self._relpath, self._gitremote('origin')))
# try only origin: the originally cloned repo
self._gitcommand(['fetch'])
if not self._githavelocally(revision):
raise util.Abort(_("revision %s does not exist in subrepo %s\n") %
(revision, self._relpath))
@annotatesubrepoerror
def dirty(self, ignoreupdate=False):
if self._gitmissing():
return self._state[1] != ''
if self._gitisbare():
return True
if not ignoreupdate and self._state[1] != self._gitstate():
# different version checked out
return True
# check for staged changes or modified files; ignore untracked files
self._gitupdatestat()
out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
return code == 1
def basestate(self):
return self._gitstate()
@annotatesubrepoerror
def get(self, state, overwrite=False):
source, revision, kind = state
if not revision:
self.remove()
return
self._fetch(source, revision)
# if the repo was set to be bare, unbare it
if self._gitisbare():
self._gitcommand(['config', 'core.bare', 'false'])
if self._gitstate() == revision:
self._gitcommand(['reset', '--hard', 'HEAD'])
return
elif self._gitstate() == revision:
if overwrite:
# first reset the index to unmark new files for commit, because
# reset --hard will otherwise throw away files added for commit,
# not just unmark them.
self._gitcommand(['reset', 'HEAD'])
self._gitcommand(['reset', '--hard', 'HEAD'])
return
branch2rev, rev2branch = self._gitbranchmap()
def checkout(args):
cmd = ['checkout']
if overwrite:
# first reset the index to unmark new files for commit, because
# the -f option will otherwise throw away files added for
# commit, not just unmark them.
self._gitcommand(['reset', 'HEAD'])
cmd.append('-f')
self._gitcommand(cmd + args)
def rawcheckout():
# no branch to checkout, check it out with no branch
self._ui.warn(_('checking out detached HEAD in subrepo %s\n') %
self._relpath)
self._ui.warn(_('check out a git branch if you intend '
'to make changes\n'))
checkout(['-q', revision])
if revision not in rev2branch:
rawcheckout()
return
branches = rev2branch[revision]
firstlocalbranch = None
for b in branches:
if b == 'refs/heads/master':
# master trumps all other branches
checkout(['refs/heads/master'])
return
if not firstlocalbranch and not b.startswith('refs/remotes/'):
firstlocalbranch = b
if firstlocalbranch:
checkout([firstlocalbranch])
return
tracking = self._gittracking(branch2rev.keys())
# choose a remote branch already tracked if possible
remote = branches[0]
if remote not in tracking:
for b in branches:
if b in tracking:
remote = b
break
if remote not in tracking:
# create a new local tracking branch
local = remote.split('/', 3)[3]
checkout(['-b', local, remote])
elif self._gitisancestor(branch2rev[tracking[remote]], remote):
# When updating to a tracked remote branch,
# if the local tracking branch is downstream of it,
# a normal `git pull` would have performed a "fast-forward merge"
# which is equivalent to updating the local branch to the remote.
# Since we are only looking at branching at update, we need to
# detect this situation and perform this action lazily.
if tracking[remote] != self._gitcurrentbranch():
checkout([tracking[remote]])
self._gitcommand(['merge', '--ff', remote])
else:
# a real merge would be required, just checkout the revision
rawcheckout()
@annotatesubrepoerror
def commit(self, text, user, date):
if self._gitmissing():
raise util.Abort(_("subrepo %s is missing") % self._relpath)
cmd = ['commit', '-a', '-m', text]
env = os.environ.copy()
if user:
cmd += ['--author', user]
if date:
# git's date parser silently ignores when seconds < 1e9
# convert to ISO8601
env['GIT_AUTHOR_DATE'] = util.datestr(date,
'%Y-%m-%dT%H:%M:%S %1%2')
self._gitcommand(cmd, env=env)
# make sure commit works otherwise HEAD might not exist under certain
# circumstances
return self._gitstate()
@annotatesubrepoerror
def merge(self, state):
source, revision, kind = state
self._fetch(source, revision)
base = self._gitcommand(['merge-base', revision, self._state[1]])
self._gitupdatestat()
out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
def mergefunc():
if base == revision:
self.get(state) # fast forward merge
elif base != self._state[1]:
self._gitcommand(['merge', '--no-commit', revision])
if self.dirty():
if self._gitstate() != revision:
dirty = self._gitstate() == self._state[1] or code != 0
if _updateprompt(self._ui, self, dirty,
self._state[1][:7], revision[:7]):
mergefunc()
else:
mergefunc()
@annotatesubrepoerror
def push(self, opts):
force = opts.get('force')
if not self._state[1]:
return True
if self._gitmissing():
raise util.Abort(_("subrepo %s is missing") % self._relpath)
# if a branch in origin contains the revision, nothing to do
branch2rev, rev2branch = self._gitbranchmap()
if self._state[1] in rev2branch:
for b in rev2branch[self._state[1]]:
if b.startswith('refs/remotes/origin/'):
return True
for b, revision in branch2rev.iteritems():
if b.startswith('refs/remotes/origin/'):
if self._gitisancestor(self._state[1], revision):
return True
# otherwise, try to push the currently checked out branch
cmd = ['push']
if force:
cmd.append('--force')
current = self._gitcurrentbranch()
if current:
# determine if the current branch is even useful
if not self._gitisancestor(self._state[1], current):
self._ui.warn(_('unrelated git branch checked out '
'in subrepo %s\n') % self._relpath)
return False
self._ui.status(_('pushing branch %s of subrepo %s\n') %
(current.split('/', 2)[2], self._relpath))
self._gitcommand(cmd + ['origin', current])
return True
else:
self._ui.warn(_('no branch checked out in subrepo %s\n'
'cannot push revision %s\n') %
(self._relpath, self._state[1]))
return False
@annotatesubrepoerror
def remove(self):
if self._gitmissing():
return
if self.dirty():
self._ui.warn(_('not removing repo %s because '
'it has changes.\n') % self._relpath)
return
# we can't fully delete the repository as it may contain
# local-only history
self._ui.note(_('removing subrepo %s\n') % self._relpath)
self._gitcommand(['config', 'core.bare', 'true'])
for f in os.listdir(self._abspath):
if f == '.git':
continue
path = os.path.join(self._abspath, f)
if os.path.isdir(path) and not os.path.islink(path):
shutil.rmtree(path)
else:
os.remove(path)
def archive(self, ui, archiver, prefix, match=None):
total = 0
source, revision = self._state
if not revision:
return total
self._fetch(source, revision)
# Parse git's native archive command.
# This should be much faster than manually traversing the trees
# and objects with many subprocess calls.
tarstream = self._gitcommand(['archive', revision], stream=True)
tar = tarfile.open(fileobj=tarstream, mode='r|')
relpath = subrelpath(self)
ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'))
for i, info in enumerate(tar):
if info.isdir():
continue
if match and not match(info.name):
continue
if info.issym():
data = info.linkname
else:
data = tar.extractfile(info).read()
archiver.addfile(os.path.join(prefix, self._path, info.name),
info.mode, info.issym(), data)
total += 1
ui.progress(_('archiving (%s)') % relpath, i + 1,
unit=_('files'))
ui.progress(_('archiving (%s)') % relpath, None)
return total
@annotatesubrepoerror
def status(self, rev2, **opts):
rev1 = self._state[1]
if self._gitmissing() or not rev1:
# if the repo is missing, return no results
return [], [], [], [], [], [], []
modified, added, removed = [], [], []
self._gitupdatestat()
if rev2:
command = ['diff-tree', rev1, rev2]
else:
command = ['diff-index', rev1]
out = self._gitcommand(command)
for line in out.split('\n'):
tab = line.find('\t')
if tab == -1:
continue
status, f = line[tab - 1], line[tab + 1:]
if status == 'M':
modified.append(f)
elif status == 'A':
added.append(f)
elif status == 'D':
removed.append(f)
deleted = unknown = ignored = clean = []
return modified, added, removed, deleted, unknown, ignored, clean
# Dispatch table: maps a substate "kind" string to the subrepo
# implementation class that handles it.
types = {
    'hg': hgsubrepo,
    'svn': svnsubrepo,
    'git': gitsubrepo,
    }
|
vanceeasleaf/aces
|
aces/materials/MoN2_alpha_rect.py
|
from aces.materials.POSCAR import structure as Material
class structure(Material):
    """alpha-MoN2 in a rectangular cell, built on the generic
    POSCAR-based Material."""

    def getPOSCAR(self):
        # Serve the pre-relaxed geometry hard-coded below.
        return self.getMinimized()

    def csetup(self):
        # Build a band path of 8 points on a circle of radius 0.5 in the
        # kx-ky plane, clipped to the Brillouin-zone edge (+-0.5) where a
        # component exceeds 0.2, looping Gamma -> X0..X7 -> Gamma.
        from ase.dft.kpoints import ibz_points
        #self.bandpoints=ibz_points['hexagonal']
        import numpy as np
        x=0.5*np.cos(np.arange(8)/8.0*2.0*np.pi)
        y=0.5*np.sin(np.arange(8)/8.0*2.0*np.pi)
        self.bandpath=['Gamma']
        for i in range(8):
            if(np.abs(x[i])>0.2):x[i]/=np.abs(x[i])*2.0
            if(np.abs(y[i])>0.2):y[i]/=np.abs(y[i])*2.0
            # NOTE(review): self.bandpoints is never initialized here —
            # presumably the Material base class provides it; confirm.
            self.bandpoints['X'+str(i)]=[x[i],y[i],0.0]
            self.bandpath.append('X'+str(i))
        self.bandpath.append('Gamma')
        #self.bandpath=['Gamma',"X2"]

    def getMinimized(self):
        # Pre-minimized VASP POSCAR: 2 Mo + 4 N in a rectangular cell
        # with 25 A vacuum along z.
        return """Mo N
1.0000000000000000
2.9916000366000000 0.0000000000000000 0.0000000000000000
0.0000000000000000 5.1814560994168932 0.0000000000000000
0.0000000000000000 0.0000000000000000 25.0000000000000000
Mo N
2 4
Direct
0.5000000000000000 0.5000000000000000 0.5000000000000000
0.0000000000000000 0.0000000000000000 0.5000000000000000
0.5000000000000000 0.8333333333333335 0.4555099610000000
0.5000000000000000 0.8333333333333335 0.5444900390000000
0.0000000000000000 0.3333333333333333 0.4555099610000000
0.0000000000000000 0.3333333333333333 0.5444900390000000
"""
|
MrSenko/Nitrate
|
tcms/testcases/migrations/0001_initial.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import tcms.core.models.base
from django.conf import settings
import tcms.core.models.fields
class Migration(migrations.Migration):
    """Initial schema for the testcases app.

    Creates TestCase plus its satellite tables (contacts, attachments,
    bugs, bug systems, categories, components, e-mail settings, plan/tag
    links, versioned case text and statuses). The explicit ``db_table``
    names and bytes-typed ``db_column`` values preserve a legacy database
    layout; do not edit this migration once applied.
    """

    dependencies = [
        ('management', '0001_initial'),
        ('testplans', '0001_initial'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Contact',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('object_pk', models.PositiveIntegerField(null=True, verbose_name=b'object ID', blank=True)),
                ('name', models.CharField(max_length=50)),
                ('email', models.EmailField(max_length=254, db_index=True)),
                ('date_joined', models.DateTimeField(auto_now_add=True)),
            ],
            options={
                'db_table': 'tcms_contacts',
            },
        ),
        migrations.CreateModel(
            name='TestCase',
            fields=[
                ('case_id', models.AutoField(max_length=10, serialize=False, primary_key=True)),
                ('create_date', models.DateTimeField(auto_now_add=True, db_column=b'creation_date')),
                ('is_automated', models.IntegerField(default=0, db_column=b'isautomated')),
                ('is_automated_proposed', models.BooleanField(default=False)),
                ('script', models.TextField(blank=True)),
                ('arguments', models.TextField(blank=True)),
                ('extra_link', models.CharField(default=None, max_length=1024, null=True, blank=True)),
                ('summary', models.CharField(max_length=255, blank=True)),
                ('requirement', models.CharField(max_length=255, blank=True)),
                ('alias', models.CharField(max_length=255, blank=True)),
                ('estimated_time', tcms.core.models.fields.DurationField(default=0, db_column=b'estimated_time')),
                ('notes', models.TextField(blank=True)),
            ],
            options={
                'db_table': 'test_cases',
            },
            bases=(models.Model, tcms.core.models.base.UrlMixin),
        ),
        migrations.CreateModel(
            name='TestCaseAttachment',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
            ],
            options={
                'db_table': 'test_case_attachments',
            },
        ),
        migrations.CreateModel(
            name='TestCaseBug',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('bug_id', models.CharField(max_length=25)),
                ('summary', models.CharField(max_length=255, null=True, blank=True)),
                ('description', models.TextField(null=True, blank=True)),
            ],
            options={
                'db_table': 'test_case_bugs',
            },
            bases=(models.Model, tcms.core.models.base.UrlMixin),
        ),
        migrations.CreateModel(
            name='TestCaseBugSystem',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=255)),
                ('description', models.TextField(blank=True)),
                ('url_reg_exp', models.CharField(max_length=8192)),
                ('validate_reg_exp', models.CharField(max_length=128)),
            ],
            options={
                'db_table': 'test_case_bug_systems',
            },
            bases=(models.Model, tcms.core.models.base.UrlMixin),
        ),
        migrations.CreateModel(
            name='TestCaseCategory',
            fields=[
                ('id', models.AutoField(serialize=False, primary_key=True, db_column=b'category_id')),
                ('name', models.CharField(max_length=255)),
                ('description', models.TextField(blank=True)),
                ('product', models.ForeignKey(related_name='category', to='management.Product')),
            ],
            options={
                'db_table': 'test_case_categories',
                'verbose_name_plural': 'test case categories',
            },
            bases=(models.Model, tcms.core.models.base.UrlMixin),
        ),
        migrations.CreateModel(
            name='TestCaseComponent',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('case', models.ForeignKey(to='testcases.TestCase')),
                ('component', models.ForeignKey(to='management.Component')),
            ],
            options={
                'db_table': 'test_case_components',
            },
        ),
        migrations.CreateModel(
            name='TestCaseEmailSettings',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('notify_on_case_update', models.BooleanField(default=False)),
                ('notify_on_case_delete', models.BooleanField(default=False)),
                ('auto_to_case_author', models.BooleanField(default=False)),
                ('auto_to_case_tester', models.BooleanField(default=False)),
                ('auto_to_run_manager', models.BooleanField(default=False)),
                ('auto_to_run_tester', models.BooleanField(default=False)),
                ('auto_to_case_run_assignee', models.BooleanField(default=False)),
                ('case', models.OneToOneField(related_name='email_settings', to='testcases.TestCase')),
            ],
        ),
        migrations.CreateModel(
            name='TestCasePlan',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('sortkey', models.IntegerField(null=True, blank=True)),
                ('case', models.ForeignKey(to='testcases.TestCase')),
                ('plan', models.ForeignKey(to='testplans.TestPlan')),
            ],
            options={
                'db_table': 'test_case_plans',
            },
        ),
        migrations.CreateModel(
            name='TestCaseStatus',
            fields=[
                ('id', models.AutoField(max_length=6, serialize=False, primary_key=True, db_column=b'case_status_id')),
                ('name', models.CharField(max_length=255)),
                ('description', models.TextField(null=True, blank=True)),
            ],
            options={
                'db_table': 'test_case_status',
                'verbose_name': 'Test case status',
                'verbose_name_plural': 'Test case status',
            },
            bases=(models.Model, tcms.core.models.base.UrlMixin),
        ),
        migrations.CreateModel(
            name='TestCaseTag',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('user', models.IntegerField(default=b'0', db_column=b'userid')),
                ('case', models.ForeignKey(to='testcases.TestCase')),
                ('tag', models.ForeignKey(to='management.TestTag')),
            ],
            options={
                'db_table': 'test_case_tags',
            },
        ),
        migrations.CreateModel(
            name='TestCaseText',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('case_text_version', models.IntegerField()),
                ('create_date', models.DateTimeField(auto_now_add=True, db_column=b'creation_ts')),
                ('action', models.TextField(blank=True)),
                ('effect', models.TextField(blank=True)),
                ('setup', models.TextField(blank=True)),
                ('breakdown', models.TextField(blank=True)),
                ('action_checksum', models.CharField(max_length=32)),
                ('effect_checksum', models.CharField(max_length=32)),
                ('setup_checksum', models.CharField(max_length=32)),
                ('breakdown_checksum', models.CharField(max_length=32)),
                ('author', models.ForeignKey(to=settings.AUTH_USER_MODEL, db_column=b'who')),
                ('case', models.ForeignKey(related_name='text', to='testcases.TestCase')),
            ],
            options={
                'ordering': ['case', '-case_text_version'],
                'db_table': 'test_case_texts',
            },
            bases=(models.Model, tcms.core.models.base.UrlMixin),
        ),
        # These FKs are added after TestCaseBugSystem/TestCase exist so the
        # default bug_system (pk=1) has a target model to point at.
        migrations.AddField(
            model_name='testcasebug',
            name='bug_system',
            field=models.ForeignKey(default=1, to='testcases.TestCaseBugSystem'),
        ),
        migrations.AddField(
            model_name='testcasebug',
            name='case',
            field=models.ForeignKey(related_name='case_bug', to='testcases.TestCase'),
        ),
    ]
|
kadamski/func
|
func/overlord/func_command.py
|
#!/usr/bin/python
## func command line interface & client lib
##
## Copyright 2007,2008 Red Hat, Inc
## +AUTHORS
##
## This software may be freely redistributed under the terms of the GNU
## general public license.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
import sys
import command
import func.module_loader as module_loader
from func.overlord import client,base_command
class FuncCommandLine(command.Command):
    """Top-level command for the func CLI.

    Loads every module under func/overlord/cmd_modules/ as a sub-command,
    then parses "hostname glob" / sub-command arguments.
    """
    name = "func"
    usage = "func [--options] \"hostname glob\" module method [arg1] [arg2] ... "

    subCommandClasses = []

    def __init__(self):
        # NOTE(review): this appends to the *class* attribute, so creating
        # a second instance would accumulate duplicates — confirm whether
        # single instantiation is guaranteed before changing it.
        modules = module_loader.load_modules('func/overlord/cmd_modules/', base_command.BaseCommand)
        for x in modules.keys():
            self.subCommandClasses.append(modules[x].__class__)
        command.Command.__init__(self)

    def do(self, args):
        # The top-level command does no work itself; sub-commands do.
        pass

    def addOptions(self):
        self.parser.add_option('', '--version', action="store_true",
                               help="show version information")

    # just some ugly goo to try to guess if arg[1] is hostnamegoo or
    # a command name
    def _isGlob(self, str):
        """Return True if the string contains any shell-glob character.

        BUG FIX: str.find() returns -1 (which is truthy) when the character
        is absent, so the old ``if str.find("*") or ...`` test classified
        virtually every string as a glob. Use substring membership instead.
        """
        return any(ch in str for ch in "*?[]")

    def handleArguments(self, args):
        """Consume args[0] as the minion spec when it looks like one."""
        if len(args) < 2:
            sys.stderr.write("see the func manpage for usage\n")
            sys.exit(411)
        minion_string = args[0]
        # try to be clever about this for now
        if client.is_minion(minion_string) or self._isGlob(minion_string):
            self.server_spec = minion_string
            args.pop(0)
        # if it doesn't look like server, assume it
        # is a sub command? that seems wrong, what about
        # typo's and such? How to catch that? -akl
        # maybe a class variable self.data on Command?

    def handleOptions(self, options):
        if options.version:
            #FIXME
            sys.stderr.write("version is NOT IMPLEMENTED YET\n")
|
arunkgupta/gramps
|
gramps/gen/filters/rules/_hastagbase.py
|
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2010 Nick Hall
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# $Id$
"""
Rule that checks for an object with a particular tag.
"""
#-------------------------------------------------------------------------
#
# Standard Python modules
#
#-------------------------------------------------------------------------
from ...ggettext import gettext as _
#-------------------------------------------------------------------------
#
# GRAMPS modules
#
#-------------------------------------------------------------------------
from . import Rule
#-------------------------------------------------------------------------
#
# HasTag
#
#-------------------------------------------------------------------------
class HasTagBase(Rule):
    """
    Rule matching objects that carry a particular tag.
    """
    labels      = [ 'Tag:' ]
    name        = 'Objects with the <tag>'
    description = "Matches objects with the given tag"
    category    = _('General filters')

    def prepare(self, db):
        """
        One-time setup: resolve the configured tag name to its handle.
        """
        tag = db.get_tag_from_name(self.list[0])
        self.tag_handle = tag.get_handle() if tag is not None else None

    def apply(self, db, obj):
        """
        Return True when the object carries the prepared tag.
        """
        return (self.tag_handle is not None
                and self.tag_handle in obj.get_tag_list())
|
bertrandF/DictionaryDB
|
db.py
|
#!/usr/bin/python3.4
#############################################################################
#
# Dictionnary DB managing script. Add/Del/Search definitions
# Copyright (C) 2014 bertrand
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#############################################################################
###############
### Imports ###
import sys
import psycopg2 as PSQL
import textwrap as txtwrp
#####################
### Configuration ###
config = {
'VERSION_MAJOR' : '0',
'VERSION_MINOR' : '1',
'dbname' : 'bertrand',
'user' : 'bertrand'
}
#############
### USAGE ###
def usage():
    """Print the general usage text to stdout.

    BUG FIX: the first line read "dicotionnnary"; every other message in
    this script spells it "dictionnary", so use that spelling here too.
    """
    print("Tool to insert/remove entries in the dictionnary.")
    print("Version: " + config['VERSION_MAJOR'] + "." + config['VERSION_MINOR'])
    print("Usage: " + sys.argv[0] + " <command> <options>")
    print("")
    print("Commands:")
    print("    add         Add definition to dictionnary.")
    print("    del         Remove definition from dictionnary.")
    print("    help        Print general help or command specific help.")
    print("    search      Search definition in dictionnary.")
    print("")
###########
### ADD ###
def add():
    """Handle the 'add' command: insert a definition into the dico table.

    Options (from sys.argv):
      -d <str>         definition text (required)
      -f <str,str,..>  comma-separated field list (required)
      -n <str>         entry name (required)
      -u <url>         optional URL to a fuller definition
    """
    argc = len(sys.argv)
    if argc < 3:
        __help_cmd(sys.argv[1])
        return
    req = {
        'fields' : '',
        'name'   : '',
        'def'    : '',
        'url'    : ''
        }
    i=2
    while i < argc:
        if sys.argv[i] == "-d":
            i += 1
            req['def'] = sys.argv[i]
        elif sys.argv[i] == "-f":
            i += 1
            req['fields'] = sys.argv[i]
        elif sys.argv[i] == '-n':
            i += 1
            req['name'] = sys.argv[i]
        elif sys.argv[i] == "-u":
            i += 1
            req['url'] = sys.argv[i]
        else:
            print("Unknown option '" + sys.argv[i] + "'")
            __help_cmd(sys.argv[1])
            return
        i += 1
    if req['fields'] == '':
        print("Please specify fields with option '-f'.")
        __help_cmd(sys.argv[1])
        return
    elif req['name'] == '':
        # BUG FIX: this branch used to repeat the '-f' message; the missing
        # option here is the entry name ('-n').
        print("Please specify name with option '-n'.")
        __help_cmd(sys.argv[1])
        return
    elif req['def'] == '':
        print("Please specify definition with option '-d'.")
        __help_cmd(sys.argv[1])
        return
    conn = PSQL.connect("dbname=" + config['dbname'] + " user=" + config['user'])
    cur = conn.cursor()
    # mogrify() quotes the values server-side, so this INSERT is safe.
    req = cur.mogrify("INSERT INTO dico (fields,name,def,url) VALUES (%s, %s, %s, %s)",
            ("{" + req['fields'] + "}", req['name'], req['def'], req['url']))
    print(req)
    cur.execute(req)
    conn.commit()
    cur.close()
    conn.close()
###########
### DEL ###
def delete():
    """Handle the 'del' command: remove the definition with the given id."""
    if len(sys.argv) < 3:
        print("Missing argument.")
        __help_cmd(sys.argv[1])
        return
    definition_id = sys.argv[2]
    connection = PSQL.connect("dbname=" + config['dbname'] + " user=" + config['user'])
    cursor = connection.cursor()
    # mogrify() quotes the id, then the finished statement is executed.
    statement = cursor.mogrify("DELETE FROM dico WHERE id=%s", (definition_id,))
    print(statement)
    cursor.execute(statement)
    connection.commit()
    cursor.close()
    connection.close()
#####################
### HELP COMMANDS ###
def help_cmd():
    """Handle the 'help' command: show general or command-specific help."""
    # No extra argument means "general help" (empty command string).
    cmd = sys.argv[2] if len(sys.argv) > 2 else ''
    __help_cmd(cmd)
def __help_cmd(cmd):
    """Print help for *cmd*; an empty string prints the general usage.

    Unknown command names print a warning followed by the general usage.
    """
    if cmd == '' :
        usage()
    elif cmd == "add" :
        print("Command '" + cmd + "': Add definition to dictionnary.")
        print("Usage: " + sys.argv[0] + " " + cmd + " <options>")
        print("")
        print("Options:")
        print("    -d <str>            Definition.")
        print("    -f <str,str,..>     List of fields.")
        print("    -n <str>            Name of the entry")
        print("    -u <url>            One url to a more complete definition.")
        print("")
    elif cmd == "del" :
        print("Command '" + cmd + "': Delete definition from dictionnary.")
        print("Usage: " + sys.argv[0] + " " + cmd + " <id>")
        print("")
        print("Param:")
        print("    id      ID of the definition to delete.")
        print("")
    elif cmd == "help" :
        print("Command '" + cmd + "': Print help.")
        print("Usage: " + sys.argv[0] + " " + cmd + " [command]")
        print("")
        print("Giving NO 'command' this will print the general help.")
        print("Giving 'command' this will print the command specific help. ")
        print("")
    elif cmd == "search" :
        print("Command '" + cmd + "': Search definition in dictionnary.")
        print("Usage: " + sys.argv[0] + " " + cmd + " <options>")
        print("")
        print("Options:")
        print("    -a                  Print all definitions in the table.")
        print("    -f <str,str,...>    Print definitions matching the set of given fields.")
        print("    -i <id>             Print definition matching the given ID.")
        print("    -n <str>            Print definition mathing the given entry name.")
        print("")
    else:
        print("Unknown command: '" + cmd + "'")
        usage()
##############
### SEARCH ###
def search():
    """Handle the 'search' command: query the dico table.

    The option in sys.argv[2] selects the query:
      -a             all rows
      -f f1,f2,...   rows whose fields array contains any of the names
      -i <id>        the row with the given id
      -n <str>       rows with the given name
    """
    try:
        opt = sys.argv[2]
    except IndexError:
        # No option given at all: show command help and bail out.
        __help_cmd(sys.argv[1])
        return
    else:
        if not opt in ('-a', '-f', '-i', '-n'):
            print("Unknown option '" + sys.argv[2] + "'")
            __help_cmd(sys.argv[1])
            return
    conn = PSQL.connect("dbname=" + config['dbname'] + " user=" + config['user'])
    cur = conn.cursor()
    try:
        if opt == "-a":
            req = cur.mogrify("SELECT id,fields,name,def,url FROM dico")
        elif opt == "-f":
            optarg = sys.argv[3]
            # NOTE: builds the WHERE clause by string concatenation; see
            # __search_build_req_fields for the injection caveat.
            req = __search_build_req_fields(optarg.split(','))
        elif opt == '-i':
            optarg = sys.argv[3]
            req = cur.mogrify("SELECT id,fields,name,def,url FROM dico WHERE id=%s", (optarg,))
        elif opt == "-n":
            optarg = sys.argv[3]
            req = cur.mogrify("SELECT id,fields,name,def,url FROM dico WHERE name=%s", (optarg,))
    except IndexError:
        # The chosen option required an argument (sys.argv[3]) that is missing.
        print("Missing argument.")
        __help_cmd(sys.argv[1])
    else:
        print(req)
        cur.execute(req)
        print_rows(cur.fetchall())
        conn.commit()
    finally:
        # Always release the cursor and connection, on every path above.
        cur.close()
        conn.close()
def __search_build_req_fields(fields):
    """Build a SELECT matching rows whose fields array contains any of *fields*.

    The caller executes the returned string without parameters, so the
    values must be inlined — but single quotes are now doubled per SQL
    string-literal rules, closing the injection hole the original version
    joked about (a quote in a field name can no longer escape the literal).
    """
    def _literal(value):
        # SQL string literal with embedded single quotes escaped.
        return "'" + value.replace("'", "''") + "'"
    conditions = " OR ".join(_literal(f) + "=ANY(fields)" for f in fields)
    return "SELECT id,fields,name,def,url FROM dico WHERE " + conditions
###################################
### PRINT PSQL REQUESTS RESULTS ###
def print_rows(rows):
    """Pretty-print rows from a SELECT on the dico table, one banner each."""
    for def_id, fields, name, definition, url in rows:
        print("---------------------")
        print("ID     : ", def_id)
        __print_row_wrapped("FIELDS : ", fields)
        __print_row_wrapped("NAME   : ", name)
        __print_row_wrapped("DEF    : ", definition)
        __print_row_wrapped("URL    : ", url)
        print("")
def __print_row_wrapped(label, value):
    """Print label plus value, wrapping value and aligning continuation lines."""
    lines = txtwrp.wrap(value)
    continuation_indent = ' ' * len(label)
    print(label, lines[0])
    for extra in lines[1:]:
        print(continuation_indent, extra)
############
### MAIN ###
# Dispatch table: first CLI argument -> handler function.
commands = {
    'add'    : add,
    'del'    : delete,
    'help'   : help_cmd,
    'search' : search
    }

try:
    cmd = sys.argv[1]
except IndexError:
    # No command given: print usage and quit.
    usage()
    sys.exit()
# BUG FIX: the unknown-command KeyError is raised by the dict lookup, not
# by reading sys.argv[1], so the old `except KeyError` attached to the argv
# access could never fire and unknown commands crashed with a traceback.
try:
    handler = commands[cmd]
except KeyError:
    print("Unknown command: " + cmd)
    usage()
    sys.exit()
handler()
|
moio/spacewalk
|
backend/server/rhnAuthPAM.py
|
#
# Copyright (c) 2008--2013 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
import PAM
import sys
from spacewalk.common.rhnLog import log_error
from spacewalk.common.rhnException import rhnException
# Credentials parked at module level: the PAM conversation callback receives
# no user-data argument, so check_password() stores them here for it.
__username = None
__password = None

def __pam_conv(auth, query_list):
    """PAM conversation callback.

    Answers echo-on prompts with the saved username and echo-off prompts
    with the saved password; returns the list of (answer, 0) pairs, or
    None when an unknown prompt type is encountered.
    """
    global __username, __password
    responses = []
    for query, msg_type in query_list:
        if msg_type == PAM.PAM_PROMPT_ECHO_ON:
            responses.append((__username, 0))
        elif msg_type == PAM.PAM_PROMPT_ECHO_OFF:
            responses.append((__password, 0))
        else:
            log_error("Got unknown PAM type %s (query=%s)" % (msg_type, query))
            return None
    return responses
def check_password(username, password, service):
    """Authenticate username/password against the given PAM service.

    Returns 1 on success, 0 when PAM rejects the credentials; any other
    error is re-raised as rhnException with the original traceback.
    (Python 2 syntax: this module predates Python 3.)
    """
    global __username, __password
    auth = PAM.pam()
    auth.start(service, username, __pam_conv)

    # Save the username and passwords in the globals, the conversation
    # function needs access to them
    __username = username
    __password = password
    try:
        try:
            auth.authenticate()
            auth.acct_mgmt()
        finally:
            # Something to be always executed - cleanup
            __username = __password = None
    except PAM.error, e:
        resp, code = e.args[:2]
        log_error("Password check failed (%s): %s" % (code, resp))
        return 0
    except:
        # Re-raise anything unexpected, keeping the original traceback
        # (Python 2 three-argument raise).
        raise rhnException('Internal PAM error'), None, sys.exc_info()[2]
    else:
        # Good password
        return 1
|
vicente-gonzalez-ruiz/QSVC
|
trunk/src/old_py/texture_expand_lfb_j2k.py
|
#!/usr/bin/python
# -*- coding: iso-8859-15 -*-
import sys
import math
from subprocess import check_call
from subprocess import CalledProcessError
from MCTF_parser import MCTF_parser
file = ""
rate = 0.0
pictures = 33
pixels_in_x = 352
pixels_in_y = 288
subband = 4 # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
parser = MCTF_parser(description="Expands the the LFB texture data using JPEG 2000.")
parser.add_argument("--file", help="file that contains the LFB data. Default = {})".format(file))
parser.add_argument("--rate", help="read only the initial portion of the code-stream, corresponding to an overall bit-rate of \"rate\" bits/sample. Default = {})".format(rate))
parser.pictures(pictures)
parser.pixels_in_x(pixels_in_x)
parser.pixels_in_y(pixels_in_y)
args = parser.parse_known_args()[0]
if args.file:
file = args.file
if args.rate:
rate = float(args.rate)
if args.pictures:
pictures = int(args.pictures)
if args.pixels_in_x:
pixels_in_x = int(args.pixels_in_x)
if args.pixels_in_y:
pixels_in_y = int(args.pixels_in_y)
# Decode YUV: for each picture expand the Y, U and V planes from their
# JPEG 2000 code-streams and append the raw samples to the output file.

def _expand_component(component, number_of_samples):
    """Expand one colour plane of the current picture (global image_number).

    component         -- "Y", "U" or "V".
    number_of_samples -- samples in the plane (chroma planes are 1/4 size).

    If the .j2c code-stream does not exist, a mid-gray (128) plane is
    written instead so the sequence keeps its geometry. The resulting
    .raw data is then appended to the output file.
    """
    prefix = file + "_" + component + "_" + str('%04d' % image_number)
    try:
        f = open(prefix + ".j2c", "rb")
        f.close()
    except IOError:
        # BUG FIX: the old bare `except:` wrapped the whole expansion and
        # therefore also swallowed the SystemExit raised when kdu_expand
        # failed; now only a missing code-stream triggers this fallback.
        f = open(prefix + ".raw", "wb")
        for a in xrange(number_of_samples):
            f.write('%c' % 128)
        f.close()
    else:
        try:
            if rate <= 0.0 :
                check_call("trace kdu_expand"
                           + " -i " + prefix + ".j2c"
                           + " -o " + prefix + ".raw"
                           , shell=True)
            else :
                # BUG FIX: `rate` is a float; concatenating it directly to
                # the command string raised TypeError. Convert explicitly.
                check_call("trace kdu_expand"
                           + " -i " + prefix + ".j2c"
                           + " -o " + prefix + ".raw"
                           + " -rate " + str(rate)
                           , shell=True)
        except CalledProcessError:
            sys.exit(-1)
    try:
        check_call("trace cat " + prefix + ".raw >> " + file, shell=True)
    except CalledProcessError:
        sys.exit(-1)

image_number = 0
while image_number < pictures:
    # The Y plane is full resolution; U and V are subsampled 4:2:0.
    _expand_component("Y", pixels_in_x * pixels_in_y)
    _expand_component("U", (pixels_in_x * pixels_in_y) / 4)
    _expand_component("V", (pixels_in_x * pixels_in_y) / 4)
    image_number += 1
|
timothycrosley/instantly
|
instantly/main.py
|
""" instantly/main.py
Defines the basic terminal interface for interacting with Instantly.
Copyright (C) 2013 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
import sys
from pies.overrides import *
from . import __version__
from .instantly import Instantly
def main():
    """Terminal entry point for Instantly.

    Dispatches on sys.argv[1]: the commands handled explicitly below
    (help, uninstall, version, location, share, unshare, create_settings,
    find, install) each run and exit; any other word is treated as a
    template name to expand, with remaining argv entries consumed as
    template arguments before prompting interactively for the rest.
    """
    instantly = Instantly()
    # No arguments at all: print a short banner plus installed templates.
    if not len(sys.argv) > 1:
        print("Instantly allows you to expand simple templates, that take in a set number of arguments")
        print("Usage: instantly [template name] to expand a template")
        print("       type instantly help for full instructions.")
        print("")
        print("Installed Templates:")
        print("\t" + str(instantly.installed_templates))
        sys.exit(1)

    command = sys.argv[1]
    # sys.argv[2:3] yields [] when absent, so this is "argv[2] or ''".
    template_name = sys.argv[2:3] and sys.argv[2] or ""
    extra_inputs = sys.argv[2:]
    if command == "help":
        # NOTE(review): the help text advertises a 'download' command, but
        # there is no explicit branch for it below — it falls through to
        # template expansion. Confirm whether that is intended.
        print("Instantly Commands")
        print("")
        print("instantly [template name]")
        print("\t Expand the named template")
        print("instantly help")
        print("\t Get full list of commands / help text")
        print("instantly find [template name]")
        print("\t Find pre-made templates to automate a task online")
        print("instantly download [template name]")
        print("\t Add a template shared online to your local template repository")
        print("instantly install [template directory]")
        print("\t Installs an instant_template directory from the local file system "
              "or online repository into your personal collection of templates")
        print("instantly uninstall [template name]")
        print("\t Permanently removes an installed template locally")
        print("instantly create_instant_template")
        print("\t Create a new instant template to automate a task")
        print("instantly share [template name]")
        print("\t Share a template you have created with others online")
        print("\t Must register your google account with http://instantly.pl/ to do this")
        print("instantly unshare [template name]")
        print("\t Removes a template that you previously shared from the instantly online repository.")
        print("instantly location [template name]")
        print("\t Will tell you where the specified template is located on disk.")
        print("instantly create_settings [template directory]")
        print("\t Will create an alternate settings / template directory within the current directory.")
        print("instantly version")
        print("\t Will tell you the version of instantly you have installed.")
        sys.exit(0)
    elif command == "uninstall":
        # Destructive: require interactive confirmation first.
        if input("Are you sure you want to delete %s (y/n)? " % template_name).lower() in ("y", "yes"):
            if instantly.uninstall(template_name):
                print("Successfully removed %s from local templates" % template_name)
                sys.exit(0)
            else:
                sys.exit(1)
    elif command == "version":
        print("instantly v. {0}".format(__version__))
        sys.exit(0)
    elif command == "location":
        template = instantly.installed_template(template_name)
        if not template:
            print("Sorry template does not exist!")
            sys.exit(1)
        # NOTE(review): the return makes the following sys.exit(0)
        # unreachable — confirm whether the location is meant to be
        # printed rather than returned.
        return template.location
        sys.exit(0)
    elif command == "share":
        if instantly.share(template_name):
            print("Successfully shared %s, thanks for helping to expand the number of instant templates!" % template_name)
            sys.exit(0)
        else:
            sys.exit(1)
    elif command == "unshare":
        if instantly.unshare(template_name):
            print("Successfully un-shared %s!" % template_name)
            sys.exit(0)
        else:
            sys.exit(1)
    elif command == "create_settings":
        if instantly.create_settings():
            print("Successfully created a new settings / templates directory!")
            sys.exit(0)
        else:
            sys.exit(1)
    elif command == "find":
        results = instantly.find(template_name)
        if not results:
            print("Sorry: no templates have been shared that match the search term '%s'," % template_name)
            print("       but you could always add one ;)")
            sys.exit(0)
        print("Instantly found the following templates:")
        for result in results:
            print(result)
        print(" To install one of these templates run: instantly install [template_name]")
        sys.exit(0)
    elif command == "install":
        if instantly.install(template_name):
            print("%(name)s has been installed as a local template. Run 'instantly %(name)s' to expand it." % \
                  {"name":template_name})
            sys.exit(0)
        else:
            print("Sorry: no one has thought of a way to instantly '%s'," % template_name)
            print("       but you could always create one ;)")
            sys.exit(0)
    else:
        # Default: treat the command itself as a template name to expand.
        template_name = command
        template = instantly.get_template(template_name)
        if not template:
            print("Sorry: no one has thought of a way to instantly '%s'," % template_name)
            print("       but you could always create one ;)")
            sys.exit(1)

        print("Expanding the following template:")
        print(template)
        arguments = {}
        for argument, argument_definition in itemsview(template.arguments):
            print("")
            # Positional CLI inputs are consumed first; prompt for the rest.
            if extra_inputs:
                arguments[argument] = extra_inputs.pop(0)
            else:
                argument_type = argument_definition.get('type', 'string')
                default = instantly.settings['defaults'].get(argument, '') or argument_definition.get('default', '')
                help_text = argument_definition.get('help_text')
                if help_text:
                    print("Help Text: {0}".format(help_text))
                prompt = argument_definition.get('prompt', '')
                if default:
                    prompt += " [Default: {0}]".format(default)
                if argument_type == "bool":
                    prompt += " (y/n)"
                prompt += ": "

                # Re-prompt until the answer parses for the declared type;
                # an empty answer takes the default (which may itself be "").
                value = ""
                while value == "":
                    value = input(prompt)
                    if argument_type == "bool":
                        if value.lower() in ("y", "yes"):
                            value = True
                        elif value.lower() in ("n", "no"):
                            value = False
                        else:
                            value = default or ""
                    elif argument_type == "int":
                        if value.isdigit():
                            value = int(value)
                        elif not value:
                            value = default
                        else:
                            value = ""
                    elif not value:
                        value = default
                arguments[argument] = value

        success_message = instantly.expand(template_name, arguments)
        # Explicit != False: expand() may legitimately return "" on success.
        if success_message != False:
            print("Successfully ran '{0}'!".format(template_name))
            if success_message:
                print(success_message)


if __name__ == "__main__":
    main()
|
Forage/Gramps
|
gramps/plugins/lib/libsubstkeyword.py
|
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2010 Craig J. Anderson
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# $Id$
"""
Provide the SubstKeywords class that will replace keywords in a passed
string with information about the person/marriage/spouse. For sample:
foo = SubstKeywords(database, person_handle)
print foo.replace_and_clean(['$n was born on $b.'])
Will return a value such as:
Mary Smith was born on 3/28/1923.
"""
from __future__ import print_function
#------------------------------------------------------------------------
#
# Gramps modules
#
#------------------------------------------------------------------------
from gramps.gen.display.name import displayer as name_displayer
from gramps.gen.datehandler import displayer
from gramps.gen.lib import EventType
from gramps.gen.utils.db import get_birth_or_fallback, get_death_or_fallback
from gramps.gen.constfunc import STRTYPE, cuni
#------------------------------------------------------------------------
#
# Local constants
#
#------------------------------------------------------------------------
class TextTypes():
    """Enumeration of the four parts a format string can contain, also used
    as parser states (separator never appears as a state).

    text -> remove or display
    remove -> display
    """
    separator = 0
    text = 1
    remove = 2
    display = 3

TXT = TextTypes()
#------------------------------------------------------------------------
#
# Formatting classes
#
#------------------------------------------------------------------------
class GenericFormat(object):
    """A Generic parsing class. Will be subclassed by specific format strings.

    Consumes characters from the shared input-string cursor (string_in),
    translating one-character codes inside a "(...)" group into values via
    the caller-supplied functions.
    """

    def __init__(self, string_in):
        # string_in is a cursor-like object exposing .this/.step()/
        # .parse_format()/.remove_start_end() over the format string.
        self.string_in = string_in

    def _default_format(self, item):
        """ The default format if there is no format string """
        pass

    def is_blank(self, item):
        """ if the information is not known (item is None), remove the format
        string information from the input string if any.
        """
        if item is None:
            self.string_in.remove_start_end("(", ")")
            return True
        return False

    def generic_format(self, item, code, uppr, function):
        """the main parsing engine.

        Needed are the following:  the input string
        code - List of one character (string) codes (all lowercase)
        uppr - list of one character (string) codes that can be uppercased
            each needs to have a lowercase equivalent in code
        function - list of functions.
        there is a one to one relationship with character codes and functions.
        """
        # Without an opening parenthesis there is no format group to parse.
        if self.string_in.this != "(":
            return self._default_format(item)
        self.string_in.step()

        main = VarString()
        separator = SeparatorParse(self.string_in)
        #code given in args
        #function given in args

        # Walk the group character by character until the closing ")".
        while self.string_in.this and self.string_in.this != ")":
            #Check to see if _in.this is in code
            to_upper = False
            if uppr.find(self.string_in.this) != -1:
                #and the result should be uppercased.
                to_upper = True
                where = code.find(self.string_in.this.lower())
            else:
                where = code.find(self.string_in.this)
            if where != -1:
                # Recognized code: invoke the paired function for its value.
                self.string_in.step()
                tmp = function[where]()
                if to_upper:
                    tmp = tmp.upper()
                if tmp == "" or tmp is None:
                    main.add_remove()
                elif isinstance(tmp, VarString): #events cause this
                    main.extend(tmp)
                else:
                    main.add_variable(tmp)
            elif separator.is_a():
                main.add_separator(separator.parse_format())
            else:
                # Anything else is literal text inside the group.
                main.add_text(self.string_in.parse_format())

        # Consume the closing parenthesis if present.
        if self.string_in.this == ")":
            self.string_in.step()

        return main
#------------------------------------------------------------------------
# Name Format strings
#------------------------------------------------------------------------
class NameFormat(GenericFormat):
    """Format a person's name.

    With no "(...)" format string the name is rendered according to the
    user's display preferences; otherwise each code character in the
    format string selects one part of the name.
    """
    def get_name(self, person):
        """Return the primary Name object of *person*, or None."""
        return person.get_primary_name() if person else None
    def _default_format(self, name):
        """Fall back to the preference-driven rendering of the name."""
        return name_displayer.sorted_name(name)
    def parse_format(self, name):
        """Consume the format string and substitute the name parts."""
        if self.is_blank(name):
            return
        def short_first():
            """The call name, or else the first word of the first name."""
            call = name.get_call_name()
            if call:
                return call
            return name.get_first_name().split(' ')[0]
        codes = "tfcnxslg"
        getters = [name.get_title,            #t
                   name.get_first_name,       #f
                   name.get_call_name,        #c
                   name.get_nick_name,        #n
                   short_first,               #x
                   name.get_suffix,           #s
                   name.get_surname,          #l
                   name.get_family_nick_name, #g
                  ]
        return self.generic_format(name, codes, codes.upper(), getters)
#------------------------------------------------------------------------
# Date Format strings
#------------------------------------------------------------------------
class DateFormat(GenericFormat):
    """ The date format class.
    If no format string, the date is displayed as per preference options
    otherwise, parse through a format string and put the date parts in
    """
    def get_date(self, event):
        """ A helper method for retrieving a date from an event """
        if event:
            return event.get_date_object()
        return None
    def _default_format(self, date):
        # Render via the user's configured date displayer.
        return displayer.display(date)
    def __count_chars(self, char, max_amount):
        """ count the year/month/day codes """
        count = 1 #already have seen/passed one
        while count < max_amount and self.string_in.this == char:
            self.string_in.step()
            count = count +1
        return count
    def parse_format(self, date):
        """ Parse the date """
        if self.is_blank(date):
            return
        def year():
            """ The year part only """
            year = cuni(date.get_year())
            count = self.__count_chars("y", 4)
            if year == "0": #0 means not defined!
                return
            if count == 1: #found 'y'
                if len(year) == 1:
                    return year
                elif year[-2] == "0":
                    return year[-1]
                else:
                    return year[-2:]
            elif count == 2: #found 'yy'
                tmp = "0" + year
                return tmp[-2:]
            elif count == 3: #found 'yyy'
                if len(year) > 2:
                    return year
                else:
                    tmp = "00" + year
                    return tmp[-3:]
            else: #count == 4 #found 'yyyy'
                tmp = "000" + year
                return tmp[-4:]
        def month(char_found = "m"):
            """ The month part only """
            month = cuni(date.get_month())
            count = self.__count_chars(char_found, 4)
            if month == "0": #0 means not defined!
                return
            if count == 1:
                return month
            elif count == 2: #found 'mm'
                tmp = "0" + month
                return tmp[-2:]
            elif count == 3: #found 'mmm'
                return displayer.short_months[int(month)]
            else: #found 'mmmm'
                return displayer.long_months[int(month)]
        def month_up():
            # 'M' code: month name/number forced to uppercase.
            return month("M").upper()
        def day():
            """ The day part only """
            day = cuni(date.get_day())
            count = self.__count_chars("d", 2)
            if day == "0": #0 means not defined!
                return
            if count == 1: #found 'd'
                return day
            else: #found 'dd'
                tmp = "0" + day
                return tmp[-2:]
        def modifier():
            #ui_mods taken from date.py def lookup_modifier(self, modifier):
            # NOTE(review): `_` (gettext) is not among this module's visible
            # imports -- confirm it is injected elsewhere before relying on it.
            ui_mods = ["", _("before"), _("after"), _("about"),
                       "", "", ""]
            return ui_mods[date.get_modifier()].capitalize()
        #y=year, m=month, d=day, M=uppercased month, o=modifier (O uppercases)
        code = "ymdMo"
        upper = "O"
        function = [year, month, day, month_up, modifier]
        return self.generic_format(date, code, upper, function)
#------------------------------------------------------------------------
# Place Format strings
#------------------------------------------------------------------------
class PlaceFormat(GenericFormat):
    """ The place format class.
    If no format string, the place is displayed as per preference options
    otherwise, parse through a format string and put the place parts in
    """
    def get_place(self, database, event):
        """ A helper method for retrieving a place from an event """
        if event:
            bplace_handle = event.get_place_handle()
            if bplace_handle:
                return database.get_place_from_handle(bplace_handle)
        return None
    def _default_format(self, place):
        return place.get_title()
    def parse_format(self, place):
        """ Parse the place """
        if self.is_blank(place):
            return
        #e=street, l=locality, c=city, u=county, s=state, p=postal code,
        #n=country, i=parish, t=title, x=longitude, y=latitude
        code = "elcuspnitxy"
        upper = code.upper()
        function = [place.get_main_location().get_street,
                    place.get_main_location().get_locality,
                    place.get_main_location().get_city,
                    place.get_main_location().get_county,
                    place.get_main_location().get_state,
                    place.get_main_location().get_postal_code,
                    place.get_main_location().get_country,
                    place.get_main_location().get_parish,
                    place.get_title,
                    place.get_longitude,
                    place.get_latitude
                   ]
        return self.generic_format(place, code, upper, function)
#------------------------------------------------------------------------
# Event Format strings
#------------------------------------------------------------------------
class EventFormat(GenericFormat):
    """ The event format class.
    If no format string, the event description is displayed
    otherwise, parse through the format string and put in the parts
    dates and places can have their own format strings
    """
    def __init__(self, database, _in):
        self.database = database
        GenericFormat.__init__(self, _in)
    def _default_format(self, event):
        if event is None:
            return
        else:
            return event.get_description()
    def __empty_format(self):
        """ clear out a sub format string """
        self.string_in.remove_start_end("(", ")")
        return
    def __empty_attrib(self):
        """ clear out an attribute name """
        self.string_in.remove_start_end("[", "]")
        return
    def parse_format(self, event):
        """ Parse the event format string.
        let the date or place classes handle any sub-format strings """
        if self.is_blank(event):
            return
        def format_date():
            """ start formatting a date in this event """
            date_format = DateFormat(self.string_in)
            return date_format.parse_format(date_format.get_date(event))
        def format_place():
            """ start formatting a place in this event """
            place_format = PlaceFormat(self.string_in)
            place = place_format.get_place(self.database, event)
            return place_format.parse_format(place)
        def format_attrib():
            """ Get the name and then get the attributes value """
            #Event's Attribute
            attrib_parse = AttributeParse(self.string_in)
            #self.string_in.step()
            name = attrib_parse.get_name()
            if name:
                return attrib_parse.get_attribute(event.get_attribute_list(),
                                                  name)
            else:
                return
        #n=description, d=date, D=place, i=gramps id, a=attribute
        code = "ndDia"
        upper = ""
        function = [event.get_description,
                    format_date,
                    format_place,
                    event.get_gramps_id,
                    format_attrib
                   ]
        return self.generic_format(event, code, upper, function)
    def parse_empty(self):
        """ remove the format string """
        code = "dDa"
        function = [self.__empty_format, self.__empty_format,
                    self.__empty_attrib]
        return self.generic_format(None, code, "", function)
#------------------------------------------------------------------------
# Gallery Format strings
#------------------------------------------------------------------------
class GalleryFormat(GenericFormat):
    """ The gallery format class.
    If no format string, the photo description is displayed
    otherwise, parse through the format string and put in the parts
    dates (no places) can have their own format strings
    """
    def __init__(self, database, _in):
        self.database = database
        GenericFormat.__init__(self, _in)
    def _default_format(self, photo):
        if photo is None:
            return
        else:
            return photo.get_description()
    def __empty_format(self):
        """ clear out a sub format string """
        self.string_in.remove_start_end("(", ")")
        return
    def __empty_attrib(self):
        """ clear out an attribute name """
        self.string_in.remove_start_end("[", "]")
        return
    def parse_format(self, photo):
        """ Parse the photo format string.
        let the date or place classes handle any sub-format strings """
        if self.is_blank(photo):
            return
        def format_date():
            """ start formatting a date in this photo """
            date_format = DateFormat(self.string_in)
            return date_format.parse_format(date_format.get_date(photo))
        def format_attrib():
            """ Get the name and then get the attributes value """
            #photo's Attribute
            attrib_parse = AttributeParse(self.string_in)
            name = attrib_parse.get_name()
            if name:
                return attrib_parse.get_attribute(photo.get_attribute_list(),
                                                  name)
            else:
                return
        #n=description, d=date, i=gramps id, a=attribute
        code = "ndia"
        upper = ""
        function = [photo.get_description,
                    format_date,
                    photo.get_gramps_id,
                    format_attrib
                   ]
        return self.generic_format(photo, code, upper, function)
    def parse_empty(self):
        """ remove the format string """
        code = "da"
        function = [self.__empty_format, self.__empty_attrib]
        return self.generic_format(None, code, "", function)
#------------------------------------------------------------------------
#
# ConsumableString - The Input string class
#
#------------------------------------------------------------------------
class ConsumableString(object):
    """A string that is consumed from the front while parsing.

    Exposes ``this`` (the current character, or None when exhausted) and
    ``next`` (the one-character look-ahead, or None).  There is only one
    of these for each processed line.
    """
    def __init__(self, string):
        self.__this_string = string
        self.__setup()
    def __setup(self):
        """Refresh the ``this``/``next`` attributes from the string head."""
        if self.__this_string:
            self.this = self.__this_string[0]
        else:
            self.this = None
        if len(self.__this_string) > 1:
            self.next = self.__this_string[1]
        else:
            self.next = None
    def step(self):
        """Consume one character; return the new current character."""
        self.__this_string = self.__this_string[1:]
        self.__setup()
        return self.this
    def step2(self):
        """Consume two characters (used for escapes); return the new head."""
        self.__this_string = self.__this_string[2:]
        self.__setup()
        return self.this
    def remove_start_end(self, start, end):
        """Drop a ``start ... end`` block from the string head, if present."""
        if self.this == start:
            self.text_to_next(end)
    def __get_a_char_of_text(self):
        """Consume and return one character of TEXT, honoring backslash escapes."""
        if self.this == "\\":
            # "\x" yields the literal x; a lone trailing backslash yields itself.
            # (was `self.next == None`; `is None` is the correct identity test)
            if self.next is None:
                rtrn = "\\"
            else:
                rtrn = self.next
            self.step2()
        else:
            rtrn = self.this
            self.step()
        return rtrn
    def text_to_next(self, char):
        """Consume and return all text up to *char*; *char* is consumed too."""
        new_str = ""
        while self.this is not None and self.this != char:
            new_str += self.__get_a_char_of_text()
        if self.this == char:
            self.step()
        return new_str
    def is_a(self):
        """Plain text is always parseable (mirrors the parsers' is_a())."""
        return True
    def parse_format(self):
        """Consume one text character; return '' (never None) when exhausted."""
        rtrn = self.__get_a_char_of_text()
        if rtrn:
            return rtrn
        return ''
#------------------------------------------------------------------------
#
# VarString class - The Output string class
#
#------------------------------------------------------------------------
class VarString(object):
    """
    The accumulating output string.

    ``state`` holds the overall TextTypes state of the string and
    ``_text`` a list of (TextTypes, string) tuples.  Used both for {}
    groups and for format strings.
    """
    def __init__(self, start_state = TXT.remove):
        self.state = start_state #overall state of the string.
        self._text = []          #list of tuples (TXT.?, string)
    def __update_state(self, new_status):
        # States only move forward: text -> remove -> display.
        if new_status > self.state:
            self.state = new_status
    def add_text(self, text):
        """Queue literal text (does not change the state)."""
        self._text.append((TXT.text, text))
    def add_variable(self, text):
        """Queue a substituted variable; forces the string to display."""
        self.state = TXT.display
        self._text.append((TXT.text, text))
    def add_remove(self):
        """Queue a marker for an unknown/removed item."""
        self.__update_state(TXT.remove)
        self._text.append((TXT.remove, ""))
    def add_separator(self, text):
        self._text.append((TXT.separator, text))
    def get_final(self):
        """Collapse the queued pieces into a (state, string) tuple.

        A separator is kept only when it joins displayed pieces; one that
        touches a removed piece is dropped together with it.
        (An unreachable debug print after the return was removed.)
        """
        curr_string = ""
        index = 0
        while index < len(self._text):
            if self._text[index][0] == TXT.text:
                curr_string += self._text[index][1]
                index = index + 1
                continue #while self._text:
            if index +1 == len(self._text):
                # Last item: a trailing separator survives only after text.
                if self._text[index][0] == TXT.separator and curr_string != '':
                    curr_string += self._text[index][1]
                index = index + 1
                break #while self._text:
            type_0_1 = (self._text[index][0], self._text[index+1][0])
            if type_0_1 == (TXT.remove, TXT.separator):
                # A removed item eats the separator that follows it.
                index = index + 1
            elif type_0_1 == (TXT.separator, TXT.remove):
                # A separator before a removed item: skip the removed item.
                index = index + 1
            elif type_0_1 == (TXT.separator, TXT.text):
                curr_string += self._text[index][1]
            index = index + 1
        #return what we have
        return (self.state, curr_string)
    def extend(self, acquisition):
        """
        acquisition is a VarString object
        Merge the content of acquisition into this place.
        """
        self.__update_state(acquisition.state)
        if acquisition.state != TXT.display:
            #The sub {} was TXT.remove. We don't want to simply ignore it.
            self.add_remove() #add a remove que here to note it.
            return
        self._text.extend(acquisition._text)
#------------------------------------------------------------------------
#
# Parsers
#
#------------------------------------------------------------------------
#------------------------------------------------------------------------
# SeparatorParse
#------------------------------------------------------------------------
class SeparatorParse(object):
    """Parse a "<...>" separator block out of the input string."""
    def __init__(self, consumer_in):
        # consumer_in: the shared ConsumableString for this line.
        self._in = consumer_in
    def is_a(self):
        """Return True when the input is positioned at a separator."""
        return self._in.this == "<"
    def parse_format(self):
        """get the text and return it

        (This docstring used to sit *after* the guard clause, where it was
        a no-op string expression; it is now a real docstring.)
        Consumes the "<...>" block and returns its text, or None when the
        input is not at a separator.
        """
        if not self.is_a():
            return
        self._in.step()
        return self._in.text_to_next(">")
#------------------------------------------------------------------------
# AttributeParse
#------------------------------------------------------------------------
class AttributeParse(object):
    """Parse "[name]" attribute references out of the input string."""
    def __init__(self, consumer_in):
        # consumer_in: the shared ConsumableString for this line.
        self._in = consumer_in
    def get_name(self):
        """Consume and return the name inside a [] block, or None."""
        if self._in.this != "[":
            return
        self._in.step()
        return self._in.text_to_next("]")
    def get_attribute(self, attrib_list, attrib_name):
        """Return the value of the first attribute matching *attrib_name*."""
        if attrib_name == "":
            return
        found = None
        for attr in attrib_list:
            if str(attr.get_type()) == attrib_name:
                found = attr
                break
        if found is not None:
            return str(found.get_value())
        return
    def is_a(self):
        """True when the input is positioned at an attribute variable."""
        return self._in.this == "a"
    def parse_format(self, attrib_list):
        """Consume the attribute name and look its value up in the list."""
        return self.get_attribute(attrib_list, self.get_name())
#------------------------------------------------------------------------
# VariableParse
#------------------------------------------------------------------------
class VariableParse(object):
    """ Parse the individual $ variables (e.g. $n, $b(...), $e[name]) """
    def __init__(self, friend, database, consumer_in):
        # friend: the SubstKeywords instance (supplies person/spouse/family).
        self.friend = friend
        self.database = database
        self._in = consumer_in
    def is_a(self):
        """ check: positioned at '$' followed by a known variable code """
        return self._in.this == "$" and self._in.next is not None and \
            "nsijbBdDmMvVauetTpP".find(self._in.next) != -1
    def get_event_by_type(self, marriage, e_type):
        """ get an event from a type """
        if marriage is None:
            return None
        for e_ref in marriage.get_event_ref_list():
            if not e_ref:
                continue
            event = self.friend.database.get_event_from_handle(e_ref.ref)
            if event.get_type() == e_type:
                return event
        return None
    def get_event_by_name(self, person, event_name):
        """ get an event from a name. """
        if not person:
            return None
        for e_ref in person.get_event_ref_list():
            if not e_ref:
                continue
            event = self.friend.database.get_event_from_handle(e_ref.ref)
            if event.get_type().is_type(event_name):
                return event
        return None
    def empty_item(self, item):
        """ return false if there is a valid item(date or place).
        Otherwise
        add a TXT.remove marker in the output string
        remove any format strings from the input string
        """
        if item is not None:
            return False
        self._in.remove_start_end("(", ")")
        return True
    def empty_attribute(self, person):
        """ return false if there is a valid person.
        Otherwise
        add a TXT.remove marker in the output string
        remove any attribute name from the input string
        """
        if person:
            return False
        self._in.remove_start_end("[", "]")
        return True
    def __parse_date(self, event):
        """ sub to process a date
        Given an event, get the date object, process the format,
        return the result """
        date_f = DateFormat(self._in)
        date = date_f.get_date(event)
        if self.empty_item(date):
            return
        return date_f.parse_format(date)
    def __parse_place(self, event):
        """ sub to process a place
        Given an event, get the place object, process the format,
        return the result """
        place_f = PlaceFormat(self._in)
        place = place_f.get_place(self.database, event)
        if self.empty_item(place):
            return
        return place_f.parse_format(place)
    def __parse_name(self, person):
        """ Format the primary name of person (None-safe). """
        name_format = NameFormat(self._in)
        name = name_format.get_name(person)
        return name_format.parse_format(name)
    def __parse_id(self, first_class_object):
        """ Return the gramps id of the object, or None. """
        if first_class_object is not None:
            return first_class_object.get_gramps_id()
        else:
            return
    def __parse_event(self, person, attrib_parse):
        """ Format the event named in the "[...]" block of the variable. """
        event = self.get_event_by_name(person, attrib_parse.get_name())
        event_f = EventFormat(self.database, self._in)
        if event:
            return event_f.parse_format(event)
        else:
            # Still consume the format string so parsing stays in sync.
            event_f.parse_empty()
            return
    def __get_photo(self, person_or_marriage):
        """ returns the first photo in the media list or None """
        media_list = person_or_marriage.get_media_list()
        for media_ref in media_list:
            media_handle = media_ref.get_reference_handle()
            media = self.database.get_object_from_handle(media_handle)
            mime_type = media.get_mime_type()
            if mime_type and mime_type.startswith("image"):
                return media
        return None
    def __parse_photo(self, person_or_marriage):
        """ Format the first image attached to the person or marriage. """
        photo_f = GalleryFormat(self.database, self._in)
        if person_or_marriage is None:
            return photo_f.parse_empty()
        photo = self.__get_photo(person_or_marriage)
        if photo:
            return photo_f.parse_format(photo)
        else:
            return photo_f.parse_empty()
    def parse_format(self):
        """Parse the $ variables. """
        if not self.is_a():
            return
        attrib_parse = AttributeParse(self._in)
        next_char = self._in.next
        self._in.step2()
        if next_char == "n":
            #Person's name
            return self.__parse_name(self.friend.person)
        elif next_char == "s":
            #Spouse's name
            return self.__parse_name(self.friend.spouse)
        elif next_char == "i":
            #Person's Id
            return self.__parse_id(self.friend.person)
        elif next_char == "j":
            #Marriage Id
            return self.__parse_id(self.friend.family)
        elif next_char == "b":
            #Person's Birth date
            if self.empty_item(self.friend.person):
                return
            return self.__parse_date(
                get_birth_or_fallback(self.friend.database, self.friend.person))
        elif next_char == "d":
            #Person's Death date
            if self.empty_item(self.friend.person):
                return
            return self.__parse_date(
                get_death_or_fallback(self.friend.database, self.friend.person))
        elif next_char == "m":
            #Marriage date
            if self.empty_item(self.friend.family):
                return
            return self.__parse_date(
                self.get_event_by_type(self.friend.family,
                                       EventType.MARRIAGE))
        elif next_char == "v":
            #Divorce date
            if self.empty_item(self.friend.family):
                return
            return self.__parse_date(
                self.get_event_by_type(self.friend.family,
                                       EventType.DIVORCE))
        elif next_char == "T":
            #Today's date
            date_f = DateFormat(self._in)
            from gramps.gen.lib.date import Today
            date = Today()
            if self.empty_item(date):
                return
            return date_f.parse_format(date)
        elif next_char == "B":
            #Person's birth place
            if self.empty_item(self.friend.person):
                return
            return self.__parse_place(
                get_birth_or_fallback(self.friend.database, self.friend.person))
        elif next_char == "D":
            #Person's death place
            if self.empty_item(self.friend.person):
                return
            return self.__parse_place(
                get_death_or_fallback(self.friend.database, self.friend.person))
        elif next_char == "M":
            #Marriage place
            if self.empty_item(self.friend.family):
                return
            return self.__parse_place(
                self.get_event_by_type(self.friend.family,
                                       EventType.MARRIAGE))
        elif next_char == "V":
            #Divorce place
            if self.empty_item(self.friend.family):
                return
            return self.__parse_place(
                self.get_event_by_type(self.friend.family,
                                       EventType.DIVORCE))
        elif next_char == "a":
            #Person's Attribute
            if self.empty_attribute(self.friend.person):
                return
            return attrib_parse.parse_format(
                self.friend.person.get_attribute_list())
        elif next_char == "u":
            #Marriage Attribute
            if self.empty_attribute(self.friend.family):
                return
            return attrib_parse.parse_format(
                self.friend.family.get_attribute_list())
        elif next_char == "e":
            #person event
            return self.__parse_event(self.friend.person, attrib_parse)
        elif next_char == "t":
            #family event
            return self.__parse_event(self.friend.family, attrib_parse)
        elif next_char == 'p':
            #photo for the person
            return self.__parse_photo(self.friend.person)
        elif next_char == 'P':
            #photo for the marriage
            return self.__parse_photo(self.friend.family)
#------------------------------------------------------------------------
#
# SubstKeywords
#
#------------------------------------------------------------------------
class SubstKeywords(object):
    """Accepts a person/family with format lines and returns a new set of lines
    using variable substitution to make it.
    The individual variables are defined with the classes that look for them.
    Needed:
    Database object
    person_handle
    This will be the center person for the display
    family_handle
    this will specify the specific family/spouse to work with.
    If none given, then the first/preferred family/spouse is used
    """
    def __init__(self, database, person_handle, family_handle=None):
        """get the person and find the family/spouse to use for this display"""
        self.database = database
        self.person = database.get_person_from_handle(person_handle)
        self.family = None
        self.spouse = None
        self.line = None #Consumable_string - set below
        if self.person is None:
            return
        fam_hand_list = self.person.get_family_handle_list()
        if fam_hand_list:
            if family_handle in fam_hand_list:
                self.family = database.get_family_from_handle(family_handle)
            else:
                #Error. fam_hand_list[0] below may give wrong marriage info.
                #only here because of OLD specifications. Specs read:
                # * $S/%S
                # Displays the name of the person's preferred ...
                # 'preferred' means FIRST.
                #The first might not be the correct marriage to display.
                #else: clause SHOULD be removed.
                self.family = database.get_family_from_handle(fam_hand_list[0])
            father_handle = self.family.get_father_handle()
            mother_handle = self.family.get_mother_handle()
            self.spouse = None
            # The spouse is whichever partner is NOT the center person.
            if father_handle == person_handle:
                if mother_handle:
                    self.spouse = database.get_person_from_handle(mother_handle)
            else:
                if father_handle:
                    self.spouse = database.get_person_from_handle(father_handle)
    def __parse_line(self):
        """parse each line of text and return the new displayable line
        There are four things we can find here
        A {} group which will make/end as needed.
        A <> separator
        A $ variable - Handled separately
        or text
        """
        stack_var = []
        curr_var = VarString(TXT.text)
        #First we are going take care of all variables/groups
        #break down all {} (groups) and $ (vars) into either
        #(TXT.text, resulting_string) or (TXT.remove, '')
        variable = VariableParse(self, self.database, self.line) # $
        while self.line.this:
            if self.line.this == "{":
                #Start of a group
                #push what we have onto the stack
                stack_var.append(curr_var)
                #Setup
                curr_var = VarString()
                #step
                self.line.step()
            elif self.line.this == "}" and len(stack_var) > 0: #End of a group
                #add curr to what is on the (top) stack and pop into current
                #or pop the stack into current and add TXT.remove
                direction = curr_var.state
                if direction == TXT.display:
                    #add curr onto the top slot of the stack
                    stack_var[-1].extend(curr_var)
                #pop what we have on the stack
                curr_var = stack_var.pop()
                if direction == TXT.remove:
                    #add remove que
                    curr_var.add_remove()
                #step
                self.line.step()
            elif variable.is_a(): # $ (variables)
                rtrn = variable.parse_format()
                if rtrn is None:
                    curr_var.add_remove()
                elif isinstance(rtrn, VarString):
                    curr_var.extend(rtrn)
                else:
                    curr_var.add_variable(rtrn)
            elif self.line.this == "<": # separator
                self.line.step()
                curr_var.add_separator(self.line.text_to_next(">"))
            else: #regular text
                curr_var.add_text(self.line.parse_format())
        #the stack is for groups/subgroup and may contain items
        #if the user does not close his/her {}
        #squash down the stack
        while stack_var:
            direction = curr_var.state
            if direction == TXT.display:
                #add curr onto the top slot of the stack
                stack_var[-1].extend(curr_var)
            #pop what we have on the stack
            curr_var = stack_var.pop()
            if direction == TXT.remove:
                #add remove que
                curr_var.add_remove()
            #step
            self.line.step()
        #return what we have
        return curr_var.get_final()
    def __main_level(self):
        #Check only if the user wants to not display the line if TXT.remove
        # A leading "-" means: drop the whole line when everything is removed.
        remove_line_tag = False
        if self.line.this == "-":
            remove_line_tag = True
            self.line.step()
        state, line = self.__parse_line()
        if state is TXT.remove and remove_line_tag:
            return None
        return line
    def replace_and_clean(self, lines):
        """
        return a new array of lines with all of the substitutions done
        """
        new = []
        for this_line in lines:
            if this_line == "":
                new.append(this_line)
                continue
            #print "- ", this_line
            self.line = ConsumableString(this_line)
            new_line = self.__main_level()
            #print "+ ", new_line
            if new_line is not None:
                new.append(new_line)
        # Never return an empty list; callers expect at least one line.
        if new == []:
            new = [""]
        return new
#Acts 20:35 (New International Version)
#In everything I did, I showed you that by this kind of hard work
#we must help the weak, remembering the words the Lord Jesus himself
#said: 'It is more blessed to give than to receive.'
if __name__ == '__main__':
#-------------------------------------------------------------------------
#
# For Testing everything except VariableParse, SubstKeywords and EventFormat
# apply it as a script:
#
# ==> in command line do "PYTHONPATH=??? python libsubstkeyword.py"
#
# You will need to put in your own path to the src directory
#
#-------------------------------------------------------------------------
# pylint: disable-msg=C0103
def combinations(c, r):
# combinations('ABCD', 2) --> AB AC AD BC BD CD
# combinations(range(4), 3) --> 012 013 023 123
pool = tuple(range(c))
n = len(pool)
if r > n:
return
indices = list(range(r))
yield tuple(pool[i] for i in indices)
while True:
for i in reversed(list(range(r))):
if indices[i] != i + n - r:
break
else:
return
indices[i] += 1
for j in range(i+1, r):
indices[j] = indices[j-1] + 1
yield tuple(pool[i] for i in indices)
    def main_level_test(_in, testing_class, testing_what):
        """This is a mini def __main_level(self):
        """
        # NOTE(review): LevelParse is not defined anywhere in this file, so
        # running this driver raises NameError.  It appears to target an
        # older revision of this module -- confirm before relying on it.
        main = LevelParse(_in)
        sepa = SeparatorParse(_in)
        test = testing_class(_in)
        while _in.this:
            if main.is_a():
                main.parse_format(_in)
            elif sepa.is_a():
                sepa.parse_format(main)
            elif _in.this == "$":
                _in.step()
                main.add_variable(
                    test.parse_format(testing_what))
            else:
                _in.parse_format(main)
        main.combine_all()
        state, line = main.get_string()
        if state is TXT.remove:
            return None
        else:
            return line
    # Self-test for DateFormat (see the note in main_level_test above).
    from gramps.gen.lib.date import Date
    y_or_n = ()
    date_to_test = Date()
    def date_set():
        # Fill in only the date parts selected by the y_or_n index tuple.
        date_to_test.set_yr_mon_day(
            1970 if 0 in y_or_n else 0,
            9 if 1 in y_or_n else 0,
            3 if 2 in y_or_n else 0
            )
        #print date_to_test
    line_in = "<Z>$(yyy) <a>$(<Z>Mm)<b>$(mm){<c>$(d)}{<d>$(yyyy)<e>}<f>$(yy)"
    consume_str = ConsumableString(line_in)
    print(line_in)
    print("#None are known")
    tmp = main_level_test(consume_str, DateFormat, date_to_test)
    print(tmp)
    print("Good" if tmp == " " else "!! bad !!")
    print()
    print()
    print("#One is known")
    answer = []
    for y_or_n in combinations(3, 1):
        date_set()
        consume_str = ConsumableString(line_in)
        tmp = main_level_test(consume_str, DateFormat, date_to_test)
        print(tmp)
        answer.append(tmp)
    print("Good" if answer == [
        "1970 d1970f70",
        " a99b09",
        " c3"
        ] else "!! bad !!")
    print()
    print()
    print("#Two are known")
    answer = []
    for y_or_n in combinations(3, 2):
        date_set()
        consume_str = ConsumableString(line_in)
        tmp = main_level_test(consume_str, DateFormat, date_to_test)
        print(tmp)
        answer.append(tmp)
    print("Good" if answer == [
        "1970 a99b09d1970f70",
        "1970 c3d1970f70",
        " a99b09c3"
        ] else "!! bad !!")
    print()
    print()
    print("#All are known")
    answer = []
    y_or_n = (0, 1, 2)
    date_set()
    consume_str = ConsumableString(line_in)
    tmp = main_level_test(consume_str, DateFormat, date_to_test)
    print(tmp)
    answer.append(tmp)
    print("Good" if answer == ["1970 a99b09c3d1970f70"
                               ] else "!! bad !!")
import sys
sys.exit()
print()
print()
print("=============")
print("=============")
from gramps.gen.lib.name import Name
y_or_n = ()
name_to_test = Name()
def name_set():
#code = "tfcnxslg"
name_to_test.set_call_name("Bob" if 0 in y_or_n else "")
name_to_test.set_title("Dr." if 1 in y_or_n else "")
name_to_test.set_first_name("Billy" if 2 in y_or_n else "")
name_to_test.set_nick_name("Buck" if 3 in y_or_n else "")
name_to_test.set_suffix("IV" if 4 in y_or_n else "")
#now can we put something in for the last name?
name_to_test.set_family_nick_name("The Clubs" if 5 in y_or_n else "")
line_in = "{$(c)$(t)<1>{<2>$(f)}{<3>$(n){<0> <0>}<4>$(x)}$(s)<5>$(l)<6>$(g)<0>"
consume_str = ConsumableString(line_in)
print()
print()
print(line_in)
print("#None are known")
tmp = main_level_test(consume_str, NameFormat, name_to_test)
print(tmp)
print("Good" if tmp == None else "!! bad !!")
print()
print()
print("#Two are known")
answer = []
for y_or_n in combinations(6, 2):
name_set()
consume_str = ConsumableString(line_in)
tmp = main_level_test(consume_str, NameFormat, name_to_test)
print(tmp)
answer.append(tmp)
print("Good" if answer == [
"BobDr.4Bob",
"Bob2Billy4Bob",
"Bob3Buck4Bob",
"Bob4BobIV",
"Bob4BobThe Clubs",
"Dr.2Billy4Billy",
"Dr.3Buck",
"Dr.1IV",
"Dr.6The Clubs",
"Billy3Buck4Billy",
"Billy4BillyIV",
"Billy4BillyThe Clubs",
"BuckIV",
"BuckThe Clubs",
"IV6The Clubs"
] else "!! bad !!")
print()
print()
print("#All are known")
y_or_n = (0, 1, 2, 3, 4, 5)
name_set()
consume_str = ConsumableString(line_in)
answer = main_level_test(consume_str, NameFormat, name_to_test)
print(answer)
print("Good" if answer == "BobDr.2Billy3Buck4BobIV6The Clubs" \
else "!! bad !!")
print()
print()
print("=============")
print("=============")
from gramps.gen.lib.place import Place
y_or_n = ()
place_to_test = Place()
def place_set():
#code = "elcuspnitxy"
main_loc = place_to_test.get_main_location()
main_loc.set_street(
"Lost River Ave." if 0 in y_or_n else ""
)
main_loc.set_locality(
"Second district" if 1 in y_or_n else ""
)
main_loc.set_city(
"Arco" if 2 in y_or_n else ""
)
main_loc.set_county(
"Butte" if 3 in y_or_n else ""
)
main_loc.set_state(
"Idaho" if 4 in y_or_n else ""
)
main_loc.set_postal_code(
"83213" if 5 in y_or_n else ""
)
main_loc.set_country(
"USA" if 6 in y_or_n else ""
)
main_loc.set_parish(
"St Anns" if 7 in y_or_n else ""
)
place_to_test.set_title(
"Atomic City" if 8 in y_or_n else ""
)
place_to_test.set_longitude(
"N43H38'5\"N" if 9 in y_or_n else ""
)
place_to_test.set_latitude(
"W113H18'5\"W" if 10 in y_or_n else ""
)
#code = "txy"
# Format string exercising the PlaceFormat codes, with nested {...}
# optional groups, <n> separators, and a deliberately malformed
# "$(i<0>)" code.
line_in = "$(e)<1>{<2>$(l) <3> $(c)<4><0><5>{$(s)<6>$(p)<7>" + \
          "{<1>$(n)<2>}<3>$(i<0>)<4>}<5>$(t)<6>$(x)<7>}<8>$(y)"
consume_str = ConsumableString(line_in)
print()
print()
print(line_in)
print("#None are known")
# With every place field empty the expansion must be the empty string.
tmp = main_level_test(consume_str, PlaceFormat, place_to_test)
print(tmp)
print("Good" if tmp == "" else "!! bad !!")
print()
print()
# NOTE(review): the header says "Three" but combinations(11, 4) selects
# four known fields per iteration -- confirm which was intended.
print("#Three are known (string lengths only)")
answer = []
for y_or_n in combinations(11, 4):
    place_set()
    consume_str = ConsumableString(line_in)
    tmp = main_level_test(consume_str, PlaceFormat, place_to_test)
    #print tmp
    # Comparing full strings for every combination would be unwieldy,
    # so only the length of each expansion is checked.
    answer.append(len(tmp))
print(answer)
# Expected expansion lengths, one per combination, in combinations() order.
print("Good" if answer == [38, 44, 44, 42, 46, 50, 49, 50, 40, 40, 38, 42,
    46, 45, 46, 46, 44, 48, 52, 51, 52, 44, 48, 52, 51, 52, 46, 50, 49, 50,
    54, 53, 54, 57, 58, 57, 28, 28, 26, 30, 34, 33, 34, 34, 32, 36, 40, 39,
    40, 32, 36, 40, 39, 40, 34, 38, 37, 38, 42, 41, 42, 45, 46, 45, 30, 28,
    32, 36, 35, 36, 28, 32, 36, 35, 36, 30, 34, 33, 34, 38, 37, 38, 41, 42,
    41, 34, 38, 42, 41, 42, 36, 40, 39, 40, 44, 43, 44, 47, 48, 47, 36, 40,
    39, 40, 44, 43, 44, 47, 48, 47, 42, 41, 42, 45, 46, 45, 49, 50, 49, 53,
    28, 28, 26, 30, 34, 33, 34, 34, 32, 36, 40, 39, 40, 32, 36, 40, 39, 40,
    34, 38, 37, 38, 42, 41, 42, 45, 46, 45, 30, 28, 32, 36, 35, 36, 28, 32,
    36, 35, 36, 30, 34, 33, 34, 38, 37, 38, 41, 42, 41, 34, 38, 42, 41, 42,
    36, 40, 39, 40, 44, 43, 44, 47, 48, 47, 36, 40, 39, 40, 44, 43, 44, 47,
    48, 47, 42, 41, 42, 45, 46, 45, 49, 50, 49, 53, 19, 17, 21, 25, 24, 25,
    17, 21, 25, 24, 25, 19, 23, 22, 23, 27, 26, 27, 30, 31, 30, 23, 27, 31,
    30, 31, 25, 29, 28, 29, 33, 32, 33, 36, 37, 36, 25, 29, 28, 29, 33, 32,
    33, 36, 37, 36, 31, 30, 31, 34, 35, 34, 38, 39, 38, 42, 19, 23, 27, 26,
    27, 21, 25, 24, 25, 29, 28, 29, 32, 33, 32, 21, 25, 24, 25, 29, 28, 29,
    32, 33, 32, 27, 26, 27, 30, 31, 30, 34, 35, 34, 38, 27, 31, 30, 31, 35,
    34, 35, 38, 39, 38, 33, 32, 33, 36, 37, 36, 40, 41, 40, 44, 33, 32, 33,
    36, 37, 36, 40, 41, 40, 44, 38, 39, 38, 42, 46] else "!! bad !!")
|
droundy/deft
|
papers/histogram/figs/yaml-comparison.py
|
#!/usr/bin/env python
# Compare the entropy (ln DOS) movies stored in Monte-Carlo YAML output
# against a reference density of states, and save per-save-time error
# columns for the comparison plots.
from __future__ import division
import sys, os
import numpy as np
import readnew
from glob import glob
#import re
import yaml
import os.path
import time # Need to wait some time if file is being written

# Example: /home/jordan/sad-monte-carlo/
filename_location = sys.argv[1]
# Example: data/samc-1e4-256-cpp-reference-lndos.dat
reference = sys.argv[2]
# Used for where we save the data.: s000/periodic-ww1.50-ff0.17-N256
filebase = sys.argv[3]
# The number to divide moves by! N is added back in comparison-plot
N = int(sys.argv[4])
# Energy range
Smin = int(sys.argv[5])
Smax = int(sys.argv[6])
# Are you comparing to a yaml reference?
# Fixed: bool(sys.argv[7]) was True for ANY non-empty argument,
# including "0" and "false"; parse the flag text explicitly.
yamlRef = sys.argv[7].strip().lower() not in ('', '0', 'false', 'no')
filename = sys.argv[8:]
print(('filenames are ', filename))
for f in filename:
    name = '%s.yaml' % (f)
    print(('trying filename ', name))
    # Block until the simulation has finished writing the file.
    while not os.path.exists(filename_location + name):
        print('I am waiting for file to be written.')
        time.sleep(30)

    # Read YAML file.  safe_load suffices (the data is plain scalars and
    # lists) and avoids constructing arbitrary Python objects.
    if os.path.isfile(filename_location + name):
        with open(filename_location + name, 'r') as stream:
            yaml_data = yaml.safe_load(stream)
    else:
        raise ValueError("%s isn't a file!" % (filename_location + name))
    #print(data_loaded)
    data = yaml_data
    data['bins']['histogram'] = np.array(data['bins']['histogram'])
    data['bins']['lnw'] = np.array(data['bins']['lnw'])

    # Index range of the requested energy window in the energy movie
    # (energies are stored negated).
    minyaml = data['movies']['energy'].index(-Smax)
    maxyaml = data['movies']['energy'].index(-Smin)
    #print(data['bins']['lnw'])
    moves = data['moves']
    data['movies']['entropy'] = np.array(data['movies']['entropy'])
    lndos = data['movies']['entropy']
    N_save_times = len(data['movies']['entropy'])

    ref = reference
    if ref[:len('data/')] != 'data/':
        ref = 'data/' + ref
    maxref = Smax #int(readnew.max_entropy_state(ref))
    minref = Smin # int(readnew.min_important_energy(ref))
    n_energies = int(minref - maxref+1)
    #print maxref, minref
    # Older reference files lack the per-state round-trip column.
    try:
        eref, lndosref, Nrt_ref = readnew.e_lndos_ps(ref)
    except Exception:
        eref, lndosref = readnew.e_lndos(ref)

    errorinentropy = np.zeros(N_save_times)
    maxerror = np.zeros(N_save_times)
    for i in range(0, N_save_times):
        # below just set average S equal between lndos and lndosref
        if yamlRef:
            # if using yaml as a reference the range is from 0 to len
            # while for C++ the range is from maxref to minref + 1
            norm_factor = np.mean(lndos[i][maxyaml:minyaml+1]) - np.mean(lndosref[0:(minyaml+1-maxyaml)])
            doserror = lndos[i][maxyaml:minyaml+1][::-1] - lndosref[0:(minyaml+1-maxyaml)] - norm_factor
        else:
            norm_factor = np.mean(lndos[i][maxyaml:minyaml+1]) - np.mean(lndosref[maxref:minref+1])
            doserror = lndos[i][maxyaml:minyaml+1][::-1] - lndosref[maxref:minref+1] - norm_factor
        errorinentropy[i] = np.sum(abs(doserror))/len(doserror)
        maxerror[i] = np.amax(doserror) - np.amin(doserror)

    # remove N from moves in yaml file because N is added back in the
    # comparison-plot script
    moves = list(map(int, data['movies']['time']))
    moves = [x / N for x in moves]
    errorinentropy = errorinentropy[:len(moves)]
    maxerror = maxerror[:len(moves)]

    dirname = 'data/comparison/%s-%s' % (filebase, name.replace('.yaml', ''))
    print('saving to', dirname)
    try:
        os.mkdir(dirname)
    except OSError:
        pass  # directory already exists
    else:
        print(("Successfully created the directory %s " % dirname))
    np.savetxt('%s/errors.txt' %(dirname),
               np.c_[moves, errorinentropy, maxerror],
               fmt = ('%.4g'),
               delimiter = '\t',
               header = 'iterations\t errorinentropy\t maxerror\t(generated with python %s' % ' '.join(sys.argv))

    # The following is intended for testing whether there is a
    # systematic error in any of our codes.
    #np.savetxt('%s/error-vs-energy.txt' %(dirname),
    #np.c_[eref, doserror],
    #fmt = ('%.4g'),
    #delimiter = '\t', header = 'E\t Serror')
|
hedmo/compizconfig-python
|
setup.py
|
# -*- coding: utf-8 -*-
from distutils.core import setup
from distutils.command.build import build as _build
from distutils.command.install import install as _install
from distutils.command.install_data import install_data as _install_data
from distutils.command.sdist import sdist as _sdist
from distutils.extension import Extension
import os
import subprocess
# If src/compizconfig.pyx exists, build using Cython; otherwise fall
# back to the pre-generated C source (e.g. in release tarballs).
if os.path.exists ("src/compizconfig.pyx"):
    from Cython.Distutils import build_ext
    ext_module_src = "src/compizconfig.pyx"
else: # Otherwise build directly from C source
    from distutils.command.build_ext import build_ext
    ext_module_src = "src/compizconfig.c"

# Read the package version; the file may hold either a bare version
# string or a "VERSION=x.y.z" assignment.  Fixed: the file handle was
# never closed -- use a context manager.
with open ("VERSION", "r") as version_file:
    version = version_file.read ().strip ()
if "=" in version:
    version = version.split ("=")[1]
def pkgconfig(*packages, **kw):
    """Query pkg-config for *packages* and fold its -I/-L/-l/-R flags
    into distutils Extension keyword arguments.

    Existing keyword arguments in *kw* are extended, and the updated
    dict is returned so it can be splatted into Extension(**...).
    """
    flag_map = {'-I': 'include_dirs', '-L': 'library_dirs', '-l': 'libraries', '-R': 'runtime_library_dirs'}
    cmd = ['pkg-config', '--libs', '--cflags']
    tokens = subprocess.Popen (cmd + list(packages), stdout=subprocess.PIPE).communicate()[0].split ()
    for t in tokens:
        if t.startswith ('-L'):
            kw.setdefault (flag_map.get ("-L"), []).append (t[2:])
            # Fixed: 'is "1"' compared object identity against a string
            # literal, which is not a reliable equality test; use != .
            if os.getenv ("COMPIZ_DISABLE_RPATH") != "1":
                kw.setdefault (flag_map.get ("-R"), []).append (t[2:])
        elif t.startswith ('-I'):
            kw.setdefault (flag_map.get ("-I"), []).append (t[2:])
        elif t.startswith ('-l'):
            kw.setdefault (flag_map.get ("-l"), []).append (t[2:])
    return kw
VERSION_FILE = os.path.join (os.path.dirname (__file__), "VERSION")

# Abort early with a helpful message if libcompizconfig's pkg-config
# metadata cannot be found.
pkgconfig_libs = subprocess.Popen (["pkg-config", "--libs", "libcompizconfig"], stdout=subprocess.PIPE, stderr=open(os.devnull, 'w')).communicate ()[0]
# Fixed: 'is 0' compared object identity against an int literal.
if len (pkgconfig_libs) == 0:
    print ("CompizConfig Python [ERROR]: No libcompizconfig.pc found in the pkg-config search path")
    print ("Ensure that libcompizonfig is installed or libcompizconfig.pc is in your $PKG_CONFIG_PATH")
    exit (1)
# Strip the leading "-l" from the first linker flag to get the library name.
libs = pkgconfig_libs[2:].split (" ")[0]

INSTALLED_FILES = "installed_files"
class install (_install):
    """Standard install command, extended to record the list of
    installed files so the custom ``uninstall`` command can remove
    them later."""

    def run (self):
        _install.run (self)
        outputs = self.get_outputs ()
        # Strip the root/prefix portion from each recorded path so the
        # manifest is relative to the installation prefix.
        length = 0
        if self.root:
            length += len (self.root)
        if self.prefix:
            length += len (self.prefix)
        if length:
            for counter in xrange (len (outputs)):
                outputs[counter] = outputs[counter][length:]
        data = "\n".join (outputs)
        # Best effort: failing to write the manifest only disables the
        # custom uninstall command, so warn instead of aborting.
        try:
            file = open (INSTALLED_FILES, "w")
        except:
            self.warn ("Could not write installed files list %s" % \
                        INSTALLED_FILES)
            return
        file.write (data)
        file.close ()
class install_data (_install_data):
    """Standard install_data command that additionally normalizes the
    permissions of every installed data file to rw-r--r--."""

    def run (self):
        # Fixed: S_IRUSR and friends were referenced without ever being
        # imported in this module, so chmod_data_file always fell into
        # its except branch; import them explicitly.
        from stat import S_IRUSR, S_IWUSR, S_IRGRP, S_IROTH

        def chmod_data_file (file):
            # Normalize one installed file's mode; warn on failure.
            try:
                os.chmod (file, S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH)
            except OSError:
                self.warn ("Could not chmod data file %s" % file)

        _install_data.run (self)
        # Use an explicit loop (not map) so the chmods run eagerly
        # under both Python 2 and Python 3.
        for output in self.get_outputs ():
            chmod_data_file (output)
class uninstall (_install):
    """Remove every file recorded in INSTALLED_FILES by the custom
    install command."""

    def run (self):
        # Fixed: bare 'except:' clauses swallowed everything including
        # KeyboardInterrupt; catch the specific I/O errors instead.
        try:
            file = open (INSTALLED_FILES, "r")
        except IOError:
            self.warn ("Could not read installed files list %s" % \
                        INSTALLED_FILES)
            return
        files = file.readlines ()
        file.close ()
        # Re-attach the root/prefix that install stripped when it
        # recorded the manifest.
        prepend = ""
        if self.root:
            prepend += self.root
        if self.prefix:
            prepend += self.prefix
        if len (prepend):
            for counter in xrange (len (files)):
                files[counter] = prepend + files[counter].rstrip ()
        for file in files:
            print ("Uninstalling %s" % file)
            try:
                os.unlink (file)
            except OSError:
                self.warn ("Could not remove file %s" % file)
class sdist (_sdist):
    """Source distribution command that ships the generated C file in
    place of the Cython source, so tarballs build without Cython."""

    def run (self):
        # Build C file
        if os.path.exists ("src/compizconfig.pyx"):
            from Cython.Compiler.Main import compile as cython_compile
            cython_compile ("src/compizconfig.pyx")
        # Run regular sdist
        _sdist.run (self)

    def add_defaults (self):
        _sdist.add_defaults (self)
        # Remove pyx source and add c source
        if os.path.exists ("src/compizconfig.pyx"):
            self.filelist.exclude_pattern ("src/compizconfig.pyx")
            self.filelist.append ("src/compizconfig.c")
# Package metadata and build wiring; pkgconfig() supplies the include
# and library flags needed to compile/link against libcompizconfig.
setup (
    name = "compizconfig-python",
    version = version,
    description = "CompizConfig Python",
    url = "http://www.compiz.org/",
    license = "GPL",
    maintainer = "Guillaume Seguin",
    maintainer_email = "guillaume@segu.in",
    # Custom commands defined above (install-manifest tracking,
    # uninstall, permission fixing, cython-aware sdist).
    cmdclass = {"uninstall" : uninstall,
                "install" : install,
                "install_data" : install_data,
                "build_ext" : build_ext,
                "sdist" : sdist},
    ext_modules=[
        Extension ("compizconfig", [ext_module_src],
                   **pkgconfig("libcompizconfig"))
        ]
    )
|
UdK-VPT/Open_eQuarter
|
oeq_tb/resources.py
|
# -*- coding: utf-8 -*-
# Resource object code
#
# Created by: The Resource Compiler for PyQt5 (Qt v5.12.1)
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore
qt_resource_data = b"\
\x00\x00\x04\x0a\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x17\x00\x00\x00\x18\x08\x06\x00\x00\x00\x11\x7c\x66\x75\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x06\x62\x4b\x47\x44\x00\xff\x00\xff\x00\xff\xa0\xbd\xa7\x93\x00\
\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\
\x00\x9a\x9c\x18\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xd9\x02\x15\
\x16\x11\x2c\x9d\x48\x83\xbb\x00\x00\x03\x8a\x49\x44\x41\x54\x48\
\xc7\xad\x95\x4b\x68\x5c\x55\x18\xc7\x7f\xe7\xdc\x7b\x67\xe6\xce\
\x4c\x66\x26\x49\xd3\x24\x26\xa6\xc6\xf8\x40\x21\xa5\x04\xb3\x28\
\xda\x98\x20\xa5\x0b\xad\x55\xa8\x2b\xc5\x50\x1f\xa0\x6e\x34\x2b\
\x45\x30\x14\x02\xba\x52\x69\x15\x17\x66\x63\x45\x97\x95\xa0\xad\
\x0b\xfb\xc0\x06\x25\xb6\x71\x61\x12\x41\x50\xdb\x2a\x21\xd1\xe2\
\x24\xf3\x9e\xc9\xcc\xbd\xe7\x1c\x17\x35\x43\x1e\x33\x21\xb6\xfd\
\x56\x87\xf3\x9d\xfb\xfb\x1e\xf7\xff\x9d\x23\x8c\x31\x43\x95\xf4\
\x85\x1e\x3f\x3b\x35\xac\xfd\xcc\x43\xdc\xa4\x49\x3b\xfe\x9d\x1d\
\xdb\x7b\x22\x90\x78\xf8\xb2\x28\xa7\xbe\x7d\xc1\x4b\x9d\x79\xdf\
\x18\x15\xe5\x16\x99\x10\x56\xde\x69\xdc\x3f\x22\xfd\xec\xd4\xf0\
\xad\x04\x03\x18\xa3\xa2\x7e\x76\x6a\x58\xde\x68\x2b\xb4\x36\xf8\
\xbe\xc6\x18\x53\xdb\xef\xe7\xfa\xec\xed\x67\x63\x10\x42\x00\xf0\
\xfb\xd5\x65\x2a\x15\x45\xc7\x6d\x0d\x00\xc4\xa2\xc1\xaa\x6f\x0d\
\x3e\x6c\xab\xc2\x1c\x56\xa4\x77\x4b\xb0\xf2\x35\x15\x5f\x21\x85\
\xe0\xc8\x6b\x5f\x92\x2d\x37\x33\x39\xf9\x03\x27\x8e\x1f\xa2\xf7\
\xbe\x9d\x04\x1c\x0b\x37\xe4\xac\xff\xa6\x30\x87\xbd\xba\x00\x6a\
\x06\x79\xe5\xf5\xaf\x89\xd9\x92\xc5\xcc\x0a\xd9\x7c\x19\xcf\xe9\
\xe2\xe4\xa9\x2f\x78\x7c\xff\x01\x72\x85\x0a\x2b\x65\x1f\xa5\x4c\
\xb5\xb2\x55\x16\x80\xbd\x31\xda\xda\x20\x1f\x7d\x3e\xcd\xc2\xfd\
\x59\xa6\x93\x39\x92\xd1\x22\xea\x9b\x16\xce\x9d\x3f\xce\xe0\x83\
\x03\x24\x82\x59\x3a\xdb\x7b\x88\xc7\x82\x68\x63\x58\xc9\xcc\x62\
\x8c\x21\x18\xb0\x6a\xc3\x37\x06\x49\x16\xff\x24\x6b\xa5\x49\xbb\
\x25\xbc\xa2\xa6\x21\xbb\x40\x7f\xdf\x00\x83\xbd\x01\x8e\x3c\xd5\
\x45\xd7\x8e\x6b\x9c\x9c\x98\x25\x1a\xb6\xe8\xbe\x3d\xc2\xdd\x77\
\x44\x48\xc4\x1c\x22\xe1\xeb\x58\x59\xaf\xcf\xd3\x33\x29\x2e\x34\
\x2d\x91\x93\x3e\xbe\x34\x78\x01\xc5\xe2\x61\xc5\xae\x72\x8e\x70\
\xc8\xc2\x0d\x5a\xbc\xf5\xee\x2f\x9c\xfa\x3e\x86\x69\x7a\x8e\xcf\
\x26\xe6\xf9\x63\xa1\x44\xa1\xa4\xd0\xda\x6c\x0d\x2f\x15\x7c\xb4\
\x67\x28\x59\x0a\xcf\xd6\x54\xe2\x06\x13\x87\x2b\x6f\x68\xa6\x27\
\xaf\x31\x32\x36\xc7\xb2\x7f\x17\xef\x7d\x7c\x8c\x33\x67\xcf\x12\
\x70\x24\x4a\x69\xd6\x6a\x46\xd6\xd3\x70\x72\xa9\x82\x67\x34\x45\
\xad\x28\xdb\x1a\x15\x34\x98\xff\x46\xed\xef\x37\x0d\x99\xbf\x4a\
\x3c\x30\x38\xc0\xc8\x4b\xaf\x92\x5a\x9c\xe2\xe0\x23\x6d\x74\xb4\
\xba\x84\x5d\x0b\x29\x45\x7d\xb8\x94\x82\x96\xb6\x10\xf3\xc5\x12\
\x2a\xef\x53\x11\x1a\x63\xad\x3f\x93\x19\x85\xf1\xb1\x77\x58\x5a\
\xf8\x99\x97\x9f\xe9\xa6\x75\x47\x90\xc6\xb8\x43\xd8\xb5\xb6\xce\
\xfc\xfa\xfd\x00\xfb\x3e\xf4\xc8\x05\x35\xba\x5e\xeb\x46\x21\xf9\
\xcf\x0a\xa9\x8c\x87\xe3\x48\xdc\x90\xb5\x6e\x98\x6a\xaa\x65\xf2\
\x52\x92\x43\x2f\x5e\xc2\x8c\x02\x1a\x10\xf5\x07\xac\xc3\x75\x70\
\x83\x92\x80\xb3\xf9\xd0\x26\xf8\x8f\xb3\x29\xc6\x3e\xb8\x8c\x19\
\x35\x75\x6b\x7b\x7e\x3c\xca\x45\x0c\x7e\x49\x31\xf4\x58\x3b\xf7\
\xf6\x34\x90\x88\x39\x04\x1c\x59\x1f\xfe\xdb\xd5\x3c\x5f\x9d\x4b\
\x32\xfd\x44\xb2\xba\xd7\xfa\xb6\x60\xcf\xde\x16\xdc\x90\x45\x4c\
\x4a\x2a\x9e\x62\xfe\x4e\xc5\xc8\xc1\x4e\xda\x76\x86\xe8\xe9\x0a\
\xe3\xd8\x92\x58\xd4\xc6\xb2\x44\x6d\x78\x2a\x53\xe1\xca\x7c\x99\
\x63\x5d\xbf\x56\x9d\xbd\x9f\x44\x18\x7a\xba\x95\x27\x0f\xb4\xd3\
\xdc\x18\xc0\xf3\x0d\x52\x40\xd8\xb5\xb0\xa4\x20\x14\xb2\x70\x6c\
\x81\x63\xcb\xaa\x42\xd6\xfd\xb7\xf4\xec\xa3\x06\xa0\x50\x52\xd8\
\x4e\x1b\x7e\x4a\xd3\x31\xf9\x29\xcf\xfe\xd4\x49\x7f\x5f\x13\xfb\
\xfa\x9b\x71\x43\x92\x58\xd4\x21\x18\x90\xac\xde\xb0\x42\x50\x13\
\x58\x33\xf3\x88\x6b\xa1\xfd\x65\x96\xf2\x79\xc6\x43\x7b\xd8\x75\
\x38\xcc\x3d\xdd\xd1\xaa\xcf\x71\xe4\xff\x7f\x91\x56\x33\xaf\xea\
\x37\xe7\xa1\x94\x21\x16\xb5\xd1\x06\x2c\x29\x36\xf5\x72\x9b\x96\
\x95\xc0\xc4\xda\x9d\x78\x83\x43\x53\x22\x80\x65\x09\x1c\xfb\x86\
\xc1\x00\xe7\x25\x70\x14\x48\x6f\x1e\x22\x51\xe3\x75\xd9\xb6\xa5\
\x81\xa3\x32\xb1\xfb\xf4\x0c\x30\xb8\xb1\x82\x9b\xb0\x09\x60\x30\
\xb1\xfb\xf4\xcc\xbf\xa0\xe9\x6e\xae\x5a\xdf\x4b\x81\x00\x00\x00\
\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
"
qt_resource_name = b"\
\x00\x07\
\x07\x3b\xe0\xb3\
\x00\x70\
\x00\x6c\x00\x75\x00\x67\x00\x69\x00\x6e\x00\x73\
\x00\x06\
\x07\x5c\x76\xa2\
\x00\x6f\
\x00\x65\x00\x71\x00\x5f\x00\x74\x00\x62\
\x00\x08\
\x0a\x61\x5a\xa7\
\x00\x69\
\x00\x63\x00\x6f\x00\x6e\x00\x2e\x00\x70\x00\x6e\x00\x67\
"
qt_resource_struct_v1 = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\
\x00\x00\x00\x14\x00\x02\x00\x00\x00\x01\x00\x00\x00\x03\
\x00\x00\x00\x26\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
"
qt_resource_struct_v2 = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x14\x00\x02\x00\x00\x00\x01\x00\x00\x00\x03\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x26\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x01\x69\x23\xc2\x96\x6e\
"
# Qt 5.8 introduced version 2 of the binary resource format; select the
# resource structure that matches the Qt version loaded at runtime.
qt_version = [int(v) for v in QtCore.qVersion().split('.')]
if qt_version < [5, 8, 0]:
    rcc_version = 1
    qt_resource_struct = qt_resource_struct_v1
else:
    rcc_version = 2
    qt_resource_struct = qt_resource_struct_v2

def qInitResources():
    # Register the embedded icon data with Qt's resource system.
    QtCore.qRegisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data)

def qCleanupResources():
    # Undo qInitResources().
    QtCore.qUnregisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data)

# Register on import, as pyrcc5-generated modules do.
qInitResources()
|
saangel/randomcoding
|
QuestionShuffling.py
|
import numpy as np

# import text file, which has a determined format: each question
# occupies `nlines` consecutive lines; the alternatives live in
# slice [4:9] of a question.
nlines = 6
nquestions = 16

# Use context managers so both files are closed even if shuffling fails
# part-way through (the originals were only closed on the happy path).
with open("test.dat") as a, open("test2.dat", "w") as b:
    g = a.read().split("\n")
    q1 = [g[nlines * i:nlines * (i + 1)] for i in range(nquestions)]
    # these two lines can be commented if you want to shuffle last question also
    last = q1[-1]
    q2 = q1[:-1]
    np.random.shuffle(q2)
    for q in q2:
        alts = q[4:9]
        np.random.shuffle(alts)
        # NOTE(review): with nlines=6, q[4:9] and q[-2:] overlap, so the
        # reassembled question repeats the alternative lines -- confirm
        # this matches the intended output format.
        q = np.concatenate([q[:4], alts, q[-2:]])
        for l in q:
            b.write(str(l) + "\n")
    # comment this block also if you want to shuffle last question
    alts = last[4:9]
    np.random.shuffle(alts)
    last = np.concatenate([last[:4], alts, last[-2:]])
    for l in last:
        b.write(str(l) + "\n")
|
szecsi/Gears
|
GearsPy/Project/Components/Forward/Flyby.py
|
import Gears as gears
from .. import *
try:
from OpenGL.GL import *
from OpenGL.GLU import *
except:
print ('ERROR: PyOpenGL not installed properly.')
import random
def box() :
    """Draw an axis-aligned cube spanning [-1, 1]^3 as six colored
    immediate-mode quads.

    Emits exactly the same glColor3f/glVertex3f sequence as the
    original hand-unrolled version: top, bottom, front, back, left,
    right.
    """
    faces = (
        ((0.0, 1.0, 0.0), ((1.0, 1.0, -1.0), (-1.0, 1.0, -1.0),
                           (-1.0, 1.0, 1.0), (1.0, 1.0, 1.0))),
        ((1.0, 0.5, 0.0), ((1.0, -1.0, 1.0), (-1.0, -1.0, 1.0),
                           (-1.0, -1.0, -1.0), (1.0, -1.0, -1.0))),
        ((1.0, 0.0, 0.0), ((1.0, 1.0, 1.0), (-1.0, 1.0, 1.0),
                           (-1.0, -1.0, 1.0), (1.0, -1.0, 1.0))),
        ((1.0, 1.0, 0.0), ((1.0, -1.0, -1.0), (-1.0, -1.0, -1.0),
                           (-1.0, 1.0, -1.0), (1.0, 1.0, -1.0))),
        ((0.0, 0.0, 1.0), ((-1.0, 1.0, 1.0), (-1.0, 1.0, -1.0),
                           (-1.0, -1.0, -1.0), (-1.0, -1.0, 1.0))),
        ((1.0, 0.0, 1.0), ((1.0, 1.0, -1.0), (1.0, 1.0, 1.0),
                           (1.0, -1.0, 1.0), (1.0, -1.0, -1.0))),
    )
    glBegin(GL_QUADS)
    for face_color, corners in faces:
        glColor3f(*face_color)
        for corner in corners:
            glVertex3f(*corner)
    glEnd()
class Flyby() :
    """Forward-rendering component that flies the camera through an
    endless field of randomly placed colored boxes."""

    # Keyword arguments captured at construction; forwarded to
    # applyWithArgs() when the component is applied to a stimulus.
    args = None

    def __init__(self, **args):
        self.args = args

    def apply(self, stimulus) :
        self.applyWithArgs(stimulus, **self.args)

    def applyWithArgs(
            self,
            stimulus,
            ) :
        # Render in color and hook this object into the stimulus life
        # cycle: per-frame render callback plus start/finish events.
        stimulus.enableColorMode()
        stimulus.setForwardRenderingCallback(self.render)
        stimulus.registerCallback(gears.StimulusStartEvent.typeId, self.start)
        stimulus.registerCallback(gears.StimulusEndEvent.typeId, self.finish)

    def start( self, event ):
        print('hello start flyby')
        # Compile 400 randomly translated boxes into one display list so
        # each frame only issues glCallList instead of re-sending geometry.
        self.glist = glGenLists(1)
        glNewList(self.glist, GL_COMPILE)
        for i in range(0, 400) :
            glPushMatrix()
            glTranslated(
                random.uniform( a = -20, b = 20),
                random.uniform( a = -20, b = 20),
                random.uniform( a = -20, b = 20),
                )
            box()
            glPopMatrix()
        glEndList()

    def finish( self, event ):
        # Free the display list built in start().
        glDeleteLists(self.glist, 1)

    def render(self, iFrame):
        # Camera advances 0.1 units per frame along -z; the box field is
        # drawn twice, 40 units apart, and shifted back 40 units for
        # every 400 frames elapsed so the flight appears endless.
        glEnable(GL_DEPTH_TEST)
        glDepthMask(GL_TRUE);
        glClearColor(0.0, 0.0, 0.0, 1.0 )
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
        glMatrixMode(GL_PROJECTION)
        glLoadIdentity()
        gluPerspective(45, 1, 0.1, 1000)
        glMatrixMode(GL_MODELVIEW)
        glLoadIdentity()
        gluLookAt(0, 0, 20- iFrame * 0.1, 0, 0, 19 - iFrame * 0.1, 0, 1, 0)
        glTranslated(0, 0, -40 * (iFrame // 400))
        glCallList(self.glist)
        glTranslated(0, 0, -40)
        glCallList(self.glist)
        glDisable(GL_DEPTH_TEST)
        glDepthMask(GL_FALSE);
|
bcopy/raspbuggy
|
modules/pywebide/src/main/python/raspbuggy/webide/main.py
|
'''
Created on Apr 19, 2015
@author: bcopy
'''
import os
import cherrypy
import sys
import subprocess
import random
import time
import threading
import Queue
import tempfile
class ScriptMonitor(object):
'''
Monitors the script execution and updates result statuses
'''
def __init__(self):
self.m_processInitialized = False
def monitor(self, process):
assert isinstance(process, subprocess.Popen)
self.m_processInitialized = True
self.m_process = process
if(self.m_process.pid != None and self.m_process.poll() == None):
print "Starting raspbuggy script process output polling..."
self.m_stdoutQueue = Queue.Queue()
self.m_stderrQueue = Queue.Queue()
self.m_stdoutReader = AsynchronousFileReader(self.m_process.stdout, self.m_stdoutQueue)
self.m_stdoutReader.start()
else:
print "Raspbuggy script process startup failed."
def abort(self):
print "Starting raspbuggy script process output polling..."
if(self.m_processInitialized and self.m_process.poll() == None):
self.m_process.terminate()
self.m_processInitialized = False
def isRunning(self):
return (self.m_processInitialized and self.m_process.poll() == None)
def getStdoutQueue(self):
return self.m_stdoutQueue
def getStderrQueue(self):
return self.m_stderrQueue
class AsynchronousFileReader(threading.Thread):
    '''
    Helper class to implement asynchronous reading of a file
    in a separate thread. Pushes read lines on a queue to
    be consumed in another thread.
    '''
    def __init__(self, fd, queue):
        # fd: any object with a readline() method (e.g. a pipe's stdout);
        # queue: Queue.Queue the read lines are pushed onto.
        assert isinstance(queue, Queue.Queue)
        assert callable(fd.readline)
        threading.Thread.__init__(self)
        self._fd = fd
        self._queue = queue

    def run(self):
        '''The body of the thread: read lines and put them on the queue.'''
        # iter(readline, '') yields lines until readline returns '' (EOF).
        for line in iter(self._fd.readline, ''):
            self._queue.put(line)

    def eof(self):
        '''Check whether there is no more content to expect.'''
        # EOF means the reader thread finished AND everything it queued
        # has been consumed.
        return not self.is_alive() and self._queue.empty()
class RaspbuggyService(object):
    """CherryPy application exposing a small JSON API to upload, run
    and monitor user scripts on the buggy."""

    def __init__(self):
        # Created lazily on the first execute(); None until then.
        self.m_scriptMonitor = None

    @cherrypy.expose
    @cherrypy.tools.json_out()
    def ping(self):
        # Liveness probe.
        return {"msg": "pong"}

    @cherrypy.expose
    @cherrypy.tools.json_out()
    def status(self):
        # Report whether a script is running plus its exit code;
        # -1 stands for "still running" or "nothing started yet".
        if(self.m_scriptMonitor != None):
            running = self.m_scriptMonitor.isRunning()
            retCode = self.m_scriptMonitor.m_process.poll()
            if(retCode == None):
                retCode = -1
            return {"running":running,"exitCode":retCode}
        else:
            return {"running":False,"exitCode":-1}

    @cherrypy.expose
    @cherrypy.tools.json_in()
    @cherrypy.tools.json_out()
    def execute(self):
        # Write the posted script text to a fixed path under /tmp and
        # run it with the "python" interpreter under ScriptMonitor.
        scriptData = cherrypy.request.json
        if(self.m_scriptMonitor == None):
            self.m_scriptMonitor = ScriptMonitor()
        if(scriptData["scriptText"] == None):
            return {"success":False, "message":"Script contents undefined"}
        elif(self.m_scriptMonitor.isRunning()):
            return {"success":False, "message":"Script already running !"}
        else:
            # Write the script to a temporary file
            #scriptFile = tempfile.NamedTemporaryFile(prefix='raspbuggy-script-')
            scriptFile = open("/tmp/raspbuggy-script.py", "w")
            scriptFile.write(scriptData["scriptText"]+"\n")
            scriptFile.close()
            print "Executing script "+scriptFile.name+" ..."
            # stderr is merged into stdout so one reader thread suffices.
            scriptProcess = subprocess.Popen(["python", scriptFile.name], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, bufsize=128)
            if(scriptProcess.pid != None):
                self.m_scriptMonitor.monitor(scriptProcess)
                return {"success":True, "message": "Running script (pid "+str(self.m_scriptMonitor.m_process.pid)+")"}
            else:
                return {"success":False, "message": "Could not start up script"}

    @cherrypy.expose
    @cherrypy.tools.json_in()
    @cherrypy.tools.json_out()
    def abort(self):
        # NOTE(review): stub -- never calls self.m_scriptMonitor.abort(),
        # so a running script is not actually terminated. Confirm intent.
        return {"result":1}

    @cherrypy.expose
    @cherrypy.tools.json_out()
    def tailStdOut(self):
        # NOTE(review): placeholder data; the stdout queue filled by
        # ScriptMonitor is never drained here yet.
        return {"tail": "New line\nNew line"}
if __name__ == '__main__':
    # Static asset locations, overridable via environment variables for
    # deployment; defaults match the maven-style source layout.
    WEBAPP_ROOT = os.getenv('RASPBUGGY_WEBAPP_ROOT',os.getcwd()+"/src/main/webapp")
    BLOCKLY_ROOT = os.getenv('BLOCKLY_ROOT',os.getcwd()+"/target/webjars/META-INF/resources/webjars/blockly/b35c0fbfa2")
    BOOTSTRAP_ROOT = os.getenv('BOOTSTRAP_ROOT',os.getcwd()+"/target/webjars/META-INF/resources/webjars/bootstrap/3.3.4")
    JQUERY_ROOT = os.getenv('JQUERY_ROOT',os.getcwd()+"/target/webjars/META-INF/resources/webjars/jquery/1.9.1")
    #print os.path.abspath(WEBAPP_ROOT)
    #print os.path.abspath(BLOCKLY_ROOT)
    # Mount the JSON API at "/" with each static bundle served beside it.
    cherrypy.quickstart(RaspbuggyService(), "/",
        {
            '/':
            {
                'tools.staticdir.on': True,
                'tools.staticdir.dir': os.path.abspath(WEBAPP_ROOT)
            },
            '/blockly':
            {
                'tools.staticdir.on': True,
                'tools.staticdir.dir': os.path.abspath(BLOCKLY_ROOT)
            },
            '/bootstrap':
            {
                'tools.staticdir.on': True,
                'tools.staticdir.dir': os.path.abspath(BOOTSTRAP_ROOT)
            },
            '/jquery':
            {
                'tools.staticdir.on': True,
                'tools.staticdir.dir': os.path.abspath(JQUERY_ROOT)
            }
        })
|
jjneely/webkickstart
|
archive/centos5.py
|
#!/usr/bin/python
#
# centos5.py - A webKickstart module to handle changes needed from
# RHEL 5 to CentOS 5 Kickstart generation.
#
# Copyright 2007 NC State University
# Written by Jack Neely <jjneely@ncsu.edu>
#
# SDG
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
from baseRealmLinuxKickstart import baseRealmLinuxKickstart
class Kickstart(baseRealmLinuxKickstart):
    """Kickstart generator adjusted for CentOS 5.

    Drops the installation-number and RHN registration build steps
    inherited from the RHEL 5 generator, which do not apply here.
    """

    def __init__(self, url, cfg, sc=None):
        baseRealmLinuxKickstart.__init__(self, url, cfg, sc)
        for unsupported_step in (self.installationNumber, self.RHN):
            self.buildOrder.remove(unsupported_step)
|
bpain2010/kgecweb
|
hostels/models.py
|
from django.db import models
from stdimage import StdImageField
from django.core.validators import RegexValidator
import datetime
# Selectable years: 1980 through the current year inclusive, as
# (value, label) pairs for Django choice fields.
YEAR_CHOICES = [(year, year)
                for year in range(1980, datetime.datetime.now().year + 1)]

# Year-of-study choices; stored value and display label are identical.
S_CHOICE = [('1stYear', '1stYear'), ('2ndYear', '2ndYear'),
            ('3rdYear', '3rdYear'), ('4thYear', '4thYear')]
# Create your models here.
class Hostel(models.Model):
    """A hall of residence; the name doubles as the primary key."""
    HostelName = models.CharField(max_length=100, primary_key=True)
    HostelType = models.CharField(max_length=10)
    HostelSeat = models.IntegerField()
    # Logo image with an auto-generated cropped 675x300 "large" variation.
    HostelImage = StdImageField(upload_to='Hostels/logo/',variations={'large': (675, 300,True)})
    HostelAddress = models.CharField(max_length=200)
    HostelDescription = models.TextField()
    HostelEmail = models.EmailField()
    # Accepts 10-13 digits with an optional leading "+" and "1" prefix.
    phone_regex = RegexValidator(regex=r'^\+?1?\d{10,13}$', message="Phone number must be entered in the format: '+999999999'. Up to 13 digits allowed.")
    HostelPhoneNo = models.CharField(max_length=13,validators=[phone_regex], blank=True)

    def __str__(self):
        return self.HostelName
class HostelEvents(models.Model):
    """An event hosted by a hostel."""
    HostelName = models.ForeignKey(Hostel)
    HostelEventsName = models.CharField(max_length=100)
    HostelEventDescription = models.TextField()

    def __str__(self):
        return self.HostelEventsName
class HostelPictureGalary(models.Model):
    """One photo in a hostel's picture gallery.

    NOTE(review): class name misspells "Gallery", but renaming would
    require a migration and break existing references.
    """
    HostelName = models.ForeignKey(Hostel)
    PictureName = models.CharField(max_length=100)
    # Stored image with an auto-generated cropped 675x300 "large" variation.
    PictureLocation = StdImageField(upload_to='Hostels/galary/',variations={'large': (675, 300,True)})

    def __str__(self):
        return self.PictureName
class HostelBody(models.Model):
    """A member of a hostel's governing body for a given year."""
    HostelName = models.ForeignKey(Hostel)
    HostelbodyRole = models.CharField(max_length=100)
    # NOTE(review): datetime.now().year is evaluated once at import time,
    # so the default goes stale on a long-running server after New Year;
    # a callable default would track the current year. Confirm intent.
    HostelbodyRoleYear = models.IntegerField(choices=YEAR_CHOICES, default=datetime.datetime.now().year)
    # NOTE(review): max_length=10 looks short for a person's full name.
    PersonName = models.CharField (max_length=10)
    PersonYear = models.CharField (max_length=7, choices=S_CHOICE,default='NA')
    PersonImage = StdImageField(upload_to='Hostels/gb/',variations={'thumbnail': (300, 200,True)})

    def __str__(self):
        return self.HostelbodyRole
|
JamesLinEngineer/RKMC
|
addons/script.ftvguide/gui.py
|
#
# Copyright (C) 2014 Tommy Winther
# http://tommy.winther.nu
#
# Modified for FTV Guide (09/2014 onwards)
# by Thomas Geppert [bluezed] - bluezed.apps@gmail.com
#
# This Program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This Program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this Program; see the file LICENSE.txt. If not, write to
# the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.
# http://www.gnu.org/copyleft/gpl.html
#
import datetime
import json
import os
import threading
import time
import xbmc
import xbmcgui
import source as src
from notification import Notification
from strings import *
import streaming
from utils import reset_playing
DEBUG = False

# Interaction modes for the guide window (see TVGuide.onAction).
MODE_EPG = 'EPG'
MODE_TV = 'TV'
MODE_OSD = 'OSD'

# Kodi action ids dispatched in the onAction* handlers.
ACTION_LEFT = 1
ACTION_RIGHT = 2
ACTION_UP = 3
ACTION_DOWN = 4
ACTION_PAGE_UP = 5
ACTION_PAGE_DOWN = 6
ACTION_SELECT_ITEM = 7
ACTION_PARENT_DIR = 9
ACTION_PREVIOUS_MENU = 10
ACTION_SHOW_INFO = 11
ACTION_NEXT_ITEM = 14
ACTION_PREV_ITEM = 15

ACTION_MOUSE_WHEEL_UP = 104
ACTION_MOUSE_WHEEL_DOWN = 105
ACTION_MOUSE_MOVE = 107

# Raw key codes.
KEY_NAV_BACK = 92
KEY_CONTEXT_MENU = 117
KEY_HOME = 159
KEY_ESC = 61467

# Number of channel rows on one EPG page.
CHANNELS_PER_PAGE = 8
# Width of a single EPG time slot.
HALF_HOUR = datetime.timedelta(minutes=30)

# Skin chosen in the add-on settings (ADDON presumably comes from
# 'from strings import *' -- confirm).
SKIN = ADDON.getSetting('skin')
def debug(s):
    """Log *s* to the Kodi debug log, but only when DEBUG is enabled."""
    if not DEBUG:
        return
    xbmc.log(str(s), xbmc.LOGDEBUG)
class Point(object):
    """Mutable 2-D point; used to remember the EPG focus position."""

    def __init__(self):
        self.x = 0
        self.y = 0

    def __repr__(self):
        return 'Point(x=%d, y=%d)' % (self.x, self.y)
class EPGView(object):
    """Geometry of the on-screen EPG grid area, all fields in pixels."""

    def __init__(self):
        self.top = 0
        self.left = 0
        self.right = 0
        self.bottom = 0
        self.width = 0
        self.cellHeight = 0
class ControlAndProgram(object):
    """Pairs a GUI control with the program it currently displays."""

    def __init__(self, control, program):
        self.program = program
        self.control = control
class TVGuide(xbmcgui.WindowXML):
C_MAIN_DATE_LONG = 3999
C_MAIN_DATE = 4000
C_MAIN_TITLE = 4020
C_MAIN_TIME = 4021
C_MAIN_DESCRIPTION = 4022
C_MAIN_IMAGE = 4023
C_MAIN_LOGO = 4024
C_MAIN_TIMEBAR = 4100
C_MAIN_LOADING = 4200
C_MAIN_LOADING_PROGRESS = 4201
C_MAIN_LOADING_TIME_LEFT = 4202
C_MAIN_LOADING_CANCEL = 4203
C_MAIN_MOUSE_CONTROLS = 4300
C_MAIN_MOUSE_HOME = 4301
C_MAIN_MOUSE_LEFT = 4302
C_MAIN_MOUSE_UP = 4303
C_MAIN_MOUSE_DOWN = 4304
C_MAIN_MOUSE_RIGHT = 4305
C_MAIN_MOUSE_EXIT = 4306
C_MAIN_BACKGROUND = 4600
C_MAIN_EPG = 5000
C_MAIN_EPG_VIEW_MARKER = 5001
C_MAIN_OSD = 6000
C_MAIN_OSD_TITLE = 6001
C_MAIN_OSD_TIME = 6002
C_MAIN_OSD_DESCRIPTION = 6003
C_MAIN_OSD_CHANNEL_LOGO = 6004
C_MAIN_OSD_CHANNEL_TITLE = 6005
def __new__(cls):
return super(TVGuide, cls).__new__(cls, 'script-tvguide-main.xml', ADDON.getAddonInfo('path'), SKIN)
def __init__(self):
super(TVGuide, self).__init__()
self.notification = None
self.redrawingEPG = False
self.isClosing = False
self.controlAndProgramList = list()
self.ignoreMissingControlIds = list()
self.channelIdx = 0
self.focusPoint = Point()
self.epgView = EPGView()
self.streamingService = streaming.StreamsService(ADDON)
self.player = xbmc.Player()
self.database = None
self.proc_file = xbmc.translatePath(os.path.join(ADDON.getAddonInfo('profile'), 'proc'))
if not os.path.exists(self.proc_file):
self.reset_playing()
self.mode = MODE_EPG
self.currentChannel = None
self.osdEnabled = ADDON.getSetting('enable.osd') == 'true' and ADDON.getSetting(
'alternative.playback') != 'true'
self.alternativePlayback = ADDON.getSetting('alternative.playback') == 'true'
self.osdChannel = None
self.osdProgram = None
# find nearest half hour
self.viewStartDate = datetime.datetime.today()
self.viewStartDate -= datetime.timedelta(minutes=self.viewStartDate.minute % 30,
seconds=self.viewStartDate.second)
def getControl(self, controlId):
try:
return super(TVGuide, self).getControl(controlId)
except:
if controlId in self.ignoreMissingControlIds:
return None
if not self.isClosing:
self.close()
return None
def close(self):
if not self.isClosing:
self.isClosing = True
if self.player.isPlaying():
if ADDON.getSetting('background.stream') == 'false':
self.reset_playing()
self.player.stop()
if self.database:
self.database.close(super(TVGuide, self).close)
else:
super(TVGuide, self).close()
def onInit(self):
    """Build the EPG once the window's XML controls exist.

    Restores the previously playing channel position (persisted in the
    proc file) and starts database initialisation on first run.
    """
    is_playing, play_data = self.check_is_playing()
    self._hideControl(self.C_MAIN_MOUSE_CONTROLS, self.C_MAIN_OSD)
    self._showControl(self.C_MAIN_EPG, self.C_MAIN_LOADING)
    self.setControlLabel(self.C_MAIN_LOADING_TIME_LEFT, strings(BACKGROUND_UPDATE_IN_PROGRESS))
    self.setFocusId(self.C_MAIN_LOADING_CANCEL)
    # Derive the EPG grid geometry from the invisible view-marker control.
    control = self.getControl(self.C_MAIN_EPG_VIEW_MARKER)
    if control:
        left, top = control.getPosition()
        self.focusPoint.x = left
        self.focusPoint.y = top
        self.epgView.left = left
        self.epgView.top = top
        self.epgView.right = left + control.getWidth()
        self.epgView.bottom = top + control.getHeight()
        self.epgView.width = control.getWidth()
        self.epgView.cellHeight = control.getHeight() / CHANNELS_PER_PAGE
    if is_playing and 'idx' in play_data:
        # Resume at the channel that was playing before the GUI restarted.
        self.viewStartDate = datetime.datetime.today()
        self.viewStartDate -= datetime.timedelta(minutes=self.viewStartDate.minute % 30,
                                                 seconds=self.viewStartDate.second)
        self.channelIdx = play_data['idx']
        if self.database and 'y' in play_data:
            self.focusPoint.y = play_data['y']
        self.onRedrawEPG(self.channelIdx, self.viewStartDate,
                         focusFunction=self._findCurrentTimeslot)
    elif self.database:
        self.onRedrawEPG(self.channelIdx, self.viewStartDate)
    else:
        # First entry: create and initialise the database asynchronously.
        try:
            self.database = src.Database()
        except src.SourceNotConfiguredException:
            self.onSourceNotConfigured()
            self.close()
            return
        self.database.initialize(self.onSourceInitialized,
                                 self.isSourceInitializationCancelled)
    self.updateTimebar()
def onAction(self, action):
    """Dispatch a remote/keyboard action to the handler for the current mode."""
    debug('Mode is: %s' % self.mode)
    dispatch = {
        MODE_TV: self.onActionTVMode,
        MODE_OSD: self.onActionOSDMode,
        MODE_EPG: self.onActionEPGMode,
    }
    handler = dispatch.get(self.mode)
    if handler is not None:
        handler(action)
def onActionTVMode(self, action):
    """Handle actions while watching TV full-screen (no OSD visible)."""
    actionId = action.getId()
    if actionId == ACTION_PAGE_UP:
        self._channelUp()
        return
    if actionId == ACTION_PAGE_DOWN:
        self._channelDown()
        return
    if not self.osdEnabled:
        return  # remaining actions only make sense with the OSD enabled
    if actionId in [ACTION_PARENT_DIR, KEY_NAV_BACK, KEY_CONTEXT_MENU, ACTION_PREVIOUS_MENU]:
        self.onRedrawEPG(self.channelIdx, self.viewStartDate)
    elif actionId == ACTION_SHOW_INFO:
        self._showOsd()
def onActionOSDMode(self, action):
    """Handle actions while the on-screen display is visible.

    Up/down browse channels, left/right browse programmes on the current
    OSD channel; select switches playback to the OSD selection.
    """
    if action.getId() == ACTION_SHOW_INFO:
        self._hideOsd()
    elif action.getId() in [ACTION_PARENT_DIR, KEY_NAV_BACK, KEY_CONTEXT_MENU, ACTION_PREVIOUS_MENU]:
        # Back out of the OSD and return to the EPG grid.
        self._hideOsd()
        self.onRedrawEPG(self.channelIdx, self.viewStartDate)
    elif action.getId() == ACTION_SELECT_ITEM:
        # Start playing the channel selected in the OSD.
        if self.playChannel(self.osdChannel, self.osdProgram):
            self._hideOsd()
    elif action.getId() == ACTION_PAGE_UP:
        self._channelUp()
        self._showOsd()
    elif action.getId() == ACTION_PAGE_DOWN:
        self._channelDown()
        self._showOsd()
    elif action.getId() == ACTION_UP:
        self.osdChannel = self.database.getPreviousChannel(self.osdChannel)
        self.osdProgram = self.database.getCurrentProgram(self.osdChannel)
        self._showOsd()
    elif action.getId() == ACTION_DOWN:
        self.osdChannel = self.database.getNextChannel(self.osdChannel)
        self.osdProgram = self.database.getCurrentProgram(self.osdChannel)
        self._showOsd()
    elif action.getId() == ACTION_LEFT:
        # Step to the previous programme if one exists.
        previousProgram = self.database.getPreviousProgram(self.osdProgram)
        if previousProgram:
            self.osdProgram = previousProgram
            self._showOsd()
    elif action.getId() == ACTION_RIGHT:
        nextProgram = self.database.getNextProgram(self.osdProgram)
        if nextProgram:
            self.osdProgram = nextProgram
            self._showOsd()
def onActionEPGMode(self, action):
    """Handle actions while the EPG grid is visible.

    Navigation moves the focus between programme buttons; paging scrolls
    channels, next/prev item jump whole days.
    """
    if action.getId() in [ACTION_PARENT_DIR, KEY_NAV_BACK]:
        self.close()
        return
    # catch the ESC key
    elif action.getId() == ACTION_PREVIOUS_MENU and action.getButtonCode() == KEY_ESC:
        self.close()
        return
    elif action.getId() == ACTION_MOUSE_MOVE:
        self._showControl(self.C_MAIN_MOUSE_CONTROLS)
        return
    elif action.getId() == KEY_CONTEXT_MENU:
        # Context menu while a stream plays in the background: return to TV mode.
        if self.player.isPlaying():
            self._hideEpg()
    controlInFocus = None
    currentFocus = self.focusPoint
    try:
        controlInFocus = self.getFocus()
        if controlInFocus in [elem.control for elem in self.controlAndProgramList]:
            # Remember the centre of the focused programme button.
            (left, top) = controlInFocus.getPosition()
            currentFocus = Point()
            currentFocus.x = left + (controlInFocus.getWidth() / 2)
            currentFocus.y = top + (controlInFocus.getHeight() / 2)
    except Exception:
        # Nothing has focus yet: focus the control nearest to the last
        # focus point (or the first programme) and stop here.
        control = self._findControlAt(self.focusPoint)
        if control is None and len(self.controlAndProgramList) > 0:
            control = self.controlAndProgramList[0].control
        if control is not None:
            self.setFocus(control)
        return
    if action.getId() == ACTION_LEFT:
        self._left(currentFocus)
    elif action.getId() == ACTION_RIGHT:
        self._right(currentFocus)
    elif action.getId() == ACTION_UP:
        self._up(currentFocus)
    elif action.getId() == ACTION_DOWN:
        self._down(currentFocus)
    elif action.getId() == ACTION_NEXT_ITEM:
        self._nextDay()
    elif action.getId() == ACTION_PREV_ITEM:
        self._previousDay()
    elif action.getId() == ACTION_PAGE_UP:
        self._moveUp(CHANNELS_PER_PAGE)
    elif action.getId() == ACTION_PAGE_DOWN:
        self._moveDown(CHANNELS_PER_PAGE)
    elif action.getId() == ACTION_MOUSE_WHEEL_UP:
        self._moveUp(scrollEvent=True)
    elif action.getId() == ACTION_MOUSE_WHEEL_DOWN:
        self._moveDown(scrollEvent=True)
    elif action.getId() == KEY_HOME:
        # Jump back to 'now', rounded down to the nearest half hour.
        self.viewStartDate = datetime.datetime.today()
        self.viewStartDate -= datetime.timedelta(minutes=self.viewStartDate.minute % 30,
                                                 seconds=self.viewStartDate.second)
        self.onRedrawEPG(self.channelIdx, self.viewStartDate)
    elif action.getId() in [KEY_CONTEXT_MENU, ACTION_PREVIOUS_MENU] and controlInFocus is not None:
        program = self._getProgramFromControl(controlInFocus)
        if program is not None:
            self._showContextMenu(program)
    else:
        xbmc.log('[script.ftvguide] Unhandled ActionId: ' + str(action.getId()), xbmc.LOGDEBUG)
def onClick(self, controlId):
    """Handle clicks: mouse navigation buttons, or a programme cell.

    Clicking a programme tries to play it; if no stream is configured,
    stream auto-detection runs and may prompt the user.
    """
    if controlId in [self.C_MAIN_LOADING_CANCEL, self.C_MAIN_MOUSE_EXIT]:
        self.close()
        return
    if self.isClosing:
        return
    if controlId == self.C_MAIN_MOUSE_HOME:
        # Back to 'now', rounded down to the nearest half hour.
        self.viewStartDate = datetime.datetime.today()
        self.viewStartDate -= datetime.timedelta(minutes=self.viewStartDate.minute % 30,
                                                 seconds=self.viewStartDate.second)
        self.onRedrawEPG(self.channelIdx, self.viewStartDate)
        return
    elif controlId == self.C_MAIN_MOUSE_LEFT:
        self.viewStartDate -= datetime.timedelta(hours=2)
        self.onRedrawEPG(self.channelIdx, self.viewStartDate)
        return
    elif controlId == self.C_MAIN_MOUSE_UP:
        self._moveUp(count=CHANNELS_PER_PAGE)
        return
    elif controlId == self.C_MAIN_MOUSE_DOWN:
        self._moveDown(count=CHANNELS_PER_PAGE)
        return
    elif controlId == self.C_MAIN_MOUSE_RIGHT:
        self.viewStartDate += datetime.timedelta(hours=2)
        self.onRedrawEPG(self.channelIdx, self.viewStartDate)
        return
    program = self._getProgramFromControl(self.getControl(controlId))
    if program is None:
        return
    if not self.playChannel(program.channel, program):
        result = self.streamingService.detectStream(program.channel)
        if not result:
            # could not detect stream, show context menu
            self._showContextMenu(program)
        elif type(result) == str:
            # one single stream detected, save it and start streaming
            # NOTE(review): a unicode result would not match `type(...) == str`
            # on Python 2 -- confirm detectStream's return type.
            self.database.setCustomStreamUrl(program.channel, result)
            self.playChannel(program.channel, program)
        else:
            # multiple matches, let user decide
            d = ChooseStreamAddonDialog(result)
            d.doModal()
            if d.stream is not None:
                self.database.setCustomStreamUrl(program.channel, d.stream)
                self.playChannel(program.channel, program)
def _showContextMenu(self, program):
    """Show the popup menu for *program* and act on the chosen button."""
    self._hideControl(self.C_MAIN_MOUSE_CONTROLS)
    d = PopupMenu(self.database, program, not program.notificationScheduled)
    d.doModal()
    buttonClicked = d.buttonClicked
    del d
    if buttonClicked == PopupMenu.C_POPUP_REMIND:
        # Toggle the reminder for this programme and redraw (cell colour changes).
        if program.notificationScheduled:
            self.notification.removeNotification(program)
        else:
            self.notification.addNotification(program)
        self.onRedrawEPG(self.channelIdx, self.viewStartDate)
    elif buttonClicked == PopupMenu.C_POPUP_CHOOSE_STREAM:
        d = StreamSetupDialog(self.database, program.channel)
        d.doModal()
        del d
    elif buttonClicked == PopupMenu.C_POPUP_PLAY:
        self.playChannel(program.channel, program)
    elif buttonClicked == PopupMenu.C_POPUP_CHANNELS:
        d = ChannelsMenu(self.database)
        d.doModal()
        del d
        self.onRedrawEPG(self.channelIdx, self.viewStartDate)
    elif buttonClicked == PopupMenu.C_POPUP_QUIT:
        self.close()
    elif buttonClicked == PopupMenu.C_POPUP_LIBMOV:
        xbmc.executebuiltin('ActivateWindow(Videos,videodb://movies/titles/)')
    elif buttonClicked == PopupMenu.C_POPUP_LIBTV:
        xbmc.executebuiltin('ActivateWindow(Videos,videodb://tvshows/titles/)')
    elif buttonClicked == PopupMenu.C_POPUP_VIDEOADDONS:
        xbmc.executebuiltin('ActivateWindow(Videos,addons://sources/video/)')
    elif buttonClicked == PopupMenu.C_POPUP_PLAY_BEGINNING:
        # Hand the programme title to plugin.video.meta to play from the start.
        title = program.title.replace(" ", "%20").replace(",", "").replace(u"\u2013", "-")
        # NOTE(review): `unicode` is Python 2 only -- this branch breaks on Python 3.
        title = unicode.encode(title, "ascii", "ignore")
        if program.is_movie == "Movie":
            selection = 0
        elif program.season is not None:
            selection = 1
        else:
            selection = xbmcgui.Dialog().select("Choose media type", ["Search as Movie", "Search as TV Show"])
        if selection == 0:
            xbmc.executebuiltin("RunPlugin(plugin://plugin.video.meta/movies/play_by_name/%s/%s)" % (
                title, program.language))
        elif selection == 1:
            if program.season and program.episode:
                xbmc.executebuiltin("RunPlugin(plugin://plugin.video.meta/tv/play_by_name/%s/%s/%s/%s)" % (
                    title, program.season, program.episode, program.language))
            else:
                xbmc.executebuiltin("RunPlugin(plugin://plugin.video.meta/tv/play_by_name_only/%s/%s)" % (
                    title, program.language))
def setFocusId(self, controlId):
    """Focus the control with *controlId*; missing controls are ignored."""
    target = self.getControl(controlId)
    if target:
        self.setFocus(target)
def setFocus(self, control):
    """Focus *control* and remember its position for later navigation."""
    debug('setFocus %d' % control.getId())
    if control in [elem.control for elem in self.controlAndProgramList]:
        debug('Focus before %s' % self.focusPoint)
        (left, top) = control.getPosition()
        # Only move the remembered x when the control lies outside the
        # current focus column; y always tracks the control's centre.
        if left > self.focusPoint.x or left + control.getWidth() < self.focusPoint.x:
            self.focusPoint.x = left
        self.focusPoint.y = top + (control.getHeight() / 2)
        debug('New focus at %s' % self.focusPoint)
    super(TVGuide, self).setFocus(control)
def onFocus(self, controlId):
    """Update the detail pane (title, time, description, images) for the
    programme under focus."""
    try:
        controlInFocus = self.getControl(controlId)
    except Exception:
        return
    program = self._getProgramFromControl(controlInFocus)
    if program is None:
        return
    title = '[B]%s[/B]' % program.title
    if program.season is not None and program.episode is not None:
        title += " [B]S%sE%s[/B]" % (program.season, program.episode)
    if program.is_movie == "Movie":
        title += " [B](Movie)[/B]"
    self.setControlLabel(self.C_MAIN_TITLE, title)
    if program.startDate or program.endDate:
        self.setControlLabel(self.C_MAIN_TIME,
                             '[B]%s - %s[/B]' % (
                                 self.formatTime(program.startDate), self.formatTime(program.endDate)))
    else:
        self.setControlLabel(self.C_MAIN_TIME, '')
    if program.description:
        description = program.description
    else:
        description = strings(NO_DESCRIPTION)
    self.setControlText(self.C_MAIN_DESCRIPTION, description)
    if program.channel.logo is not None:
        self.setControlImage(self.C_MAIN_LOGO, program.channel.logo)
    else:
        self.setControlImage(self.C_MAIN_LOGO, '')
    if program.imageSmall is not None:
        self.setControlImage(self.C_MAIN_IMAGE, program.imageSmall)
    else:
        self.setControlImage(self.C_MAIN_IMAGE, 'tvguide-logo-epg.png')
    if ADDON.getSetting('program.background.enabled') == 'true' and program.imageLarge is not None:
        self.setControlBACKGROUND = None  # noqa -- see note below
def _left(self, currentFocus):
    """Move focus one programme to the left; page the view back two hours
    when already at the left edge."""
    control = self._findControlOnLeft(currentFocus)
    if control is not None:
        self.setFocus(control)
    else:
        # `elif control is None` was redundant; collapsed to else.
        # Scroll back and re-focus from the right edge after the redraw.
        self.viewStartDate -= datetime.timedelta(hours=2)
        self.focusPoint.x = self.epgView.right
        self.onRedrawEPG(self.channelIdx, self.viewStartDate, focusFunction=self._findControlOnLeft)
def _right(self, currentFocus):
    """Move focus one programme to the right; page the view forward two
    hours when already at the right edge."""
    control = self._findControlOnRight(currentFocus)
    if control is not None:
        self.setFocus(control)
    else:
        # `elif control is None` was redundant; collapsed to else.
        self.viewStartDate += datetime.timedelta(hours=2)
        self.focusPoint.x = self.epgView.left
        self.onRedrawEPG(self.channelIdx, self.viewStartDate, focusFunction=self._findControlOnRight)
def _up(self, currentFocus):
    """Move focus one channel up; scroll a page up when at the top row."""
    currentFocus.x = self.focusPoint.x
    control = self._findControlAbove(currentFocus)
    if control is not None:
        self.setFocus(control)
    else:
        # `elif control is None` was redundant; collapsed to else.
        self.focusPoint.y = self.epgView.bottom
        self.onRedrawEPG(self.channelIdx - CHANNELS_PER_PAGE, self.viewStartDate,
                         focusFunction=self._findControlAbove)
def _down(self, currentFocus):
    """Move focus one channel down; scroll a page down when at the bottom row."""
    currentFocus.x = self.focusPoint.x
    control = self._findControlBelow(currentFocus)
    if control is not None:
        self.setFocus(control)
    else:
        # `elif control is None` was redundant; collapsed to else.
        self.focusPoint.y = self.epgView.top
        self.onRedrawEPG(self.channelIdx + CHANNELS_PER_PAGE, self.viewStartDate,
                         focusFunction=self._findControlBelow)
def _nextDay(self):
    """Jump the EPG view one day forward and redraw."""
    newStart = self.viewStartDate + datetime.timedelta(days=1)
    self.viewStartDate = newStart
    self.onRedrawEPG(self.channelIdx, newStart)
def _previousDay(self):
    """Jump the EPG view one day back and redraw."""
    newStart = self.viewStartDate - datetime.timedelta(days=1)
    self.viewStartDate = newStart
    self.onRedrawEPG(self.channelIdx, newStart)
def _moveUp(self, count=1, scrollEvent=False):
    """Scroll *count* channels up; keyboard moves also re-anchor the focus
    to the bottom of the grid."""
    if not scrollEvent:
        self.focusPoint.y = self.epgView.bottom
        self.onRedrawEPG(self.channelIdx - count, self.viewStartDate, focusFunction=self._findControlAbove)
    else:
        self.onRedrawEPG(self.channelIdx - count, self.viewStartDate)
def _moveDown(self, count=1, scrollEvent=False):
    """Scroll *count* channels down; keyboard moves also re-anchor the focus
    to the top of the grid."""
    if not scrollEvent:
        self.focusPoint.y = self.epgView.top
        self.onRedrawEPG(self.channelIdx + count, self.viewStartDate, focusFunction=self._findControlBelow)
    else:
        self.onRedrawEPG(self.channelIdx + count, self.viewStartDate)
def _channelUp(self):
    """Switch playback to the next channel and its current programme."""
    nextChannel = self.database.getNextChannel(self.currentChannel)
    self.playChannel(nextChannel, self.database.getCurrentProgram(nextChannel))
def _channelDown(self):
    """Switch playback to the previous channel and its current programme."""
    prevChannel = self.database.getPreviousChannel(self.currentChannel)
    self.playChannel(prevChannel, self.database.getCurrentProgram(prevChannel))
def playChannel(self, channel, program=None):
    """Start playback of *channel*; return True if a stream URL was found.

    Plugin URLs are launched through Kodi builtins (honouring the
    alternative-playback/OSD settings); plain URLs use xbmc.Player.
    """
    self.currentChannel = channel
    wasPlaying = self.player.isPlaying()
    url = self.database.getStreamUrl(channel)
    if url:
        # Persist play-state so the position can be restored after a restart.
        self.set_playing()
        if str.startswith(url, "plugin://plugin.video.meta") and program is not None:
            # meta plugin expects the programme title/language in the path.
            import urllib
            title = urllib.quote(program.title)
            url += "/%s/%s" % (title, program.language)
        if url[0:9] == 'plugin://':
            if self.alternativePlayback:
                xbmc.executebuiltin('XBMC.RunPlugin(%s)' % url)
            elif self.osdEnabled:
                # ',1' plays windowed so the OSD overlay stays usable.
                xbmc.executebuiltin('PlayMedia(%s,1)' % url)
            else:
                xbmc.executebuiltin('PlayMedia(%s)' % url)
        else:
            self.player.play(item=url, windowed=self.osdEnabled)
        if not wasPlaying:
            self._hideEpg()
    # Background thread redraws the EPG once playback eventually stops.
    threading.Timer(1, self.waitForPlayBackStopped).start()
    self.osdProgram = self.database.getCurrentProgram(self.currentChannel)
    return url is not None
def waitForPlayBackStopped(self):
    """Block (on a worker thread) until playback ends, then notify.

    Gives the player up to ~10 s to actually start before polling for
    it to stop.
    """
    for _ in range(100):
        time.sleep(0.1)
        if self.player.isPlaying():
            break
    while self.player.isPlaying() and not (xbmc.abortRequested or self.isClosing):
        time.sleep(0.5)
    self.onPlayBackStopped()
def _showOsd(self):
    """Populate and show the OSD overlay for the current OSD programme."""
    if not self.osdEnabled:
        return
    if self.mode != MODE_OSD:
        # Entering OSD mode: start browsing from the playing channel.
        self.osdChannel = self.currentChannel
    if self.osdProgram is not None:
        self.setControlLabel(self.C_MAIN_OSD_TITLE, '[B]%s[/B]' % self.osdProgram.title)
        if self.osdProgram.startDate or self.osdProgram.endDate:
            self.setControlLabel(self.C_MAIN_OSD_TIME, '[B]%s - %s[/B]' % (
                self.formatTime(self.osdProgram.startDate), self.formatTime(self.osdProgram.endDate)))
        else:
            self.setControlLabel(self.C_MAIN_OSD_TIME, '')
        self.setControlText(self.C_MAIN_OSD_DESCRIPTION, self.osdProgram.description)
        self.setControlLabel(self.C_MAIN_OSD_CHANNEL_TITLE, self.osdChannel.title)
        if self.osdProgram.channel.logo is not None:
            self.setControlImage(self.C_MAIN_OSD_CHANNEL_LOGO, self.osdProgram.channel.logo)
        else:
            self.setControlImage(self.C_MAIN_OSD_CHANNEL_LOGO, '')
    self.mode = MODE_OSD
    self._showControl(self.C_MAIN_OSD)
def _hideOsd(self):
    """Hide the OSD overlay and fall back to plain TV mode."""
    self.mode = MODE_TV
    self._hideControl(self.C_MAIN_OSD)
def _hideEpg(self):
    """Hide the EPG grid (switching to TV mode) and drop its controls."""
    self._hideControl(self.C_MAIN_EPG)
    self.mode = MODE_TV
    self._clearEpg()
def onRedrawEPG(self, channelStart, startTime, focusFunction=None):
    """Rebuild the whole EPG grid for *channelStart*/*startTime*.

    Creates one ControlButton per visible programme, a placeholder per
    channel without programmes, and restores focus using
    *focusFunction* (defaults to hit-testing self.focusPoint).
    """
    if self.redrawingEPG or (self.database is not None and self.database.updateInProgress) or self.isClosing:
        debug('onRedrawEPG - already redrawing')
        return  # ignore redraw request while redrawing
    debug('onRedrawEPG')
    self.redrawingEPG = True
    self.mode = MODE_EPG
    self._showControl(self.C_MAIN_EPG)
    self.updateTimebar(scheduleTimer=False)
    # show Loading screen
    self.setControlLabel(self.C_MAIN_LOADING_TIME_LEFT, strings(CALCULATING_REMAINING_TIME))
    self._showControl(self.C_MAIN_LOADING)
    self.setFocusId(self.C_MAIN_LOADING_CANCEL)
    # remove existing controls
    self._clearEpg()
    try:
        self.channelIdx, channels, programs = self.database.getEPGView(channelStart, startTime,
                                                                       self.onSourceProgressUpdate,
                                                                       clearExistingProgramList=False)
    except src.SourceException:
        self.onEPGLoadError()
        return
    channelsWithoutPrograms = list(channels)
    # date and time row
    self.setControlLabel(self.C_MAIN_DATE, self.formatDate(self.viewStartDate, False))
    self.setControlLabel(self.C_MAIN_DATE_LONG, self.formatDate(self.viewStartDate, True))
    for col in range(1, 5):
        # Controls 4001-4004 are the four half-hour column headers.
        self.setControlLabel(4000 + col, self.formatTime(startTime))
        startTime += HALF_HOUR
    if programs is None:
        self.onEPGLoadError()
        return
    # set channel logo or text
    showLogo = ADDON.getSetting('logos.enabled') == 'true'
    for idx in range(0, CHANNELS_PER_PAGE):
        if idx >= len(channels):
            self.setControlImage(4110 + idx, ' ')
            self.setControlLabel(4010 + idx, ' ')
        else:
            channel = channels[idx]
            self.setControlLabel(4010 + idx, channel.title)
            if (channel.logo is not None and showLogo == True):
                self.setControlImage(4110 + idx, channel.logo)
            else:
                self.setControlImage(4110 + idx, ' ')
    for program in programs:
        idx = channels.index(program.channel)
        if program.channel in channelsWithoutPrograms:
            channelsWithoutPrograms.remove(program.channel)
        # Convert start/stop offsets into pixel positions; clamp cells that
        # start before or end after the visible two-hour window.
        startDelta = program.startDate - self.viewStartDate
        stopDelta = program.endDate - self.viewStartDate
        cellStart = self._secondsToXposition(startDelta.seconds)
        if startDelta.days < 0:
            cellStart = self.epgView.left
        cellWidth = self._secondsToXposition(stopDelta.seconds) - cellStart
        if cellStart + cellWidth > self.epgView.right:
            cellWidth = self.epgView.right - cellStart
        if cellWidth > 1:
            # Red textures flag programmes with a scheduled reminder.
            if program.notificationScheduled:
                noFocusTexture = 'tvguide-program-red.png'
                focusTexture = 'tvguide-program-red-focus.png'
            else:
                noFocusTexture = 'tvguide-program-grey.png'
                focusTexture = 'tvguide-program-grey-focus.png'
            if cellWidth < 25:
                title = ''  # Text will overflow outside the button if it is too narrow
            else:
                title = program.title
            control = xbmcgui.ControlButton(
                cellStart,
                self.epgView.top + self.epgView.cellHeight * idx,
                cellWidth - 2,
                self.epgView.cellHeight - 2,
                title,
                noFocusTexture=noFocusTexture,
                focusTexture=focusTexture
            )
            self.controlAndProgramList.append(ControlAndProgram(control, program))
    for channel in channelsWithoutPrograms:
        # Full-width placeholder cell for channels without any programme data.
        idx = channels.index(channel)
        control = xbmcgui.ControlButton(
            self.epgView.left,
            self.epgView.top + self.epgView.cellHeight * idx,
            (self.epgView.right - self.epgView.left) - 2,
            self.epgView.cellHeight - 2,
            strings(NO_PROGRAM_AVAILABLE),
            noFocusTexture='tvguide-program-grey.png',
            focusTexture='tvguide-program-grey-focus.png'
        )
        program = src.Program(channel, strings(NO_PROGRAM_AVAILABLE), None, None, None)
        self.controlAndProgramList.append(ControlAndProgram(control, program))
    # add program controls
    if focusFunction is None:
        focusFunction = self._findControlAt
    focusControl = focusFunction(self.focusPoint)
    controls = [elem.control for elem in self.controlAndProgramList]
    self.addControls(controls)
    if focusControl is not None:
        debug('onRedrawEPG - setFocus %d' % focusControl.getId())
        self.setFocus(focusControl)
    self.ignoreMissingControlIds.extend([elem.control.getId() for elem in self.controlAndProgramList])
    if focusControl is None and len(self.controlAndProgramList) > 0:
        self.setFocus(self.controlAndProgramList[0].control)
    self._hideControl(self.C_MAIN_LOADING)
    self.redrawingEPG = False
def _clearEpg(self):
    """Remove every programme button from the window and forget them."""
    allControls = [entry.control for entry in self.controlAndProgramList]
    try:
        # Bulk removal works when every control is still attached.
        self.removeControls(allControls)
    except RuntimeError:
        # Fall back to one-by-one removal; some may already be gone.
        for ctl in allControls:
            try:
                self.removeControl(ctl)
            except RuntimeError:
                pass  # happens if we try to remove a control that doesn't exist
    del self.controlAndProgramList[:]
def onEPGLoadError(self):
    """Tell the user EPG data could not be loaded, then close the window."""
    self.redrawingEPG = False
    self._hideControl(self.C_MAIN_LOADING)
    xbmcgui.Dialog().ok(strings(LOAD_ERROR_TITLE), strings(LOAD_ERROR_LINE1), strings(LOAD_ERROR_LINE2))
    self.close()
def onSourceNotConfigured(self):
    """Tell the user the EPG source is not configured, then close."""
    self.redrawingEPG = False
    self._hideControl(self.C_MAIN_LOADING)
    xbmcgui.Dialog().ok(strings(LOAD_ERROR_TITLE), strings(LOAD_ERROR_LINE1), strings(CONFIGURATION_ERROR_LINE2))
    self.close()
def isSourceInitializationCancelled(self):
    # Polled by Database.initialize() to abort long-running start-up work.
    return xbmc.abortRequested or self.isClosing
def onSourceInitialized(self, success):
    """Database init callback: build notifications and draw the first page."""
    if success:
        self.notification = Notification(self.database, ADDON.getAddonInfo('path'))
        self.onRedrawEPG(0, self.viewStartDate)
def onSourceProgressUpdate(self, percentageComplete):
    """Progress callback while EPG data loads; returns False to abort.

    NOTE(review): self.progressStartTime/progressPreviousPercentage are
    first assigned in the `< 1` branch -- this assumes the source always
    reports a sub-1% update first; confirm against the Database callers.
    """
    control = self.getControl(self.C_MAIN_LOADING_PROGRESS)
    if percentageComplete < 1:
        if control:
            control.setPercent(1)
        self.progressStartTime = datetime.datetime.now()
        self.progressPreviousPercentage = percentageComplete
    elif percentageComplete != self.progressPreviousPercentage:
        if control:
            control.setPercent(percentageComplete)
        self.progressPreviousPercentage = percentageComplete
        delta = datetime.datetime.now() - self.progressStartTime
        if percentageComplete < 20:
            self.setControlLabel(self.C_MAIN_LOADING_TIME_LEFT, strings(CALCULATING_REMAINING_TIME))
        else:
            # Linear extrapolation of the remaining time, rounded to 10 s
            # steps beyond half a minute so the label doesn't flicker.
            secondsLeft = int(delta.seconds) / float(percentageComplete) * (100.0 - percentageComplete)
            if secondsLeft > 30:
                secondsLeft -= secondsLeft % 10
            self.setControlLabel(self.C_MAIN_LOADING_TIME_LEFT, strings(TIME_LEFT) % secondsLeft)
    return not xbmc.abortRequested and not self.isClosing
def check_is_playing(self):
    """Return (is_playing, play_data) from the player and the proc file.

    A non-empty proc file means a stream started by this addon is (or
    was) playing; its JSON payload carries the channel index and focus y.
    """
    is_playing = self.player.isPlaying()
    play_data = {}
    if not self.isClosing:
        # `with` guarantees the handle is closed even if json parsing
        # raises (the original leaked the handle on a parse error).
        with open(self.proc_file, 'r') as f:
            data = f.read()
        if data:
            is_playing = True
            play_data = json.loads(data)
    debug('[%s] Checking Play-State... is_playing: %s, data: %s '
          % (ADDON.getAddonInfo('id'), str(is_playing), str(play_data)))
    return is_playing, play_data
def set_playing(self):
f = open(self.proc_file, 'w')
data = {'timestamp': datetime.datetime.now().strftime('%Y%m%d%H%M%S'),
'y': self.focusPoint.y, 'idx': self.channelIdx}
f.write(json.dumps(data))
f.close()
def reset_playing(self):
    # Delegates to the module-level reset_playing() helper (same name,
    # different scope -- this is not recursion).
    reset_playing()
def onPlayBackStopped(self):
    """Return to the EPG when playback ends, restoring the saved position."""
    if not self.player.isPlaying() and not self.isClosing:
        is_playing, play_data = self.check_is_playing()
        self._hideControl(self.C_MAIN_OSD)
        # Re-anchor the view at 'now', rounded down to the half hour.
        self.viewStartDate = datetime.datetime.today()
        self.viewStartDate -= datetime.timedelta(minutes=self.viewStartDate.minute % 30,
                                                 seconds=self.viewStartDate.second)
        if is_playing and 'idx' in play_data:
            self.viewStartDate = datetime.datetime.today()
            self.viewStartDate -= datetime.timedelta(minutes=self.viewStartDate.minute % 30,
                                                     seconds=self.viewStartDate.second)
            self.channelIdx = play_data['idx']
            if self.database and 'y' in play_data:
                self.focusPoint.y = play_data['y']
        self.onRedrawEPG(self.channelIdx, self.viewStartDate,
                         focusFunction=self._findCurrentTimeslot)
        self.reset_playing()
def _secondsToXposition(self, seconds):
return self.epgView.left + (seconds * self.epgView.width / 7200)
def _findControlOnRight(self, point):
distanceToNearest = 10000
nearestControl = None
for elem in self.controlAndProgramList:
control = elem.control
(left, top) = control.getPosition()
x = left + (control.getWidth() / 2)
y = top + (control.getHeight() / 2)
if point.x < x and point.y == y:
distance = abs(point.x - x)
if distance < distanceToNearest:
distanceToNearest = distance
nearestControl = control
return nearestControl
def _findControlOnLeft(self, point):
distanceToNearest = 10000
nearestControl = None
for elem in self.controlAndProgramList:
control = elem.control
(left, top) = control.getPosition()
x = left + (control.getWidth() / 2)
y = top + (control.getHeight() / 2)
if point.x > x and point.y == y:
distance = abs(point.x - x)
if distance < distanceToNearest:
distanceToNearest = distance
nearestControl = control
return nearestControl
def _findControlBelow(self, point):
nearestControl = None
for elem in self.controlAndProgramList:
control = elem.control
(leftEdge, top) = control.getPosition()
y = top + (control.getHeight() / 2)
if point.y < y:
rightEdge = leftEdge + control.getWidth()
if leftEdge <= point.x < rightEdge and (
nearestControl is None or nearestControl.getPosition()[1] > top):
nearestControl = control
return nearestControl
def _findControlAbove(self, point):
nearestControl = None
for elem in self.controlAndProgramList:
control = elem.control
(leftEdge, top) = control.getPosition()
y = top + (control.getHeight() / 2)
if point.y > y:
rightEdge = leftEdge + control.getWidth()
if leftEdge <= point.x < rightEdge and (
nearestControl is None or nearestControl.getPosition()[1] < top):
nearestControl = control
return nearestControl
def _findControlAt(self, point):
for elem in self.controlAndProgramList:
control = elem.control
(left, top) = control.getPosition()
bottom = top + control.getHeight()
right = left + control.getWidth()
if left <= point.x <= right and top <= point.y <= bottom:
return control
return None
def _findCurrentTimeslot(self, point):
    """Return the programme control at the current time on *point*'s row.

    Uses the timebar's x position as 'now'; falls back to point.x if the
    timebar control is missing from the skin.
    """
    y = point.y
    control = self.getControl(self.C_MAIN_TIMEBAR)
    if control:
        (x, _) = control.getPosition()
    else:
        x = point.x
    for elem in self.controlAndProgramList:
        control = elem.control
        (left, top) = control.getPosition()
        bottom = top + control.getHeight()
        right = left + control.getWidth()
        if left <= x <= right and top <= y <= bottom:
            return control
    return None
def _getProgramFromControl(self, control):
for elem in self.controlAndProgramList:
if elem.control == control:
return elem.program
return None
def _hideControl(self, *controlIds):
    """Hide the given controls.

    Visibility is inverted in the skin, so setVisible(True) hides.
    """
    for cid in controlIds:
        ctl = self.getControl(cid)
        if ctl:
            ctl.setVisible(True)
def _showControl(self, *controlIds):
    """Show the given controls.

    Visibility is inverted in the skin, so setVisible(False) shows.
    """
    for cid in controlIds:
        ctl = self.getControl(cid)
        if ctl:
            ctl.setVisible(False)
def formatTime(self, timestamp):
    """Format *timestamp* using the locale's time format without seconds;
    return '' for a falsy timestamp."""
    if not timestamp:
        return ''
    # Renamed `format` -> `timeFormat` to stop shadowing the builtin.
    # Strip seconds and collapse the doubled-hour token some locales use.
    timeFormat = xbmc.getRegion('time').replace(':%S', '').replace('%H%H', '%H')
    return timestamp.strftime(timeFormat)
def formatDate(self, timestamp, longdate=False):
    """Format *timestamp* using the locale's short or long date format;
    return '' for a falsy timestamp."""
    if not timestamp:
        return ''
    # `longdate == True` -> plain truthiness; also avoid shadowing
    # the builtin `format`.
    regionKey = 'datelong' if longdate else 'dateshort'
    return timestamp.strftime(xbmc.getRegion(regionKey))
def setControlImage(self, controlId, image):
    """Set *image* on the control if it exists (UTF-8 encoded for Kodi)."""
    ctl = self.getControl(controlId)
    if ctl:
        ctl.setImage(image.encode('utf-8'))
def setControlLabel(self, controlId, label):
    """Set *label* on the control if both control and label are truthy."""
    ctl = self.getControl(controlId)
    if ctl and label:
        ctl.setLabel(label)
def setControlText(self, controlId, text):
    """Set *text* on the control if the control exists."""
    ctl = self.getControl(controlId)
    if ctl:
        ctl.setText(text)
def updateTimebar(self, scheduleTimer=True):
    """Position the 'now' marker over the EPG grid and keep it ticking.

    Re-schedules itself once per second via threading.Timer unless the
    window is closing or Kodi is shutting down.
    """
    # move timebar to current time
    timeDelta = datetime.datetime.today() - self.viewStartDate
    control = self.getControl(self.C_MAIN_TIMEBAR)
    if control:
        (x, y) = control.getPosition()
        try:
            # Only visible when 'now' falls inside the current day view.
            # Sometimes raises:
            # exceptions.RuntimeError: Unknown exception thrown from the call "setVisible"
            control.setVisible(timeDelta.days == 0)
        except Exception:
            # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
            # are not swallowed.
            pass
        control.setPosition(self._secondsToXposition(timeDelta.seconds), y)
    if scheduleTimer and not xbmc.abortRequested and not self.isClosing:
        threading.Timer(1, self.updateTimebar).start()
class PopupMenu(xbmcgui.WindowXMLDialog):
    """Context menu for a programme; the clicked button id is exposed via
    self.buttonClicked after doModal() returns."""
    # Control ids must match script-tvguide-menu.xml.
    C_POPUP_PLAY = 4000
    C_POPUP_CHOOSE_STREAM = 4001
    C_POPUP_REMIND = 4002
    C_POPUP_CHANNELS = 4003
    C_POPUP_QUIT = 4004
    C_POPUP_PLAY_BEGINNING = 4005
    C_POPUP_CHANNEL_LOGO = 4100
    C_POPUP_CHANNEL_TITLE = 4101
    C_POPUP_PROGRAM_TITLE = 4102
    C_POPUP_LIBMOV = 80000
    C_POPUP_LIBTV = 80001
    C_POPUP_VIDEOADDONS = 80002
    def __new__(cls, database, program, showRemind):
        return super(PopupMenu, cls).__new__(cls, 'script-tvguide-menu.xml', ADDON.getAddonInfo('path'), SKIN)
    def __init__(self, database, program, showRemind):
        """
        @type database: source.Database
        @param program:
        @type program: source.Program
        @param showRemind:
        """
        super(PopupMenu, self).__init__()
        self.database = database
        self.program = program
        self.showRemind = showRemind
        self.buttonClicked = None
    def onInit(self):
        """Populate the menu with the programme's channel/title details."""
        playControl = self.getControl(self.C_POPUP_PLAY)
        remindControl = self.getControl(self.C_POPUP_REMIND)
        channelLogoControl = self.getControl(self.C_POPUP_CHANNEL_LOGO)
        channelTitleControl = self.getControl(self.C_POPUP_CHANNEL_TITLE)
        programTitleControl = self.getControl(self.C_POPUP_PROGRAM_TITLE)
        programPlayBeginningControl = self.getControl(self.C_POPUP_PLAY_BEGINNING)
        playControl.setLabel(strings(WATCH_CHANNEL, self.program.channel.title))
        if not self.program.channel.isPlayable():
            # No stream configured: point the user at stream setup instead.
            playControl.setEnabled(False)
            self.setFocusId(self.C_POPUP_CHOOSE_STREAM)
        if self.database.getCustomStreamUrl(self.program.channel):
            chooseStrmControl = self.getControl(self.C_POPUP_CHOOSE_STREAM)
            chooseStrmControl.setLabel(strings(REMOVE_STRM_FILE))
        # Show either the channel logo or its title, never both.
        if self.program.channel.logo is not None:
            channelLogoControl.setImage(self.program.channel.logo)
            channelTitleControl.setVisible(False)
        else:
            channelTitleControl.setLabel(self.program.channel.title)
            channelLogoControl.setVisible(False)
        programTitleControl.setLabel(self.program.title)
        if self.program.startDate:
            remindControl.setEnabled(True)
            if self.showRemind:
                remindControl.setLabel(strings(REMIND_PROGRAM))
            else:
                remindControl.setLabel(strings(DONT_REMIND_PROGRAM))
        else:
            remindControl.setEnabled(False)
    def onAction(self, action):
        """Close on back/context actions."""
        if action.getId() in [ACTION_PARENT_DIR, ACTION_PREVIOUS_MENU, KEY_NAV_BACK, KEY_CONTEXT_MENU]:
            self.close()
            return
    def onClick(self, controlId):
        """Record the clicked button and close (special-casing strm removal)."""
        if controlId == self.C_POPUP_CHOOSE_STREAM and self.database.getCustomStreamUrl(self.program.channel):
            # Toggle behaviour: a configured custom stream is removed in
            # place instead of closing the menu.
            self.database.deleteCustomStreamUrl(self.program.channel)
            chooseStrmControl = self.getControl(self.C_POPUP_CHOOSE_STREAM)
            chooseStrmControl.setLabel(strings(CHOOSE_STRM_FILE))
            if not self.program.channel.isPlayable():
                playControl = self.getControl(self.C_POPUP_PLAY)
                playControl.setEnabled(False)
        else:
            self.buttonClicked = controlId
            self.close()
    def onFocus(self, controlId):
        pass
class ChannelsMenu(xbmcgui.WindowXMLDialog):
C_CHANNELS_LIST = 6000
C_CHANNELS_SELECTION_VISIBLE = 6001
C_CHANNELS_SELECTION = 6002
C_CHANNELS_SAVE = 6003
C_CHANNELS_CANCEL = 6004
def __new__(cls, database):
return super(ChannelsMenu, cls).__new__(cls, 'script-tvguide-channels.xml', ADDON.getAddonInfo('path'), SKIN)
def __init__(self, database):
"""
@type database: source.Database
"""
super(ChannelsMenu, self).__init__()
self.database = database
self.channelList = database.getChannelList(onlyVisible=False)
self.swapInProgress = False
self.selectedChannel = 0
def onInit(self):
self.updateChannelList()
self.setFocusId(self.C_CHANNELS_LIST)
def onAction(self, action):
if action.getId() in [ACTION_PARENT_DIR, KEY_NAV_BACK]:
self.close()
return
if self.getFocusId() == self.C_CHANNELS_LIST and action.getId() in [ACTION_PREVIOUS_MENU, KEY_CONTEXT_MENU,
ACTION_LEFT]:
listControl = self.getControl(self.C_CHANNELS_LIST)
idx = listControl.getSelectedPosition()
self.selectedChannel = idx
buttonControl = self.getControl(self.C_CHANNELS_SELECTION)
buttonControl.setLabel('[B]%s[/B]' % self.channelList[idx].title)
self.getControl(self.C_CHANNELS_SELECTION_VISIBLE).setVisible(False)
self.setFocusId(self.C_CHANNELS_SELECTION)
elif self.getFocusId() == self.C_CHANNELS_SELECTION and action.getId() in [ACTION_RIGHT, ACTION_SELECT_ITEM]:
self.getControl(self.C_CHANNELS_SELECTION_VISIBLE).setVisible(True)
xbmc.sleep(350)
self.setFocusId(self.C_CHANNELS_LIST)
elif self.getFocusId() == self.C_CHANNELS_SELECTION and action.getId() in [ACTION_PREVIOUS_MENU,
KEY_CONTEXT_MENU]:
listControl = self.getControl(self.C_CHANNELS_LIST)
idx = listControl.getSelectedPosition()
self.swapChannels(self.selectedChannel, idx)
self.getControl(self.C_CHANNELS_SELECTION_VISIBLE).setVisible(True)
xbmc.sleep(350)
self.setFocusId(self.C_CHANNELS_LIST)
elif self.getFocusId() == self.C_CHANNELS_SELECTION and action.getId() == ACTION_UP:
listControl = self.getControl(self.C_CHANNELS_LIST)
idx = listControl.getSelectedPosition()
if idx > 0:
self.swapChannels(idx, idx - 1)
elif self.getFocusId() == self.C_CHANNELS_SELECTION and action.getId() == ACTION_DOWN:
listControl = self.getControl(self.C_CHANNELS_LIST)
idx = listControl.getSelectedPosition()
if idx < listControl.size() - 1:
self.swapChannels(idx, idx + 1)
def onClick(self, controlId):
    """Toggle channel visibility, save, or cancel."""
    if controlId == self.C_CHANNELS_LIST:
        # Toggle visibility of the clicked channel and update its icon.
        listControl = self.getControl(self.C_CHANNELS_LIST)
        item = listControl.getSelectedItem()
        channel = self.channelList[int(item.getProperty('idx'))]
        channel.visible = not channel.visible
        if channel.visible:
            iconImage = 'tvguide-channel-visible.png'
        else:
            iconImage = 'tvguide-channel-hidden.png'
        item.setIconImage(iconImage)
    elif controlId == self.C_CHANNELS_SAVE:
        # Persist the (possibly reordered/toggled) list; close() is invoked
        # by the database when saving has completed.
        self.database.saveChannelList(self.close, self.channelList)
    elif controlId == self.C_CHANNELS_CANCEL:
        self.close()
def onFocus(self, controlId):
    # No focus handling needed; required by the WindowXMLDialog interface.
    pass
def updateChannelList(self):
    """Rebuild the list control from self.channelList, one numbered entry
    per channel with a visible/hidden icon."""
    control = self.getControl(self.C_CHANNELS_LIST)
    control.reset()
    for position, channel in enumerate(self.channelList):
        icon = 'tvguide-channel-visible.png' if channel.visible else 'tvguide-channel-hidden.png'
        entry = xbmcgui.ListItem('%3d. %s' % (position + 1, channel.title), iconImage=icon)
        entry.setProperty('idx', str(position))
        control.addItem(entry)
def updateListItem(self, idx, item):
    """Refresh label, icon and index property of one existing list item."""
    channel = self.channelList[idx]
    item.setLabel('%3d. %s' % (idx + 1, channel.title))
    icon = 'tvguide-channel-visible.png' if channel.visible else 'tvguide-channel-hidden.png'
    item.setIconImage(icon)
    item.setProperty('idx', str(idx))
def swapChannels(self, fromIdx, toIdx):
    """Swap two channels in the list, renumber the weights and refresh the
    two affected list items.  Re-entrant calls are ignored while a swap is
    still being rendered."""
    if self.swapInProgress:
        return
    self.swapInProgress = True
    channels = self.channelList
    channels[fromIdx], channels[toIdx] = channels[toIdx], channels[fromIdx]
    # Weights simply follow the new list order.
    for weight, channel in enumerate(channels):
        channel.weight = weight
    control = self.getControl(self.C_CHANNELS_LIST)
    for position in (fromIdx, toIdx):
        self.updateListItem(position, control.getListItem(position))
    control.selectItem(toIdx)
    xbmc.sleep(50)
    self.swapInProgress = False
class StreamSetupDialog(xbmcgui.WindowXMLDialog):
    """Dialog for assigning a custom stream to a channel.

    Three tabs are offered: a .strm file browser, Kodi favourites, and
    streams provided by other add-ons.  A hidden label control
    (C_STREAM_VISIBILITY_MARKER) tracks which tab is currently visible so
    the shared preview logic knows where to read the selection from.
    """
    # Control ids -- must match script-tvguide-streamsetup.xml.
    C_STREAM_STRM_TAB = 101
    C_STREAM_FAVOURITES_TAB = 102
    C_STREAM_ADDONS_TAB = 103
    C_STREAM_STRM_BROWSE = 1001
    C_STREAM_STRM_FILE_LABEL = 1005
    C_STREAM_STRM_PREVIEW = 1002
    C_STREAM_STRM_OK = 1003
    C_STREAM_STRM_CANCEL = 1004
    C_STREAM_FAVOURITES = 2001
    C_STREAM_FAVOURITES_PREVIEW = 2002
    C_STREAM_FAVOURITES_OK = 2003
    C_STREAM_FAVOURITES_CANCEL = 2004
    C_STREAM_ADDONS = 3001
    C_STREAM_ADDONS_STREAMS = 3002
    C_STREAM_ADDONS_NAME = 3003
    C_STREAM_ADDONS_DESCRIPTION = 3004
    C_STREAM_ADDONS_PREVIEW = 3005
    C_STREAM_ADDONS_OK = 3006
    C_STREAM_ADDONS_CANCEL = 3007
    C_STREAM_VISIBILITY_MARKER = 100

    # Values stored in the visibility marker label.
    VISIBLE_STRM = 'strm'
    VISIBLE_FAVOURITES = 'favourites'
    VISIBLE_ADDONS = 'addons'

    def __new__(cls, database, channel):
        return super(StreamSetupDialog, cls).__new__(cls, 'script-tvguide-streamsetup.xml', ADDON.getAddonInfo('path'),
                                                     SKIN)

    def __init__(self, database, channel):
        """
        @type database: source.Database
        @type channel: source.Channel
        """
        super(StreamSetupDialog, self).__init__()
        self.database = database
        self.channel = channel
        self.player = xbmc.Player()
        # Add-on id whose streams are currently displayed (avoids reloading).
        self.previousAddonId = None
        self.strmFile = None
        self.streamingService = streaming.StreamsService(ADDON)

    def close(self):
        # Stop any running preview before closing the dialog.
        if self.player.isPlaying():
            self.player.stop()
        super(StreamSetupDialog, self).close()

    def onInit(self):
        """Populate the favourites and add-ons lists."""
        self.getControl(self.C_STREAM_VISIBILITY_MARKER).setLabel(self.VISIBLE_STRM)
        favourites = self.streamingService.loadFavourites()
        items = list()
        for label, value in favourites:
            item = xbmcgui.ListItem(label)
            item.setProperty('stream', value)
            items.append(item)
        listControl = self.getControl(StreamSetupDialog.C_STREAM_FAVOURITES)
        listControl.addItems(items)
        items = list()
        for addonId in self.streamingService.getAddons():
            try:
                addon = xbmcaddon.Addon(addonId)  # raises Exception if addon is not installed
                item = xbmcgui.ListItem(addon.getAddonInfo('name'), iconImage=addon.getAddonInfo('icon'))
                item.setProperty('addon_id', addonId)
                items.append(item)
            except Exception:
                # Not installed -- simply skip it.
                pass
        listControl = self.getControl(StreamSetupDialog.C_STREAM_ADDONS)
        listControl.addItems(items)
        self.updateAddonInfo()

    def onAction(self, action):
        if action.getId() in [ACTION_PARENT_DIR, ACTION_PREVIOUS_MENU, KEY_NAV_BACK, KEY_CONTEXT_MENU]:
            self.close()
            return
        elif self.getFocusId() == self.C_STREAM_ADDONS:
            # Selection in the add-on list changed; refresh the detail pane.
            self.updateAddonInfo()

    def onClick(self, controlId):
        """Dispatch button clicks for all three tabs."""
        if controlId == self.C_STREAM_STRM_BROWSE:
            stream = xbmcgui.Dialog().browse(1, ADDON.getLocalizedString(30304), 'video', '.strm')
            if stream:
                self.database.setCustomStreamUrl(self.channel, stream)
                self.getControl(self.C_STREAM_STRM_FILE_LABEL).setText(stream)
                self.strmFile = stream
        elif controlId == self.C_STREAM_ADDONS_OK:
            listControl = self.getControl(self.C_STREAM_ADDONS_STREAMS)
            item = listControl.getSelectedItem()
            if item:
                stream = item.getProperty('stream')
                self.database.setCustomStreamUrl(self.channel, stream)
            self.close()
        elif controlId == self.C_STREAM_FAVOURITES_OK:
            listControl = self.getControl(self.C_STREAM_FAVOURITES)
            item = listControl.getSelectedItem()
            if item:
                stream = item.getProperty('stream')
                self.database.setCustomStreamUrl(self.channel, stream)
            self.close()
        elif controlId == self.C_STREAM_STRM_OK:
            self.database.setCustomStreamUrl(self.channel, self.strmFile)
            self.close()
        elif controlId in [self.C_STREAM_ADDONS_CANCEL, self.C_STREAM_FAVOURITES_CANCEL, self.C_STREAM_STRM_CANCEL]:
            self.close()
        elif controlId in [self.C_STREAM_ADDONS_PREVIEW, self.C_STREAM_FAVOURITES_PREVIEW, self.C_STREAM_STRM_PREVIEW]:
            # The preview button toggles playback of the selected stream.
            if self.player.isPlaying():
                self.player.stop()
                self.getControl(self.C_STREAM_ADDONS_PREVIEW).setLabel(strings(PREVIEW_STREAM))
                self.getControl(self.C_STREAM_FAVOURITES_PREVIEW).setLabel(strings(PREVIEW_STREAM))
                self.getControl(self.C_STREAM_STRM_PREVIEW).setLabel(strings(PREVIEW_STREAM))
                return
            # Pick the stream from whichever tab is currently visible.
            stream = None
            visible = self.getControl(self.C_STREAM_VISIBILITY_MARKER).getLabel()
            if visible == self.VISIBLE_ADDONS:
                listControl = self.getControl(self.C_STREAM_ADDONS_STREAMS)
                item = listControl.getSelectedItem()
                if item:
                    stream = item.getProperty('stream')
            elif visible == self.VISIBLE_FAVOURITES:
                listControl = self.getControl(self.C_STREAM_FAVOURITES)
                item = listControl.getSelectedItem()
                if item:
                    stream = item.getProperty('stream')
            elif visible == self.VISIBLE_STRM:
                stream = self.strmFile
            if stream is not None:
                self.player.play(item=stream, windowed=True)
                if self.player.isPlaying():
                    self.getControl(self.C_STREAM_ADDONS_PREVIEW).setLabel(strings(STOP_PREVIEW))
                    self.getControl(self.C_STREAM_FAVOURITES_PREVIEW).setLabel(strings(STOP_PREVIEW))
                    self.getControl(self.C_STREAM_STRM_PREVIEW).setLabel(strings(STOP_PREVIEW))

    def onFocus(self, controlId):
        # Focusing a tab button records which pane is visible.
        if controlId == self.C_STREAM_STRM_TAB:
            self.getControl(self.C_STREAM_VISIBILITY_MARKER).setLabel(self.VISIBLE_STRM)
        elif controlId == self.C_STREAM_FAVOURITES_TAB:
            self.getControl(self.C_STREAM_VISIBILITY_MARKER).setLabel(self.VISIBLE_FAVOURITES)
        elif controlId == self.C_STREAM_ADDONS_TAB:
            self.getControl(self.C_STREAM_VISIBILITY_MARKER).setLabel(self.VISIBLE_ADDONS)

    def updateAddonInfo(self):
        """Show name, description and stream list of the focused add-on."""
        listControl = self.getControl(self.C_STREAM_ADDONS)
        selected = listControl.getSelectedItem()
        if selected is None:
            return
        addonId = selected.getProperty('addon_id')
        if addonId == self.previousAddonId:
            return  # already displayed
        self.previousAddonId = addonId
        addon = xbmcaddon.Addon(id=addonId)
        self.getControl(self.C_STREAM_ADDONS_NAME).setLabel('[B]%s[/B]' % addon.getAddonInfo('name'))
        self.getControl(self.C_STREAM_ADDONS_DESCRIPTION).setText(addon.getAddonInfo('description'))
        streams = self.streamingService.getAddonStreams(addonId)
        items = list()
        for (label, stream) in streams:
            # BUGFIX: addonId is captured once above; the original re-read the
            # property from 'item', which this loop rebinds, so only the first
            # stream got the plugin.video.meta channel substitution.
            if addonId == "plugin.video.meta":
                label = self.channel.title
                stream = stream.replace("<channel>", self.channel.title.replace(" ", "%20"))
            item = xbmcgui.ListItem(label)
            item.setProperty('stream', stream)
            items.append(item)
        listControl = self.getControl(StreamSetupDialog.C_STREAM_ADDONS_STREAMS)
        listControl.reset()
        listControl.addItems(items)
class ChooseStreamAddonDialog(xbmcgui.WindowXMLDialog):
    """Modal list letting the user pick one of several add-on streams; the
    chosen url is left in self.stream (None if the dialog was cancelled)."""
    C_SELECTION_LIST = 1000

    def __new__(cls, addons):
        return super(ChooseStreamAddonDialog, cls).__new__(cls, 'script-tvguide-streamaddon.xml',
                                                           ADDON.getAddonInfo('path'), SKIN)

    def __init__(self, addons):
        super(ChooseStreamAddonDialog, self).__init__()
        self.addons = addons
        self.stream = None

    def onInit(self):
        entries = []
        for addonId, label, url in self.addons:
            addon = xbmcaddon.Addon(addonId)
            entry = xbmcgui.ListItem(label, addon.getAddonInfo('name'), addon.getAddonInfo('icon'))
            entry.setProperty('stream', url)
            entries.append(entry)
        control = self.getControl(ChooseStreamAddonDialog.C_SELECTION_LIST)
        control.addItems(entries)
        self.setFocus(control)

    def onAction(self, action):
        if action.getId() in [ACTION_PARENT_DIR, ACTION_PREVIOUS_MENU, KEY_NAV_BACK]:
            self.close()

    def onClick(self, controlId):
        if controlId == ChooseStreamAddonDialog.C_SELECTION_LIST:
            control = self.getControl(ChooseStreamAddonDialog.C_SELECTION_LIST)
            self.stream = control.getSelectedItem().getProperty('stream')
            self.close()

    def onFocus(self, controlId):
        pass
# ---------------------------------------------------------------------------
# Second file: schleichdi2/openpli-e2 -- skin.py (enigma2 skin loader)
# ---------------------------------------------------------------------------
from Tools.Profile import profile
profile("LOAD:ElementTree")
import xml.etree.cElementTree
import os
profile("LOAD:enigma_skin")
from enigma import eSize, ePoint, eRect, gFont, eWindow, eLabel, ePixmap, eWindowStyleManager, \
addFont, gRGB, eWindowStyleSkinned, getDesktop
from Components.config import ConfigSubsection, ConfigText, config
from Components.Converter.Converter import Converter
from Components.Sources.Source import Source, ObsoleteSource
from Tools.Directories import resolveFilename, SCOPE_SKIN, SCOPE_FONTS, SCOPE_CURRENT_SKIN, SCOPE_CONFIG, fileExists, SCOPE_SKIN_IMAGE
from Tools.Import import my_import
from Tools.LoadPixmap import LoadPixmap
from Components.RcModel import rc_model
from Components.SystemInfo import SystemInfo
colorNames = {}  # color name -> gRGB, filled from the skin's <colors> section
# Predefined fonts, typically used in built-in screens and for components like
# the movie list and so.
# Each entry is (font name, size, height, width); extended at load time by
# <alias> entries from the skin's <fonts> section.
fonts = {
    "Body": ("Regular", 18, 22, 16),
    "ChoiceList": ("Regular", 20, 24, 18),
}
parameters = {}  # name -> int or list of ints, from the skin's <parameters> section
def dump(x, i=0):
    """Debug helper: recursively print a DOM-style node tree.

    x -- node to print (anything with a str()); children are visited via
         x.childNodes when present.
    i -- current indentation depth (one space per level).
    """
    # print(...) with a single argument is valid in both Python 2 and 3.
    print(" " * i + str(x))
    try:
        for n in x.childNodes:
            dump(n, i + 1)
    except Exception:
        # Leaf node (no childNodes) -- nothing more to dump.
        # (Original used a bare "except: None"; narrowed and made explicit.)
        pass
class SkinError(Exception):
    """Error raised while parsing or applying a skin; str() includes the
    name of the active primary skin so the culprit is obvious in logs."""

    def __init__(self, message):
        self.msg = message

    def __str__(self):
        active_skin = config.skin.primary_skin.value
        return "{%s}: %s. Please contact the skin's author!" % (active_skin, self.msg)
dom_skins = [ ]  # list of (path_prefix, parsed skin XML root), in load order
def addSkin(name, scope = SCOPE_SKIN):
    """Parse skin file *name* and append its DOM root to dom_skins.

    Returns True when the file exists and parses cleanly, False otherwise.
    """
    # read the skin
    filename = resolveFilename(scope, name)
    if fileExists(filename):
        mpath = os.path.dirname(filename) + "/"
        try:
            dom_skins.append((mpath, xml.etree.cElementTree.parse(filename).getroot()))
        except:
            print "[SKIN ERROR] error in %s" % filename
            return False
        else:
            return True
    return False
# get own skin_user_skinname.xml file, if exist
def skin_user_skinname():
    """Return the name of the per-skin user override file
    (skin_user_<skindir>.xml) if it exists in SCOPE_CONFIG, else None."""
    primary = config.skin.primary_skin.value
    name = "skin_user_%s.xml" % primary[:primary.rfind('/')]
    if fileExists(resolveFilename(SCOPE_CONFIG, name)):
        return name
    return None
# we do our best to always select the "right" value
# skins are loaded in order of priority: skin with
# highest priority is loaded last, usually the user-provided
# skin.
# currently, loadSingleSkinData (colors, bordersets etc.)
# are applied one-after-each, in order of ascending priority.
# the dom_skin will keep all screens in descending priority,
# so the first screen found will be used.
# example: loadSkin("nemesis_greenline/skin.xml")
config.skin = ConfigSubsection()
DEFAULT_SKIN = "PLi-HD/skin.xml"
# on SD hardware, PLi-HD will not be available
if not fileExists(resolveFilename(SCOPE_SKIN, DEFAULT_SKIN)):
    # in that case, fallback to Magic (which is an SD skin)
    DEFAULT_SKIN = "Magic/skin.xml"
config.skin.primary_skin = ConfigText(default=DEFAULT_SKIN)

profile("LoadSkin")
# User overrides load first (lowest priority is loaded first here; see the
# priority notes above).
res = None
name = skin_user_skinname()
if name:
    res = addSkin(name, SCOPE_CONFIG)
if not name or not res:
    addSkin('skin_user.xml', SCOPE_CONFIG)

# some boxes lie about their dimensions
addSkin('skin_box.xml')
# add optional discrete second infobar
addSkin('skin_second_infobar.xml')
display_skin_id = 1
addSkin('skin_display.xml')
addSkin('skin_text.xml')
addSkin('skin_subtitles.xml')

# Load the configured primary skin; on any failure fall back to the default
# skin (or the bare 'skin.xml' if the default itself was the one configured).
try:
    if not addSkin(config.skin.primary_skin.value):
        raise SkinError, "primary skin not found"
except Exception, err:
    print "SKIN ERROR:", err
    skin = DEFAULT_SKIN
    if config.skin.primary_skin.value == skin:
        skin = 'skin.xml'
    print "defaulting to standard skin...", skin
    config.skin.primary_skin.value = skin
    addSkin(skin)
    del skin

addSkin('skin_default.xml')
profile("LoadSkinDefaultDone")
#
# Convert a string into a number. Used to convert object position and size attributes into a number
# s is the input string.
# e is the the parent object size to do relative calculations on parent
# size is the size of the object size (e.g. width or height)
# font is a font object to calculate relative to font sizes
# Note some constructs for speeding up simple cases that are very common.
# Can do things like: 10+center-10w+4%
# To center the widget on the parent widget,
# but move forward 10 pixels and 4% of parent width
# and 10 character widths backward
# Multiplication, division and subexpressions are also allowed: 3*(e-c/2)
#
# Usage: center : center the object on parent based on parent size and object size
#        e      : take the parent size/width
#        c      : take the center point of parent size/width
#        %      : take given percentage of parent size/width
# w : multiply by current font width
# h : multiply by current font height
#
def parseCoordinate(s, e, size=0, font=None):
    """Evaluate one skin coordinate expression.

    s    -- the expression string ("center", "*", a plain integer, or an
            arithmetic expression using e/c/w/h/% shorthands)
    e    -- parent size along this axis
    size -- the object's own size along this axis (for "center")
    font -- font alias name, needed only when 'w'/'h' terms are used

    Returns None for '*', otherwise a non-negative integer.
    """
    expr = s.strip()
    if expr == "center":  # fast path for the most common case
        result = (e - size) / 2
    elif expr == '*':
        return None
    else:
        try:
            result = int(expr)  # plain number: no substitution needed
        except:
            # Substitute symbolic terms, then evaluate the arithmetic.
            # Order matters: "center" must go before 'e' and 'c'.
            if 't' in expr:  # cheap guard: "center" contains 't'
                expr = expr.replace("center", str((e - size) / 2.0))
            if 'e' in expr:
                expr = expr.replace("e", str(e))
            if 'c' in expr:
                expr = expr.replace("c", str(e / 2.0))
            if 'w' in expr:
                expr = expr.replace("w", "*" + str(fonts[font][3]))
            if 'h' in expr:
                expr = expr.replace("h", "*" + str(fonts[font][2]))
            if '%' in expr:
                expr = expr.replace("%", "*" + str(e / 100.0))
            try:
                result = int(expr)
            except:
                result = eval(expr)
    if result < 0:
        return 0
    return int(result)  # make sure an integer value is returned
def getParentSize(object, desktop):
    """Return the size of *object*'s effective parent widget, falling back
    to the desktop size when the widget has no parent."""
    size = eSize()
    if object:
        parent = object.getParent()
        # For some widgets (e.g. ScrollLabel) the skin attributes are applied to
        # a child widget, instead of to the widget itself. In that case, the parent
        # we have here is not the real parent, but it is the main widget.
        # We have to go one level higher to get the actual parent.
        # We can detect this because the 'parent' will not have a size yet
        # (the main widget's size will be calculated internally, as soon as the child
        # widget has parsed the skin attributes)
        if parent and parent.size().isEmpty():
            parent = parent.getParent()
        if parent:
            size = parent.size()
        elif desktop:
            #widget has no parent, use desktop size instead for relative coordinates
            size = desktop.size()
    return size
def parseValuePair(s, scale, object = None, desktop = None, size = None):
    """Parse an 'x,y' string into a scaled (x, y) tuple.

    The parent size is looked up only when one of the relative shorthands
    (c, e, %) appears in either coordinate.
    NOTE(review): parseCoordinate() returns None for '*', which would make
    the scaling below raise -- presumably '*' is not valid here; confirm.
    """
    x, y = s.split(',')
    parentsize = eSize()
    if object and ('c' in x or 'c' in y or 'e' in x or 'e' in y or
                   '%' in x or '%' in y): # need parent size for ce%
        parentsize = getParentSize(object, desktop)
    xval = parseCoordinate(x, parentsize.width(), size and size.width() or 0)
    yval = parseCoordinate(y, parentsize.height(), size and size.height() or 0)
    return (xval * scale[0][0] / scale[0][1], yval * scale[1][0] / scale[1][1])
def parsePosition(s, scale, object = None, desktop = None, size = None):
    """Parse an 'x,y' string into a scaled ePoint."""
    (x, y) = parseValuePair(s, scale, object, desktop, size)
    return ePoint(x, y)
def parseSize(s, scale, object = None, desktop = None):
    """Parse a 'width,height' string into a scaled eSize."""
    (x, y) = parseValuePair(s, scale, object, desktop)
    return eSize(x, y)
def parseFont(s, scale):
    """Turn a font spec into a gFont.

    s is either a predefined alias from the global *fonts* table or a
    literal 'name;size' pair; the size is scaled by scale[0].
    """
    if s in fonts:
        alias = fonts[s]
        name = alias[0]
        size = alias[1]
    else:
        name, size = s.split(';')
    return gFont(name, int(size) * scale[0][0] / scale[0][1])
def parseColor(s):
    """Translate '#aarrggbb' or a named color into a gRGB value."""
    if s[0] == '#':
        # hexadecimal literal (base 16, skipping the leading '#')
        return gRGB(int(s[1:], 0x10))
    try:
        return colorNames[s]
    except:
        raise SkinError("color '%s' must be #aarrggbb or valid named color" % (s))
def collectAttributes(skinAttributes, node, context, skin_path_prefix=None, ignore=(), filenames=frozenset(("pixmap", "pointer", "seek_pointer", "backgroundPixmap", "selectionPixmap", "sliderPixmap", "scrollbarbackgroundPixmap"))):
    """Collect (attribute, value) pairs from XML *node* into skinAttributes.

    Filename-valued attributes are resolved against the skin path first.
    'position'/'size'/'font' are held back and resolved through the layout
    *context* so that position/size end up appended last, after any flags.
    """
    # walk all attributes
    size = None
    pos = None
    font = None
    for attrib, value in node.items():
        if attrib not in ignore:
            if attrib in filenames:
                value = resolveFilename(SCOPE_CURRENT_SKIN, value, path_prefix=skin_path_prefix)
            # Bit of a hack this, really. When a window has a flag (e.g. wfNoBorder)
            # it needs to be set at least before the size is set, in order for the
            # window dimensions to be calculated correctly in all situations.
            # If wfNoBorder is applied after the size has been set, the window will fail to clear the title area.
            # Similar situation for a scrollbar in a listbox; when the scrollbar setting is applied after
            # the size, a scrollbar will not be shown until the selection moves for the first time
            if attrib == 'size':
                size = value.encode("utf-8")
            elif attrib == 'position':
                pos = value.encode("utf-8")
            elif attrib == 'font':
                # font is both recorded immediately and used for coordinate parsing
                font = value.encode("utf-8")
                skinAttributes.append((attrib, font))
            else:
                skinAttributes.append((attrib, value.encode("utf-8")))
    if pos is not None:
        pos, size = context.parse(pos, size, font)
        skinAttributes.append(('position', pos))
    if size is not None:
        skinAttributes.append(('size', size))
def morphRcImagePath(value):
    """Swap the default remote-control image path for the box-specific one
    when a non-default RC model is detected."""
    if rc_model.rcIsDefault() is False:
        if value in ('/usr/share/enigma2/skin_default/rc.png',
                     '/usr/share/enigma2/skin_default/rcold.png'):
            value = rc_model.getRcImg()
    return value
def loadPixmap(path, desktop):
    """Load a pixmap for *desktop*, stripping any trailing '#option' suffix
    from the path.  Raises SkinError when the file cannot be loaded."""
    marker = path.find("#")
    if marker != -1:
        path = path[:marker]
    pixmap = LoadPixmap(morphRcImagePath(path), desktop)
    if pixmap is None:
        raise SkinError("pixmap file %s not found!" % (path))
    return pixmap
class AttributeParser:
def __init__(self, guiObject, desktop, scale=((1,1),(1,1))):
self.guiObject = guiObject
self.desktop = desktop
self.scaleTuple = scale
def applyOne(self, attrib, value):
try:
getattr(self, attrib)(value)
except AttributeError:
print "[Skin] Attribute not implemented:", attrib, "value:", value
except SkinError, ex:
print "[Skin] Error:", ex
def applyAll(self, attrs):
for attrib, value in attrs:
self.applyOne(attrib, value)
def conditional(self, value):
pass
def position(self, value):
if isinstance(value, tuple):
self.guiObject.move(ePoint(*value))
else:
self.guiObject.move(parsePosition(value, self.scaleTuple, self.guiObject, self.desktop, self.guiObject.csize()))
def size(self, value):
if isinstance(value, tuple):
self.guiObject.resize(eSize(*value))
else:
self.guiObject.resize(parseSize(value, self.scaleTuple, self.guiObject, self.desktop))
def title(self, value):
self.guiObject.setTitle(_(value))
def text(self, value):
self.guiObject.setText(_(value))
def font(self, value):
self.guiObject.setFont(parseFont(value, self.scaleTuple))
def zPosition(self, value):
self.guiObject.setZPosition(int(value))
def itemHeight(self, value):
self.guiObject.setItemHeight(int(value))
def pixmap(self, value):
ptr = loadPixmap(value, self.desktop)
self.guiObject.setPixmap(ptr)
def backgroundPixmap(self, value):
ptr = loadPixmap(value, self.desktop)
self.guiObject.setBackgroundPicture(ptr)
def selectionPixmap(self, value):
ptr = loadPixmap(value, self.desktop)
self.guiObject.setSelectionPicture(ptr)
def sliderPixmap(self, value):
ptr = loadPixmap(value, self.desktop)
self.guiObject.setSliderPicture(ptr)
def scrollbarbackgroundPixmap(self, value):
ptr = loadPixmap(value, self.desktop)
self.guiObject.setScrollbarBackgroundPicture(ptr)
def alphatest(self, value):
self.guiObject.setAlphatest(
{ "on": 1,
"off": 0,
"blend": 2,
}[value])
def scale(self, value):
self.guiObject.setScale(1)
def orientation(self, value): # used by eSlider
try:
self.guiObject.setOrientation(*
{ "orVertical": (self.guiObject.orVertical, False),
"orTopToBottom": (self.guiObject.orVertical, False),
"orBottomToTop": (self.guiObject.orVertical, True),
"orHorizontal": (self.guiObject.orHorizontal, False),
"orLeftToRight": (self.guiObject.orHorizontal, False),
"orRightToLeft": (self.guiObject.orHorizontal, True),
}[value])
except KeyError:
print "oprientation must be either orVertical or orHorizontal!"
def valign(self, value):
try:
self.guiObject.setVAlign(
{ "top": self.guiObject.alignTop,
"center": self.guiObject.alignCenter,
"bottom": self.guiObject.alignBottom
}[value])
except KeyError:
print "valign must be either top, center or bottom!"
def halign(self, value):
try:
self.guiObject.setHAlign(
{ "left": self.guiObject.alignLeft,
"center": self.guiObject.alignCenter,
"right": self.guiObject.alignRight,
"block": self.guiObject.alignBlock
}[value])
except KeyError:
print "halign must be either left, center, right or block!"
def textOffset(self, value):
x, y = value.split(',')
self.guiObject.setTextOffset(ePoint(int(x) * self.scaleTuple[0][0] / self.scaleTuple[0][1], int(y) * self.scaleTuple[1][0] / self.scaleTuple[1][1]))
def flags(self, value):
flags = value.split(',')
for f in flags:
try:
fv = eWindow.__dict__[f]
self.guiObject.setFlag(fv)
except KeyError:
print "illegal flag %s!" % f
def backgroundColor(self, value):
self.guiObject.setBackgroundColor(parseColor(value))
def backgroundColorSelected(self, value):
self.guiObject.setBackgroundColorSelected(parseColor(value))
def foregroundColor(self, value):
self.guiObject.setForegroundColor(parseColor(value))
def foregroundColorSelected(self, value):
self.guiObject.setForegroundColorSelected(parseColor(value))
def shadowColor(self, value):
self.guiObject.setShadowColor(parseColor(value))
def selectionDisabled(self, value):
self.guiObject.setSelectionEnable(0)
def transparent(self, value):
self.guiObject.setTransparent(int(value))
def borderColor(self, value):
self.guiObject.setBorderColor(parseColor(value))
def borderWidth(self, value):
self.guiObject.setBorderWidth(int(value))
def scrollbarMode(self, value):
self.guiObject.setScrollbarMode(getattr(self.guiObject, value))
# { "showOnDemand": self.guiObject.showOnDemand,
# "showAlways": self.guiObject.showAlways,
# "showNever": self.guiObject.showNever,
# "showLeft": self.guiObject.showLeft
# }[value])
def enableWrapAround(self, value):
self.guiObject.setWrapAround(True)
def itemHeight(self, value):
self.guiObject.setItemHeight(int(value))
def pointer(self, value):
(name, pos) = value.split(':')
pos = parsePosition(pos, self.scaleTuple)
ptr = loadPixmap(name, self.desktop)
self.guiObject.setPointer(0, ptr, pos)
def seek_pointer(self, value):
(name, pos) = value.split(':')
pos = parsePosition(pos, self.scaleTuple)
ptr = loadPixmap(name, self.desktop)
self.guiObject.setPointer(1, ptr, pos)
def shadowOffset(self, value):
self.guiObject.setShadowOffset(parsePosition(value, self.scaleTuple))
def noWrap(self, value):
self.guiObject.setNoWrap(1)
def applySingleAttribute(guiObject, desktop, attrib, value, scale = ((1,1),(1,1))):
    """Legacy entry point: apply one skin attribute to guiObject."""
    # Someone still using applySingleAttribute?
    AttributeParser(guiObject, desktop, scale).applyOne(attrib, value)
def applyAllAttributes(guiObject, desktop, attributes, scale):
    """Apply a list of (attribute, value) pairs to guiObject."""
    AttributeParser(guiObject, desktop, scale).applyAll(attributes)
def loadSingleSkinData(desktop, skin, path_prefix):
    """loads skin data like colors, windowstyle etc.

    Processes, in order: <output> (framebuffer resolution plus 1080p UI
    parameter defaults), <include>, <colors>, <fonts> (incl. aliases),
    <parameters>, <subtitles>, <windowstyle> and <margin> sections.
    """
    assert skin.tag == "skin", "root element in skin must be 'skin'!"
    for c in skin.findall("output"):
        id = c.attrib.get('id')
        if id:
            id = int(id)
        else:
            id = 0
        if id == 0: # framebuffer
            for res in c.findall("resolution"):
                get_attr = res.attrib.get
                xres = get_attr("xres")
                if xres:
                    xres = int(xres)
                else:
                    xres = 720
                yres = get_attr("yres")
                if yres:
                    yres = int(yres)
                else:
                    yres = 576
                bpp = get_attr("bpp")
                if bpp:
                    bpp = int(bpp)
                else:
                    bpp = 32
                #print "Resolution:", xres,yres,bpp
                from enigma import gMainDC
                gMainDC.getInstance().setResolution(xres, yres)
                desktop.resize(eSize(xres, yres))
                if bpp != 32:
                    # load palette (not yet implemented)
                    pass
                # Full-HD skins get larger default layout parameters.
                if yres >= 1080:
                    parameters["FileListName"] = (68,4,1000,34)
                    parameters["FileListIcon"] = (7,4,52,37)
                    parameters["FileListMultiName"] = (90,3,1000,32)
                    parameters["FileListMultiIcon"] = (45, 4, 30, 30)
                    parameters["FileListMultiLock"] = (2,0,36,36)
                    parameters["ChoicelistDash"] = (0,3,1000,30)
                    parameters["ChoicelistName"] = (68,3,1000,30)
                    parameters["ChoicelistIcon"] = (7,0,52,38)
                    parameters["PluginBrowserName"] = (180,8,38)
                    parameters["PluginBrowserDescr"] = (180,42,25)
                    parameters["PluginBrowserIcon"] = (15,8,150,60)
                    parameters["PluginBrowserDownloadName"] = (120,8,38)
                    parameters["PluginBrowserDownloadDescr"] = (120,42,25)
                    parameters["PluginBrowserDownloadIcon"] = (15,0,90,76)
                    parameters["ServiceInfo"] = (0,0,450,50)
                    parameters["ServiceInfoLeft"] = (0,0,450,45)
                    parameters["ServiceInfoRight"] = (450,0,1000,45)
                    parameters["SelectionListDescr"] = (45,3,1000,32)
                    parameters["SelectionListLock"] = (0,2,36,36)
                    parameters["ConfigListSeperator"] = 300
                    parameters["VirtualKeyboard"] = (68,68)
                    parameters["PartnerBoxEntryListName"] = (8,2,225,38)
                    parameters["PartnerBoxEntryListIP"] = (180,2,225,38)
                    parameters["PartnerBoxEntryListPort"] = (405,2,150,38)
                    parameters["PartnerBoxEntryListType"] = (615,2,150,38)
                    parameters["PartnerBoxTimerServicename"] = (0,0,45)
                    parameters["PartnerBoxTimerName"] = (0,42,30)
                    parameters["PartnerBoxE1TimerTime"] = (0,78,255,30)
                    parameters["PartnerBoxE1TimerState"] = (255,78,255,30)
                    parameters["PartnerBoxE2TimerTime"] = (0,78,225,30)
                    parameters["PartnerBoxE2TimerState"] = (225,78,225,30)
                    parameters["PartnerBoxE2TimerIcon"] = (1050,8,20,20)
                    parameters["PartnerBoxE2TimerIconRepeat"] = (1050,38,20,20)
                    parameters["PartnerBoxBouquetListName"] = (0,0,45)
                    parameters["PartnerBoxChannelListName"] = (0,0,45)
                    parameters["PartnerBoxChannelListTitle"] = (0,42,30)
                    parameters["PartnerBoxChannelListTime"] = (0,78,225,30)
                    parameters["HelpMenuListHlp"] = (0,0,900,42)
                    parameters["HelpMenuListExtHlp0"] = (0,0,900,39)
                    parameters["HelpMenuListExtHlp1"] = (0,42,900,30)
                    parameters["AboutHddSplit"] = 1
                    parameters["DreamexplorerName"] = (62,0,1200,38)
                    parameters["DreamexplorerIcon"] = (15,4,30,30)
                    parameters["PicturePlayerThumb"] = (30,285,45,300,30,25)
                    parameters["PlayListName"] = (38,2,1000,34)
                    parameters["PlayListIcon"] = (7,7,24,24)
                    parameters["SHOUTcastListItem"] = (30,27,35,96,35,33,60,32)
    # Nested skin includes: looked up in the skin first, then in the
    # fallback image scope.
    for skininclude in skin.findall("include"):
        filename = skininclude.attrib.get("filename")
        if filename:
            skinfile = resolveFilename(SCOPE_CURRENT_SKIN, filename, path_prefix=path_prefix)
            if not fileExists(skinfile):
                skinfile = resolveFilename(SCOPE_SKIN_IMAGE, filename, path_prefix=path_prefix)
            if fileExists(skinfile):
                print "[SKIN] loading include:", skinfile
                loadSkin(skinfile)
    for c in skin.findall("colors"):
        for color in c.findall("color"):
            get_attr = color.attrib.get
            name = get_attr("name")
            color = get_attr("value")
            if name and color:
                colorNames[name] = parseColor(color)
                #print "Color:", name, color
            else:
                raise SkinError("need color and name, got %s %s" % (name, color))
    for c in skin.findall("fonts"):
        for font in c.findall("font"):
            get_attr = font.attrib.get
            filename = get_attr("filename", "<NONAME>")
            name = get_attr("name", "Regular")
            scale = get_attr("scale")
            if scale:
                scale = int(scale)
            else:
                scale = 100
            is_replacement = get_attr("replacement") and True or False
            render = get_attr("render")
            if render:
                render = int(render)
            else:
                render = 0
            resolved_font = resolveFilename(SCOPE_FONTS, filename, path_prefix=path_prefix)
            if not fileExists(resolved_font): #when font is not available look at current skin path
                skin_path = resolveFilename(SCOPE_CURRENT_SKIN, filename)
                if fileExists(skin_path):
                    resolved_font = skin_path
            addFont(resolved_font, name, scale, is_replacement, render)
            #print "Font: ", resolved_font, name, scale, is_replacement
        # Font aliases extend the global 'fonts' table used by parseFont().
        for alias in c.findall("alias"):
            get = alias.attrib.get
            try:
                name = get("name")
                font = get("font")
                size = int(get("size"))
                height = int(get("height", size)) # to be calculated some day
                width = int(get("width", size))
                global fonts
                fonts[name] = (font, size, height, width)
            except Exception, ex:
                print "[SKIN] bad font alias", ex
    for c in skin.findall("parameters"):
        for parameter in c.findall("parameter"):
            get = parameter.attrib.get
            try:
                name = get("name")
                value = get("value")
                # "1,2,3" -> [1, 2, 3]; "5" -> 5
                parameters[name] = "," in value and map(int, value.split(",")) or int(value)
            except Exception, ex:
                print "[SKIN] bad parameter", ex
    for c in skin.findall("subtitles"):
        from enigma import eWidget, eSubtitleWidget
        scale = ((1,1),(1,1))
        for substyle in c.findall("sub"):
            get_attr = substyle.attrib.get
            font = parseFont(get_attr("font"), scale)
            col = get_attr("foregroundColor")
            if col:
                foregroundColor = parseColor(col)
                haveColor = 1
            else:
                foregroundColor = gRGB(0xFFFFFF)
                haveColor = 0
            col = get_attr("borderColor")
            if col:
                borderColor = parseColor(col)
            else:
                borderColor = gRGB(0)
            borderwidth = get_attr("borderWidth")
            if borderwidth is None:
                # default: use a subtitle border
                borderWidth = 3
            else:
                borderWidth = int(borderwidth)
            face = eSubtitleWidget.__dict__[get_attr("name")]
            eSubtitleWidget.setFontStyle(face, font, haveColor, foregroundColor, borderColor, borderWidth)
    for windowstyle in skin.findall("windowstyle"):
        style = eWindowStyleSkinned()
        style_id = windowstyle.attrib.get("id")
        if style_id:
            style_id = int(style_id)
        else:
            style_id = 0
        # defaults
        font = gFont("Regular", 20)
        offset = eSize(20, 5)
        for title in windowstyle.findall("title"):
            get_attr = title.attrib.get
            offset = parseSize(get_attr("offset"), ((1,1),(1,1)))
            font = parseFont(get_attr("font"), ((1,1),(1,1)))
        style.setTitleFont(font);
        style.setTitleOffset(offset)
        #print "  ", font, offset
        for borderset in windowstyle.findall("borderset"):
            bsName = str(borderset.attrib.get("name"))
            for pixmap in borderset.findall("pixmap"):
                get_attr = pixmap.attrib.get
                bpName = get_attr("pos")
                filename = get_attr("filename")
                if filename and bpName:
                    png = loadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, filename, path_prefix=path_prefix), desktop)
                    style.setPixmap(eWindowStyleSkinned.__dict__[bsName], eWindowStyleSkinned.__dict__[bpName], png)
                #print "  borderset:", bpName, filename
        for color in windowstyle.findall("color"):
            get_attr = color.attrib.get
            colorType = get_attr("name")
            color = parseColor(get_attr("color"))
            try:
                style.setColor(eWindowStyleSkinned.__dict__["col" + colorType], color)
            except:
                raise SkinError("Unknown color %s" % (colorType))
                #pass
            #print "  color:", type, color
        x = eWindowStyleManager.getInstance()
        x.setStyle(style_id, style)
    for margin in skin.findall("margin"):
        style_id = margin.attrib.get("id")
        if style_id:
            style_id = int(style_id)
        else:
            style_id = 0
        r = eRect(0,0,0,0)
        v = margin.attrib.get("left")
        if v:
            r.setLeft(int(v))
        v = margin.attrib.get("top")
        if v:
            r.setTop(int(v))
        v = margin.attrib.get("right")
        if v:
            r.setRight(int(v))
        v = margin.attrib.get("bottom")
        if v:
            r.setBottom(int(v))
        # the "desktop" parameter is hardcoded to the UI screen, so we must ask
        # for the one that this actually applies to.
        getDesktop(style_id).setMargins(r)
dom_screens = {}  # screen name -> (XML element, skin path prefix)
def loadSkin(name, scope = SCOPE_SKIN):
    """Parse a skin file and register its <screen> elements in dom_screens.

    Screens for other display ids, unnamed screens and non-screen elements
    are cleared to free memory.  An already-registered screen name wins.
    """
    # Now a utility for plugins to add skin data to the screens
    global dom_screens, display_skin_id
    filename = resolveFilename(scope, name)
    if fileExists(filename):
        path = os.path.dirname(filename) + "/"
        for elem in xml.etree.cElementTree.parse(filename).getroot():
            if elem.tag == 'screen':
                name = elem.attrib.get('name', None)
                if name:
                    sid = elem.attrib.get('id', None)
                    if sid and (sid != display_skin_id):
                        # not for this display
                        elem.clear()
                        continue
                    if name in dom_screens:
                        print "loadSkin: Screen already defined elsewhere:", name
                        elem.clear()
                    else:
                        dom_screens[name] = (elem, path)
                else:
                    elem.clear()
            else:
                elem.clear()
def loadSkinData(desktop):
    """Apply all loaded skins' global data and collect their screens.

    Skins are processed in reverse load order so higher-priority skins
    overwrite lower-priority ones; dom_skins is deleted afterwards.
    """
    # Kinda hackish, but this is called once by mytest.py
    global dom_skins
    skins = dom_skins[:]
    skins.reverse()
    for (path, dom_skin) in skins:
        loadSingleSkinData(desktop, dom_skin, path)
        for elem in dom_skin:
            if elem.tag == 'screen':
                name = elem.attrib.get('name', None)
                if name:
                    sid = elem.attrib.get('id', None)
                    if sid and (sid != display_skin_id):
                        # not for this display
                        elem.clear()
                        continue
                    if name in dom_screens:
                        # Kill old versions, save memory
                        dom_screens[name][0].clear()
                    dom_screens[name] = (elem, path)
                else:
                    # without name, it's useless!
                    elem.clear()
            else:
                # non-screen element, no need for it any longer
                elem.clear()
    # no longer needed, we know where the screens are now.
    del dom_skins
class additionalWidget:
    """Plain record used by readSkin() to hold an extra GUI widget class
    (``.widget``, e.g. eLabel/ePixmap) together with its ``.skinAttributes``
    until the screen is actually instantiated.
    """
    pass
# Class that makes a tuple look like something else. Some plugins just assume
# that size is a string and try to parse it. This class makes that work.
class SizeTuple(tuple):
    """A 2-tuple masquerading as a "x,y" string.

    Some plugins assume a skin size/position is a string and call string
    methods on it; this subclass keeps them working while remaining a tuple.
    """

    def split(self, *args):
        # Any separator arguments are accepted for API parity and ignored.
        first, second = self
        return (str(first), str(second))

    def strip(self, *args):
        # Nothing to strip; just render as "x,y".
        return '%s,%s' % (self[0], self[1])

    def __str__(self):
        return '%s,%s' % (self[0], self[1])
class SkinContext:
    """A rectangular layout region (x, y, w, h) that child widgets are
    carved out of.

    ``parse()`` both computes a child's absolute position/size and shrinks
    this context by the space the child consumed (top/bottom/left/right
    alignment), so successive children tile the remaining area.
    """
    def __init__(self, parent=None, pos=None, size=None, font=None):
        if parent is not None:
            if pos is not None:
                # Carve our own rectangle out of the parent context.
                pos, size = parent.parse(pos, size, font)
                self.x, self.y = pos
                self.w, self.h = size
            else:
                self.x = None
                self.y = None
                self.w = None
                self.h = None
    def __str__(self):
        return "Context (%s,%s)+(%s,%s) " % (self.x, self.y, self.w, self.h)
    def parse(self, pos, size, font):
        """Resolve *pos*/*size* strings against this context.

        Returns ``(SizeTuple(pos), SizeTuple(size))`` in absolute
        coordinates and mutates self to the leftover area.
        """
        if pos == "fill":
            # Consume the whole remaining area.
            pos = (self.x, self.y)
            size = (self.w, self.h)
            self.w = 0
            self.h = 0
        else:
            w,h = size.split(',')
            w = parseCoordinate(w, self.w, 0, font)
            h = parseCoordinate(h, self.h, 0, font)
            if pos == "bottom":
                pos = (self.x, self.y + self.h - h)
                size = (self.w, h)
                self.h -= h
            elif pos == "top":
                pos = (self.x, self.y)
                size = (self.w, h)
                self.h -= h
                self.y += h
            elif pos == "left":
                pos = (self.x, self.y)
                size = (w, self.h)
                self.x += w
                self.w -= w
            elif pos == "right":
                pos = (self.x + self.w - w, self.y)
                size = (w, self.h)
                self.w -= w
            else:
                # Explicit "x,y" position: place relative to the context
                # origin without shrinking the remaining area.
                size = (w, h)
                pos = pos.split(',')
                pos = (self.x + parseCoordinate(pos[0], self.w, size[0], font), self.y + parseCoordinate(pos[1], self.h, size[1], font))
        return (SizeTuple(pos), SizeTuple(size))
class SkinContextStack(SkinContext):
    # A context that stacks things instead of aligning them
    def parse(self, pos, size, font):
        """Like SkinContext.parse(), but children overlap (stack): this
        context is never shrunk, so every child is placed against the full
        original rectangle.
        """
        if pos == "fill":
            pos = (self.x, self.y)
            size = (self.w, self.h)
        else:
            w,h = size.split(',')
            w = parseCoordinate(w, self.w, 0, font)
            h = parseCoordinate(h, self.h, 0, font)
            if pos == "bottom":
                pos = (self.x, self.y + self.h - h)
                size = (self.w, h)
            elif pos == "top":
                pos = (self.x, self.y)
                size = (self.w, h)
            elif pos == "left":
                pos = (self.x, self.y)
                size = (w, self.h)
            elif pos == "right":
                pos = (self.x + self.w - w, self.y)
                size = (w, self.h)
            else:
                size = (w, h)
                pos = pos.split(',')
                pos = (self.x + parseCoordinate(pos[0], self.w, size[0], font), self.y + parseCoordinate(pos[1], self.h, size[1], font))
        return (SizeTuple(pos), SizeTuple(size))
def readSkin(screen, skin, names, desktop):
    """Apply a skin to *screen*.

    Looks up a <screen> element for the first matching name in *names*
    (falling back to the screen's embedded skin), then walks the element
    tree, attaching skin attributes to the screen's components, creating
    renderers for source-based widgets, and collecting additional
    eLabel/ePixmap widgets and applets.
    """
    if not isinstance(names, list):
        names = [names]
    # try all skins, first existing one have priority
    global dom_screens
    for n in names:
        myscreen, path = dom_screens.get(n, (None,None))
        if myscreen is not None:
            # use this name for debug output
            name = n
            break
    else:
        name = "<embedded-in-'%s'>" % screen.__class__.__name__
    # otherwise try embedded skin
    if myscreen is None:
        myscreen = getattr(screen, "parsedSkin", None)
    # try uncompiled embedded skin
    if myscreen is None and getattr(screen, "skin", None):
        skin = screen.skin
        print "[SKIN] Parsing embedded skin", name
        if (isinstance(skin, tuple)):
            # A tuple of alternatives: pick the first <screen> whose id
            # matches the current display.
            for s in skin:
                candidate = xml.etree.cElementTree.fromstring(s)
                if candidate.tag == 'screen':
                    sid = candidate.attrib.get('id', None)
                    if (not sid) or (int(sid) == display_skin_id):
                        myscreen = candidate
                        break;
            else:
                print "[SKIN] Hey, no suitable screen!"
        else:
            myscreen = xml.etree.cElementTree.fromstring(skin)
        if myscreen:
            # Cache the parsed XML on the screen for later re-use.
            screen.parsedSkin = myscreen
    if myscreen is None:
        print "[SKIN] No skin to read..."
        myscreen = screen.parsedSkin = xml.etree.cElementTree.fromstring("<screen></screen>")
    screen.skinAttributes = [ ]
    skin_path_prefix = getattr(screen, "skin_path", path)
    # Root layout context: the full desktop bounds, stacked (not tiled).
    context = SkinContextStack()
    s = desktop.bounds()
    context.x = s.left()
    context.y = s.top()
    context.w = s.width()
    context.h = s.height()
    del s
    collectAttributes(screen.skinAttributes, myscreen, context, skin_path_prefix, ignore=("name",))
    context = SkinContext(context, myscreen.attrib.get('position'), myscreen.attrib.get('size'))
    screen.additionalWidgets = [ ]
    screen.renderer = [ ]
    visited_components = set()
    # now walk all widgets and stuff
    def process_none(widget, context):
        # Dispatch fallback for unknown tags: silently ignore.
        pass
    def process_widget(widget, context):
        get_attr = widget.attrib.get
        # ok, we either have 1:1-mapped widgets ('old style'), or 1:n-mapped
        # widgets (source->renderer).
        wname = get_attr('name')
        wsource = get_attr('source')
        if wname is None and wsource is None:
            print "widget has no name and no source!"
            return
        if wname:
            #print "Widget name=", wname
            visited_components.add(wname)
            # get corresponding 'gui' object
            try:
                attributes = screen[wname].skinAttributes = [ ]
            except:
                raise SkinError("component with name '" + wname + "' was not found in skin of screen '" + name + "'!")
            # assert screen[wname] is not Source
            collectAttributes(attributes, widget, context, skin_path_prefix, ignore=('name',))
        elif wsource:
            # get corresponding source
            #print "Widget source=", wsource
            while True: # until we found a non-obsolete source
                # parse our current "wsource", which might specifiy a "related screen" before the dot,
                # for example to reference a parent, global or session-global screen.
                scr = screen
                # resolve all path components
                path = wsource.split('.')
                while len(path) > 1:
                    scr = screen.getRelatedScreen(path[0])
                    if scr is None:
                        #print wsource
                        #print name
                        raise SkinError("specified related screen '" + wsource + "' was not found in screen '" + name + "'!")
                    path = path[1:]
                # resolve the source.
                source = scr.get(path[0])
                if isinstance(source, ObsoleteSource):
                    # however, if we found an "obsolete source", issue warning, and resolve the real source.
                    print "WARNING: SKIN '%s' USES OBSOLETE SOURCE '%s', USE '%s' INSTEAD!" % (name, wsource, source.new_source)
                    print "OBSOLETE SOURCE WILL BE REMOVED %s, PLEASE UPDATE!" % (source.removal_date)
                    if source.description:
                        print source.description
                    wsource = source.new_source
                else:
                    # otherwise, use that source.
                    break
            if source is None:
                raise SkinError("source '" + wsource + "' was not found in screen '" + name + "'!")
            wrender = get_attr('render')
            if not wrender:
                raise SkinError("you must define a renderer with render= for source '%s'" % (wsource))
            for converter in widget.findall("convert"):
                ctype = converter.get('type')
                assert ctype, "'convert'-tag needs a 'type'-attribute"
                #print "Converter:", ctype
                try:
                    parms = converter.text.strip()
                except:
                    parms = ""
                #print "Params:", parms
                converter_class = my_import('.'.join(("Components", "Converter", ctype))).__dict__.get(ctype)
                c = None
                # Re-use an already-connected converter with identical arguments.
                for i in source.downstream_elements:
                    if isinstance(i, converter_class) and i.converter_arguments == parms:
                        c = i
                if c is None:
                    c = converter_class(parms)
                    c.connect(source)
                source = c
            renderer_class = my_import('.'.join(("Components", "Renderer", wrender))).__dict__.get(wrender)
            renderer = renderer_class() # instantiate renderer
            renderer.connect(source) # connect to source
            attributes = renderer.skinAttributes = [ ]
            collectAttributes(attributes, widget, context, skin_path_prefix, ignore=('render', 'source'))
            screen.renderer.append(renderer)
    def process_applet(widget, context):
        # Compile inline Python from the skin; only onLayoutFinish is supported.
        try:
            codeText = widget.text.strip()
            widgetType = widget.attrib.get('type')
            code = compile(codeText, "skin applet", "exec")
        except Exception, ex:
            raise SkinError("applet failed to compile: " + str(ex))
        if widgetType == "onLayoutFinish":
            screen.onLayoutFinish.append(code)
        else:
            raise SkinError("applet type '%s' unknown!" % widgetType)
    def process_elabel(widget, context):
        # Decorative label that exists only in the skin, not in the screen code.
        w = additionalWidget()
        w.widget = eLabel
        w.skinAttributes = [ ]
        collectAttributes(w.skinAttributes, widget, context, skin_path_prefix, ignore=('name',))
        screen.additionalWidgets.append(w)
    def process_epixmap(widget, context):
        # Decorative pixmap that exists only in the skin.
        w = additionalWidget()
        w.widget = ePixmap
        w.skinAttributes = [ ]
        collectAttributes(w.skinAttributes, widget, context, skin_path_prefix, ignore=('name',))
        screen.additionalWidgets.append(w)
    def process_screen(widget, context):
        for w in widget.getchildren():
            # 'conditional' lists component names; skip the element unless the
            # screen actually has at least one of them.
            conditional = w.attrib.get('conditional')
            if conditional and not [i for i in conditional.split(",") if i in screen.keys()]:
                continue
            p = processors.get(w.tag, process_none)
            try:
                p(w, context)
            except SkinError, e:
                print "[Skin] SKIN ERROR in screen '%s' widget '%s':" % (name, w.tag), e
    def process_panel(widget, context):
        # A panel either embeds another named screen and/or opens a nested
        # layout context ('stack' overlaps children, default tiles them).
        n = widget.attrib.get('name')
        if n:
            try:
                s = dom_screens[n]
            except KeyError:
                print "[SKIN] Unable to find screen '%s' referred in screen '%s'" % (n, name)
            else:
                process_screen(s[0], context)
        layout = widget.attrib.get('layout')
        if layout == 'stack':
            cc = SkinContextStack
        else:
            cc = SkinContext
        try:
            c = cc(context, widget.attrib.get('position'), widget.attrib.get('size'), widget.attrib.get('font'))
        except Exception, ex:
            raise SkinError("Failed to create skincontext (%s,%s,%s) in %s: %s" % (widget.attrib.get('position'), widget.attrib.get('size'), widget.attrib.get('font'), context, ex) )
        process_screen(widget, c)
    processors = {
        None: process_none,
        "widget": process_widget,
        "applet": process_applet,
        "eLabel": process_elabel,
        "ePixmap": process_epixmap,
        "panel": process_panel
    }
    try:
        context.x = 0 # reset offsets, all components are relative to screen
        context.y = 0 # coordinates.
        process_screen(myscreen, context)
    except Exception, e:
        print "[Skin] SKIN ERROR in %s:" % name, e
    from Components.GUIComponent import GUIComponent
    nonvisited_components = [x for x in set(screen.keys()) - visited_components if isinstance(x, GUIComponent)]
    assert not nonvisited_components, "the following components in %s don't have a skin entry: %s" % (name, ', '.join(nonvisited_components))
    # This may look pointless, but it unbinds 'screen' from the nested scope. A better
    # solution is to avoid the nested scope above and use the context object to pass
    # things around.
    screen = None
    visited_components = None
|
paralab/Dendro4
|
python_scripts_sc16/csv_mat.py
|
# @author: Milinda Fernando
# School of Computing, University of Utah.
# generate all the slurm jobs for the sc16 poster, energy measurements,
import argparse
from subprocess import call
import os
if __name__ == "__main__":
    # Convert tab-separated "mat" CSV outputs of the SC16 tolerance sweep
    # into comma-separated files (one pair of files per tolerance value).
    parser = argparse.ArgumentParser(prog='slurm_pbs')
    parser.add_argument('-p', '--prefix', help='file prefix that you need to merge')
    parser.add_argument('-s', '--suffix', help='suffix of the file')
    # Fixed typo in help text: "flies" -> "files".
    parser.add_argument('-n', '--n', help='number of files that you need to merge')
    args = parser.parse_args()
    # Tolerance values used in the experiment sweep; each has its own file.
    tol_list = ['0.000010', '0.000100', '0.001000', '0.010000', '0.100000',
                '0.200000', '0.300000', '0.400000', '0.500000']
    # e.g. sendCommMap_M_tol_0.010000_npes_4096_pts_100000_ps_4096mat.csv
    for tol in tol_list:
        inFName = args.prefix + tol + args.suffix + '_' + args.n + 'mat' + '.csv'
        outFName = args.prefix + tol + args.suffix + '_' + args.n + 'mat_comma' + '.csv'
        # 'with' guarantees both files are closed even if a read/write fails
        # (the original leaked both handles on error).
        with open(inFName, 'r') as fin, open(outFName, 'w') as fout:
            for line in fin:
                # Replace tab separators with commas, normalising line endings.
                fout.write(line.strip().replace('\t', ',') + '\n')
    print('OK')
|
s-pearce/glider-utilities
|
glider_utils/parsers/dbd_parsers.py
|
#!/usr/bin/env python
"""
@package glider_utils
@file glider_utils.py
@author Stuart Pearce & Chris Wingard
@brief Module containing glider utiliities
"""
__author__ = 'Stuart Pearce & Chris Wingard'
__license__ = 'Apache 2.0'
import numpy as np
import warnings
#import pdb
import re
#import pygsw.vectors as gsw
class DbaDataParser(object):
    """
    A class that parses a glider data file and holds it in dictionaries.

    DbaDataParser parses a Slocum Electric Glider data file that has
    been converted to ASCII from binary, and holds the self describing
    header data in a header dictionary and the data in a data dictionary
    using the column labels as the dictionary keys.

    Construct an instance using the filename of the ASCII file containing
    the glider data. E.g.:
        glider_data = DbaDataParser('glider_data_file.mbd')

    glider_data.hdr_dict holds the header dictionary with the self
    describing ASCII tags from the file as keys.
    data_dict holds a data dictionary with the variable names (column
    labels) as keys.
    A sub-dictionary holds the name of the variable (same as the key),
    the data units, the number of binary bytes used to store each
    variable type, the name of the variable, and the data using the
    keys:
        'Name'
        'Units'
        'Number_of_Bytes'
        'Data'
    For example, to retrieve the data for 'variable_name':
        vn_data = glider_data.data_dict['variable_name']['Data']
    """
    def __init__(self, filename):
        """Open *filename*, parse header and data, and close the file.

        The file is closed even if parsing raises (the original leaked the
        handle on error).
        """
        self._fid = open(filename, 'r')
        try:
            self.hdr_dict = {}
            self.data_dict = {}
            self._read_header()
            self._read_data()
        finally:
            self._fid.close()

    def _read_header(self):
        """
        Read in the self describing header lines of an ASCII glider data
        file into ``self.hdr_dict``.
        """
        # There are usually 14 header lines; start with 14 and correct the
        # count once the 'num_ascii_tags' line is seen.
        num_hdr_lines = 14
        header_re = re.compile(r'(.*): (.*)$')
        hdr_line = 1
        while hdr_line <= num_hdr_lines:
            line = self._fid.readline()
            match = header_re.match(line)
            if match:
                key = match.group(1)
                value = match.group(2).strip()
                if 'num_ascii_tags' in key:
                    # The file itself states how many header lines it has.
                    num_hdr_lines = int(value)
                self.hdr_dict[key] = value
            hdr_line += 1

    def _read_data(self):
        """
        Read in the column labels, data type, number of bytes of each
        data type, and the data from an ASCII glider data file.

        Raises ValueError if a data cell is not parseable as a float.
        """
        column_labels = self._fid.readline().split()
        column_type = self._fid.readline().split()
        column_num_bytes = self._fid.readline().split()
        # Read each row of data & use np.array's ability to grab a
        # column of an array.
        data = [line.split() for line in self._fid.readlines()]
        # Bug fix: the deprecated alias np.float was removed in NumPy 1.24;
        # the builtin float is the documented replacement.
        data_array = np.array(data, dtype=float)
        # Warn if the described number of data columns != amount read in.
        num_columns = int(self.hdr_dict['sensors_per_cycle'])
        if num_columns != data_array.shape[1]:
            warnings.warn('Glider data file does not have the same '
                          'number of columns as described in header.\n'
                          'described %d, actual %d' % (num_columns,
                                                       data_array.shape[1])
                          )
        # Extract data to dictionary.
        for ii in range(num_columns):
            units = column_type[ii]
            data_col = data_array[:, ii]
            self.data_dict[column_labels[ii]] = {
                'Name': column_labels[ii],
                'Units': units,
                'Number_of_Bytes': int(column_num_bytes[ii]),
                'Data': data_col
            }
            # Change ISO "DDMM.mmm" lat or lon format to decimal degrees
            # under the extra key 'Data_deg'.
            if units == 'lat' or units == 'lon':
                min_d100, deg = np.modf(data_col / 100.)
                deg_col = deg + (min_d100 * 100.) / 60.
                self.data_dict[column_labels[ii]]['Data_deg'] = deg_col
        self.data_keys = column_labels
class DataVizDataParser(DbaDataParser):
    """
    Parser for glider files exported by the Data Visualizer.

    Same interface as DbaDataParser (``hdr_dict``, ``data_dict``,
    ``data_keys``), but the input format has no self-describing header
    tags: the first line is a filename header, followed by column labels,
    column units, and then the data rows.

    Each ``data_dict`` entry holds the keys 'Name', 'Units' and 'Data';
    unlike DbaDataParser there is no 'Number_of_Bytes' entry, and the data
    values are kept as strings (lat/lon columns are not plain floats).
    """

    def _read_header(self):
        # Data Visualizer files carry no header tags; hdr_dict stays empty.
        pass

    def _read_data(self):
        """Read column labels, units, and all data rows from the file."""
        # Leading filename line is of no use here; consume and discard it.
        self._fid.readline()
        labels = self._fid.readline().split()
        units = self._fid.readline().split()
        # Slurp the remaining rows and let numpy give us column slicing.
        rows = [row.split() for row in self._fid.readlines()]
        table = np.array(rows)  # strings on purpose (see class docstring)
        for idx in range(len(labels)):
            label = labels[idx]
            self.data_dict[label] = {
                'Name': label,
                'Units': units[idx],
                'Data': table[:, idx]
            }
        self.data_keys = labels
class GliderData(dict):
    """ An object specifically to store Slocum glider data.

    Behaves exactly like a built-in ``dict``; the subclass exists so glider
    data sets can be distinguished by type.
    """
    def __init__(self, *args, **kwargs):
        # Bug fix: the original signature omitted ``self`` (so every
        # instantiation raised TypeError) and referenced ``dict.__init__``
        # without calling it. Forward any initial mapping/kwargs to dict.
        dict.__init__(self, *args, **kwargs)
|
jfroco/atari800-rpi
|
atari5200.py
|
#!/usr/bin/python
# Sync joystick button mappings from the RetroArch joypad configuration into
# the atari800 emulator configuration (/home/pi/.atari800.cfg), so Atari 5200
# controls match the pad RetroPie already knows about. Python 2 script.
import os, struct, array
from fcntl import ioctl

# Default SDL button/axis numbers; overridden below from the RetroArch config.
SDL_JOY_0_SELECT = 8
SDL_JOY_0_START = 9
SDL_JOY_0_TRIGGER1 = 0
SDL_JOY_0_TRIGGER2 = 1
SDL_JOY_0_ASTERISK = 2
SDL_JOY_0_HASH = 3
SDL_JOY_0_SECOND_AXIS = 2
# Iterate over the joystick devices.
# print('Available devices:')
devices = sorted(os.listdir('/dev/input'))
joysticks = []
for fn in devices:
    if fn.startswith('js'):
        # print(' /dev/input/%s' % fn)
        joysticks.append("/dev/input/%s" % fn)
joysticks = sorted(joysticks)
print "First joystick is %s" % joysticks[0]
# Open the joystick device.
fn = joysticks[0]
# print('Opening %s...' % fn)
jsdev = open(fn, 'rb')
# Query the kernel for the device's human-readable name.
buf = array.array('c', ['\0'] * 64)
ioctl(jsdev, 0x80006a13 + (0x10000 * len(buf)), buf) # JSIOCGNAME(len)
js_name = ("%s" % buf.tostring()).partition(b'\0')[0]
# print('Device name: %s' % js_name)
jsdev.close()
# RetroPie names the per-pad config file after the device name, spaces removed.
js_cfg = "/opt/retropie/configs/all/retroarch-joypads/%s.cfg" % js_name.replace(" ", "")
print "Getting Retroarch configuration for %s" % js_cfg
# print(js_cfg)
f = open("%s" % js_cfg, "r")
content = f.read()
lines = content.split("\n")
# Parse 'key = "value"' lines and pick the button numbers we care about.
for line in lines:
    if line:
        p = line.replace(" ", "").split("=")
        # print "Processing %s" % p[0]
        if p[0] == "input_select_btn":
            SDL_JOY_0_SELECT = p[1].replace('"', '')
        elif p[0] == "input_start_btn":
            SDL_JOY_0_START = p[1].replace('"', '')
        elif p[0] == "input_a_btn":
            SDL_JOY_0_TRIGGER1 = p[1].replace('"', '')
        elif p[0] == "input_b_btn":
            SDL_JOY_0_TRIGGER2 = p[1].replace('"', '')
        elif p[0] == "input_x_btn":
            SDL_JOY_0_ASTERISK = p[1].replace('"', '')
        elif p[0] == "input_y_btn":
            SDL_JOY_0_HASH = p[1].replace('"', '')
        elif p[0] == "input_r_x_minus_axis":
            # Axis value may carry a leading '-'; strip it to get the axis number.
            SDL_JOY_0_SECOND_AXIS = p[1].replace('"', '').replace("-", "")
f.close()
atari800_cfg = "/home/pi/.atari800.cfg"
print "Updating configuration in %s with" % atari800_cfg
print "SDL_JOY_0_SELECT=%s" % SDL_JOY_0_SELECT
print "SDL_JOY_0_START=%s" % SDL_JOY_0_START
print "SDL_JOY_0_TRIGGER1=%s" % SDL_JOY_0_TRIGGER1
print "SDL_JOY_0_TRIGGER2=%s" % SDL_JOY_0_TRIGGER2
print "SDL_JOY_0_ASTERISK=%s" % SDL_JOY_0_ASTERISK
print "SDL_JOY_0_HASH=%s" % SDL_JOY_0_HASH
print "SDL_JOY_0_SECOND_AXIS=%s" % SDL_JOY_0_SECOND_AXIS
# Rewrite the atari800 config line-by-line, replacing only the joystick keys.
f = open("%s" % atari800_cfg, "r")
content = f.read()
f.close()
new_data = ""
lines = content.split("\n")
for line in lines:
    if line.startswith("SDL_JOY_0_SELECT"):
        line = "SDL_JOY_0_SELECT=%s" % SDL_JOY_0_SELECT
    elif line.startswith("SDL_JOY_0_START"):
        line = "SDL_JOY_0_START=%s" % SDL_JOY_0_START
    elif line.startswith("SDL_JOY_0_TRIGGER1"):
        line = "SDL_JOY_0_TRIGGER1=%s" % SDL_JOY_0_TRIGGER1
    elif line.startswith("SDL_JOY_0_TRIGGER2"):
        line = "SDL_JOY_0_TRIGGER2=%s" % SDL_JOY_0_TRIGGER2
    elif line.startswith("SDL_JOY_0_ASTERISK"):
        line = "SDL_JOY_0_ASTERISK=%s" % SDL_JOY_0_ASTERISK
    elif line.startswith("SDL_JOY_0_HASH"):
        line = "SDL_JOY_0_HASH=%s" % SDL_JOY_0_HASH
    elif line.startswith("SDL_JOY_0_SECOND_AXIS"):
        line = "SDL_JOY_0_SECOND_AXIS=%s" % SDL_JOY_0_SECOND_AXIS
    new_data += line + "\n"
# print new_data
f = open("%s" % atari800_cfg, 'w')
f.write(new_data)
f.close()
|
sdgdsffdsfff/jumpserver
|
apps/perms/api/user_permission/common.py
|
# -*- coding: utf-8 -*-
#
import uuid
from django.shortcuts import get_object_or_404
from rest_framework.views import APIView, Response
from rest_framework.generics import (
ListAPIView, get_object_or_404, RetrieveAPIView
)
from common.permissions import IsOrgAdminOrAppUser, IsOrgAdmin
from common.utils import get_logger
from ...utils import (
AssetPermissionUtilV2
)
from ...hands import User, Asset, SystemUser
from ... import serializers
from ...models import Action
from .mixin import UserAssetPermissionMixin
logger = get_logger(__name__)

# Explicit public API of this module.
__all__ = [
    'RefreshAssetPermissionCacheApi',
    'UserGrantedAssetSystemUsersApi',
    'ValidateUserAssetPermissionApi',
    'GetUserAssetPermissionActionsApi',
]
class GetUserAssetPermissionActionsApi(UserAssetPermissionMixin,
                                       RetrieveAPIView):
    """Return the permission actions a user holds on one asset via one
    system user (asset_id / system_user_id / user_id from the query string).
    """
    permission_classes = (IsOrgAdminOrAppUser,)
    serializer_class = serializers.ActionsSerializer

    def get_obj(self):
        # Target user for the permission mixin, resolved from the query string.
        user_id = self.request.query_params.get('user_id', '')
        user = get_object_or_404(User, id=user_id)
        return user

    def get_object(self):
        asset_id = self.request.query_params.get('asset_id', '')
        system_id = self.request.query_params.get('system_user_id', '')
        try:
            # Validate ids are well-formed UUIDs before hitting the DB.
            asset_id = uuid.UUID(asset_id)
            system_id = uuid.UUID(system_id)
        except ValueError:
            # NOTE(review): returning a Response from get_object() is unusual
            # for DRF (it will be serialized as the retrieved object) — confirm
            # this is the intended behavior for malformed ids.
            return Response({'msg': False}, status=403)
        asset = get_object_or_404(Asset, id=asset_id)
        system_user = get_object_or_404(SystemUser, id=system_id)
        # Map of system_user -> action bitmask for this asset.
        system_users_actions = self.util.get_asset_system_users_with_actions(asset)
        actions = system_users_actions.get(system_user)
        return {"actions": actions}
class ValidateUserAssetPermissionApi(UserAssetPermissionMixin, APIView):
    """Answer whether a user may perform ``action_name`` on an asset through
    a given system user: 200 {'msg': True} if allowed, 403 otherwise.
    """
    permission_classes = (IsOrgAdminOrAppUser,)

    def get_obj(self):
        # Target user for the permission mixin, resolved from the query string.
        user_id = self.request.query_params.get('user_id', '')
        user = get_object_or_404(User, id=user_id)
        return user

    def get(self, request, *args, **kwargs):
        asset_id = request.query_params.get('asset_id', '')
        system_id = request.query_params.get('system_user_id', '')
        action_name = request.query_params.get('action_name', '')
        try:
            # Validate ids are well-formed UUIDs before hitting the DB.
            asset_id = uuid.UUID(asset_id)
            system_id = uuid.UUID(system_id)
        except ValueError:
            # Malformed ids are treated as "not permitted" rather than a 500.
            return Response({'msg': False}, status=403)
        asset = get_object_or_404(Asset, id=asset_id)
        system_user = get_object_or_404(SystemUser, id=system_id)
        system_users_actions = self.util.get_asset_system_users_with_actions(
            asset)
        actions = system_users_actions.get(system_user)
        if action_name in Action.value_to_choices(actions):
            return Response({'msg': True}, status=200)
        return Response({'msg': False}, status=403)
class RefreshAssetPermissionCacheApi(RetrieveAPIView):
    """Expire every user's cached permission tree so it is rebuilt lazily on
    the next access. Admin-only.
    """
    permission_classes = (IsOrgAdmin,)

    def retrieve(self, request, *args, **kwargs):
        AssetPermissionUtilV2.expire_all_user_tree_cache()
        return Response({'msg': True}, status=200)
class UserGrantedAssetSystemUsersApi(UserAssetPermissionMixin, ListAPIView):
    """List the system users a user is granted on one asset, each annotated
    with its action bitmask, ordered by system-user priority.
    """
    permission_classes = (IsOrgAdminOrAppUser,)
    serializer_class = serializers.AssetSystemUserSerializer
    only_fields = serializers.AssetSystemUserSerializer.Meta.only_fields

    def get_queryset(self):
        asset = get_object_or_404(Asset, id=self.kwargs.get('asset_id'))
        su_with_actions = self.util.get_asset_system_users_with_actions(asset)
        granted = []
        for su, actions in su_with_actions.items():
            # Attach the computed actions so the serializer can expose them.
            su.actions = actions
            granted.append(su)
        # Stable sort preserves the mapping's order for equal priorities,
        # matching an in-place list.sort().
        return sorted(granted, key=lambda su: su.priority)
|
izapolsk/integration_tests
|
cfme/tests/automate/test_vmware_methods.py
|
"""This module contains tests that exercise the canned VMware Automate stuff."""
from textwrap import dedent
import fauxfactory
import pytest
from widgetastic.widget import View
from widgetastic_patternfly import Dropdown
from cfme import test_requirements
from cfme.common import BaseLoggedInPage
from cfme.infrastructure.provider.virtualcenter import VMwareProvider
from cfme.utils.generators import random_vm_name
from cfme.utils.log import logger
from cfme.utils.wait import wait_for
# Module-wide pytest marks: automate requirement, long-running, tier 3,
# parametrized over VMware providers that define a provisioning template.
pytestmark = [
    test_requirements.automate,
    pytest.mark.meta(server_roles="+automate"),
    pytest.mark.long_running,
    pytest.mark.ignore_stream("upstream"),
    pytest.mark.tier(3),
    pytest.mark.provider(
        [VMwareProvider], required_fields=[['provisioning', 'template']],
        scope="module")
]
@pytest.fixture(scope="module")
def cls(domain):
    """Copy the ManageIQ/System/Request automate class into the test domain
    and return the copy, so tests can add methods/instances to it safely.
    """
    original_class = domain.parent\
        .instantiate(name='ManageIQ')\
        .namespaces.instantiate(name='System')\
        .classes.instantiate(name='Request')
    original_class.copy_to(domain=domain)
    return domain.namespaces.instantiate(name='System').classes.instantiate(name='Request')
@pytest.fixture(scope="module")
def testing_group(appliance):
    """Create a VM/Instance custom button group with a random name; delete it
    after the module's tests finish.
    """
    group_desc = fauxfactory.gen_alphanumeric()
    group = appliance.collections.button_groups.create(
        text=group_desc,
        hover=group_desc,
        type=appliance.collections.button_groups.VM_INSTANCE
    )
    yield group
    group.delete_if_exists()
@pytest.fixture(scope="function")
def testing_vm(setup_provider, provider):
    """Provision a throwaway VM from the provider's full_template and clean
    it up after the test, even on failure. Skips if no template is defined.
    """
    collection = provider.appliance.provider_based_collection(provider)
    try:
        template_name = provider.data['templates']['full_template']['name']
    except KeyError:
        pytest.skip('Unable to identify full_template for provider: {}'.format(provider))
    vm = collection.instantiate(
        random_vm_name("ae-hd"),
        provider,
        template_name=template_name
    )
    try:
        vm.create_on_provider(find_in_cfme=True, allow_skip="default")
        yield vm
    finally:
        # Always remove the VM from the provider, even if the test failed.
        vm.cleanup_on_provider()
def test_vmware_vimapi_hotadd_disk(
        appliance, request, testing_group, testing_vm, domain, cls):
    """Tests hot adding a disk to vmware vm. This test exercises the `VMware_HotAdd_Disk` method,
    located in `/Integration/VMware/VimApi`

    Polarion:
        assignee: ghubale
        initialEstimate: 1/8h
        casecomponent: Automate
        caseimportance: critical
        tags: automate
        testSteps:
            1. It creates an instance in ``System/Request`` that can be accessible from eg. button
            2. Then it creates a button, that refers to the ``VMware_HotAdd_Disk`` in ``Request``.
               The button shall belong in the VM and instance button group.
            3. After the button is created, it goes to a VM's summary page, clicks the button.
            4. The test waits until the capacity of disks is raised.

    Bugzilla:
        1211627
        1311221
    """
    # Automate method that supplies the size (GB) of the disk to hot-add.
    meth = cls.methods.create(
        name=fauxfactory.gen_alpha(15, start="load_value_"),
        script=dedent('''\
            # Sets the capacity of the new disk.
            $evm.root['size'] = 1 # GB
            exit MIQ_OK
            '''))
    request.addfinalizer(meth.delete_if_exists)
    # Instance that calls the method and is accessible from the button
    instance = cls.instances.create(
        name=fauxfactory.gen_alpha(23, start="VMware_HotAdd_Disk_"),
        fields={
            "meth4": {'value': meth.name},  # To get the value
            "rel5": {'value': "/Integration/VMware/VimApi/VMware_HotAdd_Disk"},
        },
    )
    request.addfinalizer(instance.delete_if_exists)
    # Button that will invoke the dialog and action
    button_name = fauxfactory.gen_alphanumeric()
    button = testing_group.buttons.create(
        text=button_name,
        hover=button_name,
        system="Request",
        request=instance.name)
    request.addfinalizer(button.delete_if_exists)

    def _get_disk_capacity():
        # Re-read the VM summary page; returns the 'Total Allocation' text.
        view = testing_vm.load_details(refresh=True)
        return view.entities.summary('Datastore Allocation Summary').get_text_of('Total Allocation')

    original_disk_capacity = _get_disk_capacity()
    logger.info('Initial disk allocation: %s', original_disk_capacity)

    class CustomButtonView(View):
        custom_button = Dropdown(testing_group.text)

    view = appliance.browser.create_view(CustomButtonView)
    view.custom_button.item_select(button.text)
    view = appliance.browser.create_view(BaseLoggedInPage)
    view.flash.assert_no_error()
    try:
        # NOTE(review): comparison is on the displayed text, not a parsed
        # number — works while the unit prefix stays the same.
        wait_for(
            lambda: _get_disk_capacity() > original_disk_capacity, num_sec=180, delay=5)
    finally:
        logger.info('End disk capacity: %s', _get_disk_capacity())
|
GeeteshKhatavkar/gh0st_kernel_samsung_royxx
|
arm-2010.09/arm-none-eabi/lib/armv6-m/libstdc++.a-gdb.py
|
# -*- python -*-
# Copyright (C) 2009 Free Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys
import gdb
import os
import os.path
# Paths baked in at toolchain build time: where the libstdc++ pretty-printer
# python modules live, and where this objfile's library directory is.
pythondir = '/opt/codesourcery/arm-none-eabi/share/gcc-4.5.1/python'
libdir = '/opt/codesourcery/arm-none-eabi/lib/armv6-m'

# This file might be loaded when there is no current objfile. This
# can happen if the user loads it manually. In this case we don't
# update sys.path; instead we just hope the user managed to do that
# beforehand.
if gdb.current_objfile () is not None:
    # Update module path. We want to find the relative path from libdir
    # to pythondir, and then we want to apply that relative path to the
    # directory holding the objfile with which this file is associated.
    # This preserves relocatability of the gcc tree.

    # Do a simple normalization that removes duplicate separators.
    pythondir = os.path.normpath (pythondir)
    libdir = os.path.normpath (libdir)
    prefix = os.path.commonprefix ([libdir, pythondir])
    # In some bizarre configuration we might have found a match in the
    # middle of a directory name.
    if prefix[-1] != '/':
        prefix = os.path.dirname (prefix) + '/'
    # Strip off the prefix.
    pythondir = pythondir[len (prefix):]
    libdir = libdir[len (prefix):]
    # Compute the ".."s needed to get from libdir to the prefix.
    dotdots = ('..' + os.sep) * len (libdir.split (os.sep))
    objfile = gdb.current_objfile ().filename
    dir = os.path.join (os.path.dirname (objfile), dotdots, pythondir)
    if not dir in sys.path:
        sys.path.insert(0, dir)
    # Load the pretty-printers.
    from libstdcxx.v6.printers import register_libstdcxx_printers
    register_libstdcxx_printers (gdb.current_objfile ())
|
Kenneth-Posey/kens-old-projects
|
smokin-goldshop/handler/vip.py
|
import urllib
from models.vipsubscriber import VipSubscriber
from base import BaseHandler
class Vip(BaseHandler):
    """Request handler for viewing and editing a single VIP subscriber."""

    # Template rendered by BaseHandler for this page.
    LOCATION = "../views/vip.html"

    def GetContext(self):
        """Build the template context for a GET: look up the subscriber by
        the 'key' query parameter, when one is supplied."""
        context = {}
        unused_vip_list = []  # NOTE(review): unused; kept for parity with original
        key = urllib.unquote(self.request.get('key'))
        if key is not None and len(key) > 0:
            context['tVip'] = VipSubscriber.get(key)
        return context

    def PostContext(self):
        """Handle a POST: update (or create) a subscriber's forum name."""
        context = {}
        vip = VipSubscriber()
        forum_name = urllib.unquote(self.request.get('forumname'))
        key = urllib.unquote(self.request.get('key'))
        if key is not None and len(key) > 0:
            # Existing subscriber: replace the fresh instance with the stored one.
            vip = VipSubscriber.get(key)
            context['tVip'] = vip
        if forum_name is not None and len(forum_name) > 0:
            vip.subscriberForumName = forum_name
            vip.put()
        return context
|
botswana-harvard/bcvp
|
bcvp/bcvp_subject/admin/subject_locator_admin.py
|
from django.contrib import admin
from edc_registration.models import RegisteredSubject
from edc_locator.admin import BaseLocatorModelAdmin
from ..forms import SubjectLocatorForm
from ..models import SubjectLocator
class SubjectLocatorAdmin(BaseLocatorModelAdmin):
    """Django admin configuration for SubjectLocator (subject contact and
    follow-up details)."""
    # ModelForm providing validation for locator fields.
    form = SubjectLocatorForm
    fields = (
        'registered_subject',
        'report_datetime',
        'date_signed',
        'mail_address',
        'home_visit_permission',
        'physical_address',
        'may_follow_up',
        'subject_cell',
        'subject_cell_alt',
        'subject_phone',
        'subject_phone_alt',
        'may_call_work',
        'subject_work_place',
        'subject_work_phone',
        'may_contact_someone',
        'contact_name',
        'contact_rel',
        'contact_physical_address',
        'contact_cell',
        'contact_phone',
        'successful_mode_of_contact')
    list_display = ('may_follow_up', 'may_call_work')
    list_filter = ('may_follow_up', 'may_call_work')
    search_fields = (
        'registered_subject__subject_identifier', 'subject_cell', 'subject_cell_alt',
        'subject_phone', 'subject_phone_alt', 'subject_work_place', 'subject_work_phone')
    radio_fields = {"home_visit_permission": admin.VERTICAL,
                    "may_follow_up": admin.VERTICAL,
                    "may_call_work": admin.VERTICAL,
                    "may_contact_someone": admin.VERTICAL,
                    'successful_mode_of_contact': admin.VERTICAL}
    actions = []  # do not allow export to CSV

    def formfield_for_foreignkey(self, db_field, request, **kwargs):
        # Restrict the registered_subject dropdown to the subject passed via
        # the querystring (defaults to 0, yielding an empty queryset).
        if db_field.name == "registered_subject":
            kwargs["queryset"] = RegisteredSubject.objects.filter(id__exact=request.GET.get('registered_subject', 0))
        return super(SubjectLocatorAdmin, self).formfield_for_foreignkey(db_field, request, **kwargs)

admin.site.register(SubjectLocator, SubjectLocatorAdmin)
|
nagisa/Feeds
|
gdist/gschemas.py
|
import glob
import os
from distutils.dep_util import newer
from distutils.core import Command
from distutils.spawn import find_executable
from distutils.util import change_root
class build_gschemas(Command):
    """build gschema files

    Copy the GSettings schema files from the directory configured on
    the Distribution (``distribution.gschemas``) into the build tree
    under ``<build_base>/share/glib-2.0/schemas``.

    NOTE(review): the previous docstring described gettext .mo catalog
    building, which did not match what ``run()`` actually does.
    """

    description = "build gschemas used for dconf"
    user_options = []
    build_base = None

    def initialize_options(self):
        pass

    def finalize_options(self):
        # `gschemas` is a custom attribute expected on the Distribution.
        self.gschemas_directory = self.distribution.gschemas
        self.set_undefined_options('build', ('build_base', 'build_base'))

    def run(self):
        # The compiler is only checked for here; the schemas are compiled
        # at install time (see install_gschemas).
        if find_executable("glib-compile-schemas") is None:
            raise SystemExit("Error: 'glib-compile-schemas' not found.")
        basepath = os.path.join(self.build_base, 'share', 'glib-2.0', 'schemas')
        self.copy_tree(self.gschemas_directory, basepath)
class install_gschemas(Command):
    """install gschema files

    Copy the schema files staged by ``build_gschemas`` into
    ``<install_base>/share/glib-2.0/schemas`` (re-rooted via ``--root``
    when given), then recompile the schema cache there with
    ``glib-compile-schemas``.

    Fixes over the previous revision: the docstring and ``description``
    described gettext message catalogs (a copy-paste from another
    command), and ``self.root != None`` used equality instead of the
    idiomatic identity test.
    """

    description = "install gschema files"
    user_options = []
    skip_build = None
    build_base = None
    install_base = None
    root = None

    def initialize_options(self):
        pass

    def finalize_options(self):
        # Inherit settings from the standard `build` and `install` commands.
        self.set_undefined_options('build', ('build_base', 'build_base'))
        self.set_undefined_options(
            'install',
            ('root', 'root'),
            ('install_base', 'install_base'),
            ('skip_build', 'skip_build'))

    def run(self):
        if not self.skip_build:
            self.run_command('build_gschemas')
        src = os.path.join(self.build_base, 'share', 'glib-2.0', 'schemas')
        dest = os.path.join(self.install_base, 'share', 'glib-2.0', 'schemas')
        # PEP 8: compare against None with `is not`, not `!=`.
        if self.root is not None:
            dest = change_root(self.root, dest)
        self.copy_tree(src, dest)
        self.spawn(['glib-compile-schemas', dest])


__all__ = ["build_gschemas", "install_gschemas"]
|
mypaint/mypaint
|
gui/device.py
|
# This file is part of MyPaint.
# -*- coding: utf-8 -*-
# Copyright (C) 2014-2019 by the MyPaint Development Team.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
"""Device specific settings and configuration"""
## Imports
from __future__ import division, print_function
import logging
import collections
import re
from lib.gettext import C_
from lib.gibindings import Gtk
from lib.gibindings import Gdk
from lib.gibindings import Pango
from lib.observable import event
import gui.application
import gui.mode
logger = logging.getLogger(__name__)
## Device prefs

# The per-device settings are stored in the prefs in a sub-dict whose
# string keys are formed from the device name and enough extra
# information to (hopefully) identify the device uniquely. Names are not
# unique, and IDs vary according to the order in which you plug devices
# in. So for now, our unique strings use a combination of the device's
# name, its source as presented by GDK, and the number of axes.

_PREFS_ROOT = "input.devices"
# NOTE(review): this format constant appears unused in this module;
# _device_prefs_key() builds the equivalent "name:source:num_axes"
# string inline with %-formatting instead.
_PREFS_DEVICE_SUBKEY_FMT = "{name}:{source}:{num_axes}"

## Device type strings

# Translated, human-readable labels for the GDK input source types,
# used by the devices table in the preferences UI.
_DEVICE_TYPE_STRING = {
    Gdk.InputSource.CURSOR: C_(
        "prefs: device's type label",
        "Cursor/puck",
    ),
    Gdk.InputSource.ERASER: C_(
        "prefs: device's type label",
        "Eraser",
    ),
    Gdk.InputSource.KEYBOARD: C_(
        "prefs: device's type label",
        "Keyboard",
    ),
    Gdk.InputSource.MOUSE: C_(
        "prefs: device's type label",
        "Mouse",
    ),
    Gdk.InputSource.PEN: C_(
        "prefs: device's type label",
        "Pen",
    ),
    Gdk.InputSource.TOUCHPAD: C_(
        "prefs: device's type label",
        "Touchpad",
    ),
    Gdk.InputSource.TOUCHSCREEN: C_(
        "prefs: device's type label",
        "Touchscreen",
    ),
}
## Settings consts and classes

class AllowedUsage:
    """Consts describing how a device may interact with the canvas"""

    ANY = "any" #: Device can be used for any tasks.
    NOPAINT = "nopaint" #: No direct painting, but can manipulate objects.
    NAVONLY = "navonly" #: Device can only be used for navigation.
    IGNORED = "ignored" #: Device cannot interact with the canvas at all.

    # All recognized config values; also the order used to build the
    # dropdown in the settings editor.
    VALUES = (ANY, IGNORED, NOPAINT, NAVONLY)

    # Translated UI label for each config value.
    DISPLAY_STRING = {
        IGNORED: C_(
            "device settings: allowed usage",
            u"Ignore",
        ),
        ANY: C_(
            "device settings: allowed usage",
            u"Any Task",
        ),
        NOPAINT: C_(
            "device settings: allowed usage",
            u"Non-painting tasks",
        ),
        NAVONLY: C_(
            "device settings: allowed usage",
            u"Navigation only",
        ),
    }

    # Maps each config value to the gui.mode.Behavior flags a device
    # with that setting is allowed to trigger.
    BEHAVIOR_MASK = {
        ANY: gui.mode.Behavior.ALL,
        IGNORED: gui.mode.Behavior.NONE,
        NOPAINT: gui.mode.Behavior.NON_PAINTING,
        NAVONLY: gui.mode.Behavior.CHANGE_VIEW,
    }
class ScrollAction:
    """Consts describing how a device's scroll events should be used.

    The user can assign one of these values to a device to configure
    whether they'd prefer panning or scrolling for unmodified scroll
    events. This setting can be queried via the device monitor.
    """

    ZOOM = "zoom" #: Alter the canvas scaling
    PAN = "pan" #: Pan across the canvas

    # All recognized config values; also the dropdown order in the UI.
    VALUES = (ZOOM, PAN)

    # Translated UI label for each config value.
    DISPLAY_STRING = {
        ZOOM: C_("device settings: unmodified scroll action", u"Zoom"),
        PAN: C_("device settings: unmodified scroll action", u"Pan"),
    }
class Settings (object):
    """A device's settings

    Wraps a per-device prefs sub-dict: values are validated on the way
    in, and every change made through the property setters is persisted
    back into the prefs immediately.
    """

    DEFAULT_USAGE = AllowedUsage.VALUES[0]
    DEFAULT_SCROLL = ScrollAction.VALUES[0]

    def __init__(self, prefs, usage=DEFAULT_USAGE, scroll=DEFAULT_SCROLL):
        """Initialize from a prefs sub-dict.

        :param dict prefs: the per-device prefs sub-dict (mutated on save)
        :param usage: fallback usage when the prefs define none
        :param scroll: fallback scroll action when the prefs define none

        Fix: the ``usage`` and ``scroll`` arguments were previously
        accepted but silently ignored. They now act as fallback values
        when the prefs dict lacks a valid entry. The defaults are
        unchanged, so existing callers behave identically.
        """
        super(Settings, self).__init__()
        # Sanitize the constructor-supplied fallbacks.
        if usage not in AllowedUsage.VALUES:
            usage = self.DEFAULT_USAGE
        if scroll not in ScrollAction.VALUES:
            scroll = self.DEFAULT_SCROLL
        self._usage = usage
        self._scroll = scroll
        self._update_usage_mask()
        self._prefs = prefs
        self._load_from_prefs()

    @property
    def usage(self):
        """Allowed usage: one of AllowedUsage.VALUES."""
        return self._usage

    @usage.setter
    def usage(self, value):
        if value not in AllowedUsage.VALUES:
            raise ValueError("Unrecognized usage value")
        self._usage = value
        self._update_usage_mask()
        self._save_to_prefs()

    @property
    def usage_mask(self):
        """Behavior mask (gui.mode.Behavior flags) for the current usage."""
        return self._usage_mask

    @property
    def scroll(self):
        """Scroll action: one of ScrollAction.VALUES."""
        return self._scroll

    @scroll.setter
    def scroll(self, value):
        if value not in ScrollAction.VALUES:
            raise ValueError("Unrecognized scroll value")
        self._scroll = value
        self._save_to_prefs()

    def _load_from_prefs(self):
        """Load settings, falling back to the constructor-given values."""
        usage = self._prefs.get("usage", self._usage)
        if usage not in AllowedUsage.VALUES:
            usage = self.DEFAULT_USAGE
        self._usage = usage
        scroll = self._prefs.get("scroll", self._scroll)
        if scroll not in ScrollAction.VALUES:
            scroll = self.DEFAULT_SCROLL
        self._scroll = scroll
        self._update_usage_mask()

    def _save_to_prefs(self):
        """Write the current values back into the prefs sub-dict."""
        self._prefs.update({
            "usage": self._usage,
            "scroll": self._scroll,
        })

    def _update_usage_mask(self):
        # Cache the behavior flags so hot paths need no dict lookup.
        self._usage_mask = AllowedUsage.BEHAVIOR_MASK[self._usage]
## Main class defs

class Monitor (object):
    """Monitors device use & plugging, and manages their configuration

    An instance resides in the main application. It is responsible for
    monitoring known devices, determining their characteristics, and
    storing their settings. Per-device settings are stored in the main
    application preferences.
    """

    def __init__(self, app):
        """Initializes, assigning initial input device uses

        :param app: the owning Application instance, or None (testing).
        :type app: gui.application.Application
        """
        super(Monitor, self).__init__()
        self._app = app
        # Without an app (the _test() harness), keep prefs in a plain dict.
        if app is not None:
            self._prefs = app.preferences
        else:
            self._prefs = {}
        if _PREFS_ROOT not in self._prefs:
            self._prefs[_PREFS_ROOT] = {}
        # Transient device information
        self._device_settings = collections.OrderedDict()  # {dev: settings}
        self._last_event_device = None
        self._last_pen_device = None
        # Watch hotplug events, and seed settings for already-present
        # physical ("slave") devices.
        disp = Gdk.Display.get_default()
        mgr = disp.get_device_manager()
        mgr.connect("device-added", self._device_added_cb)
        mgr.connect("device-removed", self._device_removed_cb)
        self._device_manager = mgr
        for physical_device in mgr.list_devices(Gdk.DeviceType.SLAVE):
            self._init_device_settings(physical_device)

    ## Devices list

    def get_device_settings(self, device):
        """Gets the settings for a device

        :param Gdk.Device device: a physical ("slave") device
        :returns: A settings object which can be manipulated, or None
        :rtype: Settings

        Changes to the returned object made via its API are saved to the
        user preferences immediately.

        If the device is a keyboard, or is otherwise unsuitable as a
        pointing device, None is returned instead. The caller needs to
        check this case.
        """
        return (self._device_settings.get(device)
                or self._init_device_settings(device))

    def _init_device_settings(self, device):
        """Ensures that the device settings are loaded for a device

        Returns None (and stores nothing) for keyboards and for devices
        with fewer than two axes, which cannot act as pointing devices.
        """
        source = device.get_source()
        if source == Gdk.InputSource.KEYBOARD:
            return
        num_axes = device.get_n_axes()
        if num_axes < 2:
            return
        settings = self._device_settings.get(device)
        if not settings:
            # Vendor/product IDs are logged for information only.
            try:
                vendor_id = device.get_vendor_id()
                product_id = device.get_product_id()
            except AttributeError:
                # New in GDK 3.16
                vendor_id = "?"
                product_id = "?"
            logger.info(
                "New device %r"
                " (%s, axes:%d, class=%s, vendor=%r, product=%r)",
                device.get_name(),
                source.value_name,
                num_axes,
                device.__class__.__name__,
                vendor_id,
                product_id,
            )
            dev_prefs_key = _device_prefs_key(device)
            dev_prefs = self._prefs[_PREFS_ROOT].setdefault(dev_prefs_key, {})
            settings = Settings(dev_prefs)
            self._device_settings[device] = settings
            self.devices_updated()
        assert settings is not None
        return settings

    def _device_added_cb(self, mgr, device):
        """Informs that a device has been plugged in"""
        logger.debug("device-added %r", device.get_name())
        self._init_device_settings(device)

    def _device_removed_cb(self, mgr, device):
        """Informs that a device has been unplugged"""
        logger.debug("device-removed %r", device.get_name())
        self._device_settings.pop(device, None)
        self.devices_updated()

    @event
    def devices_updated(self):
        """Event: the devices list was changed"""

    def get_devices(self):
        """Yields devices and their settings, for UI stuff

        :rtype: iterator
        :returns: ultimately a sequence of (Gdk.Device, Settings) pairs
        """
        for device, settings in self._device_settings.items():
            yield (device, settings)

    ## Current device

    @event
    def current_device_changed(self, old_device, new_device):
        """Event: the current device has changed

        :param Gdk.Device old_device: Previous device used
        :param Gdk.Device new_device: New device used
        """

    def device_used(self, device):
        """Informs about a device being used, for use by controllers

        :param Gdk.Device device: the device being used
        :returns: True when the device is unchanged since the previous
            event; a falsy value (False, or None via fall-through) when
            it changed or is unusable as a pointing device.

        If the device has changed, this method notifies interested
        parties via the current_device_changed observable @event, and
        swaps the per-device brush via the brush manager.
        """
        if not self.get_device_settings(device):
            return False
        if device == self._last_event_device:
            return True
        self.current_device_changed(self._last_event_device, device)
        old_device = self._last_event_device
        new_device = device
        self._last_event_device = device
        # small problem with this code: it doesn't work well with brushes that
        # have (eraser not in [1.0, 0.0])
        new_device.name = new_device.props.name
        new_device.source = new_device.props.input_source
        logger.debug(
            "Device change: name=%r source=%s",
            new_device.name, new_device.source.value_name,
        )
        # When editing brush settings, it is often more convenient to use the
        # mouse. Because of this, we don't restore brushsettings when switching
        # to/from the mouse. We act as if the mouse was identical to the last
        # active pen device.
        if (new_device.source == Gdk.InputSource.MOUSE and
                self._last_pen_device):
            new_device = self._last_pen_device
        if new_device.source == Gdk.InputSource.PEN:
            self._last_pen_device = new_device
        if (old_device and old_device.source == Gdk.InputSource.MOUSE and
                self._last_pen_device):
            old_device = self._last_pen_device
        bm = self._app.brushmanager
        if old_device:
            # Clone for saving
            old_brush = bm.clone_selected_brush(name=None)
            bm.store_brush_for_device(old_device.name, old_brush)
        if new_device.source == Gdk.InputSource.MOUSE:
            # Avoid fouling up unrelated devbrushes at stroke end
            self._prefs.pop('devbrush.last_used', None)
        else:
            # Select the brush and update the UI.
            # Use a sane default if there's nothing associated
            # with the device yet.
            brush = bm.fetch_brush_for_device(new_device.name)
            if brush is None:
                if device_is_eraser(new_device):
                    brush = bm.get_default_eraser()
                else:
                    brush = bm.get_default_brush()
            self._prefs['devbrush.last_used'] = new_device.name
            bm.select_brush(brush)
class SettingsEditor (Gtk.Grid):
    """Per-device settings editor

    Presents the known pointing devices in a table, with editable
    dropdown columns for each device's allowed-usage and scroll-action
    settings. Edits are persisted immediately via the Settings objects.
    """

    ## Class consts

    # Column indexes into the usage / scroll combo ListStores:
    # column 0 holds the config value, column 1 the display string.
    _USAGE_CONFIG_COL = 0
    _USAGE_STRING_COL = 1
    _SCROLL_CONFIG_COL = 0
    _SCROLL_STRING_COL = 1

    __gtype_name__ = "MyPaintDeviceSettingsEditor"

    ## Initialization

    def __init__(self, monitor=None):
        """Initialize

        :param Monitor monitor: monitor instance (for testing)

        By default, the central app's `device_monitor` is used to permit
        parameterless construction.
        """
        super(SettingsEditor, self).__init__()
        if monitor is None:
            app = gui.application.get_app()
            monitor = app.device_monitor
        self._monitor = monitor
        self._devices_store = Gtk.ListStore(object)
        self._devices_view = Gtk.TreeView(model=self._devices_store)
        # Device name column.
        col = Gtk.TreeViewColumn(C_(
            "prefs: devices table: column header",
            # TRANSLATORS: Column's data is the device's name
            "Device",
        ))
        col.set_min_width(200)
        col.set_expand(True)
        col.set_sizing(Gtk.TreeViewColumnSizing.AUTOSIZE)
        self._devices_view.append_column(col)
        cell = Gtk.CellRendererText()
        cell.set_property("ellipsize", Pango.EllipsizeMode.MIDDLE)
        col.pack_start(cell, True)
        col.set_cell_data_func(cell, self._device_name_datafunc)
        # Axis count column.
        col = Gtk.TreeViewColumn(C_(
            "prefs: devices table: column header",
            # TRANSLATORS: Column's data is the number of axes (an integer)
            "Axes",
        ))
        col.set_min_width(30)
        col.set_resizable(True)
        col.set_expand(False)
        col.set_sizing(Gtk.TreeViewColumnSizing.AUTOSIZE)
        self._devices_view.append_column(col)
        cell = Gtk.CellRendererText()
        col.pack_start(cell, True)
        col.set_cell_data_func(cell, self._device_axes_datafunc)
        # Device type column.
        col = Gtk.TreeViewColumn(C_(
            "prefs: devices table: column header",
            # TRANSLATORS: Column shows type labels ("Touchscreen", "Pen" etc.)
            "Type",
        ))
        col.set_min_width(120)
        col.set_resizable(True)
        col.set_expand(False)
        col.set_sizing(Gtk.TreeViewColumnSizing.AUTOSIZE)
        self._devices_view.append_column(col)
        cell = Gtk.CellRendererText()
        cell.set_property("ellipsize", Pango.EllipsizeMode.END)
        col.pack_start(cell, True)
        col.set_cell_data_func(cell, self._device_type_datafunc)
        # Usage config value => string store (dropdowns)
        store = Gtk.ListStore(str, str)
        for conf_val in AllowedUsage.VALUES:
            string = AllowedUsage.DISPLAY_STRING[conf_val]
            store.append([conf_val, string])
        self._usage_store = store
        col = Gtk.TreeViewColumn(C_(
            "prefs: devices table: column header",
            # TRANSLATORS: Column's data is a dropdown allowing the allowed
            # TRANSLATORS: tasks for the row's device to be configured.
            u"Use for…",
        ))
        col.set_min_width(100)
        col.set_resizable(True)
        col.set_expand(False)
        self._devices_view.append_column(col)
        cell = Gtk.CellRendererCombo()
        cell.set_property("model", self._usage_store)
        cell.set_property("text-column", self._USAGE_STRING_COL)
        cell.set_property("mode", Gtk.CellRendererMode.EDITABLE)
        cell.set_property("editable", True)
        cell.set_property("has-entry", False)
        cell.set_property("ellipsize", Pango.EllipsizeMode.END)
        cell.connect("changed", self._usage_cell_changed_cb)
        col.pack_start(cell, True)
        col.set_cell_data_func(cell, self._device_usage_datafunc)
        # Scroll action config value => string store (dropdowns)
        store = Gtk.ListStore(str, str)
        for conf_val in ScrollAction.VALUES:
            string = ScrollAction.DISPLAY_STRING[conf_val]
            store.append([conf_val, string])
        self._scroll_store = store
        col = Gtk.TreeViewColumn(C_(
            "prefs: devices table: column header",
            # TRANSLATORS: Column's data is a dropdown for how the device's
            # TRANSLATORS: scroll wheel or scroll-gesture events are to be
            # TRANSLATORS: interpreted normally.
            u"Scroll…",
        ))
        col.set_min_width(100)
        col.set_resizable(True)
        col.set_expand(False)
        self._devices_view.append_column(col)
        cell = Gtk.CellRendererCombo()
        cell.set_property("model", self._scroll_store)
        # Fix: this combo renders the scroll store, so use the scroll
        # string-column constant. The previous code referenced
        # _USAGE_STRING_COL here, which only worked because both
        # constants happen to equal 1.
        cell.set_property("text-column", self._SCROLL_STRING_COL)
        cell.set_property("mode", Gtk.CellRendererMode.EDITABLE)
        cell.set_property("editable", True)
        cell.set_property("has-entry", False)
        cell.set_property("ellipsize", Pango.EllipsizeMode.END)
        cell.connect("changed", self._scroll_cell_changed_cb)
        col.pack_start(cell, True)
        col.set_cell_data_func(cell, self._device_scroll_datafunc)
        # Pretty borders
        view_scroll = Gtk.ScrolledWindow()
        view_scroll.set_shadow_type(Gtk.ShadowType.ETCHED_IN)
        pol = Gtk.PolicyType.AUTOMATIC
        view_scroll.set_policy(pol, pol)
        view_scroll.add(self._devices_view)
        view_scroll.set_hexpand(True)
        view_scroll.set_vexpand(True)
        self.attach(view_scroll, 0, 0, 1, 1)
        self._update_devices_store()
        self._monitor.devices_updated += self._update_devices_store

    ## Display and sort funcs

    def _device_name_datafunc(self, column, cell, model, iter_, *data):
        device = model.get_value(iter_, 0)
        cell.set_property("text", device.get_name())

    def _device_axes_datafunc(self, column, cell, model, iter_, *data):
        device = model.get_value(iter_, 0)
        n_axes = device.get_n_axes()
        cell.set_property("text", "%d" % (n_axes,))

    def _device_type_datafunc(self, column, cell, model, iter_, *data):
        device = model.get_value(iter_, 0)
        source = device.get_source()
        # Fall back to GDK's nick when we have no translated label.
        text = _DEVICE_TYPE_STRING.get(source, source.value_nick)
        cell.set_property("text", text)

    def _device_usage_datafunc(self, column, cell, model, iter_, *data):
        device = model.get_value(iter_, 0)
        settings = self._monitor.get_device_settings(device)
        if not settings:
            return
        text = AllowedUsage.DISPLAY_STRING[settings.usage]
        cell.set_property("text", text)

    def _device_scroll_datafunc(self, column, cell, model, iter_, *data):
        device = model.get_value(iter_, 0)
        settings = self._monitor.get_device_settings(device)
        if not settings:
            return
        text = ScrollAction.DISPLAY_STRING[settings.scroll]
        cell.set_property("text", text)

    ## Updates

    def _usage_cell_changed_cb(self, combo, device_path_str,
                               usage_iter, *etc):
        """Persist the newly chosen usage for the row's device."""
        config = self._usage_store.get_value(
            usage_iter,
            self._USAGE_CONFIG_COL,
        )
        device_iter = self._devices_store.get_iter(device_path_str)
        device = self._devices_store.get_value(device_iter, 0)
        settings = self._monitor.get_device_settings(device)
        if not settings:
            return
        settings.usage = config
        self._devices_view.columns_autosize()

    def _scroll_cell_changed_cb(self, conf_combo, device_path_str,
                                conf_iter, *etc):
        """Persist the newly chosen scroll action for the row's device."""
        conf_store = self._scroll_store
        conf_col = self._SCROLL_CONFIG_COL
        conf_value = conf_store.get_value(conf_iter, conf_col)
        device_store = self._devices_store
        device_iter = device_store.get_iter(device_path_str)
        device = device_store.get_value(device_iter, 0)
        settings = self._monitor.get_device_settings(device)
        if not settings:
            return
        settings.scroll = conf_value
        self._devices_view.columns_autosize()

    def _update_devices_store(self, *_ignored):
        """Repopulates the displayed list"""
        updated_list = list(self._monitor.get_devices())
        updated_list_map = dict(updated_list)
        paths_for_removal = []
        devices_retained = set()
        for row in self._devices_store:
            device, = row
            if device not in updated_list_map:
                paths_for_removal.append(row.path)
                continue
            devices_retained.add(device)
        for device, config in updated_list:
            if device in devices_retained:
                continue
            self._devices_store.append([device])
        # Remove bottom-up so the earlier paths stay valid.
        for unwanted_row_path in reversed(paths_for_removal):
            unwanted_row_iter = self._devices_store.get_iter(unwanted_row_path)
            self._devices_store.remove(unwanted_row_iter)
        self._devices_view.queue_draw()
## Helper funcs
def _device_prefs_key(device):
"""Returns the subkey to use in the app prefs for a device"""
source = device.get_source()
name = device.get_name()
n_axes = device.get_n_axes()
return u"%s:%s:%d" % (name, source.value_nick, n_axes)
def device_is_eraser(device):
    """Tests whether a device appears to be an eraser

    A device counts as an eraser if GDK reports it as one, or if the
    word "eraser" appears in its name (case-insensitively).

    :param Gdk.Device device: device to test, or None
    :rtype: bool
    """
    if device is None:
        return False
    if device.get_source() == Gdk.InputSource.ERASER:
        return True
    # Fix: the old pattern r'\<eraser\>' used GNU-regex word boundaries,
    # which Python's re module treats as literal "<" and ">" characters,
    # so the name check could never match. \b is Python's word boundary.
    if re.search(r'\beraser\b', device.get_name(), re.I):
        return True
    return False
## Testing

def _test():
    """Interactive UI testing for SettingsEditor and Monitor

    Shows a SettingsEditor backed by an app-less Monitor in a plain
    Gtk window, and prints the accumulated prefs dict on exit.
    """
    logging.basicConfig(level=logging.DEBUG)
    win = Gtk.Window()
    win.set_title("gui.device.SettingsEditor")
    win.set_default_size(500, 400)
    win.connect("destroy", Gtk.main_quit)
    # app=None makes Monitor keep its prefs in a local dict.
    monitor = Monitor(app=None)
    editor = SettingsEditor(monitor)
    win.add(editor)
    win.show_all()
    Gtk.main()
    print(monitor._prefs)


if __name__ == '__main__':
    _test()
|
repotvsupertuga/tvsupertuga.repository
|
plugin.video.youtube/resources/lib/youtube_plugin/kodion/items/audio_item.py
|
__author__ = 'bromix'
from .base_item import BaseItem
class AudioItem(BaseItem):
    """Listing/playback item for an audio stream, carrying music metadata.

    Thin accessor wrappers around private metadata fields; string fields
    are coerced with ``unicode()`` (this module targets Python 2).
    """

    def __init__(self, name, uri, image=u'', fanart=u''):
        BaseItem.__init__(self, name, uri, image, fanart)
        # All metadata starts unset, except the title which defaults
        # to the item's name.
        self._duration = None
        self._track_number = None
        self._year = None
        self._genre = None
        self._album = None
        self._artist = None
        self._title = name
        self._rating = None

    # -- textual metadata ----------------------------------------------

    def set_title(self, title):
        self._title = unicode(title)

    def get_title(self):
        return self._title

    def set_artist_name(self, artist_name):
        self._artist = unicode(artist_name)

    def get_artist_name(self):
        return self._artist

    def set_album_name(self, album_name):
        self._album = unicode(album_name)

    def get_album_name(self):
        return self._album

    def set_genre(self, genre):
        self._genre = unicode(genre)

    def get_genre(self):
        return self._genre

    # -- numeric metadata ----------------------------------------------

    def set_rating(self, rating):
        self._rating = float(rating)

    def get_rating(self):
        return self._rating

    def set_year(self, year):
        self._year = int(year)

    def set_year_from_datetime(self, date_time):
        self.set_year(date_time.year)

    def get_year(self):
        return self._year

    def set_track_number(self, track_number):
        self._track_number = int(track_number)

    def get_track_number(self):
        return self._track_number

    # -- duration (stored in whole seconds) ----------------------------

    def set_duration_from_milli_seconds(self, milli_seconds):
        self.set_duration_from_seconds(int(milli_seconds) / 1000)

    def set_duration_from_seconds(self, seconds):
        self._duration = int(seconds)

    def set_duration_from_minutes(self, minutes):
        self.set_duration_from_seconds(int(minutes) * 60)

    def get_duration(self):
        return self._duration
|
MarioVilas/secondlife-experiments
|
SimProxy/extract_xml.py
|
import os
import types
from sllib.LLSD import LLSD
# NOTE: Python 2 script (print statements, dict.has_key, types.DictType).
# Extracts every <llsd>...</llsd> document embedded in an HTTP capture
# file and writes each one to ./httpcap/<message>_<n>.xml.
try:
    os.makedirs('./httpcap')
except:
    pass
data = open('httpcap.txt','r').read()
c = 0
# Delimiters of an embedded LLSD document within the capture text.
btag = '<llsd>'
etag = '</llsd>'
##mbtag = '<key>message</key><string>'
##metag = '</string>'
b = data.find(btag)
mnames = {}
while b >= 0:
    # Slice out one <llsd>...</llsd> document, inclusive of the tags.
    e = data.find(etag, b) + len(etag)
    xml = data[b:e]
##    bm = xml.rfind(mbtag)
##    em = xml.find(metag, bm)
##    if bm >= 0 and em >= 0 and em >= bm:
##        bm = bm + len(mbtag)
##        m = xml[bm:em]
##        mnames[m] = None
##    else:
##        m = 'Unknown'
    ll = LLSD.fromstring(xml)
    # Default filename stem when the document carries no event messages.
    m = 'DATA'
    if type(ll) == types.DictType and ll.has_key('events'):
##        print ll
        # Use the last event's message name for the filename; collect
        # all names seen (mnames doubles as an ordered-less set).
        for msg in ll['events']:
            m = msg['message']
##            print m
            mnames[m] = None
    name = './httpcap/%s_%d.xml' % (m,c)
    try:
        open(name, 'w+').write(xml)
    except:
        # Dump the offending document before re-raising for diagnosis.
        print xml
        raise
    c += 1
    b = data.find(btag, e)
print mnames.keys()
|
ryanmiao/libvirt-test-API
|
repos/virconn/cpu_stats.py
|
#!/usr/bin/env python
# test libvirt cpu stats
import libvirt
from libvirt import libvirtError
from src import sharedmod
from utils import utils
# Mandatory and optional parameters for this test case.
required_params = ('cpuNum',)
optional_params = {'conn': '', }
# Shell command template pulling one "cpuN ..." line out of /proc/stat.
STATFILE = "/proc/stat"
GETCPUSTAT = "cat /proc/stat | grep cpu%s"
# Field positions of the jiffies counters within a /proc/stat cpu line
# (field 0 is the "cpuN" label itself).
USR_POS = 1
NI_POS = 2
SYS_POS = 3
IDLE_POS = 4
IOWAIT_POS = 5
IRQ_POS = 6
SOFTIRQ_POS = 7
def compare_result(dest, src, delta, logger):
    """Return True when *dest* lies within *delta* of *src*, else False.

    *logger* is unused but kept for signature compatibility with the
    call sites in this module.
    """
    return src - delta <= dest <= src + delta
def check_stat(cpu, stat, stat_type, logger):
    """ check cpu stat for cpu[cpunum]

    Compare *stat* (the libvirt-reported value, already scaled by the
    caller — presumably to 10 ms ticks; see cpu_stats()) against the
    matching /proc/stat fields for the given CPU, within a per-type
    tolerance *delta*.

    :param cpu: CPU number as a string, or "-1" for the aggregate line
    :param stat_type: one of 'kernel', 'idle', 'user', 'iowait'
    :returns: True when the values agree within the tolerance
    """
    delta = 0
    if cpu == "-1":
        # "-1" means the host total: grab the first (aggregate) "cpu" line.
        cmd = GETCPUSTAT % " | head -1"
        cpu = ""
    else:
        cmd = GETCPUSTAT % cpu
    status, out = utils.exec_cmd(cmd, shell=True)
    if status != 0:
        logger.error("Exec %s fails" % cmd)
        return False
    logger.debug("get cpu%s stats: %s" % (cpu, out))
    stats = out[0].split()
    logger.debug("cpu stats: %s" % stats)
    # /proc/stat groups fields differently from libvirt:
    # kernel = system + irq + softirq, user = user + nice.
    if stat_type == "kernel":
        target_stat = int(stats[SYS_POS]) + int(stats[IRQ_POS]) + \
            int(stats[SOFTIRQ_POS])
        delta = 1
    elif stat_type == "idle":
        target_stat = int(stats[IDLE_POS])
        delta = 10
    elif stat_type == "user":
        target_stat = int(stats[USR_POS]) + int(stats[NI_POS])
        delta = 2
    elif stat_type == "iowait":
        target_stat = int(stats[IOWAIT_POS])
        delta = 10
    else:
        logger.error("Unidentified type %s" % stat_type)
        return False
    if compare_result(stat, target_stat, delta, logger):
        logger.info("%s stat check success" % stat_type)
    else:
        logger.error("%s stat check failed" % stat_type)
        logger.error("%s stat is %d, should be %d" %
                     (stat_type, stat, target_stat))
        return False
    return True
def cpu_stats(params):
    """ test libvirt cpu stats

    Fetch node CPU statistics via conn.getCPUStats() and cross-check
    each value against /proc/stat via check_stat().

    :param params: test params dict; requires 'cpuNum' and 'logger',
                   and optionally 'conn' (a libvirt connection URI)
    :returns: 0 on success, 1 on failure
    """
    logger = params['logger']
    cpunum = int(params['cpuNum'])
    stat_types = ['kernel', 'idle', 'user', 'iowait']
    try:
        # get connection firstly.
        # If conn is not specified, use conn from sharedmod
        if 'conn' in params:
            conn = libvirt.open(params['conn'])
        else:
            conn = sharedmod.libvirtobj['conn']
        res = conn.getCPUStats(cpunum, 0)
        for s in stat_types:
            if s not in res:
                logger.error("%s is not the key" % s)
                return 1
            # getCPUStats() values are in nanoseconds; /proc/stat uses
            # 10 ms ticks, so scale by 10**7 before comparing.
            if not check_stat(str(cpunum), res[s] / 10000000, s, logger):
                return 1
    except libvirtError as e:
        # Fixes: use the `as` form (valid on Python 2.6+ and 3.x) rather
        # than the py2-only comma syntax, and supply both arguments to
        # the two-placeholder format string (the old code passed only
        # e.message, raising TypeError inside the handler).
        logger.error("API error message: %s, error code is %s" %
                     (e.get_error_message(), e.get_error_code()))
        return 1
    return 0
|
ShivamSarodia/ShivC
|
rules.py
|
"""
The symbols and rules for the CFG of C. I generated these myself by hand, so
they're probably not perfectly correct.
"""
from rules_obj import *
from lexer import *
import tokens
### Symbols ###

# Most symbols are either self-explanatory, or best understood by examining the
# rules below to see how they're used.

# The start symbol: an entire translation unit.
S = Symbol("S")
main_setup = Symbol("main_setup") #TODO: is this necessary?
# `statements` is a bunch of `statement`s
statements = Symbol("statements")
# `statement` is a single C statement, semicolon included
statement = Symbol("statement")
# a generic expression
E = Symbol("E")
# Pieces of a declaration like `int a, *b = 0;`.
declare_separator = Symbol("declare_separator")
declare_type = Symbol("declare_type")
declare_expression = Symbol("declare_expression");
# Pieces of an array initializer like `{1, 2, 3}`.
arr_start = Symbol("arr_start")
arr_end = Symbol("arr_end")
arr_list = Symbol("arr_list")
# Control-flow statement fragments.
if_start = Symbol("if_start");
if_statement = Symbol("if_statement");
else_statement = Symbol("else_statement");
while_start = Symbol("while_start")
while_statement = Symbol("while_statement")
for_start = Symbol("for_start")
for1 = Symbol("for1")
for2 = Symbol("for2")
for3 = Symbol("for3")
for_expr = Symbol("for_expr")
# Function declaration/definition/call fragments.
arg_start = Symbol("arg_start")
func_dec = Symbol("func_dec")
func_def = Symbol("func_def")
func_call_start = Symbol("func_call_start")
### Rules ###

# After adding a rule, make sure to add it to the rules list at the bottom!

# something that stands alone as a program, plus a function definition or
# declaration, can also stand alone as a program.
main_func_dec_cont = Rule(S, [S, func_dec])
main_func_def_cont = Rule(S, [S, func_def])
main_func_dec = Rule(S, [func_dec])
main_func_def = Rule(S, [func_def])
# make a `statements` symbol by extending another `statements` symbol
statements_cont = Rule(statements, [statements,
                                    statement])
# make a single `statement` symbol into a `statements` symbol
statements_end = Rule(statements, [statement])
# return statement
return_form = Rule(statement, [tokens.return_command,
                               E,
                               tokens.semicolon])
# a print statement
# The print statement is not valid C. I added it for ease of use, however, as
# I do not foresee this compiler being able to include stdio.h anytime soon.
print_form = Rule(statement, [tokens.print_command,
                              E,
                              tokens.semicolon])
# a declaration of the form int;
useless_declaration = Rule(statement, [Token("type"), tokens.semicolon])
# a declaration of the form `int a;` or `int a, b = 0;`
real_declaration = Rule(statement, [declare_expression, tokens.semicolon])
# the type part of a declaration, along with any pointers on the first variable
declare_type_base = Rule(declare_type, [Token("type")])
declare_type_cont = Rule(declare_type, [declare_type, tokens.aster])
# used to separate declarations. all these are declare_separators:
# ,
# ,*
# , **
#
declare_separator_base = Rule(declare_separator, [tokens.comma])
declare_separator_cont = Rule(declare_separator, [declare_separator, tokens.aster])
# the base of a declaration, like `int hello` or `int* hello`.
base_declare = Rule(declare_expression, [declare_type, Token("name")])
# a non-array declaration with an assignment, like `int hello = 4` or `int* hello = &p`.
assign_declare = Rule(declare_expression, [declare_expression, tokens.equal, E], 49)
# an array declaration with assignment, like `int hi[4] = {1, 2, 3, 4}`.
# Note--I imagine a better parser would catch things like `int hi = {1, 3}`.
# Mine, however, catches these errors at the code generation stage.
arr_assign_declare = Rule(declare_expression, [declare_expression, tokens.equal, arr_list], 49)
# Converts things like `int a, b` into a fresh declare_expression to chain declarations
cont_declare = Rule(declare_expression, [declare_expression, declare_separator, Token("name")])
# Defines `int a[5]` as a valid declare expression
array_num_declare = Rule(declare_expression, [declare_expression,
                                              tokens.open_sq_bracket,
                                              E,
                                              tokens.close_sq_bracket])
# Defines `int a[]` as a valid declare expression
array_nonum_declare = Rule(declare_expression, [declare_expression,
                                                tokens.open_sq_bracket,
                                                tokens.close_sq_bracket])
# Expression rules. The trailing integer argument, where present, is the
# rule's priority/precedence (higher binds tighter; see the E_equal note
# below for how relative priorities give right-associativity).
E_num = Rule(E, [Token("integer")])
E_parens = Rule(E, [tokens.open_paren,
                    E,
                    tokens.close_paren])
# Badly named--E_add can be binary addition or subtraction
E_add = Rule(E, [E,
                 Token("addop"),
                 E], 85)
E_mult = Rule(E, [E,
                  tokens.aster,
                  E], 90)
E_div = Rule(E, [E,
                 tokens.slash,
                 E], 90)
E_mod = Rule(E, [E,
                 tokens.percent,
                 E], 90)
E_boolean_and = Rule(E, [E,
                         tokens.logic_and,
                         E], 65)
E_boolean_or = Rule(E, [E,
                        tokens.logic_or,
                        E], 60)
E_eq_compare = Rule(E, [E,
                        Token("eq_compare"),
                        E], 70)
E_compare = Rule(E, [E,
                     Token("compare"),
                     E], 75)
# Again, badly named. E_neg can be either unary addition or subtraction
E_neg = Rule(E, [Token("addop"),
                 E], 95)
# Note this covers all of `a = 5`, `a *= 5`, `a /= 5`, etc.
# We give this rule a priority of 49, which is less than 50 (the priority) of
# the assignment symbols. This makes it right associative.
E_equal = Rule(E, [E,
                   Token("assignment"),
                   E], 49)
E_boolean_not = Rule(E, [tokens.logic_not, E], 95)
# Covers both a++ and a--
E_inc_after = Rule(E, [E,
                       Token("crement")], 100)
# Covers both ++a and --a
E_inc_before = Rule(E, [Token("crement"),
                        E], 95)
# NOTE(review): the naming here looks swapped — `*` (aster) normally
# dereferences a pointer and `&` (amper) takes an address; confirm
# against the code generator before relying on these names.
E_point = Rule(E, [tokens.aster, E], 95)
E_deref = Rule(E, [tokens.amper, E], 95)
# Calling a function like `f()`
E_func_noarg = Rule(E, [E, tokens.open_paren, tokens.close_paren])
# The start of a function call and first argument, like `f(1`
E_func_call_start = Rule(func_call_start, [E, tokens.open_paren, E], 0)
# Chaining more arguments onto the function call
E_func_call_cont = Rule(func_call_start, [func_call_start, tokens.comma, E], 0)
# Completing the function call
E_func_call_end = Rule(E, [func_call_start, tokens.close_paren])
# Array referencing, like `a[4]`
E_array = Rule(E, [E, tokens.open_sq_bracket, E, tokens.close_sq_bracket], 100)
E_var = Rule(E, [Token("name")])
# An expression used as a statement, like `f(x);`.
E_form = Rule(statement, [E, tokens.semicolon])
# We have to separate out the start so (E) doesn't reduce to E in `if(E)`
if_start_form = Rule(if_start, [tokens.if_keyword,
                                tokens.open_paren])
# an if statement like `if(E) {}`
if_form_brackets = Rule(if_statement, [if_start,
                                       E,
                                       tokens.close_paren,
                                       tokens.open_bracket,
                                       tokens.close_bracket])
# a one line if statement like `if(E) a = 5;`
# it's OK to use "statements" here because statement -> statements immediately,
# so then this rule will apply right away
if_form_oneline = Rule(if_statement, [if_start,
                                      E,
                                      tokens.close_paren,
                                      statements])
# the most common if form, like `if(E) {a = 5;}`
if_form_main = Rule(if_statement, [if_start,
                                   E,
                                   tokens.close_paren,
                                   tokens.open_bracket,
                                   statements,
                                   tokens.close_bracket])
# Same things, but for else
else_form_brackets = Rule(else_statement, [tokens.else_keyword,
                                           tokens.open_bracket,
                                           tokens.close_bracket])
else_form_oneline = Rule(else_statement, [tokens.else_keyword,
                                          statements])
else_form_main = Rule(else_statement, [tokens.else_keyword,
                                       tokens.open_bracket,
                                       statements,
                                       tokens.close_bracket])
# We use a priority here so if an "else" follows an "if_statement", the parser
# won't apply the if_form_general rule (instead of the correct ifelse_form_general)
if_form_general = Rule(statement, [if_statement], 200)
ifelse_form_general = Rule(statement, [if_statement, else_statement])
# Loop control statements.
break_form = Rule(statement, [tokens.break_keyword, tokens.semicolon])
cont_form = Rule(statement, [tokens.cont_keyword, tokens.semicolon])
# We have to separate out the start so (E) doesn't reduce to E
while_start_form = Rule(while_start, [tokens.while_keyword, tokens.open_paren])
# Same as if statement rules
while_form_brackets = Rule(statement, [while_start,
                                       E,
                                       tokens.close_paren,
                                       tokens.open_bracket,
                                       tokens.close_bracket])
while_form_oneline = Rule(statement, [while_start,
                                      E,
                                      tokens.close_paren,
                                      statements])
while_form_main = Rule(statement, [while_start,
                                   E,
                                   tokens.close_paren,
                                   tokens.open_bracket,
                                   statements,
                                   tokens.close_bracket])
# for statements
for_start_form = Rule(for_start, [tokens.for_keyword, tokens.open_paren])
# for1/for2 accumulate the init and condition clauses one at a time.
for1_form = Rule(for1, [for_start, statements])
# The `statements` here better have a tree of the form:
# statements -> statement -> E, semicolon
# A better parser would probably check this while parsing, but I check during
# code gen.
for2_form = Rule(for2, [for1, statements])
for_expr_form = Rule(for_expr, [for2, E, tokens.close_paren])
for_expr_form_empty = Rule(for_expr, [for2, tokens.close_paren])
# Same as if statement rules
for_form_empty = Rule(statement, [for_expr,
                                  tokens.semicolon])
for_form_brackets = Rule(statement, [for_expr,
                                     tokens.open_bracket,
                                     tokens.close_bracket])
for_form_oneline = Rule(statement, [for_expr,
                                    statements])
for_form_main = Rule(statement, [for_expr,
                                 tokens.open_bracket,
                                 statements,
                                 tokens.close_bracket])
# Array initializer with one element, like `{1}`
arr_list_one = Rule(arr_list, [tokens.open_bracket, E, tokens.close_bracket])
# Array initializer with no elements, like `{}`
arr_list_none = Rule(arr_list, [tokens.open_bracket, tokens.close_bracket])
# Start of array initializer and first element, like `{1,`
arr_list_start = Rule(arr_start, [tokens.open_bracket, E, tokens.comma])
# Continuing array initializer, like `{1, 2,`
arr_list_cont = Rule(arr_start, [arr_start, E, tokens.comma])
# Total array initializer, like `{1, 2, 3}`
arr_list_total = Rule(arr_list, [arr_start, arr_end])
# Array initializer end, like `3}`
arr_list_end = Rule(arr_end, [E, tokens.close_bracket])
# Argument list for defining/declaring functions
base_arg_form = Rule(arg_start, [declare_expression, # should have children [declare_type, name]
tokens.open_paren,
declare_expression])
cont_arg_form = Rule(arg_start, [arg_start,
tokens.comma,
declare_expression]) # should have kids [declare_type, name]
func_dec_form = Rule(func_dec, [arg_start, tokens.close_paren, tokens.semicolon])
func_def_form = Rule(func_def, [arg_start,
tokens.close_paren,
tokens.open_bracket,
statements,
tokens.close_bracket])
noarg_func_dec_form = Rule(func_dec, [declare_expression,
tokens.open_paren,
tokens.close_paren,
tokens.semicolon])
noarg_func_def_form = Rule(func_def, [declare_expression,
tokens.open_paren,
tokens.close_paren,
tokens.open_bracket,
statements,
tokens.close_bracket])
semicolon_form = Rule(statement, [tokens.semicolon])
# List of all the rules to apply. Applied in the listed order.
# In general, try to list rules above in the same order as they're listed here.
# NOTE(review): for_form_empty is defined above but never registered here, so
# a `for (...);` statement can never be reduced -- confirm whether that is
# intentional before adding it.
rules = [main_func_def_cont,
         main_func_dec_cont,
         main_func_def,
         main_func_dec,
         statements_cont,
         statements_end,
         return_form,
         print_form,
         useless_declaration,
         real_declaration,
         declare_type_base,
         declare_type_cont,
         declare_separator_base,
         declare_separator_cont,
         base_declare,
         assign_declare,
         arr_assign_declare,
         cont_declare,
         array_num_declare,
         array_nonum_declare,
         E_num,
         E_parens,
         E_add,
         E_mult,
         E_div,
         E_mod,
         E_boolean_and,
         E_boolean_or,
         E_eq_compare,
         E_compare,
         E_neg,
         E_equal,
         E_boolean_not,
         E_inc_after,
         E_inc_before,
         E_point,
         E_deref,
         E_func_noarg,
         E_func_call_start,
         E_func_call_cont,
         E_func_call_end,
         E_array,
         E_var,
         E_form,
         if_start_form,
         if_form_brackets,
         if_form_oneline,
         if_form_main,
         if_form_general,
         else_form_brackets,
         else_form_oneline,
         else_form_main,
         ifelse_form_general,
         break_form,
         cont_form,
         while_start_form,
         while_form_brackets,
         while_form_oneline,
         while_form_main,
         for_start_form,
         for1_form,
         for2_form,
         for_expr_form,
         for_expr_form_empty,
         for_form_brackets,
         for_form_oneline,
         for_form_main,
         arr_list_one,
         arr_list_none,
         arr_list_start,
         arr_list_cont,
         arr_list_total,
         arr_list_end,
         base_arg_form,
         cont_arg_form,
         func_dec_form,
         func_def_form,
         noarg_func_dec_form,
         noarg_func_def_form,
         semicolon_form]
|
jongyeob/swpy
|
swpy/backup/ace.py
|
'''
Created on 2014. 9. 26.
@author: jongyeob
'''
from __future__ import absolute_import
import sys
import logging
import re
from . import utils
from .utils import datetime as dt
from .utils import download as dl
# Root directory for downloaded data files.
DATA_DIR = 'data/'
# Module logger; level 0 defers the effective threshold to the root logger.
LOG = logging.getLogger(__name__); LOG.setLevel(0)
# Space-separated list of extension packages imported by initialize().
PACKAGES = ''
# Supported instrument names; index order matches INST_KEYS below.
INST_NAME = ['mag','swepam','sis','loc']
# Column keys of each instrument's data dictionary.
MAG_KEYS = ['datetime','status','bx','by','bz','bt','latitude','longitude']
SWEPAM_KEYS = ['datetime','status','density','speed','temperature']
SIS_KEYS = []  # no key schema: sis data is not parsed yet
LOC_KEYS = []  # no key schema: loc data is not parsed yet
INST_KEYS = [MAG_KEYS,SWEPAM_KEYS,SIS_KEYS,LOC_KEYS]
def initialize(config=None):
    '''
    Initialize module-level settings (DATA_DIR, PACKAGES, LOG) from *config*.

    :param config: configuration object providing ``set_section`` and
        ``load_ns``; a fresh ``Config()`` is created when omitted.
    '''
    global DATA_DIR, PACKAGES, LOG
    # Bug fix: the original default ``config=Config()`` was evaluated once at
    # import time and shared across every call; create it lazily instead.
    if config is None:
        config = Config()
    config.set_section(__name__)
    config.load_ns('DATA_DIR', globals())
    config.load_ns('PACKAGES', globals())
    # Bug fix: without the global declaration this assignment created a dead
    # local and never replaced the module logger.
    LOG = utils.get_logger()
    for pkg in PACKAGES.split():
        utils.import_all(pkg, globals())
def empty_data(instrument):
    '''
    Return a fresh, empty data dictionary for *instrument*.

    Instruments without a key schema ('sis', 'loc') yield None; unknown
    names raise ValueError (via list.index, as before).
    '''
    position = INST_NAME.index(instrument)
    key_lists = [MAG_KEYS, SWEPAM_KEYS, None, None]
    keys = key_lists[position]
    if keys is None:
        return None
    return {key: [] for key in keys}
def check_instrument(data):
    '''
    Check which instruments the given data dictionary can satisfy.

    :param dict data: Input data
    :return: (list) subset of INST_NAME whose full key schema is present
    '''
    inst = INST_NAME[:]
    for name, keys in zip(INST_NAME, INST_KEYS):
        # Instruments without a key schema can never be matched.
        if not keys:
            inst.remove(name)
            continue
        for key in keys:
            # Bug fix: dict.has_key() is Python-2-only; "in" works everywhere.
            if key not in data:
                inst.remove(name)
                break
    return inst
def load(start_date, instrument, end_date=''):
    '''
    Load files of instrument (mag,swepam) of ACE realtime, from start
    datetime to end datetime.

    :param string|datetime start_date: start date for searching
    :param string instrument: Instrument name [swepam,mag]
    :param string|datetime end_date: end date for searching; defaults to the
        start date when empty or None
    :return: dictionary for instrument
    :rtype: dict
    '''
    start_dt = dt.parse(start_date)
    end_dt = start_dt
    # Bug fix: the original tested ``end_date is not None``, which sent the
    # empty-string default straight into dt.parse(''); treat '' and None
    # both as "not given".
    if end_date:
        end_dt = dt.parse(end_date)
    data_total = empty_data(instrument)
    for t in dt.datetime_range(start_dt, end_dt, months=1):
        localfile = local_path(dt.tuples(t, 'date'), instrument)
        try:
            data = load_file(localfile, instrument)
        except Exception:  # narrowed from a bare except; still best-effort
            LOG.error("Data can not read - %s"%(localfile))
            continue
        for i in range(len(data['datetime'])):
            # NOTE(review): ``dt.parsing`` differs from the ``dt.parse`` used
            # above -- confirm it is the intended helper.
            if start_dt <= dt.parsing(data['datetime'][i]) <= end_dt:
                for key in data_total.keys():
                    data_total[key].append(data[key][i])
    return data_total
def load_file(filepath, instrument):
    '''Dispatch to the per-instrument loader; None for unsupported names.'''
    loaders = {'mag': load_mag, 'swepam': load_swepam}
    loader = loaders.get(instrument)
    if loader is None:
        return None
    return loader(filepath)
def load_mag(filepath):
    '''
    Load a file of magnetic parameters of 1hr averaged ACE realtime data.

    :param string filepath: local filepath
    :return: (dict) mag data keyed by MAG_KEYS, one list entry per data row
    '''
    lines = []
    with open(filepath) as f:
        lines = f.readlines()
    mag = empty_data('mag')
    for line in lines:
        ###
        # if mag['date'] is None:
        #     date = re.match(':Product: (\d+)_ace_(\S+)_1h.txt',line)
        #     if(date is not None):
        #         mag['date'] = date.group(1)[:4]+'-'+date.group(1)[-2:]
        #     continue
        ###
        # Data rows: four date/time fields, two skipped numeric columns,
        # then the values lining up with MAG_KEYS[1:].
        data = re.match('\A(\d+)\s+(\d+)\s+(\d+)\s+(\d+)\s+\d+\s+\d+\s+(\d+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+',line)
        if(data is not None):
            # Groups 1-3 are year/month/day; group 4 is HHMM.
            datetime_string = "%4s-%02s-%02s"%data.groups()[0:3] + " %2s:%2s:00"%(data.group(4)[0:2],data.group(4)[2:4])
            mag['datetime'].append(datetime_string)
            # Groups 5.. map onto MAG_KEYS[1:] in order (status..longitude).
            i = 5
            for key in MAG_KEYS[1:]:
                mag[key].append(data.group(i))
                i = i + 1
    return mag
def load_swepam(filepath):
    '''
    Load a file of solar wind parameters of 1hr averaged ACE realtime data.

    :param string filepath: local filepath
    :return: (dict) swepam data keyed by SWEPAM_KEYS, one list entry per row
    '''
    lines = []
    with open(filepath) as f:
        lines = f.readlines()
    swepam = empty_data('swepam')
    for line in lines:
        ###
        # if item['date'] is None:
        #     date = re.match(':Product: (\d+)_ace_(\S+)_1h.txt',line)
        #     if(date is not None):
        #         item['date'] = date.group(1)[:4]+'-'+date.group(1)[-2:]
        #     continue
        ###
        # Data rows: four date/time fields, two skipped numeric columns,
        # then the values lining up with SWEPAM_KEYS[1:].
        data = re.match('\A(\d+)\s+(\d+)\s+(\d+)\s+(\d+)\s+\d+\s+\d+\s+(\d+)\s+(\S+)\s+(\S+)\s+(\S+)',line)
        if(data is not None):
            # NOTE(review): "%2s" does not zero-pad month/day here, while
            # load_mag() uses "%02s" -- confirm which format downstream wants.
            datetime_string = "%4s-%2s-%2s"%data.groups()[0:3] + " %2s:%2s:00"%(data.group(4)[0:2],data.group(4)[2:4])
            swepam['datetime'].append(datetime_string)
            i = 5
            for key in SWEPAM_KEYS[1:]:
                swepam[key].append(data.group(i))
                i = i + 1
    return(swepam)
def local_path(date, inst):
    '''
    Build the local file path of one monthly data file.

    :param tuple date: (year, month, day) tuple
    :param string inst: instrument name
    :return: file path
    '''
    yyyy, mm, _ = date
    filename = '%4d%02d_ace_%s_1h.txt' % (yyyy, mm, inst)
    localfile = '%sace_rt1h/%04d/%s' % (DATA_DIR, yyyy, filename)
    LOG.debug("local file : %s"%(localfile))
    return localfile
def remote_path(date, inst):
    '''
    Build the SWPC download URL of one monthly data file.

    :param tuple date: (year, month, day) tuple
    :param string inst: instrument name
    :return: full HTTP URL
    '''
    yyyy, mm, _ = date
    filename = '%4d%02d_ace_%s_1h.txt' % (yyyy, mm, inst)
    base = 'http://www.swpc.noaa.gov' + '/ftpdir/lists/ace2'
    return base + '/' + filename
def download_file(date, inst, filepath='', overwrite=False):
    '''
    Download ACE Realtime 1h average data.

    :param datetime date: Datetime
    :param string inst: Instrument name
    :param string filepath: optional local target path; derived from *date*
        and *inst* when empty
    :param bool overwrite: overwrite an existing local file when True
    :return: Downloaded path
    '''
    date_tuple = dt.tuples(date, 'date')
    f = remote_path(date_tuple, inst)
    if filepath == '':
        # Bug fix: local_path() expects a (y, m, d) tuple, but the original
        # passed the raw datetime through.
        filepath = local_path(date_tuple, inst)
    LOG.debug("Download start : %s"%(f))
    rv = dl.download_http_file(f, filepath, overwrite=overwrite)
    return rv
def download(start_date, instrument, end_date=None, overwrite=False):
    '''
    Download files of instrument of ACE realtime, from start_datetime to
    end_datetime.

    :param string|datetime start_date: start date for searching
    :param string instrument: Instrument name [swepam,mag]
    :param string|datetime end_date: end date for searching
    :param bool overwrite: overwrite existing local files when True

    NOTE(review): the original docstring promised "(list) file list" but
    nothing is returned -- confirm the intended contract.
    '''
    start_dt = dt.parse(start_date)
    end_dt = start_dt
    if end_date is not None:
        end_dt = dt.parse(end_date)
    for t in dt.datetime_range(start_dt, end_dt, months=1):
        localfile = local_path(dt.tuples(t, 'date'), instrument)
        # Bug fix: ``rv`` was unbound (NameError) whenever download_file()
        # raised; treat an exception as a failed download.
        rv = False
        try:
            rv = download_file(t, instrument, localfile, overwrite=overwrite)
        except Exception as err:
            LOG.error(str(err))
        if rv == False:
            LOG.error("Download failed : %s"%(localfile))
if __name__ == '__main__':
    # Manual smoke test. NOTE: Python-2-only syntax (print statements and
    # leading-zero integer literals) -- this block fails to even parse on
    # Python 3.
    logging.basicConfig(level=0)
    # NOTE(review): ``da`` appears unused -- imported for side effects?
    from swpy.backup import _data as da
    start = (2014,01,01)
    end = (2014,01,02)
    print local_path(start, 'mag')
    print local_path(start, 'mag')
    download(start, 'mag',end_date = end)
    download(start, 'swepam',end_date = end)
    print load(start,'mag',end_date=end)
    print load(start,'swepam',end_date=end)
|
facebookexperimental/eden
|
eden/scm/tests/test-fb-hgext-diff-since-last-submit-t.py
|
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2.
from __future__ import absolute_import
from testutil.dott import feature, sh, testtmp # noqa: F401
# Load extensions: a mocked Phabricator arcconfig plus the arcdiff extension
# under test.
(
    sh % "cat"
    << r"""
[extensions]
arcconfig=$TESTDIR/../edenscm/hgext/extlib/phabricator/arcconfig.py
arcdiff=
"""
    >> "$HGRCPATH"
)

# Diff with no revision
sh % "hg init repo"
sh % "cd repo"
sh % "touch foo"
sh % "hg add foo"
sh % "hg ci -qm 'No rev'"
sh % "hg diff --since-last-submit" == r"""
abort: local changeset is not associated with a differential revision
[255]"""
sh % "hg log -r 'lastsubmitted(.)' -T '{node} {desc}\\n'" == r"""
abort: local changeset is not associated with a differential revision
[255]"""

# Fake a diff
sh % "echo bleet" > "foo"
sh % "hg ci -qm 'Differential Revision: https://phabricator.fb.com/D1'"
sh % "hg diff --since-last-submit" == r"""
abort: no .arcconfig found
[255]"""
sh % "hg log -r 'lastsubmitted(.)' -T '{node} {desc}\\n'" == r"""
abort: no .arcconfig found
[255]"""

# Prep configuration
sh % "echo '{}'" > ".arcrc"
sh % 'echo \'{"config" : {"default" : "https://a.com/api"}, "hosts" : {"https://a.com/api/" : { "user" : "testuser", "oauth" : "garbage_cert"}}}\'' > ".arcconfig"

# Now progressively test the response handling for variations of missing data
sh % "cat" << r"""
[{}]
""" > "$TESTTMP/mockduit"
sh % "'HG_ARC_CONDUIT_MOCK=$TESTTMP/mockduit' hg diff --since-last-submit" == r"""
Error calling graphql: Unexpected graphql response format
abort: unable to determine previous changeset hash
[255]"""
sh % "'HG_ARC_CONDUIT_MOCK=$TESTTMP/mockduit' hg log -r 'lastsubmitted(.)' -T '{node} {desc}\\n'" == r"""
Error calling graphql: Unexpected graphql response format
abort: unable to determine previous changeset hash
[255]"""

# A response with no commit info at all.
sh % "cat" << r"""
[{"data": {"query": [{"results": {"nodes": [{
"number": 1,
"diff_status_name": "Needs Review",
"differential_diffs": {"count": 3},
"is_landing": false,
"land_job_status": "NO_LAND_RUNNING",
"needs_final_review_status": "NOT_NEEDED",
"created_time": 123,
"updated_time": 222
}]}}]}}]
""" > "$TESTTMP/mockduit"
sh % "'HG_ARC_CONDUIT_MOCK=$TESTTMP/mockduit' hg diff --since-last-submit" == r"""
abort: unable to determine previous changeset hash
[255]"""
sh % "'HG_ARC_CONDUIT_MOCK=$TESTTMP/mockduit' hg log -r 'lastsubmitted(.)' -T '{node} {desc}\\n'" == r"""
abort: unable to determine previous changeset hash
[255]"""

# A response with no differential_diffs entry either.
sh % "cat" << r"""
[{"data": {"query": [{"results": {"nodes": [{
"number": 1,
"diff_status_name": "Needs Review",
"is_landing": false,
"land_job_status": "NO_LAND_RUNNING",
"needs_final_review_status": "NOT_NEEDED",
"created_time": 123,
"updated_time": 222
}]}}]}}]
""" > "$TESTTMP/mockduit"
sh % "'HG_ARC_CONDUIT_MOCK=$TESTTMP/mockduit' hg diff --since-last-submit" == r"""
abort: unable to determine previous changeset hash
[255]"""
sh % "'HG_ARC_CONDUIT_MOCK=$TESTTMP/mockduit' hg log -r 'lastsubmitted(.)' -T '{node} {desc}\\n'" == r"""
abort: unable to determine previous changeset hash
[255]"""

# This is the case when the diff is up to date with the current commit;
# there is no diff since what was landed.
sh % "cat" << r"""
[{"data": {"query": [{"results": {"nodes": [{
"number": 1,
"diff_status_name": "Needs Review",
"latest_active_diff": {
"local_commit_info": {
"nodes": [
{"property_value": "{\"lolwut\": {\"time\": 0, \"commit\": \"2e6531b7dada2a3e5638e136de05f51e94a427f4\"}}"}
]
}
},
"differential_diffs": {"count": 1},
"is_landing": false,
"land_job_status": "NO_LAND_RUNNING",
"needs_final_review_status": "NOT_NEEDED",
"created_time": 123,
"updated_time": 222
}]}}]}}]
""" > "$TESTTMP/mockduit"
sh % "'HG_ARC_CONDUIT_MOCK=$TESTTMP/mockduit' hg diff --since-last-submit"
sh % "'HG_ARC_CONDUIT_MOCK=$TESTTMP/mockduit' hg log -r 'lastsubmitted(.)' -T '{node} {desc}\\n'" == "2e6531b7dada2a3e5638e136de05f51e94a427f4 Differential Revision: https://phabricator.fb.com/D1"

# This is the case when the diff points at our parent commit, we expect to
# see the bleet text show up. There's a fake hash that I've injected into
# the commit list returned from our mocked phabricator; it is present to
# assert that we order the commits consistently based on the time field.
sh % "cat" << r"""
[{"data": {"query": [{"results": {"nodes": [{
"number": 1,
"diff_status_name": "Needs Review",
"latest_active_diff": {
"local_commit_info": {
"nodes": [
{"property_value": "{\"lolwut\": {\"time\": 0, \"commit\": \"88dd5a13bf28b99853a24bddfc93d4c44e07c6bd\"}}"}
]
}
},
"differential_diffs": {"count": 1},
"is_landing": false,
"land_job_status": "NO_LAND_RUNNING",
"needs_final_review_status": "NOT_NEEDED",
"created_time": 123,
"updated_time": 222
}]}}]}}]
""" > "$TESTTMP/mockduit"
sh % "'HG_ARC_CONDUIT_MOCK=$TESTTMP/mockduit' hg diff --since-last-submit --nodates" == r"""
diff -r 88dd5a13bf28 -r 2e6531b7dada foo
--- a/foo
+++ b/foo
@@ -0,0 +1,1 @@
+bleet"""
sh % "'HG_ARC_CONDUIT_MOCK=$TESTTMP/mockduit' hg log -r 'lastsubmitted(.)' -T '{node} {desc}\\n'" == "88dd5a13bf28b99853a24bddfc93d4c44e07c6bd No rev"
sh % "'HG_ARC_CONDUIT_MOCK=$TESTTMP/mockduit' hg diff --since-last-submit-2o" == r"""
Phabricator rev: 88dd5a13bf28b99853a24bddfc93d4c44e07c6bd
Local rev: 2e6531b7dada2a3e5638e136de05f51e94a427f4 (.)
Changed: foo
| ...
| +bleet"""

# Make a new commit on top, and then use -r to look at the previous commit
sh % "echo other" > "foo"
sh % "hg commit -m 'Other commmit'"
sh % "'HG_ARC_CONDUIT_MOCK=$TESTTMP/mockduit' hg diff --since-last-submit --nodates -r 2e6531b" == r"""
diff -r 88dd5a13bf28 -r 2e6531b7dada foo
--- a/foo
+++ b/foo
@@ -0,0 +1,1 @@
+bleet"""
sh % "'HG_ARC_CONDUIT_MOCK=$TESTTMP/mockduit' hg log -r 'lastsubmitted(2e6531b)' -T '{node} {desc}\\n'" == "88dd5a13bf28b99853a24bddfc93d4c44e07c6bd No rev"
|
mkoura/dump2polarion
|
dump2polarion/exporters/transform.py
|
"""Helper functions for transforming results."""
import hashlib
import logging
import os
import re
import urllib.parse
from typing import Optional
from docutils.core import publish_parts
from dump2polarion.exporters.verdicts import Verdicts
# pylint: disable=invalid-name
logger = logging.getLogger(__name__)  # module-level logger

# Matches a pytest parametrization suffix such as "[param1-param2]".
TEST_PARAM_RE = re.compile(r"\[.*\]")
def only_passed_and_wait(result):
    """Return the result when its verdict is PASS or WAIT, else None."""
    verdict = result.get("verdict", "").strip().lower()
    accepted_verdicts = Verdicts.PASS + Verdicts.WAIT
    return result if verdict in accepted_verdicts else None
def insert_source_info(result):
    """Set a "Source: <source>/<job>/<run>" comment when one is missing."""
    # An existing comment always wins.
    if result.get("comment"):
        return
    parts = [result.get("source"), result.get("job_name"), result.get("run")]
    # All three pieces are required to describe the source.
    if all(parts):
        result["comment"] = "Source: {}".format("/".join(parts))
def setup_parametrization(result, parametrize):
    """Strip or drop parametrization data according to *parametrize*."""
    if not parametrize:
        # Parametrization disabled: discard any recorded parameters.
        result.pop("params", None)
        return
    title = result.get("title")
    if not title:
        return
    # Remove the "[...]" parameter suffix from the title, and keep "id"
    # in sync when it mirrored the original title.
    stripped_title = TEST_PARAM_RE.sub("", title)
    result["title"] = stripped_title
    if result.get("id") == title:
        result["id"] = stripped_title
def include_class_in_title(result):
    """Make sure that the test class is included in "title".

    Applies only to titles derived from test function names, e.g.
    "test_power_parent_service" -> "TestServiceRESTAPI.test_power_parent_service"

    >>> result = {"title": "test_foo", "id": "test_foo", "classname": "foo.bar.baz.TestFoo",
    ...           "file": "foo/bar/baz.py"}
    >>> include_class_in_title(result)
    >>> str(result.get("title"))
    'TestFoo.test_foo'
    >>> str(result.get("id"))
    'TestFoo.test_foo'
    >>> result.get("classname")

    >>> result = {"title": "some title", "id": "test_foo", "classname": "foo.bar.baz.TestFoo",
    ...           "file": "foo/bar/baz.py"}
    >>> include_class_in_title(result)
    >>> str(result.get("title"))
    'some title'
    >>> str(result.get("id"))
    'test_foo'
    """
    classname = result.get("classname", "")
    if not classname:
        return
    filepath = result.get("file", "")
    title = result.get("title")
    looks_derived = bool(title) and title.startswith("test_")
    if looks_derived and "/" in filepath and "." in classname:
        module_name = filepath.split("/")[-1].replace(".py", "")
        class_part = classname.split(".")[-1]
        # Only prefix when the last classname part is a real class (not the
        # file name) and is not already present in the title.
        if module_name != class_part and class_part not in title:
            new_title = "{}.{}".format(class_part, title)
            result["title"] = new_title
            # Update also the id if it was identical to the original title.
            if result.get("id") == title:
                result["id"] = new_title
    # We don't need to pass classnames downstream.
    del result["classname"]
def gen_unique_id(string):
    """Generate a stable 32-character id from *string* (SHA-1 prefix).

    >>> gen_unique_id("vmaas_TestClass.test_name")
    '5acc5dc795a620c6b4491b681e5da39c'
    """
    digest = hashlib.sha1(string.encode("utf-8")).hexdigest()
    return digest[:32]
def get_testcase_id(testcase, append_str):
    """Return new test case ID.

    Keeps an existing non-test-derived id; otherwise derives one from the
    title prefixed with *append_str*.

    >>> get_testcase_id({"title": "TestClass.test_name"}, "vmaas_")
    '5acc5dc795a620c6b4491b681e5da39c'
    >>> get_testcase_id({"title": "TestClass.test_name", "id": "TestClass.test_name"}, "vmaas_")
    '5acc5dc795a620c6b4491b681e5da39c'
    >>> get_testcase_id({"title": "TestClass.test_name", "id": "test_name"}, "vmaas_")
    '5acc5dc795a620c6b4491b681e5da39c'
    >>> get_testcase_id({"title": "some title", "id": "TestClass.test_name"}, "vmaas_")
    '2ea7695b73763331f8a0c4aec75362b8'
    >>> str(get_testcase_id({"title": "some title", "id": "some_id"}, "vmaas_"))
    'some_id'
    """
    testcase_id = testcase.get("id")
    derived_from_test = not testcase_id or testcase_id.lower().startswith("test")
    if derived_from_test:
        return gen_unique_id("{}{}".format(append_str, testcase.get("title")))
    return testcase_id
def parse_rst_description(testcase):
    """Render the RST "description" of *testcase* into HTML, in place."""
    description = testcase.get("description")
    if not description:
        return
    try:
        with open(os.devnull, "w") as devnull:
            overrides = {
                "report_level": 2,
                "halt_level": 2,
                "warning_stream": devnull,
            }
            parts = publish_parts(
                description, writer_name="html", settings_overrides=overrides
            )
        testcase["description"] = parts["html_body"]
    # pylint: disable=broad-except
    except Exception as exp:
        # Log against the most specific identifier available.
        testcase_id = testcase.get("nodeid") or testcase.get("id") or testcase.get("title")
        logger.error("%s: description: %s", str(exp), testcase_id)
def preformat_plain_description(testcase):
    """Wrap the plain-text description in <pre>, dedenting pytest docstrings."""
    description = testcase.get("description")
    if not description:
        return
    # Naive dedent for pytest docstrings: class-based tests are indented
    # eight spaces, plain test functions four.
    nodeid = testcase.get("nodeid") or ""
    if "::Test" in nodeid:
        indent = " " * 8
    elif "::test_" in nodeid:
        indent = " " * 4
    else:
        indent = None
    if indent:
        dedented = [
            line.replace(indent, "", 1) if line.startswith(indent) else line
            for line in description.split("\n")
        ]
        description = "\n".join(dedented)
    testcase["description"] = "<pre>\n{}\n</pre>".format(description)
def add_unique_runid(testcase, run_id=None):
    """Append an invisible, run-unique ``<br>`` marker to the description.

    The `run_id` makes the descriptions unique between imports and forces
    Polarion to update every testcase every time.
    """
    visible_text = testcase.get("description") or "empty-description-placeholder"
    # Fall back to a per-process value when no explicit run id is given.
    marker = run_id or id(add_unique_runid)
    testcase["description"] = '{visible}<br id="{invisible}"/>'.format(
        visible=visible_text, invisible=marker
    )
def get_full_repo_address(repo_address: Optional[str]):
    """Make sure the repo address is a complete "blob" path with trailing /.

    >>> get_full_repo_address("https://gitlab.com/somerepo")
    'https://gitlab.com/somerepo/blob/master/'
    >>> get_full_repo_address("https://github.com/otherrepo/blob/branch/")
    'https://github.com/otherrepo/blob/branch/'
    >>> get_full_repo_address(None)
    """
    if not repo_address:
        return None
    address = repo_address
    if "/blob/" not in address:
        # the master here should probably link the latest "commit" eventually
        address = "{}/blob/master".format(address)
    # Normalize to exactly one trailing slash.
    return "{}/".format(address.rstrip("/ "))
def fill_automation_repo(repo_address: Optional[str], testcase: dict) -> dict:
    """Prefix a relative "automation_script" with the repo address."""
    script = testcase.get("automation_script")
    if not script:
        return testcase
    if not repo_address:
        # Without a repo address a relative script path cannot be resolved.
        del testcase["automation_script"]
        return testcase
    if not script.startswith("http"):
        testcase["automation_script"] = urllib.parse.urljoin(repo_address, script)
    return testcase
def add_automation_link(testcase):
    """Append an HTML "Test Source" link to the test description."""
    script = testcase.get("automation_script")
    if script:
        link = '<a href="{}">Test Source</a>'.format(script)
        existing = testcase.get("description") or ""
        testcase["description"] = "{}<br/>{}".format(existing, link)
    return testcase
|
julcollas/django-smokeping
|
smokeping/templatetags/repeat.py
|
from django import template
register = template.Library()
class RepeatNode(template.Node):
    """Template node that renders its contents *count* times.

    ``count`` is resolved against the rendering context, so both literal
    integers and context variables are accepted.
    """

    def __init__(self, nodelist, count):
        self.nodelist = nodelist
        self.count = template.Variable(count)

    def render(self, context):
        output = self.nodelist.render(context)
        # Bug fix: the original multiplied by count + 1, emitting one copy
        # too many -- the tag documentation in this module promises that
        # {% repeat 3 %}foo{% endrepeat %} yields exactly "foofoofoo".
        return output * int(self.count.resolve(context))
def repeat(parser, token):
    """
    Repeats the containing text a certain number of times.

    Requires a single argument, an integer, to indicate the number of times to
    repeat the enclosing content.

    Example::

        {% repeat 3 %}foo{% endrepeat %}

    Yields::

        foofoofoo
    """
    bits = token.split_contents()
    # Exactly one argument (the repetition count) is expected.
    if len(bits) != 2:
        raise template.TemplateSyntaxError('%r tag requires 1 argument.' % bits[0])
    nodelist = parser.parse(('endrepeat',))
    parser.delete_first_token()
    return RepeatNode(nodelist, bits[1])
repeat = register.tag(repeat)
|
has2k1/plotnine
|
plotnine/stats/stat_qq.py
|
import numpy as np
import pandas as pd
from scipy.stats.mstats import plotting_positions
from ..mapping.evaluation import after_stat
from ..doctools import document
from ..exceptions import PlotnineError
from .distributions import get_continuous_distribution
from .stat import stat
# Note: distribution should be a name from scipy.stat.distribution
@document
class stat_qq(stat):
    """
    Calculation for quantile-quantile plot

    {usage}

    Parameters
    ----------
    {common_parameters}
    distribution : str (default: norm)
        Distribution or distribution function name. The default is
        *norm* for a normal probability plot. Objects that look enough
        like a stats.distributions instance (i.e. they have a ppf
        method) are also accepted. See :mod:`scipy stats <scipy.stats>`
        for available distributions.
    dparams : dict
        Distribution-specific shape parameters (shape parameters plus
        location and scale).
    quantiles : array_like, optional
        Probability points at which to calculate the theoretical
        quantile values. If provided, must be the same number as
        the sample data points. The default is to use calculated
        theoretical points; use ``alpha_beta`` to control how
        these points are generated.
    alpha_beta : tuple
        Parameter values to use when calculating the quantiles.
        Default is :py:`(3/8, 3/8)`.

    See Also
    --------
    scipy.stats.mstats.plotting_positions : Uses ``alpha_beta``
        to calculate the quantiles.
    """
    _aesthetics_doc = """
    {aesthetics_table}
    .. rubric:: Options for computed aesthetics
    ::
    'theoretical' # theoretical quantiles
    'sample' # sample quantiles
    """
    REQUIRED_AES = {'sample'}
    DEFAULT_AES = {'x': after_stat('theoretical'), 'y': after_stat('sample')}
    DEFAULT_PARAMS = {'geom': 'qq', 'position': 'identity',
                      'na_rm': False,
                      'distribution': 'norm', 'dparams': (),
                      'quantiles': None, 'alpha_beta': (3/8, 3/8)}

    @classmethod
    def compute_group(cls, data, scales, **params):
        # Ordered sample values become the "sample" coordinates.
        sample = data['sample'].sort_values().values
        alpha, beta = params['alpha_beta']
        quantiles = params['quantiles']
        if quantiles is None:
            # Default probability points from the (alpha, beta) settings.
            quantiles = plotting_positions(sample, alpha, beta)
        elif len(quantiles) != len(sample):
            raise PlotnineError(
                "The number of quantile values is not the same as "
                "the number of sample values.")
        quantiles = np.asarray(quantiles)
        # Theoretical quantiles come from the inverse CDF (ppf) of the
        # requested distribution.
        cdist = get_continuous_distribution(params['distribution'])
        theoretical = cdist.ppf(quantiles, *params['dparams'])
        return pd.DataFrame({'sample': sample,
                             'theoretical': theoretical})
|
PuZheng/cloud-dashing
|
cloud_dashing/default_settings.py
|
# -*- coding: UTF-8 -*-
"""
this is the default settings, don't insert into your customized settings!
"""
DEBUG = True
TESTING = True
SECRET_KEY = "5L)0K%,i.;*i/s("
SECURITY_SALT = "sleiuyyao"
# DB config
SQLALCHEMY_DATABASE_URI = "sqlite:///dev.db"
SQLALCHEMY_ECHO = True
UPLOADS_DEFAULT_DEST = 'uploads'
LOG_FILE = 'log.txt'
ERROR_LOG_RECIPIENTS = []
# Flask-Mail related configuration, refer to
# `http://pythonhosted.org/flask-mail/#configuring-flask-mail`
MAIL_SERVER = 'smtp.foo.com'
MAIL_USERNAME = 'username'
MAIL_PASSWORD = 'password'
MAIL_DEFAULT_SENDER = 'user@foo.com'
FREEZER_RELATIVE_URLS = False
|
estnltk/estnltk
|
estnltk/vabamorf/tests/test_disambiguate.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, print_function, absolute_import
import unittest
from ..morf import analyze, disambiguate
# EINO SANTANEN. Muodon vanhimmat
# http://luulet6lgendus.blogspot.com/
# Poem text used as disambiguation test input; split('\n') yields one line
# per list element.
sentences = '''KÕIGE VANEM MUDEL
Pimedas luusivad robotid,
originaalsed tšehhi robotid kahekümnendatest.
Robota! kisendavad nad, uhked originaalsed robotid,
hüüdes iseenda nime.
Robota! möirgavad nad, naftasegused elukad,
hiiglase vaimusünnitised, robotid:
kurvameelsetena kauguses,
ebamäärastena kauguses,
mattudes vastuoludesse,
muutudes peaaegu julmaks oma õiglusejanus.
Robota! Kui päike pageb monoliitide kohalt,
tähistavad nad vägisi
öö salajast geomeetriat.
Õudne on inimesel vaadata
neid metsikuid mudeleid.
Kuuntele, romantiikkaa, 2002'''.split('\n')
class TestDisambiguator(unittest.TestCase):
    """Compare the standalone disambiguate() function against analyze()'s
    built-in disambiguate=True mode. Both must produce identical output."""

    def test_disambiguator(self):
        for line in sentences:
            builtin_result = analyze(line)
            separate_result = disambiguate(analyze(line, disambiguate=False))
            self.assertListEqual(builtin_result, separate_result)
|
aheadley/spoke
|
spoke.py
|
#!/usr/bin/env python
"""spoke -- Git plugin for GitHub integration
Copyright (C) 2012 Alex Headley <aheadley@waysaboutstuff.com>
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
import argparse
import os
from pprint import pprint
import functools
import inspect
import textwrap
import tempfile
import time
import subprocess
import git
import pygithub3
def guess_type(obj):
    """Best-effort argparse type for *obj*.

    Returns int/str/bool directly, the element type of a homogeneous
    list/tuple of those, and str as the catch-all fallback.
    """
    simple_types = (int, str, bool)
    kind = type(obj)
    if kind in simple_types:
        return kind
    if kind in (list, tuple) and len(obj):
        element_type = type(obj[0])
        homogeneous = all(type(item) == element_type for item in obj[1:])
        if element_type in simple_types and homogeneous:
            return element_type
    return str
def guess_action(obj):
    """Map a default value to an argparse action: bools toggle, rest store."""
    if guess_type(obj) is bool:
        # A truthy default means the flag switches it off, and vice versa.
        return 'store_false' if obj else 'store_true'
    return 'store'
def guess_nargs(obj):
    """Guess argparse nargs: 0 for flags, 1 for scalars, '+' for sequences."""
    if guess_type(obj) == bool:
        return 0
    try:
        len(obj)
    except TypeError:
        # Not sized: a single scalar value.
        return 1
    return '+'
def get_console_size():
    # Query the controlling terminal via `stty size`; yields (rows, cols).
    # NOTE(review): fails when stdout is not a tty, and on Python 3 map()
    # returns an iterator -- confirm Python-2-only usage.
    with os.popen('stty size', 'r') as p:
        return map(int, p.read().strip().split())
class ArgFunc(object):
    """Builds argparse arguments from metadata attached to functions.

    ``define_args``/``auto_define_args`` stash a per-argument attribute dict
    on a function as ``_argfunc_attrs``; ``add_func``/``add_obj`` later turn
    those dicts into ``parser.add_argument()`` calls.

    NOTE: this module is Python 2 (``iteritems`` is used throughout).
    """

    @staticmethod
    def define_args(**kwargs):
        """Decorator: explicitly declare argparse attrs for each argument."""
        def wrapper(func):
            for (arg, attrs) in kwargs.iteritems():
                # Arguments with a default become optional --flag-style args.
                if 'default' in attrs and 'name' not in attrs:
                    attrs['name'] = '--' + arg.replace('_', '-')
                if 'dest' not in attrs and 'name' in attrs:
                    attrs['dest'] = arg
            func._argfunc_attrs = kwargs
            return func
        return wrapper

    @staticmethod
    def auto_define_args(func):
        """Derive argparse attrs from *func*'s signature and defaults."""
        (args, pargs, kwargs, defaults) = inspect.getargspec(func)
        # Skip the implicit receiver of methods/classmethods.
        if args and (args[0] == 'self' or args[0] == 'cls'):
            args = args[1:]
        defaults = defaults if defaults is not None else []
        # Bug fix: with no defaults, args[:-0] evaluated to an empty list and
        # silently dropped every positional argument.
        if defaults:
            arg_no_defaults = args[:-len(defaults)]
            arg_defaults = zip(args[-len(defaults):], defaults)
        else:
            arg_no_defaults = args
            arg_defaults = []
        attrs = {}
        for arg in arg_no_defaults:
            # Positional argument: only a metavar is needed.
            attrs[arg] = {
                'metavar': arg.upper(),
            }
        for (arg, default) in arg_defaults:
            attrs[arg] = {
                'name': '--' + arg.replace('_', '-'),
                'action': guess_action(default),
                'default': default,
                'dest': arg,
            }
        if pargs is not None:
            # *args becomes a zero-or-more positional list.
            attrs[pargs] = {
                'name': pargs,
                'nargs': '*',
            }
        if kwargs is not None:
            # **kwargs cannot be mapped onto argparse options.
            pass
        func._argfunc_attrs = attrs
        return func

    def add_func(self, parser, func):
        """Register one decorated function's arguments on *parser*."""
        if hasattr(func, '_argfunc_attrs'):
            for (arg, attrs) in func._argfunc_attrs.iteritems():
                fixed_attrs = attrs.copy()
                if 'name' in attrs:
                    command_name = fixed_attrs.pop('name')
                    fixed_attrs['dest'] = arg
                else:
                    command_name = arg
                parser.add_argument(command_name, **fixed_attrs)

    def add_obj(self, parser, obj):
        """Register every decorated callable attribute of *obj* on *parser*.

        Bug fix: the original called ``callable(obj, a)`` (callable() takes a
        single argument, so this raised TypeError) and then passed the
        attribute *name* string -- not the attribute -- to add_func().
        """
        for name in dir(obj):
            member = getattr(obj, name)
            if callable(member) and hasattr(member, '_argfunc_attrs'):
                self.add_func(parser, member)
class GithubActor(object):
"""
"""
CONFIG_NS = 'hub'
GIT_REMOTE_NAME = 'github'
FALLBACK_EDITOR = 'nano'
_current_repo = None
_current_user = None
_github = None
def __init__(self, output=None):
self._current_repo = self._init_repo()
creds = self._get_github_credentials(self._current_repo)
self._current_user = creds[0]
self._github = self._init_github(creds[0], creds[1], self._current_repo)
if output is not None:
self._output = output
def _output(self, obj, *pargs, **kwargs):
if issubclass(obj.__class__, basestring):
print unicode(obj).format(*pargs, **kwargs)
else:
try:
pprint(obj, indent=2)
except Exception:
print repr(obj)
def _init_repo(self):
try:
repo = git.Repo(os.getcwd())
except git.exc.InvalidGitRepositoryError:
repo = None
return repo
def _init_github(self, username, password, repo=None):
repo_name = self._get_repo_name(repo)
return pygithub3.Github(login=username, password=password,
user=username, repo=repo_name)
@property
def _current_repo_name(self):
return self._get_repo_name(self._current_repo)
def _get_repo_name(self, repo):
if repo is not None:
return os.path.basename(repo.working_tree_dir)
else:
return None
def _get_github_credentials(self, repo=None):
if repo is None:
user_cfg_file = os.path.expanduser('~/.gitconfig')
if os.path.exists(user_cfg_file):
cfg = git.config.GitConfigParser(user_cfg_file)
else:
raise ValueError("""Can\'t find a gitconfig file for github login info.
Set the login info with:
git config --global --add {0}.username <username>
git config --global --add {0}.password <password>
""".format(self.CONFIG_NS))
else:
cfg = repo.config_reader()
return (cfg.get_value(self.CONFIG_NS, 'username'),
cfg.get_value(self.CONFIG_NS, 'password'))
def _get_padding(self, f, iterable):
return max(len(f(i)) for i in iterable)
def _require_in_repo(func):
@functools.wraps(func)
def wrapper(self, *pargs, **kwargs):
if self._current_repo is None:
self._output('You need to be in a repo for this command')
else:
return func(self, *pargs, **kwargs)
try:
wrapper._argfunc_attrs = func._argfunc_attrs
except AttributeError:
pass
return wrapper
@ArgFunc.auto_define_args
def develop(self, org=None, **kwargs):
"""Clone a repo so you can start working on it, forking to your account
if needed
"""
target_user = kwargs.get('user', self._current_user)
target_repo = kwargs.get('repo', self._current_repo_name)
if os.path.exists(os.path.join(os.getcwd(), target_repo)):
raise ValueError('Looks like the repo already exists at {0}'.format(
os.path.join(os.getcwd(), target_repo)))
if target_user != self._current_user:
#need to fork first
self._output('Looks like someone else\'s repo, forking...')
try:
fork = self._github.repos.forks.create(
user=target_user,
repo=target_repo,
org=org,
)
except AssertionError:
pass
self._output('Waiting for GitHub to stop forking around...')
time.sleep(5)
self._output('Getting repo info...')
gh_repo = self._github.repos.get(
user=self._current_user,
repo=target_repo,
)
repo_path = os.path.join(os.getcwd(), gh_repo.name)
self._output('Cloning repo {0} ...', gh_repo.full_name)
git.repo.base.Repo.clone_from(gh_repo.ssh_url, repo_path)
self._output('Repo cloned to {0}, enjoy!', repo_path)
@ArgFunc.auto_define_args
def repos_show(self, **kwargs):
"""Show a repo's info from GitHub
"""
display_tpl = '\n'.join([
'{repo.full_name: <48} {repo.language: <16} {repo.forks_count: >3} ' \
'Fork(s) {repo.watchers_count: >4} Watcher(s)',
'{repo.description}',
'{repo.html_url: <64} {repo.homepage}',
])
gh_repo = self._github.repos.get(
user=kwargs.get('user', self._current_user),
repo=kwargs.get('repo', self._current_repo_name))
self._output(display_tpl, repo=gh_repo)
@ArgFunc.define_args(
repo_type={'choices': ('all', 'owner', 'public', 'private', 'member'), 'default': 'all'},
)
def repos_list(self, repo_type='all', **kwargs):
"""List your or another user's repos
"""
repos = self._github.repos.list(
user=kwargs.get('user', self._current_user),
type=repo_type).all()
padding = self._get_padding(lambda r: r.name, repos)
for repo in repos:
fork_icon = 'V' if repo.fork else '|'
self._output(' {fork_icon} {name: <{padding}} -- {description}',
fork_icon=fork_icon, padding=padding, **vars(repo))
@ArgFunc.auto_define_args
def repos_create(self, description='', homepage='', private=False,
has_issues=False, has_wiki=False, has_downloads=False, in_org=None,
**kwargs):
"""Create a new repo on GitHub
"""
data = locals().copy()
del data['self'], data['kwargs'], data['in_org']
data['name'] = kwargs.get('repo', self._current_repo_name)
new_repo = self._github.repos.create(data, in_org)
@ArgFunc.auto_define_args
def repos_fork(self, org=None, **kwargs):
"""Fork a repo on GitHub to your account (or organization)
"""
try:
self._github.repos.forks.create(
user=kwargs.get('user', self._current_user),
repo=kwargs.get('repo', self._current_repo_name),
org=org)
except AssertionError:
pass
@ArgFunc.auto_define_args
def repos_clone(self, **kwargs):
"""Clone a repo from GitHub
"""
repo_name = kwargs.get('repo', None)
if repo_name is None:
raise ValueError('Use --repo to tell me the repo name')
try:
github_repo = self._github.repos.get(
user=kwargs.get('user', self._current_user),
repo=repo_name)
except Exception as e:
#TODO make this not dumb
raise e
repo_path = os.path.join(os.getcwd(), repo_name)
if github_repo.permissions['push']:
git.repo.base.Repo.clone_from(github_repo.ssh_url, repo_path)
else:
git.repo.base.Repo.clone_from(github_repo.git_url, repo_path)
self._output('Cloned {user}/{repo} to {path}',
user=kwargs.get('user', self._current_user),
repo=repo_name,
path=repo_path)
@_require_in_repo
@ArgFunc.auto_define_args
def repos_addremote(self, remote_name=GIT_REMOTE_NAME, **kwargs):
"""Add a remote for the corresponding repo on GitHub
"""
actual_repo = self._current_repo
if remote_name in (rm.name for rm in actual_repo.remotes):
self._output('Looks like the "{0}" remote already exists',
remote_name)
else:
github_repo = self._github.repos.get(
user=kwargs.get('user', self._current_user),
repo=kwargs.get('repo', self._current_repo_name))
if github_repo.permissions['push']:
#read-write, use ssh url
actual_repo.create_remote(remote_name, github_repo.ssh_url)
else:
#read only, use git url
actual_repo.create_remote(remote_name, github_repo.git_url)
self._output('"{0}" remote added', remote_name)
@ArgFunc.auto_define_args
def pr_show(self, pr_number, DUMMYOPT=None, **kwargs):
"""Display a pull request
"""
pr = self._github.pull_requests.get(pr_number,
user=kwargs.get('user', self._current_user),
repo=kwargs.get('repo', self._current_repo_name))
self._output(vars(pr))
@ArgFunc.define_args(
state={'choices': ('open', 'closed'), 'default': 'open'},
)
def pr_list(self, state='open', **kwargs):
"""List the open pull requests for a repo
Note that the --state option is currently non-functional
"""
pull_requests = self._github.pull_requests.list(
user=kwargs.get('user', kwargs.get('user', self._current_user)),
repo=kwargs.get('repo', self._current_repo_name)).all()
padding = self._get_padding(lambda pr: pr.user['login'], pull_requests)
for pr in pull_requests:
commit_count = len(self._github.pull_requests.list_commits(pr.number,
user=kwargs.get('user', kwargs.get('user', self._current_user)),
repo=kwargs.get('repo', self._current_repo_name)).all())
self._output('#{number:0>4} {commit_count:0>2}c @{user[login]: <{padding}} {title} -- <{html_url}>',
padding=padding, commit_count=commit_count, **vars(pr))
@ArgFunc.auto_define_args
def pr_merge(self, pr_number, commit_message='', **kwargs):
"""Do a simple merge of a pull request (Merge Button)
"""
self._github.pull_requests.merge(number, commit_message,
user=kwargs.get('user', self._current_user),
repo=kwargs.get('repo', self._current_repo_name))
self._output('Pull request #{0:0>4} merged!', pr_number)
@_require_in_repo
@ArgFunc.auto_define_args
def pr_addremote(self, pr_number, remote_name=None, **kwargs):
"""Add a remote for the source repo in a PR
"""
if remote_name is None:
remote_name = 'pr-{n:0>4}'.format(n=pr_number)
repo = self._current_repo
pr = self._github.pull_requests.get(pr_number,
user=kwargs.get('user', self._current_user),
repo=kwargs.get('repo', self._current_repo_name))
if remote_name in (rm.name for rm in repo.remotes):
self._output('Looks like the "{0}" remote already exists',
remote_name)
else:
repo.create_remote(remote_name, pr.head['repo']['git_url'])
self._output('"{0}" remote added', remote_name)
@ArgFunc.auto_define_args
def issues_show(self, issue_number, DUMMYOPT=None, **kwargs):
"""Display a specific issue
"""
issue = self._github.issues.get(issue_number,
user=kwargs.get('user', self._current_user),
repo=kwargs.get('repo', self._current_repo_name))
msg = [
'#{i.number:0>4} ({i.state}) -- {i.title}',
'@{i.user.login}:',
]
if issue.body:
msg.append(self._wrap_text_body(issue.body))
self._output('\n'.join(msg), i=issue)
comments = self._github.issues.comments.list(issue_number,
user=kwargs.get('user', self._current_user),
repo=kwargs.get('repo', self._current_repo_name)).all()
for comment in comments:
self._output('@{c.user.login}:\n{wrapped_body}',
c=comment, wrapped_body=self._wrap_text_body(comment.body))
def _wrap_text_body(self, text, padding=8):
"""Wrap :text: so that there are :padding: spaces on either side, based on
terminal width
"""
console_width = max(get_console_size()[1], padding * 3)
return '\n'.join(' ' * padding + line \
for line in textwrap.wrap(text.strip(), console_width - (padding * 2)))
@ArgFunc.auto_define_args
def issues_list(self, milestone='none', state='open', assignee='none', labels='',
sort='created', **kwargs):
"""List a repo's issues
"""
issues = self._github.issues.list_by_repo(
user=kwargs.get('user', self._current_user),
repo=kwargs.get('repo', self._current_repo_name),
state=state,
assignee=assignee,
milestone=milestone,
labels=labels,
sort=sort,
)
for page in issues:
for issue in page:
self._output('#{issue.number:0>4} ({issue.state}) @{issue.user.login: <16} -- {issue.title}',
issue=issue)
@ArgFunc.auto_define_args
def issues_create(self, title=None, body=None, assignee=None, milestone=None,
labels=None, **kwargs):
"""Open a new issue
"""
data = locals().copy()
del data['self'], data['kwargs']
if data['labels'] is not None:
data['labels'] = [l.strip() for l in data['labels'].split(',')]
if data['body'] is None:
(_, path) = tempfile.mkstemp()
with open(path, 'w') as handle:
handle.write('# Put the body of your issue here\n' \
'# Lines starting with \'#\' are ignored\n' \
'# If you didn\'t provide a title, the first line here will be used\n')
subprocess.call([self._get_editor(), path])
with open(path, 'r') as handle:
body = [line.rstrip() for line in handle.readlines() \
if not line.startswith('#') and line.strip()]
if not data['title']:
data['title'] = body[0].strip()
data['body'] = '\n'.join(body[1:])
else:
data['body'] = '\n'.join(body)
os.unlink(path)
issue = self._github.issues.create(data,
user=kwargs.get('user', self._current_user),
repo=kwargs.get('repo', self._current_repo_name))
self._output('Issue #{issue.number:0>4} created: {issue.html_url}',
issue=issue)
def _get_editor(self):
"""Get the editor from env variables
Looks at $EDITOR, then $VISUAL, then falls back to :FALLBACK_EDITOR:
"""
return os.environ.get('EDITOR',
os.environ.get('VISUAL',
self.FALLBACK_EDITOR))
@ArgFunc.auto_define_args
def issues_comment(self, issue_number, message=None, close=False, **kwargs):
"""Add a comment to an issue
"""
if message is None:
(_, path) = tempfile.mkstemp()
with open(path, 'w') as handle:
handle.write('# Write your comment here\n' \
'# Lines starting with \'#\' are ignored\n')
subprocess.call([self._get_editor(), path])
with open(path, 'r') as handle:
message = '\n'.join(line.rstrip() for line in handle.readlines() \
if not line.startswith('#') and line.strip())
os.unlink(path)
comment = self._github.issues.comments.create(issue_number, message,
user=kwargs.get('user', self._current_user),
repo=kwargs.get('repo', self._current_repo_name))
self._output('Comment {comment.id} added!', comment=comment)
if close:
self._github.issues.update(issue_number, {'state': 'closed'},
user=kwargs.get('user', self._current_user),
repo=kwargs.get('repo', self._current_repo_name))
self._output('Issue closed')
def build_parser(actor):
    """Build the git-hub argparse CLI from ``actor``'s command_verb methods."""
    arg_func = ArgFunc()
    parser = argparse.ArgumentParser(description='git-hub - Do stuff with GitHub',
        prog='git-hub')
    parser.add_argument('--verbose', help='Display more output', action='store_true')
    command_parsers = parser.add_subparsers(title='GitHub commands',
        dest='command')
    # Options shared by every subcommand.
    parent_parser = argparse.ArgumentParser(add_help=False)
    parent_parser.add_argument('-u', '--user', help='Override target username')
    parent_parser.add_argument('-r', '--repo', help='Override target repo name')
    # Group the actor's public callables by their "<command>_<verb>" names.
    commands = set(name.split('_')[0] for name in dir(actor)
        if not name.startswith('_') and callable(getattr(actor, name)))
    for command in commands:
        verbs = [name.split('_', 1)[1] for name in dir(actor)
            if name.startswith(command + '_') and callable(getattr(actor, name))]
        for verb in verbs:
            command_verb = command + '_' + verb
            cv_func = getattr(actor, command_verb)
            attrs = {'parents': [parent_parser]}
            try:
                attrs['help'] = cv_func.__doc__.split('\n')[0].strip()
            except AttributeError:
                pass
            verb_parser = command_parsers.add_parser(
                command_verb.replace('_', '-'), **attrs)
            arg_func.add_func(verb_parser, cv_func)
    # "develop" has no verb suffix, so register it explicitly.
    develop_parser = command_parsers.add_parser('develop',
        help=actor.develop.__doc__.split('\n')[0].strip(),
        parents=[parent_parser])
    arg_func.add_func(develop_parser, actor.develop)
    return parser
def main():
    """CLI entry point: parse argv and dispatch to the matching actor method."""
    actor = GithubActor()
    opts = build_parser(actor).parse_args()
    # Subcommand "repos-list" maps back to method name "repos_list".
    method_name = opts.command.replace('-', '_')
    del opts.command
    handler = getattr(actor, method_name)
    return handler(**vars(opts))


if __name__ == '__main__':
    main()
|
GNOME/d-feet
|
src/dfeet/introspection_helper.py
|
# -*- coding: utf-8 -*-
from gi.repository import GLib, GObject, Gio
from dfeet import dbus_utils
def args_signature_markup(arg_signature):
    """Return *arg_signature* wrapped in small, sea-green Pango markup."""
    return '<small><span foreground="#2E8B57">{0}</span></small>'.format(
        arg_signature)
def args_name_markup(arg_name):
    """Return *arg_name* wrapped in small Pango markup."""
    return '<small>{0}</small>'.format(arg_name)
class DBusNode(GObject.GObject):
    """Represents a D-Bus node (an object path) on a bus name."""

    def __init__(self, name, object_path, node_info):
        GObject.GObject.__init__(self)
        self.__name = name
        self.__object_path = object_path
        # Gio.GDBusNodeInfo object describing this node
        self.__node_info = node_info

    def __repr__(self):
        return "Name: {0} ; ObjPath: {1} ; NodeInfo: {2}".format(
            self.name, self.object_path, self.node_info)

    @property
    def name(self):
        """Bus name this node was introspected from."""
        return self.__name

    @property
    def object_path(self):
        """D-Bus object path of this node."""
        return self.__object_path

    @property
    def node_info(self):
        """The Gio.GDBusNodeInfo introspection data."""
        return self.__node_info
class DBusInterface(DBusNode):
    """Represents a D-Bus interface on a node."""

    def __init__(self, dbus_node_obj, iface_info):
        DBusNode.__init__(self, dbus_node_obj.name,
                          dbus_node_obj.object_path, dbus_node_obj.node_info)
        # Gio.GDBusInterfaceInfo object describing this interface
        self.__iface_info = iface_info

    def __repr__(self):
        return "iface '{0}' on node '{1}'".format(self.iface_info.name,
                                                  self.node_info.path)

    @property
    def iface_info(self):
        """The Gio.GDBusInterfaceInfo introspection data."""
        return self.__iface_info
class DBusProperty(DBusInterface):
    """Represents a D-Bus property of an interface."""

    def __init__(self, dbus_iface_obj, property_info):
        DBusInterface.__init__(self, dbus_iface_obj, dbus_iface_obj.iface_info)
        # Gio.GDBusPropertyInfo object describing this property
        self.__property_info = property_info
        # Last known value; None until it has been fetched.
        self.__value = None

    def __repr__(self):
        sig = dbus_utils.sig_to_string(self.property_info.signature)
        return "{0} {1} ({2})".format(sig, self.property_info.name,
                                      self.property_info.flags)

    @property
    def property_info(self):
        """The Gio.GDBusPropertyInfo introspection data."""
        return self.__property_info

    @property
    def value(self):
        """Cached property value (None until set)."""
        return self.__value

    @value.setter
    def value(self, new_val):
        self.__value = new_val

    @property
    def markup_str(self):
        """Pango markup describing signature, name, access and value."""
        sig = dbus_utils.sig_to_string(self.property_info.signature)
        access = list()
        if self.readable:
            access.append("read")
        if self.writable:
            access.append("write")
        markup = "%s %s <small>(%s)</small>" % (
            args_signature_markup(sig),
            args_name_markup(self.property_info.name), " / ".join(access))
        if self.value is not None:
            markup += " = %s" % (GLib.markup_escape_text(str(self.value), -1),)
        return markup

    @property
    def readable(self):
        """True when the property's flags include READABLE."""
        flags = int(self.property_info.flags)
        both = int(Gio.DBusPropertyInfoFlags.WRITABLE |
                   Gio.DBusPropertyInfoFlags.READABLE)
        return flags in (int(Gio.DBusPropertyInfoFlags.READABLE), both)

    @property
    def writable(self):
        """True when the property's flags include WRITABLE."""
        flags = int(self.property_info.flags)
        both = int(Gio.DBusPropertyInfoFlags.WRITABLE |
                   Gio.DBusPropertyInfoFlags.READABLE)
        return flags in (int(Gio.DBusPropertyInfoFlags.WRITABLE), both)
class DBusSignal(DBusInterface):
    """Represents a D-Bus signal of an interface."""

    def __init__(self, dbus_iface_obj, signal_info):
        DBusInterface.__init__(self, dbus_iface_obj,
                               dbus_iface_obj.iface_info)
        # Gio.GDBusSignalInfo object describing this signal
        self.__signal_info = signal_info

    def __repr__(self):
        return "%s" % (self.signal_info.name)

    @property
    def signal_info(self):
        """The Gio.GDBusSignalInfo introspection data."""
        return self.__signal_info

    @property
    def args(self):
        """Signal arguments as dicts with 'signature' and 'name' keys."""
        return [{'signature': dbus_utils.sig_to_string(arg.signature),
                 'name': arg.name}
                for arg in self.signal_info.args]

    @property
    def args_markup_str(self):
        """Argument signatures wrapped in magenta parentheses markup."""
        inner = ', '.join(args_signature_markup(arg['signature'])
                          for arg in self.args)
        return ('<span foreground="#FF00FF">(</span>' + inner +
                '<span foreground="#FF00FF">)</span>')

    @property
    def markup_str(self):
        """Signal name followed by its argument markup."""
        return "%s %s" % (self.signal_info.name, self.args_markup_str)
class DBusMethod(DBusInterface):
    """Represents a D-Bus method of an interface."""

    def __init__(self, dbus_iface_obj, method_info):
        DBusInterface.__init__(self, dbus_iface_obj, dbus_iface_obj.iface_info)
        # Gio.GDBusMethodInfo object describing this method
        self.__method_info = method_info

    def __repr__(self):
        return "%s(%s) ↦ %s (%s)" % (
            self.method_info.name, self.in_args_str,
            self.out_args_str, DBusInterface.__repr__(self))

    @property
    def in_args_code(self):
        """Concatenated raw input-argument signatures."""
        return ''.join(a.signature for a in self.__method_info.in_args)

    @property
    def method_info(self):
        """The Gio.GDBusMethodInfo introspection data."""
        return self.__method_info

    @property
    def markup_str(self):
        """Method name, input args, arrow and output args as Pango markup."""
        return "%s %s <b>↦</b> %s" % (
            self.method_info.name, self.in_args_markup_str, self.out_args_markup_str)

    @property
    def in_args(self):
        """Input arguments as dicts with 'signature' and 'name' keys."""
        return [{'signature': dbus_utils.sig_to_string(arg.signature),
                 'name': arg.name}
                for arg in self.method_info.in_args]

    @property
    def out_args(self):
        """Output arguments as dicts with 'signature' and 'name' keys."""
        return [{'signature': dbus_utils.sig_to_string(arg.signature),
                 'name': arg.name}
                for arg in self.method_info.out_args]

    @property
    def in_args_str(self):
        """Input arguments as a plain 'sig name, sig name' string."""
        return ", ".join("%s %s" % (arg['signature'], arg['name'])
                         for arg in self.in_args)

    @property
    def out_args_str(self):
        """Output arguments as a plain 'sig name, sig name' string."""
        return ", ".join("%s %s" % (arg['signature'], arg['name'])
                         for arg in self.out_args)

    def __args_markup_str(self, args):
        """markup a given list of args"""
        inner = ', '.join(
            '%s %s' % (
                args_signature_markup(arg['signature']),
                args_name_markup(arg['name'])) for arg in args)
        return ('<span foreground="#FF00FF">(</span>' + inner +
                '<span foreground="#FF00FF">)</span>')

    @property
    def in_args_markup_str(self):
        return self.__args_markup_str(self.in_args)

    @property
    def out_args_markup_str(self):
        return self.__args_markup_str(self.out_args)
class DBusAnnotation(DBusInterface):
    """Represents a D-Bus annotation (a key/value pair) of an interface."""

    def __init__(self, dbus_iface_obj, annotation_info):
        DBusInterface.__init__(self, dbus_iface_obj,
                               dbus_iface_obj.iface_info)
        # Gio.GDBusAnnotationInfo object describing this annotation
        self.__annotation_info = annotation_info

    def __repr__(self):
        return "%s: %s" % (self.annotation_info.key, self.annotation_info.value)

    @property
    def annotation_info(self):
        """The Gio.GDBusAnnotationInfo introspection data."""
        return self.__annotation_info

    @property
    def markup_str(self):
        """'key: value' string for display (same format as repr)."""
        return "%s: %s" % (self.annotation_info.key, self.annotation_info.value)
|
ceph/autotest
|
client/tests/kvm/tests/timedrift_with_migration.py
|
import logging
from autotest_lib.client.common_lib import error
import kvm_test_utils
def run_timedrift_with_migration(test, params, env):
    """
    Time drift test with migration:
    1) Log into a guest.
    2) Take a time reading from the guest and host.
    3) Migrate the guest.
    4) Take a second time reading.
    5) If the drift (in seconds) is higher than a user specified value, fail.
    @param test: KVM test object.
    @param params: Dictionary with test parameters.
    @param env: Dictionary with the test environment.
    """
    vm = env.get_vm(params["main_vm"])
    vm.verify_alive()
    timeout = int(params.get("login_timeout", 360))
    session = vm.wait_for_login(timeout=timeout)
    # Collect test parameters:
    # Command to run to get the current time
    time_command = params.get("time_command")
    # Filter which should match a string to be passed to time.strptime()
    time_filter_re = params.get("time_filter_re")
    # Time format for time.strptime()
    time_format = params.get("time_format")
    # Max total drift (seconds) allowed over all migrations.
    drift_threshold = float(params.get("drift_threshold", "10"))
    # Max drift (seconds) allowed per single migration iteration.
    drift_threshold_single = float(params.get("drift_threshold_single", "3"))
    migration_iterations = int(params.get("migration_iterations", 1))
    try:
        # Get initial time
        # (ht stands for host time, gt stands for guest time)
        # NOTE(review): assumes kvm_test_utils.get_time returns a numeric
        # (host_time, guest_time) pair in seconds -- confirm against its docs.
        (ht0, gt0) = kvm_test_utils.get_time(session, time_command,
                                             time_filter_re, time_format)
        # Migrate
        for i in range(migration_iterations):
            # Get time before current iteration
            (ht0_, gt0_) = kvm_test_utils.get_time(session, time_command,
                                                   time_filter_re, time_format)
            # The migration invalidates the session, so close it first.
            session.close()
            # Run current iteration
            logging.info("Migrating: iteration %d of %d...",
                         (i + 1), migration_iterations)
            vm.migrate()
            # Log in
            logging.info("Logging in after migration...")
            session = vm.wait_for_login(timeout=30)
            logging.info("Logged in after migration")
            # Get time after current iteration
            (ht1_, gt1_) = kvm_test_utils.get_time(session, time_command,
                                                   time_filter_re, time_format)
            # Report iteration results
            host_delta = ht1_ - ht0_
            guest_delta = gt1_ - gt0_
            # Drift = how far the guest clock diverged from the host clock
            # over this iteration.
            drift = abs(host_delta - guest_delta)
            logging.info("Host duration (iteration %d): %.2f",
                         (i + 1), host_delta)
            logging.info("Guest duration (iteration %d): %.2f",
                         (i + 1), guest_delta)
            logging.info("Drift at iteration %d: %.2f seconds",
                         (i + 1), drift)
            # Fail if necessary
            if drift > drift_threshold_single:
                raise error.TestFail("Time drift too large at iteration %d: "
                                     "%.2f seconds" % (i + 1, drift))
        # Get final time
        (ht1, gt1) = kvm_test_utils.get_time(session, time_command,
                                             time_filter_re, time_format)
    finally:
        # Always release the session, even when an iteration failed.
        if session:
            session.close()
    # Report results
    host_delta = ht1 - ht0
    guest_delta = gt1 - gt0
    drift = abs(host_delta - guest_delta)
    logging.info("Host duration (%d migrations): %.2f",
                 migration_iterations, host_delta)
    logging.info("Guest duration (%d migrations): %.2f",
                 migration_iterations, guest_delta)
    logging.info("Drift after %d migrations: %.2f seconds",
                 migration_iterations, drift)
    # Fail if necessary
    if drift > drift_threshold:
        raise error.TestFail("Time drift too large after %d migrations: "
                             "%.2f seconds" % (migration_iterations, drift))
|
shohamp/Gobi
|
runner/gobi_runner.py
|
from glob import glob
import subprocess
import vagrant
from fabric.api import execute, env, quiet
from fabric.state import connections
from logger import init_logger, debug, info
VM_NAME = "default"
def clear_fabric_cache():
    """
    Fabric caches its connections, so it won't have to re-connect every time you use it.
    But, when working with VMs whose connections are getting reset, we can't use a cache.
    Use this function to reset fabric's cache
    """
    # BUG FIX: snapshot the keys first -- on Python 3, dict.keys() is a live
    # view, and deleting entries while iterating it raises RuntimeError.
    connection_keys = list(connections.keys())
    for host_string in connection_keys:
        connections[host_string].close()
        del connections[host_string]
def get_all_test_functions():
    """
    Collect the test functions from the current directory.

    Scans python files whose names start with "test" and gathers every
    attribute in them whose name starts with "test".
    """
    tasks = []
    for file_name in glob("test*.py"):
        # Strip the ".py" suffix to get an importable module name.
        module = __import__(file_name[:-3])
        for attr_name in dir(module):
            if attr_name.startswith("test"):
                tasks.append(module.__dict__[attr_name])
    return tasks
def vagrant_run_command(command):
    """
    Run "vagrant <command>" in a shell, discarding its stdout.
    """
    full_command = "vagrant " + command
    subprocess.call(full_command, shell=True, stdout=subprocess.PIPE)
def vagrant_take_snapshot():
    """
    Snapshot the running machine under the name "snapshot".
    """
    vagrant_run_command("snapshot take snapshot")
def vagrant_revert_to_snapshot():
    """
    Roll the running machine back to the most recent snapshot.
    """
    vagrant_run_command("snapshot back")
def init_fabric(vclient):
    """
    Point fabric's global env at the vagrant-managed VM and quiet it down.
    """
    env.host_string = vclient.user_hostname_port(vm_name=VM_NAME)
    env.key_filename = vclient.keyfile(vm_name=VM_NAME)
    # Vagrant boxes get fresh host keys, so skip known_hosts checking.
    env.disable_known_hosts = True
    env.warn_only = True
    env.quiet = True
def main():
    """
    Gobi's main function.
    Finds the test functions, runs the machine, connects to them, and runs the tests
    """
    init_logger()
    info("Welcome to gobi. Sit back and relax :)")
    vclient = vagrant.Vagrant()
    test_funcs = get_all_test_functions()
    # BUG FIX: the original asserted `test_funcs > 0`, comparing the list
    # itself to an int (always true on Python 2, TypeError on Python 3);
    # check the number of collected tests instead.
    assert len(test_funcs) > 0, "No tests found. What do you want me to run?"
    info("Found %d tests to run" % len(test_funcs))
    info("Setting up the environment...")
    vclient.up()
    info("Environment is up and ready")
    debug("Taking snapshot...")
    vagrant_take_snapshot()
    debug("Snapshot taken")
    init_fabric(vclient)
    counter = 1
    for task in test_funcs:
        # After the first test, clean - delete cache and revert to snapshot
        if counter != 1:
            clear_fabric_cache()
            debug("Reverting to snapshot...")
            vagrant_revert_to_snapshot()
            debug("Reverted!")
        info("Running test number %d - %s" % (counter, task.__name__))
        execute(task)
        counter += 1
    info("All tests finished")
    info("Destroying environment...")
    vclient.destroy()
    info("Environment has been destroyed...")
    info("Gobi, out")


if __name__ == "__main__":
    main()
|
asimonov-im/boinc
|
py/Boinc/tools.py
|
## $Id: tools.py 23525 2011-05-12 04:11:40Z davea $
import configxml
try:
# use new hashlib if available
from hashlib import md5
except:
import md5
import os, shutil, binascii, filecmp
# from http://www.plope.com/software/uuidgen/view
_urandomfd = None
def urandom(n):
    """urandom(n) -> str

    Return a string of n random bytes suitable for cryptographic use.
    """
    global _urandomfd
    # Open /dev/urandom once and cache the descriptor for later calls.
    if _urandomfd is None:
        try:
            _urandomfd = os.open("/dev/urandom", os.O_RDONLY)
        except:
            _urandomfd = NotImplementedError
    if _urandomfd is NotImplementedError:
        raise NotImplementedError("/dev/urandom (or equivalent) not found")
    # os.read may return fewer bytes than requested; keep reading until
    # we have collected n of them.
    data = ""
    while len(data) < n:
        data += os.read(_urandomfd, n - len(data))
    return data
def make_uuid():
    """Return a random 32-character hex string (128 random bits)."""
    return binascii.hexlify(urandom(16))
def md5_file(path):
    """
    Return an md5 hash object for a file's contents
    Read the file in chunks; call .hexdigest() on the result for the
    hex digest string.  (The old docstring claimed a hex digest was
    returned, but callers receive the hash object itself.)
    """
    chunk = 8096
    try:
        checksum = md5()
    except NameError:
        # Fall back to the legacy md5 module on very old Pythons.
        checksum = md5.new()
    # BUG FIX: open in binary mode -- hashing text-mode data yields wrong
    # digests wherever newline translation happens; also make sure the
    # file is closed even if reading fails.
    fp = open(path, 'rb')
    try:
        while True:
            buffer = fp.read(chunk)
            if not buffer:
                break
            checksum.update(buffer)
    finally:
        fp.close()
    return checksum
def file_size(path):
    """Return the size of a file, in bytes.

    Seeks to the end instead of reading the contents; the handle is
    always closed (the original leaked the open file).
    """
    f = open(path)
    try:
        f.seek(0, 2)
        return f.tell()
    finally:
        f.close()
def query_yesno(str):
    '''Query user; default Yes'''
    # NOTE: `str` shadows the builtin; kept for interface compatibility.
    # The trailing comma keeps the cursor on the prompt line (Python 2 print).
    print str, "[Y/n] ",
    return not raw_input().strip().lower().startswith('n')
def query_noyes(str):
    '''Query user; default No'''
    # NOTE: `str` shadows the builtin; kept for interface compatibility.
    # Only an explicit leading 'y'/'Y' answer counts as yes.
    print str, "[y/N] ",
    return raw_input().strip().lower().startswith('y')
def get_output_file_path(filename):
    """ Return the filename's path in the upload directory
    Use this if you're developing a validator/assimilator in Python

    The fan-out subdirectory is derived from the file name's md5 digest,
    mirroring the scheme used by the BOINC file upload handler.
    """
    config = configxml.default_config()
    fanout = long(config.config.uldl_dir_fanout)
    # BUG FIX: `md5` is either hashlib's constructor or the legacy md5
    # module, depending on which import succeeded at the top of this file;
    # the original unconditionally used md5.new(), which raises
    # AttributeError whenever hashlib was available.
    try:
        s = md5(filename).hexdigest()[1:8]
    except TypeError:
        s = md5.new(filename).hexdigest()[1:8]
    x = long(s, 16)
    return "%s/%x/%s" % (config.config.upload_dir, x % fanout, filename)
|
mikel-egana-aranguren/SADI-Galaxy-Docker
|
galaxy-dist/lib/galaxy/webapps/galaxy/api/group_roles.py
|
"""
API operations on Group objects.
"""
import logging
from galaxy.web.base.controller import BaseAPIController, url_for
from galaxy import web
log = logging.getLogger( __name__ )
class GroupRolesAPIController( BaseAPIController ):
@web.expose_api
@web.require_admin
def index( self, trans, group_id, **kwd ):
"""
GET /api/groups/{encoded_group_id}/roles
Displays a collection (list) of groups.
"""
decoded_group_id = trans.security.decode_id( group_id )
try:
group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id )
except:
group = None
if not group:
trans.response.status = 400
return "Invalid group id ( %s ) specified." % str( group_id )
rval = []
try:
for gra in group.roles:
role = gra.role
encoded_id = trans.security.encode_id( role.id )
rval.append( dict( id = encoded_id,
name = role.name,
url = url_for( 'group_role', group_id=group_id, id=encoded_id, ) ) )
except Exception, e:
rval = "Error in group API at listing roles"
log.error( rval + ": %s" % str(e) )
trans.response.status = 500
return rval
@web.expose_api
@web.require_admin
def show( self, trans, id, group_id, **kwd ):
"""
GET /api/groups/{encoded_group_id}/roles/{encoded_role_id}
Displays information about a group role.
"""
role_id = id
decoded_group_id = trans.security.decode_id( group_id )
decoded_role_id = trans.security.decode_id( role_id )
item = None
try:
group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id )
role = trans.sa_session.query( trans.app.model.Role ).get( decoded_role_id )
for gra in group.roles:
if gra.role == role:
item = dict( id = role_id,
name = role.name,
url = url_for( 'group_role', group_id=group_id, id=role_id) ) # TODO Fix This
if not item:
item = "role %s not in group %s" % (role.name,group.name)
except Exception, e:
item = "Error in group_role API group %s role %s" % (group.name, role.name)
log.error(item + ": %s" % str(e))
return item
@web.expose_api
@web.require_admin
def update( self, trans, id, group_id, **kwd ):
"""
PUT /api/groups/{encoded_group_id}/roles/{encoded_role_id}
Adds a role to a group
"""
role_id = id
decoded_group_id = trans.security.decode_id( group_id )
decoded_role_id = trans.security.decode_id( role_id )
item = None
try:
group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id )
role = trans.sa_session.query( trans.app.model.Role ).get( decoded_role_id )
for gra in group.roles:
if gra.role == role:
item = dict( id = role_id,
name = role.name,
url = url_for( 'group_role', group_id=group_id, id=role_id) )
if not item:
gra = trans.app.model.GroupRoleAssociation( group, role )
# Add GroupRoleAssociation
trans.sa_session.add( gra )
trans.sa_session.flush()
item = dict( id = role_id,
name = role.name,
url = url_for( 'group_role', group_id=group_id, id=role_id) )
except Exception, e:
item = "Error in group_role API Adding role %s to group %s" % (role.name,group.name)
log.error(item + ": %s" % str(e))
return item
@web.expose_api
@web.require_admin
def delete( self, trans, id, group_id, **kwd ):
"""
DELETE /api/groups/{encoded_group_id}/roles/{encoded_role_id}
Removes a role from a group
"""
role_id = id
decoded_group_id = trans.security.decode_id( group_id )
decoded_role_id = trans.security.decode_id( role_id )
try:
group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id )
role = trans.sa_session.query( trans.app.model.Role ).get( decoded_role_id )
for gra in group.roles:
if gra.role == role:
trans.sa_session.delete( gra )
trans.sa_session.flush()
item = dict( id = role_id,
name = role.name,
url = url_for( 'group_role', group_id=group_id, id=role_id) )
if not item:
item = "role %s not in group %s" % (role.name,group.name)
except Exception, e:
item = "Error in group_role API Removing role %s from group %s" % (role.name,group.name)
log.error(item + ": %s" % str(e))
return item
|
tannmay/Algorithms-1
|
Sorting/Codes/mergeSort.py
|
'''
Python program for implementation of Merge Sort
l is left index, m is middle index and r is right index
L[l...m] and R[m+1.....r] are respective left and right sub-arrays
'''
def merge(arr, l, m, r):
    """Merge the two sorted runs arr[l..m] and arr[m+1..r] in place.

    :param arr: list being sorted
    :param l: left index of the first run
    :param m: last index of the first run
    :param r: last index of the second run
    """
    n1 = m - l + 1
    n2 = r - m
    # create temporary arrays
    L = [0] * n1
    R = [0] * n2
    # Copy data to temp arrays L[] and R[]
    for i in range(0, n1):
        L[i] = arr[l + i]
    for j in range(0, n2):
        R[j] = arr[m + 1 + j]
    # Merge the temp arrays back into arr[l...r]
    i = 0  # Initial index of first subarray
    j = 0  # Initial index of second subarray
    k = l  # Initial index of merged subarray
    # Comparing the elements of the two runs and filling them into arr
    while i < n1 and j < n2:
        if L[i] <= R[j]:
            arr[k] = L[i]
            i += 1
        else:
            arr[k] = R[j]
            j += 1
        k += 1
    # Copy the remaining elements of L[], if there are any
    while i < n1:
        arr[k] = L[i]
        i += 1
        k += 1
    # Copy the remaining elements of R[], if there are any
    while j < n2:
        arr[k] = R[j]  # fix: the assignment operator was missing here
        j += 1
        k += 1
# l is for left index and r is for right index of the
# subarray of arr to be sorted
def mergeSort(arr, l, r):
    """Recursively sort arr[l..r] in place with merge sort."""
    if l < r:
        # Same as (l+r)//2, but avoids overflow for large l and h.
        # Fix: floor division is required — in Python 3 a single "/"
        # produces a float, which cannot be used as a list index.
        m = (l + (r - 1)) // 2
        # Sort first and second halves, then merge them
        mergeSort(arr, l, m)
        mergeSort(arr, m + 1, r)
        merge(arr, l, m, r)
|
fabric8-analytics/fabric8-analytics-worker
|
f8a_worker/utils.py
|
"""Module containing helper functions that are used by other parts of worker."""
import datetime
import getpass
import json
import logging
import signal
import re
from contextlib import contextmanager
from os import path as os_path, walk, getcwd, chdir, environ as os_environ, killpg, getpgid
from queue import Queue, Empty
from shlex import split
from subprocess import Popen, PIPE, check_output, CalledProcessError, TimeoutExpired
from threading import Thread
from traceback import format_exc
from urllib.parse import unquote, urlparse, parse_qs
import tenacity
import requests
from requests.adapters import HTTPAdapter
from requests.exceptions import HTTPError
from requests.packages.urllib3.util.retry import Retry
from selinon import StoragePool
from sqlalchemy.exc import SQLAlchemyError
from f8a_worker.enums import EcosystemBackend
from f8a_worker.errors import (TaskError,
NotABugTaskError,
F8AConfigurationException)
from f8a_worker.models import (Analysis,
Ecosystem,
Package,
Version)
from f8a_worker.defaults import configuration
logger = logging.getLogger(__name__)
def get_latest_analysis(ecosystem, package, version, db_session=None):
    """Get latest analysis for the given EPV.

    :param ecosystem: ecosystem name, matched against ``Ecosystem.name``
    :param package: package name, matched against ``Package.name``
    :param version: version identifier, matched against ``Version.identifier``
    :param db_session: optional SQLAlchemy session; when omitted, the session
        of the connected "BayesianPostgres" storage is used
    :return: the ``Analysis`` row with the newest ``started_at``, or None
    """
    if not db_session:
        storage = StoragePool.get_connected_storage("BayesianPostgres")
        db_session = storage.session
    try:
        # NOTE(review): the filters reference Ecosystem/Package/Version without
        # explicit joins — presumably this relies on configured relationships;
        # verify it does not produce a cartesian product.
        return db_session.query(Analysis). \
            filter(Ecosystem.name == ecosystem). \
            filter(Package.name == package). \
            filter(Version.identifier == version). \
            order_by(Analysis.started_at.desc()). \
            first()
    except SQLAlchemyError:
        # keep the session usable for later queries, then propagate
        db_session.rollback()
        raise
@contextmanager
def cwd(target):
    """Temporarily switch the working directory to *target* (pushd/popd style)."""
    previous = getcwd()
    chdir(target)
    try:
        yield
    finally:
        # always restore the original directory, even when the body raised
        chdir(previous)
@contextmanager
def username():
    """Workaround for failing getpass.getuser().

    http://blog.dscpl.com.au/2015/12/unknown-user-when-running-docker.html
    """
    resolved = ''
    try:
        resolved = getpass.getuser()
    except KeyError:
        # no passwd entry for the current UID — fake one via LOGNAME
        os_environ['LOGNAME'] = 'f8aworker'
    try:
        yield
    finally:
        if not resolved:
            # remove only the variable we injected ourselves
            del os_environ['LOGNAME']
def assert_not_none(name, value):
    """Ensure *value* is not None.

    :param name: parameter name used in the error message
    :param value: value to check
    :raises ValueError: when *value* is None
    """
    if value is not None:
        return
    raise ValueError('Parameter %r is None' % name)
class TimedCommand(object):
    """Execute arbitrary shell command in a timeout-able manner."""

    def __init__(self, command):
        """Initialize command.

        :param command: command to run — a list of arguments, or a string
            that is tokenized with ``shlex.split``
        """
        # parse with shlex if not execve friendly
        if isinstance(command, str):
            command = split(command)

        self.command = command

    def run(self, timeout=None, is_json=False, **kwargs):
        """Run the self.command and wait up to given time period for results.

        :param timeout: how long to wait, in seconds, for the command to finish
            before terminating it
        :param is_json: hint whether output of the command is a JSON
        :return: triplet (return code, stdout, stderr), stdout will be a
            dictionary if `is_json` is True
        """
        logger.debug("running command '%s'; timeout '%s'", self.command, timeout)

        # this gets executed in a separate thread
        def target(**kwargs):
            try:
                self.process = Popen(self.command, universal_newlines=True, **kwargs)
                self.output, self.error = self.process.communicate()
                self.status = self.process.returncode
            except Exception:
                # record the traceback so the caller can see why the run failed
                self.output = {} if is_json else []
                self.error = format_exc()
                self.status = -1

        # default stdout and stderr
        if 'stdout' not in kwargs:
            kwargs['stdout'] = PIPE
        if 'stderr' not in kwargs:
            kwargs['stderr'] = PIPE
        if 'update_env' in kwargs:
            # make sure we update environment, not override it
            kwargs['env'] = dict(os_environ, **kwargs['update_env'])
            kwargs.pop('update_env')

        # thread
        thread = Thread(target=target, kwargs=kwargs)
        thread.start()
        thread.join(timeout)

        # timeout reached, terminate the thread
        if thread.is_alive():
            logger.error('Command {cmd} timed out after {t} seconds'.format(cmd=self.command,
                                                                            t=timeout))
            # this is tricky - we need to make sure we kill the process with all its subprocesses;
            # using just kill might create zombie process waiting for subprocesses to finish
            # and leaving us hanging on thread.join()
            # TODO: we should do the same for get_command_output!
            killpg(getpgid(self.process.pid), signal.SIGKILL)
            thread.join()
            if not self.error:
                self.error = 'Killed by timeout after {t} seconds'.format(t=timeout)
        if self.output:
            if is_json:
                self.output = json.loads(self.output)
            else:
                self.output = [f for f in self.output.split('\n') if f]

        return self.status, self.output, self.error

    @staticmethod
    def get_command_output(args, graceful=True, is_json=False, timeout=300, **kwargs):
        """Wrap the function to get command output with implicit timeout of 5 minutes.

        Fix: the ``timeout`` argument was previously ignored in favour of a
        hard-coded 10800 seconds, contradicting both the 300-second default
        and this docstring; the caller's value is now passed through.
        """
        kwargs['timeout'] = timeout
        return get_command_output(args, graceful, is_json, **kwargs)
def get_command_output(args, graceful=True, is_json=False, **kwargs):
    """Improved version of subprocess.check_output.

    :param args: command and its arguments, as accepted by ``check_output``
    :param graceful: bool, if False, raise Exception when command fails
    :param is_json: bool, if True, return decoded json
    :return: list of strings, output which command emitted
        (a parsed JSON object when ``is_json`` is True; [] on tolerated failure)
    :raises TaskError: when the command fails and ``graceful`` is False
    """
    logger.debug("running command %s", args)
    try:
        # Using universal_newlines mostly for the side-effect of decoding
        # the output as UTF-8 text on Python 3.x
        out = check_output(args, universal_newlines=True, **kwargs)
    except (CalledProcessError, TimeoutExpired) as ex:
        # TODO: we may want to use subprocess.Popen to be able to also print stderr here
        # (while not mixing it with stdout that is returned if the subprocess succeeds)
        if isinstance(ex, TimeoutExpired):
            logger.warning("command %s timed out:\n%s", args, ex.output)
        else:
            logger.warning("command %s ended with %s\n%s", args, ex.returncode, ex.output)
        if not graceful:
            logger.error(ex)
            # we don't know whether this is a bug or the command was simply called
            # with invalid/unsupported input. Caller needs to catch the exception
            # and decide.
            raise TaskError("Error during running command %s: %r" % (args, ex.output))
        else:
            logger.debug("Ignoring because graceful flag is set")
        # tolerated failure: behave as if the command produced no output
        return []
    else:
        if is_json:
            # FIXME: some error handling here would be great
            return json.loads(out)
        else:
            return [f for f in out.split('\n') if f]  # py2 & 3 compat
def get_all_files_from(target, path_filter=None, file_filter=None):
    """Yield absolute paths of all files under *target*.

    :param target: directory to walk recursively
    :param path_filter: optional predicate applied to the absolute path
    :param file_filter: optional predicate applied to the bare file name
    """
    for root, _dirs, file_names in walk(target):
        for file_name in file_names:
            full_path = os_path.abspath(os_path.join(root, file_name))
            # apply the filters before yielding anything
            if path_filter is not None and not path_filter(full_path):
                continue
            if file_filter is not None and not file_filter(file_name):
                continue
            yield full_path
def hidden_path_filter(item):
    """Return False when the path contains a hidden (dot-prefixed) component."""
    for component in item.split(os_path.sep):
        if component.startswith('.'):
            return False
    return True
def json_serial(obj):
    """JSON serializer hook: format ``datetime`` objects as ISO-8601 strings.

    :raises TypeError: for any type other than ``datetime.datetime``
    """
    if not isinstance(obj, datetime.datetime):
        raise TypeError('Type {t} not serializable'.format(t=type(obj)))
    return obj.isoformat()
def in_path(directory, path):
    """Check whether *directory* occurs as a component of *path*.

    :param directory: str, single path component to look for
    :param path: str, path to search
    :return: True if *directory* is a component of *path*
    """
    return directory in path.split(os_path.sep)
def skip_git_files(path):
    """Predicate that is False for any path inside a ``.git`` directory."""
    is_git_internal = in_path('.git', path)
    return not is_git_internal
class ThreadPool(object):
    """Implementation of thread pool.

    Workers pull arguments from an internal queue and call ``target`` on
    each; a worker exits once the queue has stayed empty for ``timeout``
    seconds. Usable as a context manager (start on enter, join on exit).
    """

    def __init__(self, target, num_workers=10, timeout=3):
        """Initialize `ThreadPool`.

        :param target: Function that accepts exactly one argument
        :param num_workers: int, number of worker threads to spawn
        :param timeout: int, maximum number of seconds workers wait for new task
        """
        self.target = target
        self.num_workers = num_workers
        self.timeout = timeout
        # unbounded FIFO of pending task arguments
        self.queue = Queue()
        self._threads = [Thread(target=self._work) for i in range(0, num_workers)]

    def add_task(self, arg):
        """Enqueue a new task.

        :param arg: argument for the `target` that was passed to constructor
        """
        self.queue.put(arg)

    def start(self):
        """Start processing by all threads."""
        [t.start() for t in self._threads]

    def join(self):
        """Join all threads.

        NOTE: each worker only exits after its queue read times out, so this
        blocks for at least ``timeout`` seconds after the queue drains.
        """
        [t.join() for t in self._threads]
        self.queue.join()

    def _work(self):
        # Worker loop: process queued arguments until the queue stays empty
        # for ``timeout`` seconds, then exit.
        while True:
            try:
                arg = self.queue.get(block=True, timeout=self.timeout)
            except Empty:
                break
            try:
                self.target(arg)
            finally:
                # mark the task done even if ``target`` raised, so that
                # ``queue.join()`` cannot block forever; note an exception
                # from ``target`` still terminates this worker thread
                self.queue.task_done()

    def __enter__(self):
        """Enter context manager: start all workers."""
        self.start()
        return self

    def __exit__(self, *_args, **_kwargs):
        """Exit context manager: wait for all workers to finish."""
        self.join()
def compute_digest(target, function='sha256', raise_on_error=False):
    """Compute digest of a provided file.

    :param target: str, file path
    :param function: str, prefix name of the hashing function
    :param raise_on_error: bool, raise an error when computation wasn't successful if set to True
    :returns str or None, computed digest

    `function` requires an executable with matching name on the system (sha256sum, sha1sum etc.)
    """
    function += 'sum'
    # returns e.g.:
    # 65ecde5d025fcf57ceaa32230e2ff884ab204065b86e0e34e609313c7bdc7b47 /etc/passwd
    data = TimedCommand.get_command_output([function, target], graceful=not raise_on_error)
    try:
        # first whitespace-separated token of the first output line is the digest
        return data[0].split(' ')[0].strip()
    except IndexError as exc:
        # empty output: the *sum tool produced nothing (missing file, directory, ...)
        logger.error("unable to compute digest of %r, likely it doesn't exist or is a directory",
                     target)
        if raise_on_error:
            raise RuntimeError("can't compute digest of %s" % target) from exc
        # when errors are tolerated, fall through and return None
class MavenCoordinates(object):
    """Represents Maven coordinates.

    https://maven.apache.org/pom.html#Maven_Coordinates
    """

    # packaging assumed when none is given; omitted from string forms
    _default_packaging = 'jar'

    def __init__(self, groupId, artifactId, version='',
                 classifier='', packaging=None):
        """Initialize attributes."""
        self.groupId = groupId
        self.artifactId = artifactId
        self.classifier = classifier
        self.packaging = packaging or MavenCoordinates._default_packaging
        self.version = version

    def is_valid(self):
        """Check if the current coordinates are valid (all mandatory parts set)."""
        return self.groupId and self.artifactId and self.version and self.packaging

    def to_str(self, omit_version=False):
        """Return string representation of the coordinates.

        :param omit_version: bool, leave the version out of the result
        """
        mvnstr = "{g}:{a}".format(g=self.groupId, a=self.artifactId)
        pack = self.packaging
        if pack == MavenCoordinates._default_packaging:
            # the default packaging is not spelled out
            pack = ''

        if pack:
            mvnstr += ":{p}".format(p=pack)

        if self.classifier:
            if not pack:
                mvnstr += ':'
            mvnstr += ":{c}".format(c=self.classifier)

        if not self.version or omit_version:
            if self.classifier or pack:
                # keep a trailing separator so the position of the omitted
                # version stays unambiguous
                mvnstr += ':'
        else:
            mvnstr += ":{v}".format(v=self.version)

        return mvnstr

    def to_repo_url(self, ga_only=False):
        """Return relative path to the artifact in Maven repository.

        :param ga_only: bool, return only the groupId/artifactId part
        """
        if ga_only:
            return "{g}/{a}".format(g=self.groupId.replace('.', '/'),
                                    a=self.artifactId)
        dir_path = "{g}/{a}/{v}/".format(g=self.groupId.replace('.', '/'),
                                         a=self.artifactId,
                                         v=self.version)
        classifier = "-{c}".format(c=self.classifier) if self.classifier else ''
        filename = "{a}-{v}{c}.{e}".format(a=self.artifactId,
                                           v=self.version,
                                           c=classifier,
                                           e=self.packaging)
        return dir_path + filename

    @staticmethod
    def _parse_string(coordinates_str):
        """Parse string representation into a dictionary.

        :raises ValueError: when the string has an unsupported number of parts
        """
        a = {'groupId': '',
             'artifactId': '',
             'packaging': MavenCoordinates._default_packaging,
             'classifier': '',
             'version': ''}

        ncolons = coordinates_str.count(':')
        if ncolons == 1:
            a['groupId'], a['artifactId'] = coordinates_str.split(':')
        elif ncolons == 2:
            a['groupId'], a['artifactId'], a['version'] = coordinates_str.split(':')
        elif ncolons == 3:
            a['groupId'], a['artifactId'], a['packaging'], a['version'] = coordinates_str.split(':')
        elif ncolons == 4:
            a['groupId'], a['artifactId'], a['packaging'], a['classifier'], a['version'] = \
                coordinates_str.split(':')
        else:
            # fix: the message was previously passed as two arguments
            # (logging-style), so the %s placeholder was never interpolated
            raise ValueError('Invalid Maven coordinates %s' % coordinates_str)

        return a

    def __repr__(self):
        """Represent as string."""
        return self.to_str()

    def __eq__(self, other):
        """Implement == operator."""
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Implement != operator."""
        return not self.__eq__(other)

    @classmethod
    def normalize_str(cls, coordinates_str):
        """Normalize string representation (round-trip through parsing)."""
        return cls.from_str(coordinates_str).to_str()

    @classmethod
    def from_str(cls, coordinates_str):
        """Create instance from string."""
        coordinates = MavenCoordinates._parse_string(coordinates_str)
        return cls(**coordinates)
def parse_gh_repo(potential_url):
    """Cover the following variety of URL forms for Github repo referencing.

    1) www.github.com/foo/bar
    2) (same as above, but with ".git" in the end)
    3) (same as the two above, but without "www.")
    # all of the three above, but starting with "http://", "https://", "git://" or "git+https://"
    4) git@github.com:foo/bar
    5) (same as above, but with ".git" in the end)
    6) (same as the two above but with "ssh://" in front or with "git+ssh" instead of "git")

    We return repository name in form `<username>/<reponame>` or `None` if this does not
    seem to be a Github repo (or if someone invented yet another form that we can't parse yet...)

    Notably, the Github repo *must* have exactly username and reponame, nothing else and nothing
    more. E.g. `github.com/<username>/<reponame>/<something>` is *not* recognized.
    """
    # TODO: reduce cyclomatic complexity
    if not potential_url:
        return None

    repo_name = None
    # transform 4-6 to a URL-like string, so that we can handle it together with 1-3
    if '@' in potential_url:
        split = potential_url.split('@')
        if len(split) == 2 and split[1].startswith('github.com:'):
            potential_url = 'http://' + split[1].replace('github.com:', 'github.com/')

    # make it parsable by urlparse if it doesn't contain scheme
    if not potential_url.startswith(('http://', 'https://', 'git://', 'git+https://')):
        potential_url = 'http://' + potential_url

    # urlparse should handle it now
    parsed = urlparse(potential_url)
    if parsed.netloc in ['github.com', 'www.github.com'] and \
            parsed.scheme in ['http', 'https', 'git', 'git+https']:
        repo_name = parsed.path
        if repo_name.endswith('.git'):
            repo_name = repo_name[:-len('.git')]

    # fix: bail out for non-GitHub URLs; previously execution fell through to
    # ``repo_name.count`` with repo_name still None, raising AttributeError
    if not repo_name:
        return None

    repo_name = repo_name.strip('/')
    # keep only the first two path components (<username>/<reponame>)
    if len(repo_name.split('/')) > 2:
        temp_list = repo_name.split('/')
        repo_name = temp_list[0] + '/' + temp_list[1]

    if repo_name.count('/') != 1:
        return None

    return repo_name
def url2git_repo(url):
    """Convert URL to git repo URL and force use HTTPS.

    :param url: repository URL in one of the common git notations
    :return: normalized repo URL
    :raises ValueError: for an unparsable ``git@`` style URL
    """
    if url.startswith('git+'):
        # e.g. "git+https://host/repo" -> "https://host/repo"
        return url[len('git+'):]
    if url.startswith('git@'):
        # e.g. "git@host:owner/repo" -> "https://host/owner/repo"
        parts = url[len('git@'):].split(':')
        if len(parts) != 2:
            raise ValueError("Unable to parse git repo URL '%s'" % str(parts))
        return 'https://{}/{}'.format(parts[0], parts[1])
    if url.startswith(('http://', 'https://', 'git://')):
        return url
    # no scheme at all: default to plain HTTP
    return 'http://' + url
def case_sensitivity_transform(ecosystem, name):
    """Transform package name to lowercase for ecosystem that are not case sensitive.

    :param ecosystem: name of ecosystem in which the package sits
    :param name: name of the package
    :return: transformed package name based on ecosystem package case sensitivity
    """
    # only PyPI-backed ecosystems are treated as case-insensitive here;
    # the lookup requires a live "BayesianPostgres" storage connection
    if Ecosystem.by_name(StoragePool.get_connected_storage('BayesianPostgres').session,
                         ecosystem).is_backed_by(EcosystemBackend.pypi):
        return name.lower()

    return name
def get_session_retry(retries=3, backoff_factor=0.2, status_forcelist=(404, 500, 502, 504),
                      session=None):
    """Return *session* (or a fresh one) with a retrying HTTP adapter mounted.

    :param retries: total/read/connect retry count
    :param backoff_factor: back-off multiplier between attempts
    :param status_forcelist: HTTP status codes that trigger a retry
    :param session: optional existing session to configure
    :return: the configured ``requests.Session``
    """
    session = session or requests.Session()
    retry_config = Retry(total=retries, read=retries, connect=retries,
                         backoff_factor=backoff_factor,
                         status_forcelist=status_forcelist)
    # mount the retrying adapter for plain-HTTP endpoints
    session.mount('http://', HTTPAdapter(max_retries=retry_config))
    return session
def normalize_package_name(ecosystem_backend, name):
    """Normalize package name.

    :param ecosystem_backend: str, ecosystem backend
    :param name: str, package name
    :return: str, normalized package name for supported ecosystem backend,
        the same package name otherwise
    """
    if ecosystem_backend == 'pypi':
        # https://www.python.org/dev/peps/pep-0503/#normalized-names
        return re.sub(r'[-_.]+', '-', name).lower()
    if ecosystem_backend == 'maven':
        # https://maven.apache.org/pom.html#Maven_Coordinates
        return MavenCoordinates.normalize_str(name)
    if ecosystem_backend == 'go':
        # go package name is the host+path part of a URL, thus it can be URL encoded
        return unquote(name)
    # npm (and any unknown backend) is left untouched
    return name
def get_user_email(user_profile):
    """Return the profile's email, falling back to a default address."""
    default_email = 'bayesian@redhat.com'
    if user_profile is None:
        return default_email
    return user_profile.get('email', default_email)
@tenacity.retry(stop=tenacity.stop_after_attempt(3),
                wait=tenacity.wait_exponential(multiplier=2, min=10, max=60))
def get_response(url):
    """Wrap requests which tries to get response.

    Retried up to 3 times with exponential back-off by ``tenacity``.

    :param url: URL where to do the request
    :return: content of response's json ({} for HTTP 404/204)
    :raises NotABugTaskError: when the request ultimately fails
    """
    try:
        response = requests.get(url, headers=get_header())
        # If status code is 404 or 204 then don't retry
        if response.status_code in [404, 204]:
            return {}
        response.raise_for_status()
        response = response.json()
        return response
    except HTTPError as err:
        message = "Failed to get results from {url} with {err}".format(url=url, err=err)
        logger.error(message)
        raise NotABugTaskError(message) from err
def add_maven_coords_to_set(coordinates_str, gav_set):
    """Parse Maven coordinates and add them to *gav_set* as an EPV string."""
    coords = MavenCoordinates.from_str(coordinates_str)
    gav_set.add("{ecosystem}:{group_id}:{artifact_id}:{version}".format(
        ecosystem="maven",
        group_id=coords.groupId,
        artifact_id=coords.artifactId,
        version=coords.version
    ))
def peek(iterable):
    """Return the first item of *iterable*, or None if it is exhausted."""
    return next(iterable, None)
@tenacity.retry(stop=tenacity.stop_after_attempt(3),
                wait=tenacity.wait_exponential(multiplier=2, min=10, max=60))
def get_gh_contributors(url):
    """Get number of contributors from Git URL.

    :param url: URL where to do the request
    :return: length of contributor's list; -1 on HTTP 404, 0 on HTTP 204
    :raises NotABugTaskError: when the GitHub request ultimately fails
    """
    try:
        response = requests.get("{}?per_page=1".format(url),
                                headers=get_header())
        # If status code is 404 or 204 then don't retry
        if response.status_code == 404:
            return -1
        if response.status_code == 204:
            return 0
        response.raise_for_status()
        # with per_page=1 the "last" pagination link's page number equals the
        # contributor count; absence of link headers means a single page.
        # NOTE(review): parse_qs is applied to the full URL instead of just its
        # query string — this only works while 'page' is not the first query
        # parameter; consider urlparse(...).query first.
        contributors_count = int(parse_qs(response.links['last']['url'])['page'][0]) \
            if response.links else 1
        return contributors_count
    except HTTPError as err:
        raise NotABugTaskError(err) from err
def store_data_to_s3(arguments, s3, result):
    """Store *result* for *arguments* in the given S3 bucket.

    Failures are logged and swallowed on purpose (best-effort persistence).
    """
    try:
        s3.store_data(arguments, result)
    except Exception as exc:
        logger.error(exc)
@tenacity.retry(stop=tenacity.stop_after_attempt(4),
                wait=tenacity.wait_exponential(multiplier=3, min=10, max=60))
def get_gh_query_response(repo_name, status, type, start_date, end_date, event):
    """Get details of PRs and Issues from given Github repo.

    :param repo_name: Github repo name
    :param status: status of issue Ex. open/closed (empty string means both)
    :param type: type of issue to set in search query Ex. pr/issue
    :param start_date: date since data has to be collected
    :param end_date: date upto data has to be collected
    :param event: event which needs to be considered Ex. created/closed
    :return: count of issue/pr based on criteria
    """
    try:
        """
        Create search query for given criteria
        page and per_page is set to 1, as search query provides count of entities
        matching with given criteria in all pages we dont need to collect all data.
        """
        url = "{GITHUB_API}search/issues?" \
              "page=1" \
              "&per_page=1" \
              "&q=repo:{repo_name}" \
              "+is:{type}" \
              "+{event}:{start_date}..{end_date}"\
            .format(GITHUB_API=configuration.GITHUB_API,
                    repo_name=repo_name,
                    start_date=start_date,
                    end_date=end_date,
                    type=type,
                    event=event)
        # If status is set to closed by default open & closed both are set
        if status:
            url = '{url}+is:{status}'.format(url=url, status=status)
        response = requests.get(url, headers=get_header())
        response.raise_for_status()
        resp = response.json()
        # "total_count" covers all pages of the search result
        return resp.get('total_count', 0)
    except Exception as e:
        # logged here, re-raised so tenacity can retry
        logger.error(e)
        raise
@tenacity.retry(stop=tenacity.stop_after_attempt(2),
                wait=tenacity.wait_exponential(multiplier=1, min=4, max=10))
def execute_gh_queries(repo_name, start_date, end_date):
    """Get details of Github PR/Issues based on given date range.

    :param repo_name: Github repo name
    :param start_date: date since data has to be collected
    :param end_date: date upto data has to be collected
    :return: 4-tuple (pr_opened, pr_closed, issues_opened, issues_closed)
    """
    try:
        # Get PR details based on date range provided
        pr_opened = get_gh_query_response(repo_name, '',
                                          'pr', start_date, end_date, 'created')
        pr_closed = get_gh_query_response(repo_name, 'closed',
                                          'pr', start_date, end_date, 'closed')
        # Get Issue details based on date range provided
        issues_opened = get_gh_query_response(repo_name,
                                              '', 'issue', start_date, end_date, 'created')
        issues_closed = get_gh_query_response(repo_name,
                                              'closed', 'issue', start_date, end_date, 'closed')
        return pr_opened, pr_closed, issues_opened, issues_closed
    except Exception as e:
        # logged here, re-raised so tenacity can retry
        logger.error(e)
        raise
def get_gh_pr_issue_counts(repo_name):
    """Get details of Github PR/Issues for given repo.

    :param repo_name: Github repo name
    :return: Dict having Issue/PR details; a count of -1 marks a window whose
        GitHub queries ultimately failed
    """
    today = datetime.date.today()

    # Get previous month start and end dates (30-day window ending today)
    last_month_end_date = today
    last_month_start_date = today - datetime.timedelta(days=30)

    # Get PR/Issue counts for previous month
    try:
        pr_opened_last_month, \
            pr_closed_last_month, \
            issues_opened_last_month, \
            issues_closed_last_month = execute_gh_queries(repo_name,
                                                          last_month_start_date,
                                                          last_month_end_date)
    except Exception as e:
        logger.error(e)
        # -1 marks "data unavailable" for the whole monthly window
        pr_opened_last_month = \
            pr_closed_last_month = \
            issues_opened_last_month = \
            issues_closed_last_month = -1

    # Get previous year start and end dates (365-day window ending today)
    last_year_start_date = today - datetime.timedelta(days=365)
    last_year_end_date = today

    # Get PR/Issue counts for previous year
    try:
        pr_opened_last_year, \
            pr_closed_last_year, \
            issues_opened_last_year, \
            issues_closed_last_year = execute_gh_queries(repo_name,
                                                         last_year_start_date,
                                                         last_year_end_date)
    except Exception as e:
        logger.error(e)
        # -1 marks "data unavailable" for the whole yearly window
        pr_opened_last_year = \
            pr_closed_last_year = \
            issues_opened_last_year = \
            issues_closed_last_year = -1

    # Set output in required format by data importer
    result = {
        "updated_pull_requests": {
            "year": {"opened": pr_opened_last_year, "closed": pr_closed_last_year},
            "month": {"opened": pr_opened_last_month, "closed": pr_closed_last_month}
        },
        "updated_issues": {
            "year": {"opened": issues_opened_last_year, "closed": issues_closed_last_year},
            "month": {"opened": issues_opened_last_month, "closed": issues_closed_last_month}
        }
    }
    return result
def get_header():
    """Return HTTP headers for GitHub API calls.

    Always includes the Accept headers; adds a randomly selected GitHub
    token header from the configuration when one is available.
    """
    headers = {
        'Accept': 'application/vnd.github.mercy-preview+json, '  # for topics
                  'application/vnd.github.v3+json'  # recommended by GitHub for License API
    }
    try:
        _, header = configuration.select_random_github_token()
        headers.update(header)
    except F8AConfigurationException as e:
        # no token configured: proceed unauthenticated with base headers
        logger.error(e)
        headers.update({})  # NOTE(review): this update({}) is a no-op
    return headers
|
ocelot-collab/ocelot
|
ocelot/cpbd/coord_transform.py
|
"""
S.Tomin and I.Zagorodnov, 2017, DESY/XFEL
"""
from ocelot.common.globals import *
import logging

logger = logging.getLogger(__name__)

# Optional dependency: numexpr accelerates the coordinate transforms below.
try:
    import numexpr as ne
    ne_flag = True
except Exception:
    # fix: was a bare ``except:``, which would also swallow SystemExit and
    # KeyboardInterrupt raised during import
    logger.debug("coord_transform.py: module NUMEXPR is not installed. Install it to speed up calculation")
    ne_flag = False
def xp_2_xxstg_mad(xp, xxstg, gamref):
    """Convert particle coordinates to MAD format.

    :param xp: 6 x N array; rows 0-2 are positions, rows 3-5 momentum
        components (units consistent with ``m_e_eV`` — assumed eV; confirm)
    :param xxstg: 6 x N output array, filled in place and returned
    :param gamref: reference Lorentz factor
    :return: ``xxstg``
    """
    # to mad format
    N = xp.shape[1]
    pref = m_e_eV * np.sqrt(gamref ** 2 - 1)
    betaref = np.sqrt(1 - gamref ** -2)
    u = np.c_[xp[3], xp[4], xp[5]]
    if ne_flag:
        # numexpr path: same math as the numpy branch below, evaluated faster
        sum_u2 = ne.evaluate('sum(u * u, 1)')
        gamma = ne.evaluate('sqrt(1 + sum_u2 / m_e_eV ** 2)')
        beta = ne.evaluate('sqrt(1 - gamma ** -2)')
    else:
        gamma = np.sqrt(1 + np.sum(u * u, 1) / m_e_eV ** 2)
        beta = np.sqrt(1 - gamma ** -2)
    # NOTE(review): lexicographic version compare misclassifies e.g. "1.10";
    # harmless here because the fallback branch computes the same norm
    if np.__version__ > "1.8":
        p0 = np.linalg.norm(u, 2, 1).reshape((N, 1))
    else:
        # older numpy: norm() lacks the axis argument
        p0 = np.sqrt(u[:, 0] ** 2 + u[:, 1] ** 2 + u[:, 2] ** 2).reshape((N, 1))
    u = u / p0  # unit direction of each particle's momentum
    u0 = u[:, 0]
    u1 = u[:, 1]
    u2 = u[:, 2]
    if ne_flag:
        xp0 = xp[0]
        xp1 = xp[1]
        xp2 = xp[2]
        cdt = ne.evaluate('-xp2 / (beta * u2)')
        xxstg[0] = ne.evaluate('xp0 + beta * u0 * cdt')
        xxstg[2] = ne.evaluate('xp1 + beta * u1 * cdt')
        xxstg[5] = ne.evaluate('(gamma / gamref - 1) / betaref')
    else:
        cdt = -xp[2] / (beta * u2)
        xxstg[0] = xp[0] + beta * u0 * cdt
        xxstg[2] = xp[1] + beta * u1 * cdt
        xxstg[5] = (gamma / gamref - 1) / betaref
    xxstg[4] = cdt
    xxstg[1] = xp[3] / pref  # px normalized by the reference momentum
    xxstg[3] = xp[4] / pref  # py normalized by the reference momentum
    return xxstg
def xxstg_2_xp_mad(xxstg, xp, gamref):
    """Convert particle coordinates from MAD format back to position/momentum form.

    Counterpart of ``xp_2_xxstg_mad`` ("from mad format").

    :param xxstg: 6 x N array in MAD format
    :param xp: 6 x N output array, filled in place and returned
    :param gamref: reference Lorentz factor
    :return: ``xp``
    """
    # from mad format
    N = xxstg.shape[1]
    #pref = m_e_eV * np.sqrt(gamref ** 2 - 1)
    betaref = np.sqrt(1 - gamref ** -2)
    if ne_flag:
        # numexpr path: same math as the numpy branch below, evaluated faster
        xxstg1 = xxstg[1]
        xxstg3 = xxstg[3]
        xxstg5 = xxstg[5]
        gamma = ne.evaluate('(betaref * xxstg5 + 1) * gamref')
        beta = ne.evaluate('sqrt(1 - gamma ** -2)')
        pz2pref = ne.evaluate('sqrt(((gamma * beta) / (gamref * betaref)) ** 2 - xxstg1 ** 2 - xxstg3 ** 2)')
    else:
        gamma = (betaref * xxstg[5] + 1) * gamref
        beta = np.sqrt(1 - gamma ** -2)
        pz2pref = np.sqrt(((gamma * beta) / (gamref * betaref)) ** 2 - xxstg[1] ** 2 - xxstg[3] ** 2)
    u = np.c_[xxstg[1] / pz2pref, xxstg[3] / pz2pref, np.ones(N)]
    # NOTE(review): lexicographic version compare misclassifies e.g. "1.10";
    # harmless here because the fallback branch computes the same norm
    if np.__version__ > "1.8":
        norm = np.linalg.norm(u, 2, 1).reshape((N, 1))
    else:
        # older numpy: norm() lacks the axis argument
        norm = np.sqrt(u[:, 0] ** 2 + u[:, 1] ** 2 + u[:, 2] ** 2).reshape((N, 1))
    u = u / norm  # unit direction of each particle's momentum
    u0 = u[:, 0]
    u1 = u[:, 1]
    u2 = u[:, 2]
    if ne_flag:
        xxstg0 = xxstg[0]
        xxstg2 = xxstg[2]
        xxstg4 = xxstg[4]
        xp[0] = ne.evaluate('xxstg0 - u0 * beta * xxstg4')
        xp[1] = ne.evaluate('xxstg2 - u1 * beta * xxstg4')
        xp[2] = ne.evaluate('-u2 * beta * xxstg4')
        xp[3] = ne.evaluate('u0 * gamma * beta * m_e_eV')
        xp[4] = ne.evaluate('u1 * gamma * beta * m_e_eV')
        xp[5] = ne.evaluate('u2 * gamma * beta * m_e_eV')
    else:
        xp[0] = xxstg[0] - u0 * beta * xxstg[4]
        xp[1] = xxstg[2] - u1 * beta * xxstg[4]
        xp[2] = -u2 * beta * xxstg[4]
        xp[3] = u0 * gamma * beta * m_e_eV
        xp[4] = u1 * gamma * beta * m_e_eV
        xp[5] = u2 * gamma * beta * m_e_eV
    return xp
|
nansencenter/nansat
|
nansat/tests/test_node.py
|
#------------------------------------------------------------------------------
# Name: test_node.py
# Purpose: Test the Node class
#
# Author: Aleksander Vines
#
# Created: 2016-02-26
# Last modified:2016-02-26T16:00
# Copyright: (c) NERSC
# Licence: This file is part of NANSAT. You can redistribute it or modify
# under the terms of GNU General Public License, v.3
# http://www.gnu.org/licenses/gpl-3.0.html
#------------------------------------------------------------------------------
from __future__ import absolute_import
import unittest
import os
from . import nansat_test_data as ntd
from nansat.node import Node
class NodeTest(unittest.TestCase):
    # Tests for nansat.node.Node: construction, attribute handling,
    # XML (de)serialization and child-node manipulation.

    def test_creation(self):
        # value is stripped on creation; item assignment replaces the value
        tag = 'Root'
        value = ' Value '
        anAttr = 'elValue'
        new_value = 'New Value'
        node = Node(tag, value=value, anAttr=anAttr)
        self.assertEqual(node.tag, tag)
        self.assertDictEqual(node.attributes, {'anAttr': anAttr})
        self.assertEqual(node.value, value.strip())
        self.assertEqual(node[tag], value.strip())
        node[tag] = new_value
        self.assertEqual(node.value, new_value)

    def test_getAttributeList(self):
        # names and values come back as two parallel lists
        tag = 'Root'
        value = ' Value '
        anAttr = 'elValue'
        secondAttr = 'Some value'
        finalAttribute = 'A last value'
        node = Node(tag, value=value, anAttr=anAttr, secondAttr=secondAttr,
                    finalAttribute=finalAttribute)
        nameList, valList = node.getAttributeList()
        self.assertIsInstance(nameList, list)
        self.assertIsInstance(valList, list)
        index = valList.index(anAttr)
        self.assertEqual(nameList[index], 'anAttr')
        index = valList.index(secondAttr)
        self.assertEqual(nameList[index], 'secondAttr')
        index = valList.index(finalAttribute)
        self.assertEqual(nameList[index], 'finalAttribute')

    def test_insert(self):
        # inserting raw XML must be equivalent to creating a Node from it
        contents = ('<Element attr="attrValue"><Subnode>testValue</Subnode>'
                    '</Element>')
        root = Node('root')
        root2 = root.insert(contents)
        element = root2.node('Element')
        rawElement = Node.create(contents)
        self.assertEqual(element.xml(), rawElement.xml())

    def test_create(self):
        # Node.create from a file must match parsing the file's own contents
        test_file_element = os.path.join(ntd.test_data_path,
                                         'some_xml_file.xml')
        fileElement = Node.create(test_file_element)
        with open(test_file_element, 'r') as myfile:
            contents = myfile.read().replace('\n', '')
        root = Node('root')
        root = root.insert(contents)
        rawElement = root.children[0]
        self.assertEqual(fileElement.xml(), rawElement.xml())

    def test_delete_attribute(self):
        tag = 'Root'
        value = ' Value '
        anAttr = 'elValue'
        node = Node(tag, value=value, anAttr=anAttr)
        self.assertIn('anAttr', node.attributes)
        node.delAttribute('anAttr')
        self.assertNotIn('anAttr', node.attributes)

    def test_add_node(self):
        # "+=" appends a child node
        rootTag = 'Root'
        root = Node(rootTag)
        firstLevelTag = 'FirstLevel'
        firstLevel = Node(firstLevelTag)
        root += firstLevel
        self.assertIn(firstLevel, root.children)

    def test_add_nodes(self):
        # both "+=" and "+" append children
        rootTag = 'Root'
        root = Node(rootTag)
        firstLevelTag = 'FirstLevel'
        firstLevel = Node(firstLevelTag)
        root += firstLevel
        firstLevel2 = Node(firstLevelTag)
        root += firstLevel2
        firstLevel2ndTag = 'FirstLevel2ndTag'
        firstLevel3 = Node(firstLevel2ndTag)
        root = root + firstLevel3
        self.assertIn(firstLevel, root.children)
        self.assertIn(firstLevel2, root.children)
        self.assertIn(firstLevel3, root.children)

    def test_xml(self):
        # serialization indents children and terminates with a newline
        rootTag = 'Root'
        root = Node(rootTag)
        firstLevelTag = 'FirstLevel'
        firstLevel = Node(firstLevelTag)
        root += firstLevel
        firstLevel2 = Node(firstLevelTag)
        root += firstLevel2
        firstLevel2ndTag = 'FirstLevel2ndTag'
        firstLevel3 = Node(firstLevel2ndTag)
        root += firstLevel3
        self.assertEqual(root.xml(),
                         ('<Root>\n'
                          ' <FirstLevel/>\n'
                          ' <FirstLevel/>\n'
                          ' <FirstLevel2ndTag/>\n'
                          '</Root>\n'),)

    def test_replace_node(self):
        # replaceNode swaps the n-th child with the given tag
        rootTag = 'Root'
        root = Node(rootTag)
        firstLevelTag = 'FirstLevel'
        firstLevel = Node(firstLevelTag)
        root += firstLevel
        firstLevel2 = Node(firstLevelTag)
        root += firstLevel2
        firstLevel2ndTag = 'FirstLevel2ndTag'
        firstLevel3 = Node(firstLevel2ndTag)
        root.replaceNode(firstLevelTag, 1, firstLevel3)
        self.assertIn(firstLevel, root.children)
        self.assertNotIn(firstLevel2, root.children)
        self.assertIn(firstLevel3, root.children)
        self.assertEqual(len(root.children), 2)

    def test_search_node(self):
        # node(tag, n) returns the n-th child with the given tag
        rootTag = 'Root'
        root = Node(rootTag)
        firstLevelTag = 'FirstLevel'
        firstLevel = Node(firstLevelTag)
        root += firstLevel
        firstLevel2 = Node(firstLevelTag)
        root += firstLevel2
        firstLevel2ndTag = 'FirstLevel2ndTag'
        firstLevel3 = Node(firstLevel2ndTag)
        root += firstLevel3
        self.assertEqual(root.node(firstLevelTag,0), firstLevel)
        self.assertEqual(root.node(firstLevelTag,1), firstLevel2)

    def test_str(self):
        tag = 'Root'
        value = 'Value'
        node = Node(tag, value=value)
        self.assertEqual(str(node), '%s\n value: [%s]' % (tag, value))
# Allow running this test module directly with ``python test_node.py``.
if __name__ == "__main__":
    unittest.main()
|
adazey/Muzez
|
libs/soundcloud/tests/test_client.py
|
import soundcloud
from soundcloud.tests.utils import MockResponse
try:
from urllib import urlencode
except ImportError:
from urllib.parse import urlencode
from nose.tools import eq_, raises
from fudge import patch
def test_kwargs_parsing_valid():
    """Test that valid kwargs are stored as properties on the client."""
    client = soundcloud.Client(client_id='foo', client_secret='foo')
    assert isinstance(client, soundcloud.Client)
    eq_('foo', client.client_id)
    # a fully-specified client keeps each credential it was given
    client = soundcloud.Client(client_id='foo', client_secret='bar',
                               access_token='baz', username='you',
                               password='secret', redirect_uri='foooo')
    eq_('foo', client.client_id)
    eq_('baz', client.access_token)
@raises(AttributeError)
def test_kwargs_parsing_invalid():
    """Test that unknown kwargs are ignored."""
    # 'foo' is not a recognized kwarg, so it is never stored and
    # attribute access must raise AttributeError.
    soundcloud.Client(foo='bar', client_id='bar').foo
def test_url_creation():
    """Test that resources are turned into urls properly."""
    client = soundcloud.Client(client_id='foo')
    # Leading/trailing slashes are normalized away.
    for resource in ('tracks', '/tracks/'):
        eq_('https://api.soundcloud.com/tracks',
            client._resolve_resource_name(resource))
def test_url_creation_options():
    """Test that resource resolving works with different options."""
    client = soundcloud.Client(client_id='foo', use_ssl=False)
    client.host = 'soundcloud.dev'
    resolved = client._resolve_resource_name('apps/132445')
    # use_ssl=False yields an http scheme against the overridden host.
    eq_('http://soundcloud.dev/apps/132445', resolved)
def test_method_dispatching():
    """Test that getattr is doing right by us."""
    client = soundcloud.Client(client_id='foo')
    for verb in ('get', 'post', 'put', 'delete', 'head'):
        dispatched = getattr(client, verb)
        # Each HTTP verb resolves to a partial of _request bound to the verb.
        eq_((verb,), dispatched.args)
        eq_('_request', dispatched.func.__name__)
def test_host_config():
    """We should be able to set the host on the client."""
    custom = soundcloud.Client(client_id='foo', host='api.soundcloud.dev')
    eq_('api.soundcloud.dev', custom.host)
    default = soundcloud.Client(client_id='foo')
    eq_('api.soundcloud.com', default.host)
@patch('requests.get')
def test_disabling_ssl_verification(fake_get):
    """We should be able to disable ssl verification when we are in dev mode"""
    client = soundcloud.Client(client_id='foo', host='api.soundcloud.dev',
                               verify_ssl=False)
    # client_id is merged into the querystring alongside caller kwargs.
    expected_url = '%s?%s' % (
        client._resolve_resource_name('tracks'),
        urlencode({
            'limit': 5,
            'client_id': 'foo'
        }))
    headers = {
        'User-Agent': soundcloud.USER_AGENT,
        'Accept': 'application/json'
    }
    # The fudge expectation fails the test unless requests.get is called
    # exactly once with these arguments, i.e. with verify=False propagated.
    (fake_get.expects_call()
             .with_args(expected_url,
                        headers=headers,
                        verify=False,
                        allow_redirects=True)
             .returns(MockResponse("{}")))
    client.get('tracks', limit=5)
@raises(AttributeError)
def test_method_dispatching_invalid_method():
    """Test that getattr raises an attributeerror if we give it garbage."""
    soundcloud.Client(client_id='foo').foo()
@patch('requests.get')
def test_method_dispatching_get_request_readonly(fake_get):
    """Test that calling client.get() results in a proper call
    to the get function in the requests module with the provided
    kwargs as the querystring.
    """
    client = soundcloud.Client(client_id='foo')
    # client_id is always appended to the caller-supplied query params.
    expected_url = '%s?%s' % (
        client._resolve_resource_name('tracks'),
        urlencode({
            'limit': 5,
            'client_id': 'foo'
        }))
    headers = {
        'User-Agent': soundcloud.USER_AGENT,
        'Accept': 'application/json'
    }
    # fudge verifies requests.get is invoked with exactly these arguments.
    (fake_get.expects_call()
             .with_args(expected_url, headers=headers, allow_redirects=True)
             .returns(MockResponse("{}")))
    client.get('tracks', limit=5)
@patch('requests.post')
def test_method_dispatching_post_request(fake_post):
    """Test that calling client.post() results in a proper call
    to the post function in the requests module.

    TODO: Revise once read/write support has been added.
    """
    client = soundcloud.Client(client_id='foo')
    expected_url = client._resolve_resource_name('tracks')
    # For POST, client_id travels in the form body, not the querystring.
    data = {
        'client_id': 'foo'
    }
    headers = {
        'User-Agent': soundcloud.USER_AGENT
    }
    # fudge verifies requests.post is invoked with exactly these arguments.
    (fake_post.expects_call()
              .with_args(expected_url,
                         data=data,
                         headers=headers,
                         allow_redirects=True)
              .returns(MockResponse("{}")))
    client.post('tracks')
@patch('requests.get')
def test_proxy_servers(fake_request):
    """Test that providing a dictionary of proxy servers works."""
    proxies = {
        'http': 'myproxyserver:1234'
    }
    client = soundcloud.Client(client_id='foo', proxies=proxies)
    expected_url = "%s?%s" % (
        client._resolve_resource_name('me'),
        urlencode({
            'client_id': 'foo'
        })
    )
    headers = {
        'User-Agent': soundcloud.USER_AGENT,
        'Accept': 'application/json'
    }
    # The proxies dict given at construction must be forwarded verbatim to
    # requests.get; fudge fails the test otherwise.
    (fake_request.expects_call()
                 .with_args(expected_url,
                            headers=headers,
                            proxies=proxies,
                            allow_redirects=True)
                 .returns(MockResponse("{}")))
    client.get('/me')
|
django-wiki/django-wiki
|
src/wiki/core/markdown/mdx/codehilite.py
|
import logging
import re
from markdown.extensions.codehilite import CodeHilite
from markdown.extensions.codehilite import CodeHiliteExtension
from markdown.preprocessors import Preprocessor
from markdown.treeprocessors import Treeprocessor
from wiki.core.markdown import add_to_registry
logger = logging.getLogger(__name__)
def highlight(code, config, tab_length, lang=None):
    """Run CodeHilite over ``code`` and wrap the markup in a styling div.

    ``config`` is the dict produced by CodeHiliteExtension.getConfigs().
    """
    hilite = CodeHilite(
        code,
        linenums=config["linenums"],
        guess_lang=config["guess_lang"],
        css_class=config["css_class"],
        style=config["pygments_style"],
        noclasses=config["noclasses"],
        tab_length=tab_length,
        use_pygments=config["use_pygments"],
        lang=lang,
    )
    return """<div class="codehilite-wrap">{}</div>""".format(hilite.hilite())
class WikiFencedBlockPreprocessor(Preprocessor):
    """
    This is a replacement of markdown.extensions.fenced_code which will
    directly and without configuration options invoke the vanilla CodeHilite
    extension.
    """

    # Matches an entire fenced block: the opening ``` or ~~~ fence, an
    # optional language tag, an optional hl_lines="..." option, the code
    # body, and a closing fence of the same kind.
    FENCED_BLOCK_RE = re.compile(
        r"""
(?P<fence>^(?:~{3,}|`{3,}))[ ]*         # Opening ``` or ~~~
(\{?\.?(?P<lang>[a-zA-Z0-9_+-]*))?[ ]*  # Optional {, and lang
# Optional highlight lines, single- or double-quote-delimited
(hl_lines=(?P<quot>"|')(?P<hl_lines>.*?)(?P=quot))?[ ]*
}?[ ]*\n                                # Optional closing }
(?P<code>.*?)(?<=\n)
(?P=fence)[ ]*$""",
        re.MULTILINE | re.DOTALL | re.VERBOSE,
    )
    CODE_WRAP = "<pre>%s</pre>"

    def __init__(self, md):
        super().__init__(md)
        self.checked_for_codehilite = False
        # NOTE(review): `self.config` is assigned externally by
        # WikiCodeHiliteExtension.extendMarkdown() before run() is invoked —
        # confirm all registration paths set it.
        self.codehilite_conf = {}

    def run(self, lines):
        """Match and store Fenced Code Blocks in the HtmlStash."""
        text = "\n".join(lines)
        while 1:
            m = self.FENCED_BLOCK_RE.search(text)
            if m:
                lang = ""
                if m.group("lang"):
                    lang = m.group("lang")
                # Render the fenced code through CodeHilite and stash the raw
                # HTML so later markdown passes leave it untouched.
                html = highlight(
                    m.group("code"), self.config, self.markdown.tab_length, lang=lang
                )
                placeholder = self.markdown.htmlStash.store(html)
                # Splice the placeholder in and re-scan the whole text for
                # the next fenced block.
                text = "%s\n%s\n%s" % (text[: m.start()], placeholder, text[m.end() :])
            else:
                break
        return text.split("\n")
class HiliteTreeprocessor(Treeprocessor):
    """Hilight source code in code blocks."""

    def run(self, root):
        """Find code blocks and store in htmlStash."""
        for pre in root.iter("pre"):
            # Only handle the canonical <pre><code>...</code></pre> shape.
            if len(pre) != 1 or pre[0].tag != "code":
                continue
            rendered = highlight(pre[0].text, self.config, self.markdown.tab_length)
            stashed = self.markdown.htmlStash.store(rendered)
            # Replace the element with a placeholder <p>; the raw-HTML pass
            # later swaps it for the stashed markup.
            pre.clear()
            pre.tag = "p"
            pre.text = stashed
class WikiCodeHiliteExtension(CodeHiliteExtension):
    """
    markdown.extensions.codehilite cannot configure container tags but forces
    code to be in <table></table>, so we had to overwrite some of the code
    because it's hard to extend...
    """

    def extendMarkdown(self, md):
        """Add HilitePostprocessor to Markdown instance."""
        hiliter = HiliteTreeprocessor(md)
        hiliter.config = self.getConfigs()
        # Evict any treeprocessor registered by the stock 'codehilite'
        # extension so ours is the only one doing highlighting.
        if "hilite" in md.treeprocessors:
            logger.warning(
                "Replacing existing 'hilite' extension - please remove "
                "'codehilite' from WIKI_MARKDOWN_KWARGS"
            )
            del md.treeprocessors["hilite"]
        add_to_registry(md.treeprocessors, "hilite", hiliter, "<inline")
        # Likewise replace the stock fenced-code preprocessor registration.
        if "fenced_code_block" in md.preprocessors:
            logger.warning(
                "Replacing existing 'fenced_code_block' extension - please remove "
                "'fenced_code_block' or 'extras' from WIKI_MARKDOWN_KWARGS"
            )
            del md.preprocessors["fenced_code_block"]
        hiliter = WikiFencedBlockPreprocessor(md)
        hiliter.config = self.getConfigs()
        add_to_registry(
            md.preprocessors, "fenced_code_block", hiliter, ">normalize_whitespace"
        )
        md.registerExtension(self)
def makeExtension(*args, **kwargs):
    """Return an instance of the extension."""
    # Entry point expected by the markdown extension-loading machinery.
    extension = WikiCodeHiliteExtension(*args, **kwargs)
    return extension
|
attente/snapcraft
|
snapcraft/tests/test_plugin_gulp.py
|
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2016 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
from os import path
from unittest import mock
import fixtures
import snapcraft
from snapcraft.plugins import gulp, nodejs
from snapcraft import tests
class GulpPluginTestCase(tests.TestCase):
    """Tests for the gulp plugin: pull, build, schema and clean behaviour."""

    def setUp(self):
        super().setUp()
        self.project_options = snapcraft.ProjectOptions()

        # Stub out command execution, tarball downloads and stdout so the
        # tests never touch the network or run real tools.
        patcher = mock.patch('snapcraft.internal.common.run')
        self.run_mock = patcher.start()
        self.addCleanup(patcher.stop)

        patcher = mock.patch('snapcraft.sources.Tar')
        self.tar_mock = patcher.start()
        self.addCleanup(patcher.stop)

        patcher = mock.patch('sys.stdout')
        patcher.start()
        self.addCleanup(patcher.stop)

    def test_pull_local_sources(self):
        class Options:
            source = '.'
            gulp_tasks = []
            node_engine = '4'

        plugin = gulp.GulpPlugin('test-part', Options(), self.project_options)

        os.makedirs(plugin.sourcedir)

        plugin.pull()

        # Pulling a local source must not execute anything, only fetch node.
        self.assertFalse(self.run_mock.called, 'run() was called')
        self.tar_mock.assert_has_calls([
            mock.call(
                nodejs.get_nodejs_release(plugin.options.node_engine),
                path.join(os.path.abspath('.'), 'parts', 'test-part', 'npm')),
            mock.call().download()])

    def test_build(self):
        self.useFixture(tests.fixture_setup.CleanEnvironment())
        self.useFixture(fixtures.EnvironmentVariable(
            'PATH', '/bin'))

        class Options:
            source = '.'
            gulp_tasks = []
            node_engine = '4'

        plugin = gulp.GulpPlugin('test-part', Options(), self.project_options)

        os.makedirs(plugin.sourcedir)
        open(os.path.join(plugin.sourcedir, 'package.json'), 'w').close()

        plugin.build()

        # npm's bin dir must be prepended to PATH for the npm invocations.
        path = '{}:/bin'.format(os.path.join(plugin._npm_dir, 'bin'))
        self.run_mock.assert_has_calls([
            mock.call(['npm', 'install', '-g', 'gulp-cli'],
                      cwd=plugin.builddir, env={'PATH': path}),
            mock.call(['npm', 'install', '--only-development'],
                      cwd=plugin.builddir, env={'PATH': path}),
        ])
        self.tar_mock.assert_has_calls([
            mock.call(
                nodejs.get_nodejs_release(plugin.options.node_engine),
                os.path.join(plugin._npm_dir)),
            mock.call().provision(
                plugin._npm_dir, clean_target=False, keep_tarball=True)])

    @mock.patch('platform.machine')
    def test_unsupported_arch_raises_exception(self, machine_mock):
        machine_mock.return_value = 'fantasy-arch'

        class Options:
            source = None
            gulp_tasks = []
            node_engine = '4'

        with self.assertRaises(EnvironmentError) as raised:
            gulp.GulpPlugin('test-part', Options(), self.project_options)
        self.assertEqual(raised.exception.__str__(),
                         'architecture not supported (fantasy-arch)')

    def test_schema(self):
        self.maxDiff = None
        plugin_schema = {
            '$schema': 'http://json-schema.org/draft-04/schema#',
            'additionalProperties': False,
            'properties': {
                'gulp-tasks': {'default': [],
                               'items': {'type': 'string'},
                               'minitems': 1,
                               'type': 'array',
                               'uniqueItems': True},
                'node-engine': {'default': '4.4.4', 'type': 'string'},
                'source': {'type': 'string'},
                'source-branch': {'default': '', 'type': 'string'},
                'source-subdir': {'default': None, 'type': 'string'},
                # NOTE(review): 'type:' (trailing colon) mirrors what the
                # plugin currently declares — looks like a typo carried over
                # from the plugin's schema; confirm before fixing either side.
                'source-tag': {'default': '', 'type:': 'string'},
                'source-type': {'default': '', 'type': 'string'},
                'disable-parallel': {'default': False, 'type': 'boolean'}},
            'pull-properties': ['source', 'source-type', 'source-branch',
                                'source-tag', 'source-subdir', 'node-engine'],
            'build-properties': ['disable-parallel', 'gulp-tasks'],
            'required': ['source', 'gulp-tasks'],
            'type': 'object'}

        self.assertEqual(gulp.GulpPlugin.schema(), plugin_schema)

    def test_clean_pull_step(self):
        class Options:
            source = '.'
            gulp_tasks = []
            node_engine = '4'

        plugin = gulp.GulpPlugin('test-part', Options(), self.project_options)

        os.makedirs(plugin.sourcedir)

        plugin.pull()

        # pull() provisions the npm dir; clean_pull() must remove it again.
        self.assertTrue(os.path.exists(plugin._npm_dir))

        plugin.clean_pull()

        self.assertFalse(os.path.exists(plugin._npm_dir))
|
bronycub/sugarcub
|
sugarcub/celery.py
|
from __future__ import absolute_import
import os
from celery import Celery
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'sugarcub.settings')

# Imported only after DJANGO_SETTINGS_MODULE is set so settings resolve.
from django.conf import settings  # noqa

app = Celery('sugarcub')

# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings')
# Discover tasks.py modules in every installed Django app.
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
@app.task(bind=True)
def debug_task(self):
    """Print the task's own request context; handy to verify worker wiring."""
    print('Request: {0!r}'.format(self.request))
|
amenonsen/ansible
|
test/lib/ansible_test/_internal/provider/layout/__init__.py
|
"""Code for finding content."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import abc
import collections
import os
from ... import types as t
from ...util import (
ANSIBLE_SOURCE_ROOT,
)
from .. import (
PathProvider,
)
class Layout:
    """Description of content locations and helper methods to access content."""
    def __init__(self,
                 root,  # type: str
                 paths,  # type: t.List[str]
                 ):  # type: (...) -> None
        self.root = root

        self.__paths = paths  # contains both file paths and symlinked directory paths (ending with os.path.sep)
        self.__files = [path for path in paths if not path.endswith(os.path.sep)]  # contains only file paths
        # Pre-built (dirs, files) tuple trees so lookups/walks avoid
        # re-splitting every path on each query.
        self.__paths_tree = paths_to_tree(self.__paths)
        self.__files_tree = paths_to_tree(self.__files)

    def all_files(self, include_symlinked_directories=False):  # type: (bool) -> t.List[str]
        """Return a list of all file paths."""
        if include_symlinked_directories:
            return self.__paths

        return self.__files

    def walk_files(self, directory, include_symlinked_directories=False):  # type: (str, bool) -> t.List[str]
        """Return a list of file paths found recursively under the given directory."""
        if include_symlinked_directories:
            tree = self.__paths_tree
        else:
            tree = self.__files_tree

        parts = directory.rstrip(os.sep).split(os.sep)
        item = get_tree_item(tree, parts)

        if not item:
            return []

        # Iteratively drain a queue of (dirs, files) tuples instead of
        # recursing, collecting files from every nested subtree.
        directories = collections.deque(item[0].values())

        files = list(item[1])

        while directories:
            item = directories.pop()
            directories.extend(item[0].values())
            files.extend(item[1])

        return files

    def get_dirs(self, directory):  # type: (str) -> t.List[str]
        """Return a list of directory paths found directly under the given directory."""
        parts = directory.rstrip(os.sep).split(os.sep)
        item = get_tree_item(self.__files_tree, parts)
        return [os.path.join(directory, key) for key in item[0].keys()] if item else []

    def get_files(self, directory):  # type: (str) -> t.List[str]
        """Return a list of file paths found directly under the given directory."""
        parts = directory.rstrip(os.sep).split(os.sep)
        item = get_tree_item(self.__files_tree, parts)
        return item[1] if item else []
class ContentLayout(Layout):
    """Information about the current Ansible content being tested."""
    def __init__(self,
                 root,  # type: str
                 paths,  # type: t.List[str]
                 plugin_paths,  # type: t.Dict[str, str]
                 collection=None,  # type: t.Optional[CollectionDetail]
                 integration_path=None,  # type: t.Optional[str]
                 unit_path=None,  # type: t.Optional[str]
                 unit_module_path=None,  # type: t.Optional[str]
                 unit_module_utils_path=None,  # type: t.Optional[str]
                 ):  # type: (...) -> None
        super(ContentLayout, self).__init__(root, paths)

        self.plugin_paths = plugin_paths
        self.collection = collection
        self.integration_path = integration_path

        # Fix: the original joined integration_path unconditionally, which
        # raises TypeError when the parameter is left at its default None.
        if integration_path is not None:
            self.integration_targets_path = os.path.join(integration_path, 'targets')
            self.integration_vars_path = os.path.join(integration_path, 'integration_config.yml')
        else:
            self.integration_targets_path = None
            self.integration_vars_path = None

        self.unit_path = unit_path
        self.unit_module_path = unit_module_path
        self.unit_module_utils_path = unit_module_utils_path
        # True when testing the Ansible source tree itself (affects where
        # powershell/csharp module_utils are expected to live).
        self.is_ansible = root == ANSIBLE_SOURCE_ROOT

    @property
    def prefix(self):  # type: () -> str
        """Return the collection prefix or an empty string if not a collection."""
        if self.collection:
            return self.collection.prefix

        return ''

    @property
    def module_path(self):  # type: () -> t.Optional[str]
        """Return the path where modules are found, if any."""
        return self.plugin_paths.get('modules')

    @property
    def module_utils_path(self):  # type: () -> t.Optional[str]
        """Return the path where module_utils are found, if any."""
        return self.plugin_paths.get('module_utils')

    @property
    def module_utils_powershell_path(self):  # type: () -> t.Optional[str]
        """Return the path where powershell module_utils are found, if any."""
        if self.is_ansible:
            return os.path.join(self.plugin_paths['module_utils'], 'powershell')

        return self.plugin_paths.get('module_utils')

    @property
    def module_utils_csharp_path(self):  # type: () -> t.Optional[str]
        """Return the path where csharp module_utils are found, if any."""
        if self.is_ansible:
            return os.path.join(self.plugin_paths['module_utils'], 'csharp')

        return self.plugin_paths.get('module_utils')
class CollectionDetail:
    """Details about the layout of the current collection."""
    def __init__(self,
                 name,  # type: str
                 namespace,  # type: str
                 root,  # type: str
                 ):  # type: (...) -> None
        """Record the collection's identity and the paths derived from it."""
        self.name = name
        self.namespace = namespace
        self.root = root
        # Derived identifiers: dotted full name, matching prefix, and the
        # standard ansible_collections directory layout.
        dotted = '%s.%s' % (namespace, name)
        self.full_name = dotted
        self.prefix = dotted + '.'
        self.directory = os.path.join('ansible_collections', namespace, name)
class LayoutProvider(PathProvider):
    """Base class for layout providers."""
    # Plugin type subdirectories a provider may expect under a content root.
    PLUGIN_TYPES = (
        'action',
        'become',
        'cache',
        'callback',
        'cliconf',
        'connection',
        'doc_fragments',
        'filter',
        'httpapi',
        'inventory',
        'lookup',
        'module_utils',
        'modules',
        'netconf',
        'shell',
        'strategy',
        'terminal',
        'test',
        'vars',
    )

    @abc.abstractmethod
    def create(self, root, paths):  # type: (str, t.List[str]) -> ContentLayout
        """Create a layout using the given root and paths."""
def paths_to_tree(paths):  # type: (t.List[str]) -> t.Tuple(t.Dict[str, t.Any], t.List[str])
    """Return a filesystem tree from the given list of paths.

    Each tree node is a ``(children, files)`` tuple: ``children`` maps a
    directory name to a child node, ``files`` lists the full paths stored
    directly at that node.
    """
    tree = ({}, [])

    for full_path in paths:
        segments = full_path.split(os.sep)
        node = tree

        # Descend (creating nodes on demand) through every directory segment.
        for segment in segments[:-1]:
            node = node[0].setdefault(segment, ({}, []))

        node[1].append(full_path)

    return tree
def get_tree_item(tree, parts):  # type: (t.Tuple(t.Dict[str, t.Any], t.List[str]), t.List[str]) -> t.Optional[t.Tuple(t.Dict[str, t.Any], t.List[str])]
    """Return the portion of the tree found under the path given by parts, or None if it does not exist."""
    current = tree

    for name in parts:
        children = current[0]
        if name not in children:
            # A missing segment means the requested path is not in the tree.
            return None
        current = children[name]

    return current
|
gwr/samba
|
source4/scripting/python/samba/__init__.py
|
#!/usr/bin/env python
# Unix SMB/CIFS implementation.
# Copyright (C) Jelmer Vernooij <jelmer@samba.org> 2007-2008
#
# Based on the original in EJS:
# Copyright (C) Andrew Tridgell <tridge@samba.org> 2005
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""Samba 4."""
__docformat__ = "restructuredText"
import os
import sys
import samba.param
def source_tree_topdir():
    '''return the top level directory (the one containing the source4 directory)'''
    here = os.path.dirname(__file__)
    # Probe both plausible depths relative to this module's location.
    for relative in ("../../..", "../../../.."):
        candidate = os.path.normpath(os.path.join(here, relative))
        if os.path.exists(os.path.join(candidate, 'source4')):
            return candidate
    raise RuntimeError("unable to find top level source directory")
def in_source_tree():
    '''return True if we are running from within the samba source tree'''
    try:
        source_tree_topdir()
    except RuntimeError:
        # No top-level directory found: we are running from an install.
        return False
    return True
import ldb
from samba._ldb import Ldb as _Ldb
class Ldb(_Ldb):
    """Simple Samba-specific LDB subclass that takes care
    of setting up the modules dir, credentials pointers, etc.

    Please note that this is intended to be for all Samba LDB files,
    not necessarily the Sam database. For Sam-specific helper
    functions see samdb.py.
    """

    def __init__(self, url=None, lp=None, modules_dir=None, session_info=None,
                 credentials=None, flags=0, options=None):
        """Opens a Samba Ldb file.

        :param url: Optional LDB URL to open
        :param lp: Optional loadparm object
        :param modules_dir: Optional modules directory
        :param session_info: Optional session information
        :param credentials: Optional credentials, defaults to anonymous.
        :param flags: Optional LDB flags
        :param options: Additional options (optional)

        This is different from a regular Ldb file in that the Samba-specific
        modules-dir is used by default and that credentials and session_info
        can be passed through (required by some modules).
        """
        if modules_dir is not None:
            self.set_modules_dir(modules_dir)
        else:
            # Default to the ldb modules shipped with this Samba build.
            self.set_modules_dir(os.path.join(samba.param.modules_dir(), "ldb"))

        if session_info is not None:
            self.set_session_info(session_info)

        if credentials is not None:
            self.set_credentials(credentials)

        if lp is not None:
            self.set_loadparm(lp)

        # This must be done before we load the schema, as these handlers for
        # objectSid and objectGUID etc must take precedence over the 'binary
        # attribute' declaration in the schema
        self.register_samba_handlers()

        # TODO set debug
        def msg(l, text):
            print text
        #self.set_debug(msg)

        self.set_utf8_casefold()

        # Allow admins to force non-sync ldb for all databases
        if lp is not None:
            nosync_p = lp.get("nosync", "ldb")
            if nosync_p is not None and nosync_p == True:
                flags |= ldb.FLG_NOSYNC

        # Restrict newly created DB files to owner read/write only.
        self.set_create_perms(0600)

        if url is not None:
            self.connect(url, flags, options)

    def searchone(self, attribute, basedn=None, expression=None,
                  scope=ldb.SCOPE_BASE):
        """Search for one attribute as a string.

        :param basedn: BaseDN for the search.
        :param attribute: Name of the attribute
        :param expression: Optional search expression.
        :param scope: Search scope (defaults to base).
        :return: Value of attribute as a string or None if it wasn't found.
        """
        res = self.search(basedn, scope, expression, [attribute])
        if len(res) != 1 or res[0][attribute] is None:
            return None
        values = set(res[0][attribute])
        # Exactly one distinct value is expected for a single-valued lookup.
        assert len(values) == 1
        return self.schema_format_value(attribute, values.pop())

    def erase_users_computers(self, dn):
        """Erases user and computer objects from our AD.

        This is needed since the 'samldb' module denies the deletion of primary
        groups. Therefore all groups shouldn't be primary somewhere anymore.
        """

        try:
            res = self.search(base=dn, scope=ldb.SCOPE_SUBTREE, attrs=[],
                              expression="(|(objectclass=user)(objectclass=computer))")
        except ldb.LdbError, (errno, _):
            if errno == ldb.ERR_NO_SUCH_OBJECT:
                # Ignore no such object errors
                return
            else:
                raise

        try:
            for msg in res:
                # 'relax:0' bypasses server-side constraints on deletion.
                self.delete(msg.dn, ["relax:0"])
        except ldb.LdbError, (errno, _):
            if errno != ldb.ERR_NO_SUCH_OBJECT:
                # Ignore no such object errors
                raise

    def erase_except_schema_controlled(self):
        """Erase this ldb.

        :note: Removes all records, except those that are controlled by
            Samba4's schema.
        """

        basedn = ""

        # Try to delete user/computer accounts to allow deletion of groups
        self.erase_users_computers(basedn)

        # Delete the 'visible' records, and the invisble 'deleted' records (if this DB supports it)
        for msg in self.search(basedn, ldb.SCOPE_SUBTREE,
                               "(&(|(objectclass=*)(distinguishedName=*))(!(distinguishedName=@BASEINFO)))",
                               [], controls=["show_deleted:0", "show_recycled:0"]):
            try:
                self.delete(msg.dn, ["relax:0"])
            except ldb.LdbError, (errno, _):
                if errno != ldb.ERR_NO_SUCH_OBJECT:
                    # Ignore no such object errors
                    raise

        # Verify the database is now empty (apart from schema records).
        res = self.search(basedn, ldb.SCOPE_SUBTREE,
                          "(&(|(objectclass=*)(distinguishedName=*))(!(distinguishedName=@BASEINFO)))", [], controls=["show_deleted:0", "show_recycled:0"])
        assert len(res) == 0

        # delete the specials
        for attr in ["@SUBCLASSES", "@MODULES",
                     "@OPTIONS", "@PARTITION", "@KLUDGEACL"]:
            try:
                self.delete(attr, ["relax:0"])
            except ldb.LdbError, (errno, _):
                if errno != ldb.ERR_NO_SUCH_OBJECT:
                    # Ignore missing dn errors
                    raise

    def erase(self):
        """Erase this ldb, removing all records."""
        self.erase_except_schema_controlled()

        # delete the specials
        for attr in ["@INDEXLIST", "@ATTRIBUTES"]:
            try:
                self.delete(attr, ["relax:0"])
            except ldb.LdbError, (errno, _):
                if errno != ldb.ERR_NO_SUCH_OBJECT:
                    # Ignore missing dn errors
                    raise

    def load_ldif_file_add(self, ldif_path):
        """Load a LDIF file.

        :param ldif_path: Path to LDIF file.
        """
        self.add_ldif(open(ldif_path, 'r').read())

    def add_ldif(self, ldif, controls=None):
        """Add data based on a LDIF string.

        :param ldif: LDIF text.
        """
        for changetype, msg in self.parse_ldif(ldif):
            # Only plain records (no changetype directives) are accepted here.
            assert changetype == ldb.CHANGETYPE_NONE
            self.add(msg, controls)

    def modify_ldif(self, ldif, controls=None):
        """Modify database based on a LDIF string.

        :param ldif: LDIF text.
        """
        for changetype, msg in self.parse_ldif(ldif):
            if changetype == ldb.CHANGETYPE_ADD:
                self.add(msg, controls)
            else:
                self.modify(msg, controls)
def substitute_var(text, values):
    """Substitute strings of the form ${NAME} in str, replacing
    with substitutions from values.

    :param text: Text in which to subsitute.
    :param values: Dictionary with keys and values.
    """
    result = text
    for key, val in values.items():
        assert isinstance(key, str), "%r is not a string" % key
        assert isinstance(val, str), "Value %r for %s is not a string" % (val, key)
        result = result.replace("${%s}" % key, val)

    return result
def check_all_substituted(text):
    """Check that all substitution variables in a string have been replaced.

    If not, raise an exception.

    :param text: The text to search for substitution variables
    """
    start = text.find("${")
    if start == -1:
        return

    # Report the first unexpanded ${...} placeholder found.
    end = text.find("}", start)
    raise Exception("Not all variables substituted: %s" %
        text[start:end + 1])
def read_and_sub_file(file_name, subst_vars):
    """Read a file and sub in variables found in it

    :param file_name: File to be read (typically from setup directory)
    :param subst_vars: Optional variables to substitute in the file.
    :return: File contents with all substitutions applied.
    """
    # Fix: use a context manager so the file handle is closed promptly
    # (the original left it to the garbage collector).
    with open(file_name, 'r') as f:
        data = f.read()
    if subst_vars is not None:
        data = substitute_var(data, subst_vars)
        check_all_substituted(data)
    return data
def setup_file(template, fname, subst_vars=None):
    """Setup a file in the private dir.

    :param template: Path of the template file.
    :param fname: Path of the file to create.
    :param subst_vars: Substitution variables.
    """
    # Remove any stale copy before writing the new file.
    if os.path.exists(fname):
        os.unlink(fname)

    contents = read_and_sub_file(template, subst_vars)
    out = open(fname, 'w')
    try:
        out.write(contents)
    finally:
        out.close()
def valid_netbios_name(name):
    """Check whether a name is valid as a NetBIOS name. """
    # See crh's book (1.4.1.1)
    if len(name) > 15:
        return False
    allowed_punctuation = " !#$%&'()-.@^_{}~"
    return all(ch.isalnum() or ch in allowed_punctuation for ch in name)
def import_bundled_package(modulename, location):
    """Import the bundled version of a package.

    :note: This should only be called if the system version of the package
        is not adequate.

    :param modulename: Module name to import
    :param location: Location to add to sys.path (can be relative to
        ${srcdir}/lib)
    """
    if not in_source_tree():
        # Installed tree: the bundled copy lives under samba.external.
        sys.modules[modulename] = __import__(
            "samba.external.%s" % modulename, fromlist=["samba.external"])
    else:
        # Source tree: import straight from the in-tree lib directory.
        sys.path.insert(0, os.path.join(source_tree_topdir(), "lib", location))
        sys.modules[modulename] = __import__(modulename)
def ensure_external_module(modulename, location):
    """Add a location to sys.path if an external dependency can't be found.

    :param modulename: Module name to import
    :param location: Location to add to sys.path (can be relative to
        ${srcdir}/lib)
    """
    try:
        __import__(modulename)
        return  # system copy is available, nothing to do
    except ImportError:
        pass
    import_bundled_package(modulename, location)
from samba import _glue

# Re-export helpers from the _glue C extension at package level.
# Fix: the original assigned unix2nttime twice; the duplicate is removed.
version = _glue.version
interface_ips = _glue.interface_ips
set_debug_level = _glue.set_debug_level
get_debug_level = _glue.get_debug_level
unix2nttime = _glue.unix2nttime
nttime2string = _glue.nttime2string
nttime2unix = _glue.nttime2unix
generate_random_password = _glue.generate_random_password
strcasecmp_m = _glue.strcasecmp_m
strstr_m = _glue.strstr_m
|
Buggaboo/gimp-plugin-export-layers
|
export_layers/pygimplib/pgitemdata.py
|
#-------------------------------------------------------------------------------
#
# This file is part of pygimplib.
#
# Copyright (C) 2014, 2015 khalim19 <khalim19@gmail.com>
#
# pygimplib is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pygimplib is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pygimplib. If not, see <http://www.gnu.org/licenses/>.
#
#-------------------------------------------------------------------------------
"""
This module defines the following classes:
* `ItemData` - an associative container that stores all GIMP items and item
groups of a certain type
* subclasses of `ItemData`:
* `LayerData` for layers
* `ChannelData` for channels
* `PathData` for paths
* `_ItemDataElement` - wrapper for `gimp.Item` objects containing custom
attributes derived from the original `gimp.Item` attributes
"""
#===============================================================================
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
str = unicode
#===============================================================================
import os
import abc
from collections import OrderedDict
from collections import namedtuple
import gimp
from . import pgpath
from . import objectfilter
#===============================================================================
pdb = gimp.pdb
#===============================================================================
class ItemData(object):
"""
This class is an interface to store all items (and item groups) of a certain
type (e.g. layers, channels or paths) of a GIMP image in an ordered
dictionary, allowing to access the items via their names and get various
custom attributes derived from the existing item attributes.
Use one of the subclasses for items of a certain type:
* `LayerData` for layers,
* `ChannelData` for channels,
* `PathData` for paths (vectors).
For custom item attributes, see the documentation for the `_ItemDataElement`
class. `_ItemDataElement` is common for all `ItemData` subclasses.
Attributes:
* `image` - GIMP image to get item data from.
* `is_filtered` - If True, ignore items that do not match the filter
(`ObjectFilter`) in this object when iterating.
* `filter` (read-only) - `ObjectFilter` instance where you can add or remove
filter rules or subfilters to filter items.
"""
__metaclass__ = abc.ABCMeta
def __init__(self, image, is_filtered=False, filter_match_type=objectfilter.ObjectFilter.MATCH_ALL):
self.image = image
self.is_filtered = is_filtered
# Filters applied to all items in self._itemdata
self._filter = objectfilter.ObjectFilter(filter_match_type)
# Contains all items (including item groups) in the item tree.
# key: `_ItemDataElement.orig_name` (derived from `gimp.Item.name`, which is unique)
# value: `_ItemDataElement` object
self._itemdata = OrderedDict()
# key `_ItemDataElement` object (parent) or None (root of the item tree)
# value: set of `_ItemDataElement` objects
self._uniquified_itemdata = {}
self._fill_item_data()
@property
def filter(self):
return self._filter
def __getitem__(self, name):
"""
Access an `_ItemDataElement` object by its `orig_name` attribute.
"""
return self._itemdata[name]
def __contains__(self, name):
"""
Return True if an `_ItemDataElement` object, specified by its `orig_name`
attribute, is in the item data. Otherwise return False.
"""
return name in self._itemdata
def __len__(self):
"""
Return the number of all item data elements - that is, all immediate
children of the image and all nested children.
"""
return len([item_elem for item_elem in self])
def __iter__(self):
    """
    Iterate over item data elements.

    If `is_filtered` is False, yield every element; if True, yield only
    elements accepted by this object's filter.

    Yields:
    * `item_elem` - The current `_ItemDataElement` object.
    """
    # Pick the acceptance predicate once, up front.
    if self.is_filtered:
        accepts = self._filter.is_match
    else:
        accepts = lambda _elem: True
    for item_elem in self._itemdata.values():
        if accepts(item_elem):
            yield item_elem
def _items(self):
    """
    Yield (`gimp.Item.name`, `_ItemDataElement` object) pairs, honoring
    `is_filtered` the same way `__iter__` does.
    """
    if self.is_filtered:
        for name, item_elem in self._itemdata.items():
            if self._filter.is_match(item_elem):
                yield name, item_elem
    else:
        for name, item_elem in self._itemdata.items():
            yield name, item_elem
def uniquify_name(self, item_elem, include_item_path=True,
                  uniquifier_position=None, uniquifier_position_parents=None):
    """
    Make the `name` attribute in the specified `_ItemDataElement` object
    unique among all other, already uniquified `_ItemDataElement` objects.
    To achieve uniquification, a string ("uniquifier") in the form of
    " (<number>)" is inserted at the end of the item names.
    Parameters:
    * `item_elem` - `_ItemDataElement` object whose `name` attribute
      will be uniquified.
    * `include_item_path` - If True, take the item path into account when
      uniquifying.
    * `uniquifier_position` - Position (index) where the uniquifier is inserted
      into the current item. If the position is None, insert the uniquifier at
      the end of the item name (i.e. append it).
    * `uniquifier_position_parents` - Position (index) where the uniquifier is
      inserted into the parents of the current item. If the position is None,
      insert the uniquifier at the end of the name of each parent. This
      parameter has no effect if `include_item_path` is False.
    """
    if include_item_path:
        # Walk the path from the topmost parent down to the item itself,
        # uniquifying each path component among its siblings.
        for elem in item_elem.parents + [item_elem]:
            # `parent` is None for top-level items (root of the tree).
            parent = elem.parent
            if parent not in self._uniquified_itemdata:
                self._uniquified_itemdata[parent] = set()
            if elem not in self._uniquified_itemdata[parent]:
                item_names = set([elem_.name for elem_ in self._uniquified_itemdata[parent]])
                if elem.name not in item_names:
                    # Name is already unique among processed siblings.
                    self._uniquified_itemdata[parent].add(elem)
                else:
                    # Clash: insert the uniquifier at the position chosen
                    # for the item itself vs. for its parents.
                    if elem == item_elem:
                        position = uniquifier_position
                    else:
                        position = uniquifier_position_parents
                    elem.name = pgpath.uniquify_string(elem.name, item_names, position)
                    self._uniquified_itemdata[parent].add(elem)
    else:
        # Use None as the root of the item tree.
        parent = None
        if parent not in self._uniquified_itemdata:
            self._uniquified_itemdata[parent] = set()
        # NOTE(review): this branch stores item *names* in the set, while the
        # branch above stores `_ItemDataElement` objects - mixing both call
        # styles on one instance would make the sets inconsistent. TODO
        # confirm the two modes are never used on the same ItemData object.
        item_elem.name = pgpath.uniquify_string(
            item_elem.name, self._uniquified_itemdata[parent], uniquifier_position)
        self._uniquified_itemdata[parent].add(item_elem.name)
def _fill_item_data(self):
    """
    Fill the `_itemdata` dictionary, containing
    <`gimp.Item.name`, `_ItemDataElement`> pairs, by walking the item tree.
    """
    _ItemTreeNode = namedtuple('_ItemTreeNode', ['children', 'parents'])
    # Worklist of nodes still to process, seeded with the image's
    # top-level children (no parents).
    item_tree = [_ItemTreeNode(self._get_children_from_image(self.image), [])]
    while item_tree:
        node = item_tree.pop(0)
        # Insertion index for child-group nodes; groups are queued at the
        # front, in sibling order, so their contents are visited before
        # later siblings' contents.
        index = 0
        for item in node.children:
            parents = list(node.parents)
            item_elem = _ItemDataElement(item, parents)
            if pdb.gimp_item_is_group(item):
                item_tree.insert(index, _ItemTreeNode(self._get_children_from_item(item), parents + [item_elem]))
                index += 1
            self._itemdata[item_elem.orig_name] = item_elem
@abc.abstractmethod
def _get_children_from_image(self, image):
    """
    Return a list of immediate child items from the specified image.
    If no child items exist, return an empty list.

    Subclasses choose the item type (layers, channels or vectors).
    """
    pass
@abc.abstractmethod
def _get_children_from_item(self, item):
    """
    Return a list of immediate child items from the specified item
    (an item group). If no child items exist, return an empty list.
    """
    pass
class LayerData(ItemData):
    """Item data for layers of a GIMP image (including nested layer groups)."""

    def _get_children_from_image(self, image):
        # Top-level layers of the image.
        return image.layers

    def _get_children_from_item(self, item):
        # Sub-layers of a layer group.
        return item.layers
class ChannelData(ItemData):
    """Item data for channels of a GIMP image."""

    def _get_children_from_image(self, image):
        # Top-level channels of the image.
        return image.channels

    def _get_children_from_item(self, item):
        # Children of a channel group (generic `gimp.Item.children`).
        return item.children
class PathData(ItemData):
    """Item data for paths (vectors) of a GIMP image."""

    def _get_children_from_image(self, image):
        # Top-level paths (vectors) of the image.
        return image.vectors

    def _get_children_from_item(self, item):
        # Children of a path group (generic `gimp.Item.children`).
        return item.children
#===============================================================================
class _ItemDataElement(object):
    """
    This class wraps a `gimp.Item` object and defines custom item attributes.
    Note that the attributes will not be up to date if changes were made to the
    original `gimp.Item` object.
    Attributes:
    * `item` (read-only) - `gimp.Item` object.
    * `parents` (read-only) - List of `_ItemDataElement` parents for this item,
      sorted from the topmost parent to the bottommost (immediate) parent.
    * `level` (read-only) - Integer indicating which level in the item tree is
      the item positioned at. 0 means the item is at the top level. The higher
      the level, the deeper the item is in the item tree.
    * `parent` (read-only) - Immediate `_ItemDataElement` parent of this object.
      If this object has no parent, return None.
    * `item_type` (read-only) - Item type - one of the following:
      * `ITEM` - normal item,
      * `NONEMPTY_GROUP` - non-empty item group (contains children),
      * `EMPTY_GROUP` - empty item group (contains no children).
    * `name` - Item name as a `unicode` string, initially equal to the `orig_name`
      attribute. Modify this attribute instead of `gimp.Item.name` to avoid
      modifying the original item.
    * `orig_name` (read-only) - original `gimp.Item.name` as a `unicode` string.
    * `path_visible` (read-only) - Visibility of all item's parents and this
      item. If all items are visible, `path_visible` is True. If at least one
      of these items is invisible, `path_visible` is False.
    * `tags` - Mutable set of arbitrary tags attached to this element.
    """

    __ITEM_TYPES = ITEM, NONEMPTY_GROUP, EMPTY_GROUP = (0, 1, 2)

    def __init__(self, item, parents=None):
        """
        Wrap `item` and compute the derived attributes.

        Raises TypeError if `item` is None.
        """
        if item is None:
            raise TypeError("item cannot be None")
        # NOTE(review): decode() without an argument uses the default codec
        # (ASCII on Python 2) - assumes item names are decodable with it.
        self.name = item.name.decode()
        self.tags = set()
        self._orig_name = self.name
        self._item = item
        self._parents = parents if parents is not None else []
        self._level = len(self._parents)
        if self._parents:
            self._parent = self._parents[-1]
        else:
            self._parent = None
        # Classify the item; groups are split into empty/non-empty.
        if pdb.gimp_item_is_group(self._item):
            if self._item.children:
                self._item_type = self.NONEMPTY_GROUP
            else:
                self._item_type = self.EMPTY_GROUP
        else:
            self._item_type = self.ITEM
        self._path_visible = self._get_path_visibility()

    @property
    def item(self):
        return self._item

    @property
    def parents(self):
        return self._parents

    @property
    def level(self):
        return self._level

    @property
    def parent(self):
        return self._parent

    @property
    def item_type(self):
        return self._item_type

    @property
    def orig_name(self):
        return self._orig_name

    @property
    def path_visible(self):
        return self._path_visible

    def get_file_extension(self):
        """
        Get file extension from the `name` attribute.
        If `name` has no file extension, return an empty string.
        """
        return pgpath.get_file_extension(self.name)

    def set_file_extension(self, file_extension):
        """
        Set file extension in the `name` attribute.
        To remove the file extension from `name`, pass an empty string or None.
        """
        root = os.path.splitext(self.name)[0]
        if file_extension:
            self.name = '.'.join((root, file_extension))
        else:
            self.name = root

    def get_filepath(self, directory, include_item_path=True):
        """
        Return file path given the specified directory, item name and names of its
        parents.
        If `include_item_path` is True, create file path in the following format:
        <directory>/<item path components>/<item name>
        If `include_item_path` is False, create file path in the following format:
        <directory>/<item name>
        If directory is not an absolute path or is None, prepend the current working
        directory.
        Item path components consist of parents' item names, starting with the
        topmost parent.
        """
        if directory is None:
            directory = ""
        path = os.path.abspath(directory)
        if include_item_path:
            path_components = self.get_path_components()
            if path_components:
                path = os.path.join(path, os.path.join(*path_components))
        path = os.path.join(path, self.name)
        return path

    def get_path_components(self):
        """
        Return a list of names of all parents of this item as path components.
        """
        return [parent.name for parent in self.parents]

    def validate_name(self):
        """
        Validate (sanitize) the `name` attribute of this item and all of its
        parents, making them usable as file names.
        """
        self.name = pgpath.FilenameValidator.validate(self.name)
        for parent in self._parents:
            parent.name = pgpath.FilenameValidator.validate(parent.name)

    def _get_path_visibility(self):
        """
        If this item and all of its parents are visible, return True, otherwise
        return False.
        """
        path_visible = True
        if not self._item.visible:
            path_visible = False
        else:
            # Any single invisible parent hides the whole path.
            for parent in self._parents:
                if not parent.item.visible:
                    path_visible = False
                    break
        return path_visible
|
cliburn/flow
|
src/plugins/visual/TwoDFrame/colormap.py
|
#!/usr/bin/env python
#
"""
These functions, when given a magnitude mag between cmin and cmax, return
a colour tuple (red, green, blue). Light blue is cold (low magnitude)
and yellow is hot (high magnitude).
"""
import math
def floatRgb(mag, cmin, cmax, alpha=1.0):
    """
    Return (red, green, blue, alpha) amplitudes as floats in [0, 1] for a
    magnitude `mag` mapped linearly over [cmin, cmax]: light blue is cold
    (low magnitude), yellow is hot (high magnitude).
    """
    try:
        # Normalize mag to [0, 1] over the [cmin, cmax] range.
        x = float(mag - cmin) / float(cmax - cmin)
    except ZeroDivisionError:
        # Degenerate range (cmax == cmin): use the midpoint color.
        x = 0.5
    blue = min(max(4 * (0.75 - x), 0.), 1.)
    red = min(max(4 * (x - 0.25), 0.), 1.)
    green = min(max(4 * math.fabs(x - 0.5) - 1., 0.), 1.)
    return (red, green, blue, alpha)
def strRgb(mag, cmin, cmax):
    """
    Return a color string of the form "#rrggbb" to be used in Tk plots.
    """
    # BUG FIX: floatRgb returns a 4-tuple (r, g, b, alpha); unpacking only
    # three values raised ValueError. Alpha is not used here.
    red, green, blue, _alpha = floatRgb(mag, cmin, cmax)
    # %x requires integers; scale the [0, 1] floats to 0-255 first.
    return "#%02x%02x%02x" % (int(red * 255), int(green * 255), int(blue * 255))
def rgb(mag, cmin, cmax):
    """
    Return a tuple of integers (red, green, blue), each in 0-255, to be
    used in AWT/Java plots.
    """
    # BUG FIX: floatRgb returns a 4-tuple (r, g, b, alpha); unpacking only
    # three values raised ValueError. Alpha is not used here.
    red, green, blue, _alpha = floatRgb(mag, cmin, cmax)
    return (int(red * 255), int(green * 255), int(blue * 255))
def htmlRgb(mag, cmin, cmax):
    """
    Return a color string of the form "#rrggbb" to be used in HTML documents.
    """
    rgb_tuple = rgb(mag, cmin, cmax)
    return "#%02x%02x%02x" % rgb_tuple
|
yuanyelele/solfege
|
solfege/mainwin.py
|
# vim: set fileencoding=utf-8 :
# GNU Solfege - free ear training software
# Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2011 Tom Cato Amundsen
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
import solfege
import webbrowser
import textwrap
# We move x-www-browser to the end of the list because on my
# debian etch system, the browser does will freeze solfege until
# I close the browser window.
try:
    # Demote "x-www-browser" to lowest priority (see the comment above).
    # NOTE(review): relies on the private webbrowser._tryorder attribute.
    i = webbrowser._tryorder.index("x-www-browser")
    webbrowser._tryorder.append(webbrowser._tryorder[i])
    del webbrowser._tryorder[i]
except ValueError:
    # "x-www-browser" is not registered on this system - nothing to do.
    pass
import sys
import traceback
import locale
import os
import urllib
import shutil
try:
from pyalsa import alsaseq
except ImportError:
alsaseq = None
from solfege import winlang
from solfege import buildinfo
from solfege.esel import FrontPage, TestsView, SearchView
from gi.repository import Gtk
from gi.repository import Gdk
from solfege import utils
from solfege import i18n
class SplashWin(Gtk.Window):
    """
    Borderless popup window shown while the application starts.

    Displays the program name, the home page URL and a progress label
    that can be updated via `show_progress()`.
    """
    def __init__(self):
        Gtk.Window.__init__(self, Gtk.WindowType.POPUP)
        self.set_position(Gtk.WindowPosition.CENTER)
        self.set_resizable(True)
        frame = Gtk.Frame()
        frame.set_shadow_type(Gtk.ShadowType.OUT)
        self.add(frame)
        vbox = Gtk.VBox()
        vbox.set_border_width(20)
        frame.add(vbox)
        l = Gtk.Label(label=_("Starting GNU Solfege %s") % buildinfo.VERSION_STRING)
        l.set_name("Heading1")
        vbox.pack_start(l, True, True, 0)
        l = Gtk.Label(label="http://www.solfege.org")
        vbox.pack_start(l, True, True, 0)
        # Progress label, updated by show_progress().
        self.g_infolabel = Gtk.Label(label='')
        vbox.pack_start(self.g_infolabel, True, True, 0)
        self.show_all()

    def show_progress(self, txt):
        """Show `txt` in the progress label and redraw immediately."""
        self.g_infolabel.set_text(txt)
        # Process pending GTK events so the label updates while the
        # main loop is not yet running.
        while Gtk.events_pending():
            Gtk.main_iteration()
from solfege.configwindow import ConfigWindow
from solfege.profilemanager import ChangeProfileDialog
from solfege import gu
from solfege import cfg
from solfege import mpd
from solfege import lessonfile
from solfege import download_pyalsa
from solfege import statistics
from solfege import stock
from solfege import frontpage
from solfege import fpeditor
from solfege.trainingsetdlg import TrainingSetDialog
from solfege.practisesheetdlg import PractiseSheetDialog
from solfege import filesystem
class MusicViewerWindow(Gtk.Dialog):
    """
    Dialog that displays music notation using `mpd.MusicDisplayer`.
    Closing is delegated to `solfege.win.close_musicviewer`.
    """
    def __init__(self):
        Gtk.Dialog.__init__(self)
        self.set_default_size(500, 300)
        self.g_music_displayer = mpd.MusicDisplayer()
        self.vbox.pack_start(self.g_music_displayer, True, True, 0)
        b = gu.bButton(self.action_area, _("Close"), solfege.win.close_musicviewer)
        b.grab_focus()
        self.connect('destroy', solfege.win.close_musicviewer)
        self.show_all()

    def display_music(self, music):
        """Render `music` using the notation font size from the config."""
        # 'config/feta_font_size=20' reads the int with a default of 20.
        fontsize = cfg.get_int('config/feta_font_size=20')
        self.g_music_displayer.display(music, fontsize)
class MainWin(Gtk.Window, cfg.ConfigUtils):
default_front_page = os.path.join(lessonfile.exercises_dir, 'learningtree.txt')
debug_front_page = os.path.join(lessonfile.exercises_dir, 'debugtree.txt')
def __init__(self, options, datadir):
    """
    Build the main window: icon factory, config bindings, menu bar and
    the main container box that exercise views are packed into.

    Parameters:
    * `options` - parsed command line options.
    * `datadir` - application data directory, used by the icon factory.
    """
    Gtk.Window.__init__(self, Gtk.WindowType.TOPLEVEL)
    self._vbox = Gtk.VBox()
    self._vbox.show()
    self.add(self._vbox)
    stock.SolfegeIconFactory(self, datadir)
    Gtk.Settings.get_default().set_property('gtk-button-images', True)
    # Call cfg.ConfigUtils.__init__ explicitly (Gtk.Window.__init__ was
    # already called above, so normal cooperative init is bypassed).
    cfg.ConfigUtils.__dict__['__init__'](self, 'mainwin')
    self.set_resizable(self.get_bool('gui/mainwin_user_resizeable'))
    self.add_watch('gui/mainwin_user_resizeable', lambda s: self.set_resizable(self.get_bool('gui/mainwin_user_resizeable')))
    self.connect('delete-event', self.quit_program)
    self.connect('key_press_event', self.on_key_press_event)
    self.g_about_window = None
    self.m_exercise = None
    # Name of the currently visible view in self.box_dict
    # (None until the first view is added).
    self.m_viewer = None
    self.box_dict = {}
    self.g_config_window = None
    self.g_path_info_dlg = None
    self.g_musicviewer_window = None
    self.m_history = []
    self.g_ui_manager = Gtk.UIManager()
    self.m_action_groups = {
        'Exit': Gtk.ActionGroup('Exit'),
        'NotExit': Gtk.ActionGroup('NotExit'),
    }
    for a in self.m_action_groups.values():
        self.g_ui_manager.insert_action_group(a, 1)
    self.setup_menu()
    self.main_box = Gtk.VBox()
    self.main_box.show()
    self._vbox.pack_start(self.main_box, True, True, 0)
def get_view(self):
    """
    Return the currently visible view.
    Raise KeyError if no view has been added yet.
    """
    current_name = self.m_viewer
    return self.box_dict[current_name]
def add_view(self, view, name):
    """
    Register `view` under `name`, pack it into the main box and make it
    the visible view. The previously visible view, if any, is hidden.
    """
    assert name not in self.box_dict
    if self.m_viewer:
        self.get_view().hide()
    self.box_dict[name] = view
    self.main_box.pack_start(view, True, True, 0)
    view.show()
    self.m_viewer = name
def show_view(self, name):
    """
    Switch the visible view to the one registered as `name`.
    Return True on success, False if no view with that name exists.
    """
    try:
        new_view = self.box_dict[name]
    except KeyError:
        return False
    self.get_view().hide()
    self.m_viewer = name
    new_view.show()
    return True
def change_frontpage(self, filename):
    """
    Change to a different front page file and reload the display.
    """
    # Persist the choice so it survives restarts, then reload.
    self.set_string('app/frontpage', filename)
    self.load_frontpage()
def load_frontpage(self):
    """
    Load the front page file set in the config database into
    solfege.app.m_frontpage_data and display it.

    Falls back to the default front page when the configured file is the
    debug page while not running in debug mode, does not exist, or fails
    to parse (in which case an error dialog with the traceback is shown).
    """
    filename = self.get_string("app/frontpage")
    # The debug front page is only available when running with --debug.
    if filename == self.debug_front_page and not solfege.app.m_options.debug:
        self.set_string("app/frontpage", self.default_front_page)
        filename = self.default_front_page
    if not os.path.isfile(filename):
        filename = self.default_front_page
    try:
        solfege.app.m_frontpage_data = frontpage.load_tree(filename)
    except Exception:
        if solfege.splash_win:
            solfege.splash_win.hide()
        solfege.app.m_frontpage_data = frontpage.load_tree(self.default_front_page)
        self.set_string('app/frontpage', self.default_front_page)
        # BUG FIX (i18n): translate the format string first, *then*
        # interpolate the filename - the old code formatted before
        # translating, so the catalog lookup could never match.
        gu.dialog_ok(_("Loading front page '%s' failed. Using default page.") % filename,
            secondary_text="\n".join(traceback.format_exception(*sys.exc_info())))
        if solfege.splash_win:
            solfege.splash_win.show()
    self.display_frontpage()
def setup_menu(self):
    """
    Create the action groups, build the menu bar from ui.xml and pack it
    (inside a handle box) at the top of the window.
    """
    # Actions in 'Exit' stay available at all times; 'NotExit' holds
    # everything else.
    self.m_action_groups['Exit'].add_actions([
        ('FileMenu', None, _('_File')),
        ('AppQuit', 'gtk-quit', None, None, None, self.quit_program),
    ])
    self.m_action_groups['NotExit'].add_actions([
        ('TheoryMenu', None, _('The_ory')),
        ('FrontPagesMenu', None, _('Sele_ct Front Page')),
        ('TheoryIntervals', None, _('_Intervals'), None, None,
            lambda o: solfege.app.handle_href('theory-intervals.html')),
        ('TreeEditor', None, _('_Edit Front Page'), None, None,
            self.do_tree_editor),
        ('ExportTrainingSet', None, _(u'E_xport Exercises to Audio Files…'), None, None,
            self.new_training_set_editor),
        ('EditPractiseSheet', None, _(u'Ear Training Test Pri_ntout…'), None, None,
            self.new_practisesheet_editor),
        ('ProfileManager', None, _("Profile _Manager"), None, None,
            self.open_profile_manager),
        ('OpenPreferencesWindow', 'gtk-preferences', None, '<ctrl>F12', None,
            self.open_preferences_window),
        ('HelpMenu', None, _('_Help')),
        ('Search', 'gtk-search', _('_Search Exercises'), '<ctrl>F', None,
            self.on_search_all_exercises),
        ('FrontPage', None, _('_Front Page'), 'F5', None,
            lambda w: self.display_frontpage()),
        ('TestsPage', None, _('_Tests Page'), 'F6', None,
            lambda w: self.display_testpage()),
        ('RecentExercises', None, _('_Recent Exercises'), 'F7', None,
            self.display_recent_exercises),
        ('RecentTests', None, _('_Recent Tests'), 'F8', None,
            self.display_recent_tests),
        ('UserExercises', None, _('_User Exercises'), 'F9', None,
            self.display_user_exercises),
        ('SetupPyAlsa', None, _("Download and compile ALSA modules"), None, None, self.setup_pyalsa),
        ('HelpHelp', 'gtk-help', _('_Help on the current exercise'), 'F1', None,
            lambda o: solfege.app.please_help_me()),
        ('HelpTheory', None, _('_Music theory on the current exercise'), 'F3', None, lambda o: solfege.app.show_exercise_theory()),
        ('HelpIndex', None, _('_User manual'), None, None,
            lambda o: solfege.app.handle_href('index.html')),
        ('HelpShowPathInfo', None, _('_File locations'), None,
            None, self.show_path_info),
        ('HelpOnline', None, _('_Mailing lists, web page etc.'), None, None,
            lambda o: solfege.app.handle_href('online-resources.html')),
        ('HelpDonate', None, _('_Donate'), None, None,
            lambda o: solfege.app.handle_href('http://www.solfege.org/donate/')),
        ('HelpReportingBugs', None, _('Reporting _bugs'), None, None,
            lambda o: solfege.app.handle_href('bug-reporting.html')),
        ('HelpAbout', 'gtk-about', None, None, None, self.show_about_window),
        ('ShowBugReports', None, _('_See your bug reports'), None, None,
            self.show_bug_reports),
    ])
    self.g_ui_manager.add_ui_from_file("ui.xml")
    self.add_accel_group(self.g_ui_manager.get_accel_group())
    # A handle box lets the user tear off the menu bar.
    hdlbox = Gtk.HandleBox()
    hdlbox.show()
    hdlbox.add(self.g_ui_manager.get_widget('/Menubar'))
    self._vbox.pack_start(hdlbox, False, False, 0)
    # Merge id for the per-exercise help entries; see show_help_on_current().
    self.m_help_on_current_merge_id = None
def create_frontpage_menu(self):
    """
    Create, or update if already existing, the submenu that lets the
    user choose which front page file to display.
    """
    if self.m_frontpage_merge_id:
        self.g_ui_manager.remove_ui(self.m_frontpage_merge_id)
    actions = []
    old_dir = None
    s = "<menubar name='Menubar'><menu action='FileMenu'><menu action='FrontPagesMenu'>"
    for fn in frontpage.get_front_pages_list(solfege.app.m_options.debug):
        if solfege.splash_win:
            solfege.splash_win.show_progress(fn)
        if not frontpage.may_be_frontpage(fn):
            continue
        # Probe the info cache for a usable title; skip the file otherwise.
        # NOTE(review): TypeError appears to signal an unusable entry - confirm.
        try:
            title = lessonfile.infocache.frontpage.get(fn, 'title')
        except TypeError:
            continue
        # Insert a separator whenever we enter a new directory.
        cur_dir = os.path.split(fn)[0]
        if old_dir != cur_dir:
            s += '<separator name="sep@%s"/>' % fn
            old_dir = cur_dir
        s += "<menuitem action='%s'/>\n" % fn
        if not self.m_action_groups['NotExit'].get_action(fn):
            actions.append((fn, None, lessonfile.infocache.frontpage.get(fn, 'title'), None, fn,
                lambda o, f=fn: self.change_frontpage(f)))
        else:
            # The action exists from an earlier call; refresh its label.
            action = self.m_action_groups['NotExit'].get_action(fn)
            action.props.label = lessonfile.infocache.frontpage.get(fn, 'title')
    s += "</menu></menu></menubar>"
    self.m_action_groups['NotExit'].add_actions(actions)
    self.m_frontpage_merge_id = self.g_ui_manager.add_ui_from_string(s)
def show_help_on_current(self):
    """
    Show the menu entries for the exercise help and music theory
    pages on the Help menu.
    """
    # Already merged - nothing to do.
    if self.m_help_on_current_merge_id:
        return
    self.m_help_on_current_merge_id = self.g_ui_manager.add_ui_from_string("""
<menubar name='Menubar'>
<menu action='HelpMenu'>
<placeholder name='PerExerciseHelp'>
<menuitem position='top' action='HelpHelp' />
<menuitem action='HelpTheory' />
</placeholder>
</menu>
</menubar>""")
def hide_help_on_current(self):
    """
    Remove the per-exercise help and music theory entries from the
    Help menu, if they are currently shown.
    """
    merge_id = self.m_help_on_current_merge_id
    if not merge_id:
        return
    self.g_ui_manager.remove_ui(merge_id)
    self.m_help_on_current_merge_id = None
def show_bug_reports(self, *v):
    """
    Ask for the email address used when submitting bugs, then open the
    bug tracker search page for that submitter in a web browser.
    """
    m = Gtk.Dialog(_("Question"), self, 0)
    m.add_button(Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL)
    m.add_button(Gtk.STOCK_OK, Gtk.ResponseType.OK)
    vbox = Gtk.VBox()
    m.vbox.pack_start(vbox, False, False, 0)
    vbox.set_spacing(18)
    vbox.set_border_width(12)
    l = Gtk.Label(label=_("Please enter the email used when you submitted the bugs:"))
    vbox.pack_start(l, False, False, 0)
    self.g_email = Gtk.Entry()
    # Let pressing Enter in the entry activate the default (OK) button.
    m.action_area.get_children()[0].grab_default()
    self.g_email.set_activates_default(True)
    vbox.pack_start(self.g_email, False, False, 0)
    m.show_all()
    ret = m.run()
    m.destroy()
    if ret == Gtk.ResponseType.OK:
        # BUG FIX: the decoded string was erroneously *called* as a function
        # (".decode('utf-8')()"), which raised TypeError at runtime.
        params = urllib.urlencode({
            'pagename': 'SITS-Incoming/SearchBugs',
            'q': 'SITS-Incoming/"Submitter: %s"' % utils.mangle_email(self.g_email.get_text().decode("utf-8")),
        })
        try:
            webbrowser.open_new("http://www.solfege.org?%s" % params)
        except Exception as e:
            self.display_error_message2(_("Error opening web browser"), str(e))
def display_error_message2(self, text, secondary_text):
    """
    Display a modal error dialog with primary and secondary text.
    This is the new version of display_error_message, and it will
    eventually replace the old.

    The splash screen, if visible, is hidden while the dialog is shown
    and re-shown afterwards.
    """
    if solfege.splash_win and solfege.splash_win.props.visible:
        solfege.splash_win.hide()
        reshow_splash = True
    else:
        reshow_splash = False
    # Decode byte strings using the locale encoding so Gtk gets unicode.
    if not isinstance(text, unicode):
        text = text.decode(locale.getpreferredencoding(), 'replace')
    if not isinstance(secondary_text, unicode):
        secondary_text = secondary_text.decode(locale.getpreferredencoding(), 'replace')
    m = Gtk.MessageDialog(None, Gtk.DialogFlags.MODAL, Gtk.MessageType.ERROR,
        Gtk.ButtonsType.CLOSE, text)
    if secondary_text:
        m.format_secondary_text(secondary_text)
    m.run()
    m.destroy()
    if reshow_splash:
        solfege.splash_win.show()
        while Gtk.events_pending():
            Gtk.main_iteration()
def display_error_message(self, msg, title=None, secondary_text=None):
    """
    Display a modal error dialog with `msg` as escaped markup.
    Older variant of display_error_message2; kept for existing callers.
    """
    if solfege.splash_win and solfege.splash_win.props.visible:
        solfege.splash_win.hide()
        reshow_splash = True
    else:
        reshow_splash = False
    # Decode byte strings using the locale encoding so Gtk gets unicode.
    if not isinstance(msg, unicode):
        msg = msg.decode(locale.getpreferredencoding(), 'replace')
    m = Gtk.MessageDialog(None, Gtk.DialogFlags.MODAL, Gtk.MessageType.ERROR,
        Gtk.ButtonsType.CLOSE, None)
    m.set_markup(gu.escape(msg))
    if title:
        m.set_title(title)
    if secondary_text:
        m.format_secondary_text(secondary_text)
    m.run()
    m.destroy()
    if reshow_splash:
        solfege.splash_win.show()
        while Gtk.events_pending():
            Gtk.main_iteration()
def show_path_info(self, w):
    """
    Show (and lazily create on first use) a dialog listing the file
    locations the program uses: data dirs, config file, install dir etc.
    """
    if not self.g_path_info_dlg:
        self.g_path_info_dlg = Gtk.Dialog(_("_File locations").replace("_", ""), self,
            buttons=(Gtk.STOCK_OK, Gtk.ResponseType.ACCEPT))
        sc = Gtk.ScrolledWindow()
        sc.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.NEVER)
        self.g_path_info_dlg.vbox.pack_start(sc, True, True, 0)
        #
        vbox = gu.hig_dlg_vbox()
        sc.add_with_viewport(vbox)
        box1, box2 = gu.hig_category_vbox(_("_File locations").replace("_", ""))
        vbox.pack_start(box1, True, True, 0)
        sizegroup = Gtk.SizeGroup(Gtk.SizeGroupMode.HORIZONTAL)
        # statistics.sqlite
        # win32 solfegerc
        # win32 langenviron.txt
        box2.pack_start(gu.hig_label_widget(_("Solfege application data:"), Gtk.Label(label=filesystem.app_data()), sizegroup), False, False, 0)
        box2.pack_start(gu.hig_label_widget(_("Solfege user data:"), Gtk.Label(label=filesystem.user_data()), sizegroup), False, False, 0)
        box2.pack_start(gu.hig_label_widget(_("Solfege config file:"), Gtk.Label(label=filesystem.rcfile()), sizegroup), False, False, 0)
        box2.pack_start(gu.hig_label_widget(_("Solfege installation directory:"), Gtk.Label(label=os.getcwdu()), sizegroup), False, False, 0)
        box2.pack_start(gu.hig_label_widget(_("User manual in HTML format:"), Gtk.Label(label=os.path.join(os.getcwdu(), "help")), sizegroup), False, False, 0)
        box2.pack_start(gu.hig_label_widget("gtk:", Gtk.Label(label=str(Gtk)), sizegroup), False, False, 0)
        box2.pack_start(gu.hig_label_widget("pyalsa:", Gtk.Label(label=str(alsaseq)), sizegroup), False, False, 0)
        box2.pack_start(gu.hig_label_widget("PYTHONHOME", Gtk.Label(os.environ.get('PYTHONHOME', 'Not defined')), sizegroup), False, False, 0)
        self.g_path_info_dlg.show_all()
        # Hide instead of destroying so the dialog can be reused later.
        def f(*w):
            self.g_path_info_dlg.hide()
            return True
        self.g_path_info_dlg.connect('response', f)
        self.g_path_info_dlg.connect('delete-event', f)
        # Cap the width at 90% of the screen.
        sc.set_size_request(min(vbox.size_request().width + gu.SPACE_LARGE * 2,
                Gdk.Screen.width() * 0.9),
            vbox.size_request().height)
def setup_pyalsa(self, widget):
    """Menu callback: download and compile the pyalsa ALSA modules."""
    download_pyalsa.download()
def show_about_window(self, widget):
    """Show the modal About dialog with logo, credits and license."""
    pixbuf = self.render_icon('solfege-icon', Gtk.IconSize.DIALOG)
    a = self.g_about_window = Gtk.AboutDialog()
    a.set_program_name("GNU Solfege")
    a.set_logo(pixbuf)
    a.set_website("http://www.solfege.org")
    a.set_version(buildinfo.VERSION_STRING)
    a.set_copyright("Copyright (C) 2013 Tom Cato Amundsen and others")
    a.set_license("\n".join((solfege.application.solfege_copyright, solfege.application.warranty)))
    # Using set_license_type causes the app to print warnings.
    #a.set_license_type(Gtk.License.GPL_3_0)
    a.set_authors(["Tom Cato Amundsen",
        'Giovanni Chierico %s' % _("(some lessonfiles)"),
        'Michael Becker %s' % _("(some lessonfiles)"),
        'Joe Lee %s' % _("(sound code for the MS Windows port)"),
        'Steve Lee %s' % _("(ported winmidi.c to gcc)"),
        'Thibaus Cousin %s' % _("(spec file for SuSE 8.2)"),
        'David Coe %s' % _("(spec file cleanup)"),
        'David Petrou %s' % _("(testing and portability fixes for FreeBSD)"),
        'Han-Wen Nienhuys %s' % _("(the music font from Lilypond)"),
        'Jan Nieuwenhuizen %s' % _("(the music font from Lilypond)"),
        'Davide Bonetti %s' % _("(scale exercises)"),
    ])
    a.set_documenters(["Tom Cato Amundsen",
        "Tom Eykens",
    ])
    # An untranslated catalog leaves the placeholder string unchanged.
    if _("SOLFEGETRANSLATORS") == 'SOLFEGETRANSLATORS':
        a.set_translator_credits(None)
    else:
        a.set_translator_credits(_("SOLFEGETRANSLATORS"))
    self.g_about_window.run()
    self.g_about_window.destroy()
def do_tree_editor(self, *v):
    """
    Open a front page editor editing the current front page.
    """
    # *v absorbs the Gtk action callback arguments.
    fpeditor.Editor.edit_file(self.get_string("app/frontpage"))
def post_constructor(self):
    """
    Finish setup that must happen after construction: build the front
    page menu, run one-time data migrations for older versions and warn
    about locale or file-format problems.
    """
    self.m_frontpage_merge_id = None
    self.create_frontpage_menu()
    self.g_ui_manager.add_ui_from_file("help-menu.xml")
    # The pyalsa download entry only makes sense on GNU/Linux.
    if sys.platform != 'linux2':
        self.g_ui_manager.get_widget('/Menubar/HelpMenu/SetupPyAlsa').hide()
    if solfege.app.m_sound_init_exception is not None:
        if solfege.splash_win:
            solfege.splash_win.destroy()
            solfege.splash_win = None
        solfege.app.display_sound_init_error_message(solfege.app.m_sound_init_exception)
    # MIGRATION 3.9.0
    if sys.platform == "win32" \
            and os.path.exists(os.path.join(filesystem.get_home_dir(), "lessonfiles")) \
            and not os.path.exists(filesystem.user_lessonfiles()):
        if solfege.splash_win:
            solfege.splash_win.hide()
        do_move = gu.dialog_yesno(_('In Solfege 3.9.0, the location where Solfege look for lesson files you have created was changed. The files has to be moved from "%(old)s" and into the folder "%(gnu)s" in your "%(doc)s" folder.\nMay I move the files automatically for you now?' % {
            'doc': os.path.split(os.path.split(filesystem.user_data())[0])[1],
            'gnu': os.path.join(filesystem.appname, 'lessonfiles'),
            'old': os.path.join(filesystem.get_home_dir(), "lessonfiles"),
        }), parent=self)
        if do_move:
            try:
                os.makedirs(filesystem.user_data())
                shutil.copytree(os.path.join(filesystem.get_home_dir(), "lessonfiles"),
                    os.path.join(filesystem.user_data(), "lessonfiles"))
            except (OSError, shutil.Error), e:
                gu.dialog_ok(_("Error while copying directory:\n%s" % e))
            else:
                gu.dialog_ok(_("Files copied. The old files has been left behind. Please delete them when you have verified that all files was copied correctly."))
        if solfege.splash_win:
            solfege.splash_win.show()
    # MIGRATION 3.9.3 when we added langenviron.bat and in 3.11
    # we migrated to langenviron.txt because we does not use cmd.exe
    if sys.platform == 'win32' and winlang.win32_get_langenviron() != self.get_string('app/lc_messages'):
        gu.dialog_ok(_("Migrated old language setup. You might have to restart the program all translated messages to show up."))
        winlang.win32_put_langenviron(self.get_string('app/lc_messages'))
    # MIGRATION 3.11.1: earlier editors would create new learning trees
    # below app_data() instead of user_data().
    if (sys.platform == "win32" and
            os.path.exists(os.path.join(filesystem.app_data(),
                "learningtrees"))):
        if not os.path.exists(os.path.join(filesystem.user_data(), "learningtrees")):
            os.makedirs(os.path.join(filesystem.user_data(), "learningtrees"))
        for fn in os.listdir(os.path.join(filesystem.app_data(), "learningtrees")):
            if not os.path.exists(os.path.join(filesystem.user_data(), "learningtrees", fn)):
                shutil.move(os.path.join(filesystem.app_data(), "learningtrees", fn),
                    os.path.join(filesystem.user_data(), "learningtrees"))
            else:
                # We add the .bak exstention if the file already exists.
                shutil.move(os.path.join(filesystem.app_data(), "learningtrees", fn),
                    os.path.join(filesystem.user_data(), "learningtrees", u"%s.bak" % fn))
        os.rmdir(os.path.join(os.path.join(filesystem.app_data(), "learningtrees")))
    # Rebuild the front page submenu each time it is opened, so newly
    # created files show up.
    item = self.g_ui_manager.get_widget("/Menubar/FileMenu/FrontPagesMenu")
    item.connect('activate', lambda s: self.create_frontpage_menu())
    try:
        # i18n.locale_setup_failed only exists when locale setup failed.
        i18n.locale_setup_failed
        print >> sys.stderr, "\n".join(textwrap.wrap("Translations are disabled because your locale settings are broken. This is not a bug in GNU Solfege, so don't report it. The README file distributed with the program has some more details."))
    except AttributeError:
        pass
    for filename in lessonfile.infocache.frontpage.iter_old_format_files():
        gu.dialog_ok(_("Cannot load front page file"), None,
            _(u"The file «%s» is saved in an old file format. The file can be converted by editing and saving it with an older version of Solfege. Versions from 3.16.0 to 3.20.4 should do the job.") % filename)
def activate_exercise(self, module, urlobj=None):
    """
    Show the view for `module` and, when the view uses a notebook,
    select the page requested by `urlobj` (practise/config/statistics).
    """
    self.show_view(module)
    # We need this test because not all exercises use a notebook.
    notebook = self.get_view().g_notebook
    if notebook:
        pages = ['practise', 'config', 'statistics']
        if urlobj and urlobj.action in pages:
            notebook.set_current_page(pages.index(urlobj.action))
        else:
            notebook.set_current_page(0)
    self.set_title("Solfege - " + self.get_view().m_t.m_P.header.title)
def display_docfile(self, fn):
    """
    Display the HTML file named by `fn` in the help browser window,
    preferring the user's manual language and falling back to "C".
    """
    for lang in (solfege.app.m_userman_language, "C"):
        filename = os.path.join(os.getcwdu(), u"help", lang, fn)
        if os.path.isfile(filename):
            break
    try:
        webbrowser.open(filename)
    except Exception as e:
        self.display_error_message2(_("Error opening web browser"), str(e))
def display_user_exercises(self, w):
    """
    Build and display a front page listing the user's own lesson files,
    grouped into one link list per directory.
    """
    col = frontpage.Column()
    page = frontpage.Page(_('User exercises'), col)
    curdir = None
    linklist = None
    for filename in lessonfile.infocache.iter_user_files(only_user_collection=True):
        dir, fn = os.path.split(filename)
        if dir != curdir:
            # Entered a new directory: start a new link list for it.
            curdir = dir
            linklist = frontpage.LinkList(dir)
            col.append(linklist)
        linklist.append(filename)
    if os.path.isdir(filesystem.user_lessonfiles()):
        linklist = None
        col.append(frontpage.Paragraph(_('You really should move the following directory to a directory below <span font_family="monospace">%s</span>. Future versions of GNU Solfege will not display files in the old location. The user manual have details on where to place the files.') % os.path.join(filesystem.user_data(), u'exercises')))
        # Added just to be nice with people not moving their files from
        # pre 3.15.3 location:
        for filename in os.listdir(filesystem.user_lessonfiles()):
            if not linklist:
                linklist = frontpage.LinkList(filesystem.user_lessonfiles())
            linklist.append(os.path.join(filesystem.user_lessonfiles(), filename))
        # only display the linklist if there are any files.
        if linklist:
            col.append(linklist)
    self.display_frontpage(page)
def display_recent_exercises(self, w):
    """Show a front page listing the 8 most recently used exercises."""
    data = frontpage.Page(_('Recent exercises'),
        [frontpage.Column(
            [frontpage.LinkList(_('Recent exercises'),
                solfege.db.recent(8))])])
    self.display_frontpage(data, show_topics=True)
    # The search box searches the real front-page data, not this
    # generated page, so hide it.
    self.get_view().g_searchbox.hide()
def display_recent_tests(self, w):
    """Show the tests page listing the 8 most recently run tests."""
    data = frontpage.Page(_('Recent tests'),
        [frontpage.Column(
            [frontpage.LinkList(_('Recent tests'),
                solfege.db.recent_tests(8))])])
    self.display_testpage(data, show_topics=True)
    # This generated page is not searchable; hide the search box.
    self.get_view().g_searchbox.hide()
def display_testpage(self, data=None, show_topics=False):
    """
    Display the front page of the data in solfege.app.m_frontpage_data
    in test mode, creating the TestsView on first use.
    """
    self.set_title("GNU Solfege - tests")
    if not self.show_view('testspage'):
        # First call: the view does not exist yet, so create and register it.
        p = TestsView()
        p.connect('link-clicked', self.history_handler)
        self.add_view(p, 'testspage')
    self.get_view().g_searchbox.show()
    if not data:
        data = solfege.app.m_frontpage_data
    self.trim_history(self.get_view(), data)
    self.get_view().display_data(data, show_topics=show_topics)
def on_search_all_exercises(self, widget=None):
    """Show the global search view (searches every lesson file found)."""
    self.set_title("GNU Solfege")
    if not self.show_view('searchview'):
        # Created lazily on first use.
        self.add_view(SearchView(_('Search the exercise titles of all lesson files found by the program, not just the active front page with sub pages.')), 'searchview')
def display_frontpage(self, data=None, show_topics=False):
    """
    Display the front page of the data in solfege.app.m_frontpage_data,
    creating the FrontPage view on first use.
    """
    # Include the profile name in the title when a profile is active.
    if solfege.app.m_options.profile:
        self.set_title("GNU Solfege - %s" % solfege.app.m_options.profile)
    else:
        self.set_title("GNU Solfege")
    if not self.show_view('frontpage'):
        # First call: create the view and register it.
        p = FrontPage()
        p.connect('link-clicked', self.history_handler)
        self.add_view(p, 'frontpage')
    self.get_view().g_searchbox.show()
    if not data:
        data = solfege.app.m_frontpage_data
    self.trim_history(self.get_view(), data)
    self.get_view().display_data(data, show_topics=show_topics)
def trim_history(self, new_viewer, new_page):
    """Truncate the back-navigation history before displaying a page.

    History is cut just before the first entry whose viewer differs
    from new_viewer or whose page equals new_page, so re-visiting a
    page does not grow the history.
    """
    for idx, (viewer, page) in enumerate(self.m_history):
        if viewer != new_viewer or page == new_page:
            self.m_history = self.m_history[:idx]
            break
def history_handler(self, *args):
    """'link-clicked' signal handler: record the (viewer, page) pair."""
    self.m_history.append(args)
def initialise_exercise(self, teacher):
    """
    Create a Gui object for the exercise and add it to
    the box_dict dict.  Must only be called once per exercise name.
    """
    assert teacher.m_exname not in self.box_dict
    # Hide the current view before adding the new exercise view.
    self.get_view().hide()
    m = solfege.app.import_module(teacher.m_exname)
    self.add_view(m.Gui(teacher), teacher.m_exname)
def on_key_press_event(self, widget, event):
    """Handle Alt+Left as 'go back in history'; else forward to the view.

    Returns True when the event was consumed by the history navigation.
    """
    try:
        view = self.get_view()
    except KeyError:
        # No view registered yet; nothing to do.
        return
    if (event.type == Gdk.EventType.KEY_PRESS
        and event.get_state() & Gdk.ModifierType.MOD1_MASK == Gdk.ModifierType.MOD1_MASK  # Alt key
        and event.keyval in (Gdk.KEY_KP_Left, Gdk.KEY_Left)
        and self.m_history
        and not solfege.app.m_test_mode):
        obj, page = self.m_history[-1]
        self.trim_history(obj, page)
        # Find the box_dict key for obj so we can switch to that view.
        for k, o in self.box_dict.items():
            if o == obj:
                obj.display_data(page)
                self.show_view(k)
                break
        return True
    # Not a history shortcut: let the active view handle the key press.
    view.on_key_press_event(widget, event)
def open_profile_manager(self, widget=None):
    """Run the profile chooser dialog and switch to the selected profile."""
    p = ChangeProfileDialog(solfege.app.m_options.profile)
    if p.run() == Gtk.ResponseType.ACCEPT:
        prof = p.get_profile()
    else:
        # The user presses cancel. This will use the same profile as
        # before, but if the user has renamed the active profile, then
        # we need to use the new name.
        prof = p.m_default_profile
    solfege.app.reset_exercise()
    solfege.app.m_options.profile = prof
    # Reopen the statistics database for the selected profile.
    solfege.db.conn.commit()
    solfege.db.conn.close()
    solfege.db = statistics.DB(None, profile=prof)
    # Remember the choice for the next program start.
    cfg.set_string("app/last_profile", prof)
    self.display_frontpage()
    p.destroy()
def open_preferences_window(self, widget=None):
    """Present the preferences window, creating it on first use.

    On subsequent calls the statistics info shown in the window is
    refreshed before presenting it.
    """
    if self.g_config_window:
        self.g_config_window.update_old_statistics_info()
        self.g_config_window.update_statistics_info()
    else:
        self.g_config_window = ConfigWindow()
    self.g_config_window.show()
def quit_program(self, *w):
    """Close all editor dialogs and quit the application.

    Returns True (to stop further signal handling) when quitting is
    cancelled because an editor dialog refused to close.
    """
    can_quit = True
    for dlg in gu.EditorDialogBase.instance_dict.values():
        if dlg.close_window():
            dlg.destroy()
        else:
            # The user cancelled closing this dialog; abort quitting.
            can_quit = False
            break
    if can_quit:
        solfege.app.quit_program()
        Gtk.main_quit()
    else:
        return True
def display_in_musicviewer(self, music):
    """Show the given music in the music viewer window, creating it on demand."""
    viewer = self.g_musicviewer_window
    if not viewer:
        viewer = MusicViewerWindow()
        self.g_musicviewer_window = viewer
        viewer.show()
    viewer.display_music(music)
def close_musicviewer(self, widget=None):
    """Destroy the music viewer window and drop the reference to it."""
    self.g_musicviewer_window.destroy()
    self.g_musicviewer_window = None
def enter_test_mode(self):
    """Switch the current exercise view into test mode.

    The practise page is reparented out of the notebook into the main
    box; exit_test_mode() reverses this.
    """
    if not hasattr(self.get_view(), 'enter_test_mode'):
        # Bug fix: the '%' substitution used to happen *inside* _(), so
        # the already-formatted string was looked up in the translation
        # catalog and never found.  Translate the literal msgid first,
        # then format.
        gu.dialog_ok(_("The '%s' exercise module does not support test yet.") % self.m_viewer)
        return
    self.m_action_groups['NotExit'].set_sensitive(False)
    # Keep a reference to the practise page so exit_test_mode() can
    # reparent it back into the notebook.
    self.g = self.get_view().g_notebook.get_nth_page(0)
    self.get_view().g_notebook.get_nth_page(0).reparent(self.main_box)
    self.get_view().g_notebook.hide()
    self.get_view().enter_test_mode()
def exit_test_mode(self):
    """Restore the practise page into the notebook after a test run."""
    solfege.app.m_test_mode = False
    self.m_action_groups['NotExit'].set_sensitive(True)
    # Re-create the notebook page and move the practise widget
    # (saved as self.g by enter_test_mode()) back into it.
    box = Gtk.VBox()
    self.get_view().g_notebook.insert_page(box, Gtk.Label(label=_("Practise")), 0)
    self.g.reparent(box)
    self.get_view().g_notebook.show()
    self.get_view().g_notebook.get_nth_page(0).show()
    self.get_view().g_notebook.set_current_page(0)
    self.get_view().exit_test_mode()
def new_training_set_editor(self, widget):
    """Open a new training-set editor dialog."""
    dlg = TrainingSetDialog()
    dlg.show_all()
def new_practisesheet_editor(self, widget):
    """Open a new practise-sheet editor dialog."""
    dlg = PractiseSheetDialog()
    dlg.show_all()
|
reisalex/test-sfm
|
setup.py
|
"""
Python-packaging for synbiomts
Copyright 2017 Alexander C. Reis, Howard M. Salis, all rights reserved.
"""
from setuptools import setup
def readme():
with open('README.rst') as f:
return f.read()
setup(name='synbiomts',
version='1.0',
description='Test suite for DNA sequence-function models',
url='http://github.com/reisalex/SynBioMTS',
author='Alexander C. Reis',
author_email='alex.reis@psu.edu',
license='MIT',
packages=['synbiomts'],
# install_requires=['numpy','scipy','pandas','biopython'],
zip_safe=False)
|
NicovincX2/Python-3.5
|
Physique/Physique quantique/Mécanique quantique/principe_de_superposition_lineaire.py
|
# -*- coding: utf-8 -*-
import os
# NOTE(review): because of the `import os` above, the string below is a
# plain expression statement, not the module docstring.
"""
Illustration d'un exercice de TD visant à montrer l'évolution temporelle de la
densité de probabilité pour la superposition équiprobable d'un état n=1 et
d'un état n quelconque (à fixer) pour le puits quantique infini.
Par souci de simplicité, on se débrouille pour que E_1/hbar = 1
"""
import numpy as np                # numerical toolbox
import matplotlib.pyplot as plt   # plotting toolbox
from matplotlib import animation  # for the progressive animation

# Second state n to observe (choose freely)
n = 2

# Set (almost) all parameters to 1 so that E_1/hbar = 1
t0 = 0
dt = 0.1
L = 1
hbar = 1
h = hbar * 2 * np.pi
m = (2 * np.pi)**2
E1 = h**2 / (8 * m * L**2)
# NOTE(review): for the infinite square well E_n = n**2 * E1;
# `n * E1` looks suspicious -- confirm against the exercise statement.
En = n * E1
x = np.linspace(0, L, 1000)


def psi1(x, t):
    # Time-dependent wavefunction of the n=1 state (unnormalized).
    return np.sin(np.pi * x / L) * np.exp(1j * E1 * t / hbar)


def psin(x, t):
    # Time-dependent wavefunction of the n-th state (unnormalized).
    return np.sin(n * np.pi * x / L) * np.exp(1j * En * t / hbar)


def psi(x, t):
    # Equal-weight superposition of the two states.
    return 1 / L**0.5 * (psi1(x, t) + psin(x, t))


fig = plt.figure()
line, = plt.plot(x, abs(psi(x, t0))**2)
plt.title('$t={}$'.format(t0))
plt.ylabel('$|\psi(x,t)|^2$')
plt.xlabel('$x$')
# Also show the stationary densities of the two component states.
plt.plot(x, abs(psi1(x, t0))**2, '--', label='$|\psi_1|^2$')
plt.plot(x, abs(psin(x, t0))**2, '--', label='$|\psi_{}|^2$'.format(n))
plt.legend()


def init():
    # No special initialisation needed for the animation.
    pass


def animate(i):
    # Advance time and redraw the superposition density.
    t = i * dt + t0
    line.set_ydata(abs(psi(x, t))**2)
    plt.title('$t={}$'.format(t))


anim = animation.FuncAnimation(fig, animate, frames=1000, interval=20)
plt.show()
# NOTE(review): "pause" is a Windows cmd builtin; on other platforms this
# just prints an error -- confirm whether it is still wanted.
os.system("pause")
|
DragonRoman/rhevm-utils
|
3.0/hooks/directlun/before_vm_migrate_destination.py
|
#!/usr/bin/python
import os
import sys
import grp
import pwd
import traceback
import utils
import hooking
DEV_MAPPER_PATH = "/dev/mapper"
DEV_DIRECTLUN_PATH = '/dev/directlun'
def createdirectory(dirpath):
    """Create dirpath (including parents) with mode 755.

    We don't use os.mkdir/os.chmod because the hook needs root
    privileges, so the commands are run through sudo.  Any failure
    aborts the hook with exit code 2.
    """
    command = ['/bin/mkdir', '-p', dirpath]
    retcode, out, err = utils.execCmd(command, sudo=True, raw=True)
    if retcode != 0:
        sys.stderr.write('directlun: error mkdir %s, err = %s\n' % (dirpath, err))
        sys.exit(2)
    mode = '755'
    command = ['/bin/chmod', mode, dirpath]
    # Bug fix: the chmod command was built but never executed; the old
    # code re-tested the retcode left over from the mkdir call above.
    retcode, out, err = utils.execCmd(command, sudo=True, raw=True)
    if retcode != 0:
        sys.stderr.write('directlun: error chmod %s %s, err = %s\n' % (dirpath, mode, err))
        sys.exit(2)
def cloneDeviceNode(srcpath, devpath):
    """Clone a device node into a temporary private location.

    Recreates the block device node at devpath with the same
    major/minor numbers as srcpath, mode 660, owned by qemu:qemu.
    Any failure aborts the hook with exit code 2.
    """
    # we don't use os.remove/mknod/chmod/chown because we need sudo
    command = ['/bin/rm', '-f', devpath]
    retcode, out, err = utils.execCmd(command, sudo=True, raw=True)
    if retcode != 0:
        sys.stderr.write('directlun: error rm -f %s, err = %s\n' % (devpath, err))
        sys.exit(2)
    # Copy the source node's device numbers onto the new node.
    stat = os.stat(srcpath)
    major = os.major(stat.st_rdev)
    minor = os.minor(stat.st_rdev)
    command = ['/bin/mknod', devpath, 'b', str(major), str(minor)]
    retcode, out, err = utils.execCmd(command, sudo=True, raw=True)
    if retcode != 0:
        sys.stderr.write('directlun: error mknod %s, err = %s\n' % (devpath, err))
        sys.exit(2)
    mode = '660'
    command = ['/bin/chmod', mode, devpath]
    retcode, out, err = utils.execCmd(command, sudo=True, raw=True)
    if retcode != 0:
        sys.stderr.write('directlun: error chmod %s to %s, err = %s\n' % (devpath, mode, err))
        sys.exit(2)
    # qemu must own the node so the VM process can open it.
    group = grp.getgrnam('qemu')
    gid = group.gr_gid
    user = pwd.getpwnam('qemu')
    uid = user.pw_uid
    owner = str(uid) + ':' + str(gid)
    command = ['/bin/chown', owner, devpath]
    retcode, out, err = utils.execCmd(command, sudo=True, raw=True)
    if retcode != 0:
        sys.stderr.write('directlun: error chown %s to %s, err = %s\n' % (devpath, owner, err))
        sys.exit(2)
# Hook entry point: runs when the VM has a 'directlun' custom property.
# The property holds a comma-separated list of LUN names, each optionally
# followed by ':option;option...'.
if os.environ.has_key('directlun'):
    try:
        luns = os.environ['directlun']
        domxml = hooking.read_domxml()
        createdirectory(DEV_DIRECTLUN_PATH)
        for lun in luns.split(','):
            try:
                lun, options = lun.split(':')
            except ValueError:
                # No options given for this LUN.
                options = ''
            options = options.split(';')
            srcpath = DEV_MAPPER_PATH + '/' + lun
            if not os.path.exists(srcpath):
                sys.stderr.write('directlun before_vm_migration_destination: device not found %s\n' % srcpath)
                sys.exit(2)
            # Device nodes are cloned per-VM, suffixed with the VM uuid.
            uuid = domxml.getElementsByTagName('uuid')[0]
            uuid = uuid.childNodes[0].nodeValue
            devpath = DEV_DIRECTLUN_PATH + '/' + lun + '-' + uuid
            cloneDeviceNode(srcpath, devpath)
        hooking.write_domxml(domxml)
    except:
        # Any unexpected failure must abort the migration (exit code 2).
        sys.stderr.write('directlun before_vm_migration_destination: [unexpected error]: %s\n' % traceback.format_exc())
        sys.exit(2)
|
Eulercoder/fabulous
|
fabulous/services/google.py
|
"""~google <search term> will return three results from the google search for <search term>"""
import re
import requests
from random import shuffle
from googleapiclient.discovery import build
import logging
from secret_example import GOOGLE_CUSTOM_SEARCH_ENGINE, GOOGLE_SEARCH_API
"""fuction to fetch data from Google Custom Search Engine API"""
def google(searchterm, api_key, cse_id, **kwargs):
    """Run a Google Custom Search query and return the raw 'items' list.

    Extra kwargs are passed through to cse().list() (e.g. num=10).
    NOTE(review): raises KeyError when the response carries no 'items'
    (zero results) -- confirm callers handle that.
    """
    service = build("customsearch", "v1", developerKey=api_key, cache_discovery=False)
    res = service.cse().list(q=searchterm, cx=cse_id, **kwargs).execute()
    return res['items']
"""fuction to return first three search results"""
def google_search(searchterm):
    """Return the links of the first (up to) three results, one per line.

    Fetches up to 10 results from the Custom Search API and keeps the
    first three.  Each link is followed by a newline, matching the
    previous output format exactly.
    """
    results = google(searchterm, GOOGLE_SEARCH_API, GOOGLE_CUSTOM_SEARCH_ENGINE, num=10)
    # Slicing handles both the "<3 results" and ">=3 results" cases that
    # the old code special-cased with two duplicated loops.
    return "".join(item['link'] + "\n" for item in results[:3])
def on_message(msg, server):
    """Reply to '~google <term>' messages with the top search links.

    Returns None when the message does not contain the command.
    """
    match = re.search(r"~google (.*)", msg.get("text", ""))
    if match is None:
        return None
    return google_search(match.group(1))
on_bot_message = on_message
|
molpopgen/fwdpy11
|
fwdpy11/_functions/simplify_tables.py
|
from typing import List, Tuple, Union
import fwdpy11._fwdpy11
import fwdpy11._types
import numpy as np
def simplify(pop, samples):
    """
    Simplify a TableCollection stored in a Population.
    :param pop: A :class:`fwdpy11.DiploidPopulation`
    :param samples: A list of samples (node indexes).
    :return: The simplified tables and array mapping input sample IDs to output IDS
    :rtype: tuple
    Note that the samples argument is agnostic with respect to the time of
    the nodes in the input tables. Thus, you may do things like simplify
    to a set of "currently-alive" nodes plus some or all ancient samples by
    including some node IDs from
    :attr:`fwdpy11.DiploidPopulation.ancient_sample_metadata`.
    If the input contains ancient samples, and you wish to include them in the output,
    then you need to include their IDs in the samples argument.
    .. note::
        Due to node ID remapping, the metadata corresponding to nodes becomes a bit more
        difficult to look up.  You need to use the output ID map, the original IDs, and
        the population's metadata containers.
    .. deprecated:: 0.3.0
        Prefer :func:`fwdpy11.simplify_tables`
    .. versionchanged:: 0.3.0
        Ancient samples are no longer kept by default
    .. versionchanged:: 0.5.0
        No longer requires a :class:`MutationVector` argument.
    """
    import warnings

    # Deprecated shim: warn, forward to the low-level simplifier, and
    # re-wrap the result in the Python-level TableCollection type.
    warnings.warn(
        "This function is deprecated and will be removed soon. Please use fwdpy11.simplify_tables instead",
        category=FutureWarning,
    )
    ll_t, idmap = fwdpy11._fwdpy11._simplify(pop, samples)
    return fwdpy11._types.TableCollection(ll_t), idmap
def simplify_tables(
    tables: fwdpy11._types.TableCollection, samples: Union[List, np.ndarray]
) -> Tuple[fwdpy11._types.TableCollection, np.ndarray]:
    """
    Simplify a TableCollection.

    :param tables: A table collection.
    :type tables: :class:`fwdpy11.TableCollection`
    :param samples: list of samples (node indexes)
    :type samples: list-like or array-like
    :returns: A simplified TableCollection and an array of remapped sample ids.
    :rtype: tuple

    .. versionadded:: 0.3.0
    """
    simplified_ll, sample_id_map = fwdpy11._fwdpy11._simplify_tables(tables, samples)
    return fwdpy11._types.TableCollection(simplified_ll), sample_id_map
|
GooogIe/VarasTG
|
plugins/btc.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Btc plugin for Varas
Author: Neon & A Sad Loner
Last modified: November 2016
"""
import urllib2
from plugin import Plugin
name = 'Bitcoin'
class Bitcoin(Plugin):
    # Bot plugin that reports the balance of a Bitcoin wallet using the
    # blockchain.info addressbalance HTTP API (Python 2 / urllib2).
    def __init__(self):
        Plugin.__init__(self,"bitcoin","<wallet> Return current balance from a Bitcoin wallet","A Sad Loners",1.0)
    def run(self,address):
        """Return the balance of `address` in BTC, as a display string."""
        #1btc = 100000000satoshi
        print "https://blockchain.info/it/q/addressbalance/"+address
        try:
            api = urllib2.urlopen("https://blockchain.info/it/q/addressbalance/"+address)
        except:
            # NOTE(review): bare except also swallows KeyboardInterrupt;
            # consider catching urllib2.URLError -- confirm before changing.
            return "Unknown Error"
        # The API returns the balance in satoshi as plain text.
        resp = api.read()
        satoshi = float(resp)
        btc = satoshi/100000000
        return "Balance: " + str(btc)
|
berndf/avg_q
|
python/avg_q/Presentation.py
|
# Copyright (C) 2013 Bernd Feige
# This file is part of avg_q and released under the GPL v3 (see avg_q/COPYING).
"""
Presentation utilities.
"""
from . import trgfile
class PresLog(object):
    # Basic log file reading for Presentation .log files.
    #
    # The file starts with a 'Scenario - <name>' line and, usually, a
    # 'Logfile written - <timestamp>' line, followed by tab-separated
    # tables; `part` selects which table is exposed by iteration.
    def __init__(self,logfile,part='events'):
        '''part can be 'events' or 'trials' for the first or second part'''
        self.logfile=logfile
        self.log=open(self.logfile,"r")
        fileheader=next(self.log).rstrip('\r\n')
        if not fileheader.startswith('Scenario -'):
            raise Exception("PresLog: File doesn't start with 'Scenario'")
        self.scenario=fileheader[11:]
        #print("Scenario: %s" % self.scenario)
        fileheader2=next(self.log).rstrip('\r\n')
        #print("fileheader2: %s" % fileheader2)
        if fileheader2.startswith('Logfile written - '):
            import datetime
            self.timestamp=datetime.datetime.strptime(fileheader2[18:],"%m/%d/%Y %H:%M:%S")
            #print(self.timestamp)
        else:
            self.timestamp=None
        # First column value(s) that mark the requested table's header row.
        table_start=['Subject','Trial'] if part=='events' else ['Event Type']
        self.header_fields=None
        # Advance the file position to just after the table's header line;
        # iteration (below) then starts at the first data row.
        for line in self.log:
            fields=line.rstrip('\r\n').split('\t')
            if len(fields)<=1: continue
            if self.header_fields is None:
                # The first table is skipped...
                if fields[0] in table_start:
                    self.header_fields=fields
                    self.atstart=True
                    break
    def __iter__(self):
        # Yield the data rows (lists of fields) of the selected table,
        # stopping at the first empty line after data has started.
        for line in self.log:
            fields=line.rstrip('\r\n').split('\t')
            if len(fields)<=1:
                # Only at the start skip empty line(s)
                if self.atstart: continue
                else: break
            self.atstart=False
            yield fields
    def __del__(self):
        self.close()
    def close(self):
        # Idempotent close of the underlying file.
        if self.log:
            self.log.close()
            self.log=None
class PresLogfile(trgfile.trgfile):
    # Adapter exposing a Presentation log as a trgfile trigger source.
    def __init__(self,logfile,part='events'):
        self.PL=PresLog(logfile,part)
        trgfile.trgfile.__init__(self,self.PL)
        # NOTE(review): Sfreq 10000.0 implies time stamps in 0.1 ms
        # units -- confirm against the Presentation documentation.
        self.preamble['Sfreq']=10000.0
    def rdr(self):
        # Translate log rows into (point, code, description) tuples.
        for fields in self.reader:
            data=dict(zip(self.PL.header_fields,fields))
            point=int(data['Time'])
            description=data['Event Type']
            try:
                code=int(data['Code'])
            except:
                # Non-numeric code: flag with -1 and keep the raw code
                # text in the description.
                code= -1
                description=' '.join([description,data['Code']])
            yield (point, code, description)
    def close(self):
        # Idempotent close of the wrapped PresLog.
        if self.PL:
            self.PL.close()
            self.PL=None
    def gettuples_abstime(self):
        # We are calculating backwards from the time the log was written, which is given
        # in local time, and it may happen that a DST switch occurred between start and end.
        # Most plots, simply working for a given time from the start, are totally okay if you don't
        # mind that the end times are still in the old frame, but since the local time here may
        # already be in the new frame we have to correct to achieve this "work-from-start" behavior.
        import pytz
        tuples=self.gettuples()
        sfreq=float(self.preamble.get('Sfreq'))
        # Duration from first trigger to the last one, in seconds.
        last_s=pytz.datetime.timedelta(seconds=tuples[-1][0]/sfreq)
        tz_aware_end=pytz.timezone('Europe/Berlin').localize(self.PL.timestamp)
        # This computes the correct local start time considering a possible DST switch and
        # converts it to the TZ-unaware local time we really want...
        self.start_datetime=tz_aware_end.tzinfo.normalize(tz_aware_end-last_s).replace(tzinfo=None)
        return trgfile.trgfile.gettuples_abstime(self)
|
gjbex/parameter-weaver
|
src/fortran_parser_test.py
|
#!/usr/bin/env python
#
# ParameterWeaver: a code generator to handle command line parameters
# and configuration files for C/C++/Fortran/R/Octave
# Copyright (C) 2013 Geert Jan Bex <geertjan.bex@uhasselt.be>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
'''Module to test the parameter definition parser for Fortran'''
import unittest
from vsc.parameter_weaver.params import Parameter, ParameterParser, WeaverError
from vsc.parameter_weaver.base_validator import BaseValidator, ParameterDefinitionError
from vsc.parameter_weaver.fortran.types import Integer, DoublePrecision, CharacterArray
from vsc.parameter_weaver.fortran.validator import Validator
class FortranParserTest(unittest.TestCase):
    '''Tests for Fortran parameter definition parser'''

    def setUp(self):
        '''Set up parameter lists to be expected from valid files'''
        self._parameters = [
            Parameter(Integer(), 'a', '10'),
            Parameter(DoublePrecision(), 'f', '0.19D00'),
            Parameter(CharacterArray(), 'str', 'abcde')
        ]
        self._parameters_w_description = [
            Parameter(Integer(), 'a', '10'),
            Parameter(DoublePrecision(), 'f', '0.19D00', 'relative error'),
            Parameter(CharacterArray(), 'str', 'a;bcde', 'string to print')
        ]
        self._parser = ParameterParser(Validator())

    def test_simple_tab_separated_valid(self):
        '''Parse a simple file that is well-formed and valid'''
        # Removed an unused local (`t = CharacterArray()`) left over from
        # an earlier revision.
        file_name = 'tests/good_fortran.txt'
        try:
            self.assertEqual(self._parameters, self._parser.parse(file_name))
        except ParameterDefinitionError as error:
            self.fail(str(error))
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
bckwltn/SickRage
|
sickbeard/providers/womble.py
|
# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
import time
import sickbeard
import generic
from sickbeard import logger
from sickbeard import tvcache
from sickbeard.exceptions import AuthException
class WombleProvider(generic.NZBProvider):
    # NZB provider for Womble's Index (newshost.co.za).
    def __init__(self):
        generic.NZBProvider.__init__(self, "Womble's Index")
        self.enabled = False  # disabled until the user turns it on
        self.cache = WombleCache(self)
        self.urls = {'base_url': 'https://newshost.co.za/'}
        self.url = self.urls['base_url']
    def isEnabled(self):
        # Accessor used by the provider framework.
        return self.enabled
class WombleCache(tvcache.TVCache):
    # RSS cache for Womble's Index SD and HD TV feeds.
    def __init__(self, provider):
        tvcache.TVCache.__init__(self, provider)
        # only poll Womble's Index every 15 minutes max
        self.minTime = 15
    def updateCache(self):
        """Fetch both RSS feeds and insert the new items into the cache DB."""
        # check if we should update
        if not self.shouldUpdate():
            return
        # clear cache
        self._clearCache()
        # set updated
        self.setLastUpdate()
        cl = []
        for url in [self.provider.url + 'rss/?sec=tv-sd&fr=false', self.provider.url + 'rss/?sec=tv-hd&fr=false']:
            logger.log(u"Womble's Index cache update URL: " + url, logger.DEBUG)
            for item in self.getRSSFeed(url)['entries'] or []:
                ci = self._parseItem(item)
                if ci is not None:
                    cl.append(ci)
        # Batch all inserts into a single mass_action call.
        if len(cl) > 0:
            myDB = self._getDB()
            myDB.mass_action(cl)
    def _checkAuth(self, data):
        # The feed title is 'Invalid Link' when the request was rejected.
        return data if data['feed'] and data['feed']['title'] != 'Invalid Link' else None
# Module-level singleton picked up by SickRage's provider discovery.
provider = WombleProvider()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.